diff --git a/.gitignore b/.gitignore index 3218e92..af496b7 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,9 @@ *.aps **/ipch +*.tpr +*.tpd + # for CMake CMakeFiles cmake_install.cmake @@ -28,8 +31,10 @@ __pycache__ **/.idea/misc.xml **/.idea/dictionaries **/.idea/watcherTasks.xml +**/.idea/dataSources.* **/.idea/inspectionProfiles **/.idea/codeStyles +**/.idea/dataSources **/.idea/inspectionProfiles **/.idea/vcs.xml **/.idea/modules.xml @@ -65,7 +70,6 @@ __pycache__ /server/share/db /server/share/log /server/share/replay -/server/testssh # for generated files. @@ -78,9 +82,7 @@ __pycache__ /client/tp_rdp /server/tp_core/protocol/rdp /client/tools/tprdp -/server/tp_core/testssh -/client/tp_assist_win_it_doctor -/dist/client/windows/assist-it-doctor +/client/build-tp-player-* # for MacOS. .DS_Store @@ -102,3 +104,7 @@ profile *.moved-aside /server/share/tmp +/server/tp_core/testssh/Debug +/server/tp_core/testssh/Release +/external/zlib +/client/tools/qt-redist diff --git a/.idea/encodings.xml b/.idea/encodings.xml index f49c862..8f8d6ca 100644 --- a/.idea/encodings.xml +++ b/.idea/encodings.xml @@ -21,6 +21,7 @@ + @@ -31,6 +32,7 @@ + diff --git a/CMakeCfg.txt b/CMakeCfg.txt index b4bbbc5..b80f03b 100644 --- a/CMakeCfg.txt +++ b/CMakeCfg.txt @@ -14,15 +14,16 @@ if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin") MESSAGE(STATUS "build on macOS...") set(OS_MACOS 1) set(OS_POSIX 1) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") set(TP_EXTERNAL_RELEASE_DIR "${PROJECT_SOURCE_DIR}/external/macos/release") elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux") set(OS_LINUX 1) set(OS_POSIX 1) MESSAGE(STATUS "build on Linux...") - # add_subdirectory(server/tp_web/src) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") set(TP_EXTERNAL_RELEASE_DIR "${PROJECT_SOURCE_DIR}/external/linux/release") elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Windows") - # MESSAGE(FATAL_ERROR "unsupported platform: Windows") + MESSAGE(FATAL_ERROR "unsupported platform: Windows") else () MESSAGE(FATAL_ERROR "unsupported platform: ${CMAKE_SYSTEM_NAME}") endif () diff --git a/CMakeLists.txt b/CMakeLists.txt index 8b9daf5..47ae499 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -36,7 +36,7 @@ endif() add_subdirectory(server/tp_core/core) add_subdirectory(server/tp_core/protocol/ssh) add_subdirectory(server/tp_core/protocol/telnet) -#add_subdirectory(server/testssh/testssh) +add_subdirectory(server/tp_core/testssh) if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/server/tp_core/protocol/rdp") add_subdirectory(server/tp_core/protocol/rdp) diff --git a/build/.idea/build.iml b/build/.idea/build.iml index 7cfc769..14585d1 100644 --- a/build/.idea/build.iml +++ b/build/.idea/build.iml @@ -1,8 +1,10 @@ - - + + + + diff --git a/build/build-py-static.sh b/build/build-py-static.sh index b501055..b3492af 100755 --- a/build/build-py-static.sh +++ b/build/build-py-static.sh @@ -3,10 +3,10 @@ ################################################################ # Basic settings. ################################################################ -VER_PYTHON="3.7.0" +VER_PYTHON="3.7.5" VER_PYTHON_SHORT="3.7" -VER_OPENSSL="1.0.2p" -VER_SQLITE="3250000" +VER_OPENSSL="1.1.1d" +VER_SQLITE="3300100" VER_ZLIB="1.2.11" VER_PYTHON_LIB="${VER_PYTHON_SHORT}m" @@ -38,15 +38,6 @@ function on_error() exit 1 } -function setp_build_git() -{ - # su -s - # yum install zlib-devel expat-devel libcurl-devel - # make prefix=/usr/local - # make prefix=/usr/local install - echo 'skip build git now.' -} - function dlfile() { echo -n "Downloading $1 ..." 
@@ -77,7 +68,7 @@ function step_download_files() dlfile "python source tarball" "https://www.python.org/ftp/python/${VER_PYTHON}/" "Python-${VER_PYTHON}.tgz" ${PATH_DOWNLOAD} dlfile "openssl source tarball" "https://www.openssl.org/source/" "openssl-${VER_OPENSSL}.tar.gz" ${PATH_DOWNLOAD} - dlfile "sqlite source tarball" "http://sqlite.org/2018/" "sqlite-autoconf-${VER_SQLITE}.tar.gz" ${PATH_DOWNLOAD} + dlfile "sqlite source tarball" "http://sqlite.org/2019/" "sqlite-autoconf-${VER_SQLITE}.tar.gz" ${PATH_DOWNLOAD} dlfile "zlib source tarball" "https://www.zlib.net/" "zlib-${VER_ZLIB}.tar.gz" ${PATH_DOWNLOAD} } @@ -121,6 +112,16 @@ function step_prepare_source() on_error "Can not prepare source code for build sqlite3 module for Python." fi + if [ ! -f "${PATH_FIX}/Python-${VER_PYTHON}/Modules/Setup.dist" ]; then + on_error "Can not fix source for build Python." + fi + if [ ! -f "${PATH_FIX}/Python-${VER_PYTHON}/Modules/_sqlite/cache.h" ]; then + on_error "Can not fix source for build sqlite3 module for Python." + fi + if [ ! -f "${PATH_FIX}/Python-${VER_PYTHON}/Modules/_sqlite/prepare_protocol.h" ]; then + on_error "Can not fix source for build sqlite3 module for Python." + fi + cp "${PATH_FIX}/Python-${VER_PYTHON}/Modules/Setup.dist" "${PY_PATH_SRC}/Modules/Setup.dist" cp "${PATH_FIX}/Python-${VER_PYTHON}/Modules/Setup.dist" "${PY_PATH_SRC}/Modules/Setup" cp "${PATH_FIX}/Python-${VER_PYTHON}/Modules/_sqlite/cache.h" "${PY_PATH_SRC}/Modules/_sqlite/cache.h" diff --git a/build/build.py b/build/build.py index 241bd29..9916afa 100644 --- a/build/build.py +++ b/build/build.py @@ -64,6 +64,9 @@ def main(): elif x == 'a': clean_everything() continue + elif x == 'e': + clean_external() + continue try: x = int(x) @@ -117,6 +120,27 @@ def clean_everything(): utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libuv.a')) +def clean_external(): + #utils.remove(os.path.join(env.root_path, 'out')) + utils.remove(os.path.join(env.root_path, 'external', 'jsoncpp')) + utils.remove(os.path.join(env.root_path, 'external', 'libuv')) + utils.remove(os.path.join(env.root_path, 'external', 'mbedtls')) + utils.remove(os.path.join(env.root_path, 'external', 'mongoose')) + #utils.remove(os.path.join(env.root_path, 'external', 'openssl')) + #utils.remove(os.path.join(env.root_path, 'external', 'python')) + #utils.remove(os.path.join(env.root_path, 'external', 'libssh-win-static', 'lib')) + #utils.remove(os.path.join(env.root_path, 'external', 'libssh-win-static', 'src')) + #utils.remove(os.path.join(env.root_path, 'external', 'linux', 'tmp')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedcrypto.a')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedtls.a')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedx509.a')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libsqlite3.a')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libssh.a')) + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libssh_threads.a')) + + utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libuv.a')) + + def do_opt(opt): arg = '' @@ -155,6 +179,7 @@ def do_opt(opt): # cmd = '"%s" -B "%s" %s' % (utils.cfg.py_exec, os.path.join(BUILDER_PATH, script), arg) cmd = '%s -B %s %s' % (env.py_exec, os.path.join(env.builder_path, script), arg) + print(cmd) os.system(cmd) @@ 
-248,6 +273,7 @@ def show_menu(): cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, '%2d' % options[o]['id']), (cc.CR_NORMAL, '] ', options[o]['disp'])) cc.v(' -------------------------------------------------------') + cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' E'), (cc.CR_NORMAL, '] clean external temp. files.')) cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' C'), (cc.CR_NORMAL, '] clean build and dist.')) cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' A'), (cc.CR_NORMAL, '] clean everything.')) diff --git a/build/builder/build-assist.py b/build/builder/build-assist.py index da84c60..3bbf012 100644 --- a/build/builder/build-assist.py +++ b/build/builder/build-assist.py @@ -13,21 +13,24 @@ class BuilderBase: def __init__(self): self.out_dir = '' - def build_exe(self): - pass + def build_assist(self): + cc.e("this is a pure-virtual function.") + + def build_player(self): + cc.e("this is a pure-virtual function.") def build_rdp(self): - pass + cc.e("this is a pure-virtual function.") def build_installer(self): - pass + cc.e("this is a pure-virtual function.") class BuilderWin(BuilderBase): def __init__(self): super().__init__() - def build_exe(self): + def build_assist(self): cc.i('build tp_assist...') sln_file = os.path.join(env.root_path, 'client', 'tp_assist_win', 'tp_assist.vs2017.sln') out_file = os.path.join(env.root_path, 'out', 'client', ctx.bits_path, ctx.target_path, 'tp_assist.exe') @@ -36,6 +39,15 @@ class BuilderWin(BuilderBase): utils.msvc_build(sln_file, 'tp_assist', ctx.target_path, ctx.bits_path, False) utils.ensure_file_exists(out_file) + def build_player(self): + cc.i('build tp-player...') + prj_path = os.path.join(env.root_path, 'client', 'tp-player') + out_file = os.path.join(env.root_path, 'out', 'client', ctx.bits_path, ctx.target_path, 'tp-player.exe') + if os.path.exists(out_file): + utils.remove(out_file) + utils.qt_build_win(prj_path, 'tp-player', ctx.bits_path, ctx.target_path) + utils.ensure_file_exists(out_file) + # def build_rdp(self): # cc.n('build tp_rdp...') # sln_file = os.path.join(ROOT_PATH, 'client', 'tp_rdp', 'tp_rdp.2015.sln') @@ -74,12 +86,14 @@ class BuilderWin(BuilderBase): utils.makedirs(tmp_cfg_path) utils.copy_file(os.path.join(env.root_path, 'out', 'client', ctx.bits_path, ctx.target_path), tmp_app_path, 'tp_assist.exe') - utils.copy_file(os.path.join(env.root_path, 'client', 'cfg'), tmp_cfg_path, ('tp-assist.windows.json', 'tp-assist.json')) + utils.copy_file(os.path.join(env.root_path, 'client', 'tp_assist_win', 'runtime'), tmp_app_path, 'vcruntime140.dll') + utils.copy_file(os.path.join(env.root_path, 'client', 'cfg'), tmp_cfg_path, ('tp-assist.windows.json', 'tp-assist.json')) utils.copy_file(os.path.join(env.root_path, 'client', 'cfg'), tmp_cfg_path, 'cacert.cer') utils.copy_file(os.path.join(env.root_path, 'client', 'cfg'), tmp_cfg_path, 'localhost.key') utils.copy_file(os.path.join(env.root_path, 'client', 'cfg'), tmp_cfg_path, 'localhost.pem') + # assist configuration web page utils.copy_ex(os.path.join(env.root_path, 'client', 'tp_assist_win'), tmp_app_path, 'site') utils.makedirs(os.path.join(tmp_app_path, 'tools', 'putty')) @@ -90,14 +104,37 @@ class BuilderWin(BuilderBase): utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'winscp'), os.path.join(tmp_app_path, 'tools', 'winscp'), 'license.txt') utils.makedirs(os.path.join(tmp_app_path, 'tools', 'tprdp')) - utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'tprdp-client.exe') - utils.copy_file(os.path.join(env.root_path, 
'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'tprdp-replay.exe') - utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'libeay32.dll') - utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'ssleay32.dll') - utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'msvcr120.dll') + # utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'tprdp-client.exe') + # utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'tprdp-replay.exe') + # utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'libeay32.dll') + # utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'ssleay32.dll') + # utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'msvcr120.dll') + utils.copy_file(os.path.join(env.root_path, 'client', 'tools', 'tprdp'), os.path.join(tmp_app_path, 'tools', 'tprdp'), 'wfreerdp.exe') utils.copy_file(os.path.join(env.root_path, 'client', 'tools'), os.path.join(tmp_app_path, 'tools'), 'securecrt-telnet.vbs') + # tp-player + utils.copy_file(os.path.join(env.root_path, 'out', 'client', ctx.bits_path, ctx.target_path), tmp_app_path, 'tp-player.exe') + + # qt-redist + qt_redist_path = os.path.join(env.root_path, 'client', 'tools', 'qt-redist') + utils.copy_file(qt_redist_path, tmp_app_path, 'Qt5Core.dll') + utils.copy_file(qt_redist_path, tmp_app_path, 'Qt5Gui.dll') + utils.copy_file(qt_redist_path, tmp_app_path, 'Qt5Network.dll') + utils.copy_file(qt_redist_path, tmp_app_path, 'Qt5Widgets.dll') + utils.copy_ex(os.path.join(qt_redist_path, 'platforms'), os.path.join(tmp_app_path, 'platforms')) + utils.copy_ex(os.path.join(qt_redist_path, 'styles'), os.path.join(tmp_app_path, 'styles')) + utils.copy_ex(os.path.join(qt_redist_path, 'translations'), os.path.join(tmp_app_path, 'translations')) + + # zlib + suffix = 'd' if ctx.target_path == 'debug' else '' + utils.copy_file(os.path.join(env.root_path, 'external', 'zlib', 'build', ctx.target_path), tmp_app_path, 'zlib{}.dll'.format(suffix)) + + # openssl + utils.copy_file(os.path.join(env.root_path, 'external', 'openssl', 'bin'), tmp_app_path, 'libcrypto-1_1.dll') + utils.copy_file(os.path.join(env.root_path, 'external', 'openssl', 'bin'), tmp_app_path, 'libssl-1_1.dll') + + # final build utils.nsis_build(os.path.join(env.root_path, 'dist', 'client', 'windows', 'assist', 'installer.nsi')) @@ -105,7 +142,7 @@ class BuilderMacOS(BuilderBase): def __init__(self): super().__init__() - def build_exe(self): + def build_assist(self): cc.i('build tp_assist...') configuration = ctx.target_path.capitalize() @@ -117,6 +154,9 @@ class BuilderMacOS(BuilderBase): utils.xcode_build(proj_file, 'TP-Assist', configuration, False) utils.ensure_file_exists(os.path.join(out_file, 'Contents', 'Info.plist')) + def build_player(self): + cc.o('skip build tp_player now...') + def build_installer(self): cc.i('make tp_assist dmg file...') @@ -169,7 +209,7 @@ class BuilderLinux(BuilderBase): def __init__(self): super().__init__() - def build_exe(self): + def build_assist(self): cc.e('not support linux.') # def build_rdp(self): @@ -215,7 
+255,8 @@ def main(): builder = gen_builder(ctx.host_os) if 'exe' in argv: - builder.build_exe() + builder.build_assist() + builder.build_player() # elif 'rdp' in argv: # builder.build_rdp() elif 'installer' in argv: diff --git a/build/builder/build-external.py b/build/builder/build-external.py index f79ab9a..ea8ac69 100644 --- a/build/builder/build-external.py +++ b/build/builder/build-external.py @@ -23,70 +23,90 @@ class BuilderBase: self._init_path() def _init_path(self): - cc.e("this is a pure-virtual function.") + cc.e("_init_path() pure-virtual function.") def build_jsoncpp(self): file_name = 'jsoncpp-{}.zip'.format(env.ver_jsoncpp) - if not utils.download_file('jsoncpp source tarball', 'https://github.com/open-source-parsers/jsoncpp/archive/{}.zip'.format(env.ver_jsoncpp), PATH_DOWNLOAD, file_name): - return self._build_jsoncpp(file_name) + def _download_jsoncpp(self, file_name): + return utils.download_file('jsoncpp source tarball', 'https://github.com/open-source-parsers/jsoncpp/archive/{}.zip'.format(env.ver_jsoncpp), PATH_DOWNLOAD, file_name) + def _build_jsoncpp(self, file_name): - cc.e("this is a pure-virtual function.") + cc.e("_build_jsoncpp() pure-virtual function.") def build_mongoose(self): file_name = 'mongoose-{}.zip'.format(env.ver_mongoose) - if not utils.download_file('mongoose source tarball', 'https://github.com/cesanta/mongoose/archive/{}.zip'.format(env.ver_mongoose), PATH_DOWNLOAD, file_name): - return self._build_mongoose(file_name) + def _download_mongoose(self, file_name): + return utils.download_file('mongoose source tarball', 'https://github.com/cesanta/mongoose/archive/{}.zip'.format(env.ver_mongoose), PATH_DOWNLOAD, file_name) + def _build_mongoose(self, file_name): - cc.e("this is a pure-virtual function.") + cc.e("_build_mongoose() pure-virtual function.") def build_openssl(self): file_name = 'openssl-{}.zip'.format(env.ver_ossl) self._build_openssl(file_name) - def _build_openssl(self, file_name): + def _download_openssl(self, file_name): _alt_ver = '_'.join(env.ver_ossl.split('.')) - if not utils.download_file('openssl source tarball', 'https://github.com/openssl/openssl/archive/OpenSSL_{}.zip'.format(_alt_ver), PATH_DOWNLOAD, file_name): - cc.e("can not download openssl source tarball.") - return False - else: - return True + return utils.download_file('openssl source tarball', 'https://github.com/openssl/openssl/archive/OpenSSL_{}.zip'.format(_alt_ver), PATH_DOWNLOAD, file_name) + + def _build_openssl(self, file_name): + cc.e("_build_openssl() pure-virtual function.") + # _alt_ver = '_'.join(env.ver_ossl.split('.')) + # if not utils.download_file('openssl source tarball', 'https://github.com/openssl/openssl/archive/OpenSSL_{}.zip'.format(_alt_ver), PATH_DOWNLOAD, file_name): + # cc.e("can not download openssl source tarball.") + # return False + # else: + # return True def build_libuv(self): file_name = 'libuv-{}.zip'.format(env.ver_libuv) - if not utils.download_file('libuv source tarball', 'https://github.com/libuv/libuv/archive/v{}.zip'.format(env.ver_libuv), PATH_DOWNLOAD, file_name): - return self._build_libuv(file_name) + def _download_libuv(self, file_name): + return utils.download_file('libuv source tarball', 'https://github.com/libuv/libuv/archive/v{}.zip'.format(env.ver_libuv), PATH_DOWNLOAD, file_name) + def _build_libuv(self, file_name): cc.e("this is a pure-virtual function.") def build_mbedtls(self): file_name = 'mbedtls-mbedtls-{}.zip'.format(env.ver_mbedtls) - if not utils.download_file('mbedtls source tarball', 
'https://github.com/ARMmbed/mbedtls/archive/mbedtls-{}.zip'.format(env.ver_mbedtls), PATH_DOWNLOAD, file_name): - return self._build_mbedtls(file_name) + def _download_mbedtls(self, file_name): + return utils.download_file('mbedtls source tarball', 'https://github.com/ARMmbed/mbedtls/archive/mbedtls-{}.zip'.format(env.ver_mbedtls), PATH_DOWNLOAD, file_name) + def _build_mbedtls(self, file_name): cc.e("this is a pure-virtual function.") + def build_zlib(self): + file_name = 'zlib{}.zip'.format(env.ver_zlib_number) + self._build_zlib(file_name) + + def _download_zlib(self, file_name): + return utils.download_file('zlib source tarball', 'https://www.zlib.net/zlib{}.zip'.format(env.ver_zlib_number), PATH_DOWNLOAD, file_name) + + def _build_zlib(self, file_name): + cc.e("_build_zlib() pure-virtual function.") + def build_libssh(self): file_name = 'libssh-{}.zip'.format(env.ver_libssh) - if not utils.download_file('libssh source tarball', 'https://git.libssh.org/projects/libssh.git/snapshot/libssh-{}.zip'.format(env.ver_libssh), PATH_DOWNLOAD, file_name): - return self._build_libssh(file_name) + def _download_libssh(self, file_name): + return utils.download_file('libssh source tarball', 'https://git.libssh.org/projects/libssh.git/snapshot/libssh-{}.zip'.format(env.ver_libssh), PATH_DOWNLOAD, file_name) + def _build_libssh(self, file_name): - cc.e("this is a pure-virtual function.") + cc.e("_build_libssh() pure-virtual function.") def prepare_python(self): self._prepare_python() def _prepare_python(self): - cc.e("this is a pure-virtual function.") + cc.e("_prepare_python() pure-virtual function.") def fix_output(self): pass @@ -103,9 +123,10 @@ class BuilderWin(BuilderBase): self.MBEDTLS_PATH_SRC = os.path.join(PATH_EXTERNAL, 'mbedtls') self.LIBUV_PATH_SRC = os.path.join(PATH_EXTERNAL, 'libuv') self.LIBSSH_PATH_SRC = os.path.join(PATH_EXTERNAL, 'libssh') + self.ZLIB_PATH_SRC = os.path.join(PATH_EXTERNAL, 'zlib') def _prepare_python(self): - cc.n('prepare python header files ...', end='') + cc.n('prepare python header files ... ', end='') if os.path.exists(os.path.join(PATH_EXTERNAL, 'python', 'include', 'Python.h')): cc.w('already exists, skip.') @@ -125,69 +146,98 @@ class BuilderWin(BuilderBase): utils.copy_ex(_header_path, os.path.join(PATH_EXTERNAL, 'python', 'include')) def _build_openssl(self, file_name): - cc.n('build openssl static library from source code... ') - - if not super()._build_openssl(file_name): - return - - _chk_output = [ - os.path.join(self.OPENSSL_PATH_SRC, 'out32', 'libeay32.lib'), - os.path.join(self.OPENSSL_PATH_SRC, 'out32', 'ssleay32.lib'), - os.path.join(self.OPENSSL_PATH_SRC, 'inc32', 'openssl', 'opensslconf.h'), - ] - - need_build = False - for f in _chk_output: - if not os.path.exists(f): - need_build = True - break - - if not need_build: - cc.n('build openssl static library from source code... ', end='') + cc.n('prepare OpenSSL pre-built package ... 
', end='') + if os.path.exists(self.OPENSSL_PATH_SRC): cc.w('already exists, skip.') return cc.v('') - cc.n('prepare openssl source code...') _alt_ver = '_'.join(env.ver_ossl.split('.')) - if not os.path.exists(self.OPENSSL_PATH_SRC): - utils.unzip(os.path.join(PATH_DOWNLOAD, file_name), PATH_EXTERNAL) - os.rename(os.path.join(PATH_EXTERNAL, 'openssl-OpenSSL_{}'.format(_alt_ver)), self.OPENSSL_PATH_SRC) - if not os.path.exists(self.OPENSSL_PATH_SRC): - raise RuntimeError('can not prepare openssl source code.') - else: - cc.w('already exists, skip.') - os.chdir(self.OPENSSL_PATH_SRC) - os.system('""{}" Configure VC-WIN32"'.format(env.perl)) - os.system(r'ms\do_nasm') - # for vs2015 - # utils.sys_exec(r'"{}\VC\bin\vcvars32.bat" && nmake -f ms\nt.mak'.format(env.visual_studio_path), direct_output=True) - # for vs2017 community - utils.sys_exec(r'"{}VC\Auxiliary\Build\vcvars32.bat" && nmake -f ms\nt.mak'.format(env.visual_studio_path), direct_output=True) + file_name = 'Win32OpenSSL-{}.msi'.format(_alt_ver) + installer = os.path.join(PATH_DOWNLOAD, file_name) - for f in _chk_output: - if not os.path.exists(f): - raise RuntimeError('build openssl static library from source code failed.') + if not os.path.exists(installer): + if not utils.download_file('openssl installer', 'http://slproweb.com/download/{}'.format(file_name), PATH_DOWNLOAD, file_name): + cc.e('can not download the pre-built OpenSSL installer.') + return + + utils.ensure_file_exists(installer) + + cc.w('On Windows, we use a pre-built package of OpenSSL.') + cc.w('The installer has been downloaded to "{}".'.format(installer)) + cc.w('Please install OpenSSL into "{}".'.format(self.OPENSSL_PATH_SRC)) + cc.w('\nOnce OpenSSL is installed, press Enter to continue or Q to quit...', end='') + try: + x = env.input() + except EOFError: + x = 'q' + if x == 'q': + return + + + # cc.n('build openssl static library from source code... ') + + # if not super()._build_openssl(file_name): + # return + + # _chk_output = [ + # os.path.join(self.OPENSSL_PATH_SRC, 'out32', 'libeay32.lib'), + # os.path.join(self.OPENSSL_PATH_SRC, 'out32', 'ssleay32.lib'), + # os.path.join(self.OPENSSL_PATH_SRC, 'inc32', 'openssl', 'opensslconf.h'), + # ] + + # need_build = False + # for f in _chk_output: + # if not os.path.exists(f): + # need_build = True + # break + + # if not need_build: + # cc.n('build openssl static library from source code... 
', end='') + # cc.w('already exists, skip.') + # return + # cc.v('') + + # cc.n('prepare openssl source code...') + # _alt_ver = '_'.join(env.ver_ossl.split('.')) + # if not os.path.exists(self.OPENSSL_PATH_SRC): + # utils.unzip(os.path.join(PATH_DOWNLOAD, file_name), PATH_EXTERNAL) + # os.rename(os.path.join(PATH_EXTERNAL, 'openssl-OpenSSL_{}'.format(_alt_ver)), self.OPENSSL_PATH_SRC) + # if not os.path.exists(self.OPENSSL_PATH_SRC): + # raise RuntimeError('can not prepare openssl source code.') + # else: + # cc.w('already exists, skip.') + + # os.chdir(self.OPENSSL_PATH_SRC) + # os.system('""{}" Configure VC-WIN32"'.format(env.perl)) + # os.system(r'ms\do_nasm') + # # for vs2015 + # # utils.sys_exec(r'"{}\VC\bin\vcvars32.bat" && nmake -f ms\nt.mak'.format(env.visual_studio_path), direct_output=True) + # # for vs2017 community + # utils.sys_exec(r'"{}VC\Auxiliary\Build\vcvars32.bat" && nmake -f ms\nt.mak'.format(env.visual_studio_path), direct_output=True) + + # for f in _chk_output: + # if not os.path.exists(f): + # raise RuntimeError('build openssl static library from source code failed.') def _build_libssh(self, file_name): - cc.n('build libssh static library from source code... ', end='') + if not self._download_libssh(file_name): + return + cc.n('build libssh library from source code... ', end='') if not os.path.exists(self.LIBSSH_PATH_SRC): cc.v('') utils.unzip(os.path.join(PATH_DOWNLOAD, file_name), PATH_EXTERNAL) os.rename(os.path.join(PATH_EXTERNAL, 'libssh-{}'.format(env.ver_libssh)), self.LIBSSH_PATH_SRC) - # cc.n('fix libssh source code... ', end='') - # utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', 'src', 'sftp.c')) - # utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'sftp.c') cc.n('fix libssh source code... 
', end='') s_name = 'libssh-{}'.format(env.ver_libssh) utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'session.c')) - utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto.c')) + # utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto.c')) utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto-compat.c')) utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'session.c') - utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto.c') + # utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto.c') utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto-compat.c') out_file_lib = os.path.join(self.LIBSSH_PATH_SRC, 'lib', ctx.target_path, 'ssh.lib') @@ -210,7 +260,7 @@ class BuilderWin(BuilderBase): cc.i('build libssh...') sln_file = os.path.join(self.LIBSSH_PATH_SRC, 'build', 'libssh.sln') - utils.msvc_build(sln_file, 'ssh_shared', ctx.target_path, 'win32', False) + utils.msvc_build(sln_file, 'ssh', ctx.target_path, 'win32', False) utils.ensure_file_exists(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src', ctx.target_path, 'ssh.lib')) utils.ensure_file_exists(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src', ctx.target_path, 'ssh.dll')) utils.copy_file(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src', ctx.target_path), os.path.join(self.LIBSSH_PATH_SRC, 'lib', ctx.target_path), 'ssh.lib') @@ -218,7 +268,53 @@ utils.ensure_file_exists(out_file_lib) utils.ensure_file_exists(out_file_dll) + def _build_zlib(self, file_name): + if not self._download_zlib(file_name): + return + cc.n('build zlib library from source code... ', end='') + + if not os.path.exists(self.ZLIB_PATH_SRC): + cc.v('') + utils.unzip(os.path.join(PATH_DOWNLOAD, file_name), PATH_EXTERNAL) + os.rename(os.path.join(PATH_EXTERNAL, 'zlib-{}'.format(env.ver_zlib)), self.ZLIB_PATH_SRC) + + if ctx.target_path == 'debug': + olib = 'zlibd.lib' + odll = 'zlibd.dll' + else: + olib = 'zlib.lib' + odll = 'zlib.dll' + out_file_lib = os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path, olib) + out_file_dll = os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path, odll) + + if os.path.exists(out_file_lib) and os.path.exists(out_file_dll): + cc.w('already exists, skip.') + return + cc.v('') + + cc.w('On Windows, to build zlib you need cmake-gui.exe to generate the solution file') + cc.w('for Visual Studio 2017. 
Visit https://docs.tp4a.com for more details.') + cc.w('\nOnce zlib.sln is generated, press Enter to continue or Q to quit...', end='') + try: + x = env.input() + except EOFError: + x = 'q' + if x == 'q': + return + + cc.i('build zlib...') + sln_file = os.path.join(self.ZLIB_PATH_SRC, 'build', 'zlib.sln') + utils.msvc_build(sln_file, 'zlib', ctx.target_path, 'win32', False) + # utils.ensure_file_exists(os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path, 'zlib.lib')) + # utils.ensure_file_exists(os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path, 'zlib.dll')) + # utils.copy_file(os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path), os.path.join(self.ZLIB_PATH_SRC, 'lib', ctx.target_path), 'zlib.lib') + # utils.copy_file(os.path.join(self.ZLIB_PATH_SRC, 'build', ctx.target_path), os.path.join(self.ZLIB_PATH_SRC, 'lib', ctx.target_path), 'zlib.dll') + utils.ensure_file_exists(out_file_lib) + utils.ensure_file_exists(out_file_dll) + def _build_jsoncpp(self, file_name): + if not self._download_jsoncpp(file_name): + return cc.n('prepare jsoncpp source code... ', end='') if not os.path.exists(self.JSONCPP_PATH_SRC): cc.v('') @@ -228,6 +324,8 @@ cc.w('already exists, skip.') def _build_mongoose(self, file_name): + if not self._download_mongoose(file_name): + return cc.n('prepare mongoose source code... ', end='') if not os.path.exists(self.MONGOOSE_PATH_SRC): cc.v('') @@ -237,6 +335,8 @@ cc.w('already exists, skip.') def _build_mbedtls(self, file_name): + if not self._download_mbedtls(file_name): + return cc.n('prepare mbedtls source code... ', end='') if not os.path.exists(self.MBEDTLS_PATH_SRC): cc.v('') @@ -254,6 +354,8 @@ # utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'library'), os.path.join(self.MBEDTLS_PATH_SRC, 'library'), 'rsa.c') def _build_libuv(self, file_name): + if not self._download_libuv(file_name): + return cc.n('prepare libuv source code... 
', end='') if not os.path.exists(self.LIBUV_PATH_SRC): cc.v('') @@ -277,6 +379,7 @@ class BuilderLinux(BuilderBase): self.LIBUV_PATH_SRC = os.path.join(self.PATH_TMP, 'libuv-{}'.format(env.ver_libuv)) self.MBEDTLS_PATH_SRC = os.path.join(self.PATH_TMP, 'mbedtls-mbedtls-{}'.format(env.ver_mbedtls)) self.LIBSSH_PATH_SRC = os.path.join(self.PATH_TMP, 'libssh-{}'.format(env.ver_libssh)) + self.ZLIB_PATH_SRC = os.path.join(self.PATH_TMP, 'zlib-{}'.format(env.ver_zlib)) self.JSONCPP_PATH_SRC = os.path.join(PATH_EXTERNAL, 'jsoncpp') self.MONGOOSE_PATH_SRC = os.path.join(PATH_EXTERNAL, 'mongoose') @@ -288,7 +391,7 @@ class BuilderLinux(BuilderBase): cc.n('prepare python header and lib files ...') if os.path.exists(os.path.join(self.PATH_RELEASE, 'include', 'python', 'Python.h')): - cc.w(' - header file already exists, skip.') + cc.w('python header file already exists, skip.') else: utils.ensure_file_exists(os.path.join(self.PATH_RELEASE, 'include', 'python{}m'.format(ctx.py_dot_ver), 'Python.h')) utils.sys_exec('ln -s "{}" "{}"'.format( @@ -300,6 +403,8 @@ class BuilderLinux(BuilderBase): utils.ensure_file_exists(os.path.join(self.PATH_RELEASE, 'lib', lib_file)) def _build_jsoncpp(self, file_name): + if not self._download_jsoncpp(file_name): + return cc.n('prepare jsoncpp source code...', end='') if not os.path.exists(self.JSONCPP_PATH_SRC): cc.v('') @@ -309,6 +414,8 @@ class BuilderLinux(BuilderBase): cc.w('already exists, skip.') def _build_mongoose(self, file_name): + if not self._download_mongoose(file_name): + return cc.n('prepare mongoose source code...', end='') if not os.path.exists(self.MONGOOSE_PATH_SRC): cc.v('') @@ -318,9 +425,12 @@ class BuilderLinux(BuilderBase): cc.w('already exists, skip.') def _build_openssl(self, file_name): - pass # we do not need build openssl anymore, because first time run build.sh we built Python, it include openssl. + # we do not need build openssl anymore, because first time run build.sh we built Python with openssl included. + cc.w('skip build openssl again.') def _build_libuv(self, file_name): + if not self._download_libuv(file_name): + return if not os.path.exists(self.LIBUV_PATH_SRC): os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) @@ -347,7 +457,11 @@ class BuilderLinux(BuilderBase): # use os.unlink() because some file should be a link. 
os.unlink(os.path.join(self.PATH_RELEASE, 'lib', i)) + utils.ensure_file_exists(os.path.join(self.PATH_RELEASE, 'lib', 'libuv.a')) + def _build_mbedtls(self, file_name): + if not self._download_mbedtls(file_name): + return if not os.path.exists(self.MBEDTLS_PATH_SRC): os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) @@ -382,8 +496,6 @@ class BuilderLinux(BuilderBase): # fix source file utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'include', 'mbedtls', 'config.h')) utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'include', 'mbedtls'), os.path.join(self.MBEDTLS_PATH_SRC, 'include', 'mbedtls'), 'config.h') - # utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'library', 'rsa.c')) - # utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'library'), os.path.join(self.MBEDTLS_PATH_SRC, 'library'), 'rsa.c') old_p = os.getcwd() os.chdir(self.MBEDTLS_PATH_SRC) @@ -392,11 +504,14 @@ class BuilderLinux(BuilderBase): os.chdir(old_p) def _build_libssh(self, file_name): + if not self._download_libssh(file_name): + return if not os.path.exists(self.LIBSSH_PATH_SRC): os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) cc.n('build libssh...', end='') - if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libssh.a')): + out_file = os.path.join(self.PATH_RELEASE, 'lib64', 'libssh.a') + if os.path.exists(out_file): cc.w('already exists, skip.') return cc.v('') @@ -404,10 +519,10 @@ class BuilderLinux(BuilderBase): cc.n('fix libssh source code... ', end='') s_name = 'libssh-{}'.format(env.ver_libssh) utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'session.c')) - utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto.c')) + # utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto.c')) utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src', 'libcrypto-compat.c')) utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'session.c') - utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto.c') + # utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto.c') utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'libssh', s_name, 'src'), os.path.join(self.LIBSSH_PATH_SRC, 'src'), 'libcrypto-compat.c') build_path = os.path.join(self.LIBSSH_PATH_SRC, 'build') @@ -417,14 +532,52 @@ class BuilderLinux(BuilderBase): ' -DOPENSSL_LIBRARIES={path_release}/lib' \ ' -DWITH_SFTP=ON' \ ' -DWITH_SERVER=ON' \ - ' -DWITH_STATIC_LIB=ON' \ ' -DWITH_GSSAPI=OFF' \ - ' -DWITH_ZLIB=OFF' \ + ' -DWITH_ZLIB=ON' \ ' -DWITH_PCAP=OFF' \ + ' -DBUILD_SHARED_LIBS=OFF' \ ' -DUNIT_TESTING=OFF' \ ' -DWITH_EXAMPLES=OFF' \ ' -DWITH_BENCHMARKS=OFF' \ ' -DWITH_NACL=OFF' \ + ''.format(path_release=self.PATH_RELEASE) + + # ' -DWITH_STATIC_LIB=ON' + + + old_p = os.getcwd() + try: + utils.cmake(build_path, 'Release', False, cmake_define=cmake_define, cmake_pre_define='CFLAGS="-fPIC"') + os.chdir(build_path) + utils.sys_exec('make install') + except: + pass + os.chdir(old_p) + + utils.ensure_file_exists(out_file) + # files = 
os.listdir(os.path.join(self.PATH_RELEASE, 'lib')) + # for i in files: + # if i.startswith('libssh.so'): + # # use os.unlink() because some file should be a link. + # os.unlink(os.path.join(self.PATH_RELEASE, 'lib', i)) + + def _build_zlib(self, file_name): + # cc.w('skip build zlib again.') + if not self._download_zlib(file_name): + return + if not os.path.exists(self.ZLIB_PATH_SRC): + os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) + + cc.n('build zlib...', end='') + out_file = os.path.join(self.PATH_RELEASE, 'lib', 'libz.a') + if os.path.exists(out_file): + cc.w('already exists, skip.') + return + cc.v('') + + build_path = os.path.join(self.ZLIB_PATH_SRC, 'build') + + cmake_define = ' -DCMAKE_INSTALL_PREFIX={path_release}' \ ' ..'.format(path_release=self.PATH_RELEASE) old_p = os.getcwd() @@ -436,10 +589,10 @@ class BuilderLinux(BuilderBase): pass os.chdir(old_p) - utils.ensure_file_exists(os.path.join(self.PATH_RELEASE, 'lib', 'libssh.a')) + utils.ensure_file_exists(out_file) files = os.listdir(os.path.join(self.PATH_RELEASE, 'lib')) for i in files: - if i.startswith('libssh.so'): + if i.startswith('libz.so'): # use os.unlink() because some file should be a link. os.unlink(os.path.join(self.PATH_RELEASE, 'lib', i)) @@ -458,6 +611,7 @@ class BuilderMacOS(BuilderBase): self.LIBUV_PATH_SRC = os.path.join(self.PATH_TMP, 'libuv-{}'.format(env.ver_libuv)) self.MBEDTLS_PATH_SRC = os.path.join(self.PATH_TMP, 'mbedtls-mbedtls-{}'.format(env.ver_mbedtls)) self.LIBSSH_PATH_SRC = os.path.join(self.PATH_TMP, 'libssh-{}'.format(env.ver_libssh)) + self.ZLIB_PATH_SRC = os.path.join(self.PATH_TMP, 'zlib-{}'.format(env.ver_zlib)) self.JSONCPP_PATH_SRC = os.path.join(PATH_EXTERNAL, 'jsoncpp') self.MONGOOSE_PATH_SRC = os.path.join(PATH_EXTERNAL, 'mongoose') @@ -466,6 +620,8 @@ class BuilderMacOS(BuilderBase): utils.makedirs(self.PATH_TMP) def _build_jsoncpp(self, file_name): + if not self._download_jsoncpp(file_name): + return cc.n('prepare jsoncpp source code...', end='') if not os.path.exists(self.JSONCPP_PATH_SRC): cc.v('') @@ -475,6 +631,8 @@ class BuilderMacOS(BuilderBase): cc.w('already exists, skip.') def _build_mongoose(self, file_name): + if not self._download_mongoose(file_name): + return cc.n('prepare mongoose source code...', end='') if not os.path.exists(self.MONGOOSE_PATH_SRC): cc.v('') @@ -484,7 +642,10 @@ class BuilderMacOS(BuilderBase): cc.w('already exists, skip.') def _build_openssl(self, file_name): - if not super()._build_openssl(file_name): + cc.w('skip build openssl again.') + return + + if not self._download_openssl(file_name): return cc.n('prepare openssl source code...', end='') @@ -512,6 +673,8 @@ class BuilderMacOS(BuilderBase): os.chdir(old_p) def _build_libuv(self, file_name): + if not self._download_libuv(file_name): + return cc.n('prepare libuv source code...', end='') if not os.path.exists(self.LIBUV_PATH_SRC): os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) @@ -534,6 +697,8 @@ class BuilderMacOS(BuilderBase): os.chdir(old_p) def _build_mbedtls(self, file_name): + if not self._download_mbedtls(file_name): + return if not os.path.exists(self.MBEDTLS_PATH_SRC): os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) @@ -578,6 +743,11 @@ class BuilderMacOS(BuilderBase): os.chdir(old_p) def _build_libssh(self, file_name): + # cc.n('skip build libssh on macOS.') + # return + + if not self._download_libssh(file_name): + return if not os.path.exists(self.LIBSSH_PATH_SRC): 
os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) @@ -591,20 +761,24 @@ class BuilderMacOS(BuilderBase): build_path = os.path.join(self.LIBSSH_PATH_SRC, 'build') cmake_define = ' -DCMAKE_INSTALL_PREFIX={path_release}' \ - ' -DOPENSSL_INCLUDE_DIR={path_release}/include' \ - ' -DOPENSSL_LIBRARIES={path_release}/lib' \ + ' -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include' \ + ' -DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib' \ + ' -DWITH_GCRYPT=OFF' \ + ' -DWITH_GEX=OFF' \ ' -DWITH_SFTP=ON' \ ' -DWITH_SERVER=ON' \ - ' -DWITH_STATIC_LIB=ON' \ ' -DWITH_GSSAPI=OFF' \ - ' -DWITH_ZLIB=OFF' \ + ' -DWITH_ZLIB=ON' \ ' -DWITH_PCAP=OFF' \ + ' -DBUILD_SHARED_LIBS=OFF' \ ' -DUNIT_TESTING=OFF' \ ' -DWITH_EXAMPLES=OFF' \ ' -DWITH_BENCHMARKS=OFF' \ ' -DWITH_NACL=OFF' \ ''.format(path_release=self.PATH_RELEASE) + # ' -DWITH_STATIC_LIB=ON' + try: utils.cmake(build_path, 'Release', False, cmake_define) except: @@ -618,6 +792,41 @@ class BuilderMacOS(BuilderBase): # utils.copy_file(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src', 'threads'), os.path.join(self.PATH_RELEASE, 'lib'), 'libssh_threads.a') utils.copy_ex(os.path.join(self.LIBSSH_PATH_SRC, 'include'), os.path.join(self.PATH_RELEASE, 'include'), 'libssh') + def _build_zlib(self, file_name): + # cc.w('skip build zlib again.') + if not self._download_zlib(file_name): + return + if not os.path.exists(self.ZLIB_PATH_SRC): + os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP)) + + cc.n('build zlib...', end='') + out_file = os.path.join(self.PATH_RELEASE, 'lib', 'libz.a') + if os.path.exists(out_file): + cc.w('already exists, skip.') + return + cc.v('') + + build_path = os.path.join(self.ZLIB_PATH_SRC, 'build') + + cmake_define = ' -DCMAKE_INSTALL_PREFIX={path_release}' \ + ' ..'.format(path_release=self.PATH_RELEASE) + + old_p = os.getcwd() + try: + utils.cmake(build_path, 'Release', False, cmake_define=cmake_define, cmake_pre_define='CFLAGS="-fPIC"') + os.chdir(build_path) + utils.sys_exec('make install') + except: + pass + os.chdir(old_p) + + utils.ensure_file_exists(out_file) + files = os.listdir(os.path.join(self.PATH_RELEASE, 'lib')) + for i in files: + if i.startswith('libz.so'): + # use os.unlink() because some file should be a link. 
+ os.unlink(os.path.join(self.PATH_RELEASE, 'lib', i)) + def _prepare_python(self): pass @@ -673,6 +882,7 @@ def main(): builder.build_openssl() builder.build_libuv() builder.build_mbedtls() + builder.build_zlib() builder.build_libssh() builder.fix_output() diff --git a/build/builder/build-pysrt.py b/build/builder/build-pysrt.py index 629c737..637763c 100644 --- a/build/builder/build-pysrt.py +++ b/build/builder/build-pysrt.py @@ -13,11 +13,11 @@ ctx = BuildContext() MODULES_WIN = ['_asyncio', '_bz2', '_ctypes', '_hashlib', '_lzma', '_overlapped', '_socket', '_sqlite3', '_ssl', 'select', 'sqlite3', 'libcrypto-1_1', 'libssl-1_1', 'unicodedata'] -PY_LIB_REMOVE_WIN = ['ctypes/test', 'curses', 'dbm', 'distutils', 'email/test', 'ensurepip', 'idlelib', 'lib2to3', +PY_LIB_REMOVE_WIN = ['ctypes/test', 'curses', 'dbm', 'distutils/test', 'email/tests', 'ensurepip', 'idlelib', 'lib2to3', 'lib-dynload', 'pydoc_data', 'site-packages', 'sqlite3/test', 'test', 'tkinter', 'turtledemo', 'unittest', 'venv', 'wsgiref', 'doctest.py', 'pdb.py', 'py_compile.py', 'pydoc.py', 'this.py', 'wave.py', 'webbrowser.py', 'zipapp.py'] -PY_LIB_REMOVE_LINUX = ['ctypes/test', 'curses', 'dbm', 'distutils', 'ensurepip', 'idlelib', 'lib2to3', +PY_LIB_REMOVE_LINUX = ['ctypes/test', 'curses', 'dbm', 'distutils/tests', 'ensurepip', 'idlelib', 'lib2to3', 'lib-dynload', 'pydoc_data', 'site-packages', 'sqlite3/test', 'test', 'tkinter', 'turtledemo', 'unittest', 'venv', 'wsgiref', 'doctest.py', 'pdb.py', 'py_compile.py', 'pydoc.py', 'this.py', 'wave.py', 'webbrowser.py', 'zipapp.py'] PY_MODULE_REMOVE_LINUX = ['_ctypes_test', '_testbuffer', '_testcapi', '_testimportmultiple', '_testmultiphase', '_xxtestfuzz'] @@ -46,7 +46,7 @@ class PYSBase: utils.sys_exec('{} -m pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pip --upgrade'.format(env.py_exec)) pip = self._get_pip() - pypi_modules = ['ldap3', 'mako', 'Pillow', 'psutil', 'pymysql', 'qrcode', 'tornado', 'wheezy.captcha'] + pypi_modules = ['cffi', 'cryptography', 'ldap3', 'mako', 'Pillow', 'psutil', 'pyasn1', 'pymysql', 'qrcode', 'tornado', 'wheezy.captcha'] for p in pypi_modules: cc.n('install {} ...'.format(p)) utils.sys_exec('{} install -i https://pypi.tuna.tsinghua.edu.cn/simple {}'.format(pip, p), direct_output=True) @@ -111,7 +111,7 @@ class PYSBase: utils.ensure_file_exists(out_file) cc.v('remove temp folder...') - utils.remove(_tmp_) + # utils.remove(_tmp_) def _make_py_ver_file(self): pass @@ -250,7 +250,7 @@ class PYSLinux(PYSBase): def _get_pip(self): _exec_path = os.path.dirname(env.py_exec) - return os.path.join(_exec_path, 'pip') + return os.path.join(_exec_path, 'pip3.7') def _make_py_ver_file(self): # do nothing. 
diff --git a/build/builder/build-version.py b/build/builder/build-version.py index e4933a0..aaac3f4 100644 --- a/build/builder/build-version.py +++ b/build/builder/build-version.py @@ -18,6 +18,7 @@ class Builder: self.VER_TP_TPCORE = '' self.VER_TP_TPWEB = '' self.VER_TP_ASSIST = '' + self.VER_TP_ASSIST_REQUIRE = '' def build(self): cc.n('update version...') @@ -43,12 +44,17 @@ x = l.split(' ') self.VER_TP_ASSIST = x[1].strip() # self.VER_TP_ASSIST += '.0' + elif l.startswith('TP_ASSIST_REQUIRE '): + x = l.split(' ') + self.VER_TP_ASSIST_REQUIRE = x[1].strip() + # self.VER_TP_ASSIST_REQUIRE += '.0' cc.v('new version:') cc.v(' Server : ', self.VER_TP_SERVER) cc.v(' - tp_core : ', self.VER_TP_TPCORE) cc.v(' - tp_web : ', self.VER_TP_TPWEB) cc.v(' Assist : ', self.VER_TP_ASSIST) + cc.v(' - Require : ', self.VER_TP_ASSIST_REQUIRE) cc.v('') self.make_builder_ver() @@ -100,7 +106,12 @@ class Builder: def make_server_ver(self): ver_file = os.path.join(env.root_path, 'server', 'www', 'teleport', 'webroot', 'app', 'app_ver.py') # ver_content = '# -*- coding: utf8 -*-\n\nTS_VER = "{}"\n'.format(self.VER_TELEPORT_SERVER) - ver_content = '# -*- coding: utf8 -*-\nTP_SERVER_VER = "{}"\n'.format(self.VER_TP_SERVER) + # ver_content = '# -*- coding: utf8 -*-\nTP_SERVER_VER = "{}"\n'.format(self.VER_TP_SERVER) + ver_content = '' \ + '# -*- coding: utf8 -*-\n' \ + 'TP_SERVER_VER = "{}"\n' \ + 'TP_ASSIST_REQUIRE_VER = "{}"\n' \ + ''.format(self.VER_TP_SERVER, self.VER_TP_ASSIST_REQUIRE) rewrite = False if not os.path.exists(ver_file): diff --git a/build/builder/core/env.py b/build/builder/core/env.py index 82a907e..4a89e73 100644 --- a/build/builder/core/env.py +++ b/build/builder/core/env.py @@ -148,6 +148,11 @@ class Env(object): if warn_miss_tool: cc.w(' - can not locate `nsis`, so I can not make installer.') + if 'qt' in _tmp: + self.qt = _tmp['qt'] + else: + self.qt = None + elif self.is_linux or self.is_macos: if 'cmake' in _tmp: self.cmake = _tmp['cmake'] @@ -178,6 +183,10 @@ self.ver_ossl = _v_openssl[0].strip() self.ver_ossl_number = _v_openssl[1].strip() + _v_zlib = _tmp['zlib'].split(',') + self.ver_zlib = _v_zlib[0].strip() + self.ver_zlib_number = _v_zlib[1].strip() + self.ver_libuv = _tmp['libuv'] self.ver_mbedtls = _tmp['mbedtls'] # self.ver_sqlite = _tmp['sqlite'] diff --git a/build/builder/core/utils.py b/build/builder/core/utils.py index c6aa97f..3aacf62 100644 --- a/build/builder/core/utils.py +++ b/build/builder/core/utils.py @@ -320,6 +320,21 @@ def msvc_build(sln_file, proj_name, target, platform, force_rebuild): raise RuntimeError('build MSVC project `{}` failed.'.format(proj_name)) +def qt_build_win(prj_path, prj_name, bit_path, target_path): + cc.n(env.visual_studio_path) + if env.qt is None: + raise RuntimeError('can not locate `qt` in configuration.') + + if env.is_win: + tmp_path = os.path.join(env.root_path, 'out', '_tmp_', prj_name, bit_path) + # C:\Windows\System32\cmd.exe /A /Q /K C:\Qt\Qt5.12.0\5.12.0\msvc2017\bin\qtenv2.bat + cmd = 'C:\\Windows\\System32\\cmd.exe /A /Q /C ""{}\qt-helper.bat" "{}\\bin\\qtenv2.bat" "{}VC\\Auxiliary\\Build\\vcvarsall.bat" {} "{}" "{}" {}"'.format(env.build_path, env.qt, env.visual_studio_path, bit_path, tmp_path, prj_path, target_path) + ret, _ = sys_exec(cmd, direct_output=True) + if ret != 0: + raise RuntimeError('build Qt project `{}` failed.'.format(prj_name)) + + + def xcode_build(proj_file, proj_name, target, force_rebuild): if force_rebuild: cmd = 'xcodebuild -project "{}" -target {} -configuration {} clean'.format(proj_file, 
proj_name, target) diff --git a/build/builder/core/ver.py b/build/builder/core/ver.py index 4bc5945..b92165b 100644 --- a/build/builder/core/ver.py +++ b/build/builder/core/ver.py @@ -1,3 +1,3 @@ -# -*- coding: utf8 -*- -VER_TP_SERVER = "3.2.0" -VER_TP_ASSIST = "3.2.0" +# -*- coding: utf8 -*- +VER_TP_SERVER = "3.5.5" +VER_TP_ASSIST = "3.5.5" diff --git a/client/cfg/localhost.key b/client/cfg/localhost.key index 852ff91..33108af 100644 --- a/client/cfg/localhost.key +++ b/client/cfg/localhost.key @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDxI1ZDRvuNnkVB -JgTZmnwF97d7Ace+R0gSSkWi2l2oezakLSdUUkiysr1wx45u2Du36FNqMGg7LiCZ -SX1e2Zba96PI6vwNGnlprCfXTe2eV3W8kMPKA6c9X8BTktMZINNHO3K591jGx+uM -fyrl6/CFVPHNkl73Ium9u91JXIX9BOata4RTlphmHADc+hPXuC6oeN8qayZvV2rV -Jfx1wMlWCMiGJM36JJO5pywteBCKQkVJuJ7y29XF2wT690o+i6ugk+yI2/2OpiET -2E5SYdvyhlbcU+iBERsnY3X7IvFY8/m00YIjIc3reGSEwt9M5WTPRCjgonnpQGAx -9xWXwqkzAgMBAAECggEAT9b2YdInye0EWxy+cFoBBGzPeE/PlcW+LCghRFlutzEM -l3FH21hfL6OUq7m3BCZeJ3cp3zfl2upb6sT1WKlMlHV36jc7ew8v8fgJPPVVXp7w -oZ2A5estvVltsX4knOZMbgJV6xLldvOMnvkf9/6VpV/Jq9nxzXvmzmZcT0TuLCaF -uPk/g/yD5qQ8LkWXDVJeBiDrrOZYo5F+T8bveYKKIEZV0ZAlXwJqVOUFnhffIaDF -fZVDOv4K3+q0aRDLTY2hxptHZiKzpLXgU634nBN3fiy0Fj88upNIus22gjaz+Jfx -2pYv22iGNXAMFQwGaeuT7d4+qhgxze8C7YlLJsJWCQKBgQD8kkXbgYG+8NoKmovz -ki9nuK1R6On5pNjZ344SJm6t/s4FaxQhE/4oHvODwgolqKyT2Sq1K8/5NInRGA29 -xPqqkkhwWk3Zf9VTXgmuXsOikPhbCOuiehO+6/ZthmHYy1jBMqkAIWYaL9Ytn2qb -dKMHwzNdnppQNdQnwmXI2ZdRBQKBgQD0aVTSOmKfKdIxH9qFLdbi2CoyJMzjAjm9 -Ss5M0OhI9wZnCXyjPBx4hOs+M/BKx4lQ296u2Dh+gSK3L8K3x8lVqqx8gd614qaC -EWzXZpAbd1S835o2vVYEWXU0iI9s0jkj+VnILEWBMRPYManRUATB2phwRPulimdu -o+BWN0GG1wKBgCYBxO1hMasQB1+tHf5LM0MCcWJwEDV27wLqNzDYA7O/MjVyhZbs -sURMVAyxuGEuXrno5hpZO3SeyVZjrj2uVKIyXSA7FpfyOqHO9tn8fKgL9LOORhcv -E6WZUH3uyO6cuwBnpTLV082BAVPgN2SpSpcycppV8Za8Yu6QvExbIgAZAoGBALcq -ANETxDj3hHggIQlRkwqpaOXvQkSVtGOxne1fWdTkmz24lFlYgRWotwsErX29D6Ez -RSzPCXd0m2mhN1G3PaEfqOgeA6NXWeV73Y+HY1PSGAT7pXyEY+QajoVyGdo5qWzW -P3yOAQCSoQaSIWulhgspILhyWgxzLpRx53t1KXw9AoGBAOxsrIrx/S6onTz58ncZ -m99OWwJX4WmY5KKhc5dWrfgHrNfldSbhjRhjALy6hSPzkaVy01wXKeeIZl64rUbd -S/r58yALQ5wuIHAi53BLStxgqEdHQHLg16GqL3b/+Waaf+Fy9y5eoUQ976HPr33G -uDJ1AAnWjX3KvcyZeWLFTU2/ +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC+xdLVfN5IErS1 +UplYesMvkFZVBWlH2AojfJ8pSnaqfE6XeVWc/hoCpiqTSkGdoX4NLgy8cjAcvI9Z +1E4Xdcuch161F4HH68V0CZsSK7LHHjXI9SLbjSyoUL0BvDIEGG9D2Zyqy8xTiVc6 +gBZBvR1pGRsh59KiRMNCPN67lT6PGTt+OxMJmh2laYBWh28Lbqx4R1nBl8/m1wZa +QFhTTz0WBrBm4/3j95bQXIUjP0kW8uFcaIg0oA3/1EM5DrVQqJfp7ePEWevToP3H +4Ny1/Wg/gWTpSiB/dgN8c3vXokWxabGJG/Oq5CWjtw9gWFyR0I/OmFh8cnWPf2vf +QFVYQnv1AgMBAAECggEAeGs2ojuns6bbGnmBAjC7dBKP7Cr2QbtE6xGHBfFS5lqA +4WxddjOPB40L4t1EfdOqVXdz4p/RbtI3SmSQxo48cBmi1nx4F1Hj2VMW52ld+AJB +wQ+7aQq73aLZK3c3uw4Rbaq3EbiCyVgwD2U6p1RQdD68ubIzauostmrlzVJvorMZ +1J0hz1gsJuH87WpkgRdp910hEYiM7eUBrOKG+K0trohVeStsjjJyV47LKxXDtf2F +yUQvpbbIgHh2mXe29+d42hio2VrB5y1/+dc7wMiPlwBPG5xpv4eW1aaIGNTsHYCO +1dy8KQirOsrGLIp0GzEej2XL/wTlHJfv3nSpYR2gIQKBgQD4I3lySzFqFUlrvZoX +F9gYKbbH81gQakoAyk68qy4ENve5g+cChHTI6cO4fW8zIeEtLs/kDdpGuV20NmaD +Pb8lcd7nONGhQ1l75aNAy978e2WuAYQ4xfMLR+8jKdTDG9ttIqhGFSNXetMGINLJ +GkCl5fWAJ4p6oKBsUy2FgESECwKBgQDE0RWNQofR0UmxMPeHtD6i3pX2j3bb0GdM +1yh8vqE1IqXJesVM//gIgSZ4n3hd93AXDQIvJ6xkdtNKbnGi5wJLf1OiYW3iSkfC +l+Lgup10CVHJpOrBLxUGYZWjY4LsEX3z3MBNW4DQ6SNmIJN7xJAAewzq0UMMGk6P +IIQ7rvT//wKBgQCqiDa+xc6ACYEb+oIbvNdWQ9TKNgMfxOx2/pJ+N2a4ns5BQNVS +dZWNPpq0AACcM3x9gN5+7MZGNL6hS4HIUHc9VLTMU9A98/tbmsZHkdT90BBhNcmY ++vG9nwJKOEVwkYSLzHW5NG3FgTPl0kkKzHABk7jVClexTxLxX3i5dx2fYQKBgCla 
+bRbTJcp2GO+8BCZlPsvlzMiTeDvTXAEPLBiZzTFm6EKfIxl8ptbSnAy4JQhJVyng +t9bElTo+pUJ8VjAOLbNDO4Vgxz/Gr7E5TJg/XZnl42Nk3VZd2CMRGenMnNOREU/N +0DHwye4bLi7lJVfaAw+2yw4DjfzbAiqcgGwx5JRtAoGBAPFqMyLgZGtCBLrrJVxD +kswm0gABU7l/UXS7OfLTWmfr0vDzoZEcVeBcabwmpRnsTaj1+EHcpl8kZogO4mcg +0RiT+lc2E8TfZL5c4HEr4wSLbz8FEeKwhFa6ScNUOj5vVSnsFzW1xkVEBIM8akMR +UI4+yvEjUIpuQt35cyE9K/nx -----END PRIVATE KEY----- diff --git a/client/cfg/localhost.pem b/client/cfg/localhost.pem index b91d7e1..5b52a2a 100644 --- a/client/cfg/localhost.pem +++ b/client/cfg/localhost.pem @@ -1,24 +1,25 @@ -----BEGIN CERTIFICATE----- -MIIEGTCCAwGgAwIBAgIEASUKPDANBgkqhkiG9w0BAQsFADBSMQswCQYDVQQGEwJD +MIIEHzCCAwegAwIBAgIEASUKQDANBgkqhkiG9w0BAQsFADBSMQswCQYDVQQGEwJD TjENMAsGA1UECgwEVFA0QTEZMBcGA1UECwwQVFA0QSBUZWxlcG9ydCBDQTEZMBcG -A1UEAwwQVFA0QSBUZWxlcG9ydCBDQTAgFw0xODExMDgxNzMyMjdaGA8yMTE4MTAx -NTE3MzIyN1owXzELMAkGA1UEBhMCQ04xCzAJBgNVBAgMAkJKMQswCQYDVQQHDAJ0 +A1UEAwwQVFA0QSBUZWxlcG9ydCBDQTAgFw0xOTAxMjUxMDM0MTVaGA8yMTE5MDEw +MTEwMzQxNVowXzELMAkGA1UEBhMCQ04xCzAJBgNVBAgMAkJKMQswCQYDVQQHDAJ0 cDERMA8GA1UECgwIVGVsZXBvcnQxDzANBgNVBAsMBkFzc2lzdDESMBAGA1UEAwwJ -bG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8SNWQ0b7 -jZ5FQSYE2Zp8Bfe3ewHHvkdIEkpFotpdqHs2pC0nVFJIsrK9cMeObtg7t+hTajBo -Oy4gmUl9XtmW2vejyOr8DRp5aawn103tnld1vJDDygOnPV/AU5LTGSDTRztyufdY -xsfrjH8q5evwhVTxzZJe9yLpvbvdSVyF/QTmrWuEU5aYZhwA3PoT17guqHjfKmsm -b1dq1SX8dcDJVgjIhiTN+iSTuacsLXgQikJFSbie8tvVxdsE+vdKPouroJPsiNv9 -jqYhE9hOUmHb8oZW3FPogREbJ2N1+yLxWPP5tNGCIyHN63hkhMLfTOVkz0Qo4KJ5 -6UBgMfcVl8KpMwIDAQABo4HnMIHkMB0GA1UdDgQWBBRc5d0h39QISTM55kCqPyy1 -dohEHTB6BgNVHSMEczBxgBSh6jvPH2KfGq3ekij4vF+Bqa/roqFWpFQwUjELMAkG +MTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvsXS1Xze +SBK0tVKZWHrDL5BWVQVpR9gKI3yfKUp2qnxOl3lVnP4aAqYqk0pBnaF+DS4MvHIw +HLyPWdROF3XLnIdetReBx+vFdAmbEiuyxx41yPUi240sqFC9AbwyBBhvQ9mcqsvM +U4lXOoAWQb0daRkbIefSokTDQjzeu5U+jxk7fjsTCZodpWmAVodvC26seEdZwZfP +5tcGWkBYU089FgawZuP94/eW0FyFIz9JFvLhXGiINKAN/9RDOQ61UKiX6e3jxFnr +06D9x+Dctf1oP4Fk6Uogf3YDfHN716JFsWmxiRvzquQlo7cPYFhckdCPzphYfHJ1 +j39r30BVWEJ79QIDAQABo4HtMIHqMB0GA1UdDgQWBBQHRB+sP9RolTsf34gPFAJw +6UKn2zB6BgNVHSMEczBxgBSh6jvPH2KfGq3ekij4vF+Bqa/roqFWpFQwUjELMAkG A1UEBhMCQ04xDTALBgNVBAoMBFRQNEExGTAXBgNVBAsMEFRQNEEgVGVsZXBvcnQg Q0ExGTAXBgNVBAMMEFRQNEEgVGVsZXBvcnQgQ0GCAQAwDAYDVR0TAQH/BAIwADAO -BgNVHQ8BAf8EBAMCA4gwEwYDVR0lBAwwCgYIKwYBBQUHAwEwFAYDVR0RBA0wC4IJ -bG9jYWxob3N0MA0GCSqGSIb3DQEBCwUAA4IBAQAfj/CpFDhv5CrnN2kxhtRAmesJ -q6/KxxkBaimjbS/BpfvqfC9RxGH7MIqGUkbC4/ADkEt2OmVU4+f2R3+rCl+x+r1t -9+3r/JSYYVBxFnF1GbDhiY9sKahgb4HoFjE2Fj8eVODcEzdApLr198p5IIIyfBys -WHV4CYFMvq5qCKbSR/JMfrm9GArAh1J+B+JMIfm8xwerFi0tfK2YT+N4QkvbidjG -sd+RKlR51GHo9m4iEQ7mDd9H8joVrVs2MVLGf2EoVU5y/Ahee4g7k3SKrn3GI/Ec -6BRCht+INCLI3bnC3MtJHJRzv5Vmu4pSh3cwnVHfe+VWLGvGlp2+KeC02xZ2 +BgNVHQ8BAf8EBAMCA4gwEwYDVR0lBAwwCgYIKwYBBQUHAwEwGgYDVR0RBBMwEYIJ +bG9jYWxob3N0hwR/AAABMA0GCSqGSIb3DQEBCwUAA4IBAQCagioxwrTdc9N5IVSH +qbOXTGpUE4R7dvfCKatNJrGen7lAGdfqgomwM+fjRO5Jt0Kc15q8gxvQ3kePwaBY +11f1FJ8iDMqxX7Hmb3KT0FeWKmUPgH3YtlitSAD7DrqMxBh5sr/28zN8XIjWWhY1 +huv7APQbuicxl/YZumPKa3r8FI1ca4pn4TKsm+YMN6Buy6k9CQV6POtNmawLNgNP +axErEeTzNsis1JalHFAdr6mPWY0xaZsdrMeJHMx/7lvM7Qo+odEyswguoCS8Bc1Y +6ZlEYZUev7lN0amqnoh25KrrGqpyHCXtXAEEwVyTmdpYDtqsetDYv7aCrfeITPBm +GAyD -----END CERTIFICATE----- diff --git a/client/cfg/tp-assist.macos.json b/client/cfg/tp-assist.macos.json index 93ff8bc..189b78f 100644 --- a/client/cfg/tp-assist.macos.json +++ b/client/cfg/tp-assist.macos.json @@ -102,7 +102,7 @@ "name": "FreeRDP", "display": "FreeRDP", "app": "", - "cmdline": "", + "cmdline": "/u:{user_name} 
/v:{host_ip} /port:{host_port}", "desc": [ "建议使用homebrew安装freerdp,安装后freerdp默认路径在:/usr/local/Cellar/freerdp/x.y.z/bin/xfreerdp", "首次安装freerdp后需要重新启动计算机" diff --git a/client/cfg/tp-assist.windows.json b/client/cfg/tp-assist.windows.json index f4fc876..ff007f9 100755 --- a/client/cfg/tp-assist.windows.json +++ b/client/cfg/tp-assist.windows.json @@ -1,9 +1,8 @@ { "ssh": { "selected": "putty", - "available": [ - { - "name":"putty", + "available": [{ + "name": "putty", "display": "PuTTY(内置)", "app": "{assist_tools_path}\\putty\\putty.exe", "cmdline": "-ssh -pw **** -P {host_port} -l {user_name} {host_ip}" @@ -30,9 +29,8 @@ }, "scp": { "selected": "winscp", - "available": [ - { - "name":"winscp", + "available": [{ + "name": "winscp", "display": "WinSCP(内置)", "app": "{assist_tools_path}\\winscp\\winscp.exe", "cmdline": "/sessionname=\"TP#{real_ip}\" {user_name}:****@{host_ip}:{host_port}" @@ -47,9 +45,8 @@ }, "telnet": { "selected": "putty", - "available": [ - { - "name":"putty", + "available": [{ + "name": "putty", "display": "PuTTY(内置)", "app": "{assist_tools_path}\\putty\\putty.exe", "cmdline": "telnet://{user_name}@{host_ip}:{host_port}" @@ -68,15 +65,20 @@ } ] }, - "rdp" : { - "available" : [ - { - "app" : "{assist_tools_path}\\tprdp\\tprdp-client.exe", - "cmdline" : "/v:{host_ip}:{host_port} /u:{user_name} /t:\"TP#{real_ip}\"", - "display" : "FreeRDP(内置)", - "name" : "freerdp" - } - ], - "selected" : "freerdp" - } -} + "rdp": { + "available": [{ + "app": "mstsc.exe", + "cmdline": "\"{tmp_rdp_file}\"", + "display": "微软RDP客户端(系统自带)", + "name": "mstsc" + }, + { + "app": "{assist_tools_path}\\tprdp\\wfreerdp.exe", + "cmdline": "/v:{host_ip}:{host_port} /u:{user_name} /t:\"TP#{real_ip}\"", + "display": "FreeRDP(内置)", + "name": "freerdp" + } + ], + "selected": "mstsc" + } +} \ No newline at end of file diff --git a/client/tp-player/bar.cpp b/client/tp-player/bar.cpp new file mode 100644 index 0000000..5ff3f28 --- /dev/null +++ b/client/tp-player/bar.cpp @@ -0,0 +1,718 @@ +#include "bar.h" +#include +#include +#include "mainwindow.h" + + +#define FONT_SIZE_DEFAULT 12 +#define FONT_SIZE_TIME 14 +#define TEXT_COLOR QColor(255,255,255,153) +#define SPEED_BTN_WIDTH 42 +#define CHKBOX_RIGHT_PADDING 6 +#define PADDING_TIME_PROGRESS_BAR 10 +#define SPEED_BTN_PADDING_TOP 8 +#define SPEED_BTN_PADDING_RIGHT 8 +#define SKIP_PADDING_TOP 10 + +#define BAR_ALIGN_TOP 10 +#define BAR_PADDING_TOP 18 +#define BAR_PADDING_LEFT 15 +#define BAR_PADDING_RIGHT 15 + +typedef struct RES_MAP { + RES_ID id; + const char* name; +}RES_MAP; + +static RES_MAP img_res[res__max] = { + {res_bg_left, "bg-left"}, + {res_bg_mid, "bg-mid"}, + {res_bg_right, "bg-right"}, + {res_btn_normal_left, "btn-normal-left"}, + {res_btn_normal_mid, "btn-normal-mid"}, + {res_btn_normal_right, "btn-normal-right"}, + {res_btn_sel_left, "btn-sel-left"}, + {res_btn_sel_mid, "btn-sel-mid"}, + {res_btn_sel_right, "btn-sel-right"}, + {res_btn_hover_left, "btn-hover-left"}, + {res_btn_hover_mid, "btn-hover-mid"}, + {res_btn_hover_right, "btn-hover-right"}, + + {res_prgbarh_left, "prgbarh-left"}, + {res_prgbarh_mid, "prgbarh-mid"}, + {res_prgbar_mid, "prgbar-mid"}, + {res_prgbar_right, "prgbar-right"}, + + {res_chkbox_normal, "chkbox-normal"}, + {res_chkbox_hover, "chkbox-hover"}, + {res_chkbox_sel_normal, "chkbox-sel-normal"}, + {res_chkbox_sel_hover, "chkbox-sel-hover"}, +}; + +typedef struct SPEED_MAP { + int id; + const char* title; +}SPEED_MAP; + +static SPEED_MAP speed[speed_count] = { + {speed_1x, "1x"}, + {speed_2x, "2x"}, + {speed_4x, "4x"}, + 
{speed_8x, "8x"}, +}; + +static inline int min(int a, int b){ + return a < b ? a : b; +} + +static inline int max(int a, int b){ + return a > b ? a : b; +} + +Bar::Bar() { + m_img_ready = false; + m_width = 0; + m_height = 0; + m_str_total_time = "00:00"; + m_str_passed_time = "00:00"; + m_str_passed_time_last_draw = "--:--"; + + m_percent = 0; + m_percent_last_draw = -1; + + m_play_hover = false; + m_playing = true; // false=paused + m_speed_selected = speed_1x; + m_speed_hover = speed_count; // speed_count=no-hover + m_skip_selected = false; + m_skip_hover = false; + m_progress_hover = false; + m_progress_pressed = false; + + m_resume_ms = 0; +} + +Bar::~Bar() { + +} + +bool Bar::init(MainWindow* owner) { + m_owner = owner; + + // 加载所需的图像资源 + int i = 0; + for(i = 0; i < res__max; ++i) { + QString name; + name.sprintf(":/tp-player/res/bar/%s.png", img_res[i].name); + if(!m_res[i].load(name)) + return false; + } + + // 无需合成的图像 + if(!m_img_btn_play[play_running][widget_normal].load(":/tp-player/res/bar/play-normal.png") + || !m_img_btn_play[play_running][widget_hover].load(":/tp-player/res/bar/play-hover.png") + || !m_img_btn_play[play_paused][widget_normal].load(":/tp-player/res/bar/pause-normal.png") + || !m_img_btn_play[play_paused][widget_hover].load(":/tp-player/res/bar/pause-hover.png") + || !m_img_progress_pointer[widget_normal].load(":/tp-player/res/bar/prgpt-normal.png") + || !m_img_progress_pointer[widget_hover].load(":/tp-player/res/bar/prgpt-hover.png") + ) { + return false; + } + + m_height = m_res[res_bg_left].height(); + + return true; +} + +void Bar::start(uint32_t total_ms, int width) { + bool is_first_start = (m_width == 0); + m_width = width; + + m_total_ms = total_ms; + _ms_to_str(total_ms, m_str_total_time); + + + // 首次播放时,调整位置左右居中,距窗口顶部10点处。 + if(is_first_start) { + _init_imgages(); + QRect rc = m_owner->rect(); + m_rc = QRect(0, 0, m_width, m_height); + m_rc.moveTo((rc.width() - m_width)/2, BAR_ALIGN_TOP); + } +} + +void Bar::end() { + if(m_played_ms != m_total_ms) + update_passed_time(m_total_ms); + + m_playing = false; + m_owner->update(m_rc.left()+m_rc_btn_play.left(), m_rc.top()+m_rc_btn_play.top(), m_rc_btn_play.width(), m_rc_btn_play.height()); +} + +void Bar::_init_imgages() { + m_img_bg = QPixmap(m_width, m_height); + m_img_bg.fill(Qt::transparent);//用透明色填充 + QPainter pp(&m_img_bg); + QFont font = pp.font(); + + // 合成背景图像 + { + pp.drawPixmap(0, 0, m_res[res_bg_left].width(), m_res[res_bg_left].height(), m_res[res_bg_left]); + pp.drawPixmap(m_res[res_bg_left].width(), 0, m_width - m_res[res_bg_left].width() - m_res[res_bg_right].width(), m_height, m_res[res_bg_mid]); + pp.drawPixmap(m_width-m_res[res_bg_right].width(), 0, m_res[res_bg_right].width(), m_height, m_res[res_bg_right]); + } + + { + m_rc_btn_play = QRect(BAR_PADDING_LEFT, (m_height - m_img_btn_play[play_running][widget_normal].height())/2 , m_img_btn_play[play_running][widget_normal].width(), m_img_btn_play[play_running][widget_normal].height()); + } + + // 合成速度按钮 + { + int w = SPEED_BTN_WIDTH, h = m_res[res_btn_normal_left].height(); + QRect rc(0, 0, w, h); + QPixmap btn[btnspd_state_count]; + + // 未选中状态 + btn[btnspd_normal] = QPixmap(w, h); + btn[btnspd_normal].fill(Qt::transparent);//用透明色填充 + QPainter pn(&btn[btnspd_normal]); + pn.drawPixmap(0, 0, m_res[res_btn_normal_left].width(), m_res[res_btn_normal_left].height(), m_res[res_btn_normal_left]); + pn.drawPixmap(m_res[res_btn_normal_left].width(), 0, w - m_res[res_btn_normal_left].width() - m_res[res_btn_normal_right].width(), h, 
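+        // Three-slice skinning: a fixed-width left cap, then the mid image
+        // stretched across (total width - both caps), then a fixed right cap.
+        // This assumes the target is at least as wide as the two caps combined;
+        // none of these compositions guard against narrower targets.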
m_res[res_btn_normal_mid]); + pn.drawPixmap(w-m_res[res_btn_normal_right].width(), 0, m_res[res_btn_normal_right].width(), h, m_res[res_btn_normal_right]); + // 选中状态 + btn[btnspd_sel] = QPixmap(w, h); + btn[btnspd_sel].fill(Qt::transparent);//用透明色填充 + QPainter ps(&btn[btnspd_sel]); + ps.drawPixmap(0, 0, m_res[res_btn_sel_left].width(), m_res[res_btn_sel_left].height(), m_res[res_btn_sel_left]); + ps.drawPixmap(m_res[res_btn_sel_left].width(), 0, w - m_res[res_btn_sel_left].width() - m_res[res_btn_sel_right].width(), h, m_res[res_btn_sel_mid]); + ps.drawPixmap(w-m_res[res_btn_sel_right].width(), 0, m_res[res_btn_sel_right].width(), h, m_res[res_btn_sel_right]); + // 鼠标滑过状态 + btn[btnspd_hover] = QPixmap(w, h); + btn[btnspd_hover].fill(Qt::transparent);//用透明色填充 + QPainter ph(&btn[btnspd_hover]); + ph.drawPixmap(0, 0, m_res[res_btn_hover_left].width(), m_res[res_btn_hover_left].height(), m_res[res_btn_hover_left]); + ph.drawPixmap(m_res[res_btn_hover_left].width(), 0, w - m_res[res_btn_hover_left].width() - m_res[res_btn_hover_right].width(), h, m_res[res_btn_hover_mid]); + ph.drawPixmap(w-m_res[res_btn_hover_right].width(), 0, m_res[res_btn_hover_right].width(), h, m_res[res_btn_hover_right]); + + for(int i = 0; i < btnspd_state_count; ++i) { + for(int j = 0; j < speed_count; ++j) { + m_img_btn_speed[j][i] = QPixmap(w, h); + m_img_btn_speed[j][i].fill(Qt::transparent); + QPainter ps(&m_img_btn_speed[j][i]); + ps.setPen(TEXT_COLOR); + QFont font = ps.font(); + font.setFamily("consolas"); + font.setPixelSize(FONT_SIZE_DEFAULT); + ps.setFont(font); + ps.drawPixmap(0, 0, w, h, btn[i]); + ps.drawText(rc, Qt::AlignCenter, speed[j].title); + } + } + } + + // 合成跳过无操作选项 + { + // 计算显示跳过无操作选项字符串的宽高 + font.setFamily("微软雅黑"); + font.setBold(false); + font.setPixelSize(FONT_SIZE_DEFAULT); + pp.setFont(font); + QFontMetrics fm = pp.fontMetrics(); + + { + int h = fm.height(); + if(h < m_res[res_chkbox_normal].height()) + h = m_res[res_chkbox_normal].height(); + m_rc_skip = QRect(0, 0, fm.width(LOCAL8BIT("无操作则跳过")) + CHKBOX_RIGHT_PADDING + m_res[res_chkbox_normal].width(), h); + } + + int w = m_rc_skip.width(); + int h = m_rc_skip.height(); + int chkbox_top = (m_rc_skip.height() - m_res[res_chkbox_normal].height()) / 2; + int text_left = m_res[res_chkbox_normal].width() + CHKBOX_RIGHT_PADDING; + int text_top = (m_rc_skip.height() - fm.height()) / 2; + + for(int i = 0; i < chkbox_state_count; ++i) { + for(int j = 0; j < widget_state_count; ++j) { + m_img_skip[i][j] = QPixmap(w,h); + m_img_skip[i][j].fill(Qt::transparent); + QPainter ps(&m_img_skip[i][j]); + ps.setPen(TEXT_COLOR); + QFont font = ps.font(); + font.setFamily("微软雅黑"); + font.setPixelSize(FONT_SIZE_DEFAULT); + ps.setFont(font); + + QPixmap* img = nullptr; + if(i == chkbox_normal && j == widget_normal) + img = &m_res[res_chkbox_normal]; + else if(i == chkbox_normal && j == widget_hover) + img = &m_res[res_chkbox_hover]; + else if(i == chkbox_selected && j == widget_normal) + img = &m_res[res_chkbox_sel_normal]; + else if(i == chkbox_selected && j == widget_hover) + img = &m_res[res_chkbox_sel_hover]; + + if(img == nullptr) { + qDebug("ERROR: can not load image for check-box."); + img = &m_res[res_chkbox_normal]; + } + ps.drawPixmap(0, chkbox_top, img->width(), img->height(), *img); + ps.drawText(QRect(text_left, text_top, w-text_left, h-text_top), Qt::AlignCenter, LOCAL8BIT("无操作则跳过")); + } + } + } + + // 定位进度条 + { + // 计算显示时间所需的宽高 + font.setFamily("consolas"); + font.setBold(true); + font.setPixelSize(FONT_SIZE_TIME); + pp.setFont(font); + { + 
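+            // Measure the widest time string the bar can display ("00:00:00")
+            // once and reserve that extent for both time labels; the progress
+            // bar is then given whatever width remains between them.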
QFontMetrics fm = pp.fontMetrics(); + m_rc_time_passed = QRect(0, 0, fm.width("00:00:00"), fm.height()); + m_rc_time_total = m_rc_time_passed; + } + + m_img_time_total = QPixmap(m_rc_time_total.width(), m_rc_time_total.height()); + m_img_time_total.fill(Qt::transparent); + QPainter pp(&m_img_time_total); + pp.setPen(TEXT_COLOR); + QFont font = pp.font(); + font.setFamily("consolas"); + font.setBold(true); + font.setPixelSize(FONT_SIZE_TIME); + pp.setFont(font); + pp.drawText(m_rc_time_total, Qt::AlignLeft, m_str_total_time); + + // 定位时间字符串的位置 + m_rc_time_passed.moveTo(BAR_PADDING_LEFT+m_img_btn_play[play_running][widget_normal].width()+PADDING_TIME_PROGRESS_BAR, BAR_PADDING_TOP); + m_rc_time_total.moveTo(m_width - BAR_PADDING_RIGHT - m_rc_time_total.width(), BAR_PADDING_TOP); + + int prog_width = m_rc_time_total.left() - PADDING_TIME_PROGRESS_BAR - PADDING_TIME_PROGRESS_BAR - m_rc_time_passed.right(); + int prog_height = max(m_res[res_prgbarh_left].height(), m_img_progress_pointer->height()); + m_rc_progress = QRect(0, 0, prog_width, prog_height); + m_rc_progress.moveTo(m_rc_time_passed.right() + PADDING_TIME_PROGRESS_BAR, m_rc_time_passed.top() + (m_rc_time_passed.height() - prog_height)/2); + } + + + // 定位速度按钮 + { + int left = m_rc_time_passed.right() + PADDING_TIME_PROGRESS_BAR; + int top = m_rc_time_passed.bottom() + SPEED_BTN_PADDING_TOP; + for(int i = 0; i < speed_count; i++) { + m_rc_btn_speed[i] = QRect(left, top, m_img_btn_speed[i][widget_normal].width(), m_img_btn_speed[i][widget_normal].height()); + left += m_img_btn_speed[i][widget_normal].width() + SPEED_BTN_PADDING_RIGHT; + } + } + + // 定位跳过选项 + { + int left = m_rc_time_total.left() - m_rc_skip.width() - PADDING_TIME_PROGRESS_BAR; + int top = m_rc_time_passed.bottom() + SKIP_PADDING_TOP;//m_rc_btn_speed[0].top() + (m_rc_btn_speed[0].height() - m_rc_skip.height())/2; + m_rc_skip.moveTo(left, top); + } + + m_img_ready = true; +} + +void Bar::_ms_to_str(uint32_t ms, QString& str) { + int h = 0, m = 0, s = 0; + s = ms / 1000; + if(ms % 1000 > 500) + s += 1; + + h = s / 3600; + s = s % 3600; + m = s / 60; + s = s % 60; + + if(h > 0) + str.sprintf("%02d:%02d:%02d", h, m, s); + else + str.sprintf("%02d:%02d", m, s); +} + +void Bar::update_passed_time(uint32_t ms) { + QString str_passed; + _ms_to_str(ms, str_passed); + + if(m_str_passed_time != str_passed) + { + m_str_passed_time = str_passed; + m_owner->update(m_rc.left()+m_rc_time_passed.left(), m_rc.top()+m_rc_time_passed.top(), m_rc_time_passed.width(), m_rc_time_passed.height()); + } + + int percent = 0; + if(ms >= m_total_ms) { + percent = 100; + m_played_ms = m_total_ms; + } + else { + m_played_ms = ms; + //percent = (int)(((double)m_played_ms / (double)m_total_ms) * 100); + percent = m_played_ms * 100 / m_total_ms; + } + + if(percent != m_percent) { + m_percent = percent; + m_owner->update(m_rc.left()+m_rc_progress.left(), m_rc.top()+m_rc_progress.top(), m_rc_progress.width(), m_rc_progress.height()); + } +} + +void Bar::onMouseMove(int x, int y) { + // 映射鼠标坐标点到本浮动窗内部的相对位置 + QPoint pt(x-m_rc.left(), y-m_rc.top()); + + if(m_progress_pressed) { + // 重新设置进度条指示器位置 + int percent = 0; + + if(pt.x() < m_rc_progress.left()) { + percent = 0; + m_resume_ms = 1; + } + else if(pt.x() > m_rc_progress.right()) { + percent = 100; + m_resume_ms = m_total_ms; + } + else { + percent = (pt.x() + m_img_progress_pointer[widget_normal].width()/2 - m_rc_progress.left()) * 100 / m_rc_progress.width(); + m_resume_ms = m_total_ms * percent / 100; + } + update_passed_time(m_resume_ms); + return; + } + 
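+    // Rough worked example of the drag math above: with a 600px-wide progress
+    // rect and a 60000ms recording, dragging the pointer to the middle gives
+    // percent ≈ 50 and m_resume_ms ≈ 30000. percent is an integer in [0,100],
+    // so seek granularity is 1% of the total duration.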
+ + bool play_hover = m_rc_btn_play.contains(pt); + if(play_hover != m_play_hover) { + m_play_hover = play_hover; + m_owner->update(m_rc.left()+m_rc_btn_play.left(), m_rc.top()+m_rc_btn_play.top(), m_rc_btn_play.width(), m_rc_btn_play.height()); + } + if(play_hover) + return; + + int speed_hover = speed_count; + for(int i = 0; i < speed_count; ++i) { + if(m_rc_btn_speed[i].contains(pt)) { + speed_hover = i; + break; + } + } + if(m_speed_hover != speed_hover) { + if(m_speed_hover != speed_count) { + m_owner->update(m_rc.left()+m_rc_btn_speed[m_speed_hover].left(), m_rc.top()+m_rc_btn_speed[m_speed_hover].top(), m_rc_btn_speed[m_speed_hover].width(), m_rc_btn_speed[m_speed_hover].height()); + } + m_speed_hover = speed_hover; + if(m_speed_hover != speed_count) { + m_owner->update(m_rc.left()+m_rc_btn_speed[m_speed_hover].left(), m_rc.top()+m_rc_btn_speed[m_speed_hover].top(), m_rc_btn_speed[m_speed_hover].width(), m_rc_btn_speed[m_speed_hover].height()); + } + } + + bool skip_hover = m_rc_skip.contains(pt); + if(skip_hover != m_skip_hover) { + m_skip_hover = skip_hover; + m_owner->update(m_rc.left()+m_rc_skip.left(), m_rc.top()+m_rc_skip.top(), m_rc_skip.width(), m_rc_skip.height()); + } + if(skip_hover) + return; +} + +void Bar::onMousePress(int x, int y, Qt::MouseButton button) { + // 我们只关心左键按下 + if(button != Qt::LeftButton) + return; + + // 映射鼠标坐标点到本浮动窗内部的相对位置 + QPoint pt(x-m_rc.left(), y-m_rc.top()); + + if(m_rc_btn_play.contains(pt)) { + if(m_playing) + m_owner->pause(); + else + m_owner->resume(false, 0); + + m_playing = !m_playing; + m_owner->update(m_rc.left()+m_rc_btn_play.left(), m_rc.top()+m_rc_btn_play.top(), m_rc_btn_play.width(), m_rc_btn_play.height()); + + return; + } + + int speed_sel = speed_count; + for(int i = 0; i < speed_count; ++i) { + if(m_rc_btn_speed[i].contains(pt)) { + speed_sel = i; + break; + } + } + if(m_speed_selected != speed_sel && speed_sel != speed_count) { + int old_sel = m_speed_selected; + m_speed_selected = speed_sel; + m_owner->set_speed(get_speed()); + m_owner->update(m_rc.left()+m_rc_btn_speed[old_sel].left(), m_rc.top()+m_rc_btn_speed[old_sel].top(), m_rc_btn_speed[old_sel].width(), m_rc_btn_speed[old_sel].height()); + m_owner->update(m_rc.left()+m_rc_btn_speed[m_speed_hover].left(), m_rc.top()+m_rc_btn_speed[m_speed_hover].top(), m_rc_btn_speed[m_speed_hover].width(), m_rc_btn_speed[m_speed_hover].height()); + return; + } + + if(m_rc_skip.contains(pt)) { + m_skip_selected = !m_skip_selected; + m_owner->set_skip(m_skip_selected); + m_owner->update(m_rc.left()+m_rc_skip.left(), m_rc.top()+m_rc_skip.top(), m_rc_skip.width(), m_rc_skip.height()); + return; + } + + // + if(m_rc_progress.contains(pt)) { + m_progress_pressed = true; + // TODO: 暂停播放,按比例计算出点击位置占整个录像时长的百分比,定位到此位置准备播放。 + // TODO: 如果点击的位置是进度条指示标志,则仅暂停播放 + m_owner->pause(); + m_playing = false; + m_owner->update(m_rc.left()+m_rc_btn_play.left(), m_rc.top()+m_rc_btn_play.top(), m_rc_btn_play.width(), m_rc_btn_play.height()); + + int percent = 0; + + if(pt.x() < m_rc_progress.left()) { + percent = 0; + m_resume_ms = 0; + } + else if(pt.x() > m_rc_progress.right()) { + percent = 100; + m_resume_ms = m_total_ms; + } + else { + percent = (pt.x() + m_img_progress_pointer[widget_normal].width()/2 - m_rc_progress.left()) * 100 / m_rc_progress.width(); + m_resume_ms = m_total_ms * percent / 100; + } + update_passed_time(m_resume_ms); + } +} + +void Bar::onMouseRelease(int, int, Qt::MouseButton button) { + // 我们只关心左键释放 + if(button != Qt::LeftButton) + return; + if(m_progress_pressed) { + 
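+        // This completes the seek gesture: onMousePress() paused playback and
+        // seeded m_resume_ms, onMouseMove() kept it updated while dragging,
+        // and the release resumes playback from the chosen position.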
m_progress_pressed = false; + qDebug("resume at %dms.", m_resume_ms); + m_owner->resume(true, m_resume_ms); + m_playing = true; + m_owner->update(m_rc.left()+m_rc_btn_play.left(), m_rc.top()+m_rc_btn_play.top(), m_rc_btn_play.width(), m_rc_btn_play.height()); + } +} + +int Bar::get_speed() { + switch (m_speed_selected) { + case speed_1x: + return 1; + case speed_2x: + return 2; + case speed_4x: + return 4; + case speed_8x: + return 8; + default: + return 1; + } +} + +void Bar::draw(QPainter& painter, const QRect& rc_draw){ + if(!m_width) + return; + if(!rc_draw.intersects(m_rc)) + return; + + // 绘制背景 + { + QRect rc(m_rc); + //rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + + int from_x = max(rc_draw.left(), m_rc.left()) - m_rc.left(); + int from_y = max(rc_draw.top(), m_rc.top()) - m_rc.top(); + int w = min(m_rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(m_rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = m_rc.left() + from_x; + int to_y = m_rc.top() + from_y; + painter.drawPixmap(to_x, to_y, m_img_bg, from_x, from_y, w, h); + } + + // 绘制播放按钮 + { + QRect rc(m_rc_btn_play); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + if(rc_draw.intersects(rc)) { + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + if(m_playing){ + if(m_play_hover) + painter.drawPixmap(to_x, to_y, m_img_btn_play[play_paused][widget_hover], from_x, from_y, w, h); + else + painter.drawPixmap(to_x, to_y, m_img_btn_play[play_paused][widget_normal], from_x, from_y, w, h); + } else { + if(m_play_hover) + painter.drawPixmap(to_x, to_y, m_img_btn_play[play_running][widget_hover], from_x, from_y, w, h); + else + painter.drawPixmap(to_x, to_y, m_img_btn_play[play_running][widget_normal], from_x, from_y, w, h); + } + } + } + + // 绘制已播放时间 + { + QRect rc(m_rc_time_passed); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + if(rc_draw.intersects(rc)) { + if(m_str_passed_time != m_str_passed_time_last_draw) { + m_img_time_passed = QPixmap(m_rc_time_passed.width(), m_rc_time_passed.height()); + m_img_time_passed.fill(Qt::transparent); + QPainter pp(&m_img_time_passed); + pp.setPen(TEXT_COLOR); + QFont font = pp.font(); + font.setFamily("consolas"); + font.setBold(true); + font.setPixelSize(FONT_SIZE_TIME); + pp.setFont(font); + pp.drawText(QRect(0,0,m_rc_time_passed.width(), m_rc_time_passed.height()), Qt::AlignRight, m_str_passed_time); + + m_str_passed_time_last_draw = m_str_passed_time; + } + + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + painter.drawPixmap(to_x, to_y, m_img_time_passed, from_x, from_y, w, h); + } + } + + // 绘制总时间 + { + QRect rc(m_rc_time_total); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + if(rc_draw.intersects(rc)) { + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + 
from_x; + int to_y = rc.top() + from_y; + painter.drawPixmap(to_x, to_y, m_img_time_total, from_x, from_y, w, h); + } + } + + // 绘制进度条 + { + QRect rc(m_rc_progress); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + + if(rc_draw.intersects(rc)) { + if(m_percent_last_draw != m_percent) { + m_img_progress = QPixmap(m_rc_progress.width(), m_rc_progress.height()); + m_img_progress.fill(Qt::transparent); + QPainter pp(&m_img_progress); + + // 进度条 + int top = (rc.height() - m_res[res_prgbarh_left].height())/2; + int passed_width = rc.width() * m_percent / 100; // 已经播放的进度条宽度 + int remain_width = rc.width() - passed_width; // 剩下未播放的进度条宽度 + + if(passed_width >= m_res[res_prgbarh_left].width()) + pp.drawPixmap(0, top , m_res[res_prgbarh_left].width(), m_res[res_prgbarh_left].height(), m_res[res_prgbarh_left]); + if(passed_width > 0) { + //pp.drawPixmap(m_res[res_pbh_left].width(), top, passed_width - m_res[res_pbh_left].width(), m_res[res_pbh_mid].height(), m_res[res_pbh_mid]); + if(remain_width > m_res[res_prgbar_right].width()) + pp.drawPixmap(m_res[res_prgbarh_left].width(), top, passed_width - m_res[res_prgbarh_left].width(), m_res[res_prgbarh_mid].height(), m_res[res_prgbarh_mid]); + else + pp.drawPixmap(m_res[res_prgbarh_left].width(), top, passed_width - m_res[res_prgbarh_left].width() - m_res[res_prgbar_right].width(), m_res[res_prgbarh_mid].height(), m_res[res_prgbarh_mid]); + } + if(remain_width > 0) + pp.drawPixmap(passed_width, top, remain_width - m_res[res_prgbar_right].width(), m_res[res_prgbar_mid].height(), m_res[res_prgbar_mid]); + if(remain_width >= m_res[res_prgbar_right].width()) + pp.drawPixmap(rc.width() - m_res[res_prgbar_right].width(), top , m_res[res_prgbar_right].width(), m_res[res_prgbar_right].height(), m_res[res_prgbar_right]); + + // 进度位置指示 + int left = passed_width - m_img_progress_pointer->width() / 2; + if(left < 0) + left = 0; + if(left + m_img_progress_pointer->width() > rc.width()) + left = rc.width() - m_img_progress_pointer->width(); + top = (rc.height() - m_img_progress_pointer[widget_normal].height())/2; + pp.drawPixmap(left, top , m_img_progress_pointer[widget_normal].width(), m_img_progress_pointer[widget_normal].height(), m_img_progress_pointer[widget_normal]); + + m_percent_last_draw = m_percent; + } + + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + painter.drawPixmap(to_x, to_y, m_img_progress, from_x, from_y, w, h); + } + } + + // 绘制速度按钮 + { + for(int i = 0; i < speed_count; i++) { + QRect rc(m_rc_btn_speed[i]); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + if(rc_draw.intersects(rc)) { + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + + if(m_speed_hover == i) + painter.drawPixmap(to_x, to_y, m_img_btn_speed[i][btnspd_hover], from_x, from_y, w, h); + else if(m_speed_selected == i) + painter.drawPixmap(to_x, to_y, m_img_btn_speed[i][btnspd_sel], from_x, from_y, w, h); + else + painter.drawPixmap(to_x, to_y, m_img_btn_speed[i][btnspd_normal], from_x, from_y, w, h); + } + } + } + + // 绘制跳过选项 + { + 
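+        // Same partial-repaint blit as the blocks above: intersect the widget
+        // rect with the dirty rect (rc_draw), turn the overlap into a source
+        // offset (from_x/from_y) plus size (w/h), and copy only that sub-rect
+        // of the cached pixmap onto the window.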
QRect rc(m_rc_skip); + rc.moveTo(m_rc.left()+rc.left(), m_rc.top() + rc.top()); + + // painter.fillRect(rc, QColor(255, 255, 255)); + + if(rc_draw.intersects(rc)) { + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + //qDebug("skip (%d,%d), (%d,%d)/(%d,%d)", to_x, to_y, from_x, from_y, w, h); + if(m_skip_selected) { + if(m_skip_hover) + painter.drawPixmap(to_x, to_y, m_img_skip[chkbox_selected][widget_hover], from_x, from_y, w, h); + else + painter.drawPixmap(to_x, to_y, m_img_skip[chkbox_selected][widget_normal], from_x, from_y, w, h); + } + else { + if(m_skip_hover) + painter.drawPixmap(to_x, to_y, m_img_skip[chkbox_normal][widget_hover], from_x, from_y, w, h); + else + painter.drawPixmap(to_x, to_y, m_img_skip[chkbox_normal][widget_normal], from_x, from_y, w, h); + } + } + } +} + + diff --git a/client/tp-player/bar.h b/client/tp-player/bar.h new file mode 100644 index 0000000..7141cb3 --- /dev/null +++ b/client/tp-player/bar.h @@ -0,0 +1,149 @@ +#ifndef BAR_H +#define BAR_H + +#include +#include +#include + +typedef enum { + res_bg_left = 0, // 背景 + res_bg_mid, + res_bg_right, + res_btn_normal_left, // 按钮(速度选择),普通状态 + res_btn_normal_mid, + res_btn_normal_right, + res_btn_sel_left, // 按钮(速度选择),已选中 + res_btn_sel_mid, + res_btn_sel_right, + res_btn_hover_left, // 按钮(速度选择),鼠标滑过 + res_btn_hover_mid, + res_btn_hover_right, + + res_prgbarh_left, // 进度条(已经过)左侧 + res_prgbarh_mid, // 进度条(已经过)中间,拉伸填充 + res_prgbar_mid, // 进度条(未到达)中间,拉伸填充 + res_prgbar_right, // 进度条(未到达)右侧 + res_chkbox_normal, // 复选框 + res_chkbox_hover, + res_chkbox_sel_normal, + res_chkbox_sel_hover, + + res__max +}RES_ID; + +//typedef enum { +// widget_normal = 0, +// widget_hover, +// widget__max +//}WIDGET_STAT; + +#define widget_normal 0 +#define widget_hover 1 +#define widget_state_count 2 + +//typedef enum { +// play_running = 0, +// play_paused, +// play__max +//}PLAY_STAT; + +#define play_running 0 +#define play_paused 1 +#define play_state_count 2 + +#define speed_1x 0 +#define speed_2x 1 +#define speed_4x 2 +#define speed_8x 3 +#define speed_count 4 + +#define btnspd_normal 0 +#define btnspd_sel 1 +#define btnspd_hover 2 +#define btnspd_state_count 3 + +#define chkbox_normal 0 +#define chkbox_selected 1 +#define chkbox_state_count 2 + +class MainWindow; + +class Bar { +public: + Bar(); + ~Bar(); + + bool init(MainWindow* owner); + void start(uint32_t total_ms, int width); + void end(); + void draw(QPainter& painter, const QRect& rc); + void update_passed_time(uint32_t ms); + + int get_speed(); + + QRect rc(){return m_rc;} + + void onMouseMove(int x, int y); + void onMousePress(int x, int y, Qt::MouseButton button); + void onMouseRelease(int x, int y, Qt::MouseButton button); + +private: + void _init_imgages(); + void _ms_to_str(uint32_t ms, QString& str); + +private: + MainWindow* m_owner; + + uint32_t m_total_ms; // 录像的总时长 + uint32_t m_played_ms; // 已经播放了的时长 + int m_percent; // 已经播放了的百分比(0~100) + int m_percent_last_draw; + QString m_str_total_time; + QString m_str_passed_time; + QString m_str_passed_time_last_draw; + + bool m_img_ready; + + // 从资源中加载的原始图像 + QPixmap m_res[res__max]; + QPixmap m_img_progress_pointer[widget_state_count]; + + int m_width; + int m_height; + // 此浮动窗相对于父窗口的坐标和大小 + QRect m_rc; + + // 尺寸和定位(此浮动窗内部的相对坐标) + QRect m_rc_btn_play; + QRect 
m_rc_btn_speed[speed_count]; + QRect m_rc_time_passed; + QRect m_rc_time_total; + QRect m_rc_progress; + QRect m_rc_skip; + + // 画布,最终输出的图像 + //QPixmap m_canvas; + + // 合成的图像 + QPixmap m_img_bg; + QPixmap m_img_btn_play[play_state_count][widget_state_count]; + QPixmap m_img_btn_speed[speed_count][btnspd_state_count]; + QPixmap m_img_progress; + QPixmap m_img_skip[chkbox_state_count][widget_state_count]; + QPixmap m_img_time_passed; + QPixmap m_img_time_total; + + // 各种状态 + bool m_playing; // 0=play, 2=pause + bool m_play_hover; + int m_speed_selected; + int m_speed_hover; // speed__max=no-hover + bool m_skip_selected; + bool m_skip_hover; + bool m_progress_hover; + bool m_progress_pressed; + + uint32_t m_resume_ms; // after drag progress-pointer, resume play from here. +}; + +#endif // BAR_H diff --git a/client/tp-player/downloader.cpp b/client/tp-player/downloader.cpp new file mode 100644 index 0000000..0f5a10a --- /dev/null +++ b/client/tp-player/downloader.cpp @@ -0,0 +1,120 @@ +#include "downloader.h" +#include "record_format.h" + +#include +#include +#include + +Downloader::Downloader() : QObject () { + m_data = nullptr; + m_reply = nullptr; + m_result = false; +} + +Downloader::~Downloader() { +} + +bool Downloader::request(const QString& url, const QString& sid, const QString& filename) { + return _request(url, sid, filename, nullptr); +} + +bool Downloader::request(const QString& url, const QString& sid, QByteArray* data) { + QString fname; + return _request(url, sid, fname, data); +} + +bool Downloader::_request(const QString& url, const QString& sid, const QString& filename, QByteArray* data) { + if(filename.isEmpty() && data == nullptr) + return false; + if(!filename.isEmpty() && data != nullptr) + return false; + m_data = data; + + if(!filename.isEmpty()) { + m_file.setFileName(filename); + if(!m_file.open(QIODevice::WriteOnly | QFile::Truncate)){ + qDebug("open file for write failed."); + return false; + } + } + + QString cookie = QString("_sid=%1\r\n").arg(sid); + + QNetworkRequest req; + req.setUrl(QUrl(url)); + req.setRawHeader("Cookie", cookie.toLatin1()); + + QNetworkAccessManager* nam = new QNetworkAccessManager(); + QEventLoop eloop; + m_reply = nam->get(req); + + connect(m_reply, &QNetworkReply::finished, &eloop, &QEventLoop::quit); + connect(m_reply, &QNetworkReply::finished, this, &Downloader::_on_finished); + connect(m_reply, &QIODevice::readyRead, this, &Downloader::_on_data_ready); + +// qDebug("before eventLoop.exec(%p)", &eloop); + eloop.exec(); +// qDebug("after eventLoop.exec()"); + + disconnect(m_reply, &QNetworkReply::finished, &eloop, &QEventLoop::quit); + disconnect(m_reply, &QNetworkReply::finished, this, &Downloader::_on_finished); + disconnect(m_reply, &QIODevice::readyRead, this, &Downloader::_on_data_ready); + + delete m_reply; + m_reply = nullptr; + delete nam; + + qDebug("Downloader::_request() end."); + return m_result; +} + +void Downloader::_on_data_ready() { +// qDebug("Downloader::_on_data_ready(%p).", this); + QNetworkReply *reply = reinterpret_cast(sender()); + + if(m_data != nullptr) { + m_data->push_back(reply->readAll()); + } + else { + m_file.write(reply->readAll()); + } +} + +void Downloader::abort() { + if(m_reply) { + qDebug("Downloader::abort()."); + m_reply->abort(); + } +} + +void Downloader::_on_finished() { +// qDebug("Downloader::_on_finished(%p).", this); + QNetworkReply *reply = reinterpret_cast(sender()); + + QVariant statusCode = reply->attribute(QNetworkRequest::HttpStatusCodeAttribute); + + if (reply->error() != 
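+    // NOTE: a user-initiated abort() also ends up here; Qt finishes an aborted
+    // reply with OperationCanceledError, so a cancelled download takes the
+    // error branch below and the partially written file is closed, m_result false.
+    //
+    // Blocking use from the caller's thread, as a sketch (the get-file URL
+    // layout follows the description in main.cpp; sid and rid are illustrative):
+    //   Downloader dl;
+    //   if(dl.request("http://127.0.0.1:7190/audit/get-file?act=read&type=rdp&rid=1234&f=tp-rdp.tpr",
+    //                 sid, "tp-rdp.tpr")) { /* file saved */ }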
QNetworkReply::NoError) { + // reply->abort() got "Operation canceled" + //QString strError = reply->errorString(); + qDebug() << "ERROR:" << reply->errorString(); + if(m_data == nullptr) { + m_file.flush(); + m_file.close(); + } + m_result = false; + return; + } + + if(m_data != nullptr) { + m_data->push_back(reply->readAll()); + } + else { + m_file.write(reply->readAll()); + m_file.flush(); + m_file.close(); + } + + reply->deleteLater(); + + m_result = true; +} diff --git a/client/tp-player/downloader.h b/client/tp-player/downloader.h new file mode 100644 index 0000000..5f5a123 --- /dev/null +++ b/client/tp-player/downloader.h @@ -0,0 +1,40 @@ +#ifndef DOWNLOADER_H +#define DOWNLOADER_H + +#include +#include + +class Downloader : public QObject { + Q_OBJECT + +public: + // 从url下载数据,写入到filename文件中,或放入data中。 + Downloader(); + ~Downloader(); + + bool request(const QString& url, const QString& sid, const QString& filename); + bool request(const QString& url, const QString& sid, QByteArray* data); + void abort(); + +private: + bool _request(const QString& url, const QString& sid, const QString& filename, QByteArray* data); + +private slots: + void _on_data_ready(); // 有数据可读了,读取并写入文件 + void _on_finished(); // 下载结束了 + +private: + QFile m_file; + QByteArray* m_data; + + bool m_result; + QNetworkReply* m_reply; +}; + +typedef struct DownloadParam { + QString url; + QString sid; + QString fname; +}DownloadParam; + +#endif diff --git a/client/tp-player/main.cpp b/client/tp-player/main.cpp new file mode 100644 index 0000000..82e2531 --- /dev/null +++ b/client/tp-player/main.cpp @@ -0,0 +1,98 @@ +#include "mainwindow.h" +#include +#include +#include +#include +#include + +// 编译出来的可执行程序复制到单独目录,然后执行 windeployqt 应用程序文件名 +// 即可自动将依赖的动态库等复制到此目录中。有些文件是多余的,可以酌情删除。 + +// 命令行参数格式: +// ## 本地文件或目录 +// tp-player.exe path/of/tp-rdp.tpr 一个 .tpr 文件的文件名 +// tp-player.exe path/contains/tp-rdp.tpr 包含 .tpr 文件的路径 +// +// ## 从TP服务器上下载 +// tp-player.exe http://teleport.domain.com:7190/{sub/path/}tp_1491560510_ca67fceb75a78c9d/1234 (注意,并不直接访问此URI,实际上其并不存在) +// TP服务器地址(可能包含子路径,例如上例中的{sub/path}部分)/session-id(用于判断当前授权用户)/录像会话编号 +// 按 “/” 进行分割后,去掉最后两个项,剩下部分是TP服务器的WEB地址,用于合成后续的文件下载URL。 +// 根据下载的.tpr文件内容,本地合成类似于 "000000256-admin-administrator-123.45.77.88-20191109-020047" 的路径来存放下载的文件 +// 特别注意,如果账号是 domain\user 这种形式,需要将 "\" 替换为下划线,否则此符号作为路径分隔符,会导致路径不存在而无法保存下载的文件。 +// - 获取文件大小: http://127.0.0.1:7190/audit/get-file?act=size&type=rdp&rid=yyyyy&f=file-name +// - 'act'为`size`表示获取文件大小(返回一个数字字符串,就是指定的文件大小) +// - 'type'可以是`rdp`或`ssh`,目前仅用了`rdp` +// - 'rid'是录像会话编号(在服务端,一个会话的录像文件均放在录像会话编号命名的目录下) +// - 'f' 是文件名,即会话编号目录下的指定文件,例如 'tp-rdp.tpr' +// - 读取文件内容: http://127.0.0.1:7190/audit/get-file?act=read&type=rdp&rid=yyyyy&f=file-name&offset=1234&length=1024 +// - 'act'为`read`表示获取文件内容 +// - 'offset'表示要读取的偏移,如果未指定,则表示从文件起始处开始读取,即默认为 offset=0 +// - 'length'表示要读取的大小,如果未指定,表示读取整个文件,即默认为 length=-1(服务端对length=-1做完全读取处理) +// - 搭配使用 offst 和 length 可以做到分块下载、断点续传。 + + +void show_usage(QCommandLineParser& parser) { + QMessageBox::warning(nullptr, QGuiApplication::applicationDisplayName(), + "
"
+                         + parser.helpText()
+                         + "\n\n"
+                         + "RESOURCE could be:\n"
+                         + "   - teleport record file (.tpr).\n"
+                         + "   - a directory contains .tpr file.\n"
+                         + "   - an URL to download teleport record file."
+                         + "
"); +} + +int main(int argc, char *argv[]) +{ +//#if (QT_VERSION >= QT_VERSION_CHECK(5, 6, 0)) +// QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling); +//#endif + + + QApplication a(argc, argv); + +//#ifdef __APPLE__ +// QString data_path_base = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation); +// data_path_base += "/tp-testdata/"; +//#else +// QString data_path_base = QCoreApplication::applicationDirPath() + "/record"; +//#endif +// qDebug("data-path-base: %s", data_path_base.toStdString().c_str()); +// return 0; + + QGuiApplication::setApplicationDisplayName(LOCAL8BIT("[Teleport播放器]")); + + QCommandLineParser parser; + const QCommandLineOption opt_help = parser.addHelpOption(); + + parser.addPositionalArgument("RESOURCE", "teleport record resource to be play."); + + if(!parser.parse(QCoreApplication::arguments())) { + QMessageBox::warning(nullptr, QGuiApplication::applicationDisplayName(), + //"
<html><head/><body><h2>" + parser.errorText() + "</h2><pre>"
+                             "<html><head/><body><h2>" + parser.errorText() + "</h2><pre>"
+                             + parser.helpText() + "</pre></body></html>
"); + return 1; + } + + if(parser.isSet(opt_help)) { + show_usage(parser); + return 2; + } + + const QStringList args = parser.positionalArguments(); + if(0 == args.size()) { + show_usage(parser); + return 2; + } + + QString resource = args.at(0); + qDebug() << resource; + + + MainWindow w; + w.set_resource(resource); + w.show(); + return a.exec(); +} diff --git a/client/tp-player/mainwindow.cpp b/client/tp-player/mainwindow.cpp new file mode 100644 index 0000000..2e2e458 --- /dev/null +++ b/client/tp-player/mainwindow.cpp @@ -0,0 +1,433 @@ +#include "mainwindow.h" +#include "ui_mainwindow.h" + +#include +#include +#include +#include +#include +#include +#include + +static inline int min(int a, int b){ + return a < b ? a : b; +} + +static inline int max(int a, int b){ + return a > b ? a : b; +} + +MainWindow::MainWindow(QWidget *parent) : + QMainWindow(parent), + ui(new Ui::MainWindow) +{ + m_show_default = true; + m_bar_shown = false; + m_bar_fade_in = false; + m_bar_fading = false; + m_bar_opacity = 1.0; + m_show_message = false; + memset(&m_pt, 0, sizeof(TS_RECORD_RDP_POINTER)); + + m_thr_play = nullptr; + m_play_state = PLAY_STATE_UNKNOWN; + m_thr_data = nullptr; + + m_disable_draw = false; + + ui->setupUi(this); + + ui->centralWidget->setMouseTracking(true); + setMouseTracking(true); + + // frame-less window. +//#ifdef __APPLE__ +// setWindowFlags(Qt::FramelessWindowHint | Qt::MSWindowsFixedSizeDialogHint | Qt::Window); +// OSXCode::fixWin(winId()); +//#else +// setWindowFlags(Qt::FramelessWindowHint | Qt::MSWindowsFixedSizeDialogHint | windowFlags()); +//#endif //__APPLE__ + + m_pt_normal.load(":/tp-player/res/cursor.png"); + m_default_bg.load(":/tp-player/res/bg.png"); + + m_canvas = QPixmap(m_default_bg.width(), m_default_bg.height()); + QPainter pp(&m_canvas); + pp.drawPixmap(0, 0, m_default_bg, 0, 0, m_default_bg.width(), m_default_bg.height()); + + + setWindowFlags(windowFlags()&~Qt::WindowMaximizeButtonHint); // 禁止最大化按钮 + setFixedSize(m_default_bg.width(), m_default_bg.height()); // 禁止拖动窗口大小 + + if(!m_bar.init(this)) { + qDebug("bar init failed."); + return; + } + + + connect(&m_timer_first_run, SIGNAL(timeout()), this, SLOT(_do_first_run())); + connect(&m_timer_bar_fade, SIGNAL(timeout()), this, SLOT(_do_bar_fade())); + connect(&m_timer_bar_delay_hide, SIGNAL(timeout()), this, SLOT(_do_bar_delay_hide())); + + m_timer_first_run.setSingleShot(true); + m_timer_first_run.start(500); +} + +MainWindow::~MainWindow() +{ + if(m_thr_play) { + m_thr_play->stop(); + + disconnect(m_thr_play, SIGNAL(signal_update_data(UpdateData*)), this, SLOT(_do_update_data(UpdateData*))); + + delete m_thr_play; + m_thr_play = nullptr; + } + + if(m_thr_data) { + m_thr_data->stop(); + disconnect(m_thr_data, SIGNAL(signal_update_data(UpdateData*)), this, SLOT(_do_update_data(UpdateData*))); + delete m_thr_data; + m_thr_data = nullptr; + } + + delete ui; +} + +void MainWindow::set_resource(const QString &res) { + m_res = res; +} + +void MainWindow::_do_first_run() { + m_thr_data = new ThrData(this, m_res); + connect(m_thr_data, SIGNAL(signal_update_data(UpdateData*)), this, SLOT(_do_update_data(UpdateData*))); + m_thr_data->start(QThread::TimeCriticalPriority); + + m_thr_play = new ThrPlay(this); + connect(m_thr_play, SIGNAL(signal_update_data(UpdateData*)), this, SLOT(_do_update_data(UpdateData*))); + + m_thr_play->speed(m_bar.get_speed()); + m_thr_play->start(); +} + +void MainWindow::set_speed(int s) { + if(m_thr_play) + m_thr_play->speed(s); +} + +void MainWindow::set_skip(bool s) { + if(m_thr_play) + 
m_thr_play->skip(s); +} + +void MainWindow::paintEvent(QPaintEvent *e) +{ + QPainter painter(this); + + if(m_show_default) { + painter.drawPixmap(e->rect(), m_default_bg, e->rect()); + } + else { + painter.drawPixmap(e->rect(), m_canvas, e->rect()); + + QRect rcpt(m_pt_normal.rect()); + rcpt.moveTo(m_pt.x - m_pt_normal.width()/2, m_pt.y-m_pt_normal.height()/2); + if(e->rect().intersects(rcpt)) { + painter.drawPixmap(m_pt.x-m_pt_normal.width()/2, m_pt.y-m_pt_normal.height()/2, m_pt_normal); + } + + // 绘制浮动控制窗 + if(m_bar_fading) { + painter.setOpacity(m_bar_opacity); + m_bar.draw(painter, e->rect()); + } + else if(m_bar_shown) { + m_bar.draw(painter, e->rect()); + } + } + + if(m_show_message) { + QRect rc_draw = e->rect(); + QRect rc(m_rc_message); + + if(e->rect().intersects(rc)) { + int from_x = max(rc_draw.left(), rc.left()) - rc.left(); + int from_y = max(rc_draw.top(), rc.top()) - rc.top(); + int w = min(rc.right(), rc_draw.right()) - rc.left() - from_x + 1; + int h = min(rc.bottom(), rc_draw.bottom()) - rc.top() - from_y + 1; + int to_x = rc.left() + from_x; + int to_y = rc.top() + from_y; + painter.drawPixmap(to_x, to_y, m_img_message, from_x, from_y, w, h); + } + } +} + +void MainWindow::pause() { + if(m_play_state != PLAY_STATE_RUNNING) + return; + m_thr_play->pause(); + m_play_state = PLAY_STATE_PAUSE; +} + +void MainWindow::resume(bool relocate, uint32_t ms) { + if(m_play_state == PLAY_STATE_PAUSE) { + if(relocate) + m_thr_data->restart(ms); + m_thr_play->resume(relocate, ms); + } + else if(m_play_state == PLAY_STATE_STOP) { + m_thr_data->restart(ms); + m_thr_play->resume(true, ms); + } + + m_play_state = PLAY_STATE_RUNNING; +} + +void MainWindow::_do_update_data(UpdateData* dat) { + if(!dat) + return; + + UpdateDataHelper data_helper(dat); + + if(dat->data_type() == TYPE_POINTER) { + TS_RECORD_RDP_POINTER pt; + memcpy(&pt, &m_pt, sizeof(TS_RECORD_RDP_POINTER)); + + // 更新虚拟鼠标信息,这样下一次绘制界面时就会在新的位置绘制出虚拟鼠标 + memcpy(&m_pt, dat->get_pointer(), sizeof(TS_RECORD_RDP_POINTER)); + update(m_pt.x - m_pt_normal.width()/2, m_pt.y - m_pt_normal.width()/2, m_pt_normal.width(), m_pt_normal.height()); + + update(pt.x - m_pt_normal.width()/2, pt.y - m_pt_normal.width()/2, m_pt_normal.width(), m_pt_normal.height()); + + return; + } + else if(dat->data_type() == TYPE_IMAGE) { + const UpdateImages uimgs = dat->get_images(); + if(uimgs.size() == 0) + return; + + if(uimgs.size() > 1 && !m_disable_draw) { + // 禁止界面更新 + setUpdatesEnabled(false); + } + + + QPainter pp(&m_canvas); + for(int i = 0; i < uimgs.size(); ++i) { + pp.drawImage(uimgs[i].x, uimgs[i].y, *(uimgs[i].img), 0, 0, uimgs[i].w, uimgs[i].h, Qt::AutoColor); + + if(!m_disable_draw) + update(uimgs[i].x, uimgs[i].y, uimgs[i].w, uimgs[i].h); + } + + + if(uimgs.size() > 1 && !m_disable_draw) { + // 允许界面更新 + setUpdatesEnabled(true); + } + + return; + } + + else if(dat->data_type() == TYPE_PLAYED_MS) { + m_bar.update_passed_time(dat->played_ms()); + return; + } + + else if(dat->data_type() == TYPE_DISABLE_DRAW) { + // 禁止界面更新 + m_disable_draw = true; + setUpdatesEnabled(false); + return; + } + + else if(dat->data_type() == TYPE_ENABLE_DRAW) { + // 允许界面更新 + m_disable_draw = false; + setUpdatesEnabled(true); + update(); + return; + } + + else if(dat->data_type() == TYPE_MESSAGE) { + if(dat->message().isEmpty()) { + m_show_message = false; + return; + } + + m_show_message = true; + + qDebug("1message, w=%d, h=%d", m_canvas.width(), m_canvas.height()); + + QPainter pp(&m_canvas); + QRect rcWin(0, 0, m_canvas.width(), m_canvas.height()); + 
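+        // First pass is measure-only: Qt::TextDontPrint makes drawText() lay
+        // the text out without painting and return its bounding rect in
+        // m_rc_message; the message is then rendered for real into
+        // m_img_message below, with a 30px inset on each side.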
pp.drawText(rcWin, Qt::AlignLeft|Qt::TextDontPrint, dat->message(), &m_rc_message); + + qDebug("2message, w=%d, h=%d", m_rc_message.width(), m_rc_message.height()); + m_rc_message.setWidth(m_rc_message.width()+60); + m_rc_message.setHeight(m_rc_message.height()+60); + + m_img_message = QPixmap(m_rc_message.width(), m_rc_message.height()); + m_img_message.fill(Qt::transparent); + QPainter pm(&m_img_message); + pm.setPen(QColor(255,255,255,153)); + pm.fillRect(m_rc_message, QColor(0,0,0,190)); + + QRect rcRect(m_rc_message); + rcRect.setWidth(rcRect.width()-1); + rcRect.setHeight(rcRect.height()-1); + pm.drawRect(rcRect); + + QRect rcText(m_rc_message); + rcText.setLeft(30); + rcText.setTop(30); + pm.drawText(rcText, Qt::AlignLeft, dat->message()); + m_rc_message.moveTo( + (m_canvas.width() - m_rc_message.width())/2, + (m_canvas.height() - m_rc_message.height())/3 + ); + + update(m_rc_message.x(), m_rc_message.y(), m_rc_message.width(), m_rc_message.height()); + + return; + } + + else if(dat->data_type() == TYPE_ERROR) { + QMessageBox::critical(this, QGuiApplication::applicationDisplayName(), dat->message()); + QApplication::instance()->exit(0); + return; + } + + // 这是播放开始时收到的第一个数据包 + else if(dat->data_type() == TYPE_HEADER_INFO) { + TS_RECORD_HEADER* hdr = dat->get_header(); + if(hdr == nullptr) + return; + memcpy(&m_rec_hdr, hdr, sizeof(TS_RECORD_HEADER)); + + qDebug() << "resize (" << m_rec_hdr.basic.width << "," << m_rec_hdr.basic.height << ")"; + + m_canvas = QPixmap(m_rec_hdr.basic.width, m_rec_hdr.basic.height); + + QDesktopWidget *desktop = QApplication::desktop(); // =qApp->desktop();也可以 + qDebug("desktop w:%d,h:%d, this w:%d,h:%d", desktop->width(), desktop->height(), width(), height()); + move(10, (desktop->height() - m_rec_hdr.basic.height)/2); + + setFixedSize(m_rec_hdr.basic.width, m_rec_hdr.basic.height); + + m_canvas.fill(QColor(38, 73, 111)); + + m_show_default = false; + repaint(); + + m_bar.start(m_rec_hdr.info.time_ms, 640); + m_bar_shown = true; + m_play_state = PLAY_STATE_RUNNING; + + update(m_bar.rc()); + + m_bar_fade_in = false; + m_bar_fading = true; + m_timer_bar_delay_hide.start(2000); + + QString title; + if (m_rec_hdr.basic.conn_port == 3389) { + title = QString(LOCAL8BIT("用户 %1 访问 %2 的 %3 账号").arg(m_rec_hdr.basic.user_username, m_rec_hdr.basic.conn_ip, m_rec_hdr.basic.acc_username)); + } + else { + QString _port; + _port.sprintf("%d", m_rec_hdr.basic.conn_port); + title = QString(LOCAL8BIT("用户 %1 访问 %2:%3 的 %4 账号").arg(m_rec_hdr.basic.user_username, m_rec_hdr.basic.conn_ip, _port, m_rec_hdr.basic.acc_username)); + } + + setWindowTitle(title); + + return; + } + + + else if(dat->data_type() == TYPE_END) { + m_bar.end(); + m_play_state = PLAY_STATE_STOP; + + return; + } +} + +void MainWindow::_do_bar_delay_hide() { + m_bar_fading = true; + m_timer_bar_delay_hide.stop(); + m_timer_bar_fade.stop(); + m_timer_bar_fade.start(50); +} + +void MainWindow::_do_bar_fade() { + if(m_bar_fade_in) { + if(m_bar_opacity < 1.0) + m_bar_opacity += 0.3; + if(m_bar_opacity >= 1.0) { + m_bar_opacity = 1.0; + m_bar_shown = true; + m_bar_fading = false; + m_timer_bar_fade.stop(); + } + } + else { + if(m_bar_opacity > 0.0) + m_bar_opacity -= 0.2; + if(m_bar_opacity <= 0.0) { + m_bar_opacity = 0.0; + m_bar_shown = false; + m_bar_fading = false; + m_timer_bar_fade.stop(); + } + } + + update(m_bar.rc()); +} + +void MainWindow::mouseMoveEvent(QMouseEvent *e) { + if(!m_show_default) { + QRect rc = m_bar.rc(); + if(e->y() > rc.top() - 20 && e->y() < rc.bottom() + 20) { + if((!m_bar_shown && 
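+            // start a fade-in only when the bar is fully hidden, or is
+            // currently fading out: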
!m_bar_fading) || (m_bar_fading && !m_bar_fade_in)) { + m_bar_fade_in = true; + m_bar_fading = true; + + m_timer_bar_delay_hide.stop(); + m_timer_bar_fade.stop(); + m_timer_bar_fade.start(50); + } + + if(rc.contains(e->pos())) + m_bar.onMouseMove(e->x(), e->y()); + } + else { + if((m_bar_shown && !m_bar_fading) || (m_bar_fading && m_bar_fade_in)) { + m_bar_fade_in = false; + m_bar_fading = true; + m_timer_bar_fade.stop(); + m_timer_bar_delay_hide.stop(); + + if(m_bar_opacity != 1.0) + m_timer_bar_fade.start(50); + else + m_timer_bar_delay_hide.start(1000); + } + } + } +} + +void MainWindow::mousePressEvent(QMouseEvent *e) { + if(!m_show_default) { + QRect rc = m_bar.rc(); + if(rc.contains(e->pos())) { + m_bar.onMousePress(e->x(), e->y(), e->button()); + } + } +} + +void MainWindow::mouseReleaseEvent(QMouseEvent *e) { + m_bar.onMouseRelease(e->x(), e->y(), e->button()); +} + diff --git a/client/tp-player/mainwindow.h b/client/tp-player/mainwindow.h new file mode 100644 index 0000000..174487f --- /dev/null +++ b/client/tp-player/mainwindow.h @@ -0,0 +1,90 @@ +#ifndef MAINWINDOW_H +#define MAINWINDOW_H + +#include +#include +#include +#include "bar.h" +#include "thr_play.h" +#include "thr_data.h" +#include "update_data.h" +#include "record_format.h" +#include "util.h" +#include "downloader.h" + +#define PLAY_STATE_UNKNOWN 0 +#define PLAY_STATE_RUNNING 1 +#define PLAY_STATE_PAUSE 2 +#define PLAY_STATE_STOP 3 + +namespace Ui { +class MainWindow; +} + +class MainWindow : public QMainWindow +{ + Q_OBJECT + +public: + explicit MainWindow(QWidget *parent = nullptr); + ~MainWindow(); + + void set_resource(const QString& res); + + void pause(); + void resume(bool relocate, uint32_t ms); + void restart(); + void set_speed(int s); + void set_skip(bool s); + + // TODO: 将thr_data移动到thr_play线程,由play线程进行管理 + ThrData* get_thr_data() {return m_thr_data;} + +private: + void paintEvent(QPaintEvent *e); + void mouseMoveEvent(QMouseEvent *e); + void mousePressEvent(QMouseEvent *e); + void mouseReleaseEvent(QMouseEvent *e); + +private slots: + void _do_first_run(); // 默认界面加载完成后,开始播放操作(可能会进行数据下载) + void _do_update_data(UpdateData*); + void _do_bar_fade(); + void _do_bar_delay_hide(); + +private: + Ui::MainWindow *ui; + + bool m_show_default; + bool m_bar_shown; + QPixmap m_default_bg; + + QString m_res; + ThrPlay* m_thr_play; + ThrData* m_thr_data; + + QPixmap m_canvas; + + Bar m_bar; + + TS_RECORD_HEADER m_rec_hdr; + + QPixmap m_pt_normal; + TS_RECORD_RDP_POINTER m_pt; + + QTimer m_timer_first_run; + QTimer m_timer_bar_fade; + QTimer m_timer_bar_delay_hide; + bool m_bar_fade_in; + bool m_bar_fading; + qreal m_bar_opacity; + + int m_play_state; + + bool m_show_message; + QPixmap m_img_message; + QRect m_rc_message; + bool m_disable_draw; +}; + +#endif // MAINWINDOW_H diff --git a/client/tp-player/mainwindow.ui b/client/tp-player/mainwindow.ui new file mode 100644 index 0000000..b296c52 --- /dev/null +++ b/client/tp-player/mainwindow.ui @@ -0,0 +1,26 @@ + + + MainWindow + + + + 0 + 0 + 500 + 360 + + + + + 微软雅黑 Light + + + + Teleport Replayer + + + + + + + diff --git a/client/tp-player/record_format.h b/client/tp-player/record_format.h new file mode 100644 index 0000000..9fc7ad5 --- /dev/null +++ b/client/tp-player/record_format.h @@ -0,0 +1,106 @@ +#ifndef RECORD_FORMAT_H +#define RECORD_FORMAT_H + +#include + +#define TS_TPPR_TYPE_UNKNOWN 0x0000 +#define TS_TPPR_TYPE_SSH 0x0001 +#define TS_TPPR_TYPE_RDP 0x0101 + + +#define TS_RECORD_TYPE_RDP_POINTER 0x12 // 鼠标坐标位置改变,用于绘制虚拟鼠标 +#define TS_RECORD_TYPE_RDP_IMAGE 0x13 
// 服务端返回的图像,用于展示 +#define TS_RECORD_TYPE_RDP_KEYFRAME 0x14 // + +#define TS_RDP_BTN_FREE 0 +#define TS_RDP_BTN_PRESSED 1 +#define TS_RDP_IMG_RAW 0 // 未压缩,原始数据(根据bitsPerPixel,多个字节对应一个点的颜色) +#define TS_RDP_IMG_BMP 1 // 压缩的BMP数据 +#define TS_RDP_IMG_ALT 2 + +#pragma pack(push,1) + +// 录像文件头(随着录像数据写入,会改变的部分) +typedef struct TS_RECORD_HEADER_INFO { + uint32_t magic; // "TPPR" 标志 TelePort Protocol Record + uint16_t ver; // 录像文件版本,从3.5.0开始,为4 + uint16_t type; // + // uint32_t packages; // 总包数 + uint32_t time_ms; // 总耗时(毫秒) + uint32_t dat_file_count; // 数据文件数量 +}TS_RECORD_HEADER_INFO; +#define ts_record_header_info_size sizeof(TS_RECORD_HEADER_INFO) + +// 录像文件头(固定不变部分) +typedef struct TS_RECORD_HEADER_BASIC { + uint16_t protocol_type; // 协议:1=RDP, 2=SSH, 3=Telnet + uint16_t protocol_sub_type; // 子协议:100=RDP-DESKTOP, 200=SSH-SHELL, 201=SSH-SFTP, 300=Telnet + uint64_t timestamp; // 本次录像的起始时间(UTC时间戳) + uint16_t width; // 初始屏幕尺寸:宽 + uint16_t height; // 初始屏幕尺寸:高 + char user_username[64]; // teleport账号 + char acc_username[64]; // 远程主机用户名 + + char host_ip[40]; // 远程主机IP + char conn_ip[40]; // 远程主机IP + uint16_t conn_port; // 远程主机端口 + + char client_ip[40]; // 客户端IP + +// // RDP专有 +// uint8_t rdp_security; // 0 = RDP, 1 = TLS +}TS_RECORD_HEADER_BASIC; +#define ts_record_header_basic_size sizeof(TS_RECORD_HEADER_BASIC) + +typedef struct TS_RECORD_HEADER { + TS_RECORD_HEADER_INFO info; + uint8_t _reserve1[64 - ts_record_header_info_size]; + TS_RECORD_HEADER_BASIC basic; + uint8_t _reserve2[512 - 64 - ts_record_header_basic_size]; +}TS_RECORD_HEADER; + +// header部分(header-info + header-basic) = 512B +#define ts_record_header_size sizeof(TS_RECORD_HEADER) + +// 一个数据包的头 +typedef struct TS_RECORD_PKG { + uint8_t type; // 包的数据类型 + uint32_t size; // 这个包的总大小(不含包头) + uint32_t time_ms; // 这个包距起始时间的时间差(毫秒,意味着一个连接不能持续超过49天) + uint8_t _reserve[3]; // 保留 +}TS_RECORD_PKG; + + +typedef struct TS_RECORD_RDP_POINTER { + uint16_t x; + uint16_t y; + uint8_t button; + uint8_t pressed; +}TS_RECORD_RDP_POINTER; + +// RDP图像更新 +typedef struct TS_RECORD_RDP_IMAGE_INFO { + uint16_t destLeft; + uint16_t destTop; + uint16_t destRight; + uint16_t destBottom; + uint16_t width; + uint16_t height; + uint16_t bitsPerPixel; + uint8_t format; + uint8_t _reserved; + uint32_t dat_len; + uint32_t zip_len; +}TS_RECORD_RDP_IMAGE_INFO; + +// 关键帧索引 +typedef struct TS_RECORD_RDP_KEYFRAME_INFO { + uint32_t time_ms; // 此关键帧的时间点 + uint32_t file_index; // 此关键帧图像数据位于哪一个数据文件中 + uint32_t offset; // 此关键帧图像数据在数据文件中的偏移 +}TS_RECORD_RDP_KEYFRAME_INFO; + +#pragma pack(pop) + + +#endif // RECORD_FORMAT_H diff --git a/client/tp-player/res/bar.psd b/client/tp-player/res/bar.psd new file mode 100644 index 0000000..a2defc8 Binary files /dev/null and b/client/tp-player/res/bar.psd differ diff --git a/client/tp-player/res/bar/bg-left.png b/client/tp-player/res/bar/bg-left.png new file mode 100644 index 0000000..e20e24c Binary files /dev/null and b/client/tp-player/res/bar/bg-left.png differ diff --git a/client/tp-player/res/bar/bg-mid.png b/client/tp-player/res/bar/bg-mid.png new file mode 100644 index 0000000..974129a Binary files /dev/null and b/client/tp-player/res/bar/bg-mid.png differ diff --git a/client/tp-player/res/bar/bg-right.png b/client/tp-player/res/bar/bg-right.png new file mode 100644 index 0000000..c6b31a0 Binary files /dev/null and b/client/tp-player/res/bar/bg-right.png differ diff --git a/client/tp-player/res/bar/btn-hover-left.png b/client/tp-player/res/bar/btn-hover-left.png new file mode 100644 index 0000000..7914efb Binary files /dev/null and 
b/client/tp-player/res/bar/btn-hover-left.png differ diff --git a/client/tp-player/res/bar/btn-hover-mid.png b/client/tp-player/res/bar/btn-hover-mid.png new file mode 100644 index 0000000..a1db349 Binary files /dev/null and b/client/tp-player/res/bar/btn-hover-mid.png differ diff --git a/client/tp-player/res/bar/btn-hover-right.png b/client/tp-player/res/bar/btn-hover-right.png new file mode 100644 index 0000000..cfd1348 Binary files /dev/null and b/client/tp-player/res/bar/btn-hover-right.png differ diff --git a/client/tp-player/res/bar/btn-normal-left.png b/client/tp-player/res/bar/btn-normal-left.png new file mode 100644 index 0000000..eb9b22a Binary files /dev/null and b/client/tp-player/res/bar/btn-normal-left.png differ diff --git a/client/tp-player/res/bar/btn-normal-mid.png b/client/tp-player/res/bar/btn-normal-mid.png new file mode 100644 index 0000000..06608db Binary files /dev/null and b/client/tp-player/res/bar/btn-normal-mid.png differ diff --git a/client/tp-player/res/bar/btn-normal-right.png b/client/tp-player/res/bar/btn-normal-right.png new file mode 100644 index 0000000..53f68d8 Binary files /dev/null and b/client/tp-player/res/bar/btn-normal-right.png differ diff --git a/client/tp-player/res/bar/btn-sel-left.png b/client/tp-player/res/bar/btn-sel-left.png new file mode 100644 index 0000000..e26b79e Binary files /dev/null and b/client/tp-player/res/bar/btn-sel-left.png differ diff --git a/client/tp-player/res/bar/btn-sel-mid.png b/client/tp-player/res/bar/btn-sel-mid.png new file mode 100644 index 0000000..f69acf2 Binary files /dev/null and b/client/tp-player/res/bar/btn-sel-mid.png differ diff --git a/client/tp-player/res/bar/btn-sel-right.png b/client/tp-player/res/bar/btn-sel-right.png new file mode 100644 index 0000000..ccfab12 Binary files /dev/null and b/client/tp-player/res/bar/btn-sel-right.png differ diff --git a/client/tp-player/res/bar/chkbox-hover.png b/client/tp-player/res/bar/chkbox-hover.png new file mode 100644 index 0000000..485711a Binary files /dev/null and b/client/tp-player/res/bar/chkbox-hover.png differ diff --git a/client/tp-player/res/bar/chkbox-normal.png b/client/tp-player/res/bar/chkbox-normal.png new file mode 100644 index 0000000..1ac352d Binary files /dev/null and b/client/tp-player/res/bar/chkbox-normal.png differ diff --git a/client/tp-player/res/bar/chkbox-sel-hover.png b/client/tp-player/res/bar/chkbox-sel-hover.png new file mode 100644 index 0000000..47970ca Binary files /dev/null and b/client/tp-player/res/bar/chkbox-sel-hover.png differ diff --git a/client/tp-player/res/bar/chkbox-sel-normal.png b/client/tp-player/res/bar/chkbox-sel-normal.png new file mode 100644 index 0000000..872fe0c Binary files /dev/null and b/client/tp-player/res/bar/chkbox-sel-normal.png differ diff --git a/client/tp-player/res/bar/pause-hover.png b/client/tp-player/res/bar/pause-hover.png new file mode 100644 index 0000000..a220452 Binary files /dev/null and b/client/tp-player/res/bar/pause-hover.png differ diff --git a/client/tp-player/res/bar/pause-normal.png b/client/tp-player/res/bar/pause-normal.png new file mode 100644 index 0000000..455497e Binary files /dev/null and b/client/tp-player/res/bar/pause-normal.png differ diff --git a/client/tp-player/res/bar/play-hover.png b/client/tp-player/res/bar/play-hover.png new file mode 100644 index 0000000..1bb55a9 Binary files /dev/null and b/client/tp-player/res/bar/play-hover.png differ diff --git a/client/tp-player/res/bar/play-normal.png b/client/tp-player/res/bar/play-normal.png new file mode 100644 index 
0000000..430f6f4 Binary files /dev/null and b/client/tp-player/res/bar/play-normal.png differ diff --git a/client/tp-player/res/bar/prgbar-mid.png b/client/tp-player/res/bar/prgbar-mid.png new file mode 100644 index 0000000..4e7d3a0 Binary files /dev/null and b/client/tp-player/res/bar/prgbar-mid.png differ diff --git a/client/tp-player/res/bar/prgbar-right.png b/client/tp-player/res/bar/prgbar-right.png new file mode 100644 index 0000000..8897c17 Binary files /dev/null and b/client/tp-player/res/bar/prgbar-right.png differ diff --git a/client/tp-player/res/bar/prgbarh-left.png b/client/tp-player/res/bar/prgbarh-left.png new file mode 100644 index 0000000..9a6ba68 Binary files /dev/null and b/client/tp-player/res/bar/prgbarh-left.png differ diff --git a/client/tp-player/res/bar/prgbarh-mid.png b/client/tp-player/res/bar/prgbarh-mid.png new file mode 100644 index 0000000..baeb2f6 Binary files /dev/null and b/client/tp-player/res/bar/prgbarh-mid.png differ diff --git a/client/tp-player/res/bar/prgpt-hover.png b/client/tp-player/res/bar/prgpt-hover.png new file mode 100644 index 0000000..e98d208 Binary files /dev/null and b/client/tp-player/res/bar/prgpt-hover.png differ diff --git a/client/tp-player/res/bar/prgpt-normal.png b/client/tp-player/res/bar/prgpt-normal.png new file mode 100644 index 0000000..59e1faa Binary files /dev/null and b/client/tp-player/res/bar/prgpt-normal.png differ diff --git a/client/tp-player/res/bar/select.png b/client/tp-player/res/bar/select.png new file mode 100644 index 0000000..38a1052 Binary files /dev/null and b/client/tp-player/res/bar/select.png differ diff --git a/client/tp-player/res/bar/selected.png b/client/tp-player/res/bar/selected.png new file mode 100644 index 0000000..c0067f5 Binary files /dev/null and b/client/tp-player/res/bar/selected.png differ diff --git a/client/tp-player/res/bg.png b/client/tp-player/res/bg.png new file mode 100644 index 0000000..e35d671 Binary files /dev/null and b/client/tp-player/res/bg.png differ diff --git a/client/tp-player/res/cursor.png b/client/tp-player/res/cursor.png new file mode 100644 index 0000000..5728f4f Binary files /dev/null and b/client/tp-player/res/cursor.png differ diff --git a/client/tp-player/res/tp-player.ico b/client/tp-player/res/tp-player.ico new file mode 100644 index 0000000..d3fce7a Binary files /dev/null and b/client/tp-player/res/tp-player.ico differ diff --git a/client/tp-player/rle.c b/client/tp-player/rle.c new file mode 100644 index 0000000..7aa0b49 --- /dev/null +++ b/client/tp-player/rle.c @@ -0,0 +1,974 @@ +/* -*- c-basic-offset: 8 -*- + rdesktop: A Remote Desktop Protocol client. + Bitmap decompression routines + Copyright (C) Matthew Chapman 1999-2008 + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+*/
+
+/* three separate functions for speed when decompressing the bitmaps;
+   when modifying one function, make the same change in the others
+   jay.sorg@gmail.com */
+
+/* indent is confused by this file */
+/* *INDENT-OFF* */
+
+
+#include <stdlib.h>
+#include "rle.h"
+
+/* Specific rename for RDPY integration */
+#define unimpl(str, code)
+
+//#define RD_BOOL int
+//#define False 0
+//#define True 1
+/* end specific rename */
+
+#define CVAL(p) (*(p++))
+//#ifdef NEED_ALIGN
+//#ifdef L_ENDIAN
+#define CVAL2(p, v) { v = (*(p++)); v |= (*(p++)) << 8; }
+//#else
+//#define CVAL2(p, v) { v = (*(p++)) << 8; v |= (*(p++)); }
+//#endif /* L_ENDIAN */
+//#else
+//#define CVAL2(p, v) { v = (*((uint16*)p)); p += 2; }
+//#endif /* NEED_ALIGN */
+
+#define UNROLL8(exp) { exp exp exp exp exp exp exp exp }
+
+#define REPEAT(statement) \
+{ \
+	while((count & ~0x7) && ((x+8) < width)) \
+		UNROLL8( statement; count--; x++; ); \
+	\
+	while((count > 0) && (x < width)) \
+	{ \
+		statement; \
+		count--; \
+		x++; \
+	} \
+}
+
+#define MASK_UPDATE() \
+{ \
+	mixmask <<= 1; \
+	if (mixmask == 0) \
+	{ \
+		mask = fom_mask ? fom_mask : CVAL(input); \
+		mixmask = 1; \
+	} \
+}
+
+/* 1 byte bitmap decompress */
+RD_BOOL
+bitmap_decompress1(uint8 * output, int width, int height, const uint8 * input, int size)
+{
+	const uint8 *end = input + size;
+	uint8 *prevline = NULL, *line = NULL;
+	int opcode, count, offset, isfillormix, x = width;
+	int lastopcode = -1, insertmix = False, bicolour = False;
+	uint8 code;
+	uint8 colour1 = 0, colour2 = 0;
+	uint8 mixmask, mask = 0;
+	uint8 mix = 0xff;
+	int fom_mask = 0;
+
+	while (input < end)
+	{
+		fom_mask = 0;
+		code = CVAL(input);
+		opcode = code >> 4;
+		/* Handle different opcode forms */
+		switch (opcode)
+		{
+			case 0xc:
+			case 0xd:
+			case 0xe:
+				opcode -= 6;
+				count = code & 0xf;
+				offset = 16;
+				break;
+			case 0xf:
+				opcode = code & 0xf;
+				if (opcode < 9)
+				{
+					count = CVAL(input);
+					count |= CVAL(input) << 8;
+				}
+				else
+				{
+					count = (opcode < 0xb) ?
8 : 1; + } + offset = 0; + break; + default: + opcode >>= 1; + count = code & 0x1f; + offset = 32; + break; + } + /* Handle strange cases for counts */ + if (offset != 0) + { + isfillormix = ((opcode == 2) || (opcode == 7)); + if (count == 0) + { + if (isfillormix) + count = CVAL(input) + 1; + else + count = CVAL(input) + offset; + } + else if (isfillormix) + { + count <<= 3; + } + } + /* Read preliminary data */ + switch (opcode) + { + case 0: /* Fill */ + if ((lastopcode == opcode) && !((x == width) && (prevline == NULL))) + insertmix = True; + break; + case 8: /* Bicolour */ + colour1 = CVAL(input); + case 3: /* Colour */ + colour2 = CVAL(input); + break; + case 6: /* SetMix/Mix */ + case 7: /* SetMix/FillOrMix */ + mix = CVAL(input); + opcode -= 5; + break; + case 9: /* FillOrMix_1 */ + mask = 0x03; + opcode = 0x02; + fom_mask = 3; + break; + case 0x0a: /* FillOrMix_2 */ + mask = 0x05; + opcode = 0x02; + fom_mask = 5; + break; + } + lastopcode = opcode; + mixmask = 0; + /* Output body */ + while (count > 0) + { + if (x >= width) + { + if (height <= 0) + return False; + x = 0; + height--; + prevline = line; + line = output + height * width; + } + switch (opcode) + { + case 0: /* Fill */ + if (insertmix) + { + if (prevline == NULL) + line[x] = mix; + else + line[x] = prevline[x] ^ mix; + insertmix = False; + count--; + x++; + } + if (prevline == NULL) + { + REPEAT(line[x] = 0) + } + else + { + REPEAT(line[x] = prevline[x]) + } + break; + case 1: /* Mix */ + if (prevline == NULL) + { + REPEAT(line[x] = mix) + } + else + { + REPEAT(line[x] = prevline[x] ^ mix) + } + break; + case 2: /* Fill or Mix */ + if (prevline == NULL) + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + line[x] = mix; + else + line[x] = 0; + ) + } + else + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + line[x] = prevline[x] ^ mix; + else + line[x] = prevline[x]; + ) + } + break; + case 3: /* Colour */ + REPEAT(line[x] = colour2) + break; + case 4: /* Copy */ + REPEAT(line[x] = CVAL(input)) + break; + case 8: /* Bicolour */ + REPEAT + ( + if (bicolour) + { + line[x] = colour2; + bicolour = False; + } + else + { + line[x] = colour1; + bicolour = True; count++; + } + ) + break; + case 0xd: /* White */ + REPEAT(line[x] = 0xff) + break; + case 0xe: /* Black */ + REPEAT(line[x] = 0) + break; + default: + unimpl("bitmap opcode 0x%x\n", opcode); + return False; + } + } + } + return True; +} + +/* 2 byte bitmap decompress */ +RD_BOOL +bitmap_decompress2(uint8 * output, int width, int height, const uint8 * input, int size) +{ + const uint8 *end = input + size; + uint16 *prevline = NULL, *line = NULL; + int opcode, count, offset, isfillormix, x = width; + int lastopcode = -1, insertmix = False, bicolour = False; + uint8 code; + uint16 colour1 = 0, colour2 = 0; + uint8 mixmask, mask = 0; + uint16 mix = 0xffff; + int fom_mask = 0; + + while (input < end) + { + fom_mask = 0; + code = CVAL(input); + opcode = code >> 4; + /* Handle different opcode forms */ + switch (opcode) + { + case 0xc: + case 0xd: + case 0xe: + opcode -= 6; + count = code & 0xf; + offset = 16; + break; + case 0xf: + opcode = code & 0xf; + if (opcode < 9) + { + count = CVAL(input); + count |= CVAL(input) << 8; + } + else + { + count = (opcode < 0xb) ? 
8 : 1; + } + offset = 0; + break; + default: + opcode >>= 1; + count = code & 0x1f; + offset = 32; + break; + } + /* Handle strange cases for counts */ + if (offset != 0) + { + isfillormix = ((opcode == 2) || (opcode == 7)); + if (count == 0) + { + if (isfillormix) + count = CVAL(input) + 1; + else + count = CVAL(input) + offset; + } + else if (isfillormix) + { + count <<= 3; + } + } + /* Read preliminary data */ + switch (opcode) + { + case 0: /* Fill */ + if ((lastopcode == opcode) && !((x == width) && (prevline == NULL))) + insertmix = True; + break; + case 8: /* Bicolour */ + CVAL2(input, colour1); + case 3: /* Colour */ + CVAL2(input, colour2); + break; + case 6: /* SetMix/Mix */ + case 7: /* SetMix/FillOrMix */ + CVAL2(input, mix); + opcode -= 5; + break; + case 9: /* FillOrMix_1 */ + mask = 0x03; + opcode = 0x02; + fom_mask = 3; + break; + case 0x0a: /* FillOrMix_2 */ + mask = 0x05; + opcode = 0x02; + fom_mask = 5; + break; + } + lastopcode = opcode; + mixmask = 0; + /* Output body */ + while (count > 0) + { + if (x >= width) + { + if (height <= 0) + return False; + x = 0; + height--; + prevline = line; + line = ((uint16 *) output) + height * width; + } + switch (opcode) + { + case 0: /* Fill */ + if (insertmix) + { + if (prevline == NULL) + line[x] = mix; + else + line[x] = prevline[x] ^ mix; + insertmix = False; + count--; + x++; + } + if (prevline == NULL) + { + REPEAT(line[x] = 0) + } + else + { + REPEAT(line[x] = prevline[x]) + } + break; + case 1: /* Mix */ + if (prevline == NULL) + { + REPEAT(line[x] = mix) + } + else + { + REPEAT(line[x] = prevline[x] ^ mix) + } + break; + case 2: /* Fill or Mix */ + if (prevline == NULL) + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + line[x] = mix; + else + line[x] = 0; + ) + } + else + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + line[x] = prevline[x] ^ mix; + else + line[x] = prevline[x]; + ) + } + break; + case 3: /* Colour */ + REPEAT(line[x] = colour2) + break; + case 4: /* Copy */ + REPEAT(CVAL2(input, line[x])) + break; + case 8: /* Bicolour */ + REPEAT + ( + if (bicolour) + { + line[x] = colour2; + bicolour = False; + } + else + { + line[x] = colour1; + bicolour = True; + count++; + } + ) + break; + case 0xd: /* White */ + REPEAT(line[x] = 0xffff) + break; + case 0xe: /* Black */ + REPEAT(line[x] = 0) + break; + default: + unimpl("bitmap opcode 0x%x\n", opcode); + return False; + } + } + } + return True; +} + +/* 3 byte bitmap decompress */ +RD_BOOL +bitmap_decompress3(uint8 * output, int width, int height, const uint8 * input, int size) +{ + uint8 *end = input + size; + uint8 *prevline = NULL, *line = NULL; + int opcode, count, offset, isfillormix, x = width; + int lastopcode = -1, insertmix = False, bicolour = False; + uint8 code; + uint8 colour1[3] = {0, 0, 0}, colour2[3] = {0, 0, 0}; + uint8 mixmask, mask = 0; + uint8 mix[3] = {0xff, 0xff, 0xff}; + int fom_mask = 0; + + while (input < end) + { + fom_mask = 0; + code = CVAL(input); + opcode = code >> 4; + /* Handle different opcode forms */ + switch (opcode) + { + case 0xc: + case 0xd: + case 0xe: + opcode -= 6; + count = code & 0xf; + offset = 16; + break; + case 0xf: + opcode = code & 0xf; + if (opcode < 9) + { + count = CVAL(input); + count |= CVAL(input) << 8; + } + else + { + count = (opcode < + 0xb) ? 
8 : 1; + } + offset = 0; + break; + default: + opcode >>= 1; + count = code & 0x1f; + offset = 32; + break; + } + /* Handle strange cases for counts */ + if (offset != 0) + { + isfillormix = ((opcode == 2) || (opcode == 7)); + if (count == 0) + { + if (isfillormix) + count = CVAL(input) + 1; + else + count = CVAL(input) + offset; + } + else if (isfillormix) + { + count <<= 3; + } + } + /* Read preliminary data */ + switch (opcode) + { + case 0: /* Fill */ + if ((lastopcode == opcode) && !((x == width) && (prevline == NULL))) + insertmix = True; + break; + case 8: /* Bicolour */ + colour1[0] = CVAL(input); + colour1[1] = CVAL(input); + colour1[2] = CVAL(input); + case 3: /* Colour */ + colour2[0] = CVAL(input); + colour2[1] = CVAL(input); + colour2[2] = CVAL(input); + break; + case 6: /* SetMix/Mix */ + case 7: /* SetMix/FillOrMix */ + mix[0] = CVAL(input); + mix[1] = CVAL(input); + mix[2] = CVAL(input); + opcode -= 5; + break; + case 9: /* FillOrMix_1 */ + mask = 0x03; + opcode = 0x02; + fom_mask = 3; + break; + case 0x0a: /* FillOrMix_2 */ + mask = 0x05; + opcode = 0x02; + fom_mask = 5; + break; + } + lastopcode = opcode; + mixmask = 0; + /* Output body */ + while (count > 0) + { + if (x >= width) + { + if (height <= 0) + return False; + x = 0; + height--; + prevline = line; + line = output + height * (width * 3); + } + switch (opcode) + { + case 0: /* Fill */ + if (insertmix) + { + if (prevline == NULL) + { + line[x * 3] = mix[0]; + line[x * 3 + 1] = mix[1]; + line[x * 3 + 2] = mix[2]; + } + else + { + line[x * 3] = + prevline[x * 3] ^ mix[0]; + line[x * 3 + 1] = + prevline[x * 3 + 1] ^ mix[1]; + line[x * 3 + 2] = + prevline[x * 3 + 2] ^ mix[2]; + } + insertmix = False; + count--; + x++; + } + if (prevline == NULL) + { + REPEAT + ( + line[x * 3] = 0; + line[x * 3 + 1] = 0; + line[x * 3 + 2] = 0; + ) + } + else + { + REPEAT + ( + line[x * 3] = prevline[x * 3]; + line[x * 3 + 1] = prevline[x * 3 + 1]; + line[x * 3 + 2] = prevline[x * 3 + 2]; + ) + } + break; + case 1: /* Mix */ + if (prevline == NULL) + { + REPEAT + ( + line[x * 3] = mix[0]; + line[x * 3 + 1] = mix[1]; + line[x * 3 + 2] = mix[2]; + ) + } + else + { + REPEAT + ( + line[x * 3] = + prevline[x * 3] ^ mix[0]; + line[x * 3 + 1] = + prevline[x * 3 + 1] ^ mix[1]; + line[x * 3 + 2] = + prevline[x * 3 + 2] ^ mix[2]; + ) + } + break; + case 2: /* Fill or Mix */ + if (prevline == NULL) + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + { + line[x * 3] = mix[0]; + line[x * 3 + 1] = mix[1]; + line[x * 3 + 2] = mix[2]; + } + else + { + line[x * 3] = 0; + line[x * 3 + 1] = 0; + line[x * 3 + 2] = 0; + } + ) + } + else + { + REPEAT + ( + MASK_UPDATE(); + if (mask & mixmask) + { + line[x * 3] = + prevline[x * 3] ^ mix [0]; + line[x * 3 + 1] = + prevline[x * 3 + 1] ^ mix [1]; + line[x * 3 + 2] = + prevline[x * 3 + 2] ^ mix [2]; + } + else + { + line[x * 3] = + prevline[x * 3]; + line[x * 3 + 1] = + prevline[x * 3 + 1]; + line[x * 3 + 2] = + prevline[x * 3 + 2]; + } + ) + } + break; + case 3: /* Colour */ + REPEAT + ( + line[x * 3] = colour2 [0]; + line[x * 3 + 1] = colour2 [1]; + line[x * 3 + 2] = colour2 [2]; + ) + break; + case 4: /* Copy */ + REPEAT + ( + line[x * 3] = CVAL(input); + line[x * 3 + 1] = CVAL(input); + line[x * 3 + 2] = CVAL(input); + ) + break; + case 8: /* Bicolour */ + REPEAT + ( + if (bicolour) + { + line[x * 3] = colour2[0]; + line[x * 3 + 1] = colour2[1]; + line[x * 3 + 2] = colour2[2]; + bicolour = False; + } + else + { + line[x * 3] = colour1[0]; + line[x * 3 + 1] = colour1[1]; + line[x * 3 + 2] = colour1[2]; + 
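+					/* the encoded count for Bicolour runs counts colour1/colour2
+					   pairs, so after emitting the first pixel of a pair the
+					   count is bumped (below) to also cover the second pixel */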
bicolour = True; + count++; + } + ) + break; + case 0xd: /* White */ + REPEAT + ( + line[x * 3] = 0xff; + line[x * 3 + 1] = 0xff; + line[x * 3 + 2] = 0xff; + ) + break; + case 0xe: /* Black */ + REPEAT + ( + line[x * 3] = 0; + line[x * 3 + 1] = 0; + line[x * 3 + 2] = 0; + ) + break; + default: + unimpl("bitmap opcode 0x%x\n", opcode); + return False; + } + } + } + return True; +} + +/* decompress a colour plane */ +static int +process_plane(uint8 * in, int width, int height, uint8 * out, int size) +{ + int indexw; + int indexh; + int code; + int collen; + int replen; + int color; + int x; + int revcode; + uint8 * last_line; + uint8 * this_line; + uint8 * org_in; + uint8 * org_out; + + org_in = in; + org_out = out; + last_line = 0; + indexh = 0; + while (indexh < height) + { + out = (org_out + width * height * 4) - ((indexh + 1) * width * 4); + color = 0; + this_line = out; + indexw = 0; + if (last_line == 0) + { + while (indexw < width) + { + code = CVAL(in); + replen = code & 0xf; + collen = (code >> 4) & 0xf; + revcode = (replen << 4) | collen; + if ((revcode <= 47) && (revcode >= 16)) + { + replen = revcode; + collen = 0; + } + while (collen > 0) + { + color = CVAL(in); + *out = color; + out += 4; + indexw++; + collen--; + } + while (replen > 0) + { + *out = color; + out += 4; + indexw++; + replen--; + } + } + } + else + { + while (indexw < width) + { + code = CVAL(in); + replen = code & 0xf; + collen = (code >> 4) & 0xf; + revcode = (replen << 4) | collen; + if ((revcode <= 47) && (revcode >= 16)) + { + replen = revcode; + collen = 0; + } + while (collen > 0) + { + x = CVAL(in); + if (x & 1) + { + x = x >> 1; + x = x + 1; + color = -x; + } + else + { + x = x >> 1; + color = x; + } + x = last_line[indexw * 4] + color; + *out = x; + out += 4; + indexw++; + collen--; + } + while (replen > 0) + { + x = last_line[indexw * 4] + color; + *out = x; + out += 4; + indexw++; + replen--; + } + } + } + indexh++; + last_line = this_line; + } + return (int) (in - org_in); +} + +/* 4 byte bitmap decompress */ +RD_BOOL +bitmap_decompress4(uint8 * output, int width, int height, const uint8 * input, int size) +{ + int code; + int bytes_pro; + int total_pro; + + code = CVAL(input); + if (code != 0x10) + { + return False; + } + total_pro = 1; + bytes_pro = process_plane(input, width, height, output + 3, size - total_pro); + total_pro += bytes_pro; + input += bytes_pro; + bytes_pro = process_plane(input, width, height, output + 2, size - total_pro); + total_pro += bytes_pro; + input += bytes_pro; + bytes_pro = process_plane(input, width, height, output + 1, size - total_pro); + total_pro += bytes_pro; + input += bytes_pro; + bytes_pro = process_plane(input, width, height, output + 0, size - total_pro); + total_pro += bytes_pro; + return size == total_pro; +} + +int +bitmap_decompress_15(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size) { + uint8 * temp = (uint8*)malloc(input_width * input_height * 2); + RD_BOOL rv = bitmap_decompress2(temp, input_width, input_height, input, size); + + // convert to rgba + for (int y = 0; y < output_height; y++) { + for (int x = 0; x < output_width; x++) { + uint16 a = ((uint16*)temp)[y * input_width + x]; + uint8 r = (a & 0x7c00) >> 10; + uint8 g = (a & 0x03e0) >> 5; + uint8 b = (a & 0x001f); + r = r * 255 / 31; + g = g * 255 / 31; + b = b * 255 / 31; + ((uint32*)output)[y * output_width + x] = 0xff << 24 | b << 16 | g << 8 | r; + } + } + + free(temp); + return rv; +} + +int +bitmap_decompress_16(uint8 * output, 
int output_width, int output_height, int input_width, int input_height, uint8* input, int size) { + uint8 * temp = (uint8*)malloc(input_width * input_height * 2); + RD_BOOL rv = bitmap_decompress2(temp, input_width, input_height, input, size); + + // convert to rgba + for (int y = 0; y < output_height; y++) { + for (int x = 0; x < output_width; x++) { + uint16 a = ((uint16*)temp)[y * input_width + x]; + uint8 r = (a & 0xf800) >> 11; + uint8 g = (a & 0x07e0) >> 5; + uint8 b = (a & 0x001f); + r = r * 255 / 31; + g = g * 255 / 63; + b = b * 255 / 31; + ((uint32*)output)[y * output_width + x] = 0xff << 24 | b << 16 | g << 8 | r; + } + } + free(temp); + return rv; +} + +int +bitmap_decompress_24(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size) { + uint8 * temp = (uint8*)malloc(input_width * input_height * 3); + RD_BOOL rv = bitmap_decompress3(temp, input_width, input_height, input, size); + + // convert to rgba + for (int y = 0; y < output_height; y++) { + for (int x = 0; x < output_width; x++) { + uint8 r = temp[(y * input_width + x) * 3]; + uint8 g = temp[(y * input_width + x) * 3 + 1]; + uint8 b = temp[(y * input_width + x) * 3 + 2]; + ((uint32*)output)[y * output_width + x] = 0xff << 24 | b << 16 | g << 8 | r; + } + } + free(temp); + + return rv; +} + +int +bitmap_decompress_32(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size) { + uint8 * temp = (uint8*)malloc(input_width * input_height * 4); + RD_BOOL rv = bitmap_decompress4(temp, input_width, input_height, input, size); + + // convert to rgba + for (int y = 0; y < output_height; y++) { + for (int x = 0; x < output_width; x++) { + uint8 r = temp[(y * input_width + x) * 4]; + uint8 g = temp[(y * input_width + x) * 4 + 1]; + uint8 b = temp[(y * input_width + x) * 4 + 2]; + uint8 a = temp[(y * input_width + x) * 4 + 3]; + ((uint32*)output)[y * output_width + x] = 0xff << 24 | r << 16 | g << 8 | b; + } + } + free(temp); + + return rv; +} diff --git a/client/tp-player/rle.h b/client/tp-player/rle.h new file mode 100644 index 0000000..6de7624 --- /dev/null +++ b/client/tp-player/rle.h @@ -0,0 +1,31 @@ +#ifndef RLE_H +#define RLE_H + +#define RD_BOOL int +#define False 0 +#define True 1 + +#define uint8 unsigned char +#define uint16 unsigned short +#define uint32 unsigned int + +#ifdef __cplusplus +extern "C" { +#endif + +RD_BOOL bitmap_decompress1(uint8 * output, int width, int height, const uint8 * input, int size); +RD_BOOL bitmap_decompress2(uint8 * output, int width, int height, const uint8 * input, int size); +RD_BOOL bitmap_decompress3(uint8 * output, int width, int height, const uint8 * input, int size); +RD_BOOL bitmap_decompress4(uint8 * output, int width, int height, const uint8 * input, int size); + +int bitmap_decompress_15(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size); +int bitmap_decompress_16(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size); +int bitmap_decompress_24(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size); +int bitmap_decompress_32(uint8 * output, int output_width, int output_height, int input_width, int input_height, uint8* input, int size); + + +#ifdef __cplusplus +} +#endif + +#endif // RLE_H diff --git a/client/tp-player/thr_data.cpp b/client/tp-player/thr_data.cpp new file mode 100644 index 
0000000..af9526f
--- /dev/null
+++ b/client/tp-player/thr_data.cpp
@@ -0,0 +1,709 @@
+#include <QDebug>
+#include <QFile>
+#include <QFileInfo>
+#include <QDir>
+#include <QStandardPaths>
+#include <QCoreApplication>
+#include <inttypes.h>
+#include <zlib.h>
+
+#include "thr_play.h"
+#include "thr_data.h"
+#include "util.h"
+#include "downloader.h"
+#include "record_format.h"
+#include "mainwindow.h"
+
+#include "rle.h"
+
+
+static QImage* _rdpimg2QImage(int w, int h, int bitsPerPixel, bool isCompressed, const uint8_t* dat, uint32_t len) {
+    QImage* out;
+    switch(bitsPerPixel) {
+    case 15:
+        if(isCompressed) {
+            uint8_t* _dat = reinterpret_cast<uint8_t*>(calloc(1, w*h*2));
+            if(!bitmap_decompress1(_dat, w, h, dat, len)) {
+                free(_dat);
+                return nullptr;
+            }
+            // copy the pixels: QImage(uchar*, ...) does not take ownership,
+            // so the buffer must not be freed while the image still uses it
+            out = new QImage(QImage(_dat, w, h, QImage::Format_RGB555).copy());
+            free(_dat);
+        }
+        else {
+            out = new QImage(QImage(dat, w, h, QImage::Format_RGB555).transformed(QMatrix(1.0, 0.0, 0.0, -1.0, 0.0, 0.0)));
+        }
+        return out;
+
+    case 16:
+        if(isCompressed) {
+            uint8_t* _dat = reinterpret_cast<uint8_t*>(calloc(1, w*h*2));
+            if(!bitmap_decompress2(_dat, w, h, dat, len)) {
+                free(_dat);
+                qDebug() << "22------------------DECOMPRESS2 failed.";
+                return nullptr;
+            }
+
+            // TODO: optimize this further by writing directly into the QImage buffer.
+            out = new QImage(w, h, QImage::Format_RGB16);
+            for(int y = 0; y < h; y++) {
+                for(int x = 0; x < w; x++) {
+                    uint16 a = ((uint16*)_dat)[y * w + x];
+                    uint8 r = ((a & 0xf800) >> 11) * 255 / 31;
+                    uint8 g = ((a & 0x07e0) >> 5) * 255 / 63;
+                    uint8 b = (a & 0x001f) * 255 / 31;
+                    out->setPixelColor(x, y, QColor(r,g,b));
+                }
+            }
+            free(_dat);
+            return out;
+        }
+        else {
+            out = new QImage(QImage(dat, w, h, QImage::Format_RGB16).transformed(QMatrix(1.0, 0.0, 0.0, -1.0, 0.0, 0.0)));
+        }
+        return out;
+
+    case 24:
+    case 32:
+    default:
+        qDebug() << "-------- unsupported bitsPerPixel:" << bitsPerPixel;
+        return nullptr;
+    }
+}
+
+static QImage* _raw2QImage(int w, int h, const uint8_t* dat, uint32_t len) {
+    QImage* out;
+
+    // TODO: optimize this further by writing directly into the QImage buffer.
+    out = new QImage(w, h, QImage::Format_RGB16);
+    for(int y = 0; y < h; y++) {
+        for(int x = 0; x < w; x++) {
+            uint16 a = ((uint16*)dat)[y * w + x];
+            uint8 r = ((a & 0xf800) >> 11) * 255 / 31;
+            uint8 g = ((a & 0x07e0) >> 5) * 255 / 63;
+            uint8 b = (a & 0x001f) * 255 / 31;
+            out->setPixelColor(x, y, QColor(r,g,b));
+        }
+    }
+    return out;
+}
+
+
+
+//=================================================================
+// ThrData
+//=================================================================
+
+ThrData::ThrData(MainWindow* mainwin, const QString& res) {
+    m_mainwin = mainwin;
+    m_res = res;
+    m_need_download = false;
+    m_need_stop = false;
+    m_need_restart = false;
+    m_wait_restart = false;
+    m_need_show_kf = false;
+
+    m_file_idx = 0;
+    m_offset = 0;
+
+#ifdef __APPLE__
+    m_data_path_base = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation);
+    m_data_path_base += "/tp-testdata/";
+#else
+    m_data_path_base = QCoreApplication::applicationDirPath() + "/record";
+#endif
+    qDebug("data-path-base: %s", m_data_path_base.toStdString().c_str());
+
+    // qDebug() << "AppConfigLocation:" << QStandardPaths::writableLocation(QStandardPaths::AppConfigLocation);
+    // qDebug() << "AppDataLocation:" << QStandardPaths::writableLocation(QStandardPaths::AppDataLocation);
+    // qDebug() << "AppLocalDataLocation:" << QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation);
+    // qDebug() << "ConfigLocation:" << QStandardPaths::writableLocation(QStandardPaths::ConfigLocation);
+    // qDebug() << "CacheLocation:" << QStandardPaths::writableLocation(QStandardPaths::CacheLocation);
+    // qDebug() <<
"GenericCacheLocation:" << QStandardPaths::writableLocation(QStandardPaths::GenericCacheLocation); + + /* +AppConfigLocation: "C:/Users/apex/AppData/Local/tp-player" +AppDataLocation: "C:/Users/apex/AppData/Roaming/tp-player" +AppLocalDataLocation: "C:/Users/apex/AppData/Local/tp-player" +ConfigLocation: "C:/Users/apex/AppData/Local/tp-player" +CacheLocation: "C:/Users/apex/AppData/Local/tp-player/cache" +GenericCacheLocation: "C:/Users/apex/AppData/Local/cache" + */ +} + +ThrData::~ThrData() { + _clear_data(); +} + +void ThrData::stop() { + if(!isRunning()) + return; + m_need_stop = true; + wait(); + qDebug("data thread stop() end."); +} + +void ThrData::_notify_message(const QString& msg) { + UpdateData* _msg = new UpdateData(TYPE_MESSAGE); + _msg->message(msg); + emit signal_update_data(_msg); +} + +void ThrData::_notify_error(const QString& msg) { + UpdateData* _msg = new UpdateData(TYPE_ERROR); + _msg->message(msg); + emit signal_update_data(_msg); +} + +void ThrData::run() { + _run(); + qDebug("ThrData thread run() end."); +} + +void ThrData::_run() { + + QString _tmp_res = m_res.toLower(); + if(_tmp_res.startsWith("http")) { + m_need_download = true; + _notify_message(LOCAL8BIT("正在准备录像数据,请稍候...")); + + if(!m_thr_download.init(m_data_path_base, m_res)) { + _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("无法下载录像文件!\n\n"), m_res)); + return; + } + + m_thr_download.start(); + msleep(100); + + for(;;) { + if(m_need_stop) + return; + if(!m_thr_download.is_running() || m_thr_download.is_tpk_downloaded()) + break; + msleep(100); + } + + if(!m_thr_download.is_tpk_downloaded()) { + _notify_error(QString("%1\n%2").arg(LOCAL8BIT("无法下载录像文件!"), m_res)); + return; + } + + m_thr_download.get_data_path(m_data_path); + } + else { + QFileInfo fi_chk_link(m_res); + if(fi_chk_link.isSymLink()) + m_res = fi_chk_link.symLinkTarget(); + + QFileInfo fi(m_res); + if(!fi.exists()) { + _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("指定的文件或目录不存在!"), m_res)); + return; + } + + if(fi.isFile()) { + m_data_path = fi.path(); + } + else if(fi.isDir()) { + m_data_path = m_res; + } + + m_data_path = QDir::toNativeSeparators(m_data_path); + } + + // 到这里,.tpr和.tpk文件均已经下载完成了。 + + if(!_load_header()) + return; + + if(!_load_keyframe()) + return; + + + UpdateData* dat = new UpdateData(m_hdr); + emit signal_update_data(dat); + + + QFile* fdata = nullptr; + qint64 file_size = 0; + qint64 file_processed = 0; + qint64 read_len = 0; + QString str_fidx; + + for(;;) { + // 任何时候确保第一时间响应退出操作 + if(m_need_stop) + return; + + if(m_need_restart) { + if(fdata) { + fdata->close(); + delete fdata; + fdata = nullptr; + } + + m_wait_restart = true; + msleep(50); + continue; + } + + // 如果所有文件都已经处理完了,则等待(可能用户会拖动滚动条,或者重新播放) + if(m_file_idx >= m_hdr.info.dat_file_count) { + msleep(500); + continue; + } + + // 看看待播放队列中还有多少个数据包 + int pkg_count_in_queue = 0; + int pkg_need_add = 0; + + m_locker.lock(); + pkg_count_in_queue = m_data.size(); + m_locker.unlock(); + + // 少于1000个的话,补足到2000个 + if(m_data.size() < 1000) + pkg_need_add = 2000 - pkg_count_in_queue; + + if(pkg_need_add == 0) { + msleep(100); + continue; + } + + for(int i = 0; i < pkg_need_add; ++i) { + if(m_need_stop) + return; + if(m_need_restart) + break; + + // 如果数据文件尚未打开,则打开它 + if(fdata == nullptr) { + str_fidx.sprintf("%d", m_file_idx+1); + QString tpd_fname = QString("%1/tp-rdp-%2.tpd").arg(m_data_path, str_fidx); + tpd_fname = QDir::toNativeSeparators(tpd_fname); + + QFileInfo fi_tpd(tpd_fname); + if(!fi_tpd.exists()) { + if(m_need_download) { + // 此文件尚未下载完成,等待 + for(;;) { + 
+                            // poll until the download thread reports this
+                            // .tpd file complete (or exits), while staying
+                            // responsive to a stop request
+                            if(m_need_stop)
+                                return;
+                            if(!m_thr_download.is_running() || m_thr_download.is_tpd_downloaded(m_file_idx))
+                                break;
+                            msleep(100);
+                        }
+
+                        // the download failed
+                        if(!m_thr_download.is_tpd_downloaded(m_file_idx))
+                            return;
+                    }
+                }
+
+                fdata = new QFile(tpd_fname);
+                if(!fdata->open(QFile::ReadOnly)) {
+                    qDebug() << "Can not open " << tpd_fname << " for read.";
+                    _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("无法打开录像数据文件!"), tpd_fname));
+                    return;
+                }
+
+                file_size = fdata->size();
+                file_processed = 0;
+                qDebug("Open file tp-rdp-%d.tpd, processed: %" PRId64 ", size: %" PRId64, m_file_idx+1, file_processed, file_size);
+            }
+
+            // if a start offset was specified, skip up to it
+            if(m_offset > 0) {
+                fdata->seek(m_offset);
+                file_processed = m_offset;
+                m_offset = 0;
+            }
+
+            //----------------------------------
+            // read one data packet
+            //----------------------------------
+            if(file_size - file_processed < sizeof(TS_RECORD_PKG)) {
+                qDebug("invalid tp-rdp-%d.tpd file, filesize=%" PRId64 ", processed=%" PRId64 ", need=%d.", m_file_idx+1, file_size, file_processed, static_cast<int>(sizeof(TS_RECORD_PKG)));
+                _notify_error(QString("%1\ntp-rdp-%2.tpd").arg(LOCAL8BIT("错误的录像数据文件!"), str_fidx));
+                return;
+            }
+
+            TS_RECORD_PKG pkg;
+            read_len = fdata->read(reinterpret_cast<char*>(&pkg), sizeof(TS_RECORD_PKG));
+            if(read_len != sizeof(TS_RECORD_PKG)) {
+                qDebug("invalid tp-rdp-%d.tpd file, read_len=%" PRId64 " (1).", m_file_idx+1, read_len);
+                _notify_error(QString("%1\ntp-rdp-%2.tpd").arg(LOCAL8BIT("错误的录像数据文件!"), str_fidx));
+                return;
+            }
+            file_processed += sizeof(TS_RECORD_PKG);
+
+            if(file_size - file_processed < pkg.size) {
+                qDebug("invalid tp-rdp-%d.tpd file (2).", m_file_idx+1);
+                _notify_error(QString("%1\ntp-rdp-%2.tpd").arg(LOCAL8BIT("错误的录像数据文件!"), str_fidx));
+                return;
+            }
+
+            if(pkg.size == 0) {
+                qDebug("################## too bad.");
+            }
+
+            QByteArray pkg_data = fdata->read(pkg.size);
+            if(pkg_data.size() != static_cast<int>(pkg.size)) {
+                qDebug("invalid tp-rdp-%d.tpd file, read_len=%" PRId64 " (3).", m_file_idx+1, read_len);
+                _notify_error(QString("%1\ntp-rdp-%2.tpd").arg(LOCAL8BIT("错误的录像数据文件!"), str_fidx));
+                return;
+            }
+            file_processed += pkg.size;
+
+            UpdateData* dat = _parse(pkg, pkg_data);
+            if(dat == nullptr) {
+                qDebug("invalid tp-rdp-%d.tpd file (4).", m_file_idx+1);
+                _notify_error(QString("%1\ntp-rdp-%2.tpd").arg(LOCAL8BIT("错误的录像数据文件!"), str_fidx));
+                return;
+            }
+
+            // on a keyframe, drop the images cached since the previous keyframe
+            if(pkg.type == TS_RECORD_TYPE_RDP_KEYFRAME) {
+                for(size_t ci = 0; ci < m_cache_imgs.size(); ++ci) {
+                    if(m_cache_imgs[ci] != nullptr)
+                        delete m_cache_imgs[ci];
+                }
+                m_cache_imgs.clear();
+            }
+
+            // after the progress bar was dragged, show one keyframe, then skip later ones.
+            if(pkg.type == TS_RECORD_TYPE_RDP_KEYFRAME) {
+                qDebug("----key frame: %u, processed=%" PRId64 ", pkg.size=%u", pkg.time_ms, file_processed, pkg.size);
+                if(m_need_show_kf) {
+                    m_need_show_kf = false;
+                    qDebug("++ show keyframe.");
+                }
+                else {
+                    qDebug("-- skip keyframe.");
+                    delete dat;
+                    dat = nullptr;
+                }
+            }
+
+            // queue the packet for playback
+            if(dat) {
+                m_locker.lock();
+                m_data.enqueue(dat);
+                m_locker.unlock();
+            }
+
+            // give the scheduler a chance to run the playback thread
+//            msleep(1);
+
+            // this file is fully processed; close it so the next pass opens a new one
+            if(file_processed >= file_size) {
+                fdata->close();
+                delete fdata;
+                fdata = nullptr;
+                m_file_idx++;
+            }
+
+            if(m_file_idx >= m_hdr.info.dat_file_count) {
+                UpdateData* dat = new UpdateData(TYPE_END);
+                m_locker.lock();
+                m_data.enqueue(dat);
+                m_locker.unlock();
+                break;
+            }
+        }
+    }
+}
+
+UpdateData* ThrData::_parse(const TS_RECORD_PKG& pkg, const QByteArray& data) {
+    if(pkg.type == TS_RECORD_TYPE_RDP_POINTER) {
+        if(data.size() != static_cast<int>(sizeof(TS_RECORD_RDP_POINTER)))
+            return nullptr;
+
+        UpdateData* ud = new UpdateData();
+        ud->set_pointer(pkg.time_ms, reinterpret_cast<const TS_RECORD_RDP_POINTER*>(data.data()));
+        return ud;
+    }
+    else if(pkg.type == TS_RECORD_TYPE_RDP_IMAGE) {
+        UpdateData* ud = new UpdateData(TYPE_IMAGE, pkg.time_ms);
+
+        if(data.size() < static_cast<int>(sizeof(uint16_t) + sizeof(TS_RECORD_RDP_IMAGE_INFO))) {
+            delete ud;
+            return nullptr;
+        }
+
+        const uint8_t* dat_ptr = reinterpret_cast<const uint8_t*>(data.data());
+
+        uint16_t count = (reinterpret_cast<const uint16_t*>(dat_ptr))[0];
+        uint32_t offset = sizeof(uint16_t);
+
+        UpdateImages& imgs = ud->get_images();
+
+        for(uint16_t i = 0; i < count; ++i) {
+
+            const TS_RECORD_RDP_IMAGE_INFO* info = reinterpret_cast<const TS_RECORD_RDP_IMAGE_INFO*>(dat_ptr+offset);
+            offset += sizeof(TS_RECORD_RDP_IMAGE_INFO);
+
+            if(info->format != TS_RDP_IMG_ALT) {
+                const uint8_t* img_dat = dat_ptr + offset;
+
+                const uint8_t* real_img_dat = nullptr;
+                QByteArray unzip_data;
+                if(info->zip_len > 0) {
+                    // the image data is compressed; inflate it first
+                    unzip_data.resize(static_cast<int>(info->dat_len));
+
+                    uLong u_len = info->dat_len;
+                    int err = uncompress(reinterpret_cast<Bytef*>(unzip_data.data()), &u_len, img_dat, info->zip_len);
+                    if(err != Z_OK || u_len != info->dat_len) {
+                        qDebug("image uncompress failed. err=%d.", err);
+                    }
+                    else {
+                        real_img_dat = reinterpret_cast<const uint8_t*>(unzip_data.data());
+                    }
+
+                    offset += info->zip_len;
+                }
+                else {
+                    real_img_dat = img_dat;
+                    offset += info->dat_len;
+                }
+
+
+                UPDATE_IMAGE uimg;
+                uimg.x = info->destLeft;
+                uimg.y = info->destTop;
+                uimg.w = info->destRight - info->destLeft + 1;
+                uimg.h = info->destBottom - info->destTop + 1;
+                if(real_img_dat)
+                    uimg.img = _rdpimg2QImage(info->width, info->height, info->bitsPerPixel, (info->format == TS_RDP_IMG_BMP) ? true : false, real_img_dat, info->dat_len);
+                else
+                    uimg.img = nullptr;
+                imgs.push_back(uimg);
+
+                QImage* cache_img = nullptr;
+                if(uimg.img != nullptr)
+                    cache_img = new QImage(*uimg.img);
+
+                m_cache_imgs.push_back(cache_img);
+            }
+            else {
+                // TS_RDP_IMG_ALT carries no pixel data: dat_len is an index
+                // into the cache of images kept since the last keyframe
+                UPDATE_IMAGE uimg;
+                uimg.x = info->destLeft;
+                uimg.y = info->destTop;
+                uimg.w = info->destRight - info->destLeft + 1;
+                uimg.h = info->destBottom - info->destTop + 1;
+
+                size_t cache_idx = info->dat_len;
+
+                if(cache_idx >= m_cache_imgs.size() || m_cache_imgs[cache_idx] == nullptr) {
+                    uimg.img = nullptr;
+                }
+                else {
+                    uimg.img = new QImage(*m_cache_imgs[cache_idx]);
+                }
+                imgs.push_back(uimg);
+            }
+        }
+
+        return ud;
+    }
+    else if(pkg.type == TS_RECORD_TYPE_RDP_KEYFRAME) {
+        UpdateData* ud = new UpdateData(TYPE_IMAGE, pkg.time_ms);
+        const TS_RECORD_RDP_KEYFRAME_INFO* info = reinterpret_cast<const TS_RECORD_RDP_KEYFRAME_INFO*>(data.data());
+        const uint8_t* data_buf = reinterpret_cast<const uint8_t*>(data.data() + sizeof(TS_RECORD_RDP_KEYFRAME_INFO));
+        uint32_t data_len = data.size() - sizeof(TS_RECORD_RDP_KEYFRAME_INFO);
+
+        UpdateImages& imgs = ud->get_images();
+
+        UPDATE_IMAGE uimg;
+        uimg.x = 0;
+        uimg.y = 0;
+        uimg.w = m_hdr.basic.width;
+        uimg.h = m_hdr.basic.height;
+
+        const uint8_t* real_img_dat = nullptr;
+        uint32_t real_img_len = m_hdr.basic.width * m_hdr.basic.height * 2;
+
+        QByteArray unzip_data;
+        if(data_len != real_img_len) {
+            // shorter than the raw size means the keyframe is compressed; inflate it
+            unzip_data.resize(static_cast<int>(real_img_len));
+
+            uLong u_len = real_img_len;
+            int err = uncompress(reinterpret_cast<Bytef*>(unzip_data.data()), &u_len, data_buf, data_len);
+            if(err != Z_OK || u_len != real_img_len) {
+                qDebug("keyframe uncompress failed. err=%d.", err);
+            }
+            else {
+                real_img_dat = reinterpret_cast<const uint8_t*>(unzip_data.data());
+            }
+        }
+        else {
+            real_img_dat = data_buf;
+        }
+
+        if(real_img_dat != nullptr)
+            uimg.img = _raw2QImage(m_hdr.basic.width, m_hdr.basic.height, real_img_dat, real_img_len);
+        else
+            uimg.img = nullptr;
+        imgs.push_back(uimg);
+
+        return ud;
+    }
+
+    return nullptr;
+}
+
+
+void ThrData::restart(uint32_t start_ms) {
+    qDebug("restart at %u ms", start_ms);
+    // ask the worker thread to finish its current pass, then make it wait
+    m_need_restart = true;
+
+    // make sure the worker thread really has finished the current pass
+    for(;;) {
+        msleep(50);
+        if(m_need_stop)
+            return;
+        if(m_wait_restart)
+            break;
+    }
+
+    // clear the pending-playback queue
+    _clear_data();
+
+    if(start_ms == 0) {
+        m_offset = 0;
+        m_file_idx = 0;
+        m_need_show_kf = false;
+    }
+    else {
+        // find the keyframe closest to, but not after, start_ms
+        size_t i = 0;
+        for(i = 0; i < m_kf.size(); ++i) {
+            if(m_kf[i].time_ms > start_ms) {
+                break;
+            }
+        }
+        if(i > 0)
+            i--;
+
+        qDebug("restart actually at %u ms, kf: %d", m_kf[i].time_ms, static_cast<int>(i));
+
+        // set the position from which playback should resume
+        m_offset = m_kf[i].offset;
+        m_file_idx = m_kf[i].file_index;
+        if(m_file_idx == (uint32_t)-1)
+            m_file_idx = 0;
+        m_need_show_kf = true;
+    }
+
+    qDebug("RESTART: offset=%u, file_idx=%u", m_offset, m_file_idx);
+
+    // let the worker thread continue
+    m_wait_restart = false;
+    m_need_restart = false;
+}
+
+bool ThrData::_load_header() {
+    QString msg;
+    qDebug() << "PATH_BASE: " << m_data_path;
+
+    QString filename = QString("%1/tp-rdp.tpr").arg(m_data_path);
+    filename = QDir::toNativeSeparators(filename);
+    qDebug() << "TPR: " << filename;
+
+    QFile f(filename);
+    if(!f.open(QFile::ReadOnly)) {
+        qDebug() << "Can not open " << filename << " for read.";
+        _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("无法打开录像信息文件!"), filename));
+        return false;
+    }
+
+    memset(&m_hdr, 0, sizeof(TS_RECORD_HEADER));
+
+    qint64 read_len = 0;
+    read_len = f.read(reinterpret_cast<char*>(&m_hdr), sizeof(TS_RECORD_HEADER));
+    if(read_len != sizeof(TS_RECORD_HEADER)) {
+        qDebug() << "invalid .tpr file.";
+        _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("错误的录像信息文件!"), filename));
+        return false;
+    }
+
+    if(m_hdr.info.ver != 4) {
+        qDebug() << "invalid .tpr file.";
+        _notify_error(QString("%1 %2%3").arg(LOCAL8BIT("不支持的录像文件版本 "), QString::number(m_hdr.info.ver), LOCAL8BIT("!\n\n此播放器支持录像文件版本 4。")));
+        return false;
+    }
+
+    if(m_hdr.basic.width == 0 || m_hdr.basic.height == 0) {
+        _notify_error(LOCAL8BIT("错误的录像信息,未记录窗口尺寸!"));
+        return false;
+    }
+
+    if(m_hdr.info.dat_file_count == 0) {
+        _notify_error(LOCAL8BIT("错误的录像信息,未记录数据文件数量!"));
+        return false;
+    }
+
+    return true;
+}
+
+bool ThrData::_load_keyframe() {
+    QString tpk_fname = QString("%1/tp-rdp.tpk").arg(m_data_path);
+    tpk_fname = QDir::toNativeSeparators(tpk_fname);
+
+    qDebug() << "TPK: " << tpk_fname;
+
+    QFile f_kf(tpk_fname);
+    if(!f_kf.open(QFile::ReadOnly)) {
+        qDebug() << "Can not open " << tpk_fname << " for read.";
+        _notify_error(QString("%1\n\n%2").arg(LOCAL8BIT("无法打开关键帧信息文件!"), tpk_fname));
+        return false;
+    }
+
+    qint64 fsize = f_kf.size();
+    if(!fsize || fsize % sizeof(TS_RECORD_RDP_KEYFRAME_INFO) != 0) {
+        qDebug() << "invalid .tpk file size: " << tpk_fname;
+        _notify_error(LOCAL8BIT("关键帧信息文件格式错误!"));
+        return false;
+    }
+
+    qint64 read_len = 0;
+    int kf_count = static_cast<int>(fsize / sizeof(TS_RECORD_RDP_KEYFRAME_INFO));
+    for(int i = 0; i < kf_count; ++i) {
+        TS_RECORD_RDP_KEYFRAME_INFO kf;
+        memset(&kf, 0, sizeof(TS_RECORD_RDP_KEYFRAME_INFO));
+        read_len = f_kf.read(reinterpret_cast<char*>(&kf), sizeof(TS_RECORD_RDP_KEYFRAME_INFO));
+        if(read_len != sizeof(TS_RECORD_RDP_KEYFRAME_INFO)) {
+            qDebug() << "invalid .tpk file.";
_notify_error(LOCAL8BIT("关键帧信息文件格式错误!")); + return false; + } + + m_kf.push_back(kf); + } + + return true; +} + +UpdateData* ThrData::get_data() { + UpdateData* d = nullptr; + + m_locker.lock(); + if(m_data.size() > 0) { + d = m_data.dequeue(); + } + m_locker.unlock(); + + return d; +} + +void ThrData::_clear_data() { + m_locker.lock(); + while(m_data.size() > 0) { + UpdateData* d = m_data.dequeue(); + delete d; + } + m_locker.unlock(); +} diff --git a/client/tp-player/thr_data.h b/client/tp-player/thr_data.h new file mode 100644 index 0000000..92ad518 --- /dev/null +++ b/client/tp-player/thr_data.h @@ -0,0 +1,107 @@ +#ifndef THR_DATA_H +#define THR_DATA_H + +#include +#include +#include +#include +#include +#include +#include +#include "update_data.h" +#include "record_format.h" +#include "thr_download.h" + +/* +为支持“边下载,边播放”、“可拖动进度条”等功能,录像数据会分为多个文件存放,目前每个文件约4MB。 +例如: + tp-rdp.tpr + tp-rdp.tpk (关键帧信息文件,v3.5.1开始引入) + tp-rdp-1.tpd, tp-rdp-2.tpd, tp-rdp-3.tpd, ... +这样,下载完一个数据文件,即可播放此数据文件中的内容,同时下载线程可以下载后续数据文件。 + +为支持“拖动进度条”,可以在数据文件中插入关键帧的方式,这就要求记录录像数据的同时对图像数据进行解码, +并同步合成全屏数据(关键帧),每经过一段时间(或者一定数量的图像数据包)之后,就在录像数据文件中增加一个关键帧。 +正常播放时,跳过此关键帧。 +当进度条拖放发生时,找到目标时间点之前的最后一个关键帧,从此处开始无延时播放到目标时间点,然后正常播放。 +因此,需要能够快速定位到各个关键帧,因为有可能此时尚未下载这个关键帧所在的数据文件。定位到此关键帧 +所在的数据文件后,下载线程要放弃当前下载任务(如果不是当前正在下载的数据文件),并开始下载新的数据文件。 +因此,需要引入新的关键帧信息文件(.tpk),记录各个关键帧数据所在的数据文件序号、偏移、时间点等信息。 + +另外,为保证数据文件、关键帧信息文件等下载正确,下载时保存到对应的临时文件中,并记录已下载字节数,下载完成后再改名,如: + tp-rdp.tpk.tmp, tp-rdp.tpk.len + tp-rdp-1.tpd.tmp, tp-rdp-1.tpd.len, ... +这样,下次需要下载指定文件时,如果发现对应的临时文件存在,可以根据已下载字节数,继续下载。 +*/ + +typedef std::vector KeyFrames; + +typedef std::vector CachedImages; + +class MainWindow; + +// 下载必要的文件,解析文件数据,生成图像数据(QImage*),将数据包放入待显示队列中,等待 ThrPlay 线程使用 +// 注意,无需将所有数据解析并放入待显示队列,此队列有数量限制(例如1000个),避免过多占用内存 +class ThrData : public QThread { + Q_OBJECT +public: + ThrData(MainWindow* mainwin, const QString& url); + ~ThrData(); + + virtual void run(); + void stop(); + + void restart(uint32_t start_ms); // 重新从指定时间开始播放 + + bool have_more_data(); + + UpdateData* get_data(); + +private: + void _run(); + + bool _load_header(); + bool _load_keyframe(); + + void _clear_data(); +// void _prepare(); + + UpdateData* _parse(const TS_RECORD_PKG& pkg, const QByteArray& data); + + void _notify_message(const QString& msg); + void _notify_error(const QString& err_msg); + +signals: + void signal_update_data(UpdateData*); + +private: + MainWindow* m_mainwin; + QQueue m_data; + QMutex m_locker; + + ThrDownload m_thr_download; + + bool m_need_stop; + + bool m_need_download; + QString m_res; + QString m_data_path_base; + + QString m_url_base; + QString m_sid; + QString m_rid; + QString m_data_path; + + TS_RECORD_HEADER m_hdr; + KeyFrames m_kf; + + bool m_need_restart; + bool m_wait_restart; + bool m_need_show_kf; + uint32_t m_file_idx; + uint32_t m_offset; + + CachedImages m_cache_imgs; +}; + +#endif // THR_DATA_H diff --git a/client/tp-player/thr_download.cpp b/client/tp-player/thr_download.cpp new file mode 100644 index 0000000..cbfe0f7 --- /dev/null +++ b/client/tp-player/thr_download.cpp @@ -0,0 +1,292 @@ +#include +#include +#include +#include + +#include "thr_download.h" +#include "util.h" +#include "downloader.h" +#include "record_format.h" + +//================================================================= +// ThrDownload +//================================================================= + +ThrDownload::ThrDownload() { + m_need_stop = false; + m_have_tpr = false; + m_have_tpk = false; + m_have_tpd = nullptr; + m_need_tpk = false; + m_running = true; +} + 
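+// Typical use of this class (a sketch only; it mirrors what
+// ThrData::_run() actually does -- the names local_cache_dir and
+// res_url below are illustrative, not part of this file):
+//
+//     ThrDownload dl;
+//     if (dl.init(local_cache_dir, res_url)) {
+//         dl.start();                // QThread::start()
+//         while (dl.is_running() && !dl.is_tpk_downloaded())
+//             QThread::msleep(100);  // poll until tp-rdp.tpk is ready
+//     }
+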
+ThrDownload::~ThrDownload() { + if(m_have_tpd) + delete[] m_have_tpd; +} + +// tp-player.exe http://teleport.domain.com:7190/{sub/path/}tp_1491560510_ca67fceb75a78c9d/1234 (注意,并不直接访问此URI,实际上其并不存在) +// TP服务器地址(可能包含子路径哦,例如上例中的{sub/path/}部分)/session-id(用于判断当前授权用户)/录像会话编号 + +bool ThrDownload::init(const QString& local_data_path_base, const QString &res) { + m_data_path_base = local_data_path_base; + + QString _tmp_res = res.toLower(); + if(!_tmp_res.startsWith("http")) { + return false; + } + + QStringList _uris = res.split('/'); + if(_uris.size() < 3) { + return false; + } + + m_sid = _uris[_uris.size()-2]; + m_rid = _uris[_uris.size()-1]; + m_url_base = res.left(res.length() - m_sid.length() - m_rid.length() - 2); + + if(m_sid.length() == 0 || m_rid.length() == 0 || m_url_base.length() == 0) + return false; + + return true; +} + +void ThrDownload::stop() { + if(!m_running) + return; +// if(!isRunning()) +// return; + m_need_stop = true; + wait(); + qDebug("data thread stop() end."); +} + +// tp-player.exe http://teleport.domain.com:7190/{sub/path/}tp_1491560510_ca67fceb75a78c9d/1234 (注意,并不直接访问此URI,实际上其并不存在) +// TP服务器地址(可能包含子路径哦,例如上例中的{sub/path/}部分)/session-id(用于判断当前授权用户)/录像会话编号 + +void ThrDownload::run() { + _run(); + m_running = false; + qDebug("ThrDownload thread run() end."); +} + +void ThrDownload::_run() { +// m_state = statDownloading; + + if(!_download_tpr()) { +// m_state = statFailDone; + return; + } + m_have_tpr = true; + + m_have_tpd = new bool[m_tpd_count]; + for(uint32_t i = 0; i < m_tpd_count; ++i) { + m_have_tpd[i] = false; + } + + if(m_need_tpk) { + if(!_download_tpk()) { +// m_state = statFailDone; + return; + } + m_have_tpk = true; + } + + uint32_t file_idx = 0; + for(;;) { + if(m_need_stop) + break; + QString str_fidx; + str_fidx.sprintf("%d", file_idx+1); + + QString tpd_fname = QString("%1/tp-rdp-%2.tpd").arg(m_data_path, str_fidx); + tpd_fname = QDir::toNativeSeparators(tpd_fname); + + QString tmp_fname = QString("%1/tp-rdp-%2.tpd.downloading").arg(m_data_path, str_fidx); + tmp_fname = QDir::toNativeSeparators(tmp_fname); + + QFileInfo fi_tmp(tmp_fname); + if(fi_tmp.isFile()) { + QFile::remove(tmp_fname); + } + + QFileInfo fi_tpd(tpd_fname); + if(!fi_tpd.exists()) { + QString url = QString("%1/audit/get-file?act=read&type=rdp&rid=%2&f=tp-rdp-%3.tpd").arg(m_url_base, m_rid, str_fidx); + + qDebug() << "URL : " << url; + qDebug() << "TPD : " << tmp_fname; + if(!_download_file(url, tmp_fname)) { +// m_state = statFailDone; + return; + } + + if(!QFile::rename(tmp_fname, tpd_fname)) { +// m_state = statFailDone; + return; + } + } + + m_have_tpd[file_idx] = true; + + file_idx += 1; + if(file_idx >= m_tpd_count) + break; + } + +// m_state = statSuccessDone; +} + +bool ThrDownload::_download_tpr() { + QString url = QString("%1/audit/get-file?act=read&type=rdp&rid=%2&f=tp-rdp.tpr").arg(m_url_base, m_rid); + QByteArray data; + if(!_download_file(url, data)) + return false; + + if(data.size() != sizeof(TS_RECORD_HEADER)) { + qDebug("invalid header data. 
%d", data.size()); + m_error = QString(LOCAL8BIT("录像信息文件数据错误!")); + return false; + } + + TS_RECORD_HEADER* hdr = reinterpret_cast(data.data()); +// if(hdr->info.ver != 4) { +// qDebug() << "invaid .tpr file."; +// m_last_error = QString("%1 %2%3").arg(LOCAL8BIT("不支持的录像文件版本 "), QString(hdr->info.ver), LOCAL8BIT("!\n\n此播放器支持录像文件版本 4。")); +// return false; +// } + +// if(m_hdr.basic.width == 0 || m_hdr.basic.height == 0) { +// _notify_error(LOCAL8BIT("错误的录像信息,未记录窗口尺寸!")); +// return false; +// } + +// if(m_hdr.info.dat_file_count == 0) { +// _notify_error(LOCAL8BIT("错误的录像信息,未记录数据文件数量!")); +// return false; +// } + + + // 下载得到的数据应该是一个TS_RECORD_HEADER,解析此数据,生成本地文件路径,并保存之。 + QDateTime timeUTC; + // timeUTC.setTimeSpec(Qt::UTC); + // timeUTC.setTime_t(m_hdr.basic.timestamp); + timeUTC.setSecsSinceEpoch(hdr->basic.timestamp); + QString strUTC = timeUTC.toString("yyyyMMdd-hhmmss"); + + QString strAcc(hdr->basic.acc_username); + int idx = strAcc.indexOf('\\'); + if(-1 != idx) { + QString _domain = strAcc.left(idx); + QString _user = strAcc.right(strAcc.length() - idx - 1); + strAcc = _user + "@" + _domain; + } + + QString strType; + if(hdr->info.type == TS_TPPR_TYPE_SSH) { + strType = "SSH"; + } + else if(hdr->info.type == TS_TPPR_TYPE_RDP) { + strType = "RDP"; + m_need_tpk = true; + } + else { + strType = "UNKNOWN"; + } + + // .../record/RDP-211-admin-user@domain-192.168.0.68-20191015-020243 + m_data_path = QString("%1/%2-%3-%4-%5-%6-%7").arg(m_data_path_base, strType, m_rid, hdr->basic.user_username, strAcc, hdr->basic.host_ip, strUTC); + m_data_path = QDir::toNativeSeparators(m_data_path); + qDebug() << "PATH_BASE: " << m_data_path; + + QDir dir; + dir.mkpath(m_data_path); + QFileInfo fi; + fi.setFile(m_data_path); + if(!fi.isDir()) { + qDebug("can not create folder to save downloaded file."); + return false; + } + + QString filename = QString("%1/tp-rdp.tpr").arg(m_data_path); + filename = QDir::toNativeSeparators(filename); + qDebug() << "TPR: " << filename; + + QFile f; + f.setFileName(filename); + if(!f.open(QIODevice::WriteOnly | QFile::Truncate)){ + qDebug("open file for write failed."); + return false; + } + + qint64 written = f.write(reinterpret_cast(hdr), sizeof(TS_RECORD_HEADER)); + f.flush(); + f.close(); + + if(written != sizeof(TS_RECORD_HEADER)) { + qDebug("save header file failed."); + return false; + } + + m_tpd_count = hdr->info.dat_file_count; + + return true; +} + +bool ThrDownload::_download_tpk() { + QString tpk_fname = QString("%1/tp-rdp.tpk").arg(m_data_path); + tpk_fname = QDir::toNativeSeparators(tpk_fname); + + QString tmp_fname = QString("%1/tp-rdp.tpk.downloading").arg(m_data_path); + tmp_fname = QDir::toNativeSeparators(tmp_fname); + + QFileInfo fi_tmp(tmp_fname); + if(fi_tmp.isFile()) { + QFile::remove(tmp_fname); + } + + QFileInfo fi_tpk(tpk_fname); + if(!fi_tpk.exists()) { + QString url = QString("%1/audit/get-file?act=read&type=rdp&rid=%2&f=tp-rdp.tpk").arg(m_url_base, m_rid); + qDebug() << "TPK: " << tmp_fname; + if(!_download_file(url, tmp_fname)) + return false; + + if(!QFile::rename(tmp_fname, tpk_fname)) + return false; + } + + return true; +} + +bool ThrDownload::_download_file(const QString& url, const QString filename) { + Downloader dl; + if(!dl.request(url, m_sid, filename)) { + qDebug() << "download failed."; + m_error = QString("%1").arg(LOCAL8BIT("下载文件失败!")); + return false; + } + + return true; +} + +bool ThrDownload::_download_file(const QString& url, QByteArray& data) { + Downloader dl; + if(!dl.request(url, m_sid, &data)) { + qDebug() << 
"download failed."; + m_error = QString("%1").arg(LOCAL8BIT("下载文件失败!")); + return false; + } + + return true; +} + +bool ThrDownload::is_tpd_downloaded(uint32_t file_idx) const { + if(!m_have_tpd) + return false; + if(file_idx >= m_tpd_count) + return false; + return m_have_tpd[file_idx]; +} + diff --git a/client/tp-player/thr_download.h b/client/tp-player/thr_download.h new file mode 100644 index 0000000..e5478e0 --- /dev/null +++ b/client/tp-player/thr_download.h @@ -0,0 +1,72 @@ +#ifndef THR_DOWNLOAD_H +#define THR_DOWNLOAD_H + +#include +#include +#include +#include + +class ThrDownload : public QThread { + Q_OBJECT + +//public: +// enum State { +// statStarting, +// statDownloading, +// statInvalidParam, +// statFailDone, +// statSuccessDone +// }; + +public: + ThrDownload(); + ~ThrDownload(); + + bool init(const QString& local_data_path_base, const QString& res); + + virtual void run(); + void stop(); + + bool is_running() const {return m_running;} + + bool is_tpr_downloaded() const {return m_have_tpr;} + bool is_tpk_downloaded() const {return m_have_tpk;} + bool is_tpd_downloaded(uint32_t file_idx) const; + bool get_data_path(QString& path) const { + if(m_data_path.isEmpty()) + return false; + path = m_data_path; + return true; + } + +private: + void _run(); + + bool _download_tpr(); + bool _download_tpk(); + + bool _download_file(const QString& url, const QString filename); + bool _download_file(const QString& url, QByteArray& data); + +private: + bool m_need_stop; + + QString m_data_path_base; + + QString m_url_base; + QString m_sid; + QString m_rid; + QString m_data_path; + + bool m_running; + bool m_have_tpr; + bool m_have_tpk; + bool m_need_tpk; + + uint32_t m_tpd_count; + bool* m_have_tpd; + + QString m_error; +}; + +#endif // THR_DOWNLOAD_H diff --git a/client/tp-player/thr_play.cpp b/client/tp-player/thr_play.cpp new file mode 100644 index 0000000..424f7bd --- /dev/null +++ b/client/tp-player/thr_play.cpp @@ -0,0 +1,182 @@ +#include +#include +#include + +#include "thr_play.h" +#include "thr_data.h" +#include "mainwindow.h" +#include "record_format.h" +#include "util.h" + + +/* + * 录像播放流程: + * - 数据处理线程,该线程负责(下载)文件、解析文件,将数据准备成待播放队列; + * + 数据处理线程维护待播放队列,少于500个则填充至1000个,每20ms检查一次队列是否少于500个。 + * - 播放线程从队列中取出一个数据,判断当前时间是否应该播放此数据,如果应该,则将此数据发送给主UI + * + if( 播放速率 * (当前时间 - 播放时间) >= (当前数据包偏移时间 - 上个数据包偏移时间)) 则 播放 + * + 如选择“跳过无操作时间”,则数据包偏移时间差超过3秒的,视为3秒。 + */ + + +ThrPlay::ThrPlay(MainWindow* mainwnd) { + m_mainwnd = mainwnd; + m_need_stop = false; + m_need_pause = false; + m_speed = 1; + m_skip = false; + m_start_ms = 0; +} + +ThrPlay::~ThrPlay() { + stop(); +} + +void ThrPlay::stop() { + if(!isRunning()) + return; + + m_need_stop = true; + wait(); + qDebug() << "play-thread end."; +} + +void ThrPlay::_notify_message(const QString& msg) { + UpdateData* _msg = new UpdateData(TYPE_MESSAGE); + _msg->message(msg); + emit signal_update_data(_msg); +} + +void ThrPlay::_notify_error(const QString& msg) { + UpdateData* _msg = new UpdateData(TYPE_ERROR); + _msg->message(msg); + emit signal_update_data(_msg); +} + +void ThrPlay::resume(bool relocate, uint32_t start_ms) { + if(relocate) { + m_start_ms = start_ms; + m_first_run = true; + } + m_need_pause = false; +} + +void ThrPlay::run() { + + ThrData* thr_data = m_mainwnd->get_thr_data(); + m_first_run = true; + uint32_t last_time_ms = 0; + uint32_t last_pass_ms = 0; + + UpdateData* dat = nullptr; + for(;;) { + if(m_need_stop) + break; + + // 1. 
从ThrData的待播放队列中取出一个数据 + dat = thr_data->get_data(); + if(dat == nullptr) { + msleep(20); + continue; + } + + if(m_first_run) { + m_first_run = false; + _notify_message(""); + } + + if(m_start_ms > 0) { + if(dat->get_time() < m_start_ms) { + emit signal_update_data(dat); + continue; + } + last_time_ms = m_start_ms; + m_start_ms = 0; + UpdateData* _enable = new UpdateData(TYPE_ENABLE_DRAW); + emit signal_update_data(_enable); + } + + // 2. 根据数据包的信息,等待到播放时间点 + uint32_t need_wait_ms = 0; + uint32_t this_time_ms = dat->get_time(); + uint32_t this_pass_ms = last_time_ms; + if(this_time_ms > 0) { + if(this_time_ms >= last_time_ms) + need_wait_ms = this_time_ms - last_time_ms; + else + need_wait_ms = 0; + + if(need_wait_ms > 0) { + uint32_t time_wait = 0; + + // 如果设置了跳过无操作区间,将超过1秒的等待时间压缩至1秒。 + if(m_skip) { + if(need_wait_ms > 1000) + need_wait_ms = 1000; + } + + for(;;) { + time_wait = need_wait_ms > 10 ? 10 : need_wait_ms; + msleep(time_wait); + + if(m_need_pause) { + while(m_need_pause) { + msleep(50); + if(m_need_stop) + break; + } + } + + if(m_need_stop) + break; + + if(m_start_ms > 0) { + delete dat; + dat = nullptr; + UpdateData* _disable = new UpdateData(TYPE_DISABLE_DRAW); + msleep(500); + emit signal_update_data(_disable); + break; + } + + time_wait *= m_speed; + + // 如果已经在等待长时间无操作区间内,用户设置了跳过无操作区间,则将超过0.5秒的等待时间压缩至0.5秒。 + if(m_skip) { + if(need_wait_ms > 500) + need_wait_ms = 500; + } + + this_pass_ms += time_wait; + if(this_pass_ms - last_pass_ms > 100) { + UpdateData* _passed_ms = new UpdateData(TYPE_PLAYED_MS); + _passed_ms->played_ms(this_pass_ms); + emit signal_update_data(_passed_ms); + last_pass_ms = this_pass_ms; + } + + if(need_wait_ms <= time_wait) + break; + else + need_wait_ms -= time_wait; + } + + if(m_need_stop) + break; + } + + } + last_time_ms = this_time_ms; + + // 3. 
将数据包发送给主UI界面进行显示 + if(dat != nullptr) { + if(dat->data_type() == TYPE_END) { + _notify_message(LOCAL8BIT("播放结束")); + } + emit signal_update_data(dat); + } + } + + if(dat != nullptr) + delete dat; +} diff --git a/client/tp-player/thr_play.h b/client/tp-player/thr_play.h new file mode 100644 index 0000000..e8ea3d8 --- /dev/null +++ b/client/tp-player/thr_play.h @@ -0,0 +1,43 @@ +#ifndef THR_PLAY_H +#define THR_PLAY_H + +#include +#include "update_data.h" +#include "downloader.h" + +class MainWindow; +// 根据播放规则,将要播放的图像发送给主UI线程进行显示 +class ThrPlay : public QThread +{ + Q_OBJECT + +friend class ThrData; +public: + ThrPlay(MainWindow* mainwnd); + ~ThrPlay(); + + virtual void run(); + void stop(); + void pause() {m_need_pause = true;} + void resume(bool relocate, uint32_t start_ms); + void speed(int s) {if(s >= 1 && s <= 16) m_speed = s;} + void skip(bool s) {m_skip = s;} + +private: + void _notify_message(const QString& msg); + void _notify_error(const QString& err_msg); + +signals: + void signal_update_data(UpdateData*); + +private: + MainWindow* m_mainwnd; + bool m_need_stop; + bool m_need_pause; + int m_speed; + bool m_skip; + bool m_first_run; + uint32_t m_start_ms; +}; + +#endif // THR_PLAY_H diff --git a/client/tp-player/tp-player.pro b/client/tp-player/tp-player.pro new file mode 100644 index 0000000..ba65483 --- /dev/null +++ b/client/tp-player/tp-player.pro @@ -0,0 +1,70 @@ +TEMPLATE = app +TARGET = tp-player + +QT += core gui widgets network + +HEADERS += \ + mainwindow.h \ + bar.h \ + thr_play.h \ + thr_data.h \ + update_data.h \ + record_format.h \ + rle.h \ + util.h \ + downloader.h \ + thr_download.h + +SOURCES += \ + main.cpp \ + mainwindow.cpp \ + bar.cpp \ + thr_play.cpp \ + thr_data.cpp \ + update_data.cpp \ + rle.c \ + util.cpp \ + downloader.cpp \ + thr_download.cpp + +RESOURCES += \ + tp-player.qrc + +RC_FILE += \ + tp-player.rc + +FORMS += \ + mainwindow.ui + + +win32:CONFIG(release, debug|release): { + DEFINES += QT_NO_DEBUG_OUTPUT + LIBS += -L$$PWD/../../external/zlib/build/release/ -lzlib + DESTDIR = $$PWD/../../out/client/x86/Release +} +else:win32:CONFIG(debug, debug|release): { + LIBS += -L$$PWD/../../external/zlib/build/debug/ -lzlibd + DESTDIR = $$PWD/../../out/client/x86/Debug +} + + +macx:CONFIG(release, debug|release): { + DEFINES += QT_NO_DEBUG_OUTPUT + LIBS += -L$$PWD/../../external/zlib/build/release/ -lzlib + DESTDIR = $$PWD/../../out/client/x86/Release +} +else:macx:CONFIG(debug, debug|release): { + LIBS += -L$$PWD/../../external/zlib/build/debug/ -lzlibd + DESTDIR = $$PWD/../../out/client/x86/Debug +} + + +INCLUDEPATH += $$PWD/../../external/zlib +INCLUDEPATH += $$PWD/../../external/zlib/build +DEPENDPATH += $$PWD/../../external/zlib +DEPENDPATH += $$PWD/../../external/zlib/build + +#win32-g++:CONFIG(release, debug|release): PRE_TARGETDEPS += $$PWD/../../external/zlib/build/release/libzlibstatic.a +#else:win32-g++:CONFIG(debug, debug|release): PRE_TARGETDEPS += $$PWD/../../external/zlib/build/debug/libzlibstaticd.a +#else:win32:!win32-g++:CONFIG(release, debug|release): PRE_TARGETDEPS += $$PWD/../../external/zlib/build/release/zlibstatic.lib +#else:win32:!win32-g++:CONFIG(debug, debug|release): PRE_TARGETDEPS += $$PWD/../../external/zlib/build/debug/zlibstaticd.lib diff --git a/client/tp-player/tp-player.qrc b/client/tp-player/tp-player.qrc new file mode 100644 index 0000000..f79019d --- /dev/null +++ b/client/tp-player/tp-player.qrc @@ -0,0 +1,38 @@ + + + res/bg.png + res/cursor.png + + res/bar/bg-left.png + res/bar/bg-mid.png + res/bar/bg-right.png + + 
res/bar/btn-normal-left.png
+        res/bar/btn-normal-mid.png
+        res/bar/btn-normal-right.png
+        res/bar/btn-sel-left.png
+        res/bar/btn-sel-mid.png
+        res/bar/btn-sel-right.png
+        res/bar/btn-hover-left.png
+        res/bar/btn-hover-mid.png
+        res/bar/btn-hover-right.png
+
+        res/bar/play-hover.png
+        res/bar/play-normal.png
+        res/bar/pause-hover.png
+        res/bar/pause-normal.png
+
+        res/bar/prgbar-mid.png
+        res/bar/prgbar-right.png
+        res/bar/prgbarh-left.png
+        res/bar/prgbarh-mid.png
+
+        res/bar/prgpt-normal.png
+        res/bar/prgpt-hover.png
+
+        res/bar/chkbox-normal.png
+        res/bar/chkbox-hover.png
+        res/bar/chkbox-sel-normal.png
+        res/bar/chkbox-sel-hover.png
+
+
diff --git a/client/tp-player/tp-player.rc b/client/tp-player/tp-player.rc
new file mode 100644
index 0000000..df678b3
--- /dev/null
+++ b/client/tp-player/tp-player.rc
@@ -0,0 +1,2 @@
+IDI_ICON1 ICON DISCARDABLE "res\\tp-player.ico"
+
diff --git a/client/tp-player/update_data.cpp b/client/tp-player/update_data.cpp
new file mode 100644
index 0000000..b9cfdfd
--- /dev/null
+++ b/client/tp-player/update_data.cpp
@@ -0,0 +1,79 @@
+#include "update_data.h"
+
+#include <string.h>
+#include <stdlib.h>
+
+
+UpdateData::UpdateData() : QObject(nullptr)
+{
+    _init();
+}
+
+UpdateData::UpdateData(int data_type) : QObject(nullptr)
+{
+    _init();
+    m_data_type = data_type;
+}
+
+UpdateData::UpdateData(int data_type, uint32_t time_ms) : QObject(nullptr)
+{
+    _init();
+    m_data_type = data_type;
+    m_time_ms = time_ms;
+}
+
+UpdateData::UpdateData(const TS_RECORD_HEADER& hdr) : QObject(nullptr)
+{
+    _init();
+    m_data_type = TYPE_HEADER_INFO;
+    m_hdr = new TS_RECORD_HEADER;
+    memcpy(m_hdr, &hdr, sizeof(TS_RECORD_HEADER));
+}
+
+void UpdateData::_init() {
+    m_data_type = TYPE_UNKNOWN;
+    m_hdr = nullptr;
+    m_pointer = nullptr;
+
+    m_data_buf = nullptr;
+    m_data_len = 0;
+    m_time_ms = 0;
+}
+
+UpdateData::~UpdateData() {
+    if(m_hdr)
+        delete m_hdr;
+    if(m_pointer)
+        delete m_pointer;
+    for(int i = 0; i < m_images.size(); ++i) {
+        delete m_images[i].img;
+    }
+    m_images.clear();
+
+    if(m_data_buf)
+        delete[] m_data_buf;    // allocated with new[]
+}
+
+void UpdateData::set_pointer(uint32_t ts, const TS_RECORD_RDP_POINTER* p) {
+    m_data_type = TYPE_POINTER;
+    m_time_ms = ts;
+    m_pointer = new TS_RECORD_RDP_POINTER;
+    memcpy(m_pointer, p, sizeof(TS_RECORD_RDP_POINTER));
+}
+
+void UpdateData::alloc_data(uint32_t len) {
+    if(m_data_buf)
+        delete[] m_data_buf;
+
+    m_data_buf = new uint8_t[len];
+    memset(m_data_buf, 0, len);
+    m_data_len = len;
+}
+
+void UpdateData::attach_data(const uint8_t* dat, uint32_t len) {
+    if(m_data_buf)
+        delete[] m_data_buf;
+    m_data_buf = new uint8_t[len];
+    memcpy(m_data_buf, dat, len);
+    m_data_len = len;
+}
diff --git a/client/tp-player/update_data.h b/client/tp-player/update_data.h
new file mode 100644
index 0000000..f142629
--- /dev/null
+++ b/client/tp-player/update_data.h
@@ -0,0 +1,106 @@
+#ifndef UPDATE_DATA_H
+#define UPDATE_DATA_H
+
+#include <QObject>
+#include <QImage>
+#include "record_format.h"
+
+#define TYPE_UNKNOWN            0
+#define TYPE_HEADER_INFO        1
+
+#define TYPE_DISABLE_DRAW       5
+#define TYPE_ENABLE_DRAW        6
+
+#define TYPE_POINTER            10
+#define TYPE_IMAGE              11
+#define TYPE_KEYFRAME           12
+#define TYPE_PLAYED_MS          20
+#define TYPE_DOWNLOAD_PERCENT   21
+#define TYPE_END                50
+#define TYPE_MESSAGE            90
+#define TYPE_ERROR              91
+
+
+typedef struct UPDATE_IMAGE {
+    int x;
+    int y;
+    int w;
+    int h;
+    QImage* img;
+}UPDATE_IMAGE;
+
+typedef QVector<UPDATE_IMAGE> UpdateImages;
+
+class UpdateData : public QObject
+{
+    Q_OBJECT
+public:
+    explicit UpdateData();
+    explicit UpdateData(int data_type);
+    explicit UpdateData(int data_type, uint32_t
time_ms);
+    explicit UpdateData(const TS_RECORD_HEADER& hdr);
+    virtual ~UpdateData();
+
+    void set_pointer(uint32_t ts, const TS_RECORD_RDP_POINTER* p);
+
+    TS_RECORD_HEADER* get_header() {return m_hdr;}
+    TS_RECORD_RDP_POINTER* get_pointer() {return m_pointer;}
+    UpdateImages& get_images() {return m_images;}
+    const UpdateImages& get_images() const {return m_images;}
+
+    uint32_t get_time() {return m_time_ms;}
+
+    void alloc_data(uint32_t len);
+    void attach_data(const uint8_t* dat, uint32_t len);
+
+    int data_type() const {return m_data_type;}
+
+    uint8_t* data_buf() {return m_data_buf;}
+    uint32_t data_len() const {return m_data_len;}
+
+    void played_ms(uint32_t ms) {m_played_ms = ms;}
+    uint32_t played_ms() {return m_played_ms;}
+
+    void message(const QString& msg) {m_msg = msg;}
+    const QString message() {return m_msg;}
+
+private:
+    void _init(void);
+
+signals:
+
+public slots:
+
+
+private:
+    int m_data_type;
+    uint32_t m_time_ms;
+    uint8_t* m_data_buf;
+    uint32_t m_data_len;
+    uint32_t m_played_ms;
+    QString m_msg;
+
+    // for HEADER
+    TS_RECORD_HEADER* m_hdr;
+    // for POINTER
+    TS_RECORD_RDP_POINTER* m_pointer;
+    // for IMAGE
+    UpdateImages m_images;
+};
+
+class UpdateDataHelper {
+public:
+    UpdateDataHelper(UpdateData* data) {
+        m_data = data;
+    }
+    ~UpdateDataHelper() {
+        if(m_data)
+            delete m_data;
+    }
+
+private:
+    UpdateData* m_data;
+};
+
+
+#endif // UPDATE_DATA_H
diff --git a/client/tp-player/util.cpp b/client/tp-player/util.cpp
new file mode 100644
index 0000000..e69de29
diff --git a/client/tp-player/util.h b/client/tp-player/util.h
new file mode 100644
index 0000000..8ff938b
--- /dev/null
+++ b/client/tp-player/util.h
@@ -0,0 +1,33 @@
+#ifndef TP_PLAYER_UTIL_H
+#define TP_PLAYER_UTIL_H
+
+#include <QTime>
+
+class TimeUseTest {
+public:
+    TimeUseTest() {
+        m_used_ms = 0;
+        m_count = 0;
+    }
+    ~TimeUseTest() {}
+
+    void begin() {
+        m_time.start();
+    }
+    void end() {
+        m_count++;
+        m_used_ms += m_time.elapsed();
+    }
+
+    uint32_t used() const {return m_used_ms;}
+    uint32_t count() const {return m_count;}
+
+private:
+    QTime m_time;
+    uint32_t m_used_ms;
+    uint32_t m_count;
+};
+
+#define LOCAL8BIT(x) QString::fromLocal8Bit(x)
+
+#endif // TP_PLAYER_UTIL_H
diff --git a/client/tp_assist_macos/TP-Assist.xcodeproj/project.pbxproj b/client/tp_assist_macos/TP-Assist.xcodeproj/project.pbxproj
index f40084d..c86b523 100644
--- a/client/tp_assist_macos/TP-Assist.xcodeproj/project.pbxproj
+++ b/client/tp_assist_macos/TP-Assist.xcodeproj/project.pbxproj
@@ -37,8 +37,9 @@
 		7AA2CD541F6AB9F10074C92B /* json_writer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 7AA2CD501F6AB9F10074C92B /* json_writer.cpp */; };
 		7AA2CD571F6ABA2E0074C92B /* mongoose.c in Sources */ = {isa = PBXBuildFile; fileRef = 7AA2CD561F6ABA2E0074C92B /* mongoose.c */; };
 		7AA2CD591F6AC0DA0074C92B /* site in Resources */ = {isa = PBXBuildFile; fileRef = 7AA2CD581F6AC0DA0074C92B /* site */; };
-		7AF9BF272199E3DE00BE5DBC /* libssl.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 7AF9BF1F2199E31A00BE5DBC /* libssl.a */; };
-		7AF9BF292199E3DF00BE5DBC /* libcrypto.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 7AF9BF282199E3DF00BE5DBC /* libcrypto.a */; };
+		7AAE4B242390EE5C007EDDE7 /* libmbedtls.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 7AF9BF222199E32B00BE5DBC /* libmbedtls.a */; };
+		7AAE4B252390EE7D007EDDE7 /* libmbedcrypto.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 7AF9BF232199E32B00BE5DBC /* libmbedcrypto.a */; };
+		7AAE4B262390EE7D007EDDE7 /* libmbedx509.a in Frameworks */ = {isa = PBXBuildFile;
fileRef = 7AF9BF212199E32B00BE5DBC /* libmbedx509.a */; }; A1B7B9DD1DB53ED200809327 /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = A1B7B9DF1DB53ED200809327 /* Localizable.strings */; }; A1D700071A5DCE8D003563E4 /* AboutWindowController.m in Sources */ = {isa = PBXBuildFile; fileRef = A1D700061A5DCE8D003563E4 /* AboutWindowController.m */; }; C149EBFE15D5214600B1F558 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C149EBFD15D5214600B1F558 /* Cocoa.framework */; }; @@ -107,11 +108,10 @@ 7AA2CD501F6AB9F10074C92B /* json_writer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = json_writer.cpp; path = ../../../../external/jsoncpp/src/lib_json/json_writer.cpp; sourceTree = ""; }; 7AA2CD561F6ABA2E0074C92B /* mongoose.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = mongoose.c; path = ../../../../external/mongoose/mongoose.c; sourceTree = ""; }; 7AA2CD581F6AC0DA0074C92B /* site */ = {isa = PBXFileReference; lastKnownFileType = folder; path = site; sourceTree = ""; }; - 7AF9BF1F2199E31A00BE5DBC /* libssl.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libssl.a; path = ../../external/macos/release/lib/libssl.a; sourceTree = ""; }; + 7AAE4B232390E642007EDDE7 /* mongoose.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = mongoose.h; path = ../../../../external/mongoose/mongoose.h; sourceTree = ""; }; 7AF9BF212199E32B00BE5DBC /* libmbedx509.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libmbedx509.a; path = ../../external/macos/release/lib/libmbedx509.a; sourceTree = ""; }; 7AF9BF222199E32B00BE5DBC /* libmbedtls.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libmbedtls.a; path = ../../external/macos/release/lib/libmbedtls.a; sourceTree = ""; }; 7AF9BF232199E32B00BE5DBC /* libmbedcrypto.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libmbedcrypto.a; path = ../../external/macos/release/lib/libmbedcrypto.a; sourceTree = ""; }; - 7AF9BF282199E3DF00BE5DBC /* libcrypto.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcrypto.a; path = ../../external/macos/release/lib/libcrypto.a; sourceTree = ""; }; A1B7B9D31DB5361700809327 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/MainMenu.xib; sourceTree = ""; }; A1B7B9DE1DB53ED200809327 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = ""; }; A1B7B9E01DB53ED700809327 /* Base */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = Base; path = Base.lproj/Localizable.strings; sourceTree = ""; }; @@ -143,8 +143,9 @@ buildActionMask = 2147483647; files = ( C149EBFE15D5214600B1F558 /* Cocoa.framework in Frameworks */, - 7AF9BF292199E3DF00BE5DBC /* libcrypto.a in Frameworks */, - 7AF9BF272199E3DE00BE5DBC /* libssl.a in Frameworks */, + 7AAE4B262390EE7D007EDDE7 /* libmbedx509.a in Frameworks */, + 7AAE4B252390EE7D007EDDE7 /* libmbedcrypto.a in Frameworks */, + 7AAE4B242390EE5C007EDDE7 /* libmbedtls.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -225,6 +226,7 @@ 7AA2CD551F6ABA000074C92B /* mongoose */ = { isa = PBXGroup; children = ( + 7AAE4B232390E642007EDDE7 /* mongoose.h */, 7AA2CD561F6ABA2E0074C92B /* mongoose.c */, ); name = mongoose; @@ -248,13 +250,6 @@ path = csrc; sourceTree = ""; }; - 
7AF9BF1E2199E0DD00BE5DBC /* mbedtls */ = { - isa = PBXGroup; - children = ( - ); - name = mbedtls; - sourceTree = ""; - }; A12D9BE61BCF2C72004F52A6 /* apple-scpt */ = { isa = PBXGroup; children = ( @@ -286,11 +281,9 @@ C149EBFC15D5214600B1F558 /* Frameworks */ = { isa = PBXGroup; children = ( - 7AF9BF282199E3DF00BE5DBC /* libcrypto.a */, 7AF9BF232199E32B00BE5DBC /* libmbedcrypto.a */, 7AF9BF222199E32B00BE5DBC /* libmbedtls.a */, 7AF9BF212199E32B00BE5DBC /* libmbedx509.a */, - 7AF9BF1F2199E31A00BE5DBC /* libssl.a */, C149EBFD15D5214600B1F558 /* Cocoa.framework */, C149EBFF15D5214600B1F558 /* Other Frameworks */, ); @@ -310,7 +303,6 @@ C149EC0315D5214600B1F558 /* src */ = { isa = PBXGroup; children = ( - 7AF9BF1E2199E0DD00BE5DBC /* mbedtls */, 7A45423D2196E32800FEB5B4 /* cfg */, 7AD3E8741F6A7CC600D2EB48 /* csrc */, A12D9BE61BCF2C72004F52A6 /* apple-scpt */, @@ -367,7 +359,7 @@ C149EBF015D5214600B1F558 /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 0930; + LastUpgradeCheck = 1120; ORGANIZATIONNAME = TP4A; TargetAttributes = { C149EBF815D5214600B1F558 = { @@ -381,7 +373,7 @@ }; buildConfigurationList = C149EBF315D5214600B1F558 /* Build configuration list for PBXProject "TP-Assist" */; compatibilityVersion = "Xcode 10.0"; - developmentRegion = English; + developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( en, @@ -606,13 +598,14 @@ C149EC1815D5214600B1F558 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { - CODE_SIGN_IDENTITY = ""; + CODE_SIGN_IDENTITY = "-"; COMBINE_HIDPI_IMAGES = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = "src/TP-Assist-Prefix.pch"; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", MG_ENABLE_SSL, + "MG_SSL_IF=MG_SSL_IF_MBEDTLS", ); HEADER_SEARCH_PATHS = ( ../../common/teleport, @@ -634,11 +627,14 @@ C149EC1915D5214600B1F558 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { - CODE_SIGN_IDENTITY = ""; + CODE_SIGN_IDENTITY = "-"; COMBINE_HIDPI_IMAGES = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = "src/TP-Assist-Prefix.pch"; - GCC_PREPROCESSOR_DEFINITIONS = MG_ENABLE_SSL; + GCC_PREPROCESSOR_DEFINITIONS = ( + MG_ENABLE_SSL, + "MG_SSL_IF=MG_SSL_IF_MBEDTLS", + ); HEADER_SEARCH_PATHS = ( ../../common/teleport, ../../common/libex/include, diff --git a/client/tp_assist_macos/apple-scripts/scripts/iterm2.applescript b/client/tp_assist_macos/apple-scripts/scripts/iterm2.applescript index 1caf314..a840cae 100644 --- a/client/tp_assist_macos/apple-scripts/scripts/iterm2.applescript +++ b/client/tp_assist_macos/apple-scripts/scripts/iterm2.applescript @@ -6,58 +6,17 @@ on scriptRun(argsCmd, argsProfile, argsTitle) end scriptRun on CommandRun(theCmd, theProfile, theTitle) - tell application "iTerm" - if it is not running then - tell application "iTerm" - activate - delay 0.5 - try - close first window - end try - end tell - - tell application "iTerm" - try - create window with profile theProfile - on error msg - create window with profile "Default" - end try - tell the current window - tell the current session - delay 0.5 - set name to theTitle - set profile to theProfile - write text theCmd - delay 0.5 - write text "" - end tell - end tell - end tell - else - --assume that iTerm is open and open a new tab - try + try + tell application "iTerm" + if it is not running then tell application "iTerm" activate - tell the current window - try - create tab with profile theProfile - on error msg - create tab with profile "Default" - end try - tell the current tab - tell the current session - delay 
0.5 - set name to theTitle - write text theCmd - delay 0.5 - write text "" - end tell - end tell - end tell + delay 0.5 + try + close first window + end try end tell - on error msg - -- if all iTerm windows are closed the app stays open. In this scenario iTerm has - -- no "current window" and will give an error when trying to create the new tab. + tell application "iTerm" try create window with profile theProfile @@ -68,13 +27,59 @@ on CommandRun(theCmd, theProfile, theTitle) tell the current session delay 0.5 set name to theTitle + set profile to theProfile write text theCmd delay 0.5 write text "" end tell end tell end tell - end try - end if - end tell + else + --assume that iTerm is open and open a new tab + try + tell application "iTerm" + activate + tell the current window + try + create tab with profile theProfile + on error msg + create tab with profile "Default" + end try + tell the current tab + tell the current session + delay 0.5 + set name to theTitle + write text theCmd + delay 0.5 + write text "" + end tell + end tell + end tell + end tell + on error msg + -- if all iTerm windows are closed the app stays open. In this scenario iTerm has + -- no "current window" and will give an error when trying to create the new tab. + tell application "iTerm" + try + create window with profile theProfile + on error msg + create window with profile "Default" + end try + tell the current window + tell the current session + delay 0.5 + set name to theTitle + write text theCmd + delay 0.5 + write text "" + end tell + end tell + end tell + end try + end if + end tell + on error msg + display dialog "ERROR: " & msg + end try + end CommandRun diff --git a/client/tp_assist_macos/apple-scripts/scripts/terminal.applescript b/client/tp_assist_macos/apple-scripts/scripts/terminal.applescript index a61e3ab..33506c7 100644 --- a/client/tp_assist_macos/apple-scripts/scripts/terminal.applescript +++ b/client/tp_assist_macos/apple-scripts/scripts/terminal.applescript @@ -1,76 +1,83 @@ on scriptRun(argsCmd, argsProfile, argsTitle) - set theCmd to (argsCmd) + set theCmd to (argsCmd) set theProfile to (argsProfile) set theTitle to (argsTitle) CommandRun(theCmd, theProfile, theTitle) end scriptRun on CommandRun(theCmd, theProfile, theTitle) - tell application "Terminal" - if it is not running then - --if this is the first time Terminal is running you have specify window 1 - --if you dont do this you will get two windows and the title wont be set - activate - delay 1.0 - set newTerm to do script theCmd in window 1 - set newTerm's current settings to settings set theProfile - set custom title of front window to theTitle - - delay 1.0 - reopen - activate - tell application "System Events" to key code 36 - else - --Terminal is running get the window count - set windowCount to (count every window) - if windowCount = 0 then - --Terminal is running but no windows are open - --run our script in a new window + try + tell application "Terminal" + if it is not running then + --if this is the first time Terminal is running you have specify window 1 + --if you dont do this you will get two windows and the title wont be set + activate + delay 3.0 + set newTerm to do script theCmd in window 1 + set newTerm's current settings to settings set theProfile + set custom title of front window to theTitle + + delay 1.0 reopen activate - - do script theCmd in window 1 - - set current settings of selected tab of front window to settings set theProfile - set title displays custom title of front window to true - set custom title of 
selected tab of front window to theTitle - - delay 1.0 - reopen - activate - tell application "System Events" to key code 36 - + tell application "System Events" to key code 36 else - --Terminal is running and we have a window run in a new tab - reopen - activate - - tell application "System Events" - tell process "Terminal" - delay 0.5 - keystroke "t" using {command down} + --Terminal is running get the window count + set windowCount to (count every window) + if windowCount = 0 then + --Terminal is running but no windows are open + --run our script in a new window + reopen + activate + + do script theCmd in window 1 + + set current settings of selected tab of front window to settings set theProfile + set title displays custom title of front window to true + set custom title of selected tab of front window to theTitle + + delay 1.0 + reopen + activate + tell application "System Events" to key code 36 + + else + --Terminal is running and we have a window run in a new tab + reopen + activate + + tell application "System Events" + tell process "Terminal" + delay 0.5 + keystroke "t" using {command down} + end tell end tell - end tell - - reopen - activate - do script theCmd in front window - - set current settings of selected tab of front window to settings set theProfile - set title displays custom title of front window to true - set custom title of selected tab of front window to theTitle - - delay 1.0 - reopen - activate - tell application "System Events" to key code 36 - + + reopen + activate + do script theCmd in front window + + set current settings of selected tab of front window to settings set theProfile + set title displays custom title of front window to true + set custom title of selected tab of front window to theTitle + + delay 1.0 + reopen + activate + tell application "System Events" to key code 36 + + end if + + --set current settings of selected tab of front window to settings set theProfile + --set title displays custom title of front window to true + --set custom title of selected tab of front window to theTitle end if - -# set current settings of selected tab of front window to settings set theProfile -# set title displays custom title of front window to true -# set custom title of selected tab of front window to theTitle - end if - - end tell + + end tell + + on error msg + display dialog "ERROR: " & msg + end try + + end CommandRun diff --git a/client/tp_assist_macos/site/index.html b/client/tp_assist_macos/site/index.html index 35292c5..caecd17 100644 --- a/client/tp_assist_macos/site/index.html +++ b/client/tp_assist_macos/site/index.html @@ -32,7 +32,7 @@ diff --git a/client/tp_assist_macos/site/status.html b/client/tp_assist_macos/site/status.html new file mode 100644 index 0000000..f00ffd8 --- /dev/null +++ b/client/tp_assist_macos/site/status.html @@ -0,0 +1,94 @@ + + + + + + + + + + + TELEPORT助手 + + + + + + + + + + + +
+
+    Teleport Assist
+
+
+    Teleport Assist is working properly!
+
+
+    If the assist cannot be detected when you access the teleport web service over HTTPS, please click here to check whether this page can be displayed properly.
+
+    The assist uses a self-signed certificate for HTTPS access, and the root certificate of a self-signed authority is not trusted by browsers by default, so it must also be installed as a trusted root certificate. Depending on the browser, there are two ways to set this up:
+
+    Chrome/IE/Edge/Opera and similar browsers
+
+    1. Right-click the assist shortcut on the desktop and choose "Open file location";
+    2. Right-click cacert.cer and choose "Install Certificate" from the context menu;
+    3. In the "Certificate Import Wizard" dialog, select "Current User" and click Next;
+    4. Select "Place all certificates in the following store" and click the "Browse" button;
+    5. In the "Select Certificate Store" dialog, select "Trusted Root Certification Authorities" and click OK;
+    6. Click "Next", then click "Finish";
+    7. The system reports that the import succeeded, and you are all set.
+
+    Firefox
+
+    1. Open the Firefox options page;
+    2. Click "Privacy & Security" on the left, scroll to the bottom of the page, and click the "View Certificates" button;
+    3. In the "Certificate Manager" dialog, select the "Authorities" tab;
+    4. Click the "Import" button at the bottom of the dialog, then select the cacert.cer file and click "Open";
+    5. In the "Downloading Certificate" dialog, check "Trust this CA to identify websites", then click "OK";
+    6. Click "OK" to close the Certificate Manager dialog, and you are all set.
+
+    Note: after importing the certificate, please click here again to check whether the page can be displayed properly.
+
+
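For completeness, the same trust relationship can be established inside a Qt client process without touching the OS certificate store. The following is a minimal sketch, not part of this changeset; it assumes Qt was built with SSL support and that cacert.cer is the certificate file the page above refers to (the path is illustrative):

// Sketch only: register the assist's self-signed CA for this process.
#include <QByteArray>
#include <QFile>
#include <QList>
#include <QSsl>
#include <QSslCertificate>
#include <QSslSocket>

// Returns true if at least one certificate was loaded and registered.
static bool trust_assist_ca(const QString& path) {
    QFile f(path);
    if (!f.open(QIODevice::ReadOnly))
        return false;
    const QByteArray raw = f.readAll();
    // cacert.cer may be PEM- or DER-encoded; try both.
    QList<QSslCertificate> certs = QSslCertificate::fromData(raw, QSsl::Pem);
    if (certs.isEmpty())
        certs = QSslCertificate::fromData(raw, QSsl::Der);
    for (const QSslCertificate& c : certs)
        QSslSocket::addDefaultCaCertificate(c); // process-wide, not the OS trust store
    return !certs.isEmpty();
}

addDefaultCaCertificate only affects SSL sockets created by the current process, which is why the page above still walks the user through importing the certificate into the system or browser store.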
+ + + + \ No newline at end of file diff --git a/client/tp_assist_macos/src/Base.lproj/AboutWindowController.xib b/client/tp_assist_macos/src/Base.lproj/AboutWindowController.xib index f9946ed..60f1162 100644 --- a/client/tp_assist_macos/src/Base.lproj/AboutWindowController.xib +++ b/client/tp_assist_macos/src/Base.lproj/AboutWindowController.xib @@ -1,8 +1,8 @@ - + - + @@ -19,61 +19,61 @@ - - + + - + - + - + - - - + + + - + + + + + + + + + + - + - - - - - - - - - - + diff --git a/client/tp_assist_macos/src/Base.lproj/Localizable.strings b/client/tp_assist_macos/src/Base.lproj/Localizable.strings index e10f545..4773301 100644 --- a/client/tp_assist_macos/src/Base.lproj/Localizable.strings +++ b/client/tp_assist_macos/src/Base.lproj/Localizable.strings @@ -3,12 +3,12 @@ TPAssist */ -"app_name" = "Teleport助手"; +"app_name" = "Teleport Assist"; //============================================= // for About Window //============================================= -"version" = "Version: "; -"app_full_name" = "Teleport Assist for macOS"; -"copyright" = "Copyright © 2017~2018 TP4A. All rights reserved."; +"version" = ""; +"app_full_name" = "Teleport Assist"; +"copyright" = "Copyright © 2017~2019, tp4a.com. All rights reserved."; "visit_tp4a_website" = "Visit Teleport Website" diff --git a/client/tp_assist_macos/src/TP-Assist-Info.plist b/client/tp_assist_macos/src/TP-Assist-Info.plist index 63fffbc..421ea8e 100644 --- a/client/tp_assist_macos/src/TP-Assist-Info.plist +++ b/client/tp_assist_macos/src/TP-Assist-Info.plist @@ -17,19 +17,21 @@ CFBundlePackageType APPL CFBundleShortVersionString - 3.2.0 + 3.5.5 CFBundleSignature ???? CFBundleVersion - 3.2.0 + 3.5.5 LSApplicationCategoryType public.app-category.productivity LSMinimumSystemVersion ${MACOSX_DEPLOYMENT_TARGET} LSUIElement + NSAppleEventsUsageDescription + NSHumanReadableCopyright - Copyright © 2017~2018 TP4A. All rights reserved. + Copyright © 2017~2019, tp4a.com. All rights reserved. 
NSMainNibFile MainMenu NSPrincipalClass diff --git a/client/tp_assist_macos/src/apple-scpt/iterm2.scpt b/client/tp_assist_macos/src/apple-scpt/iterm2.scpt index 431c2bf..dac4f8e 100644 Binary files a/client/tp_assist_macos/src/apple-scpt/iterm2.scpt and b/client/tp_assist_macos/src/apple-scpt/iterm2.scpt differ diff --git a/client/tp_assist_macos/src/apple-scpt/terminal.scpt b/client/tp_assist_macos/src/apple-scpt/terminal.scpt index abda883..98d335e 100644 Binary files a/client/tp_assist_macos/src/apple-scpt/terminal.scpt and b/client/tp_assist_macos/src/apple-scpt/terminal.scpt differ diff --git a/client/tp_assist_macos/src/csrc/ts_cfg.cpp b/client/tp_assist_macos/src/csrc/ts_cfg.cpp index 7e3daf5..4896d37 100644 --- a/client/tp_assist_macos/src/csrc/ts_cfg.cpp +++ b/client/tp_assist_macos/src/csrc/ts_cfg.cpp @@ -41,9 +41,12 @@ bool TsCfg::save(const ex_astr& new_value) if(!_load(new_value)) return false; - Json::StyledWriter jwriter; - ex_astr val = jwriter.write(m_root); - + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(m_root, &os); + ex_astr val = os.str(); + if(!ex_write_text_file(g_env.m_cfg_file, val)) { EXLOGE("can not save config file.\n"); return false; @@ -123,12 +126,21 @@ bool TsCfg::_parse_app(const Json::Value& m_root, const ex_astr& str_app, APP_CO bool TsCfg::_load(const ex_astr& str_json) { - Json::Reader jreader; +// Json::Reader jreader; +// +// if (!jreader.parse(str_json.c_str(), m_root)) { +// EXLOGE("can not parse new config data, not in json format?\n"); +// return false; +// } + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = str_json.c_str(); - if (!jreader.parse(str_json.c_str(), m_root)) { - EXLOGE("can not parse new config data, not in json format?\n"); - return false; - } + ex_astr err; + if (!jreader->parse(str_json_begin, str_json_begin + str_json.length(), &m_root, &err)) { + EXLOGE("can not parse new config data, not in json format? 
%s\n", err.c_str()); + return false; + } //=================================== // check ssh config diff --git a/client/tp_assist_macos/src/csrc/ts_env.cpp b/client/tp_assist_macos/src/csrc/ts_env.cpp index fc196d9..b5c9173 100644 --- a/client/tp_assist_macos/src/csrc/ts_env.cpp +++ b/client/tp_assist_macos/src/csrc/ts_env.cpp @@ -32,3 +32,11 @@ bool TsEnv::init(const char* cfg_file, const char* res_path) return true; } + +extern "C" { +int mg_ssl_if_mbed_random(void *ctx, unsigned char *buf, size_t len) { + (void) ctx; + while (len--) *buf++ = (arc4random() % 255); + return 0; +} +} diff --git a/client/tp_assist_macos/src/csrc/ts_http_rpc.cpp b/client/tp_assist_macos/src/csrc/ts_http_rpc.cpp index f74a9a7..4430c4a 100644 --- a/client/tp_assist_macos/src/csrc/ts_http_rpc.cpp +++ b/client/tp_assist_macos/src/csrc/ts_http_rpc.cpp @@ -32,9 +32,9 @@ int http_rpc_start(void* app) { EXLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT); return -1; } - - EXLOGW("[rpc] TeleportAssist-HTTP-RPC ready on localhost:%d\n", TS_HTTP_RPC_PORT); - + + EXLOGW("[rpc] TeleportAssist-HTTP-RPC ready on 127.0.0.1:%d\n", TS_HTTP_RPC_PORT); + if(!g_http_interface.start()) return -2; @@ -43,9 +43,9 @@ int http_rpc_start(void* app) { EXLOGE("[ERROR] can not start HTTPS-RPC listener, maybe port %d is already in use.\n", TS_HTTPS_RPC_PORT); return -1; } - - EXLOGW("[rpc] TeleportAssist-HTTPS-RPC ready on localhost:%d\n", TS_HTTPS_RPC_PORT); - + + EXLOGW("[rpc] TeleportAssist-HTTPS-RPC ready on 127.0.0.1:%d\n", TS_HTTPS_RPC_PORT); + if(!g_https_interface.start()) return -2; @@ -106,22 +106,21 @@ TsHttpRpc::~TsHttpRpc() mg_mgr_free(&m_mg_mgr); } -bool TsHttpRpc::init_http() -{ - - char addr[128] = { 0 }; - ex_strformat(addr, 128, "tcp://localhost:%d", TS_HTTP_RPC_PORT); - +bool TsHttpRpc::init_http() { struct mg_connection* nc = NULL; + + char addr[128] = { 0 }; + ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTP_RPC_PORT); + nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); - if (nc == NULL) { - EXLOGE("[rpc] TsHttpRpc::init_http() localhost:%d\n", TS_HTTP_RPC_PORT); + if (!nc) { + EXLOGE("[rpc] TsHttpRpc::init 127.0.0.1:%d\n", TS_HTTP_RPC_PORT); return false; } nc->user_data = this; - + mg_set_protocol_http_websocket(nc); - + return _on_init(); } @@ -135,27 +134,27 @@ bool TsHttpRpc::init_https() ex_wstr2astr(file_ssl_cert, _ssl_cert); ex_astr _ssl_key; ex_wstr2astr(file_ssl_key, _ssl_key); - + const char *err = NULL; struct mg_bind_opts bind_opts; memset(&bind_opts, 0, sizeof(bind_opts)); bind_opts.ssl_cert = _ssl_cert.c_str(); bind_opts.ssl_key = _ssl_key.c_str(); bind_opts.error_string = &err; - + char addr[128] = { 0 }; - ex_strformat(addr, 128, "tcp://localhost:%d", TS_HTTPS_RPC_PORT); + ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTPS_RPC_PORT); struct mg_connection* nc = NULL; nc = mg_bind_opt(&m_mg_mgr, addr, _mg_event_handler, bind_opts); - if (nc == NULL) { - EXLOGE("[rpc] TsHttpRpc::init_https() localhost:%d\n", TS_HTTPS_RPC_PORT); + if (!nc) { + EXLOGE("[rpc] TsHttpRpc::init 127.0.0.1:%d\n", TS_HTTPS_RPC_PORT); return false; } nc->user_data = this; - + mg_set_protocol_http_websocket(nc); - + return _on_init(); } @@ -182,7 +181,7 @@ void TsHttpRpc::_thread_loop(void) { mg_mgr_poll(&m_mg_mgr, 500); } - + EXLOGV("[core] rpc main loop end.\n"); } @@ -225,23 +224,25 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat EXLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str()); #endif ex_astr ret_buf; - bool b_is_index = 
false; + bool b_is_html = false; - if (uri == "/") - { - ex_wstr page = L"Teleport Assistor\n\n
Teleport Assistor works fine.
"; - ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); +// if (uri == "/") { +// ex_wstr page = L"Teleport\n\n
Teleport Assistor works fine.
"; +// ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); +// +// mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); +// nc->flags |= MG_F_SEND_AND_CLOSE; +// return; +// } - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); - nc->flags |= MG_F_SEND_AND_CLOSE; - return; - } - - if (uri == "/config") - { - uri = "/index.html"; - b_is_index = true; - } + if (uri == "/") { + uri = "/status.html"; + b_is_html = true; + } + else if (uri == "/config") { + uri = "/index.html"; + b_is_html = true; + } ex_astr temp; size_t offset = uri.find("/", 1); @@ -262,24 +263,24 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat { _this->_process_js_request(method, json_param, ret_buf); } - - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: application/json\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); + + mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: application/json\r\n\r\n%s", ret_buf.length(), ret_buf.c_str()); nc->flags |= MG_F_SEND_AND_CLOSE; return; } } - + ex_astr file_suffix; offset = uri.rfind("."); if (offset > 0) { file_suffix = uri.substr(offset, uri.length()); } - + ex_wstr2astr(g_env.m_site_path, temp); ex_astr index_path = temp + uri; - + FILE* file = ex_fopen(index_path.c_str(), "rb"); if (file) @@ -295,25 +296,23 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat fseek(file, 0, SEEK_SET); ret = fread(buf, 1, file_size, file); fclose(file); - + ex_astr content_type = _this->get_content_type(file_suffix); - + mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: %s\r\n\r\n", file_size, content_type.c_str()); mg_send(nc, buf, (int)file_size); delete []buf; nc->flags |= MG_F_SEND_AND_CLOSE; return; - } - else if (b_is_index) - { + } else if (b_is_html) { ex_wstr page = L"404 Not Found

404 Not Found


Teleport Assistor configuration page not found.

"; ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); - - mg_printf(nc, "HTTP/1.0 404 File Not Found\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); + + mg_printf(nc, "HTTP/1.0 404 File Not Found\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.length(), ret_buf.c_str()); nc->flags |= MG_F_SEND_AND_CLOSE; return; } - + } break; default: @@ -336,7 +335,7 @@ int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, ex_as ex_astrs strs; - size_t pos_start = 1; // һֽڣһ '/' + size_t pos_start = 1; // skip first charactor, it must be '/' size_t i = 0; for (i = pos_start; i < req->uri.len; ++i) @@ -349,7 +348,7 @@ int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, ex_as tmp_uri.assign(req->uri.p + pos_start, i - pos_start); strs.push_back(tmp_uri); } - pos_start = i + 1; // ǰҵķָ + pos_start = i + 1; // skip current split chactor. } } if (pos_start < req->uri.len) @@ -397,7 +396,7 @@ int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, ex_as if (func_args.length() > 0) { - // url-decode + // decode param with url-decode. size_t len = func_args.length() * 2; ex_chars sztmp; sztmp.resize(len); @@ -448,44 +447,50 @@ void TsHttpRpc::_process_js_request(const ex_astr& func_cmd, const ex_astr& func void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode) { - // أ {"code":123} + // return {"code":123} - Json::FastWriter jr_writer; Json::Value jr_root; - jr_root["code"] = errcode; - buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); } void TsHttpRpc::_create_json_ret(ex_astr& buf, Json::Value& jr_root) { - Json::FastWriter jr_writer; - buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); } -void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) -{ - // Σ{"ip":"192.168.5.11","port":22,"uname":"root","uauth":"abcdefg","authmode":1,"protocol":2} +void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) { + // param: {"ip":"192.168.5.11","port":22,"uname":"root","uauth":"abcdefg","authmode":1,"protocol":2} // authmode: 1=password, 2=private-key // protocol: 1=rdp, 2=ssh - // SSHأ {"code":0, "data":{"sid":"0123abcde"}} - // RDPأ {"code":0, "data":{"sid":"0123abcde0A"}} + // SSH return {"code":0, "data":{"sid":"0123abcde"}} + // RDP return {"code":0, "data":{"sid":"0123abcde0A"}} - Json::Reader jreader; - Json::Value jsRoot; + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = func_args.c_str(); - if (!jreader.parse(func_args.c_str(), jsRoot)) - { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } + Json::Value jsRoot; + ex_astr err; + if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) { + _create_json_ret(buf, TPE_JSON_FORMAT); + return; + } if (!jsRoot.isObject()) { _create_json_ret(buf, TPE_PARAM); return; } - // жϲǷȷ + // check param if (!jsRoot["teleport_ip"].isString() || !jsRoot["teleport_port"].isNumeric() || !jsRoot["remote_host_ip"].isString() || !jsRoot["session_id"].isString() || !jsRoot["protocol_type"].isNumeric() || !jsRoot["protocol_sub_type"].isNumeric() @@ -512,10 +517,9 @@ void 
TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) ex_astr s_exec; ex_astr s_arg; ex_astrs s_argv; - - - if (pro_type == TP_PROTOCOL_TYPE_RDP) - { + + + if (pro_type == TP_PROTOCOL_TYPE_RDP) { //============================================== // RDP //============================================== @@ -524,10 +528,10 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TPE_NOT_EXISTS); return; } - - bool flag_clipboard = (protocol_flag & TP_FLAG_RDP_CLIPBOARD); - bool flag_disk = (protocol_flag & TP_FLAG_RDP_DISK); - bool flag_console = (protocol_flag & TP_FLAG_RDP_CONSOLE); + + bool flag_clipboard = ((protocol_flag & TP_FLAG_RDP_CLIPBOARD) == TP_FLAG_RDP_CLIPBOARD); + bool flag_disk = ((protocol_flag & TP_FLAG_RDP_DISK) == TP_FLAG_RDP_DISK); + bool flag_console = ((protocol_flag & TP_FLAG_RDP_CONSOLE) == TP_FLAG_RDP_CONSOLE); int rdp_w = 800; int rdp_h = 640; @@ -578,9 +582,9 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) { szPwd[i] = '*'; } - + //ex_astr2wstr(real_sid, w_sid); - + //w_exe_path = _T("\""); //w_exe_path += g_cfg.rdp_app + _T("\" "); //w_exe_path += g_cfg.rdp_cmdline; @@ -589,24 +593,49 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) //w_exe_path = _T("xfreerdp -u {user_name} {size} {console} "); //s_exec = "/usr/local/Cellar/freerdp/1.0.2_1/bin/xfreerdp"; s_exec = g_cfg.rdp.application; - s_argv.push_back(s_exec.c_str()); + s_arg = g_cfg.rdp.cmdline; + + sid = "02" + real_sid; +// s_argv.push_back("/f"); + + s_argv.push_back("/sec:tls"); + s_argv.push_back("-wallpaper"); + s_argv.push_back("-themes"); + // Ignore certificate + s_argv.push_back("/cert-ignore"); + // Automatically accept certificate on first connect + s_argv.push_back("/cert-tofu"); + + ex_astr _tmp_pass = "/p:PLACEHOLDER"; + //_tmp_pass += szPwd; + s_argv.push_back(_tmp_pass); + +//#if 0 + //s_argv.push_back(s_exec.c_str()); { - ex_astr username = "02" + real_sid; - - s_argv.push_back("-u"); - s_argv.push_back(username.c_str()); - +// ex_astr username = "02" + real_sid; +// s_argv.push_back("/u:"); +// s_argv.push_back(username.c_str()); + + if (rdp_w == 0 || rdp_h == 0) { - s_argv.push_back("-f"); + s_argv.push_back("/f"); } else { - char sz_size[64] = {0}; - ex_strformat(sz_size, 63, "%dx%d", rdp_w, rdp_h); - s_argv.push_back("-g"); - s_argv.push_back(sz_size); - } - +// char sz_size[64] = {0}; +// ex_strformat(sz_size, 63, "%dx%d", rdp_w, rdp_h); +// s_argv.push_back("-g"); +// s_argv.push_back(sz_size); + char sz_width[64] = {0}; + ex_strformat(sz_width, 63, "/w:%d", rdp_w); + s_argv.push_back(sz_width); + + char sz_height[64] = {0}; + ex_strformat(sz_height, 63, "/h:%d", rdp_h); + s_argv.push_back(sz_height); + } + if (flag_console && rdp_console) s_argv.push_back("/admin"); @@ -619,17 +648,16 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) // s_argv.push_back("+drives"); // else // s_argv.push_back("-drives"); - - { - char sz_temp[128] = {0}; - ex_strformat(sz_temp, 127, "%s:%d", teleport_ip.c_str(), teleport_port); - s_argv.push_back(sz_temp); - } +// +// { +// char sz_temp[128] = {0}; +// ex_strformat(sz_temp, 127, "%s:%d", teleport_ip.c_str(), teleport_port); +// s_argv.push_back(sz_temp); +// } } - +//#endif } - else if (pro_type == TP_PROTOCOL_TYPE_SSH) - { + else if (pro_type == TP_PROTOCOL_TYPE_SSH) { //============================================== // SSH //============================================== @@ -638,11 +666,11 @@ void 
TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) { if(g_cfg.ssh.name == "terminal" || g_cfg.ssh.name == "iterm2") { char szCmd[1024] = {0}; - ex_strformat(szCmd, 1023, "ssh %s@%s -p %d", sid.c_str(), teleport_ip.c_str(), teleport_port); - + ex_strformat(szCmd, 1023, "ssh %s@%s -p %d -o \"StrictHostKeyChecking no\"", sid.c_str(), teleport_ip.c_str(), teleport_port); + char szTitle[128] = {0}; ex_strformat(szTitle, 127, "TP#%s", real_host_ip.c_str()); - + int ret = AppDelegate_start_ssh_client(g_app, szCmd, g_cfg.ssh.name.c_str(), g_cfg.ssh.cmdline.c_str(), szTitle); if(ret == 0) _create_json_ret(buf, TPE_OK); @@ -650,20 +678,20 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TPE_FAILED); return; } - + if(g_cfg.ssh.application.length() == 0) { _create_json_ret(buf, TPE_NOT_EXISTS); return; } - + s_exec = g_cfg.ssh.application; s_argv.push_back(s_exec.c_str()); - + s_arg = g_cfg.ssh.cmdline; } else { - + // sorry, SFTP not supported yet for macOS. // _create_json_ret(buf, TPE_NOT_IMPLEMENT); // return; @@ -672,16 +700,14 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TPE_NOT_EXISTS); return; } - + s_exec = g_cfg.sftp.application; s_argv.push_back(s_exec.c_str()); - - s_arg = g_cfg.sftp.cmdline; + s_arg = g_cfg.sftp.cmdline; } } - else if (pro_type == TP_PROTOCOL_TYPE_TELNET) - { + else if (pro_type == TP_PROTOCOL_TYPE_TELNET) { //============================================== // TELNET //============================================== @@ -716,7 +742,7 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) // s_arg = g_cfg.telnet.cmdline; } - + //---- split s_arg and push to s_argv --- ex_astr::size_type p1 = 0; ex_astr::size_type p2 = 0; @@ -739,7 +765,7 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) ex_astr _t; _t.assign(tmp, p1, p2 - p1); tmp.erase(0, p2 + 2); - + s_argv.push_back(_t); } else { p1 = 0; @@ -754,12 +780,12 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) ex_astr _t; _t.assign(tmp, p1, p2 - p1); tmp.erase(0, p2 + 1); - + s_argv.push_back(_t); } } - - + + Json::Value root_ret; ex_astr utf8_path = s_exec; @@ -774,18 +800,18 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) utf8_path += " "; utf8_path += (*it); } - + root_ret["path"] = utf8_path; // for macOS, Create Process should be fork()/exec()... 
pid_t processId; if ((processId = fork()) == 0) { - + int i = 0; char** _argv = (char**)calloc(s_argv.size()+1, sizeof(char*)); if (!_argv) return; - + for (i = 0; i < s_argv.size(); ++i) { _argv[i] = ex_strdup(s_argv[i].c_str()); @@ -800,42 +826,47 @@ void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) } } free(_argv); - + } else if (processId < 0) { root_ret["code"] = TPE_FAILED; } else { root_ret["code"] = TPE_OK; } - + // root_ret["code"] = TPE_OK; _create_json_ret(buf, root_ret); } - - -void TsHttpRpc::_rpc_func_rdp_play(const ex_astr& func_args, ex_astr& buf) -{ +void TsHttpRpc::_rpc_func_rdp_play(const ex_astr& func_args, ex_astr& buf) { _create_json_ret(buf, TPE_NOT_IMPLEMENT); } -void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) -{ +void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) { Json::Value jr_root; jr_root["code"] = 0; jr_root["data"] = g_cfg.get_root(); _create_json_ret(buf, jr_root); } -void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) -{ - Json::Reader jreader; - Json::Value jsRoot; - if (!jreader.parse(func_args.c_str(), jsRoot)) - { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } +void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) { +// Json::Reader jreader; +// Json::Value jsRoot; +// if (!jreader.parse(func_args.c_str(), jsRoot)) +// { +// _create_json_ret(buf, TPE_JSON_FORMAT); +// return; +// } + + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = func_args.c_str(); + Json::Value jsRoot; + ex_astr err; + if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) { + _create_json_ret(buf, TPE_JSON_FORMAT); + return; + } if(!g_cfg.save(func_args)) _create_json_ret(buf, TPE_FAILED); @@ -848,7 +879,7 @@ void TsHttpRpc::_rpc_func_file_action(const ex_astr& func_args, ex_astr& buf) { #if 0 Json::Reader jreader; Json::Value jsRoot; - + if (!jreader.parse(func_args.c_str(), jsRoot)) { _create_json_ret(buf, TPE_JSON_FORMAT); return; @@ -859,7 +890,7 @@ void TsHttpRpc::_rpc_func_file_action(const ex_astr& func_args, ex_astr& buf) { // return; // } // int action = jsRoot["action"].asUInt(); - + AppDelegate_select_app(g_app); _create_json_ret(buf, TPE_FAILED); @@ -884,8 +915,7 @@ void TsHttpRpc::_rpc_func_file_action(const ex_astr& func_args, ex_astr& buf) { #endif } -void TsHttpRpc::_rpc_func_get_version(const ex_astr& func_args, ex_astr& buf) -{ +void TsHttpRpc::_rpc_func_get_version(const ex_astr& func_args, ex_astr& buf) { Json::Value root_ret; ex_wstr w_version = TP_ASSIST_VER; ex_astr version; diff --git a/client/tp_assist_macos/src/csrc/ts_ver.h b/client/tp_assist_macos/src/csrc/ts_ver.h index a296cc3..935d5a2 100644 --- a/client/tp_assist_macos/src/csrc/ts_ver.h +++ b/client/tp_assist_macos/src/csrc/ts_ver.h @@ -1,6 +1,6 @@ -#ifndef __TS_ASSIST_VER_H__ -#define __TS_ASSIST_VER_H__ - -#define TP_ASSIST_VER L"3.2.0" - -#endif // __TS_ASSIST_VER_H__ +#ifndef __TS_ASSIST_VER_H__ +#define __TS_ASSIST_VER_H__ + +#define TP_ASSIST_VER L"3.5.5" + +#endif // __TS_ASSIST_VER_H__ diff --git a/client/tp_assist_macos/src/zh-Hans.lproj/Localizable.strings b/client/tp_assist_macos/src/zh-Hans.lproj/Localizable.strings index 1c1fec3..9ebc70b 100644 --- a/client/tp_assist_macos/src/zh-Hans.lproj/Localizable.strings +++ b/client/tp_assist_macos/src/zh-Hans.lproj/Localizable.strings @@ -13,7 +13,7 @@ // for About Window 
//============================================= "about " = "关于 "; -"version" = "版本:"; -"app_full_name" = "Teleport助手 - macOS"; -"copyright" = "© 2017~2018 TP4A,保留所有权利。"; +"version" = ""; +"app_full_name" = "Teleport助手"; +"copyright" = "© 2017~2019,tp4a.com。保留所有权利。"; "visit_tp4a_website" = "访问 Teleport 网站"; diff --git a/client/tp_assist_win/dlg_main.cpp b/client/tp_assist_win/dlg_main.cpp index c2171cc..fbb8bd7 100644 --- a/client/tp_assist_win/dlg_main.cpp +++ b/client/tp_assist_win/dlg_main.cpp @@ -96,7 +96,7 @@ INT_PTR CALLBACK eomDlgMainProc(HWND hwndDlg, UINT message, WPARAM wParam, LPARA case IDM_OPEN_CONFIG: { - ShellExecute(nullptr, _T("open"), _T("http://localhost:50022/config"), nullptr, nullptr, SW_SHOW); + ShellExecute(nullptr, _T("open"), _T("http://127.0.0.1:50022/config"), nullptr, nullptr, SW_SHOW); return TRUE; }break; diff --git a/client/tp_assist_win/site/index.html b/client/tp_assist_win/site/index.html index 60e67a2..400afb8 100644 --- a/client/tp_assist_win/site/index.html +++ b/client/tp_assist_win/site/index.html @@ -32,7 +32,7 @@ @@ -47,6 +47,7 @@
  • {host_ip} is replaced with the host IP address
  • {host_port} is replaced with the host port number
+ • {host_name} is replaced with the host name
  • {user_name} is replaced with the user name
  • {real_ip} is replaced with the real IP address of the remote host (for display only, e.g. in the client's window title or tab title)
  • {assist_tools_path} is replaced with the absolute path of the tools directory containing the assist tools
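As a rough illustration of how such placeholders end up in a client command line, here is a hypothetical helper; expand_placeholders and its behavior are illustrative only, not the assist's actual implementation:

// Hypothetical sketch of {placeholder} substitution as documented above.
#include <map>
#include <string>

static std::string expand_placeholders(std::string cmdline,
                                       const std::map<std::string, std::string>& vars) {
    for (const auto& kv : vars) {
        const std::string token = "{" + kv.first + "}";
        std::string::size_type pos = 0;
        // Replace every occurrence of the token with its value.
        while ((pos = cmdline.find(token, pos)) != std::string::npos) {
            cmdline.replace(pos, token.length(), kv.second);
            pos += kv.second.length();
        }
    }
    return cmdline;
}

// expand_placeholders("ssh {user_name}@{host_ip} -p {host_port}",
//                     {{"user_name", "root"}, {"host_ip", "192.168.1.5"}, {"host_port", "22"}})
// yields "ssh root@192.168.1.5 -p 22".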
diff --git a/client/tp_assist_win/site/js/config.js b/client/tp_assist_win/site/js/config.js
index 541943a..583c3bb 100644
--- a/client/tp_assist_win/site/js/config.js
+++ b/client/tp_assist_win/site/js/config.js
@@ -1,6 +1,7 @@
 "use strict";

-var g_url_base = 'http://localhost:50022';
+//var g_url_base = 'http://localhost:50022';
+var g_url_base = 'http://127.0.0.1:50022';

 var g_cfg = null;

diff --git a/client/tp_assist_win/site/status.html b/client/tp_assist_win/site/status.html
new file mode 100644
index 0000000..f00ffd8
--- /dev/null
+++ b/client/tp_assist_win/site/status.html
@@ -0,0 +1,94 @@
+
+
+
+
+
+
+
+
+
+
+    TELEPORT Assist
+
+
+
+
+
+
+
+
+
+
+
+    Teleport Assist
+
+
+    Teleport Assist is working properly!
+
+
+    If the assist cannot be detected when you access the teleport web service over HTTPS, please click here to check whether this page can be displayed properly.
+
+    The assist uses a self-signed certificate for HTTPS access, and the root certificate of a self-signed authority is not trusted by browsers by default, so it must also be installed as a trusted root certificate. Depending on the browser, there are two ways to set this up:
+
+    Chrome/IE/Edge/Opera and similar browsers
+
+    1. Right-click the assist shortcut on the desktop and choose "Open file location";
+    2. Right-click cacert.cer and choose "Install Certificate" from the context menu;
+    3. In the "Certificate Import Wizard" dialog, select "Current User" and click Next;
+    4. Select "Place all certificates in the following store" and click the "Browse" button;
+    5. In the "Select Certificate Store" dialog, select "Trusted Root Certification Authorities" and click OK;
+    6. Click "Next", then click "Finish";
+    7. The system reports that the import succeeded, and you are all set.
+
+    Firefox
+
+    1. Open the Firefox options page;
+    2. Click "Privacy & Security" on the left, scroll to the bottom of the page, and click the "View Certificates" button;
+    3. In the "Certificate Manager" dialog, select the "Authorities" tab;
+    4. Click the "Import" button at the bottom of the dialog, then select the cacert.cer file and click "Open";
+    5. In the "Downloading Certificate" dialog, check "Trust this CA to identify websites", then click "OK";
+    6. Click "OK" to close the Certificate Manager dialog, and you are all set.
+
+    Note: after importing the certificate, please click here again to check whether the page can be displayed properly.
+
+
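Since the assist now serves this status page at "/", reachability can also be checked from any Qt program with a plain HTTP GET. A minimal sketch, assuming the HTTP-RPC port 50022 used by config.js and dlg_main.cpp above (requires QT += network in the .pro file):

// Sketch only: probe the assist's local status endpoint.
#include <QCoreApplication>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>

int main(int argc, char* argv[]) {
    QCoreApplication app(argc, argv);
    QNetworkAccessManager mgr;
    QNetworkReply* reply = mgr.get(QNetworkRequest(QUrl("http://127.0.0.1:50022/")));
    QObject::connect(reply, &QNetworkReply::finished, [&]() {
        if (reply->error() == QNetworkReply::NoError)
            qInfo("assist is up, HTTP %d",
                  reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt());
        else
            qWarning("assist not reachable: %s", qPrintable(reply->errorString()));
        reply->deleteLater();
        app.quit();
    });
    return app.exec();
}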
    + + + + \ No newline at end of file diff --git a/client/tp_assist_win/stdafx.cpp b/client/tp_assist_win/stdafx.cpp index fd4f341..6c5f035 100644 --- a/client/tp_assist_win/stdafx.cpp +++ b/client/tp_assist_win/stdafx.cpp @@ -1 +1,14 @@ -#include "stdafx.h" +#include "stdafx.h" + +#include + +// #ifdef EX_DEBUG +// // # pragma comment(lib, "libssl32MTd.lib") +// // # pragma comment(lib, "libcrypto32MTd.lib") +// #else +// # pragma comment(lib, "libssl32MT.lib") +// # pragma comment(lib, "libcrypto32MT.lib") +// #endif + +# pragma comment(lib, "libssl.lib") +# pragma comment(lib, "libcrypto.lib") diff --git a/client/tp_assist_win/tp_assist.rc b/client/tp_assist_win/tp_assist.rc index fabec21..2f7babf 100644 Binary files a/client/tp_assist_win/tp_assist.rc and b/client/tp_assist_win/tp_assist.rc differ diff --git a/client/tp_assist_win/tp_assist.vs2015.sln b/client/tp_assist_win/tp_assist.vs2015.sln deleted file mode 100644 index 61bfe7d..0000000 --- a/client/tp_assist_win/tp_assist.vs2015.sln +++ /dev/null @@ -1,22 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.23107.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tp_assist", "tp_assist.vs2015.vcxproj", "{63B7A8F2-9722-487C-A92A-3DB5D8CA1473}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|x86 = Debug|x86 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {63B7A8F2-9722-487C-A92A-3DB5D8CA1473}.Debug|x86.ActiveCfg = Debug|Win32 - {63B7A8F2-9722-487C-A92A-3DB5D8CA1473}.Debug|x86.Build.0 = Debug|Win32 - {63B7A8F2-9722-487C-A92A-3DB5D8CA1473}.Release|x86.ActiveCfg = Release|Win32 - {63B7A8F2-9722-487C-A92A-3DB5D8CA1473}.Release|x86.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/client/tp_assist_win/tp_assist.vs2015.vcxproj b/client/tp_assist_win/tp_assist.vs2015.vcxproj deleted file mode 100644 index 71ba5fe..0000000 --- a/client/tp_assist_win/tp_assist.vs2015.vcxproj +++ /dev/null @@ -1,185 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - - {63B7A8F2-9722-487C-A92A-3DB5D8CA1473} - Win32Proj - tp_assist - tp_assist - 8.1 - - - - Application - true - v140_xp - Unicode - - - Application - false - v140_xp - true - Unicode - false - - - - - - - - - - - - - true - ..\..\out\client\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - C:\Program Files %28x86%29\Visual Leak Detector\include;$(IncludePath) - C:\Program Files %28x86%29\Visual Leak Detector\lib\Win32;$(LibraryPath) - - - false - ..\..\out\client\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - - Use - Level3 - Disabled - WIN32;MG_ENABLE_SSL;_DEBUG;_WINDOWS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) - true - ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\inc32 - - - Windows - true - ..\..\external\openssl\out32\ssleay32.lib;..\..\external\openssl\out32\libeay32.lib;%(AdditionalDependencies) - - - - - Level3 - Use - MaxSpeed - true - true - 
WIN32;MG_ENABLE_SSL;NDEBUG;_WINDOWS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) - true - ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\inc32 - MultiThreaded - - - Windows - true - true - true - ..\..\external\openssl\out32\ssleay32.lib;..\..\external\openssl\out32\libeay32.lib;%(AdditionalDependencies) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - NotUsing - NotUsing - - - - - Create - Create - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/client/tp_assist_win/tp_assist.vs2015.vcxproj.filters b/client/tp_assist_win/tp_assist.vs2015.vcxproj.filters deleted file mode 100644 index c020865..0000000 --- a/client/tp_assist_win/tp_assist.vs2015.vcxproj.filters +++ /dev/null @@ -1,180 +0,0 @@ - - - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - jsoncpp - - - jsoncpp - - - jsoncpp - - - mongoose - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - - - resource - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - mongoose - - - main app - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - main app - - - main app - - - libex\header - - - libex\header - - - libex\header - - - teleport - - - - - resource - - - resource - - - resource - - - - - {52b425b1-8aa9-4e08-acbd-c88387350530} - - - {adabe93d-3938-4b11-9352-5b67a1efd7e3} - - - {35a345a0-6147-4c87-97c9-3b0b2a57e348} - - - {0942cec3-67df-4d19-bbc1-e962145e496f} - - - {a88e05d3-51f4-463f-84cc-c3bc86f07aac} - - - {e3e7a811-5905-4ad5-86a7-9721af5d015a} - - - {d7d49fa4-5192-42c5-bc70-5584d9d646c6} - - - {1291a5cf-cb08-4ad6-8a86-8a0486297c63} - - - - - resource - - - - - jsoncpp - - - \ No newline at end of file diff --git a/client/tp_assist_win/tp_assist.vs2017.vcxproj b/client/tp_assist_win/tp_assist.vs2017.vcxproj index 11918c8..c5ef904 100644 --- a/client/tp_assist_win/tp_assist.vs2017.vcxproj +++ b/client/tp_assist_win/tp_assist.vs2017.vcxproj @@ -46,8 +46,8 @@ true ..\..\out\client\$(PlatformTarget)\$(Configuration)\ ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - C:\Program Files %28x86%29\Visual Leak Detector\include;$(IncludePath) - C:\Program Files %28x86%29\Visual Leak Detector\lib\Win32;$(LibraryPath) + C:\apps\vld\include;$(IncludePath) + C:\apps\vld\lib\Win32;$(LibraryPath) false @@ -61,12 +61,14 @@ Disabled WIN32;MG_ENABLE_SSL;_DEBUG;_WINDOWS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) true - ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\inc32 + ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\include + MultiThreadedDebug Windows true - ..\..\external\openssl\out32\ssleay32.lib;..\..\external\openssl\out32\libeay32.lib;%(AdditionalDependencies) + 
%(AdditionalDependencies) + ..\..\external\openssl\lib;%(AdditionalLibraryDirectories) @@ -78,7 +80,7 @@ true WIN32;MG_ENABLE_SSL;NDEBUG;_WINDOWS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) true - ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\inc32 + ..\..\common\teleport;..\..\common\libex\include;..\..\external\jsoncpp\include;..\..\external\openssl\include MultiThreaded @@ -86,7 +88,8 @@ true true true - ..\..\external\openssl\out32\ssleay32.lib;..\..\external\openssl\out32\libeay32.lib;%(AdditionalDependencies) + %(AdditionalDependencies) + ..\..\external\openssl\lib;%(AdditionalLibraryDirectories) diff --git a/client/tp_assist_win/ts_cfg.cpp b/client/tp_assist_win/ts_cfg.cpp index 722a7b5..f9214f6 100644 --- a/client/tp_assist_win/ts_cfg.cpp +++ b/client/tp_assist_win/ts_cfg.cpp @@ -1,266 +1,274 @@ -#include "stdafx.h" -#include "ts_cfg.h" -#include "ts_env.h" - - -TsCfg g_cfg; - -TsCfg::TsCfg() -{} - -TsCfg::~TsCfg() -{} - -bool TsCfg::init(void) { - ex_astr file_content; - if (!ex_read_text_file(g_env.m_cfg_file, file_content)) { - EXLOGE("can not load config file.\n"); - return false; - } - - if (!_load(file_content)) - return false; - - return true; -} - -bool TsCfg::save(const ex_astr& new_value) -{ - if (!_load(new_value)) - return false; - - Json::StyledWriter jwriter; - ex_astr val = jwriter.write(m_root); - - if (!ex_write_text_file(g_env.m_cfg_file, val)) { - EXLOGE("can not save config file.\n"); - return false; - } - - return true; -} - -bool TsCfg::_load(const ex_astr& str_json) { - Json::Reader jreader; - - if (!jreader.parse(str_json.c_str(), m_root)) { - EXLOGE("can not parse new config data, not in json format? 
%s\n", jreader.getFormattedErrorMessages().c_str()); - return false; - } - - ex_astr sel_name; - size_t i = 0; - ex_astr tmp; - - //=================================== - // check ssh config - //=================================== - - if (!m_root["ssh"].isObject()) { - EXLOGE("invalid config, error 1.\n"); - return false; - } - - if (!m_root["ssh"]["selected"].isString()) { - EXLOGE("invalid config, error 2.\n"); - return false; - } - - sel_name = m_root["ssh"]["selected"].asCString(); - - if (!m_root["ssh"]["available"].isArray() || m_root["ssh"]["available"].size() == 0) { - EXLOGE("invalid config, error 3.\n"); - return false; - } - - for (i = 0; i < m_root["ssh"]["available"].size(); ++i) { - - if ( - !m_root["ssh"]["available"][i]["name"].isString() - || !m_root["ssh"]["available"][i]["app"].isString() - || !m_root["ssh"]["available"][i]["cmdline"].isString() - ) { - EXLOGE("invalid config, error 4.\n"); - return false; - } - - if (m_root["ssh"]["available"][i]["display"].isNull()) { - m_root["ssh"]["available"][i]["display"] = m_root["ssh"]["available"][i]["name"]; - } - - if (m_root["ssh"]["available"][i]["name"].asCString() != sel_name) - continue; - - tmp = m_root["ssh"]["available"][i]["app"].asCString(); - ex_astr2wstr(tmp, ssh_app, EX_CODEPAGE_UTF8); - tmp = m_root["ssh"]["available"][i]["cmdline"].asCString(); - ex_astr2wstr(tmp, ssh_cmdline, EX_CODEPAGE_UTF8); - - break; - } - - if (ssh_app.length() == 0 || ssh_cmdline.length() == 0) { - EXLOGE("invalid config, error 6.\n"); - return false; - } - - //=================================== - // check sftp config - //=================================== - - if (!m_root["scp"].isObject()) { - EXLOGE("invalid config, error 1.\n"); - return false; - } - - if (!m_root["scp"]["selected"].isString()) { - EXLOGE("invalid config, error 2.\n"); - return false; - } - - sel_name = m_root["scp"]["selected"].asCString(); - - if (!m_root["scp"]["available"].isArray() || m_root["scp"]["available"].size() == 0) { - EXLOGE("invalid config, error 3.\n"); - return false; - } - - for (i = 0; i < m_root["scp"]["available"].size(); ++i) { - - if ( - !m_root["scp"]["available"][i]["name"].isString() - || !m_root["scp"]["available"][i]["app"].isString() - || !m_root["scp"]["available"][i]["cmdline"].isString() - ) { - EXLOGE("invalid config, error 4.\n"); - return false; - } - - if (m_root["scp"]["available"][i]["display"].isNull()) { - m_root["scp"]["available"][i]["display"] = m_root["scp"]["available"][i]["name"]; - } - - if (m_root["scp"]["available"][i]["name"].asCString() != sel_name) - continue; - - tmp = m_root["scp"]["available"][i]["app"].asCString(); - ex_astr2wstr(tmp, scp_app, EX_CODEPAGE_UTF8); - tmp = m_root["scp"]["available"][i]["cmdline"].asCString(); - ex_astr2wstr(tmp, scp_cmdline, EX_CODEPAGE_UTF8); - - break; - } - - if (scp_app.length() == 0 || scp_cmdline.length() == 0) { - EXLOGE("invalid config, error 6.\n"); - return false; - } - - //=================================== - // check telnet config - //=================================== - - if (!m_root["telnet"].isObject()) { - EXLOGE("invalid config, error 1.\n"); - return false; - } - - if (!m_root["telnet"]["selected"].isString()) { - EXLOGE("invalid config, error 2.\n"); - return false; - } - - sel_name = m_root["telnet"]["selected"].asCString(); - - if (!m_root["telnet"]["available"].isArray() || m_root["telnet"]["available"].size() == 0) { - EXLOGE("invalid config, error 3.\n"); - return false; - } - - for (i = 0; i < m_root["telnet"]["available"].size(); ++i) { - - if ( - 
!m_root["telnet"]["available"][i]["name"].isString() - || !m_root["telnet"]["available"][i]["app"].isString() - || !m_root["telnet"]["available"][i]["cmdline"].isString() - ) { - EXLOGE("invalid config, error 4.\n"); - return false; - } - - if (m_root["telnet"]["available"][i]["display"].isNull()) { - m_root["telnet"]["available"][i]["display"] = m_root["telnet"]["available"][i]["name"]; - } - - if (m_root["telnet"]["available"][i]["name"].asCString() != sel_name) - continue; - - tmp = m_root["telnet"]["available"][i]["app"].asCString(); - ex_astr2wstr(tmp, telnet_app, EX_CODEPAGE_UTF8); - tmp = m_root["telnet"]["available"][i]["cmdline"].asCString(); - ex_astr2wstr(tmp, telnet_cmdline, EX_CODEPAGE_UTF8); - - break; - } - - if (telnet_app.length() == 0 || telnet_cmdline.length() == 0) { - EXLOGE("invalid config, error 6.\n"); - return false; - } - - //=================================== - // check rdp config - //=================================== - - if (!m_root["rdp"].isObject()) { - EXLOGE("invalid config, error 1.\n"); - return false; - } - - if (!m_root["rdp"]["selected"].isString()) { - EXLOGE("invalid config, error 2.\n"); - return false; - } - - sel_name = m_root["rdp"]["selected"].asCString(); - - if (!m_root["rdp"]["available"].isArray() || m_root["rdp"]["available"].size() == 0) { - EXLOGE("invalid config, error 3.\n"); - return false; - } - - for (i = 0; i < m_root["rdp"]["available"].size(); ++i) { - - if ( - !m_root["rdp"]["available"][i]["name"].isString() - || !m_root["rdp"]["available"][i]["app"].isString() - || !m_root["rdp"]["available"][i]["cmdline"].isString() - ) { - EXLOGE("invalid config, error 4.\n"); - return false; - } - - if (m_root["rdp"]["available"][i]["display"].isNull()) { - m_root["rdp"]["available"][i]["display"] = m_root["rdp"]["available"][i]["name"]; - } - - if (m_root["rdp"]["available"][i]["name"].asCString() != sel_name) - continue; - - tmp = m_root["rdp"]["available"][i]["app"].asCString(); - ex_astr2wstr(tmp, rdp_app, EX_CODEPAGE_UTF8); - tmp = m_root["rdp"]["available"][i]["cmdline"].asCString(); - ex_astr2wstr(tmp, rdp_cmdline, EX_CODEPAGE_UTF8); - tmp = m_root["rdp"]["available"][i]["name"].asCString(); - ex_astr2wstr(tmp, rdp_name, EX_CODEPAGE_UTF8); - - break; - } - - if (rdp_app.length() == 0 || rdp_cmdline.length() == 0 || rdp_name.length() == 0) { - EXLOGE("invalid config, error 6.\n"); - return false; - } - - return true; -} +#include "stdafx.h" +#include "ts_cfg.h" +#include "ts_env.h" + + +TsCfg g_cfg; + +TsCfg::TsCfg() +{} + +TsCfg::~TsCfg() +{} + +bool TsCfg::init(void) { + ex_astr file_content; + if (!ex_read_text_file(g_env.m_cfg_file, file_content)) { + EXLOGE("can not load config file.\n"); + return false; + } + + if (!_load(file_content)) + return false; + + return true; +} + +bool TsCfg::save(const ex_astr& new_value) +{ + if (!_load(new_value)) + return false; + + //Json::StyledWriter jwriter; + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(m_root, &os); + ex_astr val = os.str(); + + if (!ex_write_text_file(g_env.m_cfg_file, val)) { + EXLOGE("can not save config file.\n"); + return false; + } + + return true; +} + +bool TsCfg::_load(const ex_astr& str_json) { + //Json::Reader jreader; + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = str_json.c_str(); + + ex_astr err; + if (!jreader->parse(str_json_begin, str_json_begin + str_json.length(), &m_root, &err)) { + EXLOGE("can not parse new config 
data, not in json format? %s\n", err.c_str()); + return false; + } + + ex_astr sel_name; + size_t i = 0; + ex_astr tmp; + + //=================================== + // check ssh config + //=================================== + + if (!m_root["ssh"].isObject()) { + EXLOGE("invalid config, error 1.\n"); + return false; + } + + if (!m_root["ssh"]["selected"].isString()) { + EXLOGE("invalid config, error 2.\n"); + return false; + } + + sel_name = m_root["ssh"]["selected"].asCString(); + + if (!m_root["ssh"]["available"].isArray() || m_root["ssh"]["available"].size() == 0) { + EXLOGE("invalid config, error 3.\n"); + return false; + } + + for (i = 0; i < m_root["ssh"]["available"].size(); ++i) { + + if ( + !m_root["ssh"]["available"][i]["name"].isString() + || !m_root["ssh"]["available"][i]["app"].isString() + || !m_root["ssh"]["available"][i]["cmdline"].isString() + ) { + EXLOGE("invalid config, error 4.\n"); + return false; + } + + if (m_root["ssh"]["available"][i]["display"].isNull()) { + m_root["ssh"]["available"][i]["display"] = m_root["ssh"]["available"][i]["name"]; + } + + if (m_root["ssh"]["available"][i]["name"].asCString() != sel_name) + continue; + + tmp = m_root["ssh"]["available"][i]["app"].asCString(); + ex_astr2wstr(tmp, ssh_app, EX_CODEPAGE_UTF8); + tmp = m_root["ssh"]["available"][i]["cmdline"].asCString(); + ex_astr2wstr(tmp, ssh_cmdline, EX_CODEPAGE_UTF8); + + break; + } + + if (ssh_app.length() == 0 || ssh_cmdline.length() == 0) { + EXLOGE("invalid config, error 6.\n"); + return false; + } + + //=================================== + // check sftp config + //=================================== + + if (!m_root["scp"].isObject()) { + EXLOGE("invalid config, error 1.\n"); + return false; + } + + if (!m_root["scp"]["selected"].isString()) { + EXLOGE("invalid config, error 2.\n"); + return false; + } + + sel_name = m_root["scp"]["selected"].asCString(); + + if (!m_root["scp"]["available"].isArray() || m_root["scp"]["available"].size() == 0) { + EXLOGE("invalid config, error 3.\n"); + return false; + } + + for (i = 0; i < m_root["scp"]["available"].size(); ++i) { + + if ( + !m_root["scp"]["available"][i]["name"].isString() + || !m_root["scp"]["available"][i]["app"].isString() + || !m_root["scp"]["available"][i]["cmdline"].isString() + ) { + EXLOGE("invalid config, error 4.\n"); + return false; + } + + if (m_root["scp"]["available"][i]["display"].isNull()) { + m_root["scp"]["available"][i]["display"] = m_root["scp"]["available"][i]["name"]; + } + + if (m_root["scp"]["available"][i]["name"].asCString() != sel_name) + continue; + + tmp = m_root["scp"]["available"][i]["app"].asCString(); + ex_astr2wstr(tmp, scp_app, EX_CODEPAGE_UTF8); + tmp = m_root["scp"]["available"][i]["cmdline"].asCString(); + ex_astr2wstr(tmp, scp_cmdline, EX_CODEPAGE_UTF8); + + break; + } + + if (scp_app.length() == 0 || scp_cmdline.length() == 0) { + EXLOGE("invalid config, error 6.\n"); + return false; + } + + //=================================== + // check telnet config + //=================================== + + if (!m_root["telnet"].isObject()) { + EXLOGE("invalid config, error 1.\n"); + return false; + } + + if (!m_root["telnet"]["selected"].isString()) { + EXLOGE("invalid config, error 2.\n"); + return false; + } + + sel_name = m_root["telnet"]["selected"].asCString(); + + if (!m_root["telnet"]["available"].isArray() || m_root["telnet"]["available"].size() == 0) { + EXLOGE("invalid config, error 3.\n"); + return false; + } + + for (i = 0; i < m_root["telnet"]["available"].size(); ++i) { + + if ( + 
!m_root["telnet"]["available"][i]["name"].isString() + || !m_root["telnet"]["available"][i]["app"].isString() + || !m_root["telnet"]["available"][i]["cmdline"].isString() + ) { + EXLOGE("invalid config, error 4.\n"); + return false; + } + + if (m_root["telnet"]["available"][i]["display"].isNull()) { + m_root["telnet"]["available"][i]["display"] = m_root["telnet"]["available"][i]["name"]; + } + + if (m_root["telnet"]["available"][i]["name"].asCString() != sel_name) + continue; + + tmp = m_root["telnet"]["available"][i]["app"].asCString(); + ex_astr2wstr(tmp, telnet_app, EX_CODEPAGE_UTF8); + tmp = m_root["telnet"]["available"][i]["cmdline"].asCString(); + ex_astr2wstr(tmp, telnet_cmdline, EX_CODEPAGE_UTF8); + + break; + } + + if (telnet_app.length() == 0 || telnet_cmdline.length() == 0) { + EXLOGE("invalid config, error 6.\n"); + return false; + } + + //=================================== + // check rdp config + //=================================== + + if (!m_root["rdp"].isObject()) { + EXLOGE("invalid config, error 1.\n"); + return false; + } + + if (!m_root["rdp"]["selected"].isString()) { + EXLOGE("invalid config, error 2.\n"); + return false; + } + + sel_name = m_root["rdp"]["selected"].asCString(); + + if (!m_root["rdp"]["available"].isArray() || m_root["rdp"]["available"].size() == 0) { + EXLOGE("invalid config, error 3.\n"); + return false; + } + + for (i = 0; i < m_root["rdp"]["available"].size(); ++i) { + + if ( + !m_root["rdp"]["available"][i]["name"].isString() + || !m_root["rdp"]["available"][i]["app"].isString() + || !m_root["rdp"]["available"][i]["cmdline"].isString() + ) { + EXLOGE("invalid config, error 4.\n"); + return false; + } + + if (m_root["rdp"]["available"][i]["display"].isNull()) { + m_root["rdp"]["available"][i]["display"] = m_root["rdp"]["available"][i]["name"]; + } + + if (m_root["rdp"]["available"][i]["name"].asCString() != sel_name) + continue; + + tmp = m_root["rdp"]["available"][i]["app"].asCString(); + ex_astr2wstr(tmp, rdp_app, EX_CODEPAGE_UTF8); + tmp = m_root["rdp"]["available"][i]["cmdline"].asCString(); + ex_astr2wstr(tmp, rdp_cmdline, EX_CODEPAGE_UTF8); + tmp = m_root["rdp"]["available"][i]["name"].asCString(); + ex_astr2wstr(tmp, rdp_name, EX_CODEPAGE_UTF8); + + break; + } + + if (rdp_app.length() == 0 || rdp_cmdline.length() == 0 || rdp_name.length() == 0) { + EXLOGE("invalid config, error 6.\n"); + return false; + } + + return true; +} diff --git a/client/tp_assist_win/ts_env.cpp b/client/tp_assist_win/ts_env.cpp index db08129..767ef7b 100644 --- a/client/tp_assist_win/ts_env.cpp +++ b/client/tp_assist_win/ts_env.cpp @@ -1,71 +1,74 @@ -#include "stdafx.h" -#include "ts_env.h" - -#include -#ifdef EX_OS_WIN32 -# include -//# include -#endif - -TsEnv g_env; - -//======================================================= -// -//======================================================= - -TsEnv::TsEnv() -{} - -TsEnv::~TsEnv() -{} - -bool TsEnv::init(void) -{ - if (!ex_exec_file(m_exec_file)) - return false; - - m_exec_path = m_exec_file; - if (!ex_dirname(m_exec_path)) - return false; - - m_cfg_file = m_exec_path; - ex_path_join(m_cfg_file, false, L"cfg", L"tp-assist.json", NULL); - - m_log_path = m_exec_path; - ex_path_join(m_log_path, false, L"log", NULL); - - ex_wstr cfg_default; - -#ifdef _DEBUG - m_site_path = m_exec_path; - ex_path_join(m_site_path, true, L"..", L"..", L"..", L"..", L"client", L"tp_assist_win", L"site", NULL); - - m_tools_path = m_exec_path; - ex_path_join(m_tools_path, true, L"..", L"..", L"..", L"..", L"client", L"tools", NULL); - - 
cfg_default = m_exec_path;
-	ex_path_join(cfg_default, true, L"..", L"..", L"..", L"..", L"client", L"tp_assist_win", L"cfg", L"tp-assist.default.json", NULL);
-
-#else
-	m_site_path = m_exec_path;
-	ex_path_join(m_site_path, false, L"site", NULL);
-
-	m_tools_path = m_exec_path;
-	ex_path_join(m_tools_path, false, L"tools", NULL);
-
-	cfg_default = m_exec_path;
-	ex_path_join(cfg_default, false, L"tp-assist.default.json", NULL);
-#endif
-
-	if (!ex_is_file_exists(m_cfg_file.c_str())) {
-		ex_wstr cfg_path = m_exec_path;
-		ex_path_join(cfg_path, false, L"cfg", NULL);
-
-		ex_mkdirs(cfg_path);
-
-		if (!ex_copy_file(cfg_default.c_str(), m_cfg_file.c_str()))
-			return false;
-}
-
-	return true;
-}
+#include "stdafx.h"
+#include "ts_env.h"
+
+#include
+#ifdef EX_OS_WIN32
+#	include
+//#	include
+#endif
+
+TsEnv g_env;
+
+//=======================================================
+//
+//=======================================================
+
+TsEnv::TsEnv()
+{}
+
+TsEnv::~TsEnv()
+{}
+
+bool TsEnv::init(void)
+{
+	if (!ex_exec_file(m_exec_file))
+		return false;
+
+	m_exec_path = m_exec_file;
+	if (!ex_dirname(m_exec_path))
+		return false;
+
+	m_cfg_file = m_exec_path;
+	ex_path_join(m_cfg_file, false, L"cfg", L"tp-assist.json", NULL);
+
+	m_log_path = m_exec_path;
+	ex_path_join(m_log_path, false, L"log", NULL);
+
+	ex_wstr cfg_default;
+
+#ifdef _DEBUG
+	m_site_path = m_exec_path;
+	ex_path_join(m_site_path, true, L"..", L"..", L"..", L"..", L"client", L"tp_assist_win", L"site", NULL);
+
+//	m_tools_path = m_exec_path;
+//	ex_path_join(m_tools_path, true, L"..", L"..", L"..", L"..", L"client", L"tools", NULL);
+
+	cfg_default = m_exec_path;
+	ex_path_join(cfg_default, true, L"..", L"..", L"..", L"..", L"client", L"tp_assist_win", L"cfg", L"tp-assist.default.json", NULL);
+
+#else
+	m_site_path = m_exec_path;
+	ex_path_join(m_site_path, false, L"site", NULL);
+
+//	m_tools_path = m_exec_path;
+//	ex_path_join(m_tools_path, false, L"tools", NULL);
+
+	cfg_default = m_exec_path;
+	ex_path_join(cfg_default, false, L"tp-assist.default.json", NULL);
+#endif
+
+	m_tools_path = m_exec_path;
+	ex_path_join(m_tools_path, false, L"tools", NULL);
+
+	if (!ex_is_file_exists(m_cfg_file.c_str())) {
+		ex_wstr cfg_path = m_exec_path;
+		ex_path_join(cfg_path, false, L"cfg", NULL);
+
+		ex_mkdirs(cfg_path);
+
+		if (!ex_copy_file(cfg_default.c_str(), m_cfg_file.c_str()))
+			return false;
+}
+
+	return true;
+}
diff --git a/client/tp_assist_win/ts_http_rpc.cpp b/client/tp_assist_win/ts_http_rpc.cpp
index 1fd43c0..0916e89 100644
--- a/client/tp_assist_win/ts_http_rpc.cpp
+++ b/client/tp_assist_win/ts_http_rpc.cpp
@@ -1,1199 +1,1191 @@
-#include "stdafx.h"
-
-#pragma warning(disable:4091)
-
-#include
-#include
-#include
-
-#pragma comment(lib, "Crypt32.lib")
-
-#include
-
-#include "ts_http_rpc.h"
-#include "dlg_main.h"
-#include "ts_ver.h"
-#include "ts_env.h"
-
-/*
-1.
-SecureCRT supports setting the tab title; the command-line argument /N "tab name" is enough.
-Example:
-To launch a new Telnet session, displaying the name "Houston, TX" on the tab, use the following:
-/T /N "Houston, TX" /TELNET 192.168.0.6
-
-2.
-To make repeatedly launched SecureCRT instances open as tabs of one window, use the /T argument:
- SecureCRT.exe /T /N "TP#ssh://192.168.1.3" /SSH2 /L root /PASSWORD 1234 120.26.109.25
-
-3.
-Launching the telnet client:
- putty.exe telnet://administrator@127.0.0.1:52389
-With SecureCRT, you need:
- SecureCRT.exe /T /N "TP#telnet://192.168.1.3" /SCRIPT X:\path\to\startup.vbs /TELNET 127.0.0.1 52389
-where the content of startup.vbs is:
----------file begin---------
-#$language = "VBScript"
-#$interface = "1.0"
-Sub main
-	crt.Screen.Synchronous = True
-	crt.Screen.WaitForString "ogin: "
-	crt.Screen.Send "SESSION-ID" & VbCr
-	crt.Screen.Synchronous = False
-End Sub
----------file end---------
-
-4. To make putty's window title show the real IP, try sending the following command to the server right after the connection succeeds:
- PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@192.168.1.2: \w\a\]$PS1"
-Tested by hand: works on an ubuntu server; unknown whether all Linux distributions support it. SecureCRT simply ignores it.
-*/
-
-//#define RDP_CLIENT_SYSTEM_BUILTIN
-// #define RDP_CLIENT_SYSTEM_ACTIVE_CONTROL
-//#define RDP_CLIENT_FREERDP
-
-
-//#ifdef RDP_CLIENT_SYSTEM_BUILTIN
-
-//connect to console:i:%d
-//compression:i:1
-//bitmapcachepersistenable:i:1
-
-std::string rdp_content = "\
-administrative session:i:%d\n\
-screen mode id:i:%d\n\
-use multimon:i:0\n\
-desktopwidth:i:%d\n\
-desktopheight:i:%d\n\
-session bpp:i:16\n\
-winposstr:s:0,1,%d,%d,%d,%d\n\
-compression:i:1\n\
-bitmapcachepersistenable:i:1\n\
-keyboardhook:i:2\n\
-audiocapturemode:i:0\n\
-videoplaybackmode:i:1\n\
-connection type:i:7\n\
-networkautodetect:i:1\n\
-bandwidthautodetect:i:1\n\
-displayconnectionbar:i:1\n\
-enableworkspacereconnect:i:0\n\
-disable wallpaper:i:1\n\
-allow font smoothing:i:0\n\
-allow desktop composition:i:0\n\
-disable full window drag:i:1\n\
-disable menu anims:i:1\n\
-disable themes:i:1\n\
-disable cursor setting:i:1\n\
-full address:s:%s:%d\n\
-audiomode:i:0\n\
-redirectprinters:i:0\n\
-redirectcomports:i:0\n\
-redirectsmartcards:i:0\n\
-redirectclipboard:i:%d\n\
-redirectposdevices:i:0\n\
-autoreconnection enabled:i:0\n\
-authentication level:i:2\n\
-prompt for credentials:i:0\n\
-negotiate security layer:i:1\n\
-remoteapplicationmode:i:0\n\
-alternate shell:s:\n\
-shell working directory:s:\n\
-gatewayhostname:s:\n\
-gatewayusagemethod:i:4\n\
-gatewaycredentialssource:i:4\n\
-gatewayprofileusagemethod:i:0\n\
-promptcredentialonce:i:0\n\
-gatewaybrokeringtype:i:0\n\
-use redirection server name:i:0\n\
-rdgiskdcproxy:i:0\n\
-kdcproxyname:s:\n\
-drivestoredirect:s:%s\n\
-username:s:%s\n\
-password 51:b:%s\n\
-";
-
-//redirectdirectx:i:0\n\
-//prompt for credentials on client:i:0\n\
-
-//#endif
-
-
-TsHttpRpc g_http_interface;
-TsHttpRpc g_https_interface;
-
-void http_rpc_main_loop(bool is_https) {
-	if (is_https) {
-		if (!g_https_interface.init_https()) {
-			EXLOGE("[ERROR] can not start HTTPS-RPC listener, maybe port %d is already in use.\n", TS_HTTPS_RPC_PORT);
-			return;
-		}
-
-		EXLOGW("======================================================\n");
-		EXLOGW("[rpc] TeleportAssist-HTTPS-RPC ready on localhost:%d\n", TS_HTTPS_RPC_PORT);
-
-		g_https_interface.run();
-
-		EXLOGW("[rpc] HTTPS-Server main loop end.\n");
-	} else {
-		if (!g_http_interface.init_http()) {
-			EXLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT);
-			return;
-		}
-
-		EXLOGW("======================================================\n");
-		EXLOGW("[rpc] TeleportAssist-HTTP-RPC ready on localhost:%d\n", TS_HTTP_RPC_PORT);
-
-		g_http_interface.run();
-
-		EXLOGW("[rpc] HTTP-Server main loop end.\n");
-	}
-}
-
-void http_rpc_stop(bool is_https) {
-	if (is_https)
-		g_https_interface.stop();
-	else
-		g_http_interface.stop();
-}
-
-#define HEXTOI(x) (isdigit(x) ? 
x - '0' : x - 'W') - -int ts_url_decode(const char *src, int src_len, char *dst, int dst_len, int is_form_url_encoded) { - int i, j, a, b; - - for (i = j = 0; i < src_len && j < dst_len - 1; i++, j++) { - if (src[i] == '%') { - if (i < src_len - 2 && isxdigit(*(const unsigned char *)(src + i + 1)) && - isxdigit(*(const unsigned char *)(src + i + 2))) { - a = tolower(*(const unsigned char *)(src + i + 1)); - b = tolower(*(const unsigned char *)(src + i + 2)); - dst[j] = (char)((HEXTOI(a) << 4) | HEXTOI(b)); - i += 2; - } else { - return -1; - } - } else if (is_form_url_encoded && src[i] == '+') { - dst[j] = ' '; - } else { - dst[j] = src[i]; - } - } - - dst[j] = '\0'; /* Null-terminate the destination */ - - return i >= src_len ? j : -1; -} - -bool calc_psw51b(const char* password, std::string& ret) { - DATA_BLOB DataIn; - DATA_BLOB DataOut; - - ex_wstr w_pswd; - ex_astr2wstr(password, w_pswd, EX_CODEPAGE_ACP); - - DataIn.cbData = w_pswd.length() * sizeof(wchar_t); - DataIn.pbData = (BYTE*)w_pswd.c_str(); - - - if (!CryptProtectData(&DataIn, L"psw", nullptr, nullptr, nullptr, 0, &DataOut)) - return false; - - char szRet[5] = { 0 }; - for (DWORD i = 0; i < DataOut.cbData; ++i) { - sprintf_s(szRet, 5, "%02X", DataOut.pbData[i]); - ret += szRet; - } - - LocalFree(DataOut.pbData); - return true; -} - -bool isDegital(std::string str) { - for (int i = 0; i < str.size(); i++) { - if (str.at(i) == '-' && str.size() > 1) // пָܳ - continue; - if (str.at(i) > '9' || str.at(i) < '0') - return false; - } - return true; -} - -std::string strtolower(std::string str) { - for (int i = 0; i < str.size(); i++) - { - str[i] = tolower(str[i]); - } - return str; -} - -void SplitString(const std::string& s, std::vector& v, const std::string& c) -{ - std::string::size_type pos1, pos2; - pos2 = s.find(c); - pos1 = 0; - while (std::string::npos != pos2) - { - v.push_back(s.substr(pos1, pos2 - pos1)); - - pos1 = pos2 + c.size(); - pos2 = s.find(c, pos1); - } - if (pos1 != s.length()) - v.push_back(s.substr(pos1)); -} - -TsHttpRpc::TsHttpRpc() { - m_stop = false; - mg_mgr_init(&m_mg_mgr, nullptr); -} - -TsHttpRpc::~TsHttpRpc() { - mg_mgr_free(&m_mg_mgr); -} - -bool TsHttpRpc::init_http() { - struct mg_connection* nc = nullptr; - - char addr[128] = { 0 }; - ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTP_RPC_PORT); - - nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); - if (!nc) { - EXLOGE("[rpc] TsHttpRpc::init localhost:%d\n", TS_HTTP_RPC_PORT); - return false; - } - nc->user_data = this; - - mg_set_protocol_http_websocket(nc); - - return _on_init(); -} - -bool TsHttpRpc::init_https() { - ex_wstr file_ssl_cert = g_env.m_exec_path; - ex_path_join(file_ssl_cert, true, L"cfg", L"localhost.pem", NULL); - ex_wstr file_ssl_key = g_env.m_exec_path; - ex_path_join(file_ssl_key, true, L"cfg", L"localhost.key", NULL); - ex_astr _ssl_cert; - ex_wstr2astr(file_ssl_cert, _ssl_cert); - ex_astr _ssl_key; - ex_wstr2astr(file_ssl_key, _ssl_key); - - const char *err = NULL; - struct mg_bind_opts bind_opts; - memset(&bind_opts, 0, sizeof(bind_opts)); - bind_opts.ssl_cert = _ssl_cert.c_str(); - bind_opts.ssl_key = _ssl_key.c_str(); - bind_opts.error_string = &err; - - - char addr[128] = { 0 }; - ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTPS_RPC_PORT); - //ex_strformat(addr, 128, "%d", TS_HTTPS_RPC_PORT); - - struct mg_connection* nc = nullptr; - nc = mg_bind_opt(&m_mg_mgr, addr, _mg_event_handler, bind_opts); - if (!nc) { - EXLOGE("[rpc] TsHttpRpc::init localhost:%d\n", TS_HTTPS_RPC_PORT); - return false; - } - 
nc->user_data = this; - - mg_set_protocol_http_websocket(nc); - - return _on_init(); -} - -bool TsHttpRpc::_on_init() { - char file_name[MAX_PATH] = { 0 }; - if (!GetModuleFileNameA(nullptr, file_name, MAX_PATH)) - return false; - - int len = strlen(file_name); - - if (file_name[len] == '\\') - file_name[len] = '\0'; - - char* match = strrchr(file_name, '\\'); - if (match) - *match = '\0'; - - m_content_type_map[".js"] = "application/javascript"; - m_content_type_map[".png"] = "image/png"; - m_content_type_map[".jpeg"] = "image/jpeg"; - m_content_type_map[".jpg"] = "image/jpeg"; - m_content_type_map[".gif"] = "image/gif"; - m_content_type_map[".ico"] = "image/x-icon"; - m_content_type_map[".json"] = "image/json"; - m_content_type_map[".html"] = "text/html"; - m_content_type_map[".css"] = "text/css"; - m_content_type_map[".tif"] = "image/tiff"; - m_content_type_map[".tiff"] = "image/tiff"; - m_content_type_map[".svg"] = "text/html"; - - return true; -} - -void TsHttpRpc::run(void) { - while (!m_stop) { - mg_mgr_poll(&m_mg_mgr, 500); - } -} - -void TsHttpRpc::stop(void) { - m_stop = true; -} - -void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_data) { - struct http_message *hm = (struct http_message*)ev_data; - - TsHttpRpc* _this = (TsHttpRpc*)nc->user_data; - if (!_this) { - EXLOGE("[ERROR] invalid http request.\n"); - return; - } - - switch (ev) { - case MG_EV_HTTP_REQUEST: - { - ex_astr uri; - ex_chars _uri; - _uri.resize(hm->uri.len + 1); - memset(&_uri[0], 0, hm->uri.len + 1); - memcpy(&_uri[0], hm->uri.p, hm->uri.len); - uri = &_uri[0]; - -#ifdef EX_DEBUG - char* dbg_method = nullptr; - if (hm->method.len == 3 && 0 == memcmp(hm->method.p, "GET", hm->method.len)) - dbg_method = "GET"; - else if (hm->method.len == 4 && 0 == memcmp(hm->method.p, "POST", hm->method.len)) - dbg_method = "POST"; - else - dbg_method = "UNSUPPORTED-HTTP-METHOD"; - - EXLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str()); -#endif - ex_astr ret_buf; - bool b_is_index = false; - - if (uri == "/") { - ex_wstr page = L"Teleport\n\n
    Teleport Assistant is working properly!
    "; - ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); - - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); - nc->flags |= MG_F_SEND_AND_CLOSE; - return; - } - - if (uri == "/config") { - uri = "/index.html"; - b_is_index = true; - } - - ex_astr temp; - int offset = uri.find("/", 1); - if (offset > 0) { - temp = uri.substr(1, offset - 1); - - if (temp == "api") { - ex_astr method; - ex_astr json_param; - int rv = _this->_parse_request(hm, method, json_param); - if (0 != rv) { - EXLOGE("[ERROR] http-rpc got invalid request.\n"); - _this->_create_json_ret(ret_buf, rv); - } else { - _this->_process_js_request(method, json_param, ret_buf); - } - - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: application/json\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); - nc->flags |= MG_F_SEND_AND_CLOSE; - return; - } - } - - - ex_astr file_suffix; - offset = uri.rfind("."); - if (offset > 0) { - file_suffix = uri.substr(offset, uri.length()); - } - - ex_wstr2astr(g_env.m_site_path, temp); - ex_astr index_path = temp + uri; - - - FILE* file = ex_fopen(index_path.c_str(), "rb"); - if (file) { - unsigned long file_size = 0; - char* buf = nullptr; - size_t ret = 0; - - fseek(file, 0, SEEK_END); - file_size = ftell(file); - buf = new char[file_size]; - memset(buf, 0, file_size); - fseek(file, 0, SEEK_SET); - ret = fread(buf, 1, file_size, file); - fclose(file); - - ex_astr content_type = _this->get_content_type(file_suffix); - - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: %s\r\n\r\n", file_size, content_type.c_str()); - mg_send(nc, buf, (int)file_size); - delete[]buf; - nc->flags |= MG_F_SEND_AND_CLOSE; - return; - } else if (b_is_index) { - ex_wstr page = L"404 Not Found

    404 Not Found
    Teleport Assistant configuration page not found.
    "; - ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); - - mg_printf(nc, "HTTP/1.0 404 File Not Found\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); - nc->flags |= MG_F_SEND_AND_CLOSE; - return; - } - - } - break; - default: - break; - } -} - -int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, ex_astr& func_args) { - if (!req) - return TPE_FAILED; - - bool is_get = true; - if (req->method.len == 3 && 0 == memcmp(req->method.p, "GET", req->method.len)) - is_get = true; - else if (req->method.len == 4 && 0 == memcmp(req->method.p, "POST", req->method.len)) - is_get = false; - else - return TPE_HTTP_METHOD; - - ex_astrs strs; - - size_t pos_start = 1; // һֽڣһ '/' - - size_t i = 0; - for (i = pos_start; i < req->uri.len; ++i) { - if (req->uri.p[i] == '/') { - if (i - pos_start > 0) { - ex_astr tmp_uri; - tmp_uri.assign(req->uri.p + pos_start, i - pos_start); - strs.push_back(tmp_uri); - } - pos_start = i + 1; // ǰҵķָ - } - } - if (pos_start < req->uri.len) { - ex_astr tmp_uri; - tmp_uri.assign(req->uri.p + pos_start, req->uri.len - pos_start); - strs.push_back(tmp_uri); - } - - if (strs.empty() || strs[0] != "api") - return TPE_PARAM; - - if (is_get) { - if (2 == strs.size()) { - func_cmd = strs[1]; - } else if (3 == strs.size()) { - func_cmd = strs[1]; - func_args = strs[2]; - } else { - return TPE_PARAM; - } - } else { - if (2 == strs.size()) { - func_cmd = strs[1]; - } else { - return TPE_PARAM; - } - - if (req->body.len > 0) { - func_args.assign(req->body.p, req->body.len); - } - } - - if (func_args.length() > 0) { - // url-decode - int len = func_args.length() * 2; - ex_chars sztmp; - sztmp.resize(len); - memset(&sztmp[0], 0, len); - if (-1 == ts_url_decode(func_args.c_str(), func_args.length(), &sztmp[0], len, 0)) - return TPE_HTTP_URL_ENCODE; - - func_args = &sztmp[0]; - } - - EXLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str()); - - return TPE_OK; -} - -void TsHttpRpc::_process_js_request(const ex_astr& func_cmd, const ex_astr& func_args, ex_astr& buf) { - if (func_cmd == "get_version") { - _rpc_func_get_version(func_args, buf); - } else if (func_cmd == "run") { - _rpc_func_run_client(func_args, buf); - } else if (func_cmd == "rdp_play") { - _rpc_func_rdp_play(func_args, buf); - } else if (func_cmd == "get_config") { - _rpc_func_get_config(func_args, buf); - } else if (func_cmd == "set_config") { - _rpc_func_set_config(func_args, buf); - } else if (func_cmd == "file_action") { - _rpc_func_file_action(func_args, buf); - } else { - EXLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str()); - _create_json_ret(buf, TPE_UNKNOWN_CMD); - } -} - -void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode) { - // أ {"code":123} - - Json::FastWriter jr_writer; - Json::Value jr_root; - - jr_root["code"] = errcode; - buf = jr_writer.write(jr_root); -} - -void TsHttpRpc::_create_json_ret(ex_astr& buf, Json::Value& jr_root) { - Json::FastWriter jr_writer; - buf = jr_writer.write(jr_root); -} - -void TsHttpRpc::_rpc_func_url_protocol(const ex_astr& args, ex_astr& buf) -{ - //urlprotocol÷ʽ - // url-decode - std::string func_args = args; - if (func_args.length() > 0) - { - int len = func_args.length() * 2; - ex_chars sztmp; - sztmp.resize(len); - memset(&sztmp[0], 0, len); - if (-1 == ts_url_decode(func_args.c_str(), func_args.length(), &sztmp[0], len, 0)) - return ; - - func_args = &sztmp[0]; - } - EXLOGD(("%s\n"), func_args.c_str()); - //ιteleport://{}/,ֻ - 
std::string urlproto_appname = TP_URLPROTO_APP_NAME; - urlproto_appname += "://{"; - func_args.erase(0, urlproto_appname.length());//ȥһURLPROTO_APP_NAMEԼ://ַ - int pos = func_args.length() - 1; - if (func_args.substr(pos, 1) == "/") - func_args.erase(pos - 1, 2);//ȥһ}/ַ - else - func_args.erase(pos, 1); - - //Сieʱԭjsonṹе"ȥҪ¸ʽΪjsonʽ - if (func_args.find("\"", 0) == std::string::npos) { - std::vector strv; - SplitString(func_args, strv, ","); - func_args = ""; - for (std::vector::size_type i = 0; i < strv.size(); i++) { - std::vector strv1; - SplitString(strv[i], strv1, ":"); - strv1[0] = "\"" + strv1[0] + "\""; - if (!isDegital(strv1[1]) && strtolower(strv1[1]) != "true" && strtolower(strv1[1]) != "false") - strv1[1] = "\"" + strv1[1] + "\""; - - strv[i] = strv1[0] + ":" + strv1[1]; - if (i == 0) - func_args = strv[i]; - else - func_args += "," + strv[i]; - } - } - func_args = "{" + func_args + "}"; - EXLOGD(("%s\n"), func_args.c_str()); - //TsHttpRpc_rpc_func_run_clientͻ - _rpc_func_run_client(func_args, buf); -} - -void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) { - // Σ{"ip":"192.168.5.11","port":22,"uname":"root","uauth":"abcdefg","authmode":1,"protocol":2} - // authmode: 1=password, 2=private-key - // protocol: 1=rdp, 2=ssh - // SSHأ {"code":0, "data":{"sid":"0123abcde"}} - // RDPأ {"code":0, "data":{"sid":"0123abcde0A"}} - - Json::Reader jreader; - Json::Value jsRoot; - - if (!jreader.parse(func_args.c_str(), jsRoot)) { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } - if (!jsRoot.isObject()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - // жϲǷȷ - if (!jsRoot["teleport_ip"].isString() - || !jsRoot["teleport_port"].isNumeric() || !jsRoot["remote_host_ip"].isString() - || !jsRoot["session_id"].isString() || !jsRoot["protocol_type"].isNumeric() || !jsRoot["protocol_sub_type"].isNumeric() - || !jsRoot["protocol_flag"].isNumeric() - ) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - int pro_type = jsRoot["protocol_type"].asUInt(); - int pro_sub = jsRoot["protocol_sub_type"].asInt(); - ex_u32 protocol_flag = jsRoot["protocol_flag"].asUInt(); - - ex_astr teleport_ip = jsRoot["teleport_ip"].asCString(); - int teleport_port = jsRoot["teleport_port"].asUInt(); - - ex_astr real_host_ip = jsRoot["remote_host_ip"].asCString(); - ex_astr sid = jsRoot["session_id"].asCString(); - - ex_wstr w_exe_path; - WCHAR w_szCommandLine[MAX_PATH] = { 0 }; - - - ex_wstr w_sid; - ex_astr2wstr(sid, w_sid); - ex_wstr w_teleport_ip; - ex_astr2wstr(teleport_ip, w_teleport_ip); - ex_wstr w_real_host_ip; - ex_astr2wstr(real_host_ip, w_real_host_ip); - WCHAR w_port[32] = { 0 }; - swprintf_s(w_port, _T("%d"), teleport_port); - - ex_wstr tmp_rdp_file; // for .rdp file - - if (pro_type == TP_PROTOCOL_TYPE_RDP) { - //============================================== - // RDP - //============================================== - - bool flag_clipboard = ((protocol_flag & TP_FLAG_RDP_CLIPBOARD) == TP_FLAG_RDP_CLIPBOARD); - bool flag_disk = ((protocol_flag & TP_FLAG_RDP_DISK) == TP_FLAG_RDP_DISK); - bool flag_console = ((protocol_flag & TP_FLAG_RDP_CONSOLE) == TP_FLAG_RDP_CONSOLE); - - int rdp_w = 800; - int rdp_h = 640; - bool rdp_console = false; - - if (!jsRoot["rdp_width"].isNull()) { - if (jsRoot["rdp_width"].isNumeric()) { - rdp_w = jsRoot["rdp_width"].asUInt(); - } else { - _create_json_ret(buf, TPE_PARAM); - return; - } - } - - if (!jsRoot["rdp_height"].isNull()) { - if (jsRoot["rdp_height"].isNumeric()) { - rdp_h = jsRoot["rdp_height"].asUInt(); - } else { - 
_create_json_ret(buf, TPE_PARAM); - return; - } - } - - if (!jsRoot["rdp_console"].isNull()) { - if (jsRoot["rdp_console"].isBool()) { - rdp_console = jsRoot["rdp_console"].asBool(); - } else { - _create_json_ret(buf, TPE_PARAM); - return; - } - } - - if (!flag_console) - rdp_console = false; - - - int split_pos = sid.length() - 2; - ex_astr real_sid = sid.substr(0, split_pos); - ex_astr str_pwd_len = sid.substr(split_pos, sid.length()); - int n_pwd_len = strtol(str_pwd_len.c_str(), nullptr, 16); - n_pwd_len -= real_sid.length(); - n_pwd_len -= 2; - char szPwd[256] = { 0 }; - for (int i = 0; i < n_pwd_len; i++) { - szPwd[i] = '*'; - } - - ex_astr2wstr(real_sid, w_sid); - - w_exe_path = _T("\""); - w_exe_path += g_cfg.rdp_app + _T("\" "); - - ex_wstr rdp_name = g_cfg.rdp_name; - if (rdp_name == L"mstsc") { - w_exe_path += g_cfg.rdp_cmdline; - - int width = 0; - int higth = 0; - int cx = 0; - int cy = 0; - - int display = 1; - int iWidth = GetSystemMetrics(SM_CXSCREEN); - int iHeight = GetSystemMetrics(SM_CYSCREEN); - - if (rdp_w == 0 || rdp_h == 0) { - //ȫ - width = iWidth; - higth = iHeight; - display = 2; - } else { - width = rdp_w; - higth = rdp_h; - display = 1; - } - - cx = (iWidth - width) / 2; - cy = (iHeight - higth) / 2; - if (cx < 0) { - cx = 0; - } - if (cy < 0) { - cy = 0; - } - - // int console_mode = 0; - // if (rdp_console) - // console_mode = 1; - - std::string psw51b; - if (!calc_psw51b(szPwd, psw51b)) { - EXLOGE("calc password failed.\n"); - _create_json_ret(buf, TPE_FAILED); - return; - } - - real_sid = "01" + real_sid; - - char sz_rdp_file_content[4096] = { 0 }; - sprintf_s(sz_rdp_file_content, rdp_content.c_str() - , (flag_console && rdp_console) ? 1 : 0 - , display, width, higth - , cx, cy, cx + width + 100, cy + higth + 100 - , flag_clipboard ? 1 : 0 - , teleport_ip.c_str(), teleport_port - , flag_disk ? "*" : "" - , real_sid.c_str() - , psw51b.c_str() - ); - - char sz_file_name[MAX_PATH] = { 0 }; - char temp_path[MAX_PATH] = { 0 }; - DWORD ret = GetTempPathA(MAX_PATH, temp_path); - if (ret <= 0) { - EXLOGE("fopen failed (%d).\n", GetLastError()); - _create_json_ret(buf, TPE_FAILED); - return; - } - - ex_astr temp_host_ip = real_host_ip; - ex_replace_all(temp_host_ip, ".", "-"); - - sprintf_s(sz_file_name, ("%s%s.rdp"), temp_path, temp_host_ip.c_str()); - - FILE* f = NULL; - if (fopen_s(&f, sz_file_name, "wt") != 0) { - EXLOGE("fopen failed (%d).\n", GetLastError()); - _create_json_ret(buf, TPE_OPENFILE); - return; - } - // Write a string into the file. 
- fwrite(sz_rdp_file_content, strlen(sz_rdp_file_content), 1, f); - fclose(f); - ex_astr2wstr(sz_file_name, tmp_rdp_file); - - // 滻 - ex_replace_all(w_exe_path, _T("{tmp_rdp_file}"), tmp_rdp_file); - } else if (g_cfg.rdp_name == L"freerdp") { - w_exe_path += L"{size} {console} {clipboard} {drives} "; - w_exe_path += g_cfg.rdp_cmdline; - - ex_wstr w_screen; - - if (rdp_w == 0 || rdp_h == 0) { - //ȫ - w_screen = _T("/f"); - } else { - char sz_size[64] = { 0 }; - ex_strformat(sz_size, 63, "/size:%dx%d", rdp_w, rdp_h); - ex_astr2wstr(sz_size, w_screen); - } - - // wchar_t* w_console = NULL; - // - // if (flag_console && rdp_console) - // { - // w_console = L"/admin"; - // } - // else - // { - // w_console = L""; - // } - - ex_wstr w_password; - ex_astr2wstr(szPwd, w_password); - w_exe_path += L" /p:"; - w_exe_path += w_password; - - w_sid = L"02" + w_sid; - - w_exe_path += L" /gdi:sw"; // ʹȾgdi:hwʹӲ٣ǻֺܶڿ飨¼طʱģ - w_exe_path += L" -grab-keyboard"; // [new style] ֹFreeRDPʧȥؼӦСһFreeRDPڣòƲã - - // 滻 - ex_replace_all(w_exe_path, _T("{size}"), w_screen); - - if (flag_console && rdp_console) - ex_replace_all(w_exe_path, _T("{console}"), L"/admin"); - else - ex_replace_all(w_exe_path, _T("{console}"), L""); - - //ex_replace_all(w_exe_path, _T("{clipboard}"), L"+clipboard"); - - if (flag_clipboard) - ex_replace_all(w_exe_path, _T("{clipboard}"), L"/clipboard"); - else - ex_replace_all(w_exe_path, _T("{clipboard}"), L"-clipboard"); - - if (flag_disk) - ex_replace_all(w_exe_path, _T("{drives}"), L"/drives"); - else - ex_replace_all(w_exe_path, _T("{drives}"), L"-drives"); - } else { - _create_json_ret(buf, TPE_FAILED); - return; - } - } else if (pro_type == TP_PROTOCOL_TYPE_SSH) { - //============================================== - // SSH - //============================================== - - if (pro_sub == TP_PROTOCOL_TYPE_SSH_SHELL) { - w_exe_path = _T("\""); - w_exe_path += g_cfg.ssh_app + _T("\" "); - w_exe_path += g_cfg.ssh_cmdline; - } else { - w_exe_path = _T("\""); - w_exe_path += g_cfg.scp_app + _T("\" "); - w_exe_path += g_cfg.scp_cmdline; - } - } else if (pro_type == TP_PROTOCOL_TYPE_TELNET) { - //============================================== - // TELNET - //============================================== - w_exe_path = _T("\""); - w_exe_path += g_cfg.telnet_app + _T("\" "); - w_exe_path += g_cfg.telnet_cmdline; - } - - ex_replace_all(w_exe_path, _T("{host_port}"), w_port); - ex_replace_all(w_exe_path, _T("{host_ip}"), w_teleport_ip.c_str()); - ex_replace_all(w_exe_path, _T("{user_name}"), w_sid.c_str()); - ex_replace_all(w_exe_path, _T("{real_ip}"), w_real_host_ip.c_str()); - ex_replace_all(w_exe_path, _T("{assist_tools_path}"), g_env.m_tools_path.c_str()); - - - STARTUPINFO si; - PROCESS_INFORMATION pi; - - ZeroMemory(&si, sizeof(si)); - si.cb = sizeof(si); - ZeroMemory(&pi, sizeof(pi)); - - Json::Value root_ret; - ex_astr utf8_path; - ex_wstr2astr(w_exe_path, utf8_path, EX_CODEPAGE_UTF8); - root_ret["path"] = utf8_path; - - if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) { - EXLOGE(_T("CreateProcess() failed. 
Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); - root_ret["code"] = TPE_START_CLIENT; - _create_json_ret(buf, root_ret); - return; - } - - root_ret["code"] = TPE_OK; - _create_json_ret(buf, root_ret); -} - -void TsHttpRpc::_rpc_func_rdp_play(const ex_astr& func_args, ex_astr& buf) { - Json::Reader jreader; - Json::Value jsRoot; - - if (!jreader.parse(func_args.c_str(), jsRoot)) { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } - - // жϲǷȷ - if (!jsRoot["rid"].isInt() - || !jsRoot["web"].isString() - || !jsRoot["sid"].isString() - || !jsRoot["user"].isString() - || !jsRoot["acc"].isString() - || !jsRoot["host"].isString() - || !jsRoot["start"].isString() - ) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - int rid = jsRoot["rid"].asInt(); - ex_astr a_url_base = jsRoot["web"].asCString(); - ex_astr a_sid = jsRoot["sid"].asCString(); - ex_astr a_user = jsRoot["user"].asCString(); - ex_astr a_acc = jsRoot["acc"].asCString(); - ex_astr a_host = jsRoot["host"].asCString(); - ex_astr a_start = jsRoot["start"].asCString(); - - char cmd_args[1024] = { 0 }; - ex_strformat(cmd_args, 1023, "%d \"%s\" \"%09d-%s-%s-%s-%s\"", rid, a_sid.c_str(), rid, a_user.c_str(), a_acc.c_str(), a_host.c_str(), a_start.c_str()); - - // TODO: ϲӦǰתΪIPIJӦý͸ɲԼȥ - // ڸFreeRDPIJʱΪ˴ӷļʹMongoose⣬⣨ò첽ѯDNS⣩ - // ʱֽIPת - { - unsigned int port_i = 0; - struct mg_str scheme, query, fragment, user_info, host, path; - - if (mg_parse_uri(mg_mk_str(a_url_base.c_str()), &scheme, &user_info, &host, &port_i, &path, &query, &fragment) != 0) { - EXLOGE(_T("parse url failed.\n")); - Json::Value root_ret; - root_ret["code"] = TPE_PARAM; - _create_json_ret(buf, root_ret); - return; - } - - ex_astr _scheme; - _scheme.assign(scheme.p, scheme.len); - - // hostתΪIP - ex_astr str_tp_host; - str_tp_host.assign(host.p, host.len); - struct hostent *tp_host = gethostbyname(str_tp_host.c_str()); - if (NULL == tp_host) { - EXLOGE(_T("resolve host name failed.\n")); - Json::Value root_ret; - root_ret["code"] = TPE_PARAM; - _create_json_ret(buf, root_ret); - return; - } - - int i = 0; - char* _ip = NULL; - if (tp_host->h_addrtype == AF_INET) { - struct in_addr addr; - while (tp_host->h_addr_list[i] != 0) { - addr.s_addr = *(u_long *)tp_host->h_addr_list[i++]; - _ip = inet_ntoa(addr); - break; - } - } - - if (NULL == _ip) { - EXLOGE(_T("resolve host name failed.\n")); - Json::Value root_ret; - root_ret["code"] = TPE_PARAM; - _create_json_ret(buf, root_ret); - return; - } - - char _url_base[256]; - ex_strformat(_url_base, 255, "%s://%s:%d", _scheme.c_str(), _ip, port_i); - a_url_base = _url_base; - } - - ex_wstr w_url_base; - ex_astr2wstr(a_url_base, w_url_base); - ex_wstr w_cmd_args; - ex_astr2wstr(cmd_args, w_cmd_args); - - ex_wstr w_exe_path; - w_exe_path = _T("\""); - w_exe_path += g_env.m_tools_path + _T("\\tprdp\\tprdp-replay.exe\""); - w_exe_path += _T(" \""); - w_exe_path += w_url_base; - w_exe_path += _T("\" "); - w_exe_path += w_cmd_args; - - Json::Value root_ret; - ex_astr utf8_path; - ex_wstr2astr(w_exe_path, utf8_path, EX_CODEPAGE_UTF8); - root_ret["cmdline"] = utf8_path; - - EXLOGD(w_exe_path.c_str()); - - STARTUPINFO si; - PROCESS_INFORMATION pi; - - ZeroMemory(&si, sizeof(si)); - si.cb = sizeof(si); - ZeroMemory(&pi, sizeof(pi)); - if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) { - EXLOGE(_T("CreateProcess() failed. 
Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); - root_ret["code"] = TPE_START_CLIENT; - _create_json_ret(buf, root_ret); - return; - } - - root_ret["code"] = TPE_OK; - _create_json_ret(buf, root_ret); - return; -} - -void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) { - Json::Value jr_root; - jr_root["code"] = 0; - jr_root["data"] = g_cfg.get_root(); - _create_json_ret(buf, jr_root); -} - -void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) { - Json::Reader jreader; - Json::Value jsRoot; - if (!jreader.parse(func_args.c_str(), jsRoot)) { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } - - if (!g_cfg.save(func_args)) - _create_json_ret(buf, TPE_FAILED); - else - _create_json_ret(buf, TPE_OK); -} - -void TsHttpRpc::_rpc_func_file_action(const ex_astr& func_args, ex_astr& buf) { - - Json::Reader jreader; - Json::Value jsRoot; - - if (!jreader.parse(func_args.c_str(), jsRoot)) { - _create_json_ret(buf, TPE_JSON_FORMAT); - return; - } - // жϲǷȷ - if (!jsRoot["action"].isNumeric()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - int action = jsRoot["action"].asUInt(); - - HWND hParent = GetForegroundWindow(); - if (NULL == hParent) - hParent = g_hDlgMain; - - BOOL ret = FALSE; - wchar_t wszReturnPath[MAX_PATH] = _T(""); - - if (action == 1 || action == 2) { - OPENFILENAME ofn; - ex_wstr wsDefaultName; - ex_wstr wsDefaultPath; - StringCchCopy(wszReturnPath, MAX_PATH, wsDefaultName.c_str()); - - ZeroMemory(&ofn, sizeof(ofn)); - - ofn.lStructSize = sizeof(ofn); - ofn.lpstrTitle = _T("ѡļ"); - ofn.hwndOwner = hParent; - ofn.lpstrFilter = _T("ִг (*.exe)\0*.exe\0"); - ofn.lpstrFile = wszReturnPath; - ofn.nMaxFile = MAX_PATH; - ofn.lpstrInitialDir = wsDefaultPath.c_str(); - ofn.Flags = OFN_EXPLORER | OFN_PATHMUSTEXIST; - - if (action == 1) { - ofn.Flags |= OFN_FILEMUSTEXIST; - ret = GetOpenFileName(&ofn); - } else { - ofn.Flags |= OFN_OVERWRITEPROMPT; - ret = GetSaveFileName(&ofn); - } - } else if (action == 3) { - BROWSEINFO bi; - ZeroMemory(&bi, sizeof(BROWSEINFO)); - bi.hwndOwner = NULL; - bi.pidlRoot = NULL; - bi.pszDisplayName = wszReturnPath; //˲ΪNULLʾԻ - bi.lpszTitle = _T("ѡĿ¼"); - bi.ulFlags = BIF_RETURNONLYFSDIRS; - bi.lpfn = NULL; - bi.iImage = 0; //ʼڲbi - LPITEMIDLIST pIDList = SHBrowseForFolder(&bi);//ʾѡԻ - if (pIDList) { - ret = true; - SHGetPathFromIDList(pIDList, wszReturnPath); - } else { - ret = false; - } - } else if (action == 4) { - ex_wstr wsDefaultName; - ex_wstr wsDefaultPath; - - if (wsDefaultPath.length() == 0) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - ex_wstr::size_type pos = 0; - - while (ex_wstr::npos != (pos = wsDefaultPath.find(L"/", pos))) { - wsDefaultPath.replace(pos, 1, L"\\"); - pos += 1; - } - - ex_wstr wArg = L"/select, \""; - wArg += wsDefaultPath; - wArg += L"\""; - if ((int)ShellExecute(hParent, _T("open"), _T("explorer"), wArg.c_str(), NULL, SW_SHOW) > 32) - ret = true; - else - ret = false; - } - - if (ret) { - if (action == 1 || action == 2 || action == 3) { - ex_astr utf8_path; - ex_wstr2astr(wszReturnPath, utf8_path, EX_CODEPAGE_UTF8); - Json::Value root; - root["code"] = TPE_OK; - root["path"] = utf8_path; - _create_json_ret(buf, root); - - return; - } else { - _create_json_ret(buf, TPE_OK); - return; - } - } else { - _create_json_ret(buf, TPE_DATA); - return; - } -} - -void TsHttpRpc::_rpc_func_get_version(const ex_astr& func_args, ex_astr& buf) { - Json::Value root_ret; - ex_wstr w_version = TP_ASSIST_VER; - ex_astr version; - ex_wstr2astr(w_version, version, 
EX_CODEPAGE_UTF8);
-	root_ret["version"] = version;
-	root_ret["code"] = TPE_OK;
-	_create_json_ret(buf, root_ret);
-	return;
-}
+#include "stdafx.h"
+
+#pragma warning(disable:4091)
+
+#include
+#include
+#include
+
+#pragma comment(lib, "Crypt32.lib")
+
+#include
+
+#include "ts_http_rpc.h"
+#include "dlg_main.h"
+#include "ts_ver.h"
+#include "ts_env.h"
+
+/*
+1.
+SecureCRT supports setting the tab title; the command-line argument /N "tab name" is enough.
+Example:
+To launch a new Telnet session, displaying the name "Houston, TX" on the tab, use the following:
+/T /N "Houston, TX" /TELNET 192.168.0.6
+
+2.
+To make repeatedly launched SecureCRT instances open as tabs of one window, use the /T argument:
+ SecureCRT.exe /T /N "TP#ssh://192.168.1.3" /SSH2 /L root /PASSWORD 1234 120.26.109.25
+
+3.
+Launching the telnet client:
+ putty.exe telnet://administrator@127.0.0.1:52389
+With SecureCRT, you need:
+ SecureCRT.exe /T /N "TP#telnet://192.168.1.3" /SCRIPT X:\path\to\startup.vbs /TELNET 127.0.0.1 52389
+where the content of startup.vbs is:
+---------file begin---------
+#$language = "VBScript"
+#$interface = "1.0"
+Sub main
+	crt.Screen.Synchronous = True
+	crt.Screen.WaitForString "ogin: "
+	crt.Screen.Send "SESSION-ID" & VbCr
+	crt.Screen.Synchronous = False
+End Sub
+---------file end---------
+
+4. To make putty's window title show the real IP, try sending the following command to the server right after the connection succeeds:
+ PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@192.168.1.2: \w\a\]$PS1"
+Tested by hand: works on an ubuntu server; unknown whether all Linux distributions support it. SecureCRT simply ignores it.
+*/
+
+//#define RDP_CLIENT_SYSTEM_BUILTIN
+// #define RDP_CLIENT_SYSTEM_ACTIVE_CONTROL
+//#define RDP_CLIENT_FREERDP
+
+
+//#ifdef RDP_CLIENT_SYSTEM_BUILTIN
+
+//connect to console:i:%d
+//compression:i:1
+//bitmapcachepersistenable:i:1
+
+std::string rdp_content = "\
+administrative session:i:%d\n\
+screen mode id:i:%d\n\
+use multimon:i:0\n\
+desktopwidth:i:%d\n\
+desktopheight:i:%d\n\
+session bpp:i:16\n\
+winposstr:s:0,1,%d,%d,%d,%d\n\
+bitmapcachepersistenable:i:1\n\
+bitmapcachesize:i:32000\n\
+compression:i:1\n\
+keyboardhook:i:2\n\
+audiocapturemode:i:0\n\
+videoplaybackmode:i:1\n\
+connection type:i:7\n\
+networkautodetect:i:1\n\
+bandwidthautodetect:i:1\n\
+disableclipboardredirection:i:0\n\
+displayconnectionbar:i:1\n\
+enableworkspacereconnect:i:0\n\
+disable wallpaper:i:1\n\
+allow font smoothing:i:0\n\
+allow desktop composition:i:0\n\
+disable full window drag:i:1\n\
+disable menu anims:i:1\n\
+disable themes:i:1\n\
+disable cursor setting:i:1\n\
+full address:s:%s:%d\n\
+audiomode:i:0\n\
+redirectprinters:i:0\n\
+redirectcomports:i:0\n\
+redirectsmartcards:i:0\n\
+redirectclipboard:i:%d\n\
+redirectposdevices:i:0\n\
+autoreconnection enabled:i:0\n\
+authentication level:i:2\n\
+prompt for credentials:i:0\n\
+negotiate security layer:i:1\n\
+remoteapplicationmode:i:0\n\
+alternate shell:s:\n\
+shell working directory:s:\n\
+gatewayhostname:s:\n\
+gatewayusagemethod:i:4\n\
+gatewaycredentialssource:i:4\n\
+gatewayprofileusagemethod:i:0\n\
+promptcredentialonce:i:0\n\
+gatewaybrokeringtype:i:0\n\
+use redirection server name:i:0\n\
+rdgiskdcproxy:i:0\n\
+kdcproxyname:s:\n\
+drivestoredirect:s:%s\n\
+username:s:%s\n\
+password 51:b:%s\n\
+";
+
+// https://www.donkz.nl/overview-rdp-file-settings/
+//
+// authentication level:i:2\n
+//
+//
+// negotiate security layer:i:1\n
+// 0 = negotiation is not enabled and the session is started by using Secure Sockets Layer (SSL).
+// 1 = negotiation is enabled and the session is started by using x.224 encryption. 
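+//
+// Editor's note: the value meanings below come from Microsoft's published .rdp
+// settings reference, not from the original comment:
+// authentication level:i
+//   0 = if server authentication fails, connect without warning
+//   1 = if server authentication fails, do not connect
+//   2 = if server authentication fails, show a warning and let the user choose
+//   3 = no authentication requirement is specified
+//
+// For illustration only (hypothetical values): once the sprintf_s() call in
+// _rpc_func_run_client() below has filled the placeholders, the generated .rdp
+// file contains lines such as:
+//   full address:s:192.0.2.10:52089
+//   redirectclipboard:i:1
+//   drivestoredirect:s:*
+//   username:s:01<session-id>
+//   password 51:b:<hex-encoded CryptProtectData blob from calc_psw51b()>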
+ + + +//redirectdirectx:i:0\n\ +//prompt for credentials on client:i:0\n\ + +//#endif + + +TsHttpRpc g_http_interface; +TsHttpRpc g_https_interface; + +void http_rpc_main_loop(bool is_https) { + if (is_https) { + if (!g_https_interface.init_https()) { + EXLOGE("[ERROR] can not start HTTPS-RPC listener, maybe port %d is already in use.\n", TS_HTTPS_RPC_PORT); + return; + } + + EXLOGW("======================================================\n"); + EXLOGW("[rpc] TeleportAssist-HTTPS-RPC ready on 127.0.0.1:%d\n", TS_HTTPS_RPC_PORT); + + g_https_interface.run(); + + EXLOGW("[rpc] HTTPS-Server main loop end.\n"); + } else { + if (!g_http_interface.init_http()) { + EXLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT); + return; + } + + EXLOGW("======================================================\n"); + EXLOGW("[rpc] TeleportAssist-HTTP-RPC ready on 127.0.0.1:%d\n", TS_HTTP_RPC_PORT); + + g_http_interface.run(); + + EXLOGW("[rpc] HTTP-Server main loop end.\n"); + } +} + +void http_rpc_stop(bool is_https) { + if (is_https) + g_https_interface.stop(); + else + g_http_interface.stop(); +} + +#define HEXTOI(x) (isdigit(x) ? x - '0' : x - 'W') + +int ts_url_decode(const char *src, int src_len, char *dst, int dst_len, int is_form_url_encoded) { + int i, j, a, b; + + for (i = j = 0; i < src_len && j < dst_len - 1; i++, j++) { + if (src[i] == '%') { + if (i < src_len - 2 && isxdigit(*(const unsigned char *)(src + i + 1)) && + isxdigit(*(const unsigned char *)(src + i + 2))) { + a = tolower(*(const unsigned char *)(src + i + 1)); + b = tolower(*(const unsigned char *)(src + i + 2)); + dst[j] = (char)((HEXTOI(a) << 4) | HEXTOI(b)); + i += 2; + } else { + return -1; + } + } else if (is_form_url_encoded && src[i] == '+') { + dst[j] = ' '; + } else { + dst[j] = src[i]; + } + } + + dst[j] = '\0'; /* Null-terminate the destination */ + + return i >= src_len ? 
j : -1; +} + +bool calc_psw51b(const char* password, std::string& ret) { + DATA_BLOB DataIn; + DATA_BLOB DataOut; + + ex_wstr w_pswd; + ex_astr2wstr(password, w_pswd, EX_CODEPAGE_ACP); + + DataIn.cbData = w_pswd.length() * sizeof(wchar_t); + DataIn.pbData = (BYTE*)w_pswd.c_str(); + + + if (!CryptProtectData(&DataIn, L"psw", nullptr, nullptr, nullptr, 0, &DataOut)) + return false; + + char szRet[5] = { 0 }; + for (DWORD i = 0; i < DataOut.cbData; ++i) { + sprintf_s(szRet, 5, "%02X", DataOut.pbData[i]); + ret += szRet; + } + + LocalFree(DataOut.pbData); + return true; +} + +bool isDegital(std::string str) { + for (int i = 0; i < str.size(); i++) { + if (str.at(i) == '-' && str.size() > 1) // 有可能出现负数 + continue; + if (str.at(i) > '9' || str.at(i) < '0') + return false; + } + return true; +} + +std::string strtolower(std::string str) { + for (int i = 0; i < str.size(); i++) + { + str[i] = tolower(str[i]); + } + return str; +} + +void SplitString(const std::string& s, std::vector& v, const std::string& c) +{ + std::string::size_type pos1, pos2; + pos2 = s.find(c); + pos1 = 0; + while (std::string::npos != pos2) + { + v.push_back(s.substr(pos1, pos2 - pos1)); + + pos1 = pos2 + c.size(); + pos2 = s.find(c, pos1); + } + if (pos1 != s.length()) + v.push_back(s.substr(pos1)); +} + +TsHttpRpc::TsHttpRpc() { + m_stop = false; + mg_mgr_init(&m_mg_mgr, nullptr); +} + +TsHttpRpc::~TsHttpRpc() { + mg_mgr_free(&m_mg_mgr); +} + +bool TsHttpRpc::init_http() { + struct mg_connection* nc = nullptr; + + char addr[128] = { 0 }; + ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTP_RPC_PORT); + + nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); + if (!nc) { + EXLOGE("[rpc] TsHttpRpc::init 127.0.0.1:%d\n", TS_HTTP_RPC_PORT); + return false; + } + nc->user_data = this; + + mg_set_protocol_http_websocket(nc); + + return _on_init(); +} + +bool TsHttpRpc::init_https() { + ex_wstr file_ssl_cert = g_env.m_exec_path; + ex_path_join(file_ssl_cert, true, L"cfg", L"localhost.pem", NULL); + ex_wstr file_ssl_key = g_env.m_exec_path; + ex_path_join(file_ssl_key, true, L"cfg", L"localhost.key", NULL); + ex_astr _ssl_cert; + ex_wstr2astr(file_ssl_cert, _ssl_cert); + ex_astr _ssl_key; + ex_wstr2astr(file_ssl_key, _ssl_key); + + const char *err = NULL; + struct mg_bind_opts bind_opts; + memset(&bind_opts, 0, sizeof(bind_opts)); + bind_opts.ssl_cert = _ssl_cert.c_str(); + bind_opts.ssl_key = _ssl_key.c_str(); + bind_opts.error_string = &err; + + + char addr[128] = { 0 }; + ex_strformat(addr, 128, "tcp://127.0.0.1:%d", TS_HTTPS_RPC_PORT); + + struct mg_connection* nc = nullptr; + nc = mg_bind_opt(&m_mg_mgr, addr, _mg_event_handler, bind_opts); + if (!nc) { + EXLOGE("[rpc] TsHttpRpc::init 127.0.0.1:%d\n", TS_HTTPS_RPC_PORT); + return false; + } + nc->user_data = this; + + mg_set_protocol_http_websocket(nc); + + return _on_init(); +} + +bool TsHttpRpc::_on_init() { + char file_name[MAX_PATH] = { 0 }; + if (!GetModuleFileNameA(nullptr, file_name, MAX_PATH)) + return false; + + int len = strlen(file_name); + + if (file_name[len] == '\\') + file_name[len] = '\0'; + + char* match = strrchr(file_name, '\\'); + if (match) + *match = '\0'; + + m_content_type_map[".js"] = "application/javascript"; + m_content_type_map[".png"] = "image/png"; + m_content_type_map[".jpeg"] = "image/jpeg"; + m_content_type_map[".jpg"] = "image/jpeg"; + m_content_type_map[".gif"] = "image/gif"; + m_content_type_map[".ico"] = "image/x-icon"; + m_content_type_map[".json"] = "image/json"; + m_content_type_map[".html"] = "text/html"; + m_content_type_map[".css"] = 
"text/css"; + m_content_type_map[".tif"] = "image/tiff"; + m_content_type_map[".tiff"] = "image/tiff"; + m_content_type_map[".svg"] = "text/html"; + + return true; +} + +void TsHttpRpc::run(void) { + while (!m_stop) { + mg_mgr_poll(&m_mg_mgr, 500); + } +} + +void TsHttpRpc::stop(void) { + m_stop = true; +} + +void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_data) { + struct http_message *hm = (struct http_message*)ev_data; + + TsHttpRpc* _this = (TsHttpRpc*)nc->user_data; + if (!_this) { + EXLOGE("[ERROR] invalid http request.\n"); + return; + } + + switch (ev) { + case MG_EV_HTTP_REQUEST: + { + ex_astr uri; + ex_chars _uri; + _uri.resize(hm->uri.len + 1); + memset(&_uri[0], 0, hm->uri.len + 1); + memcpy(&_uri[0], hm->uri.p, hm->uri.len); + uri = &_uri[0]; + +#ifdef EX_DEBUG + char* dbg_method = nullptr; + if (hm->method.len == 3 && 0 == memcmp(hm->method.p, "GET", hm->method.len)) + dbg_method = "GET"; + else if (hm->method.len == 4 && 0 == memcmp(hm->method.p, "POST", hm->method.len)) + dbg_method = "POST"; + else + dbg_method = "UNSUPPORTED-HTTP-METHOD"; + + EXLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str()); +#endif + ex_astr ret_buf; + bool b_is_html = false; + +// if (uri == "/") { +// ex_wstr page = L"Teleport助手\n\n
    Teleport Assistant is working properly!
    "; +// ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); +// +// mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); +// nc->flags |= MG_F_SEND_AND_CLOSE; +// return; +// } + + if (uri == "/") { + uri = "/status.html"; + b_is_html = true; + } + else if (uri == "/config") { + uri = "/index.html"; + b_is_html = true; + } + + ex_astr temp; + int offset = uri.find("/", 1); + if (offset > 0) { + temp = uri.substr(1, offset - 1); + + if (temp == "api") { + ex_astr method; + ex_astr json_param; + int rv = _this->_parse_request(hm, method, json_param); + if (0 != rv) { + EXLOGE("[ERROR] http-rpc got invalid request.\n"); + _this->_create_json_ret(ret_buf, rv); + } else { + _this->_process_js_request(method, json_param, ret_buf); + } + + mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: application/json\r\n\r\n%s", ret_buf.length(), &ret_buf[0]); + nc->flags |= MG_F_SEND_AND_CLOSE; + return; + } + } + + + ex_astr file_suffix; + offset = uri.rfind("."); + if (offset > 0) { + file_suffix = uri.substr(offset, uri.length()); + } + + ex_wstr2astr(g_env.m_site_path, temp); + ex_astr index_path = temp + uri; + + + FILE* file = ex_fopen(index_path.c_str(), "rb"); + if (file) { + unsigned long file_size = 0; + char* buf = nullptr; + size_t ret = 0; + + fseek(file, 0, SEEK_END); + file_size = ftell(file); + buf = new char[file_size]; + memset(buf, 0, file_size); + fseek(file, 0, SEEK_SET); + ret = fread(buf, 1, file_size, file); + fclose(file); + + ex_astr content_type = _this->get_content_type(file_suffix); + + mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: %s\r\n\r\n", file_size, content_type.c_str()); + mg_send(nc, buf, (int)file_size); + delete[]buf; + nc->flags |= MG_F_SEND_AND_CLOSE; + return; + } else if (b_is_html) { + ex_wstr page = L"404 Not Found

    404 Not Found
    Teleport Assistant configuration page not found.
    "; + ex_wstr2astr(page, ret_buf, EX_CODEPAGE_UTF8); + + mg_printf(nc, "HTTP/1.0 404 File Not Found\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %ld\r\nContent-Type: text/html\r\n\r\n%s", ret_buf.size() - 1, &ret_buf[0]); + nc->flags |= MG_F_SEND_AND_CLOSE; + return; + } + + } + break; + default: + break; + } +} + +int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, ex_astr& func_args) { + if (!req) + return TPE_FAILED; + + bool is_get = true; + if (req->method.len == 3 && 0 == memcmp(req->method.p, "GET", req->method.len)) + is_get = true; + else if (req->method.len == 4 && 0 == memcmp(req->method.p, "POST", req->method.len)) + is_get = false; + else + return TPE_HTTP_METHOD; + + ex_astrs strs; + + size_t pos_start = 1; // 跳过第一个字节,一定是 '/' + + size_t i = 0; + for (i = pos_start; i < req->uri.len; ++i) { + if (req->uri.p[i] == '/') { + if (i - pos_start > 0) { + ex_astr tmp_uri; + tmp_uri.assign(req->uri.p + pos_start, i - pos_start); + strs.push_back(tmp_uri); + } + pos_start = i + 1; // 跳过当前找到的分隔符 + } + } + if (pos_start < req->uri.len) { + ex_astr tmp_uri; + tmp_uri.assign(req->uri.p + pos_start, req->uri.len - pos_start); + strs.push_back(tmp_uri); + } + + if (strs.empty() || strs[0] != "api") + return TPE_PARAM; + + if (is_get) { + if (2 == strs.size()) { + func_cmd = strs[1]; + } else if (3 == strs.size()) { + func_cmd = strs[1]; + func_args = strs[2]; + } else { + return TPE_PARAM; + } + } else { + if (2 == strs.size()) { + func_cmd = strs[1]; + } else { + return TPE_PARAM; + } + + if (req->body.len > 0) { + func_args.assign(req->body.p, req->body.len); + } + } + + if (func_args.length() > 0) { + // 将参数进行 url-decode 解码 + int len = func_args.length() * 2; + ex_chars sztmp; + sztmp.resize(len); + memset(&sztmp[0], 0, len); + if (-1 == ts_url_decode(func_args.c_str(), func_args.length(), &sztmp[0], len, 0)) + return TPE_HTTP_URL_ENCODE; + + func_args = &sztmp[0]; + } + + EXLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str()); + + return TPE_OK; +} + +void TsHttpRpc::_process_js_request(const ex_astr& func_cmd, const ex_astr& func_args, ex_astr& buf) { + if (func_cmd == "get_version") { + _rpc_func_get_version(func_args, buf); + } else if (func_cmd == "run") { + _rpc_func_run_client(func_args, buf); + } else if (func_cmd == "rdp_play") { + _rpc_func_rdp_play(func_args, buf); + } else if (func_cmd == "get_config") { + _rpc_func_get_config(func_args, buf); + } else if (func_cmd == "set_config") { + _rpc_func_set_config(func_args, buf); + } else if (func_cmd == "file_action") { + _rpc_func_file_action(func_args, buf); + } else { + EXLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str()); + _create_json_ret(buf, TPE_UNKNOWN_CMD); + } +} + +void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode) { + // 返回: {"code":123} + + Json::Value jr_root; + jr_root["code"] = errcode; + + // buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); +} + +void TsHttpRpc::_create_json_ret(ex_astr& buf, Json::Value& jr_root) { +// Json::FastWriter jr_writer; +// buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); +} + +void TsHttpRpc::_rpc_func_url_protocol(const ex_astr& args, ex_astr& buf) +{ + //处理urlprotocol调用访式 + // 将参数进行 url-decode 解码 + std::string func_args = args; + if (func_args.length() > 
0) + { + int len = func_args.length() * 2; + ex_chars sztmp; + sztmp.resize(len); + memset(&sztmp[0], 0, len); + if (-1 == ts_url_decode(func_args.c_str(), func_args.length(), &sztmp[0], len, 0)) + return ; + + func_args = &sztmp[0]; + } + EXLOGD(("%s\n"), func_args.c_str()); + //处理传参过来的teleport://{}/,只保留参数部份 + std::string urlproto_appname = TP_URLPROTO_APP_NAME; + urlproto_appname += "://{"; + func_args.erase(0, urlproto_appname.length());//去除第一个URLPROTO_APP_NAME以及://字符 + int pos = func_args.length() - 1; + if (func_args.substr(pos, 1) == "/") + func_args.erase(pos - 1, 2);//去除最后一个}/字符 + else + func_args.erase(pos, 1); + + //由于命令行、ie浏览器参数传递时会把原来json结构中的"号去掉,需要重新格式化参数为json格式 + if (func_args.find("\"", 0) == std::string::npos) { + std::vector strv; + SplitString(func_args, strv, ","); + func_args = ""; + for (std::vector::size_type i = 0; i < strv.size(); i++) { + std::vector strv1; + SplitString(strv[i], strv1, ":"); + strv1[0] = "\"" + strv1[0] + "\""; + if (!isDegital(strv1[1]) && strtolower(strv1[1]) != "true" && strtolower(strv1[1]) != "false") + strv1[1] = "\"" + strv1[1] + "\""; + + strv[i] = strv1[0] + ":" + strv1[1]; + if (i == 0) + func_args = strv[i]; + else + func_args += "," + strv[i]; + } + } + func_args = "{" + func_args + "}"; + EXLOGD(("%s\n"), func_args.c_str()); + //调用TsHttpRpc类里的_rpc_func_run_client启动客户端 + _rpc_func_run_client(func_args, buf); +} + +void TsHttpRpc::_rpc_func_run_client(const ex_astr& func_args, ex_astr& buf) { + // 入参:{"ip":"192.168.5.11","port":22,"uname":"root","uauth":"abcdefg","authmode":1,"protocol":2} + // authmode: 1=password, 2=private-key + // protocol: 1=rdp, 2=ssh + // SSH返回: {"code":0, "data":{"sid":"0123abcde"}} + // RDP返回: {"code":0, "data":{"sid":"0123abcde0A"}} + + //Json::Reader jreader; + Json::Value jsRoot; + + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = func_args.c_str(); + ex_astr err; + + //if (!jreader.parse(func_args.c_str(), jsRoot)) { + if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) { + _create_json_ret(buf, TPE_JSON_FORMAT); + return; + } + if (!jsRoot.isObject()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + // 判断参数是否正确 + if (!jsRoot["teleport_ip"].isString() + || !jsRoot["teleport_port"].isNumeric() || !jsRoot["remote_host_ip"].isString() + || !jsRoot["session_id"].isString() || !jsRoot["protocol_type"].isNumeric() || !jsRoot["protocol_sub_type"].isNumeric() + || !jsRoot["protocol_flag"].isNumeric() + ) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + int pro_type = jsRoot["protocol_type"].asUInt(); + int pro_sub = jsRoot["protocol_sub_type"].asInt(); + ex_u32 protocol_flag = jsRoot["protocol_flag"].asUInt(); + + ex_astr teleport_ip = jsRoot["teleport_ip"].asCString(); + int teleport_port = jsRoot["teleport_port"].asUInt(); + + ex_astr remote_host_name = jsRoot["remote_host_name"].asCString(); + + ex_astr real_host_ip = jsRoot["remote_host_ip"].asCString(); + ex_astr sid = jsRoot["session_id"].asCString(); + + ex_wstr w_exe_path; + WCHAR w_szCommandLine[MAX_PATH] = { 0 }; + + + ex_wstr w_sid; + ex_astr2wstr(sid, w_sid); + ex_wstr w_teleport_ip; + ex_astr2wstr(teleport_ip, w_teleport_ip); + ex_wstr w_real_host_ip; + ex_astr2wstr(real_host_ip, w_real_host_ip); + ex_wstr w_remote_host_name; + ex_astr2wstr(remote_host_name, w_remote_host_name); + WCHAR w_port[32] = { 0 }; + swprintf_s(w_port, _T("%d"), teleport_port); + + ex_wstr tmp_rdp_file; // for .rdp file + + if (pro_type == TP_PROTOCOL_TYPE_RDP) 
+    if (pro_type == TP_PROTOCOL_TYPE_RDP) {
+        //==============================================
+        // RDP
+        //==============================================
+
+        bool flag_clipboard = ((protocol_flag & TP_FLAG_RDP_CLIPBOARD) == TP_FLAG_RDP_CLIPBOARD);
+        bool flag_disk = ((protocol_flag & TP_FLAG_RDP_DISK) == TP_FLAG_RDP_DISK);
+        bool flag_console = ((protocol_flag & TP_FLAG_RDP_CONSOLE) == TP_FLAG_RDP_CONSOLE);
+
+        int rdp_w = 800;
+        int rdp_h = 640;
+        bool rdp_console = false;
+
+        if (!jsRoot["rdp_width"].isNull()) {
+            if (jsRoot["rdp_width"].isNumeric()) {
+                rdp_w = jsRoot["rdp_width"].asUInt();
+            } else {
+                _create_json_ret(buf, TPE_PARAM);
+                return;
+            }
+        }
+
+        if (!jsRoot["rdp_height"].isNull()) {
+            if (jsRoot["rdp_height"].isNumeric()) {
+                rdp_h = jsRoot["rdp_height"].asUInt();
+            } else {
+                _create_json_ret(buf, TPE_PARAM);
+                return;
+            }
+        }
+
+        if (!jsRoot["rdp_console"].isNull()) {
+            if (jsRoot["rdp_console"].isBool()) {
+                rdp_console = jsRoot["rdp_console"].asBool();
+            } else {
+                _create_json_ret(buf, TPE_PARAM);
+                return;
+            }
+        }
+
+        if (!flag_console)
+            rdp_console = false;
+
+        int split_pos = sid.length() - 2;
+        ex_astr real_sid = sid.substr(0, split_pos);
+        ex_astr str_pwd_len = sid.substr(split_pos, sid.length());
+        int n_pwd_len = strtol(str_pwd_len.c_str(), nullptr, 16);
+        n_pwd_len -= real_sid.length();
+        n_pwd_len -= 2;
+        char szPwd[256] = { 0 };
+        for (int i = 0; i < n_pwd_len; i++) {
+            szPwd[i] = '*';
+        }
+
+        ex_astr2wstr(real_sid, w_sid);
+
+        w_exe_path = _T("\"");
+        w_exe_path += g_cfg.rdp_app + _T("\" ");
+
+        ex_wstr rdp_name = g_cfg.rdp_name;
+        if (rdp_name == L"mstsc") {
+            w_exe_path += g_cfg.rdp_cmdline;
+
+            int width = 0;
+            int higth = 0;
+            int cx = 0;
+            int cy = 0;
+
+            int display = 1;
+            int iWidth = GetSystemMetrics(SM_CXSCREEN);
+            int iHeight = GetSystemMetrics(SM_CYSCREEN);
+
+            if (rdp_w == 0 || rdp_h == 0) {
+                // full screen
+                width = iWidth;
+                higth = iHeight;
+                display = 2;
+            } else {
+                width = rdp_w;
+                higth = rdp_h;
+                display = 1;
+            }
+
+            cx = (iWidth - width) / 2;
+            cy = (iHeight - higth) / 2;
+            if (cx < 0) {
+                cx = 0;
+            }
+            if (cy < 0) {
+                cy = 0;
+            }
+
+            // int console_mode = 0;
+            // if (rdp_console)
+            //     console_mode = 1;
+
+            std::string psw51b;
+            if (!calc_psw51b(szPwd, psw51b)) {
+                EXLOGE("calc password failed.\n");
+                _create_json_ret(buf, TPE_FAILED);
+                return;
+            }
+
+            real_sid = "01" + real_sid;
+
+            char sz_rdp_file_content[4096] = { 0 };
+            sprintf_s(sz_rdp_file_content, 4096, rdp_content.c_str()
+                , (flag_console && rdp_console) ? 1 : 0
+                , display, width, higth
+                , cx, cy, cx + width + 100, cy + higth + 100
+                , teleport_ip.c_str(), teleport_port
+                , flag_clipboard ? 1 : 0
+                , flag_disk ? "*" : ""
+                , real_sid.c_str()
+                , psw51b.c_str()
+            );
+
+            char sz_file_name[MAX_PATH] = { 0 };
+            char temp_path[MAX_PATH] = { 0 };
+            DWORD ret = GetTempPathA(MAX_PATH, temp_path);
+            if (ret <= 0) {
+                EXLOGE("fopen failed (%d).\n", GetLastError());
+                _create_json_ret(buf, TPE_FAILED);
+                return;
+            }
+
+            ex_astr temp_host_ip = real_host_ip;
+            ex_replace_all(temp_host_ip, ".", "-");
+
+            sprintf_s(sz_file_name, MAX_PATH, ("%s%s.rdp"), temp_path, temp_host_ip.c_str());
+
+            FILE* f = NULL;
+            if (fopen_s(&f, sz_file_name, "wt") != 0) {
+                EXLOGE("fopen failed (%d).\n", GetLastError());
+                _create_json_ret(buf, TPE_OPENFILE);
+                return;
+            }
+            // Write the generated content into the .rdp file.
+            fwrite(sz_rdp_file_content, strlen(sz_rdp_file_content), 1, f);
+            fclose(f);
+            ex_astr2wstr(sz_file_name, tmp_rdp_file);
+
+            // substitute placeholders
+            ex_replace_all(w_exe_path, _T("{tmp_rdp_file}"), tmp_rdp_file);
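
A side note on the session_id handling in the branch above: the two trailing hex digits appear to encode the combined length of sid-plus-password, from which the assist rebuilds a same-length '*' placeholder. That scheme is inferred from the arithmetic in this hunk, not from any spec, so the sketch below is only a contrived illustration of it:

#include <cstdio>
#include <cstdlib>
#include <string>

// Split "<sid><2-hex-digit total length>" and recover how many
// placeholder characters the masked password needs.
static std::string password_placeholder(const std::string& session_id) {
    if (session_id.size() < 3)
        return std::string();
    size_t split = session_id.size() - 2;
    std::string sid = session_id.substr(0, split);
    int total = (int)strtol(session_id.substr(split).c_str(), nullptr, 16);
    int pwd_len = total - (int)sid.size() - 2;   // same arithmetic as above
    return std::string(pwd_len > 0 ? pwd_len : 0, '*');
}

int main() {
    // "0123abcde" + "0d" (0x0d = 13 = 9 + 2 + 2) -> 2 placeholder chars
    printf("[%s]\n", password_placeholder("0123abcde0d").c_str());
    return 0;
}
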
+        } else if (g_cfg.rdp_name == L"freerdp") {
+            w_exe_path += L"{size} {console} {clipboard} {drives} ";
+            w_exe_path += g_cfg.rdp_cmdline;
+
+            ex_wstr w_screen;
+
+            if (rdp_w == 0 || rdp_h == 0) {
+                // full screen
+                w_screen = _T("/f");
+            } else {
+                char sz_size[64] = { 0 };
+                ex_strformat(sz_size, 63, "/size:%dx%d", rdp_w, rdp_h);
+                ex_astr2wstr(sz_size, w_screen);
+            }
+
+            // wchar_t* w_console = NULL;
+            //
+            // if (flag_console && rdp_console)
+            // {
+            //     w_console = L"/admin";
+            // }
+            // else
+            // {
+            //     w_console = L"";
+            // }
+
+            ex_wstr w_password;
+            ex_astr2wstr(szPwd, w_password);
+            w_exe_path += L" /p:";
+            w_exe_path += w_password;
+
+            w_sid = L"02" + w_sid;
+
+            w_exe_path += L" /gdi:sw"; // software rendering; /gdi:hw enables hardware acceleration but draws lots of black blocks (playback of the session recording looks fine, though!)
+            w_exe_path += L" -grab-keyboard"; // [new style] works around losing local keyboard input after FreeRDP starts, which otherwise requires minimizing the FreeRDP window once (does not seem to help, though)
+
+            // substitute placeholders
+            ex_replace_all(w_exe_path, _T("{size}"), w_screen);
+
+            if (flag_console && rdp_console)
+                ex_replace_all(w_exe_path, _T("{console}"), L"/admin");
+            else
+                ex_replace_all(w_exe_path, _T("{console}"), L"");
+
+            //ex_replace_all(w_exe_path, _T("{clipboard}"), L"+clipboard");
+
+            if (flag_clipboard)
+                ex_replace_all(w_exe_path, _T("{clipboard}"), L"/clipboard");
+            else
+                ex_replace_all(w_exe_path, _T("{clipboard}"), L"-clipboard");
+
+            if (flag_disk)
+                ex_replace_all(w_exe_path, _T("{drives}"), L"/drives");
+            else
+                ex_replace_all(w_exe_path, _T("{drives}"), L"-drives");
+        } else {
+            _create_json_ret(buf, TPE_FAILED);
+            return;
+        }
+    } else if (pro_type == TP_PROTOCOL_TYPE_SSH) {
+        //==============================================
+        // SSH
+        //==============================================
+
+        if (pro_sub == TP_PROTOCOL_TYPE_SSH_SHELL) {
+            w_exe_path = _T("\"");
+            w_exe_path += g_cfg.ssh_app + _T("\" ");
+            w_exe_path += g_cfg.ssh_cmdline;
+        } else {
+            w_exe_path = _T("\"");
+            w_exe_path += g_cfg.scp_app + _T("\" ");
+            w_exe_path += g_cfg.scp_cmdline;
+        }
+    } else if (pro_type == TP_PROTOCOL_TYPE_TELNET) {
+        //==============================================
+        // TELNET
+        //==============================================
+        w_exe_path = _T("\"");
+        w_exe_path += g_cfg.telnet_app + _T("\" ");
+        w_exe_path += g_cfg.telnet_cmdline;
+    }
+
+    ex_replace_all(w_exe_path, _T("{host_ip}"), w_teleport_ip.c_str());
+    ex_replace_all(w_exe_path, _T("{host_port}"), w_port);
+    ex_replace_all(w_exe_path, _T("{user_name}"), w_sid.c_str());
+    ex_replace_all(w_exe_path, _T("{host_name}"), w_remote_host_name.c_str());
+    ex_replace_all(w_exe_path, _T("{real_ip}"), w_real_host_ip.c_str());
+    ex_replace_all(w_exe_path, _T("{assist_tools_path}"), g_env.m_tools_path.c_str());
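
Each protocol branch ends in the same placeholder substitution over the configured command-line template, as shown just above. A dependency-free sketch of that step, with std::wstring standing in for ex_wstr and made-up placeholder values:

#include <iostream>
#include <map>
#include <string>

// Replace every occurrence of each {placeholder} in a command template.
static std::wstring fill_template(std::wstring tpl,
                                  const std::map<std::wstring, std::wstring>& vars) {
    for (const auto& kv : vars) {
        std::wstring::size_type pos = 0;
        while ((pos = tpl.find(kv.first, pos)) != std::wstring::npos) {
            tpl.replace(pos, kv.first.size(), kv.second);
            pos += kv.second.size();
        }
    }
    return tpl;
}

int main() {
    std::wstring cmd = fill_template(
        L"ssh {user_name}@{host_ip} -p {host_port}",
        {{L"{user_name}", L"02a1b2c3"}, {L"{host_ip}", L"127.0.0.1"}, {L"{host_port}", L"52189"}});
    std::wcout << cmd << L"\n";   // ssh 02a1b2c3@127.0.0.1 -p 52189
    return 0;
}
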
+
+    STARTUPINFO si;
+    PROCESS_INFORMATION pi;
+
+    ZeroMemory(&si, sizeof(si));
+    si.cb = sizeof(si);
+    ZeroMemory(&pi, sizeof(pi));
+
+    Json::Value root_ret;
+    ex_astr utf8_path;
+    ex_wstr2astr(w_exe_path, utf8_path, EX_CODEPAGE_UTF8);
+    root_ret["path"] = utf8_path;
+
+    if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) {
+        EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
+        root_ret["code"] = TPE_START_CLIENT;
+        _create_json_ret(buf, root_ret);
+        return;
+    }
+
+    root_ret["code"] = TPE_OK;
+    _create_json_ret(buf, root_ret);
+}
+
+void TsHttpRpc::_rpc_func_rdp_play(const ex_astr& func_args, ex_astr& buf) {
+    //Json::Reader jreader;
+    Json::Value jsRoot;
+
+    Json::CharReaderBuilder jcrb;
+    std::unique_ptr<Json::CharReader> const jreader(jcrb.newCharReader());
+    const char *str_json_begin = func_args.c_str();
+    ex_astr err;
+
+    //if (!jreader.parse(func_args.c_str(), jsRoot)) {
+    if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) {
+        _create_json_ret(buf, TPE_JSON_FORMAT);
+        return;
+    }
+
+    // validate the parameters
+    if (!jsRoot["rid"].isInt()
+        || !jsRoot["web"].isString()
+        || !jsRoot["sid"].isString()
+        ) {
+        _create_json_ret(buf, TPE_PARAM);
+        return;
+    }
+
+    int rid = jsRoot["rid"].asInt();
+    ex_astr a_url_base = jsRoot["web"].asCString();
+    ex_astr a_sid = jsRoot["sid"].asCString();
+
+    char cmd_args[1024] = { 0 };
+    ex_strformat(cmd_args, 1023, "%s/%d", a_sid.c_str(), rid);
+
+    ex_wstr w_url_base;
+    ex_astr2wstr(a_url_base, w_url_base);
+    ex_wstr w_cmd_args;
+    ex_astr2wstr(cmd_args, w_cmd_args);
+
+    ex_wstr w_exe_path;
+    w_exe_path = _T("\"");
+    w_exe_path += g_env.m_exec_path + _T("\\tp-player.exe\"");
+    w_exe_path += _T(" \"");
+    w_exe_path += w_url_base;
+    w_exe_path += _T("/");
+    w_exe_path += w_cmd_args;
+
+    Json::Value root_ret;
+    ex_astr utf8_path;
+    ex_wstr2astr(w_exe_path, utf8_path, EX_CODEPAGE_UTF8);
+    root_ret["cmdline"] = utf8_path;
+
+    EXLOGD(w_exe_path.c_str());
+
+    STARTUPINFO si;
+    PROCESS_INFORMATION pi;
+
+    ZeroMemory(&si, sizeof(si));
+    si.cb = sizeof(si);
+    ZeroMemory(&pi, sizeof(pi));
+    if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) {
+        EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
+        root_ret["code"] = TPE_START_CLIENT;
+        _create_json_ret(buf, root_ret);
+        return;
+    }
+
+    root_ret["code"] = TPE_OK;
+    _create_json_ret(buf, root_ret);
+    return;
+}
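
Both CreateProcess() call sites above cast away const on w_exe_path.c_str(). CreateProcessW is documented as possibly modifying its lpCommandLine buffer, so a writable copy is the safer idiom. A minimal sketch of that variant (Windows-only, error handling trimmed; unlike the code above it also closes the returned handles):

#include <windows.h>
#include <string>
#include <vector>

// Launch a child process from a command line, giving CreateProcessW
// the writable buffer it is documented to require.
static bool launch(const std::wstring& cmdline) {
    std::vector<wchar_t> buf(cmdline.begin(), cmdline.end());
    buf.push_back(L'\0');

    STARTUPINFOW si = { sizeof(si) };
    PROCESS_INFORMATION pi = { 0 };
    if (!CreateProcessW(NULL, buf.data(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi))
        return false;

    // We only spawn and detach, so release the handles immediately.
    CloseHandle(pi.hThread);
    CloseHandle(pi.hProcess);
    return true;
}
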
+
+void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) {
+    Json::Value jr_root;
+    jr_root["code"] = 0;
+    jr_root["data"] = g_cfg.get_root();
+    _create_json_ret(buf, jr_root);
+}
+
+void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) {
+    //Json::Reader jreader;
+    Json::Value jsRoot;
+    Json::CharReaderBuilder jcrb;
+    std::unique_ptr<Json::CharReader> const jreader(jcrb.newCharReader());
+    const char *str_json_begin = func_args.c_str();
+    ex_astr err;
+
+    //if (!jreader.parse(func_args.c_str(), jsRoot)) {
+    if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) {
+        _create_json_ret(buf, TPE_JSON_FORMAT);
+        return;
+    }
+
+    if (!g_cfg.save(func_args))
+        _create_json_ret(buf, TPE_FAILED);
+    else
+        _create_json_ret(buf, TPE_OK);
+}
+
+void TsHttpRpc::_rpc_func_file_action(const ex_astr& func_args, ex_astr& buf) {
+
+    //Json::Reader jreader;
+    Json::Value jsRoot;
+
+    Json::CharReaderBuilder jcrb;
+    std::unique_ptr<Json::CharReader> const jreader(jcrb.newCharReader());
+    const char *str_json_begin = func_args.c_str();
+    ex_astr err;
+
+    //if (!jreader.parse(func_args.c_str(), jsRoot)) {
+    if (!jreader->parse(str_json_begin, str_json_begin + func_args.length(), &jsRoot, &err)) {
+        _create_json_ret(buf, TPE_JSON_FORMAT);
+        return;
+    }
+    // validate the parameters
+    if (!jsRoot["action"].isNumeric()) {
+        _create_json_ret(buf, TPE_PARAM);
+        return;
+    }
+    int action = jsRoot["action"].asUInt();
+
+    HWND hParent = GetForegroundWindow();
+    if (NULL == hParent)
+        hParent = g_hDlgMain;
+
+    BOOL ret = FALSE;
+    wchar_t wszReturnPath[MAX_PATH] = _T("");
+
+    if (action == 1 || action == 2) {
+        OPENFILENAME ofn;
+        ex_wstr wsDefaultName;
+        ex_wstr wsDefaultPath;
+        StringCchCopy(wszReturnPath, MAX_PATH, wsDefaultName.c_str());
+
+        ZeroMemory(&ofn, sizeof(ofn));
+
+        ofn.lStructSize = sizeof(ofn);
+        ofn.lpstrTitle = _T("选择文件"); // "Select a file"
+        ofn.hwndOwner = hParent;
+        ofn.lpstrFilter = _T("可执行程序 (*.exe)\0*.exe\0"); // "Executables (*.exe)"
+        ofn.lpstrFile = wszReturnPath;
+        ofn.nMaxFile = MAX_PATH;
+        ofn.lpstrInitialDir = wsDefaultPath.c_str();
+        ofn.Flags = OFN_EXPLORER | OFN_PATHMUSTEXIST;
+
+        if (action == 1) {
+            ofn.Flags |= OFN_FILEMUSTEXIST;
+            ret = GetOpenFileName(&ofn);
+        } else {
+            ofn.Flags |= OFN_OVERWRITEPROMPT;
+            ret = GetSaveFileName(&ofn);
+        }
+    } else if (action == 3) {
+        BROWSEINFO bi;
+        ZeroMemory(&bi, sizeof(BROWSEINFO));
+        bi.hwndOwner = NULL;
+        bi.pidlRoot = NULL;
+        bi.pszDisplayName = wszReturnPath; // if this is NULL, the dialog cannot be shown
+        bi.lpszTitle = _T("选择目录"); // "Select a directory"
+        bi.ulFlags = BIF_RETURNONLYFSDIRS;
+        bi.lpfn = NULL;
+        bi.iImage = 0; // end of BROWSEINFO initialization
+        LPITEMIDLIST pIDList = SHBrowseForFolder(&bi); // show the folder-selection dialog
+        if (pIDList) {
+            ret = true;
+            SHGetPathFromIDList(pIDList, wszReturnPath);
+        } else {
+            ret = false;
+        }
+    } else if (action == 4) {
+        ex_wstr wsDefaultName;
+        ex_wstr wsDefaultPath;
+
+        if (wsDefaultPath.length() == 0) {
+            _create_json_ret(buf, TPE_PARAM);
+            return;
+        }
+
+        ex_wstr::size_type pos = 0;
+
+        while (ex_wstr::npos != (pos = wsDefaultPath.find(L"/", pos))) {
+            wsDefaultPath.replace(pos, 1, L"\\");
+            pos += 1;
+        }
+
+        ex_wstr wArg = L"/select, \"";
+        wArg += wsDefaultPath;
+        wArg += L"\"";
+        if ((int)ShellExecute(hParent, _T("open"), _T("explorer"), wArg.c_str(), NULL, SW_SHOW) > 32)
+            ret = true;
+        else
+            ret = false;
+    }
+
+    if (ret) {
+        if (action == 1 || action == 2 || action == 3) {
+            ex_astr utf8_path;
+            ex_wstr2astr(wszReturnPath, utf8_path, EX_CODEPAGE_UTF8);
+            Json::Value root;
+            root["code"] = TPE_OK;
+            root["path"] = utf8_path;
+            _create_json_ret(buf, root);
+
+            return;
+        } else {
+            _create_json_ret(buf, TPE_OK);
+            return;
+        }
+    } else {
+        _create_json_ret(buf, TPE_FAILED);
+        return;
+    }
+}
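
For reference, the action == 1 path above is the stock comdlg32 open-file pattern. Stripped to the parts the dialog actually needs, it looks like this (the filter string and output handling are placeholders, not Teleport's):

#include <windows.h>
#include <commdlg.h>
#include <stdio.h>
#pragma comment(lib, "comdlg32.lib")

int main() {
    wchar_t path[MAX_PATH] = L"";

    OPENFILENAMEW ofn = { 0 };
    ofn.lStructSize = sizeof(ofn);
    ofn.lpstrFilter = L"Executables (*.exe)\0*.exe\0";
    ofn.lpstrFile   = path;          // receives the selected path
    ofn.nMaxFile    = MAX_PATH;
    ofn.Flags       = OFN_EXPLORER | OFN_PATHMUSTEXIST | OFN_FILEMUSTEXIST;

    if (GetOpenFileNameW(&ofn))
        wprintf(L"picked: %ls\n", path);
    else
        wprintf(L"cancelled (or error %lu)\n", CommDlgExtendedError());
    return 0;
}
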
+
+void TsHttpRpc::_rpc_func_get_version(const ex_astr& func_args, ex_astr& buf) {
+    Json::Value root_ret;
+    ex_wstr w_version = TP_ASSIST_VER;
+    ex_astr version;
+    ex_wstr2astr(w_version, version, EX_CODEPAGE_UTF8);
+    root_ret["version"] = version;
+    root_ret["code"] = TPE_OK;
+    _create_json_ret(buf, root_ret);
+    return;
+}
diff --git a/client/tp_assist_win/ts_http_rpc.h b/client/tp_assist_win/ts_http_rpc.h
index 7a9fd25..25dbb50 100644
--- a/client/tp_assist_win/ts_http_rpc.h
+++ b/client/tp_assist_win/ts_http_rpc.h
@@ -17,14 +17,14 @@
 //=================================================
 Interface usage notes
 
-Once the assist starts, it listens on localhost:50022 and accepts HTTP requests, mainly in the formats below:
+Once the assist starts, it listens on 127.0.0.1:50022 and accepts HTTP requests, mainly in the formats below:
 
 GET style:
-http://localhost:50022/method/json_param
+http://127.0.0.1:50022/method/json_param
 where json_param is a JSON-format string encoded with url_encode
 
 POST style:
-http://localhost:50022/method
+http://127.0.0.1:50022/method
 the POST body carries json_param
 
 Here, the URI consists of two parts:
diff --git a/client/tp_assist_win/ts_ver.h b/client/tp_assist_win/ts_ver.h
index a296cc3..935d5a2 100644
--- a/client/tp_assist_win/ts_ver.h
+++ b/client/tp_assist_win/ts_ver.h
@@ -1,6 +1,6 @@
-#ifndef __TS_ASSIST_VER_H__
-#define __TS_ASSIST_VER_H__
-
-#define TP_ASSIST_VER L"3.2.0"
-
-#endif // __TS_ASSIST_VER_H__
+#ifndef __TS_ASSIST_VER_H__
+#define __TS_ASSIST_VER_H__
+
+#define TP_ASSIST_VER L"3.5.5"
+
+#endif // __TS_ASSIST_VER_H__
diff --git a/common/libex/include/ex.h b/common/libex/include/ex.h
index 68166fb..2c668d3 100644
--- a/common/libex/include/ex.h
+++ b/common/libex/include/ex.h
@@ -1,4 +1,4 @@
-#ifndef __LIB_EX_H__
+#ifndef __LIB_EX_H__
 #define __LIB_EX_H__
 
 #ifdef EX_HAVE_CONFIG
diff --git a/common/libex/include/ex/ex_const.h b/common/libex/include/ex/ex_const.h
index 30604a7..699bf10 100644
--- a/common/libex/include/ex/ex_const.h
+++ b/common/libex/include/ex/ex_const.h
@@ -1,4 +1,4 @@
-#ifndef __LIB_EX_CONST_H__
+#ifndef __LIB_EX_CONST_H__
 #define __LIB_EX_CONST_H__
 
 #include "ex_platform.h"
@@ -43,8 +43,8 @@
 // error code.
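
Per the ts_http_rpc.h usage notes above, a GET caller percent-encodes the JSON parameter into the URL. A generic sketch of composing such a request target follows; the method name and JSON body are made-up examples, and this is a plain RFC 3986 encoder, not Teleport's own ts_url_decode counterpart:

#include <cctype>
#include <cstdio>
#include <string>

// Percent-encode everything except RFC 3986 unreserved characters.
static std::string url_encode(const std::string& s) {
    static const char* hex = "0123456789ABCDEF";
    std::string out;
    for (unsigned char c : s) {
        if (isalnum(c) || c == '-' || c == '_' || c == '.' || c == '~') {
            out += (char)c;
        } else {
            out += '%';
            out += hex[c >> 4];
            out += hex[c & 15];
        }
    }
    return out;
}

int main() {
    std::string json = "{\"action\":1}";
    printf("http://127.0.0.1:50022/file_action/%s\n", url_encode(json).c_str());
    return 0;
}
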
//==================================================== #define EXRV_OK 0 -#define EXRV_SYS_ERR 1 // ϵͳ󣬿ʹGetLastErrorerrnoȡֵ -#define EXRV_FAILED 2 // ʧ +#define EXRV_SYS_ERR 1 // 系统错误,可以使用GetLastError或者errno来获取具体错误值 +#define EXRV_FAILED 2 // 操作失败 //#define EXRV_CANNOT_FOUND 9 #define EXRV_CANNOT_CREATE 10 diff --git a/common/libex/include/ex/ex_ini.h b/common/libex/include/ex/ex_ini.h index 38749f5..681f05f 100644 --- a/common/libex/include/ex/ex_ini.h +++ b/common/libex/include/ex/ex_ini.h @@ -1,4 +1,4 @@ -#ifndef __EX_INI_H__ +#ifndef __EX_INI_H__ #define __EX_INI_H__ /* diff --git a/common/libex/include/ex/ex_log.h b/common/libex/include/ex/ex_log.h index 29a1b41..1cd89ab 100644 --- a/common/libex/include/ex/ex_log.h +++ b/common/libex/include/ex/ex_log.h @@ -1,4 +1,4 @@ -#ifndef __EX_LOG_H__ +#ifndef __EX_LOG_H__ #define __EX_LOG_H__ #include "ex_types.h" @@ -27,7 +27,7 @@ public: protected: bool _open_file(); - bool _rotate_file(void); // ־ļݣȻ¿һ־ļ + bool _rotate_file(void); // 将现有日志文件改名备份,然后新开一个日志文件 public: ExThreadLock lock; @@ -63,7 +63,7 @@ void EXLOG_USE_LOGGER(ExLogger* logger); void EXLOG_LEVEL(int min_level); void EXLOG_DEBUG(bool debug_mode); -// 趨־ļ·δָ·ΪִгĿ¼µlogĿ¼ +// 设定日志文件名及路径,如未指定路径,则为可执行程序所在目录下的log目录。 void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path = NULL, ex_u32 max_filesize = EX_LOG_FILE_MAX_SIZE, ex_u8 max_filecount = EX_LOG_FILE_MAX_COUNT); void EXLOG_CONSOLE(bool output_to_console); diff --git a/common/libex/include/ex/ex_path.h b/common/libex/include/ex/ex_path.h index 2f1fa19..20ac79e 100644 --- a/common/libex/include/ex/ex_path.h +++ b/common/libex/include/ex/ex_path.h @@ -1,4 +1,4 @@ -#ifndef __LIB_EX_PATH_H__ +#ifndef __LIB_EX_PATH_H__ #define __LIB_EX_PATH_H__ #include "ex_platform.h" @@ -39,7 +39,7 @@ bool ex_path_join(ex_wstr& inout_path, EX_BOOL auto_abspath, ...); bool ex_abspath_to(const ex_wstr& base_abs_path, const ex_wstr& relate_path, ex_wstr& out_path); bool ex_mkdirs(const ex_wstr& in_path); -// ȡļеչ֣.abc.py py +// 获取文件名中的扩展名部分(不包括.,例如abc.py,返回 py) bool ex_path_ext_name(const ex_wstr& in_filename, ex_wstr& out_ext); #endif diff --git a/common/libex/include/ex/ex_platform.h b/common/libex/include/ex/ex_platform.h index 3000efe..099cfbf 100644 --- a/common/libex/include/ex/ex_platform.h +++ b/common/libex/include/ex/ex_platform.h @@ -1,4 +1,4 @@ -#ifndef __LIB_EX_PLATFORM_H__ +#ifndef __LIB_EX_PLATFORM_H__ #define __LIB_EX_PLATFORM_H__ #if defined(_WIN32) || defined(WIN32) @@ -67,6 +67,7 @@ # include // O_RDONLY, etc. # include # include +# include # include # include # include diff --git a/common/libex/include/ex/ex_str.h b/common/libex/include/ex/ex_str.h index b5c4a43..ca9cea9 100644 --- a/common/libex/include/ex/ex_str.h +++ b/common/libex/include/ex/ex_str.h @@ -1,86 +1,104 @@ -#ifndef __LIB_EX_STR_H__ -#define __LIB_EX_STR_H__ - -#include "ex_types.h" - -#define EX_CODEPAGE_ACP 0 -#define EX_CODEPAGE_UTF8 1 -#ifdef EX_OS_WIN32 -# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_ACP -#else -# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_UTF8 -#endif - -#define EX_RSC_BEGIN 0x01 -#define EX_RSC_END 0x02 -#define EX_RSC_ALL EX_RSC_BEGIN | EX_RSC_END - -//================================================= -// C Interface -//================================================= - -// copy a string from `source` to `target`. -// `size` is size of target buffer. -// if buffer is to small, NULL will return, but `size-1` characters have been copied. 
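
Per the contract in the ex_strcpy comment above (on overflow, copy size-1 characters and return NULL), a standalone equivalent looks roughly like the following; note this sketch also NUL-terminates on truncation, which the comment leaves unspecified:

#include <cstdio>
#include <cstring>

// Copy `source` into `target` (capacity `size`). On success return `target`;
// if the buffer is too small, copy size-1 characters, terminate, return NULL.
static char* bounded_strcpy(char* target, size_t size, const char* source) {
    size_t len = strlen(source);
    if (size > len)
        return strcpy(target, source);
    if (size == 0)
        return NULL;
    memcpy(target, source, size - 1);
    target[size - 1] = '\0';
    return NULL;   // signal truncation
}

int main() {
    char small[4];
    if (bounded_strcpy(small, sizeof(small), "teleport") == NULL)
        printf("truncated to \"%s\"\n", small);   // "tel"
    return 0;
}
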
-char* ex_strcpy(char* target, size_t size, const char* source); -wchar_t* ex_wcscpy(wchar_t* target, size_t size, const wchar_t* source); - - -// dupilicate a string. -// must use ex_free() to release the returned value. -char* ex_strdup(const char* src); -wchar_t* ex_wcsdup(const wchar_t* src); - -// convert between mutli-bytes and wide char string. -// must use ex_free() to release the returned value. -wchar_t* ex_str2wcs_alloc(const char* in_buffer, int code_page); -char* ex_wcs2str_alloc(const wchar_t* in_buffer, int code_page); - -// convert char** argv to wchar_t** argv. -// must use ex_free_argv() to release the returned value. -wchar_t** ex_make_wargv(int argc, char** argv); -void ex_free_wargv(int argc, wchar_t** argv); - -EX_BOOL ex_str_only_white_space(const wchar_t* src); -EX_BOOL ex_wcs_only_white_space(const char* src); - - -int ex_strformat(char* out_buf, size_t buf_size, const char* fmt, ...); -int ex_wcsformat(wchar_t* out_buf, size_t buf_size, const wchar_t* fmt, ...); - -//================================================= -// C++ Interface -//================================================= -#ifdef __cplusplus - -#include -#include - -typedef std::string ex_astr; -typedef std::wstring ex_wstr; - -typedef std::vector ex_astrs; -typedef std::vector ex_wstrs; -typedef std::vector ex_str_utf16le; - -bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); -bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); -bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); -bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); - -bool ex_only_white_space(const ex_astr& str_check); -bool ex_only_white_space(const ex_wstr& str_check); - -void ex_remove_white_space(ex_astr& str_fix, int ulFlag = EX_RSC_ALL); -void ex_remove_white_space(ex_wstr& str_fix, int ulFlag = EX_RSC_ALL); - -ex_astr& ex_replace_all(ex_astr& str, const ex_astr& old_value, const ex_astr& new_value); -ex_wstr& ex_replace_all(ex_wstr& str, const ex_wstr& old_value, const ex_wstr& new_value); - -// UTF8ַתΪUTF16-LEַ\0 -bool ex_utf8_to_utf16le(const std::string& from, ex_str_utf16le& to); - -#endif - - -#endif // __LIB_EX_STR_H__ +#ifndef __LIB_EX_STR_H__ +#define __LIB_EX_STR_H__ + +#include "ex_types.h" + +#define EX_CODEPAGE_ACP 0 +#define EX_CODEPAGE_UTF8 1 +#ifdef EX_OS_WIN32 +# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_ACP +#else +# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_UTF8 +#endif + +#define EX_RSC_BEGIN 0x01 +#define EX_RSC_END 0x02 +#define EX_RSC_ALL EX_RSC_BEGIN | EX_RSC_END + +//================================================= +// C Interface +//================================================= + +// copy a string from `source` to `target`. +// `size` is size of target buffer. +// if buffer is to small, NULL will return, but `size-1` characters have been copied. +char* ex_strcpy(char* target, size_t size, const char* source); +wchar_t* ex_wcscpy(wchar_t* target, size_t size, const wchar_t* source); + + +// dupilicate a string. +// must use ex_free() to release the returned value. +char* ex_strdup(const char* src); +wchar_t* ex_wcsdup(const wchar_t* src); + +// convert between mutli-bytes and wide char string. +// must use ex_free() to release the returned value. +wchar_t* ex_str2wcs_alloc(const char* in_buffer, int code_page); +char* ex_wcs2str_alloc(const wchar_t* in_buffer, int code_page); + +// convert char** argv to wchar_t** argv. 
+// must use ex_free_argv() to release the returned value. +wchar_t** ex_make_wargv(int argc, char** argv); +void ex_free_wargv(int argc, wchar_t** argv); + +EX_BOOL ex_str_only_white_space(const wchar_t* src); +EX_BOOL ex_wcs_only_white_space(const char* src); + + +int ex_strformat(char* out_buf, size_t buf_size, const char* fmt, ...); +int ex_wcsformat(wchar_t* out_buf, size_t buf_size, const wchar_t* fmt, ...); + +//================================================= +// C++ Interface +//================================================= +#ifdef __cplusplus + +#include +#include +//#include +#include + +typedef std::string ex_astr; +typedef std::wstring ex_wstr; +typedef std::ostringstream ex_aoss; +typedef std::wostringstream ex_woss; + +typedef std::vector ex_astrs; +typedef std::vector ex_wstrs; +//typedef std::vector ex_str_utf16le; + +bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); + +bool ex_only_white_space(const ex_astr& str_check); +bool ex_only_white_space(const ex_wstr& str_check); + +void ex_remove_white_space(ex_astr& str_fix, int ulFlag = EX_RSC_ALL); +void ex_remove_white_space(ex_wstr& str_fix, int ulFlag = EX_RSC_ALL); + +ex_astr& ex_replace_all(ex_astr& str, const ex_astr& old_value, const ex_astr& new_value); +ex_wstr& ex_replace_all(ex_wstr& str, const ex_wstr& old_value, const ex_wstr& new_value); + +class ex_str_utf16le { +public: + ex_str_utf16le(); + ~ex_str_utf16le(); + + size_t length() const; + bool from_utf8(const ex_astr& from); + + const uint16_t* c_str() const; + +protected: + std::vector m_data; +}; + +// 将UTF8字符串转换为UTF16-LE字符串(输出结果包含\0结束符) +//bool ex_utf8_to_utf16le(const ex_astr& from, ex_str_utf16le& to); + +#endif + + +#endif // __LIB_EX_STR_H__ diff --git a/common/libex/include/ex/ex_thread.h b/common/libex/include/ex/ex_thread.h index f3d4170..3948749 100644 --- a/common/libex/include/ex/ex_thread.h +++ b/common/libex/include/ex/ex_thread.h @@ -1,4 +1,4 @@ -#ifndef __EX_THREAD_H__ +#ifndef __EX_THREAD_H__ #define __EX_THREAD_H__ #include "ex_str.h" @@ -23,11 +23,11 @@ public: bool is_running(void) { return m_is_running; } - // ִ̣߳б˵run() + // 创建并启动线程(执行被重载了的run()函数) bool start(void); - // ̣߳ȴwait_timeout_ms룬wait_timeout_msΪ0޵ȴ + // 结束线程(等待wait_timeout_ms毫秒,如果wait_timeout_ms为0,则无限等待) bool stop(void); - // ֱӽ̣߳ǿɱʹã + // 直接结束线程(强杀,不建议使用) bool terminate(void); protected: @@ -52,7 +52,7 @@ protected: }; -// ߳ʹã +// 线程锁(进程内使用) class ExThreadLock { public: @@ -70,7 +70,7 @@ private: #endif }; -// ߳ +// 线程锁辅助类 class ExThreadSmartLock { public: @@ -109,12 +109,12 @@ private: }; -// ԭӲ +// 原子操作 int ex_atomic_add(volatile int* pt, int t); int ex_atomic_inc(volatile int* pt); int ex_atomic_dec(volatile int* pt); -// ߳ز +// 线程相关操作 ex_u64 ex_get_thread_id(void); #endif // __EX_THREAD_H__ diff --git a/common/libex/include/ex/ex_types.h b/common/libex/include/ex/ex_types.h index 0187b0a..3ad1a01 100644 --- a/common/libex/include/ex/ex_types.h +++ b/common/libex/include/ex/ex_types.h @@ -1,44 +1,44 @@ -#ifndef __LIB_EX_TYPE_H__ -#define __LIB_EX_TYPE_H__ - -#include "ex_platform.h" - -#include - -typedef signed char ex_i8; -typedef signed short ex_i16; - -typedef unsigned char ex_u8; -typedef unsigned short ex_u16; -typedef unsigned 
int ex_u32; -typedef unsigned long ex_ulong; - -#if defined(EX_OS_WIN32) -typedef unsigned __int64 ex_u64; -typedef signed __int64 ex_i64; -typedef wchar_t ex_utf16; -#else -typedef unsigned long long ex_u64; -typedef signed long long ex_i64; -typedef ex_i16 ex_utf16; -#endif - -typedef int EX_BOOL; -#define EX_TRUE 1 -#define EX_FALSE 0 - - -typedef std::vector ex_bin; -typedef std::vector ex_chars; - -typedef ex_u32 ex_rv; - - -#if defined(EX_OS_WIN32) -# define EX_DYLIB_HANDLE HINSTANCE -#else -# define EX_DYLIB_HANDLE void* -#endif - - -#endif // __LIB_EX_TYPE_H__ +#ifndef __LIB_EX_TYPE_H__ +#define __LIB_EX_TYPE_H__ + +#include "ex_platform.h" + +#include + +typedef signed char ex_i8; +typedef signed short ex_i16; + +typedef unsigned char ex_u8; +typedef unsigned short ex_u16; +typedef unsigned int ex_u32; +typedef unsigned long ex_ulong; + +#if defined(EX_OS_WIN32) +typedef unsigned __int64 ex_u64; +typedef signed __int64 ex_i64; +typedef wchar_t ex_utf16; +#else +typedef unsigned long long ex_u64; +typedef signed long long ex_i64; +typedef ex_i16 ex_utf16; +#endif + +typedef int EX_BOOL; +#define EX_TRUE 1 +#define EX_FALSE 0 + + +typedef std::vector ex_bin; +typedef std::vector ex_chars; + +typedef ex_u32 ex_rv; + + +#if defined(EX_OS_WIN32) +# define EX_DYLIB_HANDLE HINSTANCE +#else +# define EX_DYLIB_HANDLE void* +#endif + + +#endif // __LIB_EX_TYPE_H__ diff --git a/common/libex/include/ex/ex_util.h b/common/libex/include/ex/ex_util.h index 06b78ec..17b2556 100644 --- a/common/libex/include/ex/ex_util.h +++ b/common/libex/include/ex/ex_util.h @@ -1,55 +1,55 @@ -#ifndef __LIB_EX_UTIL_H__ -#define __LIB_EX_UTIL_H__ - -#include "ex_types.h" -#include "ex_str.h" - -#ifdef EX_OS_WIN32 -# include -//# include -//# include -// #include -#pragma comment(lib, "ws2_32.lib") -#else -// #include -# include -# include -#endif - -EX_BOOL ex_initialize(const char* lc_ctype); - -void ex_free(void* buffer); - -// haystackΪhaystacklenֽڣвneedleΪneedlelenʼַNULLʾûҵ -const ex_u8* ex_memmem(const ex_u8* haystack, size_t haystacklen, const ex_u8* needle, size_t needlelen); -void ex_mem_reverse(ex_u8* p, size_t l); - -void ex_printf(const char* fmt, ...); -void ex_wprintf(const wchar_t* fmt, ...); - -ex_u64 ex_get_tick_count(void); -void ex_sleep_ms(int ms); - -EX_BOOL ex_localtime_now(int* t, struct tm* dt); - - -FILE* ex_fopen(const ex_wstr& filename, const wchar_t* mode); -FILE* ex_fopen(const ex_astr& filename, const char* mode); - -// open a text file and read all content. -bool ex_read_text_file(const ex_wstr& file_name, ex_astr& file_content); -// open a file and write content. -bool ex_write_text_file(const ex_wstr& file_name, const ex_astr& file_content); - -EX_DYLIB_HANDLE ex_dlopen(const wchar_t* dylib_path); -void ex_dlclose(EX_DYLIB_HANDLE dylib); - - -// inet... 
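
ex_memmem, declared in the block above, is a binary-safe needle search over a byte buffer (what glibc's memmem does, but portable). A naive reference implementation of the same contract (this sketch treats an empty needle as not-found):

#include <stdio.h>
#include <string.h>

// Find `needle` (needlelen bytes) inside `haystack` (haystacklen bytes).
// Returns the start address, or NULL when absent. O(n*m), fine for small inputs.
static const unsigned char* naive_memmem(const unsigned char* haystack, size_t haystacklen,
                                         const unsigned char* needle, size_t needlelen) {
    if (needlelen == 0 || needlelen > haystacklen)
        return NULL;
    for (size_t i = 0; i + needlelen <= haystacklen; ++i)
        if (memcmp(haystack + i, needle, needlelen) == 0)
            return haystack + i;
    return NULL;
}

int main(void) {
    const unsigned char buf[] = { 0x00, 0xFF, 0x13, 0x37, 0x00 };
    const unsigned char pat[] = { 0x13, 0x37 };
    const unsigned char* hit = naive_memmem(buf, sizeof(buf), pat, sizeof(pat));
    printf("offset = %ld\n", hit ? (long)(hit - buf) : -1L);   // offset = 2
    return 0;
}
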
-int ex_ip4_name(const struct sockaddr_in* src, char* dst, size_t size); - -#define EX_IPV4_NAME_LEN 16 -#define EX_IPV6_NAME_LEN 46 -const char* ex_inet_ntop(int af, const void *src, char *dst, size_t size); - -#endif // __LIB_EX_UTIL_H__ +#ifndef __LIB_EX_UTIL_H__ +#define __LIB_EX_UTIL_H__ + +#include "ex_types.h" +#include "ex_str.h" + +#ifdef EX_OS_WIN32 +# include +//# include +//# include +// #include +#pragma comment(lib, "ws2_32.lib") +#else +// #include +# include +# include +#endif + +EX_BOOL ex_initialize(const char* lc_ctype); + +void ex_free(void* buffer); + +// 在haystack(长度为haystacklen字节)中查找needle(长度为needlelen)的起始地址,返回NULL表示没有找到 +const ex_u8* ex_memmem(const ex_u8* haystack, size_t haystacklen, const ex_u8* needle, size_t needlelen); +void ex_mem_reverse(ex_u8* p, size_t l); + +void ex_printf(const char* fmt, ...); +void ex_wprintf(const wchar_t* fmt, ...); + +ex_u64 ex_get_tick_count(void); +void ex_sleep_ms(int ms); + +EX_BOOL ex_localtime_now(int* t, struct tm* dt); + + +FILE* ex_fopen(const ex_wstr& filename, const wchar_t* mode); +FILE* ex_fopen(const ex_astr& filename, const char* mode); + +// open a text file and read all content. +bool ex_read_text_file(const ex_wstr& file_name, ex_astr& file_content); +// open a file and write content. +bool ex_write_text_file(const ex_wstr& file_name, const ex_astr& file_content); + +EX_DYLIB_HANDLE ex_dlopen(const wchar_t* dylib_path); +void ex_dlclose(EX_DYLIB_HANDLE dylib); + + +// inet... +int ex_ip4_name(const struct sockaddr_in* src, char* dst, size_t size); + +#define EX_IPV4_NAME_LEN 16 +#define EX_IPV6_NAME_LEN 46 +const char* ex_inet_ntop(int af, const void *src, char *dst, size_t size); + +#endif // __LIB_EX_UTIL_H__ diff --git a/common/libex/include/ex/ex_winsrv.h b/common/libex/include/ex/ex_winsrv.h index 91938ec..15ac6ee 100644 --- a/common/libex/include/ex/ex_winsrv.h +++ b/common/libex/include/ex/ex_winsrv.h @@ -1,4 +1,4 @@ -#ifndef __EX_WINSRV_H__ +#ifndef __EX_WINSRV_H__ #define __EX_WINSRV_H__ #include "ex_str.h" diff --git a/common/libex/src/ex_ini.cpp b/common/libex/src/ex_ini.cpp index 669b890..e471752 100644 --- a/common/libex/src/ex_ini.cpp +++ b/common/libex/src/ex_ini.cpp @@ -1,4 +1,4 @@ -#include +#include #include #include @@ -241,7 +241,7 @@ bool ExIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld) { pOffset += 3; } - // ļʹUTF8 + // 配置文件均使用UTF8编码 ex_wstr fileData; if (!ex_astr2wstr(pOffset, fileData, EX_CODEPAGE_UTF8)) return false; @@ -357,7 +357,7 @@ void ExIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/) return; } - // вκСڵֵԣȱ֮ + // 如果有不属于任何小节的值对,先保存之 if (m_dumy_sec.Count() > 0) m_dumy_sec.Save(file, codepage); @@ -415,16 +415,16 @@ ExIniSection* ExIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExi } // static function. 
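
The ExIniFile::LoadFromFile hunk above advances pOffset by 3 to skip a UTF-8 byte-order mark before decoding, since config files are expected to be UTF-8. In isolation, that check is simply (the INI content below is an illustrative example):

#include <cstdio>
#include <cstring>

// Return the payload start, skipping the UTF-8 BOM (EF BB BF) if present.
static const char* skip_utf8_bom(const char* data, size_t len) {
    static const unsigned char bom[3] = { 0xEF, 0xBB, 0xBF };
    if (len >= 3 && memcmp(data, bom, 3) == 0)
        return data + 3;
    return data;
}

int main() {
    const char raw[] = "\xEF\xBB\xBF[common]\nport=50022\n";
    printf("%s", skip_utf8_bom(raw, sizeof(raw) - 1));   // prints from "[common]"
    return 0;
}
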
-// һУֵΪ [/ֵ/ע/ʲôҲ/] -// => strKey = [section_name] -// ֵ => strKey = strValue +// 解析一行,返回值为 [节名/值对/注释/什么也不是/出错了] +// 节名 => strKey = [section_name] +// 值对 => strKey = strValue ExIniFile::PARSE_RV ExIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& strKey, ex_wstr& strValue) { - // ȥ׵Ŀո TAB + // 首先去掉行首的空格或者 TAB 控制 ex_wstr strLine(strOrigLine); ex_remove_white_space(strLine, EX_RSC_BEGIN); - // жǷΪע͡ .ini ļ ֺ';'/'#' Ϊעеĵһַ + // 判断是否为注释。 .ini 文件以 分号';'/'#' 作为注释行的第一个字符 if (';' == strLine[0] || '#' == strLine[0]) { return PARSE_COMMENT; @@ -432,7 +432,7 @@ ExIniFile::PARSE_RV ExIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& s if ('[' == strLine[0]) { - // һ(section) + // 这是一个节(section) ex_wstr::size_type startPos = strLine.find('['); ex_wstr::size_type endPos = strLine.rfind(']'); strLine.erase(endPos); @@ -443,23 +443,23 @@ ExIniFile::PARSE_RV ExIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& s } else { - // ܷҵȺ(=) key=value б𷽷 + // 看看能否找到等号(=),这是 key=value 的判别方法 ex_wstr::size_type pos = strLine.find('='); if (ex_wstr::npos == pos) { - //return PARSE_OTHER; // ûеȺ + //return PARSE_OTHER; // 没有等号 ex_remove_white_space(strLine); strKey = strLine; strValue.clear(); return PARSE_KEYVALUE; } - // ȺǰȺźķָ + // 将等号前面的与等号后面的分割 strKey.assign(strLine, 0, pos); strValue.assign(strLine, pos + 1, strLine.length() - pos); ex_remove_white_space(strKey); - // ȺźӦԭⲻӦƳհַ + // 等号后面的应该原封不动,不应该移除空白字符 ex_remove_white_space(strValue, EX_RSC_BEGIN); return PARSE_KEYVALUE; @@ -489,7 +489,7 @@ bool ExIniFile::_ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection) break; case PARSE_SECTION: { - // һ + // 创建一个节 ExIniSection* pSection = GetSection(strKey, true); if (NULL == pSection) { @@ -508,7 +508,7 @@ bool ExIniFile::_ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection) *pCurSection = &m_dumy_sec; } - // һֵ + // 创建一个值对 if (!(*pCurSection)->SetValue(strKey, strValue, true)) { bError = true; diff --git a/common/libex/src/ex_log.cpp b/common/libex/src/ex_log.cpp index 7a2f9fa..b1b8fb8 100644 --- a/common/libex/src/ex_log.cpp +++ b/common/libex/src/ex_log.cpp @@ -1,517 +1,541 @@ -#include -#include -//#include -//#include -//#include -//#include - -#ifdef EX_OS_WIN32 -# include -# include -# include -#else -//# include -//# include -#endif - -#define EX_LOG_CONTENT_MAX_LEN 2048 - -//typedef std::deque log_file_deque; - -static ExLogger* g_exlog = NULL; - -void EXLOG_USE_LOGGER(ExLogger* logger) -{ - g_exlog = logger; -} - -void EXLOG_LEVEL(int min_level) -{ - if(NULL != g_exlog) - g_exlog->min_level = min_level; -} - -void EXLOG_DEBUG(bool debug_mode) -{ - if (NULL != g_exlog) - g_exlog->debug_mode = debug_mode; -} - -void EXLOG_CONSOLE(bool output_to_console) -{ - if(NULL != g_exlog) - g_exlog->to_console = output_to_console; -} - -void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path /*= NULL*/, ex_u32 max_filesize /*= EX_LOG_FILE_MAX_SIZE*/, ex_u8 max_filecount /*= EX_LOG_FILE_MAX_COUNT*/) -{ - if(NULL == g_exlog) - return; - - ex_wstr _path; - if (NULL == log_path) - { - ex_exec_file(_path); - ex_dirname(_path); - ex_path_join(_path, false, L"log", NULL); - } - else - { - _path = log_path; - } - - g_exlog->set_log_file(_path, log_file, max_filesize, max_filecount); -} - -ExLogger::ExLogger() -{ -#ifdef EX_OS_WIN32 - console_handle = GetStdHandle(STD_OUTPUT_HANDLE); -#endif - - min_level = EX_LOG_LEVEL_INFO; - debug_mode = false; - to_console = true; - - m_file = NULL; - m_filesize = 0; -} - -ExLogger::~ExLogger() -{ - if (NULL != m_file) - { -#ifdef EX_OS_WIN32 - 
CloseHandle(m_file); -#else - fclose(m_file); -#endif - m_file = NULL; - } -} - -void ExLogger::log_a(int level, const char* fmt, va_list valist) -{ - if (NULL == fmt) - return; - - if (0 == strlen(fmt)) - return; - - char szTmp[4096] = { 0 }; - size_t offset = 0; - - if (level == EX_LOG_LEVEL_ERROR) - { - szTmp[0] = '['; - szTmp[1] = 'E'; - szTmp[2] = ']'; - szTmp[3] = ' '; - offset = 4; - } - -#ifdef EX_OS_WIN32 - vsnprintf_s(szTmp+offset, 4096-offset, 4095-offset, fmt, valist); - if(to_console) - { - if (NULL != console_handle) - { - printf_s("%s", szTmp); - fflush(stdout); - } - else - { - if(debug_mode) - OutputDebugStringA(szTmp); - } - } -#else - vsnprintf(szTmp+offset, 4095-offset, fmt, valist); - if(to_console) - { - // On linux, the stdout only output the first time output format (char or wchar_t). - // e.g.: first time you use printf(), then after that, every wprintf() not work, and vice versa. - // so we always use wprintf() to fix that. - - ex_astr tmp(szTmp); - ex_wstr _tmp; - ex_astr2wstr(tmp, _tmp); - wprintf(L"%ls", _tmp.c_str()); - fflush(stdout); - -// printf("%s", szTmp); -// fflush(stdout); - } -#endif - - write_a(szTmp); -} - -void ExLogger::log_w(int level, const wchar_t* fmt, va_list valist) -{ - if (NULL == fmt || 0 == wcslen(fmt)) - return; - - wchar_t szTmp[4096] = { 0 }; - size_t offset = 0; - - if (level == EX_LOG_LEVEL_ERROR) - { - szTmp[0] = L'['; - szTmp[1] = L'E'; - szTmp[2] = L']'; - szTmp[3] = L' '; - offset = 4; - } - -#ifdef EX_OS_WIN32 - _vsnwprintf_s(szTmp+offset, 4096-offset, 4095-offset, fmt, valist); - if(to_console) - { - if (NULL != console_handle) - { - wprintf_s(_T("%s"), szTmp); - fflush(stdout); - } - else - { - if(debug_mode) - OutputDebugStringW(szTmp); - } - } -#else - vswprintf(szTmp+offset, 4095-offset, fmt, valist); - if(to_console) - { - wprintf(L"%ls", szTmp); - fflush(stdout); - } -#endif - - write_w(szTmp); -} - -#define EX_PRINTF_XA(fn, level) \ -void fn(const char* fmt, ...) \ -{ \ - if(NULL == g_exlog) \ - return; \ - if (g_exlog->min_level > level) \ - return; \ - ExThreadSmartLock locker(g_exlog->lock); \ - va_list valist; \ - va_start(valist, fmt); \ - g_exlog->log_a(level, fmt, valist); \ - va_end(valist); \ -} - -#define EX_PRINTF_XW(fn, level) \ -void fn(const wchar_t* fmt, ...) \ -{ \ - if(NULL == g_exlog) \ - return; \ - if (g_exlog->min_level > level) \ - return; \ - ExThreadSmartLock locker(g_exlog->lock); \ - va_list valist; \ - va_start(valist, fmt); \ - g_exlog->log_w(level, fmt, valist); \ - va_end(valist); \ -} - -EX_PRINTF_XA(ex_printf_d, EX_LOG_LEVEL_DEBUG) -EX_PRINTF_XA(ex_printf_v, EX_LOG_LEVEL_VERBOSE) -EX_PRINTF_XA(ex_printf_i, EX_LOG_LEVEL_INFO) -EX_PRINTF_XA(ex_printf_w, EX_LOG_LEVEL_WARN) -EX_PRINTF_XA(ex_printf_e, EX_LOG_LEVEL_ERROR) - -EX_PRINTF_XW(ex_printf_d, EX_LOG_LEVEL_DEBUG) -EX_PRINTF_XW(ex_printf_v, EX_LOG_LEVEL_VERBOSE) -EX_PRINTF_XW(ex_printf_i, EX_LOG_LEVEL_INFO) -EX_PRINTF_XW(ex_printf_w, EX_LOG_LEVEL_WARN) -EX_PRINTF_XW(ex_printf_e, EX_LOG_LEVEL_ERROR) - - -#ifdef EX_OS_WIN32 -void ex_printf_e_lasterror(const char* fmt, ...) 
-{ - ExThreadSmartLock locker(g_exlog->lock); - - va_list valist; - va_start(valist, fmt); - g_exlog->log_a(EX_LOG_LEVEL_ERROR, fmt, valist); - va_end(valist); - - //========================================= - - LPVOID lpMsgBuf; - DWORD dw = GetLastError(); - - FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - (LPSTR)&lpMsgBuf, 0, NULL); - - ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); - LocalFree(lpMsgBuf); -} - -void ex_printf_e_lasterror(const wchar_t* fmt, ...) -{ - ExThreadSmartLock locker(g_exlog->lock); - - va_list valist; - va_start(valist, fmt); - g_exlog->log_w(EX_LOG_LEVEL_ERROR, fmt, valist); - va_end(valist); - - //========================================= - - LPVOID lpMsgBuf; - DWORD dw = GetLastError(); - - FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - (LPSTR)&lpMsgBuf, 0, NULL); - - ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); - LocalFree(lpMsgBuf); -} -#endif - -void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...) -{ - if(NULL == g_exlog) - return; - if (!g_exlog->debug_mode) - return; - - ExThreadSmartLock locker(g_exlog->lock); - - va_list valist; - va_start(valist, fmt); - g_exlog->log_a(EX_LOG_LEVEL_DEBUG, fmt, valist); - va_end(valist); - - ex_printf_d(" (%d/0x%02x Bytes)\n", bin_size, bin_size); - - const ex_u8* line = bin_data; - size_t thisline = 0; - size_t offset = 0; - unsigned int i = 0; - - char szTmp[128] = { 0 }; - size_t _offset = 0; - - while (offset < bin_size) - { - memset(szTmp, 0, 128); - _offset = 0; - - snprintf(szTmp + _offset, 128 - _offset, "%06x ", (int)offset); - _offset += 8; - - thisline = bin_size - offset; - if (thisline > 16) - thisline = 16; - - for (i = 0; i < thisline; i++) - { - snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]); - _offset += 3; - } - - snprintf(szTmp + _offset, 128 - _offset, " "); - _offset += 2; - - for (; i < 16; i++) - { - snprintf(szTmp + _offset, 128 - _offset, " "); - _offset += 3; - } - - for (i = 0; i < thisline; i++) - { - snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? 
line[i] : '.'); - _offset += 1; - } - - snprintf(szTmp + _offset, 128 - _offset, "\n"); - _offset += 1; - - ex_printf_d("%s", szTmp); - - offset += thisline; - line += thisline; - } - - fflush(stdout); -} - -bool ExLogger::set_log_file(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count) -{ - m_max_filesize = max_filesize; - m_max_count = max_count; - - m_filename = log_name; - - m_path = log_path; - ex_abspath(m_path); - - ex_mkdirs(m_path); - - m_fullname = m_path; - ex_path_join(m_fullname, false, log_name.c_str(), NULL); - - return _open_file(); -} - - -bool ExLogger::_open_file() -{ - if (m_file) - { -#ifdef EX_OS_WIN32 - CloseHandle(m_file); -#else - fclose(m_file); -#endif - m_file = NULL; - } - -#ifdef EX_OS_WIN32 - // ע⣺ʹ CreateFile() ־ļʹFILEָ޷ݸ̬в - m_file = CreateFileW(m_fullname.c_str(), GENERIC_WRITE, FILE_SHARE_READ, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); - if (INVALID_HANDLE_VALUE == m_file) - { - m_file = NULL; - return false; - } - - SetFilePointer(m_file, 0, NULL, FILE_END); - m_filesize = GetFileSize(m_file, NULL); -#else - ex_astr _fullname; - ex_wstr2astr(m_fullname, _fullname); - m_file = fopen(_fullname.c_str(), "a"); - - if (NULL == m_file) - { - return false; - } - - fseek(m_file, 0, SEEK_END); - m_filesize = (ex_u32)ftell(m_file); -#endif - - return _rotate_file(); -} - -bool ExLogger::_rotate_file(void) -{ - if (m_filesize < m_max_filesize) - return true; - - if (m_file) - { -#ifdef EX_OS_WIN32 - CloseHandle(m_file); -#else - fclose(m_file); -#endif - m_file = NULL; - } - - // make a name for backup file. - wchar_t _tmpname[64] = { 0 }; -#ifdef EX_OS_WIN32 - SYSTEMTIME st; - GetLocalTime(&st); - swprintf_s(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); -#else - time_t timep; - time(&timep); - struct tm *p = localtime(&timep); - if (p == NULL) - return false; - - ex_wcsformat(_tmpname, 64, L"%ls.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); -#endif - - ex_wstr _new_fullname(m_path); - ex_path_join(_new_fullname, false, _tmpname, NULL); - -#ifdef EX_OS_WIN32 - if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str())) - { - EXLOGE_WIN("can not rename log file, remove old one and try again."); - DeleteFileW(_new_fullname.c_str()); - if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str())) - return false; - } -#else - ex_astr _a_fullname; - ex_astr _a_new_fullname; - ex_wstr2astr(m_fullname, _a_fullname); - ex_wstr2astr(_new_fullname, _a_new_fullname); - - if (rename(_a_fullname.c_str(), _a_new_fullname.c_str()) != 0) - { - remove(_a_new_fullname.c_str()); - if (0 != (rename(_a_fullname.c_str(), _a_new_fullname.c_str()))) - return false; - } -#endif - - return _open_file(); -} - -bool ExLogger::write_a(const char* buf) -{ - if (NULL == m_file) - return false; - - size_t len = strlen(buf); - - if (len > EX_LOG_CONTENT_MAX_LEN) - return false; - - char szTime[100] = { 0 }; -#ifdef EX_OS_WIN32 - SYSTEMTIME st; - GetLocalTime(&st); - sprintf_s(szTime, 100, "[%04d-%02d-%02d %02d:%02d:%02d] ", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); - - int lenTime = strlen(szTime); - DWORD dwWritten = 0; - WriteFile(m_file, szTime, lenTime, &dwWritten, NULL); - m_filesize += lenTime; - WriteFile(m_file, buf, len, &dwWritten, NULL); - m_filesize += len; - FlushFileBuffers(m_file); -#else - time_t timep; - struct tm *p; - time(&timep); - p = 
localtime(&timep); - if (p == NULL) - return false; - sprintf(szTime, "[%04d-%02d-%02d %02d:%02d:%02d] ", p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); - - size_t lenTime = strlen(szTime); - fwrite(szTime, lenTime, 1, m_file); - m_filesize += lenTime; - fwrite(buf, len, 1, m_file); - m_filesize += len; - fflush(m_file); -#endif - - - return _rotate_file(); -} - -bool ExLogger::write_w(const wchar_t* buf) -{ - ex_astr _buf; - ex_wstr2astr(buf, _buf, EX_CODEPAGE_UTF8); - return write_a(_buf.c_str()); -} +#include +#include +//#include +//#include +//#include +//#include + +#ifdef EX_OS_WIN32 +# include +# include +# include +#else +//# include +//# include +#endif + +#define EX_LOG_CONTENT_MAX_LEN 2048 + +//typedef std::deque log_file_deque; + +static ExLogger* g_exlog = NULL; + +void EXLOG_USE_LOGGER(ExLogger* logger) +{ + g_exlog = logger; +} + +void EXLOG_LEVEL(int min_level) +{ + if(NULL != g_exlog) + g_exlog->min_level = min_level; +} + +void EXLOG_DEBUG(bool debug_mode) +{ + if (NULL != g_exlog) + g_exlog->debug_mode = debug_mode; +} + +void EXLOG_CONSOLE(bool output_to_console) +{ + if(NULL != g_exlog) + g_exlog->to_console = output_to_console; +} + +void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path /*= NULL*/, ex_u32 max_filesize /*= EX_LOG_FILE_MAX_SIZE*/, ex_u8 max_filecount /*= EX_LOG_FILE_MAX_COUNT*/) +{ + if(NULL == g_exlog) + return; + + ex_wstr _path; + if (NULL == log_path) + { + ex_exec_file(_path); + ex_dirname(_path); + ex_path_join(_path, false, L"log", NULL); + } + else + { + _path = log_path; + } + + g_exlog->set_log_file(_path, log_file, max_filesize, max_filecount); +} + +ExLogger::ExLogger() +{ +#ifdef EX_OS_WIN32 + console_handle = GetStdHandle(STD_OUTPUT_HANDLE); +#endif + + min_level = EX_LOG_LEVEL_INFO; + debug_mode = false; + to_console = true; + + m_file = NULL; + m_filesize = 0; +} + +ExLogger::~ExLogger() +{ + if (NULL != m_file) + { +#ifdef EX_OS_WIN32 + CloseHandle(m_file); +#else + fclose(m_file); +#endif + m_file = NULL; + } +} + +void ExLogger::log_a(int level, const char* fmt, va_list valist) +{ + if (NULL == fmt) + return; + + if (0 == strlen(fmt)) + return; + + char szTmp[4096] = { 0 }; + size_t offset = 0; + + if (level == EX_LOG_LEVEL_ERROR) + { + szTmp[0] = '['; + szTmp[1] = 'E'; + szTmp[2] = ']'; + szTmp[3] = ' '; + offset = 4; + } + +#ifdef EX_OS_WIN32 + vsnprintf_s(szTmp+offset, 4096-offset, 4095-offset, fmt, valist); + if(to_console) + { + if (NULL != console_handle) + { + printf_s("%s", szTmp); + fflush(stdout); + } + else + { + if(debug_mode) + OutputDebugStringA(szTmp); + } + } +#else + vsnprintf(szTmp+offset, 4095-offset, fmt, valist); + if(to_console) + { + // On linux, the stdout only output the first time output format (char or wchar_t). + // e.g.: first time you use printf(), then after that, every wprintf() not work, and vice versa. + // so we always use wprintf() to fix that. 
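
The wprintf-only policy in the comment above comes down to C stream orientation: the first byte- or wide-oriented operation on stdout fixes the stream's orientation (C99 fwide), and calls from the other family then fail on that stream. A small demonstration (the behavior shown is glibc's):

#include <stdio.h>
#include <wchar.h>

int main(void) {
    // Orientation is undecided until the first I/O call...
    printf("byte-oriented output\n");          // stdout is now byte-oriented

    // ...after which wide output on the same stream fails.
    int rv = wprintf(L"wide output\n");        // negative return value here
    fprintf(stderr, "wprintf returned %d, fwide says %d\n", rv, fwide(stdout, 0));
    return 0;
}
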
+ + ex_astr tmp(szTmp); + ex_wstr _tmp; + ex_astr2wstr(tmp, _tmp); + wprintf(L"%ls", _tmp.c_str()); + fflush(stdout); + +// printf("%s", szTmp); +// fflush(stdout); + } +#endif + + write_a(szTmp); +} + +void ExLogger::log_w(int level, const wchar_t* fmt, va_list valist) +{ + if (NULL == fmt || 0 == wcslen(fmt)) + return; + + wchar_t szTmp[4096] = { 0 }; + size_t offset = 0; + + if (level == EX_LOG_LEVEL_ERROR) + { + szTmp[0] = L'['; + szTmp[1] = L'E'; + szTmp[2] = L']'; + szTmp[3] = L' '; + offset = 4; + } + +#ifdef EX_OS_WIN32 + _vsnwprintf_s(szTmp+offset, 4096-offset, 4095-offset, fmt, valist); + if(to_console) + { + if (NULL != console_handle) + { + wprintf_s(_T("%s"), szTmp); + fflush(stdout); + } + else + { + if(debug_mode) + OutputDebugStringW(szTmp); + } + } +#else + vswprintf(szTmp+offset, 4095-offset, fmt, valist); + if(to_console) + { + wprintf(L"%ls", szTmp); + fflush(stdout); + } +#endif + + write_w(szTmp); +} + +#define EX_PRINTF_XA(fn, level) \ +void fn(const char* fmt, ...) \ +{ \ + if(NULL == g_exlog) \ + return; \ + if (g_exlog->min_level > level) \ + return; \ + ExThreadSmartLock locker(g_exlog->lock); \ + va_list valist; \ + va_start(valist, fmt); \ + g_exlog->log_a(level, fmt, valist); \ + va_end(valist); \ +} + +#define EX_PRINTF_XW(fn, level) \ +void fn(const wchar_t* fmt, ...) \ +{ \ + if(NULL == g_exlog) \ + return; \ + if (g_exlog->min_level > level) \ + return; \ + ExThreadSmartLock locker(g_exlog->lock); \ + va_list valist; \ + va_start(valist, fmt); \ + g_exlog->log_w(level, fmt, valist); \ + va_end(valist); \ +} + +EX_PRINTF_XA(ex_printf_d, EX_LOG_LEVEL_DEBUG) +EX_PRINTF_XA(ex_printf_v, EX_LOG_LEVEL_VERBOSE) +EX_PRINTF_XA(ex_printf_i, EX_LOG_LEVEL_INFO) +EX_PRINTF_XA(ex_printf_w, EX_LOG_LEVEL_WARN) +EX_PRINTF_XA(ex_printf_e, EX_LOG_LEVEL_ERROR) + +EX_PRINTF_XW(ex_printf_d, EX_LOG_LEVEL_DEBUG) +EX_PRINTF_XW(ex_printf_v, EX_LOG_LEVEL_VERBOSE) +EX_PRINTF_XW(ex_printf_i, EX_LOG_LEVEL_INFO) +EX_PRINTF_XW(ex_printf_w, EX_LOG_LEVEL_WARN) +EX_PRINTF_XW(ex_printf_e, EX_LOG_LEVEL_ERROR) + + +#ifdef EX_OS_WIN32 +void ex_printf_e_lasterror(const char* fmt, ...) +{ + ExThreadSmartLock locker(g_exlog->lock); + + va_list valist; + va_start(valist, fmt); + g_exlog->log_a(EX_LOG_LEVEL_ERROR, fmt, valist); + va_end(valist); + + //========================================= + + LPVOID lpMsgBuf; + DWORD dw = GetLastError(); + + FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + (LPSTR)&lpMsgBuf, 0, NULL); + + ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); + LocalFree(lpMsgBuf); +} + +void ex_printf_e_lasterror(const wchar_t* fmt, ...) +{ + ExThreadSmartLock locker(g_exlog->lock); + + va_list valist; + va_start(valist, fmt); + g_exlog->log_w(EX_LOG_LEVEL_ERROR, fmt, valist); + va_end(valist); + + //========================================= + + LPVOID lpMsgBuf; + DWORD dw = GetLastError(); + + FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + (LPSTR)&lpMsgBuf, 0, NULL); + + ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); + LocalFree(lpMsgBuf); +} +#endif + +void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...) 
+{ + if(NULL == g_exlog) + return; + if (!g_exlog->debug_mode) + return; + + ExThreadSmartLock locker(g_exlog->lock); + + + + if (NULL == fmt) + return; + if (0 == strlen(fmt)) + return; + + char szTmp[128] = { 0 }; +// size_t offset = 0; + + va_list valist; + va_start(valist, fmt); + +#ifdef EX_OS_WIN32 + vsnprintf_s(szTmp, 128, 127, fmt, valist); +#else + vsnprintf(szTmp, 127, fmt, valist); +#endif + va_end(valist); + +// +// +// +// +// va_list valist; +// va_start(valist, fmt); +// g_exlog->log_a(EX_LOG_LEVEL_DEBUG, fmt, valist); +// va_end(valist); +// + ex_printf_d("%s (%d/0x%02x Bytes)\n", szTmp, bin_size, bin_size); + + const ex_u8* line = bin_data; + size_t thisline = 0; + size_t offset = 0; + unsigned int i = 0; + +// char szTmp[128] = { 0 }; + size_t _offset = 0; + + while (offset < bin_size) + { + memset(szTmp, 0, 128); + _offset = 0; + + snprintf(szTmp + _offset, 128 - _offset, "%06x ", (int)offset); + _offset += 8; + + thisline = bin_size - offset; + if (thisline > 16) + thisline = 16; + + for (i = 0; i < thisline; i++) + { + snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]); + _offset += 3; + } + + snprintf(szTmp + _offset, 128 - _offset, " "); + _offset += 2; + + for (; i < 16; i++) + { + snprintf(szTmp + _offset, 128 - _offset, " "); + _offset += 3; + } + + for (i = 0; i < thisline; i++) + { + snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? line[i] : '.'); + _offset += 1; + } + + snprintf(szTmp + _offset, 128 - _offset, "\n"); + _offset += 1; + + ex_printf_d("%s", szTmp); + + offset += thisline; + line += thisline; + } + + fflush(stdout); +} + +bool ExLogger::set_log_file(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count) +{ + m_max_filesize = max_filesize; + m_max_count = max_count; + + m_filename = log_name; + + m_path = log_path; + ex_abspath(m_path); + + ex_mkdirs(m_path); + + m_fullname = m_path; + ex_path_join(m_fullname, false, log_name.c_str(), NULL); + + return _open_file(); +} + + +bool ExLogger::_open_file() +{ + if (m_file) + { +#ifdef EX_OS_WIN32 + CloseHandle(m_file); +#else + fclose(m_file); +#endif + m_file = NULL; + } + +#ifdef EX_OS_WIN32 + // 注意:这里必须使用 CreateFile() 来打开日志文件,使用FILE指针无法传递给动态库进行操作。 + m_file = CreateFileW(m_fullname.c_str(), GENERIC_WRITE, FILE_SHARE_READ, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); + if (INVALID_HANDLE_VALUE == m_file) + { + m_file = NULL; + return false; + } + + SetFilePointer(m_file, 0, NULL, FILE_END); + m_filesize = GetFileSize(m_file, NULL); +#else + ex_astr _fullname; + ex_wstr2astr(m_fullname, _fullname); + m_file = fopen(_fullname.c_str(), "a"); + + if (NULL == m_file) + { + return false; + } + + fseek(m_file, 0, SEEK_END); + m_filesize = (ex_u32)ftell(m_file); +#endif + + return _rotate_file(); +} + +bool ExLogger::_rotate_file(void) +{ + if (m_filesize < m_max_filesize) + return true; + + if (m_file) + { +#ifdef EX_OS_WIN32 + CloseHandle(m_file); +#else + fclose(m_file); +#endif + m_file = NULL; + } + + // make a name for backup file. 
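
_rotate_file above stamps the rotated-out file as <name>.YYYYMMDDhhmmss.bak on both platforms; the same name can be produced portably with strftime, e.g.:

#include <stdio.h>
#include <time.h>

// Build "<logname>.YYYYMMDDhhmmss.bak" for the rotated-out file.
static int backup_name(char* out, size_t size, const char* logname) {
    char stamp[16];
    time_t now = time(NULL);
    struct tm* lt = localtime(&now);
    if (lt == NULL || strftime(stamp, sizeof(stamp), "%Y%m%d%H%M%S", lt) == 0)
        return -1;
    return snprintf(out, size, "%s.%s.bak", logname, stamp);
}

int main(void) {
    char name[128];
    if (backup_name(name, sizeof(name), "tp-assist.log") > 0)
        printf("%s\n", name);   // e.g. tp-assist.log.20191105093000.bak
    return 0;
}
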
+ wchar_t _tmpname[64] = { 0 }; +#ifdef EX_OS_WIN32 + SYSTEMTIME st; + GetLocalTime(&st); + swprintf_s(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); +#else + time_t timep; + time(&timep); + struct tm *p = localtime(&timep); + if (p == NULL) + return false; + + ex_wcsformat(_tmpname, 64, L"%ls.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); +#endif + + ex_wstr _new_fullname(m_path); + ex_path_join(_new_fullname, false, _tmpname, NULL); + +#ifdef EX_OS_WIN32 + if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str())) + { + EXLOGE_WIN("can not rename log file, remove old one and try again."); + DeleteFileW(_new_fullname.c_str()); + if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str())) + return false; + } +#else + ex_astr _a_fullname; + ex_astr _a_new_fullname; + ex_wstr2astr(m_fullname, _a_fullname); + ex_wstr2astr(_new_fullname, _a_new_fullname); + + if (rename(_a_fullname.c_str(), _a_new_fullname.c_str()) != 0) + { + remove(_a_new_fullname.c_str()); + if (0 != (rename(_a_fullname.c_str(), _a_new_fullname.c_str()))) + return false; + } +#endif + + return _open_file(); +} + +bool ExLogger::write_a(const char* buf) +{ + if (NULL == m_file) + return false; + + size_t len = strlen(buf); + + if (len > EX_LOG_CONTENT_MAX_LEN) + return false; + + char szTime[100] = { 0 }; +#ifdef EX_OS_WIN32 + SYSTEMTIME st; + GetLocalTime(&st); + sprintf_s(szTime, 100, "[%04d-%02d-%02d %02d:%02d:%02d] ", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); + + int lenTime = strlen(szTime); + DWORD dwWritten = 0; + WriteFile(m_file, szTime, lenTime, &dwWritten, NULL); + m_filesize += lenTime; + WriteFile(m_file, buf, len, &dwWritten, NULL); + m_filesize += len; + FlushFileBuffers(m_file); +#else + time_t timep; + struct tm *p; + time(&timep); + p = localtime(&timep); + if (p == NULL) + return false; + sprintf(szTime, "[%04d-%02d-%02d %02d:%02d:%02d] ", p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); + + size_t lenTime = strlen(szTime); + fwrite(szTime, lenTime, 1, m_file); + m_filesize += lenTime; + fwrite(buf, len, 1, m_file); + m_filesize += len; + fflush(m_file); +#endif + + + return _rotate_file(); +} + +bool ExLogger::write_w(const wchar_t* buf) +{ + ex_astr _buf; + ex_wstr2astr(buf, _buf, EX_CODEPAGE_UTF8); + return write_a(_buf.c_str()); +} diff --git a/common/libex/src/ex_path.cpp b/common/libex/src/ex_path.cpp index fd3e7f4..4ac23d5 100644 --- a/common/libex/src/ex_path.cpp +++ b/common/libex/src/ex_path.cpp @@ -1,4 +1,4 @@ -#include +#include #include #include diff --git a/common/libex/src/ex_str.cpp b/common/libex/src/ex_str.cpp index a3fac76..b88fabc 100644 --- a/common/libex/src/ex_str.cpp +++ b/common/libex/src/ex_str.cpp @@ -1,855 +1,882 @@ -#include -#include -#include - -char* ex_strcpy(char* target, size_t size, const char* source) -{ - if (target == source) - return target; - -#ifdef EX_OS_WIN32 - if (SUCCEEDED(StringCchCopyA(target, size, source))) - return target; - else - return NULL; -#else - size_t len = strlen(source); - if (size > len) - { - return strcpy(target, source); - } - else - { - memmove(target, source, size - 1); - return NULL; - } -#endif -} - -wchar_t* ex_wcscpy(wchar_t* target, size_t size, const wchar_t* source) -{ - if (target == source) - return target; - -#ifdef EX_OS_WIN32 - if (SUCCEEDED(StringCchCopyW(target, size, source))) - return target; - else - 
return NULL; -#else - size_t len = wcslen(source); - if (size > len) - { - return wcscpy(target, source); - } - else - { - memmove(target, source, (size - 1)*sizeof(wchar_t)); - return NULL; - } -#endif -} - -char* ex_strdup(const char* src) -{ - if (NULL == src) - return NULL; - size_t len = strlen(src) + 1; - char* ret = (char*)calloc(1, len); - memcpy(ret, src, len); - return ret; -} - -wchar_t* ex_wcsdup(const wchar_t* src) -{ - if (NULL == src) - return NULL; - size_t len = wcslen(src) + 1; - wchar_t* ret = (wchar_t*)calloc(sizeof(wchar_t), len); - memcpy(ret, src, sizeof(wchar_t)*len); - return ret; -} - -wchar_t* ex_str2wcs_alloc(const char* in_buffer, int code_page) -{ - wchar_t* out_buffer = NULL; -#ifdef EX_OS_WIN32 - int wlen = 0; - UINT _cp = 0; - if (code_page == EX_CODEPAGE_ACP) - _cp = CP_ACP; - else if (code_page == EX_CODEPAGE_UTF8) - _cp = CP_UTF8; - - wlen = MultiByteToWideChar(_cp, 0, in_buffer, -1, NULL, 0); - if (0 == wlen) - return NULL; - - out_buffer = (wchar_t*)calloc(wlen + 1, sizeof(wchar_t)); - if (NULL == out_buffer) - return NULL; - - wlen = MultiByteToWideChar(_cp, 0, in_buffer, -1, out_buffer, wlen); - if (0 == wlen) - { - free(out_buffer); - return NULL; - } - -#else - size_t wlen = 0; - wlen = mbstowcs(NULL, in_buffer, 0); - if (wlen <= 0) - return NULL; - - out_buffer = (wchar_t*)calloc(wlen + 1, sizeof(wchar_t)); - if (NULL == out_buffer) - return NULL; - - wlen = mbstowcs(out_buffer, in_buffer, wlen); - if (wlen <= 0) - { - free(out_buffer); - return NULL; - } - -#endif - - return out_buffer; -} - - -char* ex_wcs2str_alloc(const wchar_t* in_buffer, int code_page) -{ - char* out_buffer = NULL; - - if(NULL == in_buffer) - return NULL; - -#ifdef EX_OS_WIN32 - int len = 0; - UINT _cp = 0; - if (code_page == EX_CODEPAGE_ACP) - _cp = CP_ACP; - else if (code_page == EX_CODEPAGE_UTF8) - _cp = CP_UTF8; - - len = WideCharToMultiByte(_cp, 0, in_buffer, -1, NULL, 0, NULL, NULL); - if (0 == len) - return NULL; - - out_buffer = (char*)calloc(len + 1, sizeof(char)); - if (NULL == out_buffer) - return NULL; - - len = WideCharToMultiByte(_cp, 0, in_buffer, -1, out_buffer, len, NULL, NULL); - if (0 == len) - { - free(out_buffer); - return NULL; - } - -#else - size_t len = 0; - len = wcstombs(NULL, in_buffer, 0); - if (len <= 0) - return NULL; - - out_buffer = (char*)calloc(len + 1, sizeof(char)); - if (NULL == out_buffer) - return NULL; - - len = wcstombs(out_buffer, in_buffer, len); - if (len <= 0) - { - free(out_buffer); - return NULL; - } - -#endif - - return out_buffer; -} - -wchar_t** ex_make_wargv(int argc, char** argv) -{ - int i = 0; - wchar_t** ret = NULL; - - ret = (wchar_t**)calloc(argc + 1, sizeof(wchar_t*)); - if (!ret) - { - return NULL; - } - - for (i = 0; i < argc; ++i) - { - ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_DEFAULT); - if (NULL == ret[i]) - goto err; - } - - return ret; - -err: - ex_free_wargv(argc, ret); - return NULL; -} - -void ex_free_wargv(int argc, wchar_t** argv) -{ - int i = 0; - for (i = 0; i < argc; ++i) - free(argv[i]); - - free(argv); -} - -EX_BOOL ex_str_only_white_space(const wchar_t* src) -{ - if (ex_only_white_space(src)) - return EX_TRUE; - else - return EX_FALSE; -} - -EX_BOOL ex_wcs_only_white_space(const char* src) -{ - if (ex_only_white_space(src)) - return EX_TRUE; - else - return EX_FALSE; -} - -int ex_strformat(char* out_buf, size_t buf_size, const char* fmt, ...) 
-{ - int ret = 0; - va_list valist; - va_start(valist, fmt); - //_ts_printf_a(level, EX_COLOR_BLACK, fmt, valist); -#ifdef EX_OS_WIN32 - ret = vsnprintf(out_buf, buf_size, fmt, valist); -#else - ret = vsprintf(out_buf, fmt, valist); -#endif - va_end(valist); - return ret; -} - -int ex_wcsformat(wchar_t* out_buf, size_t buf_size, const wchar_t* fmt, ...) -{ - int ret = 0; - va_list valist; - va_start(valist, fmt); - //_ts_printf_a(level, EX_COLOR_BLACK, fmt, valist); -#ifdef EX_OS_WIN32 - //ret = vsnprintf(out_buf, buf_size, fmt, valist); - ret = _vsnwprintf_s(out_buf, buf_size, buf_size, fmt, valist); -#else - //ret = vsprintf(out_buf, fmt, valist); - ret = vswprintf(out_buf, buf_size, fmt, valist); -#endif - va_end(valist); - return ret; -} - - -#ifdef __cplusplus -bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) -{ - return ex_wstr2astr(in_str.c_str(), out_str, code_page); -} - -bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) -{ - char* astr = ex_wcs2str_alloc(in_str, code_page); - if (NULL == astr) - return false; - - out_str = astr; - ex_free(astr); - return true; -} - -bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) -{ - return ex_astr2wstr(in_str.c_str(), out_str, code_page); -} - -bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) -{ - wchar_t* wstr = ex_str2wcs_alloc(in_str, code_page); - if (NULL == wstr) - return false; - - out_str = wstr; - ex_free(wstr); - return true; -} - -bool ex_only_white_space(const ex_astr& str_check) -{ - ex_astr::size_type pos = 0; - ex_astr strFilter(" \t\r\n"); - pos = str_check.find_first_not_of(strFilter); - if (ex_astr::npos == pos) - return true; - else - return false; -} - -bool ex_only_white_space(const ex_wstr& str_check) -{ - ex_wstr::size_type pos = 0; - ex_wstr strFilter(L" \t\r\n"); - pos = str_check.find_first_not_of(strFilter); - if (ex_wstr::npos == pos) - return true; - else - return false; -} - -void ex_remove_white_space(ex_astr& str_fix, int ulFlag /*= EX_RSC_ALL*/) -{ - ex_astr::size_type pos = 0; - ex_astr strFilter(" \t\r\n"); - - if (ulFlag & EX_RSC_BEGIN) - { - pos = str_fix.find_first_not_of(strFilter); - if (ex_astr::npos != pos) - str_fix.erase(0, pos); - // FIXME - } - if (ulFlag & EX_RSC_END) - { - pos = str_fix.find_last_not_of(strFilter); - if (ex_astr::npos != pos) - str_fix.erase(pos + 1); - // FIXME - } -} - -void ex_remove_white_space(ex_wstr& str_fix, int ulFlag /*= EX_RSC_ALL*/) -{ - ex_wstr::size_type pos = 0; - ex_wstr strFilter(L" \t\r\n"); - - if (ulFlag & EX_RSC_BEGIN) - { - pos = str_fix.find_first_not_of(strFilter); - if (ex_wstr::npos != pos) - str_fix.erase(0, pos); - // FIXME - } - if (ulFlag & EX_RSC_END) - { - pos = str_fix.find_last_not_of(strFilter); - if (ex_wstr::npos != pos) - str_fix.erase(pos + 1); - // FIXME - } -} - -ex_astr& ex_replace_all(ex_astr& str, const ex_astr& old_value, const ex_astr& new_value) -{ - for (ex_astr::size_type pos(0); pos != ex_astr::npos; pos += new_value.length()) - { - if ((pos = str.find(old_value, pos)) != ex_astr::npos) - str.replace(pos, old_value.length(), new_value); - else - break; - } - - return str; -} - -ex_wstr& ex_replace_all(ex_wstr& str, const ex_wstr& old_value, const ex_wstr& new_value) -{ - for (ex_wstr::size_type pos(0); pos != ex_wstr::npos; pos += new_value.length()) - { - if ((pos = str.find(old_value, pos)) != ex_wstr::npos) - str.replace(pos, 
old_value.length(), new_value); - else - break; - } - - return str; -} - - - -#ifndef EX_OS_WIN32 - -#define BYTE ex_u8 -#define DWORD ex_u32 -#define WCHAR ex_i16 -#define LPWSTR WCHAR* -#define BOOL int -#define TRUE 1 -#define FALSE 0 -#define UINT unsigned int -#define LPCSTR const char* -#define CP_UTF8 1 - -typedef enum -{ - conversionOK, /* conversion successful */ - sourceExhausted, /* partial character in source, but hit end */ - targetExhausted, /* insuff. room in target for conversion */ - sourceIllegal /* source sequence is illegal/malformed */ -} ConversionResult; - -typedef enum -{ - strictConversion = 0, - lenientConversion -} ConversionFlags; - -static const char trailingBytesForUTF8[256] = -{ - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, - 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 -}; - -static const DWORD offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, 0x03C82080UL, 0xFA082080UL, 0x82082080UL -}; - -static const BYTE firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; - -static const int halfShift = 10; /* used for shifting by 10 bits */ - -static const DWORD halfBase = 0x0010000UL; -static const DWORD halfMask = 0x3FFUL; - -#define UNI_SUR_HIGH_START (DWORD)0xD800 -#define UNI_SUR_HIGH_END (DWORD)0xDBFF -#define UNI_SUR_LOW_START (DWORD)0xDC00 -#define UNI_SUR_LOW_END (DWORD)0xDFFF - -#define UNI_REPLACEMENT_CHAR (DWORD)0x0000FFFD -#define UNI_MAX_BMP (DWORD)0x0000FFFF -#define UNI_MAX_UTF16 (DWORD)0x0010FFFF -#define UNI_MAX_UTF32 (DWORD)0x7FFFFFFF -#define UNI_MAX_LEGAL_UTF32 (DWORD)0x0010FFFF - - -static ConversionResult ConvertUTF16toUTF8(const WCHAR** sourceStart, const WCHAR* sourceEnd, BYTE** targetStart, BYTE* targetEnd, ConversionFlags flags) -{ - BYTE* target; - const WCHAR* source; - BOOL computeLength; - ConversionResult result; - computeLength = (!targetEnd) ? TRUE : FALSE; - source = *sourceStart; - target = *targetStart; - result = conversionOK; - - while (source < sourceEnd) - { - DWORD ch; - unsigned short bytesToWrite = 0; - const DWORD byteMask = 0xBF; - const DWORD byteMark = 0x80; - const WCHAR* oldSource = source; /* In case we have to back up because of target overflow. */ - ch = *source++; - - /* If we have a surrogate pair, convert to UTF32 first. */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) - { - /* If the 16 bits following the high surrogate are in the source buffer... */ - if (source < sourceEnd) - { - DWORD ch2 = *source; - - /* If it's a low surrogate, convert to UTF32. */ - if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) - { - ch = ((ch - UNI_SUR_HIGH_START) << halfShift) - + (ch2 - UNI_SUR_LOW_START) + halfBase; - ++source; - } - else if (flags == strictConversion) - { - /* it's an unpaired high surrogate */ - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } - else - { - /* We don't have the 16 bits following the high surrogate. 
*/ - --source; /* return to the high surrogate */ - result = sourceExhausted; - break; - } - } - else if (flags == strictConversion) - { - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) - { - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } - - /* Figure out how many bytes the result will require */ - if (ch < (DWORD)0x80) - { - bytesToWrite = 1; - } - else if (ch < (DWORD)0x800) - { - bytesToWrite = 2; - } - else if (ch < (DWORD)0x10000) - { - bytesToWrite = 3; - } - else if (ch < (DWORD)0x110000) - { - bytesToWrite = 4; - } - else - { - bytesToWrite = 3; - ch = UNI_REPLACEMENT_CHAR; - } - - target += bytesToWrite; - - if ((target > targetEnd) && (!computeLength)) - { - source = oldSource; /* Back up source pointer! */ - target -= bytesToWrite; - result = targetExhausted; - break; - } - - if (!computeLength) - { - switch (bytesToWrite) - { - /* note: everything falls through. */ - case 4: - *--target = (BYTE)((ch | byteMark) & byteMask); - ch >>= 6; - - case 3: - *--target = (BYTE)((ch | byteMark) & byteMask); - ch >>= 6; - - case 2: - *--target = (BYTE)((ch | byteMark) & byteMask); - ch >>= 6; - - case 1: - *--target = (BYTE)(ch | firstByteMark[bytesToWrite]); - } - } - else - { - switch (bytesToWrite) - { - /* note: everything falls through. */ - case 4: - --target; - ch >>= 6; - - case 3: - --target; - ch >>= 6; - - case 2: - --target; - ch >>= 6; - - case 1: - --target; - } - } - - target += bytesToWrite; - } - - *sourceStart = source; - *targetStart = target; - return result; -} - - -static BOOL isLegalUTF8(const BYTE* source, int length) -{ - BYTE a; - const BYTE* srcptr = source + length; - - switch (length) - { - default: - return FALSE; - - /* Everything else falls through when "TRUE"... */ - case 4: - if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return FALSE; - - case 3: - if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return FALSE; - - case 2: - if ((a = (*--srcptr)) > 0xBF) return FALSE; - - switch (*source) - { - /* no fall-through in this inner switch */ - case 0xE0: - if (a < 0xA0) return FALSE; - - break; - - case 0xED: - if (a > 0x9F) return FALSE; - - break; - - case 0xF0: - if (a < 0x90) return FALSE; - - break; - - case 0xF4: - if (a > 0x8F) return FALSE; - - break; - - default: - if (a < 0x80) return FALSE; - } - - case 1: - if (*source >= 0x80 && *source < 0xC2) return FALSE; - } - - if (*source > 0xF4) - return FALSE; - - return TRUE; -} - -static ConversionResult _ConvertUTF8toUTF16(const BYTE** sourceStart, const BYTE* sourceEnd, WCHAR** targetStart, WCHAR* targetEnd, ConversionFlags flags) -{ - WCHAR* target; - const BYTE* source; - BOOL computeLength; - ConversionResult result; - computeLength = (!targetEnd) ? TRUE : FALSE; - result = conversionOK; - source = *sourceStart; - target = *targetStart; - - while (source < sourceEnd) - { - DWORD ch = 0; - unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; - - if ((source + extraBytesToRead) >= sourceEnd) - { - result = sourceExhausted; - break; - } - - /* Do this check whether lenient or strict */ - if (!isLegalUTF8(source, extraBytesToRead + 1)) - { - result = sourceIllegal; - break; - } - - /* - * The cases all fall through. See "Note A" below. 
- */ - switch (extraBytesToRead) - { - case 5: - ch += *source++; - ch <<= 6; /* remember, illegal UTF-8 */ - - case 4: - ch += *source++; - ch <<= 6; /* remember, illegal UTF-8 */ - - case 3: - ch += *source++; - ch <<= 6; - - case 2: - ch += *source++; - ch <<= 6; - - case 1: - ch += *source++; - ch <<= 6; - - case 0: - ch += *source++; - } - - ch -= offsetsFromUTF8[extraBytesToRead]; - - if ((target >= targetEnd) && (!computeLength)) - { - source -= (extraBytesToRead + 1); /* Back up source pointer! */ - result = targetExhausted; - break; - } - - if (ch <= UNI_MAX_BMP) - { - /* Target is a character <= 0xFFFF */ - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) - { - if (flags == strictConversion) - { - source -= (extraBytesToRead + 1); /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - else - { - if (!computeLength) - *target++ = UNI_REPLACEMENT_CHAR; - else - target++; - } - } - else - { - if (!computeLength) - *target++ = (WCHAR)ch; /* normal case */ - else - target++; - } - } - else if (ch > UNI_MAX_UTF16) - { - if (flags == strictConversion) - { - result = sourceIllegal; - source -= (extraBytesToRead + 1); /* return to the start */ - break; /* Bail out; shouldn't continue */ - } - else - { - if (!computeLength) - *target++ = UNI_REPLACEMENT_CHAR; - else - target++; - } - } - else - { - /* target is a character in range 0xFFFF - 0x10FFFF. */ - if ((target + 1 >= targetEnd) && (!computeLength)) - { - source -= (extraBytesToRead + 1); /* Back up source pointer! */ - result = targetExhausted; - break; - } - - ch -= halfBase; - - if (!computeLength) - { - *target++ = (WCHAR)((ch >> halfShift) + UNI_SUR_HIGH_START); - *target++ = (WCHAR)((ch & halfMask) + UNI_SUR_LOW_START); - } - else - { - target++; - target++; - } - } - } - - *sourceStart = source; - *targetStart = target; - return result; -} - -static int MultiByteToWideChar(UINT CodePage, DWORD dwFlags, LPCSTR lpMultiByteStr, int cbMultiByte, LPWSTR lpWideCharStr, int cchWideChar) -{ - int length; - LPWSTR targetStart; - const BYTE* sourceStart; - ConversionResult result; - - /* If cbMultiByte is 0, the function fails */ - - if (cbMultiByte == 0) - return 0; - - /* If cbMultiByte is -1, the string is null-terminated */ - - if (cbMultiByte == -1) - cbMultiByte = (int)strlen((char*)lpMultiByteStr) + 1; - - /* - * if cchWideChar is 0, the function returns the required buffer size - * in characters for lpWideCharStr and makes no use of the output parameter itself. 
- */ - - if (cchWideChar == 0) - { - sourceStart = (const BYTE*)lpMultiByteStr; - targetStart = (WCHAR*)NULL; - - result = _ConvertUTF8toUTF16(&sourceStart, &sourceStart[cbMultiByte], - &targetStart, NULL, strictConversion); - - length = (int)(targetStart - ((WCHAR*)NULL)); - cchWideChar = length; - } - else - { - sourceStart = (const BYTE*)lpMultiByteStr; - targetStart = lpWideCharStr; - - result = _ConvertUTF8toUTF16(&sourceStart, &sourceStart[cbMultiByte], - &targetStart, &targetStart[cchWideChar], strictConversion); - - length = (int)(targetStart - ((WCHAR*)lpWideCharStr)); - cchWideChar = length; - } - - return cchWideChar; -} - -#endif - - - -bool ex_utf8_to_utf16le(const std::string& from, ex_str_utf16le& to) -{ - int iSize = MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, NULL, 0); - if (iSize <= 0) - return false; - - //++iSize; - to.resize(iSize); - memset(&to[0], 0, sizeof(ex_utf16)); - - MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, &to[0], iSize); - - return true; -} - -#endif +#include +#include +#include + +char* ex_strcpy(char* target, size_t size, const char* source) +{ + if (target == source) + return target; + +#ifdef EX_OS_WIN32 + if (SUCCEEDED(StringCchCopyA(target, size, source))) + return target; + else + return NULL; +#else + size_t len = strlen(source); + if (size > len) + { + return strcpy(target, source); + } + else + { + memmove(target, source, size - 1); + return NULL; + } +#endif +} + +wchar_t* ex_wcscpy(wchar_t* target, size_t size, const wchar_t* source) +{ + if (target == source) + return target; + +#ifdef EX_OS_WIN32 + if (SUCCEEDED(StringCchCopyW(target, size, source))) + return target; + else + return NULL; +#else + size_t len = wcslen(source); + if (size > len) + { + return wcscpy(target, source); + } + else + { + memmove(target, source, (size - 1)*sizeof(wchar_t)); + return NULL; + } +#endif +} + +char* ex_strdup(const char* src) +{ + if (NULL == src) + return NULL; + size_t len = strlen(src) + 1; + char* ret = (char*)calloc(1, len); + memcpy(ret, src, len); + return ret; +} + +wchar_t* ex_wcsdup(const wchar_t* src) +{ + if (NULL == src) + return NULL; + size_t len = wcslen(src) + 1; + wchar_t* ret = (wchar_t*)calloc(sizeof(wchar_t), len); + memcpy(ret, src, sizeof(wchar_t)*len); + return ret; +} + +wchar_t* ex_str2wcs_alloc(const char* in_buffer, int code_page) +{ + wchar_t* out_buffer = NULL; +#ifdef EX_OS_WIN32 + int wlen = 0; + UINT _cp = 0; + if (code_page == EX_CODEPAGE_ACP) + _cp = CP_ACP; + else if (code_page == EX_CODEPAGE_UTF8) + _cp = CP_UTF8; + + wlen = MultiByteToWideChar(_cp, 0, in_buffer, -1, NULL, 0); + if (0 == wlen) + return NULL; + + out_buffer = (wchar_t*)calloc(wlen + 1, sizeof(wchar_t)); + if (NULL == out_buffer) + return NULL; + + wlen = MultiByteToWideChar(_cp, 0, in_buffer, -1, out_buffer, wlen); + if (0 == wlen) + { + free(out_buffer); + return NULL; + } + +#else + size_t wlen = 0; + wlen = mbstowcs(NULL, in_buffer, 0); + if (wlen <= 0) + return NULL; + + out_buffer = (wchar_t*)calloc(wlen + 1, sizeof(wchar_t)); + if (NULL == out_buffer) + return NULL; + + wlen = mbstowcs(out_buffer, in_buffer, wlen); + if (wlen <= 0) + { + free(out_buffer); + return NULL; + } + +#endif + + return out_buffer; +} + + +char* ex_wcs2str_alloc(const wchar_t* in_buffer, int code_page) +{ + char* out_buffer = NULL; + + if(NULL == in_buffer) + return NULL; + +#ifdef EX_OS_WIN32 + int len = 0; + UINT _cp = 0; + if (code_page == EX_CODEPAGE_ACP) + _cp = CP_ACP; + else if (code_page == EX_CODEPAGE_UTF8) + _cp = CP_UTF8; + + len = 
WideCharToMultiByte(_cp, 0, in_buffer, -1, NULL, 0, NULL, NULL);
+    if (0 == len)
+        return NULL;
+
+    out_buffer = (char*)calloc(len + 1, sizeof(char));
+    if (NULL == out_buffer)
+        return NULL;
+
+    len = WideCharToMultiByte(_cp, 0, in_buffer, -1, out_buffer, len, NULL, NULL);
+    if (0 == len)
+    {
+        free(out_buffer);
+        return NULL;
+    }
+
+#else
+    size_t len = 0;
+    len = wcstombs(NULL, in_buffer, 0);
+    if (len <= 0)
+        return NULL;
+
+    out_buffer = (char*)calloc(len + 1, sizeof(char));
+    if (NULL == out_buffer)
+        return NULL;
+
+    len = wcstombs(out_buffer, in_buffer, len);
+    if (len <= 0)
+    {
+        free(out_buffer);
+        return NULL;
+    }
+
+#endif
+
+    return out_buffer;
+}
+
+wchar_t** ex_make_wargv(int argc, char** argv)
+{
+    int i = 0;
+    wchar_t** ret = NULL;
+
+    ret = (wchar_t**)calloc(argc + 1, sizeof(wchar_t*));
+    if (!ret)
+    {
+        return NULL;
+    }
+
+    for (i = 0; i < argc; ++i)
+    {
+        ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_DEFAULT);
+        if (NULL == ret[i])
+            goto err;
+    }
+
+    return ret;
+
+err:
+    ex_free_wargv(argc, ret);
+    return NULL;
+}
+
+void ex_free_wargv(int argc, wchar_t** argv)
+{
+    int i = 0;
+    for (i = 0; i < argc; ++i)
+        free(argv[i]);
+
+    free(argv);
+}
+
+EX_BOOL ex_str_only_white_space(const wchar_t* src)
+{
+    if (ex_only_white_space(src))
+        return EX_TRUE;
+    else
+        return EX_FALSE;
+}
+
+EX_BOOL ex_wcs_only_white_space(const char* src)
+{
+    if (ex_only_white_space(src))
+        return EX_TRUE;
+    else
+        return EX_FALSE;
+}
+
+int ex_strformat(char* out_buf, size_t buf_size, const char* fmt, ...)
+{
+    int ret = 0;
+    va_list valist;
+    va_start(valist, fmt);
+    // vsnprintf() is bounded: it never writes more than buf_size characters,
+    // on Win32 and POSIX alike, so an oversized message cannot overrun out_buf.
+    ret = vsnprintf(out_buf, buf_size, fmt, valist);
+    va_end(valist);
+    return ret;
+}
+
+int ex_wcsformat(wchar_t* out_buf, size_t buf_size, const wchar_t* fmt, ...)
+{ + int ret = 0; + va_list valist; + va_start(valist, fmt); + //_ts_printf_a(level, EX_COLOR_BLACK, fmt, valist); +#ifdef EX_OS_WIN32 + //ret = vsnprintf(out_buf, buf_size, fmt, valist); + ret = _vsnwprintf_s(out_buf, buf_size, buf_size, fmt, valist); +#else + //ret = vsprintf(out_buf, fmt, valist); + ret = vswprintf(out_buf, buf_size, fmt, valist); +#endif + va_end(valist); + return ret; +} + + +#ifdef __cplusplus +bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) +{ + return ex_wstr2astr(in_str.c_str(), out_str, code_page); +} + +bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) +{ + char* astr = ex_wcs2str_alloc(in_str, code_page); + if (NULL == astr) + return false; + + out_str = astr; + ex_free(astr); + return true; +} + +bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) +{ + return ex_astr2wstr(in_str.c_str(), out_str, code_page); +} + +bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) +{ + wchar_t* wstr = ex_str2wcs_alloc(in_str, code_page); + if (NULL == wstr) + return false; + + out_str = wstr; + ex_free(wstr); + return true; +} + +bool ex_only_white_space(const ex_astr& str_check) +{ + ex_astr::size_type pos = 0; + ex_astr strFilter(" \t\r\n"); + pos = str_check.find_first_not_of(strFilter); + if (ex_astr::npos == pos) + return true; + else + return false; +} + +bool ex_only_white_space(const ex_wstr& str_check) +{ + ex_wstr::size_type pos = 0; + ex_wstr strFilter(L" \t\r\n"); + pos = str_check.find_first_not_of(strFilter); + if (ex_wstr::npos == pos) + return true; + else + return false; +} + +void ex_remove_white_space(ex_astr& str_fix, int ulFlag /*= EX_RSC_ALL*/) +{ + ex_astr::size_type pos = 0; + ex_astr strFilter(" \t\r\n"); + + if (ulFlag & EX_RSC_BEGIN) + { + pos = str_fix.find_first_not_of(strFilter); + if (ex_astr::npos != pos) + str_fix.erase(0, pos); + // FIXME + } + if (ulFlag & EX_RSC_END) + { + pos = str_fix.find_last_not_of(strFilter); + if (ex_astr::npos != pos) + str_fix.erase(pos + 1); + // FIXME + } +} + +void ex_remove_white_space(ex_wstr& str_fix, int ulFlag /*= EX_RSC_ALL*/) +{ + ex_wstr::size_type pos = 0; + ex_wstr strFilter(L" \t\r\n"); + + if (ulFlag & EX_RSC_BEGIN) + { + pos = str_fix.find_first_not_of(strFilter); + if (ex_wstr::npos != pos) + str_fix.erase(0, pos); + // FIXME + } + if (ulFlag & EX_RSC_END) + { + pos = str_fix.find_last_not_of(strFilter); + if (ex_wstr::npos != pos) + str_fix.erase(pos + 1); + // FIXME + } +} + +ex_astr& ex_replace_all(ex_astr& str, const ex_astr& old_value, const ex_astr& new_value) +{ + for (ex_astr::size_type pos(0); pos != ex_astr::npos; pos += new_value.length()) + { + if ((pos = str.find(old_value, pos)) != ex_astr::npos) + str.replace(pos, old_value.length(), new_value); + else + break; + } + + return str; +} + +ex_wstr& ex_replace_all(ex_wstr& str, const ex_wstr& old_value, const ex_wstr& new_value) +{ + for (ex_wstr::size_type pos(0); pos != ex_wstr::npos; pos += new_value.length()) + { + if ((pos = str.find(old_value, pos)) != ex_wstr::npos) + str.replace(pos, old_value.length(), new_value); + else + break; + } + + return str; +} + + + +#ifndef EX_OS_WIN32 + +#define BYTE ex_u8 +#define DWORD ex_u32 +#define WCHAR ex_i16 +#define LPWSTR WCHAR* +#define BOOL int +#define TRUE 1 +#define FALSE 0 +#define UINT unsigned int +#define LPCSTR const char* +#define CP_UTF8 1 + +typedef enum +{ + conversionOK, /* conversion 
successful */ + sourceExhausted, /* partial character in source, but hit end */ + targetExhausted, /* insuff. room in target for conversion */ + sourceIllegal /* source sequence is illegal/malformed */ +} ConversionResult; + +typedef enum +{ + strictConversion = 0, + lenientConversion +} ConversionFlags; + +static const char trailingBytesForUTF8[256] = +{ + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, + 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 +}; + +static const DWORD offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, 0x03C82080UL, 0xFA082080UL, 0x82082080UL +}; + +static const BYTE firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; + +static const int halfShift = 10; /* used for shifting by 10 bits */ + +static const DWORD halfBase = 0x0010000UL; +static const DWORD halfMask = 0x3FFUL; + +#define UNI_SUR_HIGH_START (DWORD)0xD800 +#define UNI_SUR_HIGH_END (DWORD)0xDBFF +#define UNI_SUR_LOW_START (DWORD)0xDC00 +#define UNI_SUR_LOW_END (DWORD)0xDFFF + +#define UNI_REPLACEMENT_CHAR (DWORD)0x0000FFFD +#define UNI_MAX_BMP (DWORD)0x0000FFFF +#define UNI_MAX_UTF16 (DWORD)0x0010FFFF +#define UNI_MAX_UTF32 (DWORD)0x7FFFFFFF +#define UNI_MAX_LEGAL_UTF32 (DWORD)0x0010FFFF + + +static ConversionResult ConvertUTF16toUTF8(const WCHAR** sourceStart, const WCHAR* sourceEnd, BYTE** targetStart, BYTE* targetEnd, ConversionFlags flags) +{ + BYTE* target; + const WCHAR* source; + BOOL computeLength; + ConversionResult result; + computeLength = (!targetEnd) ? TRUE : FALSE; + source = *sourceStart; + target = *targetStart; + result = conversionOK; + + while (source < sourceEnd) + { + DWORD ch; + unsigned short bytesToWrite = 0; + const DWORD byteMask = 0xBF; + const DWORD byteMark = 0x80; + const WCHAR* oldSource = source; /* In case we have to back up because of target overflow. */ + ch = *source++; + + /* If we have a surrogate pair, convert to UTF32 first. */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) + { + /* If the 16 bits following the high surrogate are in the source buffer... */ + if (source < sourceEnd) + { + DWORD ch2 = *source; + + /* If it's a low surrogate, convert to UTF32. */ + if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) + { + ch = ((ch - UNI_SUR_HIGH_START) << halfShift) + + (ch2 - UNI_SUR_LOW_START) + halfBase; + ++source; + } + else if (flags == strictConversion) + { + /* it's an unpaired high surrogate */ + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } + else + { + /* We don't have the 16 bits following the high surrogate. 
*/ + --source; /* return to the high surrogate */ + result = sourceExhausted; + break; + } + } + else if (flags == strictConversion) + { + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) + { + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } + + /* Figure out how many bytes the result will require */ + if (ch < (DWORD)0x80) + { + bytesToWrite = 1; + } + else if (ch < (DWORD)0x800) + { + bytesToWrite = 2; + } + else if (ch < (DWORD)0x10000) + { + bytesToWrite = 3; + } + else if (ch < (DWORD)0x110000) + { + bytesToWrite = 4; + } + else + { + bytesToWrite = 3; + ch = UNI_REPLACEMENT_CHAR; + } + + target += bytesToWrite; + + if ((target > targetEnd) && (!computeLength)) + { + source = oldSource; /* Back up source pointer! */ + target -= bytesToWrite; + result = targetExhausted; + break; + } + + if (!computeLength) + { + switch (bytesToWrite) + { + /* note: everything falls through. */ + case 4: + *--target = (BYTE)((ch | byteMark) & byteMask); + ch >>= 6; + + case 3: + *--target = (BYTE)((ch | byteMark) & byteMask); + ch >>= 6; + + case 2: + *--target = (BYTE)((ch | byteMark) & byteMask); + ch >>= 6; + + case 1: + *--target = (BYTE)(ch | firstByteMark[bytesToWrite]); + } + } + else + { + switch (bytesToWrite) + { + /* note: everything falls through. */ + case 4: + --target; + ch >>= 6; + + case 3: + --target; + ch >>= 6; + + case 2: + --target; + ch >>= 6; + + case 1: + --target; + } + } + + target += bytesToWrite; + } + + *sourceStart = source; + *targetStart = target; + return result; +} + + +static BOOL isLegalUTF8(const BYTE* source, int length) +{ + BYTE a; + const BYTE* srcptr = source + length; + + switch (length) + { + default: + return FALSE; + + /* Everything else falls through when "TRUE"... */ + case 4: + if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return FALSE; + + case 3: + if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return FALSE; + + case 2: + if ((a = (*--srcptr)) > 0xBF) return FALSE; + + switch (*source) + { + /* no fall-through in this inner switch */ + case 0xE0: + if (a < 0xA0) return FALSE; + + break; + + case 0xED: + if (a > 0x9F) return FALSE; + + break; + + case 0xF0: + if (a < 0x90) return FALSE; + + break; + + case 0xF4: + if (a > 0x8F) return FALSE; + + break; + + default: + if (a < 0x80) return FALSE; + } + + case 1: + if (*source >= 0x80 && *source < 0xC2) return FALSE; + } + + if (*source > 0xF4) + return FALSE; + + return TRUE; +} + +static ConversionResult _ConvertUTF8toUTF16(const BYTE** sourceStart, const BYTE* sourceEnd, WCHAR** targetStart, WCHAR* targetEnd, ConversionFlags flags) +{ + WCHAR* target; + const BYTE* source; + BOOL computeLength; + ConversionResult result; + computeLength = (!targetEnd) ? TRUE : FALSE; + result = conversionOK; + source = *sourceStart; + target = *targetStart; + + while (source < sourceEnd) + { + DWORD ch = 0; + unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; + + if ((source + extraBytesToRead) >= sourceEnd) + { + result = sourceExhausted; + break; + } + + /* Do this check whether lenient or strict */ + if (!isLegalUTF8(source, extraBytesToRead + 1)) + { + result = sourceIllegal; + break; + } + + /* + * The cases all fall through. See "Note A" below. 
+ */ + switch (extraBytesToRead) + { + case 5: + ch += *source++; + ch <<= 6; /* remember, illegal UTF-8 */ + + case 4: + ch += *source++; + ch <<= 6; /* remember, illegal UTF-8 */ + + case 3: + ch += *source++; + ch <<= 6; + + case 2: + ch += *source++; + ch <<= 6; + + case 1: + ch += *source++; + ch <<= 6; + + case 0: + ch += *source++; + } + + ch -= offsetsFromUTF8[extraBytesToRead]; + + if ((target >= targetEnd) && (!computeLength)) + { + source -= (extraBytesToRead + 1); /* Back up source pointer! */ + result = targetExhausted; + break; + } + + if (ch <= UNI_MAX_BMP) + { + /* Target is a character <= 0xFFFF */ + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) + { + if (flags == strictConversion) + { + source -= (extraBytesToRead + 1); /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + else + { + if (!computeLength) + *target++ = UNI_REPLACEMENT_CHAR; + else + target++; + } + } + else + { + if (!computeLength) + *target++ = (WCHAR)ch; /* normal case */ + else + target++; + } + } + else if (ch > UNI_MAX_UTF16) + { + if (flags == strictConversion) + { + result = sourceIllegal; + source -= (extraBytesToRead + 1); /* return to the start */ + break; /* Bail out; shouldn't continue */ + } + else + { + if (!computeLength) + *target++ = UNI_REPLACEMENT_CHAR; + else + target++; + } + } + else + { + /* target is a character in range 0xFFFF - 0x10FFFF. */ + if ((target + 1 >= targetEnd) && (!computeLength)) + { + source -= (extraBytesToRead + 1); /* Back up source pointer! */ + result = targetExhausted; + break; + } + + ch -= halfBase; + + if (!computeLength) + { + *target++ = (WCHAR)((ch >> halfShift) + UNI_SUR_HIGH_START); + *target++ = (WCHAR)((ch & halfMask) + UNI_SUR_LOW_START); + } + else + { + target++; + target++; + } + } + } + + *sourceStart = source; + *targetStart = target; + return result; +} + +static int MultiByteToWideChar(UINT CodePage, DWORD dwFlags, LPCSTR lpMultiByteStr, int cbMultiByte, LPWSTR lpWideCharStr, int cchWideChar) +{ + int length; + LPWSTR targetStart; + const BYTE* sourceStart; + ConversionResult result; + + /* If cbMultiByte is 0, the function fails */ + + if (cbMultiByte == 0) + return 0; + + /* If cbMultiByte is -1, the string is null-terminated */ + + if (cbMultiByte == -1) + cbMultiByte = (int)strlen((char*)lpMultiByteStr) + 1; + + /* + * if cchWideChar is 0, the function returns the required buffer size + * in characters for lpWideCharStr and makes no use of the output parameter itself. 
+ */ + + if (cchWideChar == 0) + { + sourceStart = (const BYTE*)lpMultiByteStr; + targetStart = (WCHAR*)NULL; + + result = _ConvertUTF8toUTF16(&sourceStart, &sourceStart[cbMultiByte], + &targetStart, NULL, strictConversion); + + length = (int)(targetStart - ((WCHAR*)NULL)); + cchWideChar = length; + } + else + { + sourceStart = (const BYTE*)lpMultiByteStr; + targetStart = lpWideCharStr; + + result = _ConvertUTF8toUTF16(&sourceStart, &sourceStart[cbMultiByte], + &targetStart, &targetStart[cchWideChar], strictConversion); + + length = (int)(targetStart - ((WCHAR*)lpWideCharStr)); + cchWideChar = length; + } + + return cchWideChar; +} + +#endif + +ex_str_utf16le::ex_str_utf16le() { + m_data.resize(1); + memset(&m_data[0], 0, 1); +} + +ex_str_utf16le::~ex_str_utf16le() {} + +size_t ex_str_utf16le::length() const { + return m_data.size() - 1; +} + +bool ex_str_utf16le::from_utf8(const ex_astr& from) { + int iSize = MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, NULL, 0); + // 注意iSize包括\0结束符 + if (iSize <= 0) + return false; + + m_data.resize(iSize); + memset(&m_data[0], 0, sizeof(ex_utf16)); + + MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, (LPWSTR)&m_data[0], iSize); + + return true; +} + +const uint16_t* ex_str_utf16le::c_str() const { + return &m_data[0]; +} + +// bool ex_utf8_to_utf16le(const ex_astr& from, ex_str_utf16le& to) +// { +// int iSize = MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, NULL, 0); +// if (iSize <= 0) +// return false; +// +// //++iSize; +// to.resize(iSize); +// memset(&to[0], 0, sizeof(ex_utf16)); +// +// MultiByteToWideChar(CP_UTF8, 0, from.c_str(), -1, &to[0], iSize); +// +// return true; +// } + +#endif diff --git a/common/libex/src/ex_thread.cpp b/common/libex/src/ex_thread.cpp index acdcdf8..fb28d41 100644 --- a/common/libex/src/ex_thread.cpp +++ b/common/libex/src/ex_thread.cpp @@ -1,224 +1,227 @@ -#include -#include - -//========================================================= -// -//========================================================= - - -#ifdef EX_OS_WIN32 -unsigned int WINAPI ExThreadBase::_thread_func(LPVOID pParam) -#else - -void *ExThreadBase::_thread_func(void *pParam) -#endif -{ - ExThreadBase *_this = (ExThreadBase *) pParam; - - _this->m_is_running = true; - _this->_thread_loop(); - _this->m_is_running = false; - _this->m_handle = 0; - - EXLOGV(" # thread [%s] exit.\n", _this->m_thread_name.c_str()); - _this->_on_stopped(); - return 0; -} - -ExThreadBase::ExThreadBase(const char *thread_name) : - m_handle(0), - m_is_running(false), - m_need_stop(false) { - m_thread_name = thread_name; -} - -ExThreadBase::~ExThreadBase() { - if(m_is_running) { - EXLOGE(" # thread [%s] not stop before destroy.\n", m_thread_name.c_str()); - } -} - -bool ExThreadBase::start(void) { - m_need_stop = false; - EXLOGV(" . 
thread [%s] starting.\n", m_thread_name.c_str()); -#ifdef WIN32 - HANDLE h = (HANDLE)_beginthreadex(NULL, 0, _thread_func, (void*)this, 0, NULL); - - if (NULL == h) - { - return false; - } - m_handle = h; -#else - pthread_t ptid = 0; - int ret = pthread_create(&ptid, NULL, _thread_func, (void *) this); - if (ret != 0) { - return false; - } - m_handle = ptid; - -#endif - - return true; -} - -bool ExThreadBase::stop(void) { - if (m_handle == 0) { - EXLOGW("[thread] thread [%s] already stopped.\n", m_thread_name.c_str()); - return true; - } - - EXLOGV("[thread] try to stop thread [%s].\n", m_thread_name.c_str()); - m_need_stop = true; - _on_stop(); - - EXLOGV("[thread] wait thread [%s] exit.\n", m_thread_name.c_str()); - -#ifdef EX_OS_WIN32 - if (WaitForSingleObject(m_handle, INFINITE) != WAIT_OBJECT_0) - { - return false; - } -#else - if (pthread_join(m_handle, NULL) != 0) { - return false; - } -#endif - - return true; -} - -bool ExThreadBase::terminate(void) { -#ifdef EX_OS_WIN32 - return (TerminateThread(m_handle, 1) == TRUE); -#else - return (pthread_cancel(m_handle) == 0); -#endif -} - -//========================================================= -// -//========================================================= - -ExThreadManager::ExThreadManager() {} - -ExThreadManager::~ExThreadManager() { - if (!m_threads.empty()) { - EXLOGE("when destroy thread manager, there are %d thread not exit.\n", m_threads.size()); - stop_all(); - } -} - -void ExThreadManager::stop_all(void) { - ExThreadSmartLock locker(m_lock); - - ex_threads::iterator it = m_threads.begin(); - for (; it != m_threads.end(); ++it) { - (*it)->stop(); - } - m_threads.clear(); -} - -void ExThreadManager::add(ExThreadBase *tb) { - ExThreadSmartLock locker(m_lock); - - ex_threads::iterator it = m_threads.begin(); - for (; it != m_threads.end(); ++it) { - if ((*it) == tb) { - EXLOGE("when add thread to manager, it already exist.\n"); - return; - } - } - - m_threads.push_back(tb); -} - -void ExThreadManager::remove(ExThreadBase *tb) { - ExThreadSmartLock locker(m_lock); - - ex_threads::iterator it = m_threads.begin(); - for (; it != m_threads.end(); ++it) { - if ((*it) == tb) { - m_threads.erase(it); - return; - } - } - EXLOGE("thread not hold by thread-manager while remove it.\n"); -} - -//========================================================= -// -//========================================================= - -ExThreadLock::ExThreadLock() { -#ifdef EX_OS_WIN32 - InitializeCriticalSection(&m_locker); -#else - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&m_locker, &attr); - pthread_mutexattr_destroy(&attr); -#endif -} - -ExThreadLock::~ExThreadLock() { -#ifdef EX_OS_WIN32 - DeleteCriticalSection(&m_locker); -#else - pthread_mutex_destroy(&m_locker); -#endif -} - -void ExThreadLock::lock(void) { -#ifdef EX_OS_WIN32 - EnterCriticalSection(&m_locker); -#else - pthread_mutex_lock(&m_locker); -#endif -} - -void ExThreadLock::unlock(void) { -#ifdef EX_OS_WIN32 - LeaveCriticalSection(&m_locker); -#else - pthread_mutex_unlock(&m_locker); -#endif -} - -//========================================================= -// -//========================================================= - -int ex_atomic_add(volatile int *pt, int t) { -#ifdef EX_OS_WIN32 - return (int)InterlockedExchangeAdd((long*)pt, (long)t); -#else - return __sync_add_and_fetch(pt, t); -#endif -} - -int ex_atomic_inc(volatile int *pt) { -#ifdef EX_OS_WIN32 - return 
(int)InterlockedIncrement((long*)pt);
-#else
-    return __sync_add_and_fetch(pt, 1);
-#endif
-}
-
-int ex_atomic_dec(volatile int *pt) {
-#ifdef EX_OS_WIN32
-    return (int)InterlockedDecrement((long*)pt);
-#else
-    return __sync_add_and_fetch(pt, -1);
-#endif
-}
-
-
-ex_u64 ex_get_thread_id(void) {
-#ifdef EX_OS_WIN32
-    return GetCurrentThreadId();
-#else
-    return (ex_u64) pthread_self();
-#endif
-}
+#include
+#include
+
+//=========================================================
+//
+//=========================================================
+
+
+#ifdef EX_OS_WIN32
+unsigned int WINAPI ExThreadBase::_thread_func(LPVOID pParam)
+#else
+
+void *ExThreadBase::_thread_func(void *pParam)
+#endif
+{
+    ExThreadBase *_this = (ExThreadBase *) pParam;
+
+    _this->m_is_running = true;
+    _this->_thread_loop();
+    _this->m_is_running = false;
+    _this->m_handle = 0;
+
+    EXLOGV("[thread] - `%s` exit.\n", _this->m_thread_name.c_str());
+    _this->_on_stopped();
+    return 0;
+}
+
+ExThreadBase::ExThreadBase(const char *thread_name) :
+        m_handle(0),
+        m_is_running(false),
+        m_need_stop(false) {
+    m_thread_name = thread_name;
+}
+
+ExThreadBase::~ExThreadBase() {
+    if(m_is_running) {
+        EXLOGE("[thread] `%s` not stopped before destroy.\n", m_thread_name.c_str());
+    }
+}
+
+bool ExThreadBase::start(void) {
+    m_need_stop = false;
+    EXLOGV("[thread] + `%s` starting.\n", m_thread_name.c_str());
+#ifdef WIN32
+    HANDLE h = (HANDLE)_beginthreadex(NULL, 0, _thread_func, (void*)this, 0, NULL);
+
+    if (NULL == h)
+    {
+        return false;
+    }
+    m_handle = h;
+#else
+    pthread_t ptid = 0;
+    int ret = pthread_create(&ptid, NULL, _thread_func, (void *) this);
+    if (ret != 0) {
+        return false;
+    }
+    m_handle = ptid;
+
+#endif
+
+    return true;
+}
+
+bool ExThreadBase::stop(void) {
+    if (m_handle == 0) {
+        EXLOGW("[thread] `%s` already stopped before stop() call.\n", m_thread_name.c_str());
+        return true;
+    }
+
+    EXLOGV("[thread] - try to stop `%s`.\n", m_thread_name.c_str());
+    m_need_stop = true;
+    _on_stop();
+
+    //EXLOGV("[thread] - wait `%s` exit, thread-handle=0x%08x.\n", m_thread_name.c_str(), m_handle);
+
+#ifdef EX_OS_WIN32
+    if (m_handle) {
+        if (WaitForSingleObject(m_handle, INFINITE) != WAIT_OBJECT_0) {
+            return false;
+        }
+    }
+#else
+    if(m_handle != 0) {
+        if (pthread_join(m_handle, NULL) != 0) {
+            return false;
+        }
+    }
+#endif
+
+    return true;
+}
+
+bool ExThreadBase::terminate(void) {
+#ifdef EX_OS_WIN32
+    return (TerminateThread(m_handle, 1) == TRUE);
+#else
+    return (pthread_cancel(m_handle) == 0);
+#endif
+}
+
+//=========================================================
+//
+//=========================================================
+
+ExThreadManager::ExThreadManager() {}
+
+ExThreadManager::~ExThreadManager() {
+    if (!m_threads.empty()) {
+        EXLOGE("[thread] destroying thread manager while %d threads have not exited.\n", (int)m_threads.size());
+        stop_all();
+    }
+}
+
+void ExThreadManager::stop_all(void) {
+    ExThreadSmartLock locker(m_lock);
+
+    ex_threads::iterator it = m_threads.begin();
+    for (; it != m_threads.end(); ++it) {
+        (*it)->stop();
+    }
+    m_threads.clear();
+}
+
+void ExThreadManager::add(ExThreadBase *tb) {
+    ExThreadSmartLock locker(m_lock);
+
+    ex_threads::iterator it = m_threads.begin();
+    for (; it != m_threads.end(); ++it) {
+        if ((*it) == tb) {
+            EXLOGE("[thread] when adding thread to manager, it already exists.\n");
+            return;
+        }
+    }
+
+    m_threads.push_back(tb);
+}
+
+void ExThreadManager::remove(ExThreadBase *tb) {
+    ExThreadSmartLock locker(m_lock);
+
+    ex_threads::iterator it = m_threads.begin();
+    for (; it
!= m_threads.end(); ++it) { + if ((*it) == tb) { + m_threads.erase(it); + return; + } + } + EXLOGE("[thread] thread not hold by thread-manager while remove it.\n"); +} + +//========================================================= +// +//========================================================= + +ExThreadLock::ExThreadLock() { +#ifdef EX_OS_WIN32 + InitializeCriticalSection(&m_locker); +#else + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&m_locker, &attr); + pthread_mutexattr_destroy(&attr); +#endif +} + +ExThreadLock::~ExThreadLock() { +#ifdef EX_OS_WIN32 + DeleteCriticalSection(&m_locker); +#else + pthread_mutex_destroy(&m_locker); +#endif +} + +void ExThreadLock::lock(void) { +#ifdef EX_OS_WIN32 + EnterCriticalSection(&m_locker); +#else + pthread_mutex_lock(&m_locker); +#endif +} + +void ExThreadLock::unlock(void) { +#ifdef EX_OS_WIN32 + LeaveCriticalSection(&m_locker); +#else + pthread_mutex_unlock(&m_locker); +#endif +} + +//========================================================= +// +//========================================================= + +int ex_atomic_add(volatile int *pt, int t) { +#ifdef EX_OS_WIN32 + return (int)InterlockedExchangeAdd((long*)pt, (long)t); +#else + return __sync_add_and_fetch(pt, t); +#endif +} + +int ex_atomic_inc(volatile int *pt) { +#ifdef EX_OS_WIN32 + return (int)InterlockedIncrement((long*)pt); +#else + return __sync_add_and_fetch(pt, 1); +#endif +} + +int ex_atomic_dec(volatile int *pt) { +#ifdef EX_OS_WIN32 + return (int)InterlockedDecrement((long*)pt); +#else + return __sync_add_and_fetch(pt, -1); +#endif +} + + +ex_u64 ex_get_thread_id(void) { +#ifdef EX_OS_WIN32 + return GetCurrentThreadId(); +#else + return (ex_u64) pthread_self(); +#endif +} diff --git a/common/libex/src/ex_util.cpp b/common/libex/src/ex_util.cpp index a84a001..c4daeba 100644 --- a/common/libex/src/ex_util.cpp +++ b/common/libex/src/ex_util.cpp @@ -1,4 +1,4 @@ -#include +#include #include #include #include diff --git a/common/libex/src/ex_winsrv.cpp b/common/libex/src/ex_winsrv.cpp index 4c909f4..0b5087c 100644 --- a/common/libex/src/ex_winsrv.cpp +++ b/common/libex/src/ex_winsrv.cpp @@ -1,4 +1,4 @@ -#include +#include #ifdef EX_OS_WIN32 @@ -44,14 +44,14 @@ ex_rv ex_winsrv_install(const ex_wstr& srv_name, const ex_wstr& disp_name, const } SERVICE_FAILURE_ACTIONS failure_action; - failure_action.dwResetPeriod = 0; // reset failure count to zero ʱ䣬λΪ + failure_action.dwResetPeriod = 0; // reset failure count to zero 的时间,单位为秒 failure_action.lpRebootMsg = NULL; // Message to broadcast to server users before rebooting failure_action.lpCommand = NULL; // Command line of the process for the CreateProcess function to execute in response - failure_action.cActions = 3; // actionĸ + failure_action.cActions = 3; // action数组的个数 SC_ACTION actionarray[3]; - actionarray[0].Type = SC_ACTION_RESTART; // - actionarray[0].Delay = 60000; // λΪ + actionarray[0].Type = SC_ACTION_RESTART; // 重新启动服务 + actionarray[0].Delay = 60000; // 单位为毫秒 actionarray[1].Type = SC_ACTION_RESTART; actionarray[1].Delay = 60000; actionarray[2].Type = SC_ACTION_RESTART; diff --git a/common/teleport/teleport_const.h b/common/teleport/teleport_const.h index 8660e18..df55075 100644 --- a/common/teleport/teleport_const.h +++ b/common/teleport/teleport_const.h @@ -1,151 +1,152 @@ -#ifndef __TELEPORT_CONST_H__ -#define __TELEPORT_CONST_H__ - -// עͬͬԵconstļ - -// ļ趨teleportģ֮ͨѶʱĴֵJSONݣ -// - WEB -// - WEBWEB̨ -// - WEB̨COREķ - 
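(A note on the `ex_thread.cpp` rewrite above before continuing with the `teleport_const.h` hunk.) `ExThreadBase::_thread_func()` runs a subclass-supplied loop, and `stop()` only raises `m_need_stop`, calls `_on_stop()`, and joins; workers are therefore expected to poll the flag and to use `_on_stop()` to break out of any blocking wait. A minimal sketch of a worker written against that contract follows; the `override` hooks and the sleep helper are assumptions inferred from how `_thread_func()` and `stop()` use them, not code from this patch, and the header name is guessed because the diff's include lines are truncated.

```cpp
// Hypothetical worker built on the ExThreadBase contract shown above.
// #include "ex_thread.h"   // assumed header name

class DemoWorker : public ExThreadBase {
public:
    DemoWorker() : ExThreadBase("demo-worker") {}

protected:
    // Runs on the spawned thread (invoked from _thread_func()).
    void _thread_loop() override {
        while (!m_need_stop) {
            // ... one unit of work ...
            ex_sleep_ms(100);   // assumed helper; any short sleep keeps the poll cheap
        }
    }

    // Called from stop() on the controlling thread, before the join;
    // wake the loop here if it can block (socket, condition variable, ...).
    void _on_stop() override {}
};

void demo() {
    DemoWorker w;
    if (w.start())   // _beginthreadex()/pthread_create() under the hood
        w.stop();    // sets m_need_stop, runs _on_stop(), then joins
}
```

Because `stop()` joins unconditionally, a `_thread_loop()` that blocks forever will hang the caller; that is the reason the `_on_stop()` hook exists.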
-//======================================================= -// Urlprotocol -//======================================================= -#define TP_URLPROTO_APP_NAME "teleport" - -//======================================================= -// Զ֤ʽ -//======================================================= -#define TP_AUTH_TYPE_NONE 0 -#define TP_AUTH_TYPE_PASSWORD 1 -#define TP_AUTH_TYPE_PRIVATE_KEY 2 - -//======================================================= -// ԶЭ -//======================================================= -#define TP_PROTOCOL_TYPE_RDP 1 -#define TP_PROTOCOL_TYPE_SSH 2 -#define TP_PROTOCOL_TYPE_TELNET 3 - -//======================================================= -// ԶЭ -//======================================================= -#define TP_PROTOCOL_TYPE_RDP_DESKTOP 100 -#define TP_PROTOCOL_TYPE_SSH_SHELL 200 -#define TP_PROTOCOL_TYPE_SSH_SFTP 201 -#define TP_PROTOCOL_TYPE_TELNET_SHELL 300 - - -//======================================================= -// Զϵͳ -//======================================================= -#define TP_OS_TYPE_WINDOWS 1 -#define TP_OS_TYPE_LINUX 2 - -//======================================================= -// ԶӻỰ״̬ -//======================================================= -#define TP_SESS_STAT_RUNNING 0 // Ựʼˣ -#define TP_SESS_STAT_END 9999 // Ựɹ -#define TP_SESS_STAT_ERR_AUTH_DENIED 1 // ỰΪ֤ʧ -#define TP_SESS_STAT_ERR_CONNECT 2 // ỰΪ޷ӵԶ -#define TP_SESS_STAT_ERR_BAD_SSH_KEY 3 // ỰΪ޷ʶSSH˽Կ -#define TP_SESS_STAT_ERR_INTERNAL 4 // ỰΪڲ -#define TP_SESS_STAT_ERR_UNSUPPORT_PROTOCOL 5 // ỰΪЭ鲻֧(RDP) -#define TP_SESS_STAT_ERR_BAD_PKG 6 // ỰΪյı -#define TP_SESS_STAT_ERR_RESET 7 // ỰΪteleportķ -#define TP_SESS_STAT_ERR_IO 8 // ỰΪж -#define TP_SESS_STAT_ERR_SESSION 9 // ỰΪЧĻỰID -#define TP_SESS_STAT_ERR_AUTH_TYPE 10 // ỰΪ֤ʽ -#define TP_SESS_STAT_STARTED 100 // Ѿӳɹˣʼ¼¼ -#define TP_SESS_STAT_ERR_START_INTERNAL 104 // ỰΪڲ -#define TP_SESS_STAT_ERR_START_BAD_PKG 106 // ỰΪյı -#define TP_SESS_STAT_ERR_START_RESET 107 // ỰΪteleportķ -#define TP_SESS_STAT_ERR_START_IO 108 // ỰΪж - - -//======================================================= -// Ȩ -//======================================================= -#define TP_FLAG_ALL 0xFFFFFFFF -// Ự¼ -#define TP_FLAG_RECORD_REPLAY 0x00000001 // ¼ʷ¼طţ -#define TP_FLAG_RECORD_REAL_TIME 0x00000002 // ʵʱ -// RDP -#define TP_FLAG_RDP_DESKTOP 0x00000001 // Զ -#define TP_FLAG_RDP_CLIPBOARD 0x00000002 // -#define TP_FLAG_RDP_DISK 0x00000004 // ӳ -#define TP_FLAG_RDP_APP 0x00000008 // ԶAPPδʵ֣ -#define TP_FLAG_RDP_CONSOLE 0x00001000 //ӵԱỰRDPconsoleѡ -// SSH -#define TP_FLAG_SSH_SHELL 0x00000001 // SHELL -#define TP_FLAG_SSH_SFTP 0x00000002 // SFTP -#define TP_FLAG_SSH_X11 0x00000004 // X11תδʵ֣ -#define TP_FLAG_SSH_EXEC 0x00000008 // execִԶδʵ֣ -#define TP_FLAG_SSH_TUNNEL 0x00000010 // allow ssh tunnel. (not impl.) 
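Even with the comments garbled in the removed block above, the numeric values are readable, and this commit does not change them: flag bits are reused across categories (`TP_FLAG_RECORD_REPLAY`, `TP_FLAG_RDP_DESKTOP`, and `TP_FLAG_SSH_SHELL` are all `0x00000001`), so a mask is only meaningful alongside its protocol type. An illustrative check, not code from this patch:

```cpp
// Sketch: scoping teleport authorization masks by protocol, since the
// TP_FLAG_* bit values overlap between the RDP and SSH groups.
#include "teleport_const.h"
#include <cstdint>

bool feature_allowed(int protocol_type, uint32_t flags) {
    switch (protocol_type) {
    case TP_PROTOCOL_TYPE_RDP:
        return (flags & TP_FLAG_RDP_DESKTOP) != 0;             // 0x00000001 in its RDP sense
    case TP_PROTOCOL_TYPE_SSH:
        return (flags & (TP_FLAG_SSH_SHELL | TP_FLAG_SSH_SFTP)) != 0;
    case TP_PROTOCOL_TYPE_TELNET:
        return true;   // the header defines no per-feature telnet flags
    default:
        return false;
    }
}
```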
- - -//======================================================= -// ֵ -//======================================================= -#define TPE_OK 0 // ɹ -//------------------------------------------------------- -// ͨôֵ -//------------------------------------------------------- -#define TPE_NEED_MORE_DATA 1 // ҪݣһǴ -#define TPE_NEED_LOGIN 2 // Ҫ¼ -#define TPE_PRIVILEGE 3 // ûвȨ -#define TPE_NOT_IMPLEMENT 7 // δʵ -#define TPE_EXISTS 8 // ĿѾ -#define TPE_NOT_EXISTS 9 // Ŀ겻 - -// 100~299ͨôֵ - -#define TPE_FAILED 100 // ڲ -#define TPE_NETWORK 101 // -#define TPE_DATABASE 102 // ݿʧ - -// HTTPش -#define TPE_HTTP_METHOD 120 // Ч󷽷GET/POSTȣߴ󷽷ҪPOSTȴʹGETʽ -#define TPE_HTTP_URL_ENCODE 121 // URL޷룩 -//#define TPE_HTTP_URI 122 // ЧURI - -#define TPE_UNKNOWN_CMD 124 // δ֪ -#define TPE_JSON_FORMAT 125 // JSONʽҪJSONʽݣȴ޷JSONʽ룩 -#define TPE_PARAM 126 // -#define TPE_DATA 127 // ݴ - -// #define TPE_OPENFILE_ERROR 0x1007 // ޷ļ -// #define TPE_GETTEMPPATH_ERROR 0x1007 -#define TPE_OPENFILE 300 - - -//------------------------------------------------------- -// WEBרôֵ -//------------------------------------------------------- -#define TPE_CAPTCHA_EXPIRED 10000 // ֤ѹ -#define TPE_CAPTCHA_MISMATCH 10001 // ֤ -#define TPE_OATH_MISMATCH 10002 // ֤̬֤ -#define TPE_SYS_MAINTENANCE 10003 // ϵͳά - -#define TPE_USER_LOCKED 10100 // ûѾδ룩 -#define TPE_USER_DISABLED 10101 // ûѾ -#define TPE_USER_AUTH 10102 // ֤ʧ - -//------------------------------------------------------- -// ֳרôֵ -//------------------------------------------------------- -#define TPE_NO_ASSIST 100000 // δܼ⵽ֳ -#define TPE_OLD_ASSIST 100001 // ֳ汾̫ -#define TPE_START_CLIENT 100002 // ޷ͻ˳޷̣ - - - -//------------------------------------------------------- -// ķרôֵ -//------------------------------------------------------- -#define TPE_NO_CORE_SERVER 200000 // δܼ⵽ķ - - - -#endif // __TELEPORT_CONST_H__ +#ifndef __TELEPORT_CONST_H__ +#define __TELEPORT_CONST_H__ + +// 注意同步更新三个不同语言的const文件 + +// 本文件设定teleport各个模块之间通讯时的错误值(JSON数据),包括: +// - WEB界面与助手 +// - WEB界面与WEB后台 +// - WEB后台与CORE核心服务 + +//======================================================= +// Urlprotocol相关 +//======================================================= +#define TP_URLPROTO_APP_NAME "teleport" + +//======================================================= +// 远程连接认证方式 +//======================================================= +#define TP_AUTH_TYPE_NONE 0 +#define TP_AUTH_TYPE_PASSWORD 1 +#define TP_AUTH_TYPE_PRIVATE_KEY 2 + +//======================================================= +// 远程连接协议 +//======================================================= +#define TP_PROTOCOL_TYPE_RDP 1 +#define TP_PROTOCOL_TYPE_SSH 2 +#define TP_PROTOCOL_TYPE_TELNET 3 + +//======================================================= +// 远程连接子协议 +//======================================================= +#define TP_PROTOCOL_TYPE_RDP_DESKTOP 100 +#define TP_PROTOCOL_TYPE_SSH_SHELL 200 +#define TP_PROTOCOL_TYPE_SSH_SFTP 201 +#define TP_PROTOCOL_TYPE_TELNET_SHELL 300 + + +//======================================================= +// 远程主机操作系统 +//======================================================= +#define TP_OS_TYPE_WINDOWS 1 +#define TP_OS_TYPE_LINUX 2 + +//======================================================= +// 远程连接会话状态 +//======================================================= +#define TP_SESS_STAT_RUNNING 0 // 会话开始了,正在连接 +#define TP_SESS_STAT_END 9999 // 会话成功结束 +#define TP_SESS_STAT_ERR_AUTH_DENIED 1 // 会话结束,因为认证失败 +#define TP_SESS_STAT_ERR_CONNECT 2 // 会话结束,因为无法连接到远程主机 +#define TP_SESS_STAT_ERR_BAD_SSH_KEY 3 // 
会话结束,因为无法识别SSH私钥 +#define TP_SESS_STAT_ERR_INTERNAL 4 // 会话结束,因为内部错误 +#define TP_SESS_STAT_ERR_UNSUPPORT_PROTOCOL 5 // 会话结束,因为协议不支持(RDP) +#define TP_SESS_STAT_ERR_BAD_PKG 6 // 会话结束,因为收到错误的报文 +#define TP_SESS_STAT_ERR_RESET 7 // 会话结束,因为teleport核心服务重置了 +#define TP_SESS_STAT_ERR_IO 8 // 会话结束,因为网络中断 +#define TP_SESS_STAT_ERR_SESSION 9 // 会话结束,因为无效的会话ID +#define TP_SESS_STAT_ERR_AUTH_TYPE 10 // 会话结束,因为不被允许的认证方式 +#define TP_SESS_STAT_STARTED 100 // 已经连接成功了,开始记录录像了 +#define TP_SESS_STAT_ERR_START_INTERNAL 104 // 会话结束,因为内部错误 +#define TP_SESS_STAT_ERR_START_BAD_PKG 106 // 会话结束,因为收到错误的报文 +#define TP_SESS_STAT_ERR_START_RESET 107 // 会话结束,因为teleport核心服务重置了 +#define TP_SESS_STAT_ERR_START_IO 108 // 会话结束,因为网络中断 + + +//======================================================= +// 授权标记 +//======================================================= +#define TP_FLAG_ALL 0xFFFFFFFF +// 会话记录相关 +#define TP_FLAG_RECORD_REPLAY 0x00000001 // 允许记录历史(录像回放) +#define TP_FLAG_RECORD_REAL_TIME 0x00000002 // 允许实时监控 +// RDP相关 +#define TP_FLAG_RDP_DESKTOP 0x00000001 // 允许远程桌面 +#define TP_FLAG_RDP_CLIPBOARD 0x00000002 // 允许剪贴板 +#define TP_FLAG_RDP_DISK 0x00000004 // 允许磁盘映射 +#define TP_FLAG_RDP_APP 0x00000008 // 允许远程APP(尚未实现) +#define TP_FLAG_RDP_CONSOLE 0x00001000 //允许连接到管理员会话(RDP的console选项) +// SSH相关 +#define TP_FLAG_SSH_SHELL 0x00000001 // 允许SHELL +#define TP_FLAG_SSH_SFTP 0x00000002 // 允许SFTP +#define TP_FLAG_SSH_X11 0x00000004 // 允许X11转发(尚未实现) +#define TP_FLAG_SSH_EXEC 0x00000008 // 允许exec执行远程命令(尚未实现) +#define TP_FLAG_SSH_TUNNEL 0x00000010 // allow ssh tunnel. (not impl.) + + +//======================================================= +// 错误值 +//======================================================= +#define TPE_OK 0 // 成功 +//------------------------------------------------------- +// 通用错误值 +//------------------------------------------------------- +#define TPE_NEED_MORE_DATA 1 // 需要更多数据(不一定是错误) +#define TPE_NEED_LOGIN 2 // 需要登录 +#define TPE_PRIVILEGE 3 // 没有操作权限 +#define TPE_NOT_IMPLEMENT 7 // 功能尚未实现 +#define TPE_EXISTS 8 // 目标已经存在 +#define TPE_NOT_EXISTS 9 // 目标不存在 + +// 100~299是通用错误值 + +#define TPE_FAILED 100 // 内部错误 +#define TPE_NETWORK 101 // 网络错误 +#define TPE_DATABASE 102 // 数据库操作失败 + +// HTTP请求相关错误 +#define TPE_HTTP_METHOD 120 // 无效的请求方法(不是GET/POST等),或者错误的请求方法(例如需要POST,却使用GET方式请求) +#define TPE_HTTP_URL_ENCODE 121 // URL编码错误(无法解码) +//#define TPE_HTTP_URI 122 // 无效的URI + +#define TPE_UNKNOWN_CMD 124 // 未知的命令 +#define TPE_JSON_FORMAT 125 // 错误的JSON格式(需要JSON格式数据,但是却无法按JSON格式解码) +#define TPE_PARAM 126 // 参数错误 +#define TPE_INVALID_DATA 127 // 数据错误 +#define TPE_UNEXPECTED_DATA 128 // 不是期望的数据 + +// #define TPE_OPENFILE_ERROR 0x1007 // 无法打开文件 +// #define TPE_GETTEMPPATH_ERROR 0x1007 +#define TPE_OPENFILE 300 + + +//------------------------------------------------------- +// WEB服务专用错误值 +//------------------------------------------------------- +#define TPE_CAPTCHA_EXPIRED 10000 // 验证码已过期 +#define TPE_CAPTCHA_MISMATCH 10001 // 验证码错误 +#define TPE_OATH_MISMATCH 10002 // 身份验证器动态验证码错误 +#define TPE_SYS_MAINTENANCE 10003 // 系统维护中 + +#define TPE_USER_LOCKED 10100 // 用户已经被锁定(连续多次错误密码) +#define TPE_USER_DISABLED 10101 // 用户已经被禁用 +#define TPE_USER_AUTH 10102 // 身份验证失败 + +//------------------------------------------------------- +// 助手程序专用错误值 +//------------------------------------------------------- +#define TPE_NO_ASSIST 100000 // 未能检测到助手程序 +#define TPE_OLD_ASSIST 100001 // 助手程序版本太低 +#define TPE_START_CLIENT 100002 // 无法启动客户端程序(无法创建进程) + + + +//------------------------------------------------------- +// 核心服务专用错误值 
+//------------------------------------------------------- +#define TPE_NO_CORE_SERVER 200000 // 未能检测到核心服务 + + + +#endif // __TELEPORT_CONST_H__ diff --git a/config.ini.in b/config.ini.in index fbc4f5e..27cb9fa 100644 --- a/config.ini.in +++ b/config.ini.in @@ -22,6 +22,8 @@ wget = C:\Program Files (x86)\wget\wget.exe # if not set msbuild path, default to get it by register. #msbuild = C:\Program Files (x86)\MSBuild\14.0\bin\MSBuild.exe +# need qt to build tp-player. +qt = C:\Qt\Qt5.12.0\5.12.0\msvc2017 # ============================================ # for Linux and macOS diff --git a/dist/client/windows/assist/installer.nsi b/dist/client/windows/assist/installer.nsi index 9304d0c..d893ee3 100644 Binary files a/dist/client/windows/assist/installer.nsi and b/dist/client/windows/assist/installer.nsi differ diff --git a/dist/client/windows/assist/setup.nsh b/dist/client/windows/assist/setup.nsh index de7f510..e0ca4e1 100644 Binary files a/dist/client/windows/assist/setup.nsh and b/dist/client/windows/assist/setup.nsh differ diff --git a/dist/server/script/core/utils.py b/dist/server/script/core/utils.py index c09ea42..303648c 100644 --- a/dist/server/script/core/utils.py +++ b/dist/server/script/core/utils.py @@ -14,11 +14,12 @@ from .env import env def remove(*args): path = os.path.join(*args) - cc.v(' - remove [%s] ... ' % path, end='') + # cc.v(' - remove [%s] ... ' % path, end='') if not (os.path.exists(path) or os.path.islink(path)): - cc.v('not exists, skip.') + # cc.v('not exists, skip.') return + cc.v(' - remove [%s] ... ' % path, end='') for i in range(5): cc.v('.', end='') try: diff --git a/dist/server/script/main.py b/dist/server/script/main.py index f3e0c3e..b364c89 100644 --- a/dist/server/script/main.py +++ b/dist/server/script/main.py @@ -498,6 +498,18 @@ class InstallerLinux(InstallerBase): elif os.path.exists('/etc/init.d/teleport'): self._is_installed = True self._install_path = '/usr/local/teleport' + + with open('/etc/init.d/teleport', 'r') as f: + lines = f.readlines() + for l in lines: + if l.startswith('DAEMON_PATH='): + l = l.replace('\r', '') + l = l.replace('\n', '') + x = l.split('=') + self._install_path = x[1] + break + + # self._fix_path() if self._is_installed: diff --git a/external/fix-external/Python-3.7.5/Modules/Setup.dist b/external/fix-external/Python-3.7.5/Modules/Setup.dist new file mode 100755 index 0000000..730619e --- /dev/null +++ b/external/fix-external/Python-3.7.5/Modules/Setup.dist @@ -0,0 +1,400 @@ +# -*- makefile -*- +# The file Setup is used by the makesetup script to construct the files +# Makefile and config.c, from Makefile.pre and config.c.in, +# respectively. The file Setup itself is initially copied from +# Setup.dist; once it exists it will not be overwritten, so you can edit +# Setup to your heart's content. Note that Makefile.pre is created +# from Makefile.pre.in by the toplevel configure script. + +# (VPATH notes: Setup and Makefile.pre are in the build directory, as +# are Makefile and config.c; the *.in and *.dist files are in the source +# directory.) + +# Each line in this file describes one or more optional modules. +# Modules configured here will not be compiled by the setup.py script, +# so the file can be used to override setup.py's behavior. +# Tag lines containing just the word "*static*", "*shared*" or "*disabled*" +# (without the quotes but with the stars) are used to tag the following module +# descriptions. Tag lines may alternate throughout this file. 
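Stepping back to the rewritten `teleport_const.h` above: its header comment says these values travel between the web, assist, and core modules as JSON, so the C++ side only ever sees integers. A small naming helper keeps logs readable; this is a sketch in which the message wording is ours and only the codes come from the header.

```cpp
// Sketch: log-friendly names for a few teleport_const.h error values.
#include "teleport_const.h"

const char* tpe_name(int code) {
    switch (code) {
    case TPE_OK:              return "ok";
    case TPE_NEED_MORE_DATA:  return "need more data (not necessarily an error)";
    case TPE_NEED_LOGIN:      return "login required";
    case TPE_PRIVILEGE:       return "permission denied";
    case TPE_FAILED:          return "internal error";
    case TPE_NETWORK:         return "network error";
    case TPE_DATABASE:        return "database operation failed";
    case TPE_JSON_FORMAT:     return "malformed JSON";
    case TPE_PARAM:           return "bad parameter";
    case TPE_INVALID_DATA:    return "invalid data";      // 127, renamed from TPE_DATA
    case TPE_UNEXPECTED_DATA: return "unexpected data";   // 128, added by this change
    case TPE_NO_ASSIST:       return "assist program not detected";
    case TPE_NO_CORE_SERVER:  return "core service not detected";
    default:                  return "unknown error code";
    }
}
```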
+# built statically when they are preceded by a "*static*" tag line or when
+# there is no tag line between the start of the file and the module
+# description. Modules are built as a shared library when they are preceded by
+# a "*shared*" tag line. Modules are not built at all, not by the Makefile,
+# nor by the setup.py script, when they are preceded by a "*disabled*" tag
+# line.
+
+# Lines have the following structure:
+#
+# <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
+#
+# <sourcefile> is anything ending in .c (.C, .cc, .c++ are C++ files)
+# <cpparg> is anything starting with -I, -D, -U or -C
+# <library> is anything ending in .a or beginning with -l or -L
+# <module> is anything else but should be a valid Python
+# identifier (letters, digits, underscores, beginning with non-digit)
+#
+# (As the makesetup script changes, it may recognize some other
+# arguments as well, e.g. *.so and *.sl as libraries. See the big
+# case statement in the makesetup script.)
+#
+# Lines can also have the form
+#
+# <name> = <value>
+#
+# which defines a Make variable definition inserted into Makefile.in
+#
+# The build process works like this:
+#
+# 1. Build all modules that are declared as static in Modules/Setup,
+#    combine them into libpythonxy.a, combine that into python.
+# 2. Build all modules that are listed as shared in Modules/Setup.
+# 3. Invoke setup.py. That builds all modules that
+#    a) are not builtin, and
+#    b) are not listed in Modules/Setup, and
+#    c) can be built on the target
+#
+# Therefore, modules declared to be shared will not be
+# included in the config.c file, nor in the list of objects to be
+# added to the library archive, and their linker options won't be
+# added to the linker options. Rules to create their .o files and
+# their shared libraries will still be added to the Makefile, and
+# their names will be collected in the Make variable SHAREDMODS. This
+# is used to build modules as shared libraries. (They can be
+# installed using "make sharedinstall", which is implied by the
+# toplevel "make install" target.) (For compatibility,
+# *noconfig* has the same effect as *shared*.)
+#
+# NOTE: As a standard policy, as many modules as can be supported by a
+# platform should be present. The distribution comes with all modules
+# enabled that are supported by most platforms and don't require you
+# to ftp sources from elsewhere.
+
+
+# Some special rules to define PYTHONPATH.
+# Edit the definitions below to indicate which options you are using.
+# Don't add any whitespace or comments!
+
+# Directories where library files get installed.
+# DESTLIB is for Python modules; MACHDESTLIB for shared libraries.
+DESTLIB=$(LIBDEST)
+MACHDESTLIB=$(BINLIBDEST)
+
+# NOTE: all the paths are now relative to the prefix that is computed
+# at run time!
+
+# Standard path -- don't edit.
+# No leading colon since this is the first entry.
+# Empty since this is now just the runtime prefix.
+DESTPATH=
+
+# Site specific path components -- should begin with : if non-empty
+SITEPATH=
+
+# Standard path components for test modules
+TESTPATH=
+
+COREPYTHONPATH=$(DESTPATH)$(SITEPATH)$(TESTPATH)
+PYTHONPATH=$(COREPYTHONPATH)
+
+
+# The modules listed here can't be built as shared libraries for
+# various reasons; therefore they are listed here instead of in the
+# normal order.
+
+# This only contains the minimal set of modules required to run the
+# setup.py script in the root of the Python source tree.
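+#
+# As a sketch of the line format described above ("spam" is a hypothetical
+# module shown for illustration only; it is not built by this file):
+#
+#   spam spammodule.c -DSPAM_DEBUG -lm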
+ +posix -DPy_BUILD_CORE posixmodule.c # posix (UNIX) system calls +errno errnomodule.c # posix (UNIX) errno values +pwd pwdmodule.c # this is needed to find out the user's home dir + # if $HOME is not set +_sre _sre.c # Fredrik Lundh's new regular expressions +_codecs _codecsmodule.c # access to the builtin codecs and codec registry +_weakref _weakref.c # weak references +_functools -DPy_BUILD_CORE _functoolsmodule.c # Tools for working with functions and callable objects +_operator _operator.c # operator.add() and similar goodies +_collections _collectionsmodule.c # Container types +_abc _abc.c # Abstract base classes +itertools itertoolsmodule.c # Functions creating iterators for efficient looping +atexit atexitmodule.c # Register functions to be run at interpreter-shutdown +_signal -DPy_BUILD_CORE signalmodule.c +_stat _stat.c # stat.h interface +time -DPy_BUILD_CORE timemodule.c # -lm # time operations and variables +_thread -DPy_BUILD_CORE _threadmodule.c # low-level threading interface + +# access to ISO C locale support +_locale _localemodule.c # -lintl + +# Standard I/O baseline +_io -DPy_BUILD_CORE -I$(srcdir)/Modules/_io _io/_iomodule.c _io/iobase.c _io/fileio.c _io/bytesio.c _io/bufferedio.c _io/textio.c _io/stringio.c + +# The zipimport module is always imported at startup. Having it as a +# builtin module avoids some bootstrapping problems and reduces overhead. +zipimport -DPy_BUILD_CORE zipimport.c + +# faulthandler module +faulthandler faulthandler.c + +# debug tool to trace memory blocks allocated by Python +_tracemalloc _tracemalloc.c hashtable.c + +# The rest of the modules listed in this file are all commented out by +# default. Usually they can be detected and built as dynamically +# loaded modules by the new setup.py script added in Python 2.1. If +# you're on a platform that doesn't support dynamic loading, want to +# compile modules statically into the Python binary, or need to +# specify some odd set of compiler switches, you can uncomment the +# appropriate lines below. + +# ====================================================================== + +# The Python symtable module depends on .h files that setup.py doesn't track +_symtable symtablemodule.c + +# Uncommenting the following line tells makesetup that all following +# modules are to be built as shared libraries (see above for more +# detail; also note that *static* or *disabled* cancels this effect): + +#*shared* +*static* + +# GNU readline. Unlike previous Python incarnations, GNU readline is +# now incorporated in an optional module, configured in the Setup file +# instead of by a configure script switch. You may have to insert a +# -L option pointing to the directory where libreadline.* lives, +# and you may have to change -ltermcap to -ltermlib or perhaps remove +# it, depending on your system -- see the GNU readline instructions. +# It's okay for this to be a shared library, too. + +#readline readline.c -lreadline -ltermcap + + +# Modules that should always be present (non UNIX dependent): + +array arraymodule.c # array objects +cmath cmathmodule.c _math.c # -lm # complex math library functions +math mathmodule.c _math.c # -lm # math library functions, e.g. 
sin() +_contextvars _contextvarsmodule.c # Context Variables +_struct _struct.c # binary structure packing/unpacking +_weakref _weakref.c # basic weak reference support +#_testcapi _testcapimodule.c # Python C API test module +_random _randommodule.c # Random number generator +_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator +_pickle _pickle.c # pickle accelerator +_datetime _datetimemodule.c # datetime accelerator +_bisect _bisectmodule.c # Bisection algorithms +_heapq _heapqmodule.c # Heap queue algorithm +_asyncio _asynciomodule.c # Fast asyncio Future + +unicodedata unicodedata.c # static Unicode character database + + +# Modules with some UNIX dependencies -- on by default: +# (If you have a really backward UNIX, select and socket may not be +# supported...) + +fcntl fcntlmodule.c # fcntl(2) and ioctl(2) +spwd spwdmodule.c # spwd(3) +grp grpmodule.c # grp(3) +select selectmodule.c # select(2); not on ancient System V + +# Memory-mapped files (also works on Win32). +mmap mmapmodule.c + +# CSV file helper +_csv _csv.c + +# Socket module helper for socket(2) +_socket socketmodule.c + +# Socket module helper for SSL support; you must comment out the other +# socket line above, and possibly edit the SSL variable: +SSL=$(srcdir)/../../release +_ssl _ssl.c \ + -DUSE_SSL -I$(SSL)/include \ + $(SSL)/lib/libssl.a $(SSL)/lib/libcrypto.a +# -lssl -lcrypto + +# The crypt module is now disabled by default because it breaks builds +# on many systems (where -lcrypt is needed), e.g. Linux (I believe). + +#_crypt _cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems + + +# Some more UNIX dependent modules -- off by default, since these +# are not supported by all UNIX systems: + +#nis nismodule.c -lnsl # Sun yellow pages -- not everywhere +termios termios.c # Steen Lumholt's termios module +resource resource.c # Jeremy Hylton's rlimit interface + +_posixsubprocess _posixsubprocess.c # POSIX subprocess module helper + +# Multimedia modules -- off by default. +# These don't work for 64-bit platforms!!! +# #993173 says audioop works on 64-bit platforms, though. +# These represent audio samples or images as strings: + +#audioop audioop.c # Operations on audio samples + + +# Note that the _md5 and _sha modules are normally only built if the +# system does not have the OpenSSL libs containing an optimized version. + +# The _md5 module implements the RSA Data Security, Inc. MD5 +# Message-Digest Algorithm, described in RFC 1321. + +_md5 md5module.c + + +# The _sha module implements the SHA checksum algorithms. +# (NIST's Secure Hash Algorithms.) +_sha1 sha1module.c +_sha256 sha256module.c +_sha512 sha512module.c +_sha3 _sha3/sha3module.c + +# _blake module +_blake2 _blake2/blake2module.c _blake2/blake2b_impl.c _blake2/blake2s_impl.c + +# The _tkinter module. +# +# The command for _tkinter is long and site specific. Please +# uncomment and/or edit those parts as indicated. If you don't have a +# specific extension (e.g. Tix or BLT), leave the corresponding line +# commented out. (Leave the trailing backslashes in! If you +# experience strange errors, you may want to join all uncommented +# lines and remove the backslashes -- the backslash interpretation is +# done by the shell's "read" command and it may not be implemented on +# every system. 
+ +# *** Always uncomment this (leave the leading underscore in!): +# _tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \ +# *** Uncomment and edit to reflect where your Tcl/Tk libraries are: +# -L/usr/local/lib \ +# *** Uncomment and edit to reflect where your Tcl/Tk headers are: +# -I/usr/local/include \ +# *** Uncomment and edit to reflect where your X11 header files are: +# -I/usr/X11R6/include \ +# *** Or uncomment this for Solaris: +# -I/usr/openwin/include \ +# *** Uncomment and edit for Tix extension only: +# -DWITH_TIX -ltix8.1.8.2 \ +# *** Uncomment and edit for BLT extension only: +# -DWITH_BLT -I/usr/local/blt/blt8.0-unoff/include -lBLT8.0 \ +# *** Uncomment and edit for PIL (TkImaging) extension only: +# (See http://www.pythonware.com/products/pil/ for more info) +# -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ +# *** Uncomment and edit for TOGL extension only: +# -DWITH_TOGL togl.c \ +# *** Uncomment and edit to reflect your Tcl/Tk versions: +# -ltk8.2 -ltcl8.2 \ +# *** Uncomment and edit to reflect where your X11 libraries are: +# -L/usr/X11R6/lib \ +# *** Or uncomment this for Solaris: +# -L/usr/openwin/lib \ +# *** Uncomment these for TOGL extension only: +# -lGL -lGLU -lXext -lXmu \ +# *** Uncomment for AIX: +# -lld \ +# *** Always uncomment this; X11 libraries to link with: +# -lX11 + +# Lance Ellinghaus's syslog module +syslog syslogmodule.c # syslog daemon interface + + +# Curses support, requiring the System V version of curses, often +# provided by the ncurses library. e.g. on Linux, link with -lncurses +# instead of -lcurses). + +#_curses _cursesmodule.c -lcurses -ltermcap +# Wrapper for the panel library that's part of ncurses and SYSV curses. +#_curses_panel _curses_panel.c -lpanel -lncurses + + +# Modules that provide persistent dictionary-like semantics. You will +# probably want to arrange for at least one of them to be available on +# your machine, though none are defined by default because of library +# dependencies. The Python module dbm/__init__.py provides an +# implementation independent wrapper for these; dbm/dumb.py provides +# similar functionality (but slower of course) implemented in Python. + +#_dbm _dbmmodule.c # dbm(3) may require -lndbm or similar + +# Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: + +#_gdbm _gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm + + +# Helper module for various ascii-encoders +binascii binascii.c + +# Fred Drake's interface to the Python parser +#parser parsermodule.c + + +# Andrew Kuchling's zlib module. +# This require zlib 1.1.3 (or later). +# See http://www.gzip.org/zlib/ +#zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz + +# Interface to the Expat XML parser +# +# Expat was written by James Clark and is now maintained by a group of +# developers on SourceForge; see www.libexpat.org for more +# information. The pyexpat module was written by Paul Prescod after a +# prototype by Jack Jansen. Source of Expat 1.95.2 is included in +# Modules/expat/. Usage of a system shared libexpat.so/expat.dll is +# not advised. +# +# More information on Expat can be found at www.libexpat.org. 
+# +#pyexpat expat/xmlparse.c expat/xmlrole.c expat/xmltok.c pyexpat.c -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI + +# Hye-Shik Chang's CJKCodecs + +# multibytecodec is required for all the other CJK codec modules +_multibytecodec cjkcodecs/multibytecodec.c + +_codecs_cn cjkcodecs/_codecs_cn.c +_codecs_hk cjkcodecs/_codecs_hk.c +_codecs_iso2022 cjkcodecs/_codecs_iso2022.c +_codecs_jp cjkcodecs/_codecs_jp.c +_codecs_kr cjkcodecs/_codecs_kr.c +_codecs_tw cjkcodecs/_codecs_tw.c + +# Example -- included for reference only: +# xx xxmodule.c + +# Another example -- the 'xxsubtype' module shows C-level subtyping in action +#xxsubtype xxsubtype.c + + + +######################################################################## +# add extra-builtin-module by Apex Liu. +######################################################################## + +zlib zlibmodule.c -I$(srcdir)/Modules/zlib \ + zlib/adler32.c zlib/crc32.c zlib/deflate.c zlib/infback.c zlib/inffast.c zlib/inflate.c zlib/inftrees.c zlib/trees.c zlib/zutil.c \ + zlib/compress.c zlib/uncompr.c zlib/gzclose.c zlib/gzlib.c zlib/gzread.c zlib/gzwrite.c + +_json _json.c + +_sqlite3 -I$(srcdir)/Modules/_sqlite/sqlite3 -I$(srcdir)/Modules/_sqlite \ + $(srcdir)/Modules/_sqlite/sqlite3/sqlite3.c \ + _sqlite/cache.c _sqlite/connection.c _sqlite/cursor.c _sqlite/microprotocols.c _sqlite/module.c \ + _sqlite/prepare_protocol.c _sqlite/row.c _sqlite/statement.c _sqlite/util.c + +# Uncommenting the following line tells makesetup that all following modules +# are not built (see above for more detail). +# +#*disabled* +# +#_sqlite3 _tkinter _curses pyexpat +#_codecs_jp _codecs_kr _codecs_tw unicodedata + +*disabled* +_tkinter _curses + diff --git a/external/fix-external/Python-3.7.5/Modules/_sqlite/cache.h b/external/fix-external/Python-3.7.5/Modules/_sqlite/cache.h new file mode 100644 index 0000000..9d9a95b --- /dev/null +++ b/external/fix-external/Python-3.7.5/Modules/_sqlite/cache.h @@ -0,0 +1,75 @@ +/* cache.h - definitions for the LRU cache + * + * Copyright (C) 2004-2010 Gerhard Häring + * + * This file is part of pysqlite. + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgment in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +#ifndef PYSQLITE_CACHE_H +#define PYSQLITE_CACHE_H +#include "Python.h" + +#define MODULE_NAME "sqlite3" + +/* The LRU cache is implemented as a combination of a doubly-linked with a + * dictionary. The list items are of type 'Node' and the dictionary has the + * nodes as values. 
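+ * (The dictionary gives O(1) lookup of a node by key, while the
+ * doubly-linked list keeps the nodes in recency order so the
+ * least-recently-used one can be evicted.)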
*/ + +typedef struct _pysqlite_Node +{ + PyObject_HEAD + PyObject* key; + PyObject* data; + long count; + struct _pysqlite_Node* prev; + struct _pysqlite_Node* next; +} pysqlite_Node; + +typedef struct +{ + PyObject_HEAD + int size; + + /* a dictionary mapping keys to Node entries */ + PyObject* mapping; + + /* the factory callable */ + PyObject* factory; + + pysqlite_Node* first; + pysqlite_Node* last; + + /* if set, decrement the factory function when the Cache is deallocated. + * this is almost always desirable, but not in the pysqlite context */ + int decref_factory; +} pysqlite_Cache; + +extern PyTypeObject pysqlite_NodeType; +extern PyTypeObject pysqlite_CacheType; + +int pysqlite_node_init(pysqlite_Node* self, PyObject* args, PyObject* kwargs); +void pysqlite_node_dealloc(pysqlite_Node* self); + +int pysqlite_cache_init(pysqlite_Cache* self, PyObject* args, PyObject* kwargs); +void pysqlite_cache_dealloc(pysqlite_Cache* self); +PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args); + +int pysqlite_cache_setup_types(void); + +#endif diff --git a/external/fix-external/Python-3.7.5/Modules/_sqlite/prepare_protocol.h b/external/fix-external/Python-3.7.5/Modules/_sqlite/prepare_protocol.h new file mode 100644 index 0000000..2de505c --- /dev/null +++ b/external/fix-external/Python-3.7.5/Modules/_sqlite/prepare_protocol.h @@ -0,0 +1,43 @@ +/* prepare_protocol.h - the protocol for preparing values for SQLite + * + * Copyright (C) 2005-2010 Gerhard Häring + * + * This file is part of pysqlite. + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgment in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +#ifndef PYSQLITE_PREPARE_PROTOCOL_H +#define PYSQLITE_PREPARE_PROTOCOL_H +#include "Python.h" + +#define MODULE_NAME "sqlite3" + +typedef struct +{ + PyObject_HEAD +} pysqlite_PrepareProtocol; + +extern PyTypeObject pysqlite_PrepareProtocolType; + +int pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol* self, PyObject* args, PyObject* kwargs); +void pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol* self); + +int pysqlite_prepare_protocol_setup_types(void); + +#define UNKNOWN (-1) +#endif diff --git a/external/fix-external/libssh/libssh-0.9.0/src/libcrypto-compat.c b/external/fix-external/libssh/libssh-0.9.0/src/libcrypto-compat.c new file mode 100755 index 0000000..2bfa186 --- /dev/null +++ b/external/fix-external/libssh/libssh-0.9.0/src/libcrypto-compat.c @@ -0,0 +1,398 @@ +/* + * Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. + * + * Licensed under the OpenSSL license (the "License"). You may not use + * this file except in compliance with the License. 
You can obtain a copy + * in the file LICENSE in the source distribution or at + * https://www.openssl.org/source/license.html + */ + +#include "config.h" + +#include +#include "libcrypto-compat.h" + +#ifndef OPENSSL_NO_ENGINE +#include +#endif + +static void *OPENSSL_zalloc(size_t num) +{ + void *ret = OPENSSL_malloc(num); + + if (ret != NULL) + memset(ret, 0, num); + return ret; +} + +int RSA_set0_key(RSA *r, BIGNUM *n, BIGNUM *e, BIGNUM *d) +{ + /* If the fields n and e in r are NULL, the corresponding input + * parameters MUST be non-NULL for n and e. d may be + * left NULL (in case only the public key is used). + */ + if ((r->n == NULL && n == NULL) + || (r->e == NULL && e == NULL)) + return 0; + + if (n != NULL) { + BN_free(r->n); + r->n = n; + } + if (e != NULL) { + BN_free(r->e); + r->e = e; + } + if (d != NULL) { + BN_free(r->d); + r->d = d; + } + + return 1; +} + +int RSA_set0_factors(RSA *r, BIGNUM *p, BIGNUM *q) +{ + /* If the fields p and q in r are NULL, the corresponding input + * parameters MUST be non-NULL. + */ + if ((r->p == NULL && p == NULL) + || (r->q == NULL && q == NULL)) + return 0; + + if (p != NULL) { + BN_free(r->p); + r->p = p; + } + if (q != NULL) { + BN_free(r->q); + r->q = q; + } + + return 1; +} + +int RSA_set0_crt_params(RSA *r, BIGNUM *dmp1, BIGNUM *dmq1, BIGNUM *iqmp) +{ + /* If the fields dmp1, dmq1 and iqmp in r are NULL, the corresponding input + * parameters MUST be non-NULL. + */ + if ((r->dmp1 == NULL && dmp1 == NULL) + || (r->dmq1 == NULL && dmq1 == NULL) + || (r->iqmp == NULL && iqmp == NULL)) + return 0; + + if (dmp1 != NULL) { + BN_free(r->dmp1); + r->dmp1 = dmp1; + } + if (dmq1 != NULL) { + BN_free(r->dmq1); + r->dmq1 = dmq1; + } + if (iqmp != NULL) { + BN_free(r->iqmp); + r->iqmp = iqmp; + } + + return 1; +} + +void RSA_get0_key(const RSA *r, + const BIGNUM **n, const BIGNUM **e, const BIGNUM **d) +{ + if (n != NULL) + *n = r->n; + if (e != NULL) + *e = r->e; + if (d != NULL) + *d = r->d; +} + +void RSA_get0_factors(const RSA *r, const BIGNUM **p, const BIGNUM **q) +{ + if (p != NULL) + *p = r->p; + if (q != NULL) + *q = r->q; +} + +void RSA_get0_crt_params(const RSA *r, + const BIGNUM **dmp1, const BIGNUM **dmq1, + const BIGNUM **iqmp) +{ + if (dmp1 != NULL) + *dmp1 = r->dmp1; + if (dmq1 != NULL) + *dmq1 = r->dmq1; + if (iqmp != NULL) + *iqmp = r->iqmp; +} + +void DSA_get0_pqg(const DSA *d, + const BIGNUM **p, const BIGNUM **q, const BIGNUM **g) +{ + if (p != NULL) + *p = d->p; + if (q != NULL) + *q = d->q; + if (g != NULL) + *g = d->g; +} + +int DSA_set0_pqg(DSA *d, BIGNUM *p, BIGNUM *q, BIGNUM *g) +{ + /* If the fields p, q and g in d are NULL, the corresponding input + * parameters MUST be non-NULL. + */ + if ((d->p == NULL && p == NULL) + || (d->q == NULL && q == NULL) + || (d->g == NULL && g == NULL)) + return 0; + + if (p != NULL) { + BN_free(d->p); + d->p = p; + } + if (q != NULL) { + BN_free(d->q); + d->q = q; + } + if (g != NULL) { + BN_free(d->g); + d->g = g; + } + + return 1; +} + +void DSA_get0_key(const DSA *d, + const BIGNUM **pub_key, const BIGNUM **priv_key) +{ + if (pub_key != NULL) + *pub_key = d->pub_key; + if (priv_key != NULL) + *priv_key = d->priv_key; +} + +int DSA_set0_key(DSA *d, BIGNUM *pub_key, BIGNUM *priv_key) +{ + /* If the field pub_key in d is NULL, the corresponding input + * parameters MUST be non-NULL. The priv_key field may + * be left NULL. 
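+ * (As with the other set0 helpers in this compatibility layer, the DSA
+ * object takes ownership of any BIGNUM passed in: the previous value is
+ * freed and the new pointer stored, so the caller must not free it
+ * afterwards.)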
+ */ + if (d->pub_key == NULL && pub_key == NULL) + return 0; + + if (pub_key != NULL) { + BN_free(d->pub_key); + d->pub_key = pub_key; + } + if (priv_key != NULL) { + BN_free(d->priv_key); + d->priv_key = priv_key; + } + + return 1; +} + +void DSA_SIG_get0(const DSA_SIG *sig, const BIGNUM **pr, const BIGNUM **ps) +{ + if (pr != NULL) + *pr = sig->r; + if (ps != NULL) + *ps = sig->s; +} + +int DSA_SIG_set0(DSA_SIG *sig, BIGNUM *r, BIGNUM *s) +{ + if (r == NULL || s == NULL) + return 0; + BN_clear_free(sig->r); + BN_clear_free(sig->s); + sig->r = r; + sig->s = s; + return 1; +} + +void ECDSA_SIG_get0(const ECDSA_SIG *sig, const BIGNUM **pr, const BIGNUM **ps) +{ + if (pr != NULL) + *pr = sig->r; + if (ps != NULL) + *ps = sig->s; +} + +int ECDSA_SIG_set0(ECDSA_SIG *sig, BIGNUM *r, BIGNUM *s) +{ + if (r == NULL || s == NULL) + return 0; + BN_clear_free(sig->r); + BN_clear_free(sig->s); + sig->r = r; + sig->s = s; + return 1; +} + +EVP_MD_CTX *EVP_MD_CTX_new(void) +{ + return OPENSSL_zalloc(sizeof(EVP_MD_CTX)); +} + +static void OPENSSL_clear_free(void *str, size_t num) +{ + if (str == NULL) + return; + if (num) + OPENSSL_cleanse(str, num); + OPENSSL_free(str); +} + +/* This call frees resources associated with the context */ +int EVP_MD_CTX_reset(EVP_MD_CTX *ctx) +{ + if (ctx == NULL) + return 1; + + /* + * Don't assume ctx->md_data was cleaned in EVP_Digest_Final, because + * sometimes only copies of the context are ever finalised. + */ + if (ctx->digest && ctx->digest->cleanup + && !EVP_MD_CTX_test_flags(ctx, EVP_MD_CTX_FLAG_CLEANED)) + ctx->digest->cleanup(ctx); + if (ctx->digest && ctx->digest->ctx_size && ctx->md_data + && !EVP_MD_CTX_test_flags(ctx, EVP_MD_CTX_FLAG_REUSE)) { + OPENSSL_clear_free(ctx->md_data, ctx->digest->ctx_size); + } + EVP_PKEY_CTX_free(ctx->pctx); +#ifndef OPENSSL_NO_ENGINE + ENGINE_finish(ctx->engine); +#endif + OPENSSL_cleanse(ctx, sizeof(*ctx)); + + return 1; +} + +void EVP_MD_CTX_free(EVP_MD_CTX *ctx) +{ + EVP_MD_CTX_reset(ctx); + OPENSSL_free(ctx); +} + +HMAC_CTX *HMAC_CTX_new(void) +{ + HMAC_CTX *ctx = OPENSSL_zalloc(sizeof(HMAC_CTX)); + + if (ctx != NULL) { + if (!HMAC_CTX_reset(ctx)) { + HMAC_CTX_free(ctx); + return NULL; + } + } + return ctx; +} + +static void hmac_ctx_cleanup(HMAC_CTX *ctx) +{ + EVP_MD_CTX_reset(&ctx->i_ctx); + EVP_MD_CTX_reset(&ctx->o_ctx); + EVP_MD_CTX_reset(&ctx->md_ctx); + ctx->md = NULL; + ctx->key_length = 0; + OPENSSL_cleanse(ctx->key, sizeof(ctx->key)); +} + +void HMAC_CTX_free(HMAC_CTX *ctx) +{ + if (ctx != NULL) { + hmac_ctx_cleanup(ctx); +#if OPENSSL_VERSION_NUMBER > 0x10100000L + EVP_MD_CTX_free(&ctx->i_ctx); + EVP_MD_CTX_free(&ctx->o_ctx); + EVP_MD_CTX_free(&ctx->md_ctx); +#endif + OPENSSL_free(ctx); + } +} + +int HMAC_CTX_reset(HMAC_CTX *ctx) +{ + HMAC_CTX_init(ctx); + return 1; +} + +#if 0 // by apex +#ifndef HAVE_OPENSSL_EVP_CIPHER_CTX_NEW +EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void) +{ + return OPENSSL_zalloc(sizeof(EVP_CIPHER_CTX)); +} + +void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) +{ + /* EVP_CIPHER_CTX_reset(ctx); alias */ + EVP_CIPHER_CTX_init(ctx); + OPENSSL_free(ctx); +} +#endif +#endif + +void DH_get0_pqg(const DH *dh, + const BIGNUM **p, const BIGNUM **q, const BIGNUM **g) +{ + if (p) { + *p = dh->p; + } + if (q) { + *q = NULL; + } + if (g) { + *g = dh->g; + } +} + +int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g) +{ + if (p) { + if (dh->p) { + BN_free(dh->p); + } + dh->p = p; + } + if (g) { + if (dh->g) { + BN_free(dh->g); + } + dh->g = g; + } + return 1; +} + +void DH_get0_key(const DH *dh, + const BIGNUM 
**pub_key, const BIGNUM **priv_key) +{ + if (pub_key) { + *pub_key = dh->pub_key; + } + if (priv_key) { + *priv_key = dh->priv_key; + } +} + +int DH_set0_key(DH *dh, BIGNUM *pub_key, BIGNUM *priv_key) +{ + if (pub_key) { + if (dh->pub_key) { + BN_free(dh->pub_key); + } + dh->pub_key = pub_key; + } + if (priv_key) { + if (dh->priv_key) { + BN_free(dh->priv_key); + } + dh->priv_key = priv_key; + } + return 1; +} diff --git a/external/fix-external/libssh/libssh-0.9.2/src/libcrypto-compat.c b/external/fix-external/libssh/libssh-0.9.2/src/libcrypto-compat.c new file mode 100644 index 0000000..36de72f --- /dev/null +++ b/external/fix-external/libssh/libssh-0.9.2/src/libcrypto-compat.c @@ -0,0 +1,398 @@ +/* + * Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. + * + * Licensed under the OpenSSL license (the "License"). You may not use + * this file except in compliance with the License. You can obtain a copy + * in the file LICENSE in the source distribution or at + * https://www.openssl.org/source/license.html + */ + +#include "config.h" + +#include +#include "libcrypto-compat.h" + +#ifndef OPENSSL_NO_ENGINE +#include +#endif + +static void *OPENSSL_zalloc(size_t num) +{ + void *ret = OPENSSL_malloc(num); + + if (ret != NULL) + memset(ret, 0, num); + return ret; +} + +int RSA_set0_key(RSA *r, BIGNUM *n, BIGNUM *e, BIGNUM *d) +{ + /* If the fields n and e in r are NULL, the corresponding input + * parameters MUST be non-NULL for n and e. d may be + * left NULL (in case only the public key is used). + */ + if ((r->n == NULL && n == NULL) + || (r->e == NULL && e == NULL)) + return 0; + + if (n != NULL) { + BN_free(r->n); + r->n = n; + } + if (e != NULL) { + BN_free(r->e); + r->e = e; + } + if (d != NULL) { + BN_free(r->d); + r->d = d; + } + + return 1; +} + +int RSA_set0_factors(RSA *r, BIGNUM *p, BIGNUM *q) +{ + /* If the fields p and q in r are NULL, the corresponding input + * parameters MUST be non-NULL. + */ + if ((r->p == NULL && p == NULL) + || (r->q == NULL && q == NULL)) + return 0; + + if (p != NULL) { + BN_free(r->p); + r->p = p; + } + if (q != NULL) { + BN_free(r->q); + r->q = q; + } + + return 1; +} + +int RSA_set0_crt_params(RSA *r, BIGNUM *dmp1, BIGNUM *dmq1, BIGNUM *iqmp) +{ + /* If the fields dmp1, dmq1 and iqmp in r are NULL, the corresponding input + * parameters MUST be non-NULL. 
+ */ + if ((r->dmp1 == NULL && dmp1 == NULL) + || (r->dmq1 == NULL && dmq1 == NULL) + || (r->iqmp == NULL && iqmp == NULL)) + return 0; + + if (dmp1 != NULL) { + BN_free(r->dmp1); + r->dmp1 = dmp1; + } + if (dmq1 != NULL) { + BN_free(r->dmq1); + r->dmq1 = dmq1; + } + if (iqmp != NULL) { + BN_free(r->iqmp); + r->iqmp = iqmp; + } + + return 1; +} + +void RSA_get0_key(const RSA *r, + const BIGNUM **n, const BIGNUM **e, const BIGNUM **d) +{ + if (n != NULL) + *n = r->n; + if (e != NULL) + *e = r->e; + if (d != NULL) + *d = r->d; +} + +void RSA_get0_factors(const RSA *r, const BIGNUM **p, const BIGNUM **q) +{ + if (p != NULL) + *p = r->p; + if (q != NULL) + *q = r->q; +} + +void RSA_get0_crt_params(const RSA *r, + const BIGNUM **dmp1, const BIGNUM **dmq1, + const BIGNUM **iqmp) +{ + if (dmp1 != NULL) + *dmp1 = r->dmp1; + if (dmq1 != NULL) + *dmq1 = r->dmq1; + if (iqmp != NULL) + *iqmp = r->iqmp; +} + +void DSA_get0_pqg(const DSA *d, + const BIGNUM **p, const BIGNUM **q, const BIGNUM **g) +{ + if (p != NULL) + *p = d->p; + if (q != NULL) + *q = d->q; + if (g != NULL) + *g = d->g; +} + +int DSA_set0_pqg(DSA *d, BIGNUM *p, BIGNUM *q, BIGNUM *g) +{ + /* If the fields p, q and g in d are NULL, the corresponding input + * parameters MUST be non-NULL. + */ + if ((d->p == NULL && p == NULL) + || (d->q == NULL && q == NULL) + || (d->g == NULL && g == NULL)) + return 0; + + if (p != NULL) { + BN_free(d->p); + d->p = p; + } + if (q != NULL) { + BN_free(d->q); + d->q = q; + } + if (g != NULL) { + BN_free(d->g); + d->g = g; + } + + return 1; +} + +void DSA_get0_key(const DSA *d, + const BIGNUM **pub_key, const BIGNUM **priv_key) +{ + if (pub_key != NULL) + *pub_key = d->pub_key; + if (priv_key != NULL) + *priv_key = d->priv_key; +} + +int DSA_set0_key(DSA *d, BIGNUM *pub_key, BIGNUM *priv_key) +{ + /* If the field pub_key in d is NULL, the corresponding input + * parameters MUST be non-NULL. The priv_key field may + * be left NULL. + */ + if (d->pub_key == NULL && pub_key == NULL) + return 0; + + if (pub_key != NULL) { + BN_free(d->pub_key); + d->pub_key = pub_key; + } + if (priv_key != NULL) { + BN_free(d->priv_key); + d->priv_key = priv_key; + } + + return 1; +} + +void DSA_SIG_get0(const DSA_SIG *sig, const BIGNUM **pr, const BIGNUM **ps) +{ + if (pr != NULL) + *pr = sig->r; + if (ps != NULL) + *ps = sig->s; +} + +int DSA_SIG_set0(DSA_SIG *sig, BIGNUM *r, BIGNUM *s) +{ + if (r == NULL || s == NULL) + return 0; + BN_clear_free(sig->r); + BN_clear_free(sig->s); + sig->r = r; + sig->s = s; + return 1; +} + +void ECDSA_SIG_get0(const ECDSA_SIG *sig, const BIGNUM **pr, const BIGNUM **ps) +{ + if (pr != NULL) + *pr = sig->r; + if (ps != NULL) + *ps = sig->s; +} + +int ECDSA_SIG_set0(ECDSA_SIG *sig, BIGNUM *r, BIGNUM *s) +{ + if (r == NULL || s == NULL) + return 0; + BN_clear_free(sig->r); + BN_clear_free(sig->s); + sig->r = r; + sig->s = s; + return 1; +} + +EVP_MD_CTX *EVP_MD_CTX_new(void) +{ + return OPENSSL_zalloc(sizeof(EVP_MD_CTX)); +} + +static void OPENSSL_clear_free(void *str, size_t num) +{ + if (str == NULL) + return; + if (num) + OPENSSL_cleanse(str, num); + OPENSSL_free(str); +} + +/* This call frees resources associated with the context */ +int EVP_MD_CTX_reset(EVP_MD_CTX *ctx) +{ + if (ctx == NULL) + return 1; + + /* + * Don't assume ctx->md_data was cleaned in EVP_Digest_Final, because + * sometimes only copies of the context are ever finalised. 
+ */ + if (ctx->digest && ctx->digest->cleanup + && !EVP_MD_CTX_test_flags(ctx, EVP_MD_CTX_FLAG_CLEANED)) + ctx->digest->cleanup(ctx); + if (ctx->digest && ctx->digest->ctx_size && ctx->md_data + && !EVP_MD_CTX_test_flags(ctx, EVP_MD_CTX_FLAG_REUSE)) { + OPENSSL_clear_free(ctx->md_data, ctx->digest->ctx_size); + } + EVP_PKEY_CTX_free(ctx->pctx); +#ifndef OPENSSL_NO_ENGINE + ENGINE_finish(ctx->engine); +#endif + OPENSSL_cleanse(ctx, sizeof(*ctx)); + + return 1; +} + +void EVP_MD_CTX_free(EVP_MD_CTX *ctx) +{ + EVP_MD_CTX_reset(ctx); + OPENSSL_free(ctx); +} + +HMAC_CTX *HMAC_CTX_new(void) +{ + HMAC_CTX *ctx = OPENSSL_zalloc(sizeof(HMAC_CTX)); + + if (ctx != NULL) { + if (!HMAC_CTX_reset(ctx)) { + HMAC_CTX_free(ctx); + return NULL; + } + } + return ctx; +} + +static void hmac_ctx_cleanup(HMAC_CTX *ctx) +{ + EVP_MD_CTX_reset(&ctx->i_ctx); + EVP_MD_CTX_reset(&ctx->o_ctx); + EVP_MD_CTX_reset(&ctx->md_ctx); + ctx->md = NULL; + ctx->key_length = 0; + OPENSSL_cleanse(ctx->key, sizeof(ctx->key)); +} + +void HMAC_CTX_free(HMAC_CTX *ctx) +{ + if (ctx != NULL) { + hmac_ctx_cleanup(ctx); +#if OPENSSL_VERSION_NUMBER > 0x10100000L + EVP_MD_CTX_free(&ctx->i_ctx); + EVP_MD_CTX_free(&ctx->o_ctx); + EVP_MD_CTX_free(&ctx->md_ctx); +#endif + OPENSSL_free(ctx); + } +} + +int HMAC_CTX_reset(HMAC_CTX *ctx) +{ + HMAC_CTX_init(ctx); + return 1; +} + +#if 0 // by apex.liu +#ifndef HAVE_OPENSSL_EVP_CIPHER_CTX_NEW +EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void) +{ + return OPENSSL_zalloc(sizeof(EVP_CIPHER_CTX)); +} + +void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) +{ + /* EVP_CIPHER_CTX_reset(ctx); alias */ + EVP_CIPHER_CTX_init(ctx); + OPENSSL_free(ctx); +} +#endif +#endif // endif by apex.liu + +void DH_get0_pqg(const DH *dh, + const BIGNUM **p, const BIGNUM **q, const BIGNUM **g) +{ + if (p) { + *p = dh->p; + } + if (q) { + *q = NULL; + } + if (g) { + *g = dh->g; + } +} + +int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g) +{ + if (p) { + if (dh->p) { + BN_free(dh->p); + } + dh->p = p; + } + if (g) { + if (dh->g) { + BN_free(dh->g); + } + dh->g = g; + } + return 1; +} + +void DH_get0_key(const DH *dh, + const BIGNUM **pub_key, const BIGNUM **priv_key) +{ + if (pub_key) { + *pub_key = dh->pub_key; + } + if (priv_key) { + *priv_key = dh->priv_key; + } +} + +int DH_set0_key(DH *dh, BIGNUM *pub_key, BIGNUM *priv_key) +{ + if (pub_key) { + if (dh->pub_key) { + BN_free(dh->pub_key); + } + dh->pub_key = pub_key; + } + if (priv_key) { + if (dh->priv_key) { + BN_free(dh->priv_key); + } + dh->priv_key = priv_key; + } + return 1; +} diff --git a/external/fix-external/libssh/libssh-0.9.2/src/session.c b/external/fix-external/libssh/libssh-0.9.2/src/session.c new file mode 100644 index 0000000..7ec5522 --- /dev/null +++ b/external/fix-external/libssh/libssh-0.9.2/src/session.c @@ -0,0 +1,1210 @@ +/* + * session.c - non-networking functions + * + * This file is part of the SSH Library + * + * Copyright (c) 2005-2013 by Aris Adamantiadis + * + * The SSH Library is free software; you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation; either version 2.1 of the License, or (at your + * option) any later version. + * + * The SSH Library is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public + * License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with the SSH Library; see the file COPYING. If not, write to + * the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, + * MA 02111-1307, USA. + */ + +#include "config.h" + +#include +#include + +#include "libssh/priv.h" +#include "libssh/libssh.h" +#include "libssh/crypto.h" +#include "libssh/server.h" +#include "libssh/socket.h" +#include "libssh/ssh2.h" +#include "libssh/agent.h" +#include "libssh/packet.h" +#include "libssh/session.h" +#include "libssh/misc.h" +#include "libssh/buffer.h" +#include "libssh/poll.h" +#include "libssh/pki.h" + +#define FIRST_CHANNEL 42 // why not ? it helps to find bugs. + +/** + * @defgroup libssh_session The SSH session functions. + * @ingroup libssh + * + * Functions that manage a session. + * + * @{ + */ + +/** + * @brief Create a new ssh session. + * + * @returns A new ssh_session pointer, NULL on error. + */ +ssh_session ssh_new(void) { + ssh_session session; + char *id = NULL; + int rc; + + session = calloc(1, sizeof (struct ssh_session_struct)); + if (session == NULL) { + return NULL; + } + + session->next_crypto = crypto_new(); + if (session->next_crypto == NULL) { + goto err; + } + + session->socket = ssh_socket_new(session); + if (session->socket == NULL) { + goto err; + } + + session->out_buffer = ssh_buffer_new(); + if (session->out_buffer == NULL) { + goto err; + } + + session->in_buffer=ssh_buffer_new(); + if (session->in_buffer == NULL) { + goto err; + } + + session->out_queue = ssh_list_new(); + if (session->out_queue == NULL) { + goto err; + } + + session->alive = 0; + session->auth.supported_methods = 0; + ssh_set_blocking(session, 1); + session->maxchannel = FIRST_CHANNEL; + +#ifndef _WIN32 + session->agent = ssh_agent_new(session); + if (session->agent == NULL) { + goto err; + } +#endif /* _WIN32 */ + + /* OPTIONS */ + session->opts.StrictHostKeyChecking = 1; + session->opts.port = 0; + session->opts.fd = -1; + session->opts.compressionlevel=7; + session->opts.nodelay = 0; + session->opts.flags = SSH_OPT_FLAG_PASSWORD_AUTH | SSH_OPT_FLAG_PUBKEY_AUTH | + SSH_OPT_FLAG_KBDINT_AUTH | SSH_OPT_FLAG_GSSAPI_AUTH; + session->opts.identity = ssh_list_new(); + if (session->opts.identity == NULL) { + goto err; + } + + id = strdup("%d/id_ed25519"); + if (id == NULL) { + goto err; + } + rc = ssh_list_append(session->opts.identity, id); + if (rc == SSH_ERROR) { + goto err; + } + +#ifdef HAVE_ECC + id = strdup("%d/id_ecdsa"); + if (id == NULL) { + goto err; + } + rc = ssh_list_append(session->opts.identity, id); + if (rc == SSH_ERROR) { + goto err; + } +#endif + + id = strdup("%d/id_rsa"); + if (id == NULL) { + goto err; + } + rc = ssh_list_append(session->opts.identity, id); + if (rc == SSH_ERROR) { + goto err; + } + +#ifdef HAVE_DSA + id = strdup("%d/id_dsa"); + if (id == NULL) { + goto err; + } + rc = ssh_list_append(session->opts.identity, id); + if (rc == SSH_ERROR) { + goto err; + } +#endif + + return session; + +err: + free(id); + ssh_free(session); + return NULL; +} + +/** + * @brief Deallocate a SSH session handle. + * + * @param[in] session The SSH session to free. + * + * @see ssh_disconnect() + * @see ssh_new() + */ +void ssh_free(ssh_session session) +{ + int i; + struct ssh_iterator *it = NULL; + struct ssh_buffer_struct *b = NULL; + + if (session == NULL) { + return; + } + + /* + * Delete all channels + * + * This needs the first thing we clean up cause if there is still an open + * channel we call ssh_channel_close() first. 
So we need a working socket + * and poll context for it. + */ + for (it = ssh_list_get_iterator(session->channels); + it != NULL; + it = ssh_list_get_iterator(session->channels)) { + ssh_channel_do_free(ssh_iterator_value(ssh_channel,it)); + ssh_list_remove(session->channels, it); + } + ssh_list_free(session->channels); + session->channels = NULL; + +#ifdef WITH_PCAP + if (session->pcap_ctx) { + ssh_pcap_context_free(session->pcap_ctx); + session->pcap_ctx = NULL; + } +#endif + + ssh_socket_free(session->socket); + session->socket = NULL; + + if (session->default_poll_ctx) { + ssh_poll_ctx_free(session->default_poll_ctx); + } + + ssh_buffer_free(session->in_buffer); + ssh_buffer_free(session->out_buffer); + session->in_buffer = session->out_buffer = NULL; + + if (session->in_hashbuf != NULL) { + ssh_buffer_free(session->in_hashbuf); + } + if (session->out_hashbuf != NULL) { + ssh_buffer_free(session->out_hashbuf); + } + + crypto_free(session->current_crypto); + crypto_free(session->next_crypto); + +#ifndef _WIN32 + ssh_agent_free(session->agent); +#endif /* _WIN32 */ + + ssh_key_free(session->srv.dsa_key); + session->srv.dsa_key = NULL; + ssh_key_free(session->srv.rsa_key); + session->srv.rsa_key = NULL; + ssh_key_free(session->srv.ecdsa_key); + session->srv.ecdsa_key = NULL; + ssh_key_free(session->srv.ed25519_key); + session->srv.ed25519_key = NULL; + + if (session->ssh_message_list) { + ssh_message msg; + + for (msg = ssh_list_pop_head(ssh_message, session->ssh_message_list); + msg != NULL; + msg = ssh_list_pop_head(ssh_message, session->ssh_message_list)) { + ssh_message_free(msg); + } + ssh_list_free(session->ssh_message_list); + } + + if (session->kbdint != NULL) { + ssh_kbdint_free(session->kbdint); + } + + if (session->packet_callbacks) { + ssh_list_free(session->packet_callbacks); + } + + /* options */ + if (session->opts.identity) { + char *id; + + for (id = ssh_list_pop_head(char *, session->opts.identity); + id != NULL; + id = ssh_list_pop_head(char *, session->opts.identity)) { + SAFE_FREE(id); + } + ssh_list_free(session->opts.identity); + } + + while ((b = ssh_list_pop_head(struct ssh_buffer_struct *, + session->out_queue)) != NULL) { + ssh_buffer_free(b); + } + ssh_list_free(session->out_queue); + +#ifndef _WIN32 + ssh_agent_state_free (session->agent_state); +#endif + session->agent_state = NULL; + + SAFE_FREE(session->auth.auto_state); + SAFE_FREE(session->serverbanner); + SAFE_FREE(session->clientbanner); + SAFE_FREE(session->banner); + + SAFE_FREE(session->opts.bindaddr); + SAFE_FREE(session->opts.custombanner); + SAFE_FREE(session->opts.username); + SAFE_FREE(session->opts.host); + SAFE_FREE(session->opts.sshdir); + SAFE_FREE(session->opts.knownhosts); + SAFE_FREE(session->opts.global_knownhosts); + SAFE_FREE(session->opts.ProxyCommand); + SAFE_FREE(session->opts.gss_server_identity); + SAFE_FREE(session->opts.gss_client_identity); + SAFE_FREE(session->opts.pubkey_accepted_types); + + for (i = 0; i < 10; i++) { + if (session->opts.wanted_methods[i]) { + SAFE_FREE(session->opts.wanted_methods[i]); + } + } + + /* burn connection, it could contain sensitive data */ + explicit_bzero(session, sizeof(struct ssh_session_struct)); + SAFE_FREE(session); +} + +/** + * @brief get the client banner + * + * @param[in] session The SSH session + * + * @return Returns the client banner string or NULL. 
+ */ +const char* ssh_get_clientbanner(ssh_session session) { + if (session == NULL) { + return NULL; + } + + return session->clientbanner; +} + +/** + * @brief get the server banner + * + * @param[in] session The SSH session + * + * @return Returns the server banner string or NULL. + */ +const char* ssh_get_serverbanner(ssh_session session) { + if(!session) { + return NULL; + } + return session->serverbanner; +} + +/** + * @brief get the name of the current key exchange algorithm. + * + * @param[in] session The SSH session + * + * @return Returns the key exchange algorithm string or NULL. + */ +const char* ssh_get_kex_algo(ssh_session session) { + if ((session == NULL) || + (session->current_crypto == NULL)) { + return NULL; + } + + switch (session->current_crypto->kex_type) { + case SSH_KEX_DH_GROUP1_SHA1: + return "diffie-hellman-group1-sha1"; + case SSH_KEX_DH_GROUP14_SHA1: + return "diffie-hellman-group14-sha1"; + case SSH_KEX_DH_GROUP16_SHA512: + return "diffie-hellman-group16-sha512"; + case SSH_KEX_DH_GROUP18_SHA512: + return "diffie-hellman-group18-sha512"; + case SSH_KEX_ECDH_SHA2_NISTP256: + return "ecdh-sha2-nistp256"; + case SSH_KEX_ECDH_SHA2_NISTP384: + return "ecdh-sha2-nistp384"; + case SSH_KEX_ECDH_SHA2_NISTP521: + return "ecdh-sha2-nistp521"; + case SSH_KEX_CURVE25519_SHA256: + return "curve25519-sha256"; + case SSH_KEX_CURVE25519_SHA256_LIBSSH_ORG: + return "curve25519-sha256@libssh.org"; + default: + break; + } + + return NULL; +} + +/** + * @brief get the name of the input cipher for the given session. + * + * @param[in] session The SSH session. + * + * @return Returns cipher name or NULL. + */ +const char* ssh_get_cipher_in(ssh_session session) { + if ((session != NULL) && + (session->current_crypto != NULL) && + (session->current_crypto->in_cipher != NULL)) { + return session->current_crypto->in_cipher->name; + } + return NULL; +} + +/** + * @brief get the name of the output cipher for the given session. + * + * @param[in] session The SSH session. + * + * @return Returns cipher name or NULL. + */ +const char* ssh_get_cipher_out(ssh_session session) { + if ((session != NULL) && + (session->current_crypto != NULL) && + (session->current_crypto->out_cipher != NULL)) { + return session->current_crypto->out_cipher->name; + } + return NULL; +} + +/** + * @brief get the name of the input HMAC algorithm for the given session. + * + * @param[in] session The SSH session. + * + * @return Returns HMAC algorithm name or NULL if unknown. + */ +const char* ssh_get_hmac_in(ssh_session session) { + if ((session != NULL) && + (session->current_crypto != NULL)) { + return ssh_hmac_type_to_string(session->current_crypto->in_hmac, session->current_crypto->in_hmac_etm); + } + return NULL; +} + +/** + * @brief get the name of the output HMAC algorithm for the given session. + * + * @param[in] session The SSH session. + * + * @return Returns HMAC algorithm name or NULL if unknown. + */ +const char* ssh_get_hmac_out(ssh_session session) { + if ((session != NULL) && + (session->current_crypto != NULL)) { + return ssh_hmac_type_to_string(session->current_crypto->out_hmac, session->current_crypto->out_hmac_etm); + } + return NULL; +} + +/** + * @brief Disconnect impolitely from a remote host by closing the socket. + * + * Suitable if you forked and want to destroy this session. + * + * @param[in] session The SSH session to disconnect. 
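+ *
+ * @code
+ * // A sketch of typical use: in the child after fork(), drop this copy
+ * // of the connection without sending a disconnect message.
+ * ssh_silent_disconnect(session);
+ * @endcode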
+ */ +void ssh_silent_disconnect(ssh_session session) { + if (session == NULL) { + return; + } + + ssh_socket_close(session->socket); + session->alive = 0; + ssh_disconnect(session); +} + +/** + * @brief Set the session in blocking/nonblocking mode. + * + * @param[in] session The ssh session to change. + * + * @param[in] blocking Zero for nonblocking mode. + */ +void ssh_set_blocking(ssh_session session, int blocking) +{ + if (session == NULL) { + return; + } + session->flags &= ~SSH_SESSION_FLAG_BLOCKING; + session->flags |= blocking ? SSH_SESSION_FLAG_BLOCKING : 0; +} + +/** + * @brief Return the blocking mode of libssh + * @param[in] session The SSH session + * @returns 0 if the session is nonblocking, + * @returns 1 if the functions may block. + */ +int ssh_is_blocking(ssh_session session) +{ + return (session->flags & SSH_SESSION_FLAG_BLOCKING) ? 1 : 0; +} + +/* Waits until the output socket is empty */ +static int ssh_flush_termination(void *c){ + ssh_session session = c; + if (ssh_socket_buffered_write_bytes(session->socket) == 0 || + session->session_state == SSH_SESSION_STATE_ERROR) + return 1; + else + return 0; +} + +/** + * @brief Blocking flush of the outgoing buffer + * @param[in] session The SSH session + * @param[in] timeout Set an upper limit on the time for which this function + * will block, in milliseconds. Specifying -1 + * means an infinite timeout. This parameter is passed to + * the poll() function. + * @returns SSH_OK on success, SSH_AGAIN if timeout occurred, + * SSH_ERROR otherwise. + */ + +int ssh_blocking_flush(ssh_session session, int timeout){ + int rc; + if (session == NULL) { + return SSH_ERROR; + } + + rc = ssh_handle_packets_termination(session, timeout, + ssh_flush_termination, session); + if (rc == SSH_ERROR) { + return rc; + } + if (!ssh_flush_termination(session)) { + rc = SSH_AGAIN; + } + + return rc; +} + +/** + * @brief Check if we are connected. + * + * @param[in] session The session to check if it is connected. + * + * @return 1 if we are connected, 0 if not. + */ +int ssh_is_connected(ssh_session session) { + if (session == NULL) { + return 0; + } + + return session->alive; +} + +/** + * @brief Get the fd of a connection. + * + * In case you'd need the file descriptor of the connection to the server/client. + * + * @param[in] session The ssh session to use. + * + * @return The file descriptor of the connection, or -1 if it is + * not connected + */ +socket_t ssh_get_fd(ssh_session session) { + if (session == NULL) { + return -1; + } + + return ssh_socket_get_fd(session->socket); +} + +/** + * @brief Tell the session it has data to read on the file descriptor without + * blocking. + * + * @param[in] session The ssh session to use. + */ +void ssh_set_fd_toread(ssh_session session) { + if (session == NULL) { + return; + } + + ssh_socket_set_read_wontblock(session->socket); +} + +/** + * @brief Tell the session it may write to the file descriptor without blocking. + * + * @param[in] session The ssh session to use. + */ +void ssh_set_fd_towrite(ssh_session session) { + if (session == NULL) { + return; + } + + ssh_socket_set_write_wontblock(session->socket); +} + +/** + * @brief Tell the session it has an exception to catch on the file descriptor. + * + * \param[in] session The ssh session to use. + */ +void ssh_set_fd_except(ssh_session session) { + if (session == NULL) { + return; + } + + ssh_socket_set_except(session->socket); +} + +/** + * @internal + * + * @brief Poll the current session for an event and call the appropriate + * callbacks. 
This function will not loop until the timeout is expired. + * + * This will block until one event happens. + * + * @param[in] session The session handle to use. + * + * @param[in] timeout Set an upper limit on the time for which this function + * will block, in milliseconds. Specifying SSH_TIMEOUT_INFINITE + * (-1) means an infinite timeout. + * Specifying SSH_TIMEOUT_USER means to use the timeout + * specified in options. 0 means poll will return immediately. + * This parameter is passed to the poll() function. + * + * @return SSH_OK on success, SSH_ERROR otherwise. + */ +int ssh_handle_packets(ssh_session session, int timeout) { + ssh_poll_handle spoll; + ssh_poll_ctx ctx; + int tm = timeout; + int rc; + + if (session == NULL || session->socket == NULL) { + return SSH_ERROR; + } + + spoll = ssh_socket_get_poll_handle(session->socket); + // apex.liu + // ssh_poll_add_events(spoll, POLLIN); + ssh_poll_add_events(spoll, POLLIN|POLLOUT); + ctx = ssh_poll_get_ctx(spoll); + + if (!ctx) { + ctx = ssh_poll_get_default_ctx(session); + ssh_poll_ctx_add(ctx, spoll); + } + + if (timeout == SSH_TIMEOUT_USER) { + if (ssh_is_blocking(session)) + tm = ssh_make_milliseconds(session->opts.timeout, + session->opts.timeout_usec); + else + tm = 0; + } + rc = ssh_poll_ctx_dopoll(ctx, tm); + if (rc == SSH_ERROR) { + session->session_state = SSH_SESSION_STATE_ERROR; + } + + return rc; +} + +/** + * @internal + * + * @brief Poll the current session for an event and call the appropriate + * callbacks. + * + * This will block until termination function returns true, or timeout expired. + * + * @param[in] session The session handle to use. + * + * @param[in] timeout Set an upper limit on the time for which this function + * will block, in milliseconds. Specifying + * SSH_TIMEOUT_INFINITE (-1) means an infinite timeout. + * Specifying SSH_TIMEOUT_USER means to use the timeout + * specified in options. 0 means poll will return + * immediately. + * SSH_TIMEOUT_DEFAULT uses the session timeout if set or + * uses blocking parameters of the session. + * This parameter is passed to the poll() function. + * + * @param[in] fct Termination function to be used to determine if it is + * possible to stop polling. + * @param[in] user User parameter to be passed to fct termination function. + * @return SSH_OK on success, SSH_ERROR otherwise. + */ +int ssh_handle_packets_termination(ssh_session session, + long timeout, + ssh_termination_function fct, + void *user) +{ + struct ssh_timestamp ts; + long timeout_ms = SSH_TIMEOUT_INFINITE; + long tm; + int ret = SSH_OK; + + // apex.liu + timeout = SSH_TIMEOUT_USER; + + /* If a timeout has been provided, use it */ + if (timeout >= 0) { + timeout_ms = timeout; + } else { + if (ssh_is_blocking(session)) { + if (timeout == SSH_TIMEOUT_USER || timeout == SSH_TIMEOUT_DEFAULT) { + if (session->opts.timeout > 0 || + session->opts.timeout_usec > 0) { + timeout_ms = + ssh_make_milliseconds(session->opts.timeout, + session->opts.timeout_usec); + } + } + } else { + timeout_ms = SSH_TIMEOUT_NONBLOCKING; + } + } + + /* avoid unnecessary syscall for the SSH_TIMEOUT_NONBLOCKING case */ + if (timeout_ms != SSH_TIMEOUT_NONBLOCKING) { + ssh_timestamp_init(&ts); + } + + tm = timeout_ms; + while(!fct(user)) { + ret = ssh_handle_packets(session, tm); + if (ret == SSH_ERROR) { + break; + } + if (ssh_timeout_elapsed(&ts, timeout_ms)) { + ret = fct(user) ? 
SSH_OK : SSH_AGAIN; + break; + } + + tm = ssh_timeout_update(&ts, timeout_ms); + } + + return ret; +} + +/** + * @brief Get session status + * + * @param session The ssh session to use. + * + * @returns A bitmask including SSH_CLOSED, SSH_READ_PENDING, SSH_WRITE_PENDING + * or SSH_CLOSED_ERROR which respectively means the session is closed, + * has data to read on the connection socket and session was closed + * due to an error. + */ +int ssh_get_status(ssh_session session) { + int socketstate; + int r = 0; + + if (session == NULL) { + return 0; + } + + socketstate = ssh_socket_get_status(session->socket); + + if (session->session_state == SSH_SESSION_STATE_DISCONNECTED) { + r |= SSH_CLOSED; + } + if (socketstate & SSH_READ_PENDING) { + r |= SSH_READ_PENDING; + } + if (socketstate & SSH_WRITE_PENDING) { + r |= SSH_WRITE_PENDING; + } + if ((session->session_state == SSH_SESSION_STATE_DISCONNECTED && + (socketstate & SSH_CLOSED_ERROR)) || + session->session_state == SSH_SESSION_STATE_ERROR) { + r |= SSH_CLOSED_ERROR; + } + + return r; +} + +/** + * @brief Get poll flags for an external mainloop + * + * @param session The ssh session to use. + * + * @returns A bitmask including SSH_READ_PENDING or SSH_WRITE_PENDING. + * For SSH_READ_PENDING, your invocation of poll() should include + * POLLIN. For SSH_WRITE_PENDING, your invocation of poll() should + * include POLLOUT. + */ +int ssh_get_poll_flags(ssh_session session) +{ + if (session == NULL) { + return 0; + } + + return ssh_socket_get_poll_flags (session->socket); +} + +/** + * @brief Get the disconnect message from the server. + * + * @param[in] session The ssh session to use. + * + * @return The message sent by the server along with the + * disconnect, or NULL in which case the reason of the + * disconnect may be found with ssh_get_error. + * + * @see ssh_get_error() + */ +const char *ssh_get_disconnect_message(ssh_session session) { + if (session == NULL) { + return NULL; + } + + if (session->session_state != SSH_SESSION_STATE_DISCONNECTED) { + ssh_set_error(session, SSH_REQUEST_DENIED, + "Connection not closed yet"); + } else if(!session->discon_msg) { + ssh_set_error(session, SSH_FATAL, + "Connection correctly closed but no disconnect message"); + } else { + return session->discon_msg; + } + + return NULL; +} + +/** + * @brief Get the protocol version of the session. + * + * @param session The ssh session to use. + * + * @return The SSH version as integer, < 0 on error. + */ +int ssh_get_version(ssh_session session) { + if (session == NULL) { + return -1; + } + + return 2; +} + +/** + * @internal + * @brief Callback to be called when the socket received an exception code. + * @param user is a pointer to session + */ +void ssh_socket_exception_callback(int code, int errno_code, void *user){ + ssh_session session=(ssh_session)user; + + SSH_LOG(SSH_LOG_RARE,"Socket exception callback: %d (%d)",code, errno_code); + session->session_state = SSH_SESSION_STATE_ERROR; + if (errno_code == 0 && code == SSH_SOCKET_EXCEPTION_EOF) { + ssh_set_error(session, SSH_FATAL, "Socket error: disconnected"); + } else { + ssh_set_error(session, SSH_FATAL, "Socket error: %s", strerror(errno_code)); + } + + session->ssh_connection_callback(session); +} + +/** + * @brief Send a message that should be ignored + * + * @param[in] session The SSH session + * @param[in] data Data to be sent + * + * @return SSH_OK on success, SSH_ERROR otherwise. 
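+ *
+ * @code
+ * // A sketch: the peer must discard SSH2_MSG_IGNORE messages, so this
+ * // can double as a simple keep-alive probe.
+ * ssh_send_ignore(session, "keep-alive");
+ * @endcode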
+ */ +int ssh_send_ignore (ssh_session session, const char *data) { + const int type = SSH2_MSG_IGNORE; + int rc; + + if (ssh_socket_is_open(session->socket)) { + rc = ssh_buffer_pack(session->out_buffer, + "bs", + type, + data); + if (rc != SSH_OK){ + ssh_set_error_oom(session); + goto error; + } + ssh_packet_send(session); + ssh_handle_packets(session, 0); + } + + return SSH_OK; + +error: + ssh_buffer_reinit(session->out_buffer); + return SSH_ERROR; +} + +/** + * @brief Send a debug message + * + * @param[in] session The SSH session + * @param[in] message Data to be sent + * @param[in] always_display Message SHOULD be displayed by the server. It + * SHOULD NOT be displayed unless debugging + * information has been explicitly requested. + * + * @return SSH_OK on success, SSH_ERROR otherwise. + */ +int ssh_send_debug (ssh_session session, const char *message, int always_display) { + int rc; + + if (ssh_socket_is_open(session->socket)) { + rc = ssh_buffer_pack(session->out_buffer, + "bbsd", + SSH2_MSG_DEBUG, + always_display != 0 ? 1 : 0, + message, + 0); /* empty language tag */ + if (rc != SSH_OK) { + ssh_set_error_oom(session); + goto error; + } + ssh_packet_send(session); + ssh_handle_packets(session, 0); + } + + return SSH_OK; + +error: + ssh_buffer_reinit(session->out_buffer); + return SSH_ERROR; +} + + /** + * @brief Set the session data counters. + * + * This functions sets the counter structures to be used to calculate data + * which comes in and goes out through the session at different levels. + * + * @code + * struct ssh_counter_struct scounter = { + * .in_bytes = 0, + * .out_bytes = 0, + * .in_packets = 0, + * .out_packets = 0 + * }; + * + * struct ssh_counter_struct rcounter = { + * .in_bytes = 0, + * .out_bytes = 0, + * .in_packets = 0, + * .out_packets = 0 + * }; + * + * ssh_set_counters(session, &scounter, &rcounter); + * @endcode + * + * @param[in] session The SSH session. + * + * @param[in] scounter Counter for byte data handled by the session sockets. + * + * @param[in] rcounter Counter for byte and packet data handled by the session, + * prior compression and SSH overhead. + */ +void ssh_set_counters(ssh_session session, ssh_counter scounter, + ssh_counter rcounter) { + if (session != NULL) { + session->socket_counter = scounter; + session->raw_counter = rcounter; + } +} + +/** + * @deprecated Use ssh_get_publickey_hash() + */ +int ssh_get_pubkey_hash(ssh_session session, unsigned char **hash) +{ + ssh_key pubkey = NULL; + ssh_string pubkey_blob = NULL; + MD5CTX ctx; + unsigned char *h; + int rc; + + if (session == NULL || hash == NULL) { + return SSH_ERROR; + } + + /* In FIPS mode, we cannot use MD5 */ + if (ssh_fips_mode()) { + ssh_set_error(session, + SSH_FATAL, + "In FIPS mode MD5 is not allowed." 
+ "Try ssh_get_publickey_hash() with" + "SSH_PUBLICKEY_HASH_SHA256"); + return SSH_ERROR; + } + + *hash = NULL; + if (session->current_crypto == NULL || + session->current_crypto->server_pubkey == NULL) { + ssh_set_error(session,SSH_FATAL,"No current cryptographic context"); + return SSH_ERROR; + } + + h = calloc(MD5_DIGEST_LEN, sizeof(unsigned char)); + if (h == NULL) { + return SSH_ERROR; + } + + ctx = md5_init(); + if (ctx == NULL) { + SAFE_FREE(h); + return SSH_ERROR; + } + + rc = ssh_get_server_publickey(session, &pubkey); + if (rc != SSH_OK) { + md5_final(h, ctx); + SAFE_FREE(h); + return SSH_ERROR; + } + + rc = ssh_pki_export_pubkey_blob(pubkey, &pubkey_blob); + ssh_key_free(pubkey); + if (rc != SSH_OK) { + md5_final(h, ctx); + SAFE_FREE(h); + return SSH_ERROR; + } + + md5_update(ctx, ssh_string_data(pubkey_blob), ssh_string_len(pubkey_blob)); + ssh_string_free(pubkey_blob); + md5_final(h, ctx); + + *hash = h; + + return MD5_DIGEST_LEN; +} + +/** + * @brief Deallocate the hash obtained by ssh_get_pubkey_hash. + * + * This is required under Microsoft platform as this library might use a + * different C library than your software, hence a different heap. + * + * @param[in] hash The buffer to deallocate. + * + * @see ssh_get_pubkey_hash() + */ +void ssh_clean_pubkey_hash(unsigned char **hash) { + SAFE_FREE(*hash); +} + +/** + * @brief Get the server public key from a session. + * + * @param[in] session The session to get the key from. + * + * @param[out] key A pointer to store the allocated key. You need to free + * the key. + * + * @return SSH_OK on success, SSH_ERROR on errror. + * + * @see ssh_key_free() + */ +int ssh_get_server_publickey(ssh_session session, ssh_key *key) +{ + ssh_key pubkey = NULL; + + if (session == NULL || + session->current_crypto == NULL || + session->current_crypto->server_pubkey == NULL) { + return SSH_ERROR; + } + + pubkey = ssh_key_dup(session->current_crypto->server_pubkey); + if (pubkey == NULL) { + return SSH_ERROR; + } + + *key = pubkey; + return SSH_OK; +} + +/** + * @deprecated Use ssh_get_server_publickey() + */ +int ssh_get_publickey(ssh_session session, ssh_key *key) +{ + return ssh_get_server_publickey(session, key); +} + +/** + * @brief Allocates a buffer with the hash of the public key. + * + * This function allows you to get a hash of the public key. You can then + * print this hash in a human-readable form to the user so that he is able to + * verify it. Use ssh_get_hexa() or ssh_print_hash() to display it. + * + * @param[in] key The public key to create the hash for. + * + * @param[in] type The type of the hash you want. + * + * @param[in] hash A pointer to store the allocated buffer. It can be + * freed using ssh_clean_pubkey_hash(). + * + * @param[in] hlen The length of the hash. + * + * @return 0 on success, -1 if an error occured. + * + * @warning It is very important that you verify at some moment that the hash + * matches a known server. If you don't do it, cryptography wont help + * you at making things secure. + * OpenSSH uses SHA256 to print public key digests. 
+ * + * @see ssh_session_update_known_hosts() + * @see ssh_get_hexa() + * @see ssh_print_hash() + * @see ssh_clean_pubkey_hash() + */ +int ssh_get_publickey_hash(const ssh_key key, + enum ssh_publickey_hash_type type, + unsigned char **hash, + size_t *hlen) +{ + ssh_string blob; + unsigned char *h; + int rc; + + rc = ssh_pki_export_pubkey_blob(key, &blob); + if (rc < 0) { + return rc; + } + + switch (type) { + case SSH_PUBLICKEY_HASH_SHA1: + { + SHACTX ctx; + + h = calloc(1, SHA_DIGEST_LEN); + if (h == NULL) { + rc = -1; + goto out; + } + + ctx = sha1_init(); + if (ctx == NULL) { + free(h); + rc = -1; + goto out; + } + + sha1_update(ctx, ssh_string_data(blob), ssh_string_len(blob)); + sha1_final(h, ctx); + + *hlen = SHA_DIGEST_LEN; + } + break; + case SSH_PUBLICKEY_HASH_SHA256: + { + SHA256CTX ctx; + + h = calloc(1, SHA256_DIGEST_LEN); + if (h == NULL) { + rc = -1; + goto out; + } + + ctx = sha256_init(); + if (ctx == NULL) { + free(h); + rc = -1; + goto out; + } + + sha256_update(ctx, ssh_string_data(blob), ssh_string_len(blob)); + sha256_final(h, ctx); + + *hlen = SHA256_DIGEST_LEN; + } + break; + case SSH_PUBLICKEY_HASH_MD5: + { + MD5CTX ctx; + + /* In FIPS mode, we cannot use MD5 */ + if (ssh_fips_mode()) { + SSH_LOG(SSH_LOG_WARN, "In FIPS mode MD5 is not allowed." + "Try using SSH_PUBLICKEY_HASH_SHA256"); + rc = SSH_ERROR; + goto out; + } + + h = calloc(1, MD5_DIGEST_LEN); + if (h == NULL) { + rc = -1; + goto out; + } + + ctx = md5_init(); + if (ctx == NULL) { + free(h); + rc = -1; + goto out; + } + + md5_update(ctx, ssh_string_data(blob), ssh_string_len(blob)); + md5_final(h, ctx); + + *hlen = MD5_DIGEST_LEN; + } + break; + default: + rc = -1; + goto out; + } + + *hash = h; + rc = 0; +out: + ssh_string_free(blob); + return rc; +} + +/** @} */ diff --git a/external/version.ini b/external/version.ini index 804a41b..481d0a0 100644 --- a/external/version.ini +++ b/external/version.ini @@ -1,8 +1,16 @@ [external_ver] -openssl = 1.0.2p,1000210f -libuv = 1.23.0 -mbedtls = 2.12.0 -libssh = 0.8.5 -jsoncpp = 0.10.6 -mongoose = 6.12 - +; https://github.com/openssl/openssl/releases +; http://slproweb.com/download/Win32OpenSSL-1_1_1d.exe +openssl = 1.1.1d,1010104f +; https://github.com/libuv/libuv/releases +libuv = 1.33.1 +; https://github.com/ARMmbed/mbedtls/releases +mbedtls = 2.16.3 +; https://github.com/open-source-parsers/jsoncpp/releases +jsoncpp = 1.9.2 +; https://github.com/cesanta/mongoose/releases +mongoose = 6.16 +; https://www.zlib.net/zlib1211.zip +zlib = 1.2.11,1211 +; https://git.libssh.org/projects/libssh.git/ +libssh = 0.9.2 diff --git a/resource/icon-tp-player.psd b/resource/icon-tp-player.psd new file mode 100644 index 0000000..3ae9008 Binary files /dev/null and b/resource/icon-tp-player.psd differ diff --git a/server/tp_core/common/base_env.cpp b/server/tp_core/common/base_env.cpp index 0975cfd..34f8c07 100644 --- a/server/tp_core/common/base_env.cpp +++ b/server/tp_core/common/base_env.cpp @@ -1,48 +1,48 @@ -#include "base_env.h" - -TppEnvBase::TppEnvBase() -{} - -TppEnvBase::~TppEnvBase() -{} - -bool TppEnvBase::init(TPP_INIT_ARGS* args) -{ - if (NULL == args) - { - EXLOGE("invalid init args(1).\n"); - return false; - } - - EXLOG_USE_LOGGER(args->logger); - - exec_path = args->exec_path; - etc_path = args->etc_path; - replay_path = args->replay_path; - - get_connect_info = args->func_get_connect_info; - free_connect_info = args->func_free_connect_info; - session_begin = args->func_session_begin; - session_update = args->func_session_update; - session_end = 
args->func_session_end;
-
-	if (NULL == get_connect_info || NULL == free_connect_info || NULL == session_begin || NULL == session_update || NULL == session_end)
-	{
-		EXLOGE("invalid init args(2).\n");
-		return false;
-	}
-
-	if (NULL == args->cfg)
-	{
-		EXLOGE("invalid init args(3).\n");
-		return false;
-	}
-
-	if (!_on_init(args))
-	{
-		EXLOGE("invalid init args(4).\n");
-		return false;
-	}
-
-	return true;
-}
+#include "base_env.h"
+
+TppEnvBase::TppEnvBase()
+{}
+
+TppEnvBase::~TppEnvBase()
+{}
+
+bool TppEnvBase::init(TPP_INIT_ARGS* args)
+{
+	if (NULL == args)
+	{
+		EXLOGE("invalid init args(1).\n");
+		return false;
+	}
+
+	EXLOG_USE_LOGGER(args->logger);
+
+	exec_path = args->exec_path;
+	etc_path = args->etc_path;
+	replay_path = args->replay_path;
+
+	get_connect_info = args->func_get_connect_info;
+	free_connect_info = args->func_free_connect_info;
+	session_begin = args->func_session_begin;
+	session_update = args->func_session_update;
+	session_end = args->func_session_end;
+
+	if (NULL == get_connect_info || NULL == free_connect_info || NULL == session_begin || NULL == session_update || NULL == session_end)
+	{
+		EXLOGE("invalid init args(2).\n");
+		return false;
+	}
+
+	if (NULL == args->cfg)
+	{
+		EXLOGE("invalid init args(3).\n");
+		return false;
+	}
+
+	if (!_on_init(args))
+	{
+		EXLOGE("invalid init args(4).\n");
+		return false;
+	}
+
+	return true;
+}
diff --git a/server/tp_core/common/base_env.h b/server/tp_core/common/base_env.h
index c57fd41..316216d 100644
--- a/server/tp_core/common/base_env.h
+++ b/server/tp_core/common/base_env.h
@@ -1,29 +1,29 @@
-#ifndef __TS_BASE_ENV_H__
-#define __TS_BASE_ENV_H__
-
-#include "protocol_interface.h"
-
-class TppEnvBase
-{
-public:
-	TppEnvBase();
-	virtual ~TppEnvBase();
-
-	bool init(TPP_INIT_ARGS* args);
-
-public:
-	ex_wstr exec_path;
-	ex_wstr etc_path;		// directory holding the config files and the SSH server's private-key file
-	ex_wstr replay_path;
-
-	TPP_GET_CONNNECT_INFO_FUNC get_connect_info;
-	TPP_FREE_CONNECT_INFO_FUNC free_connect_info;
-	TPP_SESSION_BEGIN_FUNC session_begin;
-	TPP_SESSION_UPDATE_FUNC session_update;
-	TPP_SESSION_END_FUNC session_end;
-
-protected:
-	virtual bool _on_init(TPP_INIT_ARGS* args) = 0;
-};
-
-#endif // __TS_BASE_ENV_H__
+#ifndef __TS_BASE_ENV_H__
+#define __TS_BASE_ENV_H__
+
+#include "protocol_interface.h"
+
+class TppEnvBase
+{
+public:
+	TppEnvBase();
+	virtual ~TppEnvBase();
+
+	bool init(TPP_INIT_ARGS* args);
+
+public:
+	ex_wstr exec_path;
+	ex_wstr etc_path;		// directory holding the config files and the SSH server's private-key file
+	ex_wstr replay_path;
+
+	TPP_GET_CONNNECT_INFO_FUNC get_connect_info;
+	TPP_FREE_CONNECT_INFO_FUNC free_connect_info;
+	TPP_SESSION_BEGIN_FUNC session_begin;
+	TPP_SESSION_UPDATE_FUNC session_update;
+	TPP_SESSION_END_FUNC session_end;
+
+protected:
+	virtual bool _on_init(TPP_INIT_ARGS* args) = 0;
+};
+
+#endif // __TS_BASE_ENV_H__
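Note: TppEnvBase::init() above copies the callback table out of TPP_INIT_ARGS and rejects a missing cfg before calling the protocol-specific hook. A minimal sketch (not part of the patch) of how a protocol library might subclass it; TppSshEnv and the "protocol-ssh" section name are illustrative assumptions, and the ExIniFile calls mirror the usage in ts_http_rpc.cpp later in this patch:

class TppSshEnv : public TppEnvBase
{
protected:
    virtual bool _on_init(TPP_INIT_ARGS* args)
    {
        // args->cfg is the shared ini file; scan it for this protocol's section.
        ex_ini_sections& secs = args->cfg->GetAllSections();
        for (ex_ini_sections::iterator it = secs.begin(); it != secs.end(); ++it) {
            if (it->first == L"protocol-ssh") {
                int port = 0;
                it->second->GetInt(L"bind-port", port, TS_SSH_PROXY_PORT);
                return port > 0;
            }
        }
        return false; // section missing: refuse to start.
    }
};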
diff --git a/server/tp_core/common/base_record.cpp b/server/tp_core/common/base_record.cpp
index 10849db..ae0d19f 100644
--- a/server/tp_core/common/base_record.cpp
+++ b/server/tp_core/common/base_record.cpp
@@ -1,4 +1,4 @@
-#include
+#include
 #include "base_record.h"
 
diff --git a/server/tp_core/common/base_record.h b/server/tp_core/common/base_record.h
index cdcabb3..4e614d1 100644
--- a/server/tp_core/common/base_record.h
+++ b/server/tp_core/common/base_record.h
@@ -1,101 +1,108 @@
-#ifndef __TS_BASE_RECORD_H__
-#define __TS_BASE_RECORD_H__
-
-#include "base_env.h"
-#include "ts_membuf.h"
-#include "protocol_interface.h"
-
-#include
-
-#define MAX_SIZE_PER_FILE 4194304 // 4M = 1024*1024*4
-
-#pragma pack(push,1)
-
-/*
- * Recording
- *
- * A recording consists of an information file and a data file.
- * Whenever the internal cache reaches 4MB, or every 5 seconds, the data is
- * written to the data file and the information file is updated at the same time.
- *
- */
-
-
-// Recording file header (the part that changes as recording data is written)
-typedef struct TS_RECORD_HEADER_INFO
-{
-	ex_u32 magic;		// "TPPR" magic, TelePort Protocol Record
-	ex_u16 ver;			// recording file version, currently 3
-	ex_u32 packages;	// total number of packages
-	ex_u32 time_ms;		// total duration (milliseconds)
-	//ex_u32 file_size;	// file size
-}TS_RECORD_HEADER_INFO;
-#define ts_record_header_info_size sizeof(TS_RECORD_HEADER_INFO)
-
-// Recording file header (fixed part)
-typedef struct TS_RECORD_HEADER_BASIC
-{
-	ex_u16 protocol_type;		// protocol: 1=RDP, 2=SSH, 3=Telnet
-	ex_u16 protocol_sub_type;	// sub-protocol: 100=RDP-DESKTOP, 200=SSH-SHELL, 201=SSH-SFTP, 300=Telnet
-	ex_u64 timestamp;			// start time of this recording (UTC timestamp)
-	ex_u16 width;				// initial screen size: width
-	ex_u16 height;				// initial screen size: height
-	char user_username[64];		// teleport account
-	char acc_username[64];		// remote host user name
-
-	char host_ip[40];			// remote host IP
-	char conn_ip[40];			// remote host IP
-	ex_u16 conn_port;			// remote host port
-
-	char client_ip[40];			// client IP
-
-	// RDP only
-	ex_u8 rdp_security;			// 0 = RDP, 1 = TLS
-
-	ex_u8 _reserve[512 - 2 - 2 - 8 - 2 - 2 - 64 - 64 - 40 - 40 - 2 - 40 - 1 - ts_record_header_info_size];
-}TS_RECORD_HEADER_BASIC;
-#define ts_record_header_basic_size sizeof(TS_RECORD_HEADER_BASIC)
-
-typedef struct TS_RECORD_HEADER
-{
-	TS_RECORD_HEADER_INFO info;
-	TS_RECORD_HEADER_BASIC basic;
-}TS_RECORD_HEADER;
-
-// header part (header-info + header-basic) = 512B
-#define ts_record_header_size sizeof(TS_RECORD_HEADER)
-
-
-// header of one data package
-typedef struct TS_RECORD_PKG
-{
-	ex_u8 type;			// data type of this package
-	ex_u32 size;		// total size of this package (excluding this header)
-	ex_u32 time_ms;		// offset of this package from the start time (milliseconds; this means one connection cannot last longer than about 49 days)
-	ex_u8 _reserve[3];	// reserved
-}TS_RECORD_PKG;
-
-#pragma pack(pop)
-
-class TppRecBase
-{
-public:
-	TppRecBase();
-	virtual ~TppRecBase();
-
-	bool begin(const wchar_t* base_path, const wchar_t* base_fname, int record_id, const TPP_CONNECT_INFO* info);
-	bool end();
-
-protected:
-	virtual bool _on_begin(const TPP_CONNECT_INFO* info) = 0;
-	virtual bool _on_end() = 0;
-
-protected:
-	ex_wstr m_base_path;	// base path of the recording files, e.g. /usr/local/teleport/data/replay/ssh/123; the numeric id is appended internally and used as the directory name for this session's recording files
-	ex_wstr m_base_fname;	// recording file name without the extension; full file names are composed from it internally, and the recording files are stored in the directory pointed to by m_base_path
-
-	ex_u64 m_start_time;
-
-	MemBuffer m_cache;
-};
-
-#endif // __TS_BASE_RECORD_H__
+#ifndef __TS_BASE_RECORD_H__
+#define __TS_BASE_RECORD_H__
+
+#include "base_env.h"
+#include "ts_membuf.h"
+#include "protocol_interface.h"
+
+#include
+
+#define MAX_CACHE_SIZE		1048576		// 1M = 1024*1024*1
+#define MAX_SIZE_PER_FILE	4194304		// 4M = 1024*1024*4
+// for test.
+// #define MAX_CACHE_SIZE		524288		// 512KB = 512*1024
+// #define MAX_SIZE_PER_FILE	1048576		// 1M = 1024*1024*1
+
+#pragma pack(push,1)
+
+/*
+ * Recording
+ *
+ * A recording consists of several files:
+ *   *.tpr: the recording info file; one per recording, fixed size (512 bytes)
+ *   *.tpd: data files; n of them, e.g. tp-rdp-1.tpd, tp-rdp-2.tpd, ..., each about 4MB
+ *   *.tpk: key-frame index file; one per recording, RDP recordings only; stores the data-file index, offset and time point of each key frame
+ *   *-cmd.txt: ssh command log, SSH only.
+ * Whenever the internal cache reaches 4MB, or every 5 seconds, the data is written to the data file and the info file is updated at the same time.
+ *
+ */
+
+#define TS_TPPR_TYPE_UNKNOWN	0x0000
+#define TS_TPPR_TYPE_SSH		0x0001
+#define TS_TPPR_TYPE_RDP		0x0101
+
+// Recording file header (the part that changes as recording data is written)
+typedef struct TS_RECORD_HEADER_INFO {
+	ex_u32 magic;			// "TPPR" magic, TelePort Protocol Record
+	ex_u16 ver;				// recording file version; 4 since v3.5.0
+	ex_u16 type;			// recording content, SSH or RDP
+	// ex_u32 packages;		// total number of packages
+	ex_u32 time_ms;			// total duration (milliseconds)
+	ex_u32 dat_file_count;	// number of data files
+}TS_RECORD_HEADER_INFO;
+#define ts_record_header_info_size sizeof(TS_RECORD_HEADER_INFO)
+
+// Recording file header (fixed part)
+typedef struct TS_RECORD_HEADER_BASIC {
+	ex_u16 protocol_type;		// protocol: 1=RDP, 2=SSH, 3=Telnet
+	ex_u16 protocol_sub_type;	// sub-protocol: 100=RDP-DESKTOP, 200=SSH-SHELL, 201=SSH-SFTP, 300=Telnet
+	ex_u64 timestamp;			// start time of this recording (UTC timestamp)
+	ex_u16 width;				// initial screen size: width
+	ex_u16 height;				// initial screen size: height
+	char user_username[64];		// teleport account
+	char acc_username[64];		// remote host user name
+
+	char host_ip[40];			// remote host IP
+	char conn_ip[40];			// remote host IP
+	ex_u16 conn_port;			// remote host port
+
+	char client_ip[40];			// client IP
+
+//	// RDP only - deprecated and removed in v3.5.0
+//	ex_u8 rdp_security;			// 0 = RDP, 1 = TLS
+
+}TS_RECORD_HEADER_BASIC;
+#define ts_record_header_basic_size sizeof(TS_RECORD_HEADER_BASIC)
+
+typedef struct TS_RECORD_HEADER {
+	TS_RECORD_HEADER_INFO info;
+	ex_u8 _reserve1[64 - ts_record_header_info_size];
+	TS_RECORD_HEADER_BASIC basic;
+	ex_u8 _reserve2[512 - 64 - ts_record_header_basic_size];
+}TS_RECORD_HEADER;
+
+// header part (header-info + header-basic) = 512B
+#define ts_record_header_size sizeof(TS_RECORD_HEADER)
+
+// header of one data package
+typedef struct TS_RECORD_PKG {
+	ex_u8 type;			// data type of this package
+	ex_u32 size;		// total size of this package (excluding this header)
+	ex_u32 time_ms;		// offset of this package from the start time (milliseconds; this means one connection cannot last longer than about 49 days)
+	ex_u8 _reserve[3];	// reserved
+}TS_RECORD_PKG;
+
+#pragma pack(pop)
+
+class TppRecBase {
+public:
+	TppRecBase();
+	virtual ~TppRecBase();
+
+	bool begin(const wchar_t* base_path, const wchar_t* base_fname, int record_id, const TPP_CONNECT_INFO* info);
+	bool end();
+
+protected:
+	virtual bool _on_begin(const TPP_CONNECT_INFO* info) = 0;
+	virtual bool _on_end() = 0;
+
+protected:
+	ex_wstr m_base_path;	// base path of the recording files, e.g. /usr/local/teleport/data/replay/ssh/123; the numeric id is appended internally and used as the directory name for this session's recording files
+	ex_wstr m_base_fname;	// recording file name without the extension; full file names are composed from it internally, and the recording files are stored in the directory pointed to by m_base_path
+
+	ex_u64 m_start_time;
+
+	MemBuffer m_cache;
+};
+
+#endif // __TS_BASE_RECORD_H__
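Note: with the v4 layout above, TS_RECORD_HEADER_INFO is padded to 64 bytes and the whole header to exactly 512 bytes, so a player can read and validate a .tpr file in one shot. A minimal sketch (not part of the patch), assuming the magic is stored as the little-endian bytes "TPPR"; the file name is made up:

#include <cstdio>

static bool read_tpr_header(const char* fname, TS_RECORD_HEADER* hdr)
{
    FILE* f = fopen(fname, "rb");          // e.g. "tp-rdp.tpr" (illustrative)
    if (NULL == f)
        return false;
    size_t n = fread(hdr, 1, sizeof(TS_RECORD_HEADER), f);
    fclose(f);
    if (n != sizeof(TS_RECORD_HEADER))     // header is exactly 512 bytes
        return false;
    if (hdr->info.magic != 0x52505054)     // 'T','P','P','R' read as little-endian u32
        return false;
    return hdr->info.ver == 4;             // v3.5.0 and later write version 4
}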
diff --git a/server/tp_core/common/protocol_interface.h b/server/tp_core/common/protocol_interface.h
index 50051c6..bc201ec 100644
--- a/server/tp_core/common/protocol_interface.h
+++ b/server/tp_core/common/protocol_interface.h
@@ -1,100 +1,100 @@
-#ifndef __TP_PROTOCOL_INTERFACE_H__
-#define __TP_PROTOCOL_INTERFACE_H__
-
-#include "ts_const.h"
-#include
-
-#ifdef EX_OS_WIN32
-#	ifdef TPP_EXPORTS
-#		define TPP_API __declspec(dllexport)
-#	else
-#		define TPP_API __declspec(dllimport)
-#	endif
-#else
-#	define TPP_API
-#endif
-
-#define TPP_CMD_INIT				0x00000000
-#define TPP_CMD_SET_RUNTIME_CFG		0x00000005
-#define TPP_CMD_KILL_SESSIONS		0x00000006
-
-typedef struct TPP_CONNECT_INFO
-{
-	char* sid;
-
-	// IDs of the three elements related to this connection info
-	int user_id;
-	int host_id;
-	int acc_id;
-
-	char* user_username;	// name of the user who requested this connection
-
-	char* host_ip;		// real IP of the remote host (same as remote_host_ip in direct-connect mode)
-	char* conn_ip;		// IP of the remote host to connect to (the routing host's IP in port-mapping mode)
-	int conn_port;		// port of the remote host to connect to (the routing host's port in port-mapping mode)
-	char* client_ip;
-
-	char* acc_username;	// account on the remote host
-	char* acc_secret;	// password (or private key) of the remote host account
-	char* username_prompt;	// for telnet
-	char* password_prompt;	// for telnet
-
-	int protocol_type;
-	int protocol_sub_type;
-	int protocol_flag;
-	int record_flag;
-	int auth_type;
-}TPP_CONNECT_INFO;
-
-typedef TPP_CONNECT_INFO* (*TPP_GET_CONNNECT_INFO_FUNC)(const char* sid);
-typedef void(*TPP_FREE_CONNECT_INFO_FUNC)(TPP_CONNECT_INFO* info);
-typedef bool(*TPP_SESSION_BEGIN_FUNC)(const TPP_CONNECT_INFO* info, int* db_id);
-typedef bool(*TPP_SESSION_UPDATE_FUNC)(int db_id, int protocol_sub_type, int state);
-typedef bool(*TPP_SESSION_END_FUNC)(const char* sid, int db_id, int ret);
-
-
-typedef struct TPP_INIT_ARGS
-{
-	ExLogger* logger;
-	ex_wstr exec_path;
-	ex_wstr etc_path;
-	ex_wstr replay_path;
-	ExIniFile* cfg;
-
-	TPP_GET_CONNNECT_INFO_FUNC func_get_connect_info;
-	TPP_FREE_CONNECT_INFO_FUNC func_free_connect_info;
-	TPP_SESSION_BEGIN_FUNC func_session_begin;
-	TPP_SESSION_UPDATE_FUNC func_session_update;
-	TPP_SESSION_END_FUNC func_session_end;
-}TPP_INIT_ARGS;
-
-// typedef struct TPP_SET_CFG_ARGS {
-// 	ex_u32 noop_timeout;	// as second.
-// }TPP_SET_CFG_ARGS;
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-	TPP_API ex_rv tpp_init(TPP_INIT_ARGS* init_args);
-	TPP_API ex_rv tpp_start(void);
-	TPP_API ex_rv tpp_stop(void);
-	TPP_API void tpp_timer(void);
-//	TPP_API void tpp_set_cfg(TPP_SET_CFG_ARGS* cfg_args);
-
-	TPP_API ex_rv tpp_command(ex_u32 cmd, const char* param);
-
-#ifdef __cplusplus
-}
-#endif
-
-typedef ex_rv (*TPP_INIT_FUNC)(TPP_INIT_ARGS* init_args);
-typedef ex_rv (*TPP_START_FUNC)(void);
-typedef ex_rv(*TPP_STOP_FUNC)(void);
-typedef void(*TPP_TIMER_FUNC)(void);
-// typedef void(*TPP_SET_CFG_FUNC)(TPP_SET_CFG_ARGS* cfg_args);
-
-typedef ex_rv(*TPP_COMMAND_FUNC)(ex_u32 cmd, const char* param);	// param is a JSON formatted string.
-
-#endif // __TP_PROTOCOL_INTERFACE_H__
+#ifndef __TP_PROTOCOL_INTERFACE_H__
+#define __TP_PROTOCOL_INTERFACE_H__
+
+#include "ts_const.h"
+#include
+
+#ifdef EX_OS_WIN32
+#	ifdef TPP_EXPORTS
+#		define TPP_API __declspec(dllexport)
+#	else
+#		define TPP_API __declspec(dllimport)
+#	endif
+#else
+#	define TPP_API
+#endif
+
+#define TPP_CMD_INIT				0x00000000
+#define TPP_CMD_SET_RUNTIME_CFG		0x00000005
+#define TPP_CMD_KILL_SESSIONS		0x00000006
+
+typedef struct TPP_CONNECT_INFO
+{
+	char* sid;
+
+	// IDs of the three elements related to this connection info
+	int user_id;
+	int host_id;
+	int acc_id;
+
+	char* user_username;	// name of the user who requested this connection
+
+	char* host_ip;		// real IP of the remote host (same as remote_host_ip in direct-connect mode)
+	char* conn_ip;		// IP of the remote host to connect to (the routing host's IP in port-mapping mode)
+	int conn_port;		// port of the remote host to connect to (the routing host's port in port-mapping mode)
+	char* client_ip;
+
+	char* acc_username;	// account on the remote host
+	char* acc_secret;	// password (or private key) of the remote host account
+	char* username_prompt;	// for telnet
+	char* password_prompt;	// for telnet
+
+	int protocol_type;
+	int protocol_sub_type;
+	int protocol_flag;
+	int record_flag;
+	int auth_type;
+}TPP_CONNECT_INFO;
+
+typedef TPP_CONNECT_INFO* (*TPP_GET_CONNNECT_INFO_FUNC)(const char* sid);
+typedef void(*TPP_FREE_CONNECT_INFO_FUNC)(TPP_CONNECT_INFO* info);
+typedef bool(*TPP_SESSION_BEGIN_FUNC)(const TPP_CONNECT_INFO* info, int* db_id);
+typedef bool(*TPP_SESSION_UPDATE_FUNC)(int db_id, int protocol_sub_type, int state);
+typedef bool(*TPP_SESSION_END_FUNC)(const char* sid, int db_id, int ret);
+
+
+typedef struct TPP_INIT_ARGS
+{
+	ExLogger* logger;
+	ex_wstr exec_path;
+	ex_wstr etc_path;
+	ex_wstr replay_path;
+	ExIniFile* cfg;
+
+	TPP_GET_CONNNECT_INFO_FUNC func_get_connect_info;
+	TPP_FREE_CONNECT_INFO_FUNC func_free_connect_info;
+	TPP_SESSION_BEGIN_FUNC func_session_begin;
+	TPP_SESSION_UPDATE_FUNC func_session_update;
+	TPP_SESSION_END_FUNC func_session_end;
+}TPP_INIT_ARGS;
+
+// typedef struct TPP_SET_CFG_ARGS {
+// 	ex_u32 noop_timeout;	// as second.
+// }TPP_SET_CFG_ARGS;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+	TPP_API ex_rv tpp_init(TPP_INIT_ARGS* init_args);
+	TPP_API ex_rv tpp_start(void);
+	TPP_API ex_rv tpp_stop(void);
+	TPP_API void tpp_timer(void);
+//	TPP_API void tpp_set_cfg(TPP_SET_CFG_ARGS* cfg_args);
+
+	TPP_API ex_rv tpp_command(ex_u32 cmd, const char* param);
+
+#ifdef __cplusplus
+}
+#endif
+
+typedef ex_rv (*TPP_INIT_FUNC)(TPP_INIT_ARGS* init_args);
+typedef ex_rv (*TPP_START_FUNC)(void);
+typedef ex_rv(*TPP_STOP_FUNC)(void);
+typedef void(*TPP_TIMER_FUNC)(void);
+// typedef void(*TPP_SET_CFG_FUNC)(TPP_SET_CFG_ARGS* cfg_args);
+
+typedef ex_rv(*TPP_COMMAND_FUNC)(ex_u32 cmd, const char* param);	// param is a JSON formatted string.
+
+#endif // __TP_PROTOCOL_INTERFACE_H__
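Note: the extern "C" functions above form the ABI between tp_core and a protocol library, and the trailing function-pointer typedefs exist so the core can resolve those entry points dynamically. A sketch (not part of the patch) of how that lookup could work on POSIX with dlopen()/dlsym(); path handling and error reporting are simplified:

#include <dlfcn.h>

static bool load_tpp(const char* so_path, TPP_INIT_ARGS* args)
{
    void* h = dlopen(so_path, RTLD_NOW | RTLD_LOCAL);
    if (NULL == h)
        return false;

    TPP_INIT_FUNC fn_init = (TPP_INIT_FUNC)dlsym(h, "tpp_init");
    TPP_START_FUNC fn_start = (TPP_START_FUNC)dlsym(h, "tpp_start");
    if (NULL == fn_init || NULL == fn_start) {
        dlclose(h);
        return false;
    }

    // tpp_init() hands the plugin its logger, paths and callback table.
    if (fn_init(args) != TPE_OK || fn_start() != TPE_OK) {
        dlclose(h);
        return false;
    }
    return true;
}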
diff --git a/server/tp_core/common/ts_const.h b/server/tp_core/common/ts_const.h
index da1fc45..be55510 100644
--- a/server/tp_core/common/ts_const.h
+++ b/server/tp_core/common/ts_const.h
@@ -1,26 +1,26 @@
-#ifndef __TS_ERRNO_H__
-#define __TS_ERRNO_H__
-
-//#include "ts_types.h"
-
-#define TS_RDP_PROXY_PORT		52089
-#define TS_RDP_PROXY_HOST		"0.0.0.0"
-
-#define TS_SSH_PROXY_PORT		52189
-#define TS_SSH_PROXY_HOST		"0.0.0.0"
-
-#define TS_TELNET_PROXY_PORT	52389
-#define TS_TELNET_PROXY_HOST	"0.0.0.0"
-
-#define TS_HTTP_RPC_PORT		52080
-//#define TS_HTTP_RPC_HOST		"127.0.0.1"
-#define TS_HTTP_RPC_HOST		"localhost"
-
-
-#define TS_RDP_PROTOCOL_RDP			0
-#define TS_RDP_PROTOCOL_TLS			1
-#define TS_RDP_PROTOCOL_HYBRID		2
-#define TS_RDP_PROTOCOL_RDSTLS		4
-#define TS_RDP_PROTOCOL_HYBRID_EX	8
-
-#endif // __TS_ERRNO_H__
+#ifndef __TS_ERRNO_H__
+#define __TS_ERRNO_H__
+
+//#include "ts_types.h"
+
+#define TS_RDP_PROXY_PORT		52089
+#define TS_RDP_PROXY_HOST		"0.0.0.0"
+
+#define TS_SSH_PROXY_PORT		52189
+#define TS_SSH_PROXY_HOST		"0.0.0.0"
+
+#define TS_TELNET_PROXY_PORT	52389
+#define TS_TELNET_PROXY_HOST	"0.0.0.0"
+
+#define TS_HTTP_RPC_PORT		52080
+//#define TS_HTTP_RPC_HOST		"127.0.0.1"
+#define TS_HTTP_RPC_HOST		"localhost"
+
+
+#define TS_RDP_PROTOCOL_RDP			0
+#define TS_RDP_PROTOCOL_TLS			1
+#define TS_RDP_PROTOCOL_HYBRID		2
+#define TS_RDP_PROTOCOL_RDSTLS		4
+#define TS_RDP_PROTOCOL_HYBRID_EX	8
+
+#endif // __TS_ERRNO_H__
diff --git a/server/tp_core/common/ts_membuf.cpp b/server/tp_core/common/ts_membuf.cpp
index c5f063f..f601f6a 100644
--- a/server/tp_core/common/ts_membuf.cpp
+++ b/server/tp_core/common/ts_membuf.cpp
@@ -1,4 +1,4 @@
-#include "ts_membuf.h"
+#include "ts_membuf.h"
 #include
 
 MemBuffer::MemBuffer()// : m_buffer(NULL), m_data_size(0), m_buffer_size(0)
@@ -45,7 +45,7 @@ void MemBuffer::reserve(size_t size)
 		return;
 	}
 
-	// round the new buffer size up to an integer multiple of MEMBUF_BLOCK_SIZE
+	// round the new buffer size up to an integer multiple of MEMBUF_BLOCK_SIZE
 	size_t new_size = (size + MEMBUF_BLOCK_SIZE - 1) & ~(MEMBUF_BLOCK_SIZE - 1);
 
 	//EXLOGD("[mbuf:%p] reserve(2): m_buf: %p, m_buf_size: %d, size: %d, new size: %d.\n", this, m_buffer, m_buffer_size, size, new_size);
diff --git a/server/tp_core/common/ts_membuf.h b/server/tp_core/common/ts_membuf.h
index 0d7d283..95fa44c 100644
--- a/server/tp_core/common/ts_membuf.h
+++ b/server/tp_core/common/ts_membuf.h
@@ -1,4 +1,4 @@
-#ifndef __TS_MEMBUF_H__
+#ifndef __TS_MEMBUF_H__
 #define __TS_MEMBUF_H__
 
 #include
@@ -11,15 +11,15 @@ public:
 	MemBuffer();
 	virtual ~MemBuffer();
 
-	// append size bytes of data to the end of the buffer (may cause the buffer to grow)
+	// append size bytes of data to the end of the buffer (may cause the buffer to grow)
 	void append(const ex_u8* data, size_t size);
-	// make the buffer hold at least the given number of bytes (may grow the buffer, never shrinks it; valid data is guaranteed unchanged)
+	// make the buffer hold at least the given number of bytes (may grow the buffer, never shrinks it; valid data is guaranteed unchanged)
 	void reserve(size_t size);
-	// append m's valid data to the end of our own valid data; may grow the buffer, m is left unchanged
+	// append m's valid data to the end of our own valid data; may grow the buffer, m is left unchanged
 	void concat(const MemBuffer& m);
-	// remove size bytes from the head of the buffer (the buffer itself may not shrink); the remaining valid data moves to the front.
+	// remove size bytes from the head of the buffer (the buffer itself may not shrink); the remaining valid data moves to the front.
 	void pop(size_t size);
-	// clear the buffer (0 bytes of valid data; the buffer itself is unchanged)
+	// clear the buffer (0 bytes of valid data; the buffer itself is unchanged)
 	void empty(void) { m_data_size = 0; }
 
 	bool is_empty(void) { return m_data_size == 0; }
diff --git a/server/tp_core/common/ts_memstream.cpp b/server/tp_core/common/ts_memstream.cpp
index e51c142..64388cb 100644
--- a/server/tp_core/common/ts_memstream.cpp
+++ b/server/tp_core/common/ts_memstream.cpp
@@ -1,222 +1,222 @@
-#include "ts_memstream.h"
-
-MemStream::MemStream(MemBuffer& mbuf) : m_mbuf(mbuf)
-{
-	m_offset = 0;
-}
-
-MemStream::~MemStream()
-{}
-
-void MemStream::reset(void)
-{
-	m_mbuf.empty();
-	rewind();
-}
-
-
-bool MemStream::seek(size_t offset)
-{
-	if (offset > m_mbuf.size())
-		return false;
-
-	m_offset =
offset; - return true; -} - -bool MemStream::skip(size_t n) -{ - if (0 == n) - return true; - - if (m_offset + n > m_mbuf.size()) - return false; - m_offset += n; - return true; -} - -bool MemStream::rewind(size_t n) -{ - if (m_offset < n) - return false; - - if (0 == n) - m_offset = 0; - else - m_offset -= n; - return true; -} - -ex_u8 MemStream::get_u8(void) -{ - ASSERT(m_offset + 1 <= m_mbuf.size()); - - ex_u8 v = (m_mbuf.data() + m_offset)[0]; - m_offset++; - return v; -} - -ex_u16 MemStream::get_u16_le(void) -{ - ASSERT(m_offset + 2 <= m_mbuf.size()); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ex_u16 v = (ex_u16)(p[0] | (p[1] << 8)); -#else - ex_u16 v = ((ex_u16*)p)[0]; -#endif - m_offset += 2; - return v; -} - -ex_u16 MemStream::get_u16_be(void) -{ - ASSERT(m_offset + 2 <= m_mbuf.size()); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ex_u16 v = ((ex_u16*)p)[0]; -#else - ex_u16 v = (ex_u16)((p[0] << 8) | p[1]); -#endif - m_offset += 2; - return v; -} - - -ex_u32 MemStream::get_u32_le(void) -{ - ASSERT(m_offset + 4 <= m_mbuf.size()); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ex_u32 v = (ex_u32)(p[0] | (p[1] << 8) | (p[2] << 16) | (p[3] << 24)); -#else - ex_u32 v = ((ex_u32*)p)[0]; -#endif - m_offset += 4; - return v; -} - -ex_u32 MemStream::get_u32_be(void) -{ - ASSERT(m_offset + 4 <= m_mbuf.size()); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ex_u32 v = ((ex_u32*)p)[0]; -#else - ex_u32 v = (ex_u32)((p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3]); -#endif - m_offset += 4; - return v; -} - -ex_u8* MemStream::get_bin(size_t n) -{ - ASSERT(m_offset + 4 <= m_mbuf.size()); - ex_u8* p = m_mbuf.data() + m_offset; - m_offset += n; - return p; -} - - -void MemStream::put_zero(size_t n) -{ - m_mbuf.reserve(m_mbuf.size() + n); - memset(m_mbuf.data() + m_offset, 0, n); - m_offset += n; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_u8(ex_u8 v) -{ - m_mbuf.reserve(m_mbuf.size() + 1); - - (m_mbuf.data() + m_offset)[0] = v; - m_offset++; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_u16_le(ex_u16 v) -{ - m_mbuf.reserve(m_mbuf.size() + 2); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - p[0] = (ex_u8)v; - p[1] = (ex_u8)(v >> 8); -#else - ((ex_u16*)p)[0] = v; -#endif - m_offset += 2; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_u16_be(ex_u16 v) -{ - m_mbuf.reserve(m_mbuf.size() + 2); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ((ex_u16*)p)[0] = v; -#else - ex_u8* _v = (ex_u8*)&v; - p[0] = _v[1]; - p[1] = _v[0]; -#endif - m_offset += 2; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_u32_le(ex_u32 v) -{ - m_mbuf.reserve(m_mbuf.size() + 4); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - p[0] = (ex_u8)v; - p[1] = (ex_u8)(v >> 8); - p[2] = (ex_u8)(v >> 16); - p[3] = (ex_u8)(v >> 24); -#else - ((ex_u32*)p)[0] = v; -#endif - m_offset += 4; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_u32_be(ex_u32 v) -{ - m_mbuf.reserve(m_mbuf.size() + 4); - - ex_u8* p = m_mbuf.data() + m_offset; -#if defined(B_ENDIAN) - ((ex_u32*)p)[0] = v; -#else - ex_u8* _v = (ex_u8*)&v; - p[0] = _v[3]; - p[1] = _v[2]; - p[2] = _v[1]; - p[3] = _v[0]; -#endif - m_offset += 4; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - -void MemStream::put_bin(const ex_u8* p, size_t n) -{ - 
m_mbuf.reserve(m_mbuf.size() + n); - memcpy(m_mbuf.data() + m_offset, p, n); - m_offset += n; - if (m_mbuf.size() < m_offset) - m_mbuf.size(m_offset); -} - +#include "ts_memstream.h" + +MemStream::MemStream(MemBuffer& mbuf) : m_mbuf(mbuf) +{ + m_offset = 0; +} + +MemStream::~MemStream() +{} + +void MemStream::reset(void) +{ + m_mbuf.empty(); + rewind(); +} + + +bool MemStream::seek(size_t offset) +{ + if (offset > m_mbuf.size()) + return false; + + m_offset = offset; + return true; +} + +bool MemStream::skip(size_t n) +{ + if (0 == n) + return true; + + if (m_offset + n > m_mbuf.size()) + return false; + m_offset += n; + return true; +} + +bool MemStream::rewind(size_t n) +{ + if (m_offset < n) + return false; + + if (0 == n) + m_offset = 0; + else + m_offset -= n; + return true; +} + +ex_u8 MemStream::get_u8(void) +{ + ASSERT(m_offset + 1 <= m_mbuf.size()); + + ex_u8 v = (m_mbuf.data() + m_offset)[0]; + m_offset++; + return v; +} + +ex_u16 MemStream::get_u16_le(void) +{ + ASSERT(m_offset + 2 <= m_mbuf.size()); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + ex_u16 v = (ex_u16)(p[0] | (p[1] << 8)); +#else + ex_u16 v = ((ex_u16*)p)[0]; +#endif + m_offset += 2; + return v; +} + +ex_u16 MemStream::get_u16_be(void) +{ + ASSERT(m_offset + 2 <= m_mbuf.size()); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + ex_u16 v = ((ex_u16*)p)[0]; +#else + ex_u16 v = (ex_u16)((p[0] << 8) | p[1]); +#endif + m_offset += 2; + return v; +} + + +ex_u32 MemStream::get_u32_le(void) +{ + ASSERT(m_offset + 4 <= m_mbuf.size()); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + ex_u32 v = (ex_u32)(p[0] | (p[1] << 8) | (p[2] << 16) | (p[3] << 24)); +#else + ex_u32 v = ((ex_u32*)p)[0]; +#endif + m_offset += 4; + return v; +} + +ex_u32 MemStream::get_u32_be(void) +{ + ASSERT(m_offset + 4 <= m_mbuf.size()); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + ex_u32 v = ((ex_u32*)p)[0]; +#else + ex_u32 v = (ex_u32)((p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3]); +#endif + m_offset += 4; + return v; +} + +ex_u8* MemStream::get_bin(size_t n) +{ + ASSERT(m_offset + 4 <= m_mbuf.size()); + ex_u8* p = m_mbuf.data() + m_offset; + m_offset += n; + return p; +} + + +void MemStream::put_zero(size_t n) +{ + m_mbuf.reserve(m_mbuf.size() + n); + memset(m_mbuf.data() + m_offset, 0, n); + m_offset += n; + if (m_mbuf.size() < m_offset) + m_mbuf.size(m_offset); +} + +void MemStream::put_u8(ex_u8 v) +{ + m_mbuf.reserve(m_mbuf.size() + 1); + + (m_mbuf.data() + m_offset)[0] = v; + m_offset++; + if (m_mbuf.size() < m_offset) + m_mbuf.size(m_offset); +} + +void MemStream::put_u16_le(ex_u16 v) +{ + m_mbuf.reserve(m_mbuf.size() + 2); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + p[0] = (ex_u8)v; + p[1] = (ex_u8)(v >> 8); +#else + ((ex_u16*)p)[0] = v; +#endif + m_offset += 2; + if (m_mbuf.size() < m_offset) + m_mbuf.size(m_offset); +} + +void MemStream::put_u16_be(ex_u16 v) +{ + m_mbuf.reserve(m_mbuf.size() + 2); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + ((ex_u16*)p)[0] = v; +#else + ex_u8* _v = (ex_u8*)&v; + p[0] = _v[1]; + p[1] = _v[0]; +#endif + m_offset += 2; + if (m_mbuf.size() < m_offset) + m_mbuf.size(m_offset); +} + +void MemStream::put_u32_le(ex_u32 v) +{ + m_mbuf.reserve(m_mbuf.size() + 4); + + ex_u8* p = m_mbuf.data() + m_offset; +#if defined(B_ENDIAN) + p[0] = (ex_u8)v; + p[1] = (ex_u8)(v >> 8); + p[2] = (ex_u8)(v >> 16); + p[3] = (ex_u8)(v >> 24); +#else + ((ex_u32*)p)[0] = v; +#endif + m_offset += 4; + if (m_mbuf.size() < 
m_offset)
+		m_mbuf.size(m_offset);
+}
+
+void MemStream::put_u32_be(ex_u32 v)
+{
+	m_mbuf.reserve(m_mbuf.size() + 4);
+
+	ex_u8* p = m_mbuf.data() + m_offset;
+#if defined(B_ENDIAN)
+	((ex_u32*)p)[0] = v;
+#else
+	ex_u8* _v = (ex_u8*)&v;
+	p[0] = _v[3];
+	p[1] = _v[2];
+	p[2] = _v[1];
+	p[3] = _v[0];
+#endif
+	m_offset += 4;
+	if (m_mbuf.size() < m_offset)
+		m_mbuf.size(m_offset);
+}
+
+void MemStream::put_bin(const ex_u8* p, size_t n)
+{
+	m_mbuf.reserve(m_mbuf.size() + n);
+	memcpy(m_mbuf.data() + m_offset, p, n);
+	m_offset += n;
+	if (m_mbuf.size() < m_offset)
+		m_mbuf.size(m_offset);
+}
+
diff --git a/server/tp_core/common/ts_memstream.h b/server/tp_core/common/ts_memstream.h
index 9ab425c..ddbb45f 100644
--- a/server/tp_core/common/ts_memstream.h
+++ b/server/tp_core/common/ts_memstream.h
@@ -1,45 +1,45 @@
-#ifndef __TS_MEMSTREAM_H__
-#define __TS_MEMSTREAM_H__
-
-#include "ts_membuf.h"
-
-class MemStream
-{
-public:
-	MemStream(MemBuffer& mbuf);
-	~MemStream();
-
-	void reset(void);	// clear the buffer data (without releasing memory); the pointer moves to the head
-
-	bool seek(size_t offset);	// move the pointer to the given offset; returns an error if out of range
-	bool rewind(size_t n = 0);	// step back n bytes; returns an error if out of range; if n is 0, go back to the very beginning
-	bool skip(size_t n);	// skip n bytes; returns an error if out of range
-
-	ex_u8* ptr(void) { return m_mbuf.data() + m_offset; }	// return the current data pointer
-	size_t offset(void) { return m_offset; }	// return the current pointer's offset from the start of the data
-
-	size_t left(void) { return m_mbuf.size() - m_offset; }	// return the size of the remaining data (from the current data pointer to the end of the buffer)
-
-	ex_u8 get_u8(void);
-	ex_u16 get_u16_le(void);
-	ex_u16 get_u16_be(void);
-	ex_u32 get_u32_le(void);
-	ex_u32 get_u32_be(void);
-	ex_u8* get_bin(size_t n);	// return a pointer to the data currently pointed to; the internal offset advances n bytes
-
-	void put_zero(size_t n);	// write n zero bytes
-	void put_u8(ex_u8 v);
-	void put_u16_le(ex_u16 v);
-	void put_u16_be(ex_u16 v);
-	void put_u32_le(ex_u32 v);
-	void put_u32_be(ex_u32 v);
-	void put_bin(const ex_u8* p, size_t n);	// write the n bytes of data pointed to by p
-
-	size_t size(void) { return m_mbuf.size(); }
-
-private:
-	MemBuffer& m_mbuf;
-	size_t m_offset;
-};
-
-#endif // __TS_MEMSTREAM_H__
+#ifndef __TS_MEMSTREAM_H__
+#define __TS_MEMSTREAM_H__
+
+#include "ts_membuf.h"
+
+class MemStream
+{
+public:
+	MemStream(MemBuffer& mbuf);
+	~MemStream();
+
+	void reset(void);	// clear the buffer data (without releasing memory); the pointer moves to the head
+
+	bool seek(size_t offset);	// move the pointer to the given offset; returns an error if out of range
+	bool rewind(size_t n = 0);	// step back n bytes; returns an error if out of range; if n is 0, go back to the very beginning
+	bool skip(size_t n);	// skip n bytes; returns an error if out of range
+
+	ex_u8* ptr(void) { return m_mbuf.data() + m_offset; }	// return the current data pointer
+	size_t offset(void) { return m_offset; }	// return the current pointer's offset from the start of the data
+
+	size_t left(void) { return m_mbuf.size() - m_offset; }	// return the size of the remaining data (from the current data pointer to the end of the buffer)
+
+	ex_u8 get_u8(void);
+	ex_u16 get_u16_le(void);
+	ex_u16 get_u16_be(void);
+	ex_u32 get_u32_le(void);
+	ex_u32 get_u32_be(void);
+	ex_u8* get_bin(size_t n);	// return a pointer to the data currently pointed to; the internal offset advances n bytes
+
+	void put_zero(size_t n);	// write n zero bytes
+	void put_u8(ex_u8 v);
+	void put_u16_le(ex_u16 v);
+	void put_u16_be(ex_u16 v);
+	void put_u32_le(ex_u32 v);
+	void put_u32_be(ex_u32 v);
+	void put_bin(const ex_u8* p, size_t n);	// write the n bytes of data pointed to by p
+
+	size_t size(void) { return m_mbuf.size(); }
+
+private:
+	MemBuffer& m_mbuf;
+	size_t m_offset;
+};
+
+#endif // __TS_MEMSTREAM_H__
diff --git a/server/tp_core/core/CMakeLists.txt b/server/tp_core/core/CMakeLists.txt
index 2abe5de..5b9f238 100644
--- a/server/tp_core/core/CMakeLists.txt
+++ b/server/tp_core/core/CMakeLists.txt
@@ -3,19 +3,17 @@ cmake_minimum_required(VERSION 3.5)
 MESSAGE(STATUS "=======================================================")
 MESSAGE(STATUS "  tp_core")
 MESSAGE(STATUS "=======================================================")
-#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
-#MESSAGE(STATUS "current source directory is
${CMAKE_CURRENT_SOURCE_DIR}") include(../../../CMakeCfg.txt) ADD_DEFINITIONS( - -DMG_ENABLE_THREADS - -DMG_DISABLE_HTTP_DIGEST_AUTH - -DMG_DISABLE_MQTT - -DMG_DISABLE_SSI - # -DMG_DISABLE_FILESYSTEM - -DHAVE_CONFIG_H - # -DCS_COMMON_MD5_H_ - -DCS_DISABLE_MD5 + -DMG_ENABLE_THREADS + -DMG_DISABLE_HTTP_DIGEST_AUTH + -DMG_DISABLE_MQTT + -DMG_DISABLE_SSI + # -DMG_DISABLE_FILESYSTEM + -DHAVE_CONFIG_H + # -DCS_COMMON_MD5_H_ + -DCS_DISABLE_MD5 ) aux_source_directory(. DIR_SRCS) @@ -30,16 +28,26 @@ include_directories( ../../../external/jsoncpp/include ) -include_directories( +if (OS_LINUX) + set(CMAKE_EXE_LINKER_FLAGS "-export-dynamic") + include_directories( ${TP_EXTERNAL_RELEASE_DIR}/include -) -link_directories(${TP_EXTERNAL_RELEASE_DIR}/lib) - + ) + link_directories(${TP_EXTERNAL_RELEASE_DIR}/lib) +elseif (OS_MACOS) + include_directories( + /usr/local/opt/openssl/include + ${TP_EXTERNAL_RELEASE_DIR}/include + ) + link_directories( + /usr/local/opt/openssl/lib + ${TP_EXTERNAL_RELEASE_DIR}/lib + ) +endif () add_executable(tp_core ${DIR_SRCS}) if (OS_LINUX) - set(CMAKE_EXE_LINKER_FLAGS "-export-dynamic") target_link_libraries(tp_core ssl crypto mbedx509 mbedtls mbedcrypto dl pthread rt util) elseif (OS_MACOS) target_link_libraries(tp_core ssl crypto mbedx509 mbedtls mbedcrypto dl pthread util) diff --git a/server/tp_core/core/tp_core.rc b/server/tp_core/core/tp_core.rc index 2f2222e..ba66674 100644 Binary files a/server/tp_core/core/tp_core.rc and b/server/tp_core/core/tp_core.rc differ diff --git a/server/tp_core/core/tp_core.vs2015.sln b/server/tp_core/core/tp_core.vs2015.sln deleted file mode 100644 index 207213a..0000000 --- a/server/tp_core/core/tp_core.vs2015.sln +++ /dev/null @@ -1,22 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.23107.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tp_core", "tp_core.vs2015.vcxproj", "{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|x86 = Debug|x86 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.ActiveCfg = Debug|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.Build.0 = Debug|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.ActiveCfg = Release|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/server/tp_core/core/tp_core.vs2015.vcxproj b/server/tp_core/core/tp_core.vs2015.vcxproj deleted file mode 100644 index fdcdae2..0000000 --- a/server/tp_core/core/tp_core.vs2015.vcxproj +++ /dev/null @@ -1,234 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - Debug - x64 - - - Release - x64 - - - - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B} - Win32Proj - tp_core - 8.1 - tp_core - - - - Application - true - v140_xp - Unicode - - - Application - false - v140_xp - true - Unicode - - - Application - true - v140 - Unicode - - - Application - false - v140 - true - Unicode - - - - - - - - - - - - - - - - - - - - - true - ..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - C:\Program Files %28x86%29\Visual Leak Detector\include;$(IncludePath) - C:\Program Files %28x86%29\Visual Leak 
Detector\lib\Win32;$(LibraryPath) - - - true - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - - - - Level3 - Disabled - WIN32;_DEBUG;_CONSOLE;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) - true - ../../../common/teleport;../../../common/libex/include;../../../external/jsoncpp/include;../../../external/mbedtls/include;../../../external/mongoose - MultiThreadedDebug - - - Console - - - libcmt.lib - Debug - - - - - - - Level3 - Disabled - _DEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../external/windows/libuv/include;../../external/windows/openssl/include;../../external/windows/zlib/include;../../external/windows/mbedtls/include;../../external/windows/libssh/include;../../external/common/jsoncpp/include;../../external/common/sqlite;d:/apps/vld/include - - - Windows - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - - - - Level3 - - - MaxSpeed - true - true - WIN32;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) - true - ../../../common/teleport;../../../common/libex/include;../../../external/jsoncpp/include;../../../external/mbedtls/include;../../../external/mongoose - MultiThreaded - - - Console - true - true - - - - - - - Level3 - - - MaxSpeed - true - true - NDEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../external/windows/libuv/include;../../external/windows/openssl/include;../../external/windows/zlib/include;../../external/windows/mbedtls/include;../../external/windows/libssh/include;../../external/common/jsoncpp/include;../../external/common/sqlite - - - Windows - true - true - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/server/tp_core/core/tp_core.vs2015.vcxproj.filters b/server/tp_core/core/tp_core.vs2015.vcxproj.filters deleted file mode 100644 index 3d67ae0..0000000 --- a/server/tp_core/core/tp_core.vs2015.vcxproj.filters +++ /dev/null @@ -1,206 +0,0 @@ - - - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {0155895f-d6be-4e0f-970d-9b6b5c759502} - - - {9c2d60b3-2932-485b-bccd-b66886b0286b} - - - {541f6e28-2218-4de7-9f3a-d45c56647c82} - - - {95f40be8-2d36-40ee-92ea-184bb5c44411} - - - {a3d6f884-46a8-4450-9b4c-184c6189fea2} - - - {2450e4a6-cc0b-40fe-ac51-0b4367adef26} - - - {ffc7dbb8-e1f6-4655-bc79-1038b5cdfb15} - - - {d38dfe81-e91d-4248-a0bc-1f2e014d15ee} - - - {0f03131b-cf95-4be3-bc23-8bdf582d9198} - - - {dd7c7b9e-b873-42ba-939f-ae6e7b7bde12} - - - {b94c32df-7b3a-4712-a951-ec92d2e875cc} - - - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - libex\src - - - libex\src - - - libex\src - - - libex\src - 
- - libex\src - - - libex\src - - - libex\src - - - mongoose - - - jsoncpp\src - - - jsoncpp\src - - - jsoncpp\src - - - mbedtls\library - - - mbedtls\library - - - mbedtls\library - - - main app - - - main app - - - mbedtls\library - - - - - main app - - - main app - - - main app - - - main app - - - main app - - - main app - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - Resource Files - - - jsoncpp\header - - - mongoose - - - common - - - common - - - main app - - - main app - - - common - - - main app - - - - - Resource Files - - - - - Resource Files - - - \ No newline at end of file diff --git a/server/tp_core/core/tp_core.vs2017.sln b/server/tp_core/core/tp_core.vs2017.sln index d059e1d..3c23a0b 100644 --- a/server/tp_core/core/tp_core.vs2017.sln +++ b/server/tp_core/core/tp_core.vs2017.sln @@ -19,4 +19,7 @@ Global GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {787A1953-2C25-4859-B81F-7F63A94B8EE3} + EndGlobalSection EndGlobal diff --git a/server/tp_core/core/tp_core.vs2017.vcxproj b/server/tp_core/core/tp_core.vs2017.vcxproj index 8f3236a..ee0e4d1 100644 --- a/server/tp_core/core/tp_core.vs2017.vcxproj +++ b/server/tp_core/core/tp_core.vs2017.vcxproj @@ -9,14 +9,6 @@ Release Win32 - - Debug - x64 - - - Release - x64 -
    {6548CB1D-A7BA-4A68-9B3F-A5129F77868B} @@ -39,19 +31,6 @@ Unicode v141 - - Application - true - v140 - Unicode - - - Application - false - v140 - true - Unicode - @@ -63,35 +42,19 @@ - - - - - - true ..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - C:\Program Files %28x86%29\Visual Leak Detector\include;$(IncludePath) - C:\Program Files %28x86%29\Visual Leak Detector\lib\Win32;$(LibraryPath) - - - true - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + c:\apps\vld\include;$(IncludePath) + C:\apps\vld\lib\Win32;$(LibraryPath) false ..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - false - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - @@ -111,22 +74,6 @@ Debug - - - - - Level3 - Disabled - _DEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../external/windows/libuv/include;../../external/windows/openssl/include;../../external/windows/zlib/include;../../external/windows/mbedtls/include;../../external/windows/libssh/include;../../external/common/jsoncpp/include;../../external/common/sqlite;d:/apps/vld/include - - - Windows - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - Level3 @@ -148,26 +95,6 @@ - - - Level3 - - - MaxSpeed - true - true - NDEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../external/windows/libuv/include;../../external/windows/openssl/include;../../external/windows/zlib/include;../../external/windows/mbedtls/include;../../external/windows/libssh/include;../../external/common/jsoncpp/include;../../external/common/sqlite - - - Windows - true - true - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - diff --git a/server/tp_core/core/ts_http_rpc.cpp b/server/tp_core/core/ts_http_rpc.cpp index e8bd3f6..e5feb0f 100644 --- a/server/tp_core/core/ts_http_rpc.cpp +++ b/server/tp_core/core/ts_http_rpc.cpp @@ -1,553 +1,573 @@ -#include "ts_http_rpc.h" -#include "ts_ver.h" -#include "ts_env.h" -#include "ts_session.h" -#include "ts_crypto.h" -#include "ts_web_rpc.h" -#include "tp_tpp_mgr.h" - -extern TppManager g_tpp_mgr; - -#include - - -#define HEXTOI(x) (isdigit(x) ? x - '0' : x - 'W') -int ts_url_decode(const char *src, int src_len, char *dst, int dst_len, int is_form_url_encoded) -{ - int i, j, a, b; - - for (i = j = 0; i < src_len && j < dst_len - 1; i++, j++) - { - if (src[i] == '%') - { - if (i < src_len - 2 && isxdigit(*(const unsigned char *)(src + i + 1)) && - isxdigit(*(const unsigned char *)(src + i + 2))) { - a = tolower(*(const unsigned char *)(src + i + 1)); - b = tolower(*(const unsigned char *)(src + i + 2)); - dst[j] = (char)((HEXTOI(a) << 4) | HEXTOI(b)); - i += 2; - } - else - { - return -1; - } - } - else if (is_form_url_encoded && src[i] == '+') - { - dst[j] = ' '; - } - else - { - dst[j] = src[i]; - } - } - - dst[j] = '\0'; /* Null-terminate the destination */ - - return i >= src_len ? 
j : -1; -} - -TsHttpRpc::TsHttpRpc() : - ExThreadBase("http-rpc-thread") -{ - mg_mgr_init(&m_mg_mgr, NULL); -} - -TsHttpRpc::~TsHttpRpc() -{ - mg_mgr_free(&m_mg_mgr); -} - -void TsHttpRpc::_thread_loop(void) -{ - EXLOGI("[core] TeleportServer-RPC ready on %s:%d\n", m_host_ip.c_str(), m_host_port); - - while (!m_need_stop) - { - mg_mgr_poll(&m_mg_mgr, 500); - } - - EXLOGV("[core] rpc main loop end.\n"); -} - - -bool TsHttpRpc::init(void) -{ - struct mg_connection* nc = NULL; - - m_host_ip = g_env.rpc_bind_ip; - m_host_port = g_env.rpc_bind_port; - - char addr[128] = { 0 }; - // if (0 == strcmp(m_host_ip.c_str(), "127.0.0.1") || 0 == strcmp(m_host_ip.c_str(), "localhost")) - // ex_strformat(addr, 128, ":%d", m_host_port); - // else - // ex_strformat(addr, 128, "%s:%d", m_host_ip.c_str(), m_host_port); - if (0 == strcmp(m_host_ip.c_str(), "0.0.0.0")) - ex_strformat(addr, 128, ":%d", m_host_port); - else - ex_strformat(addr, 128, "%s:%d", m_host_ip.c_str(), m_host_port); - - nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); - if (NULL == nc) - { - EXLOGE("[core] rpc listener failed to bind at %s.\n", addr); - return false; - } - - nc->user_data = this; - - mg_set_protocol_http_websocket(nc); - - // ڴй¶ĵطÿԼ1KBڴ棩 - // DO NOT USE MULTITHREADING OF MG. - // cpq (one of the authors of MG) commented on 3 Feb: Multithreading support has been removed. - // https://github.com/cesanta/mongoose/commit/707b9ed2d6f177b3ad8787cb16a1bff90ddad992 - //mg_enable_multithreading(nc); - - return true; -} - -void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_data) -{ - struct http_message *hm = (struct http_message*)ev_data; - - TsHttpRpc* _this = (TsHttpRpc*)nc->user_data; - if (NULL == _this) - { - EXLOGE("[core] rpc invalid http request.\n"); - return; - } - - switch (ev) - { - case MG_EV_HTTP_REQUEST: - { - ex_astr ret_buf; - - ex_astr uri; - uri.assign(hm->uri.p, hm->uri.len); - - //EXLOGD("[core] rpc got request: %s\n", uri.c_str()); - - if (uri == "/rpc") - { - ex_astr method; - Json::Value json_param; - - ex_rv rv = _this->_parse_request(hm, method, json_param); - if (TPE_OK != rv) - { - EXLOGE("[core] rpc got invalid request.\n"); - _this->_create_json_ret(ret_buf, rv); - } - else - { - EXLOGD("[core] rpc got request method `%s`\n", method.c_str()); - _this->_process_request(method, json_param, ret_buf); - } - } - else - { - EXLOGE("[core] rpc got invalid request: not `rpc` uri.\n"); - _this->_create_json_ret(ret_buf, TPE_PARAM, "not a `rpc` request."); - } - - mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\nContent-Type: application/json\r\n\r\n%s", (int)ret_buf.size() - 1, &ret_buf[0]); - nc->flags |= MG_F_SEND_AND_CLOSE; - } - break; - default: - break; - } -} - -ex_rv TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, Json::Value& json_param) -{ - if (NULL == req) - return TPE_PARAM; - - bool is_get = true; - if (req->method.len == 3 && 0 == memcmp(req->method.p, "GET", req->method.len)) - is_get = true; - else if (req->method.len == 4 && 0 == memcmp(req->method.p, "POST", req->method.len)) - is_get = false; - else - return TPE_HTTP_METHOD; - - ex_astr json_str; - bool need_decode = false; - if (is_get) { - json_str.assign(req->query_string.p, req->query_string.len); - need_decode = true; - } - else { - json_str.assign(req->body.p, req->body.len); - if (json_str.length() > 0 && json_str[0] == '%') - need_decode = true; - } - - if (need_decode) { - // url-decode - int len = json_str.length() * 2; - ex_chars sztmp; - 
sztmp.resize(len); - memset(&sztmp[0], 0, len); - if (-1 == ts_url_decode(json_str.c_str(), json_str.length(), &sztmp[0], len, 0)) - return TPE_HTTP_URL_ENCODE; - - json_str = &sztmp[0]; - } - - if (0 == json_str.length()) - return TPE_PARAM; - - Json::Reader jreader; - - if (!jreader.parse(json_str.c_str(), json_param)) - return TPE_JSON_FORMAT; - - if (json_param.isArray()) - return TPE_PARAM; - - if (json_param["method"].isNull() || !json_param["method"].isString()) - return TPE_PARAM; - - func_cmd = json_param["method"].asCString(); - json_param = json_param["param"]; - - return TPE_OK; -} - -void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode, const Json::Value& jr_data) -{ - // أ {"code":errcode, "data":{jr_data}} - - Json::FastWriter jr_writer; - Json::Value jr_root; - - jr_root["code"] = errcode; - jr_root["data"] = jr_data; - buf = jr_writer.write(jr_root); -} - -void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode) -{ - // أ {"code":errcode} - - Json::FastWriter jr_writer; - Json::Value jr_root; - - jr_root["code"] = errcode; - buf = jr_writer.write(jr_root); -} - -void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode, const char* message) -{ - // أ {"code":errcode, "message":message} - - Json::FastWriter jr_writer; - Json::Value jr_root; - - jr_root["code"] = errcode; - jr_root["message"] = message; - buf = jr_writer.write(jr_root); -} - -void TsHttpRpc::_process_request(const ex_astr& func_cmd, const Json::Value& json_param, ex_astr& buf) -{ - if (func_cmd == "request_session") { - _rpc_func_request_session(json_param, buf); - } - else if (func_cmd == "kill_sessions") { - _rpc_func_kill_sessions(json_param, buf); - } - else if (func_cmd == "get_config") { - _rpc_func_get_config(json_param, buf); - } - else if (func_cmd == "set_config") { - _rpc_func_set_config(json_param, buf); - } - else if (func_cmd == "enc") { - _rpc_func_enc(json_param, buf); - } - else if (func_cmd == "exit") { - _rpc_func_exit(json_param, buf); - } - else { - EXLOGE("[core] rpc got unknown command: %s\n", func_cmd.c_str()); - _create_json_ret(buf, TPE_UNKNOWN_CMD); - } -} - -extern bool g_exit_flag; // ҪTS˳ı־ֹ̣ͣ߳ -void TsHttpRpc::_rpc_func_exit(const Json::Value& json_param, ex_astr& buf) -{ - // һȫ˳־ - g_exit_flag = true; - _create_json_ret(buf, TPE_OK); -} - -void TsHttpRpc::_rpc_func_get_config(const Json::Value& json_param, ex_astr& buf) -{ - Json::Value jr_data; - - ex_astr _replay_name; - ex_wstr2astr(g_env.m_replay_path, _replay_name); - jr_data["replay-path"] = _replay_name; - - jr_data["web-server-rpc"] = g_env.web_server_rpc; - - ex_astr _version; - ex_wstr2astr(TP_SERVER_VER, _version); - jr_data["version"] = _version; - - ExIniFile& ini = g_env.get_ini(); - ex_ini_sections& secs = ini.GetAllSections(); - ex_ini_sections::iterator it = secs.begin(); - for (; it != secs.end(); ++it) - { - if (it->first.length() > 9 && 0 == wcsncmp(it->first.c_str(), L"protocol-", 9)) - { - ex_wstr name; - name.assign(it->first, 9, it->first.length() - 9); - ex_astr _name; - ex_wstr2astr(name, _name); - - bool enabled = false; - it->second->GetBool(L"enabled", enabled, false); - - ex_wstr ip; - if (!it->second->GetStr(L"bind-ip", ip)) - continue; - ex_astr _ip; - ex_wstr2astr(ip, _ip); - - int port; - it->second->GetInt(L"bind-port", port, 52189); - - jr_data[_name.c_str()]["enable"] = enabled; - jr_data[_name.c_str()]["ip"] = _ip; - jr_data[_name.c_str()]["port"] = port; - } - } - - _create_json_ret(buf, TPE_OK, jr_data); -} - -void TsHttpRpc::_rpc_func_request_session(const Json::Value& 
json_param, ex_astr& buf) -{ - // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#request_session - - int conn_id = 0; - ex_rv rv = TPE_OK; - - if (json_param["conn_id"].isNull()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - if (!json_param["conn_id"].isInt()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - conn_id = json_param["conn_id"].asInt(); - if (0 == conn_id) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - TS_CONNECT_INFO* info = new TS_CONNECT_INFO; - if ((rv = ts_web_rpc_get_conn_info(conn_id, *info)) != TPE_OK) - { - _create_json_ret(buf, rv); - return; - } - -// info->ref_count = 0; -// info->ticket_start = ex_get_tick_count(); -// - // һsession-idڲظ - ex_astr sid; - if (!g_session_mgr.request_session(sid, info)) { - _create_json_ret(buf, TPE_FAILED); - return; - } - - EXLOGD("[core] rpc new session-id: %s\n", sid.c_str()); - - Json::Value jr_data; - jr_data["sid"] = sid; - - _create_json_ret(buf, TPE_OK, jr_data); -} - -void TsHttpRpc::_rpc_func_kill_sessions(const Json::Value& json_param, ex_astr& buf) { - /* - { - "sessions": ["0123456", "ABCDEF", ...] - } - */ - - if (json_param.isArray()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - if (json_param["sessions"].isNull() || !json_param["sessions"].isArray()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - Json::Value s = json_param["sessions"]; - int cnt = s.size(); - for (int i = 0; i < cnt; ++i) { - if (!s[i].isString()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - } - - EXLOGV("[core] try to kill %d sessions.\n", cnt); - ex_astr sp = s.toStyledString(); - g_tpp_mgr.kill_sessions(sp); - - _create_json_ret(buf, TPE_OK); -} - -void TsHttpRpc::_rpc_func_enc(const Json::Value& json_param, ex_astr& buf) -{ - // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#enc - // һַ [ p=plain-text, c=cipher-text ] - // : {"p":"need be encrypt"} - // ʾ: {"p":"this-is-a-password"} - // p: ַܵ - // أ - // dataе"c"Ǽܺĵbase64 - // ʾ: {"code":0, "data":{"c":"Mxs340a9r3fs+3sdf=="}} - // 󷵻أ {"code":1234} - - if (json_param.isArray()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - ex_astr plain_text; - - if (json_param["p"].isNull() || !json_param["p"].isString()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - plain_text = json_param["p"].asCString(); - if (plain_text.length() == 0) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - ex_astr cipher_text; - - if (!ts_db_field_encrypt(plain_text, cipher_text)) - { - _create_json_ret(buf, TPE_FAILED); - return; - } - - Json::Value jr_data; - jr_data["c"] = cipher_text; - _create_json_ret(buf, TPE_OK, jr_data); -} - -void TsHttpRpc::_rpc_func_set_config(const Json::Value& json_param, ex_astr& buf) -{ - // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#set_config - /* - { - "noop-timeout": 15 # Ӽ - } - */ - - if (json_param.isArray()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - if (json_param["noop_timeout"].isNull() || !json_param["noop_timeout"].isUInt()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - int noop_timeout = json_param["noop_timeout"].asUInt(); - EXLOGV("[core] set run-time config:\n"); - EXLOGV("[core] noop_timeout = %dm\n", noop_timeout); - - ex_astr sp = json_param.toStyledString(); - g_tpp_mgr.set_runtime_config(sp); - - _create_json_ret(buf, TPE_OK); -} - - -/* -void TsHttpRpc::_rpc_func_enc(const Json::Value& json_param, ex_astr& buf) -{ - // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#enc - // ַܶ [ p=plain-text, c=cipher-text 
] - // : {"p":["need be encrypt", "plain to cipher"]} - // ʾ: {"p":["password-for-A"]} - // p: ַܵ - // أ - // dataе"c"Ǽܺĵbase64 - // ʾ: {"code":0, "data":{"c":["Mxs340a9r3fs+3sdf=="]}} - // 󷵻أ {"code":1234} - - if (json_param.isArray()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - ex_astr plain_text; - - if (json_param["p"].isNull() || !json_param["p"].isArray()) - { - _create_json_ret(buf, TPE_PARAM); - return; - } - - Json::Value c; - - Json::Value p = json_param["p"]; - int cnt = p.size(); - for (int i = 0; i < cnt; ++i) - { - if (!p[i].isString()) { - _create_json_ret(buf, TPE_PARAM); - return; - } - - ex_astr p_txt = p[i].asCString(); - if (p_txt.length() == 0) { - c["c"].append(""); - } - - ex_astr c_txt; - if (!ts_db_field_encrypt(p_txt, c_txt)) - { - _create_json_ret(buf, TPE_FAILED); - return; - } - - c["c"].append(c_txt); - } - - Json::Value jr_data; - jr_data["c"] = c; - _create_json_ret(buf, TPE_OK, jr_data); -} -*/ +#include "ts_http_rpc.h" +#include "ts_ver.h" +#include "ts_env.h" +#include "ts_session.h" +#include "ts_crypto.h" +#include "ts_web_rpc.h" +#include "tp_tpp_mgr.h" + +extern TppManager g_tpp_mgr; + +#include +#include + + +#define HEXTOI(x) (isdigit(x) ? x - '0' : x - 'W') +int ts_url_decode(const char *src, int src_len, char *dst, int dst_len, int is_form_url_encoded) +{ + int i, j, a, b; + + for (i = j = 0; i < src_len && j < dst_len - 1; i++, j++) + { + if (src[i] == '%') + { + if (i < src_len - 2 && isxdigit(*(const unsigned char *)(src + i + 1)) && + isxdigit(*(const unsigned char *)(src + i + 2))) { + a = tolower(*(const unsigned char *)(src + i + 1)); + b = tolower(*(const unsigned char *)(src + i + 2)); + dst[j] = (char)((HEXTOI(a) << 4) | HEXTOI(b)); + i += 2; + } + else + { + return -1; + } + } + else if (is_form_url_encoded && src[i] == '+') + { + dst[j] = ' '; + } + else + { + dst[j] = src[i]; + } + } + + dst[j] = '\0'; /* Null-terminate the destination */ + + return i >= src_len ? j : -1; +} + +TsHttpRpc::TsHttpRpc() : + ExThreadBase("http-rpc-thread") +{ + mg_mgr_init(&m_mg_mgr, NULL); +} + +TsHttpRpc::~TsHttpRpc() +{ + mg_mgr_free(&m_mg_mgr); +} + +void TsHttpRpc::_thread_loop(void) +{ + EXLOGI("[core] TeleportServer-RPC ready on %s:%d\n", m_host_ip.c_str(), m_host_port); + + while (!m_need_stop) + { + mg_mgr_poll(&m_mg_mgr, 500); + } + + EXLOGV("[core] rpc main loop end.\n"); +} + + +bool TsHttpRpc::init(void) +{ + struct mg_connection* nc = NULL; + + m_host_ip = g_env.rpc_bind_ip; + m_host_port = g_env.rpc_bind_port; + + char addr[128] = { 0 }; + // if (0 == strcmp(m_host_ip.c_str(), "127.0.0.1") || 0 == strcmp(m_host_ip.c_str(), "localhost")) + // ex_strformat(addr, 128, ":%d", m_host_port); + // else + // ex_strformat(addr, 128, "%s:%d", m_host_ip.c_str(), m_host_port); + if (0 == strcmp(m_host_ip.c_str(), "0.0.0.0")) + ex_strformat(addr, 128, ":%d", m_host_port); + else + ex_strformat(addr, 128, "%s:%d", m_host_ip.c_str(), m_host_port); + + nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); + if (NULL == nc) + { + EXLOGE("[core] rpc listener failed to bind at %s.\n", addr); + return false; + } + + nc->user_data = this; + + mg_set_protocol_http_websocket(nc); + + // 导致内存泄露的地方(每次请求约消耗1KB内存) + // DO NOT USE MULTITHREADING OF MG. + // cpq (one of the authors of MG) commented on 3 Feb: Multithreading support has been removed. 
+ // https://github.com/cesanta/mongoose/commit/707b9ed2d6f177b3ad8787cb16a1bff90ddad992 + //mg_enable_multithreading(nc); + + return true; +} + +void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_data) +{ + struct http_message *hm = (struct http_message*)ev_data; + + TsHttpRpc* _this = (TsHttpRpc*)nc->user_data; + if (NULL == _this) + { + EXLOGE("[core] rpc invalid http request.\n"); + return; + } + + switch (ev) + { + case MG_EV_HTTP_REQUEST: + { + ex_astr ret_buf; + + ex_astr uri; + uri.assign(hm->uri.p, hm->uri.len); + + //EXLOGD("[core] rpc got request: %s\n", uri.c_str()); + + if (uri == "/rpc") + { + ex_astr method; + Json::Value json_param; + + ex_rv rv = _this->_parse_request(hm, method, json_param); + if (TPE_OK != rv) + { + EXLOGE("[core] rpc got invalid request.\n"); + _this->_create_json_ret(ret_buf, rv); + } + else + { + EXLOGD("[core] rpc got request method `%s`\n", method.c_str()); + _this->_process_request(method, json_param, ret_buf); + } + } + else + { + EXLOGE("[core] rpc got invalid request: not `rpc` uri.\n"); + _this->_create_json_ret(ret_buf, TPE_PARAM, "not a `rpc` request."); + } + + mg_printf(nc, "HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\nContent-Type: application/json\r\n\r\n%s", (int)ret_buf.length(), &ret_buf[0]); + nc->flags |= MG_F_SEND_AND_CLOSE; + } + break; + default: + break; + } +} + +ex_rv TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_cmd, Json::Value& json_param) +{ + if (NULL == req) + return TPE_PARAM; + + bool is_get = true; + if (req->method.len == 3 && 0 == memcmp(req->method.p, "GET", req->method.len)) + is_get = true; + else if (req->method.len == 4 && 0 == memcmp(req->method.p, "POST", req->method.len)) + is_get = false; + else + return TPE_HTTP_METHOD; + + ex_astr json_str; + bool need_decode = false; + if (is_get) { + json_str.assign(req->query_string.p, req->query_string.len); + need_decode = true; + } + else { + json_str.assign(req->body.p, req->body.len); + if (json_str.length() > 0 && json_str[0] == '%') + need_decode = true; + } + + if (need_decode) { + // 将参数进行 url-decode 解码 + int len = json_str.length() * 2; + ex_chars sztmp; + sztmp.resize(len); + memset(&sztmp[0], 0, len); + if (-1 == ts_url_decode(json_str.c_str(), json_str.length(), &sztmp[0], len, 0)) + return TPE_HTTP_URL_ENCODE; + + json_str = &sztmp[0]; + } + + if (0 == json_str.length()) + return TPE_PARAM; + + //Json::Reader jreader; + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = json_str.c_str(); + ex_astr err; + + //if (!jreader.parse(json_str.c_str(), json_param)) + if (!jreader->parse(str_json_begin, str_json_begin + json_str.length(), &json_param, &err)) + return TPE_JSON_FORMAT; + + if (json_param.isArray()) + return TPE_PARAM; + + if (json_param["method"].isNull() || !json_param["method"].isString()) + return TPE_PARAM; + + func_cmd = json_param["method"].asCString(); + json_param = json_param["param"]; + + return TPE_OK; +} + +void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode, const Json::Value& jr_data) +{ + // 返回: {"code":errcode, "data":{jr_data}} + + //Json::FastWriter jr_writer; + Json::Value jr_root; + jr_root["code"] = errcode; + jr_root["data"] = jr_data; + //buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); +} + +void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode) +{ 
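// ---- editor's note ----------------------------------------------------------
// The three _create_json_ret() overloads repeat the same StreamWriterBuilder
// boilerplate verbatim. A sketch of one shared serializer (hypothetical helper,
// not part of this patch; Json::writeString() is the stock jsoncpp 1.x
// convenience wrapper around newStreamWriter()):
//
//     #include <json/json.h>
//     #include <string>
//
//     static std::string json_to_compact_string(const Json::Value& v)
//     {
//         Json::StreamWriterBuilder jwb;
//         jwb["indentation"] = "";  // compact output, like the removed FastWriter
//         return Json::writeString(jwb, v);
//     }
// -----------------------------------------------------------------------------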
+ // 返回: {"code":errcode} + + //Json::FastWriter jr_writer; + Json::Value jr_root; + jr_root["code"] = errcode; + //buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); +} + +void TsHttpRpc::_create_json_ret(ex_astr& buf, int errcode, const char* message) +{ + // 返回: {"code":errcode, "message":message} + + //Json::FastWriter jr_writer; + Json::Value jr_root; + jr_root["code"] = errcode; + jr_root["message"] = message; + //buf = jr_writer.write(jr_root); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jr_root, &os); + buf = os.str(); +} + +void TsHttpRpc::_process_request(const ex_astr& func_cmd, const Json::Value& json_param, ex_astr& buf) +{ + if (func_cmd == "request_session") { + _rpc_func_request_session(json_param, buf); + } + else if (func_cmd == "kill_sessions") { + _rpc_func_kill_sessions(json_param, buf); + } + else if (func_cmd == "get_config") { + _rpc_func_get_config(json_param, buf); + } + else if (func_cmd == "set_config") { + _rpc_func_set_config(json_param, buf); + } + else if (func_cmd == "enc") { + _rpc_func_enc(json_param, buf); + } + else if (func_cmd == "exit") { + _rpc_func_exit(json_param, buf); + } + else { + EXLOGE("[core] rpc got unknown command: %s\n", func_cmd.c_str()); + _create_json_ret(buf, TPE_UNKNOWN_CMD); + } +} + +extern bool g_exit_flag; // 要求整个TS退出的标志(用于停止各个工作线程) +void TsHttpRpc::_rpc_func_exit(const Json::Value& json_param, ex_astr& buf) +{ + // 设置一个全局退出标志 + g_exit_flag = true; + _create_json_ret(buf, TPE_OK); +} + +void TsHttpRpc::_rpc_func_get_config(const Json::Value& json_param, ex_astr& buf) +{ + Json::Value jr_data; + + ex_astr _replay_name; + ex_wstr2astr(g_env.m_replay_path, _replay_name); + jr_data["replay-path"] = _replay_name; + + jr_data["web-server-rpc"] = g_env.web_server_rpc; + + ex_astr _version; + ex_wstr2astr(TP_SERVER_VER, _version); + jr_data["version"] = _version; + + ExIniFile& ini = g_env.get_ini(); + ex_ini_sections& secs = ini.GetAllSections(); + ex_ini_sections::iterator it = secs.begin(); + for (; it != secs.end(); ++it) + { + if (it->first.length() > 9 && 0 == wcsncmp(it->first.c_str(), L"protocol-", 9)) + { + ex_wstr name; + name.assign(it->first, 9, it->first.length() - 9); + ex_astr _name; + ex_wstr2astr(name, _name); + + bool enabled = false; + it->second->GetBool(L"enabled", enabled, false); + + ex_wstr ip; + if (!it->second->GetStr(L"bind-ip", ip)) + continue; + ex_astr _ip; + ex_wstr2astr(ip, _ip); + + int port; + it->second->GetInt(L"bind-port", port, 52189); + + jr_data[_name.c_str()]["enable"] = enabled; + jr_data[_name.c_str()]["ip"] = _ip; + jr_data[_name.c_str()]["port"] = port; + } + } + + _create_json_ret(buf, TPE_OK, jr_data); +} + +void TsHttpRpc::_rpc_func_request_session(const Json::Value& json_param, ex_astr& buf) +{ + // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#request_session + + int conn_id = 0; + ex_rv rv = TPE_OK; + + if (json_param["conn_id"].isNull()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + if (!json_param["conn_id"].isInt()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + conn_id = json_param["conn_id"].asInt(); + if (0 == conn_id) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + TS_CONNECT_INFO* info = new TS_CONNECT_INFO; + if ((rv = ts_web_rpc_get_conn_info(conn_id, *info)) != TPE_OK) + { + _create_json_ret(buf, rv); + delete info; + return; + } + +// 
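// ---- editor's note ----------------------------------------------------------
// Shape of a get_config reply as assembled by _rpc_func_get_config() above
// (illustrative values only; 52189 is the bind-port fallback passed to
// GetInt(), everything else here is made up):
//
//     {"code": 0,
//      "data": {"replay-path": "...",
//               "web-server-rpc": "http://127.0.0.1:7190/rpc",
//               "version": "3.5.5",
//               "ssh":    {"enable": true, "ip": "0.0.0.0", "port": 52189},
//               "telnet": {"enable": true, "ip": "0.0.0.0", "port": 52389}}}
//
// Each [protocol-xxx] ini section that carries a bind-ip contributes one "xxx"
// object to "data".
// -----------------------------------------------------------------------------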
info->ref_count = 0; +// info->ticket_start = ex_get_tick_count(); +// + // 生成一个session-id(内部会避免重复) + ex_astr sid; + if (!g_session_mgr.request_session(sid, info)) { + _create_json_ret(buf, TPE_FAILED); + delete info; + return; + } + + EXLOGD("[core] rpc new session-id: %s\n", sid.c_str()); + + Json::Value jr_data; + jr_data["sid"] = sid; + + _create_json_ret(buf, TPE_OK, jr_data); +} + +void TsHttpRpc::_rpc_func_kill_sessions(const Json::Value& json_param, ex_astr& buf) { + /* + { + "sessions": ["0123456", "ABCDEF", ...] + } + */ + + if (json_param.isArray()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + if (json_param["sessions"].isNull() || !json_param["sessions"].isArray()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + Json::Value s = json_param["sessions"]; + int cnt = s.size(); + for (int i = 0; i < cnt; ++i) { + if (!s[i].isString()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + } + + EXLOGV("[core] try to kill %d sessions.\n", cnt); + ex_astr sp = s.toStyledString(); + g_tpp_mgr.kill_sessions(sp); + + _create_json_ret(buf, TPE_OK); +} + +void TsHttpRpc::_rpc_func_enc(const Json::Value& json_param, ex_astr& buf) +{ + // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#enc + // 加密一个字符串 [ p=plain-text, c=cipher-text ] + // 入参: {"p":"need be encrypt"} + // 示例: {"p":"this-is-a-password"} + // p: 被加密的字符串 + // 返回: + // data域中的"c"的内容是加密后密文的base64编码结果 + // 示例: {"code":0, "data":{"c":"Mxs340a9r3fs+3sdf=="}} + // 错误返回: {"code":1234} + + if (json_param.isArray()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + ex_astr plain_text; + + if (json_param["p"].isNull() || !json_param["p"].isString()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + plain_text = json_param["p"].asCString(); + if (plain_text.length() == 0) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + ex_astr cipher_text; + + if (!ts_db_field_encrypt(plain_text, cipher_text)) + { + _create_json_ret(buf, TPE_FAILED); + return; + } + + Json::Value jr_data; + jr_data["c"] = cipher_text; + _create_json_ret(buf, TPE_OK, jr_data); +} + +void TsHttpRpc::_rpc_func_set_config(const Json::Value& json_param, ex_astr& buf) +{ + // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#set_config + /* + { + "noop-timeout": 15 # 按分钟计 + } + */ + + if (json_param.isArray()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + if (json_param["noop_timeout"].isNull() || !json_param["noop_timeout"].isUInt()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + int noop_timeout = json_param["noop_timeout"].asUInt(); + EXLOGV("[core] set run-time config:\n"); + EXLOGV("[core] noop_timeout = %dm\n", noop_timeout); + + ex_astr sp = json_param.toStyledString(); + g_tpp_mgr.set_runtime_config(sp); + + _create_json_ret(buf, TPE_OK); +} + + +/* +void TsHttpRpc::_rpc_func_enc(const Json::Value& json_param, ex_astr& buf) +{ + // https://github.com/tp4a/teleport/wiki/TELEPORT-CORE-JSON-RPC#enc + // 加密多个个字符串 [ p=plain-text, c=cipher-text ] + // 入参: {"p":["need be encrypt", "plain to cipher"]} + // 示例: {"p":["password-for-A"]} + // p: 被加密的字符串 + // 返回: + // data域中的"c"的内容是加密后密文的base64编码结果 + // 示例: {"code":0, "data":{"c":["Mxs340a9r3fs+3sdf=="]}} + // 错误返回: {"code":1234} + + if (json_param.isArray()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + ex_astr plain_text; + + if (json_param["p"].isNull() || !json_param["p"].isArray()) + { + _create_json_ret(buf, TPE_PARAM); + return; + } + + Json::Value c; + + Json::Value p = json_param["p"]; + int cnt = p.size(); + for (int i = 0; i < 
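// ---- editor's note ----------------------------------------------------------
// _rpc_func_request_session() above now plugs the TS_CONNECT_INFO leak with an
// explicit `delete info;` on both error paths. The same fix via RAII, as a
// sketch (assumes g_session_mgr.request_session() takes ownership of the
// record only when it returns true):
//
//     std::unique_ptr<TS_CONNECT_INFO> info(new TS_CONNECT_INFO);  // <memory>
//     if ((rv = ts_web_rpc_get_conn_info(conn_id, *info)) != TPE_OK) {
//         _create_json_ret(buf, rv);
//         return;                 // unique_ptr frees the record automatically
//     }
//     ex_astr sid;
//     if (!g_session_mgr.request_session(sid, info.get())) {
//         _create_json_ret(buf, TPE_FAILED);
//         return;
//     }
//     info.release();             // the session manager owns it from here on
// -----------------------------------------------------------------------------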
cnt; ++i) + { + if (!p[i].isString()) { + _create_json_ret(buf, TPE_PARAM); + return; + } + + ex_astr p_txt = p[i].asCString(); + if (p_txt.length() == 0) { + c["c"].append(""); + } + + ex_astr c_txt; + if (!ts_db_field_encrypt(p_txt, c_txt)) + { + _create_json_ret(buf, TPE_FAILED); + return; + } + + c["c"].append(c_txt); + } + + Json::Value jr_data; + jr_data["c"] = c; + _create_json_ret(buf, TPE_OK, jr_data); +} +*/ diff --git a/server/tp_core/core/ts_session.cpp b/server/tp_core/core/ts_session.cpp index deee94f..ae6ca97 100644 --- a/server/tp_core/core/ts_session.cpp +++ b/server/tp_core/core/ts_session.cpp @@ -1,4 +1,4 @@ -#include "ts_session.h" +#include "ts_session.h" #include "ts_env.h" #include @@ -13,6 +13,7 @@ TsSessionManager::TsSessionManager() : TsSessionManager::~TsSessionManager() { ts_connections::iterator it_conn = m_connections.begin(); for (; it_conn != m_connections.end(); ++it_conn) { + EXLOGD("[core] m_connections not clean: %s, %s\n", it_conn->first.c_str(), it_conn->second->acc_username.c_str()); delete it_conn->second; } m_connections.clear(); @@ -28,7 +29,7 @@ void TsSessionManager::_thread_loop(void) { } void TsSessionManager::_remove_expired_connect_info(void) { - // 15δӵconnect-infoᱻƳ + // 超过15秒未进行连接的connect-info会被移除 ExThreadSmartLock locker(m_lock); @@ -87,8 +88,8 @@ bool TsSessionManager::free_connect_info(const ex_astr &sid) { it->second->ref_count--; - // RDP˵ʱҪƳϢϵͳԴRDPͻڵһʱЭЭ̣ȻϻϿ֮һΣڶ֮ǰܻʾ֤Ϣûʱ䲻ܻᵼ³ʱ - // ˣǽüͣһʱ䣬öʱƳ + // 对于RDP来说,此时不要移除连接信息,系统自带RDP客户端在第一次连接时进行协议协商,然后马上会断开,之后立即重新连接一次(第二次连接之前可能会提示证书信息,如果用户长时间不操作,可能会导致超时)。 + // 因此,我们将其引用计数减低,并更新一下最后访问时间,让定时器来移除它。 if (it->second->protocol_type != TP_PROTOCOL_TYPE_RDP) { if (it->second->ref_count <= 0) { EXLOGD("[core] remove connection info, because all connections closed: %s\n", it->first.c_str()); @@ -99,7 +100,7 @@ bool TsSessionManager::free_connect_info(const ex_astr &sid) { } else { if (it->second->ref_count == 1) it->second->ref_count = 0; - it->second->ticket_start = ex_get_tick_count() + 45000; // ǽʱƶ45룬ûзRDPĵڶӣϢͻһӺ + it->second->ticket_start = ex_get_tick_count() + 45000; // 我们将时间向后移动45秒,这样如果没有发生RDP的第二次连接,这个连接信息就会在一分钟后被清除。 } @@ -133,7 +134,7 @@ bool TsSessionManager::request_session(ex_astr &sid, TS_CONNECT_INFO *info) { sid = _sid; if (info->protocol_type == TP_PROTOCOL_TYPE_RDP) { - info->ref_count = 1; // ΪRDP֮ǰܻкܳʱȷǷӡǷ֤飬Ժ׳ʱΪü+1ֹʱ + info->ref_count = 1; // 因为RDP连接之前可能会有很长时间用于确认是否连接、是否信任证书,所以很容易超时,我们认为将引用计数+1,防止因超时被清除。 char szTmp[8] = {0}; snprintf(szTmp, 8, "%02X", (unsigned char) (info->acc_username.length() + info->acc_secret.length())); sid += szTmp; diff --git a/server/tp_core/core/ts_ver.h b/server/tp_core/core/ts_ver.h index aca4ca9..4f27a43 100644 --- a/server/tp_core/core/ts_ver.h +++ b/server/tp_core/core/ts_ver.h @@ -1,6 +1,6 @@ -#ifndef __TS_SERVER_VER_H__ -#define __TS_SERVER_VER_H__ - -#define TP_SERVER_VER L"3.2.0" - -#endif // __TS_SERVER_VER_H__ +#ifndef __TS_SERVER_VER_H__ +#define __TS_SERVER_VER_H__ + +#define TP_SERVER_VER L"3.5.5" + +#endif // __TS_SERVER_VER_H__ diff --git a/server/tp_core/core/ts_web_rpc.cpp b/server/tp_core/core/ts_web_rpc.cpp index cf5b899..1ec813a 100644 --- a/server/tp_core/core/ts_web_rpc.cpp +++ b/server/tp_core/core/ts_web_rpc.cpp @@ -1,320 +1,359 @@ -#include "ts_web_rpc.h" -#include "ts_env.h" -#include "ts_crypto.h" -#include "ts_http_client.h" - -#include "../common/ts_const.h" - -#include -#include - -bool ts_web_rpc_register_core() -{ - Json::FastWriter json_writer; - Json::Value jreq; - jreq["method"] = "register_core"; - jreq["param"]["rpc"] = 
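// ---- editor's note ----------------------------------------------------------
// Timing rules of TsSessionManager, per the comments above: a connect-info not
// claimed within about 15 seconds is dropped by the timer thread; RDP entries
// start at ref_count = 1 (the stock RDP client disconnects right after protocol
// negotiation and reconnects, possibly after a long certificate prompt), and on
// release ticket_start is pushed 45 seconds ahead, giving the expected second
// RDP connection roughly a minute before cleanup. As a predicate (sketch;
// EXPIRE_MS stands in for the 15-second threshold, which is not named in this
// hunk):
//
//     bool expired = info->ref_count == 0
//                 && ex_get_tick_count() - info->ticket_start > EXPIRE_MS;
// -----------------------------------------------------------------------------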
g_env.core_server_rpc; - - ex_astr json_param; - json_param = json_writer.write(jreq); - - ex_astr param; - ts_url_encode(json_param.c_str(), param); - - ex_astr url = g_env.web_server_rpc; - url += "?"; - url += param; - - ex_astr body; - return ts_http_get(url, body); -} - -int ts_web_rpc_get_conn_info(int conn_id, TS_CONNECT_INFO& info) -{ - Json::FastWriter json_writer; - Json::Value jreq; - jreq["method"] = "get_conn_info"; - jreq["param"]["conn_id"] = conn_id; - - ex_astr json_param; - json_param = json_writer.write(jreq); - - ex_astr param; - ts_url_encode(json_param.c_str(), param); - - ex_astr url = g_env.web_server_rpc; - url += "?"; - url += param; - - ex_astr body; - if (!ts_http_get(url, body)) - { - EXLOGE("[core] get conn info from web-server failed: can not connect to web-server.\n"); - return TPE_NETWORK; - } - if (body.length() == 0) { - EXLOGE("[core] get conn info from web-server failed: got nothing.\n"); - return TPE_NETWORK; - } - - Json::Reader jreader; - Json::Value jret; - - if (!jreader.parse(body.c_str(), jret)) - return TPE_PARAM; - if (!jret.isObject()) - return TPE_PARAM; - if (!jret["data"].isObject()) - return TPE_PARAM; - - Json::Value& _jret = jret["data"]; - - if(!_jret["user_id"].isInt()) - EXLOGE("connection info: need `user_id`.\n"); - if(!_jret["host_id"].isInt()) - EXLOGE("connection info: need `host_id`.\n"); - if(!_jret["acc_id"].isInt()) - EXLOGE("connection info: need `acc_id`.\n"); - if(!_jret["conn_port"].isInt()) - EXLOGE("connection info: need `conn_port`.\n"); - if(!_jret["protocol_type"].isInt()) - EXLOGE("connection info: need `protocol_type`.\n"); - if(!_jret["protocol_sub_type"].isInt()) - EXLOGE("connection info: need `protocol_sub_type`.\n"); - if(!_jret["auth_type"].isInt()) - EXLOGE("connection info: need `auth_type`.\n"); - if (!_jret["protocol_flag"].isUInt()) - EXLOGE("connection info: need `protocol_flag`.\n"); - if (!_jret["record_flag"].isUInt()) - EXLOGE("connection info: need `record_flag`.\n"); - if (!_jret["_enc"].isInt()) - EXLOGE("connection info: need `_enc`.\n"); - if(!_jret["user_username"].isString()) - EXLOGE("connection info: need `user_username`.\n"); - if(!_jret["host_ip"].isString()) - EXLOGE("connection info: need `host_ip`.\n"); - if(!_jret["conn_ip"].isString()) - EXLOGE("connection info: need `conn_ip`.\n"); - if(!_jret["client_ip"].isString()) - EXLOGE("connection info: need `client_ip`.\n"); - if(!_jret["acc_username"].isString()) - EXLOGE("connection info: need `acc_username`.\n"); - if(!_jret["acc_secret"].isString()) - EXLOGE("connection info: need `acc_secret`.\n"); - if(!_jret["username_prompt"].isString()) - EXLOGE("connection info: need `username_prompt`.\n"); - if(!_jret["password_prompt"].isString()) - EXLOGE("connection info: need `password_prompt`.\n"); - - if ( - !_jret["user_id"].isInt() - || !_jret["host_id"].isInt() - || !_jret["acc_id"].isInt() - || !_jret["conn_port"].isInt() - || !_jret["protocol_type"].isInt() - || !_jret["protocol_sub_type"].isInt() - || !_jret["auth_type"].isInt() - || !_jret["protocol_flag"].isUInt() - || !_jret["record_flag"].isUInt() - || !_jret["_enc"].isInt() - - || !_jret["user_username"].isString() - || !_jret["host_ip"].isString() - || !_jret["conn_ip"].isString() - || !_jret["client_ip"].isString() - || !_jret["acc_username"].isString() - || !_jret["acc_secret"].isString() - || !_jret["username_prompt"].isString() - || !_jret["password_prompt"].isString() - ) - { - EXLOGE("got connection info from web-server, but not all info valid.\n"); - return TPE_PARAM; - } 
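// ---- editor's note ----------------------------------------------------------
// The field checks above are spelled out twice: once per field for logging and
// once as one big failure condition. A table-driven sketch doing both in a
// single pass (hypothetical helper; protocol_flag/record_flag would need a
// third isUInt() list, and note that the range check a few lines below,
// `conn_port >= 65535`, also rejects 65535, which is a legal TCP port):
//
//     static bool check_conn_fields(const Json::Value& j)
//     {
//         static const char* int_keys[] = {"user_id", "host_id", "acc_id",
//             "conn_port", "protocol_type", "protocol_sub_type", "auth_type", "_enc"};
//         static const char* str_keys[] = {"user_username", "host_ip", "conn_ip",
//             "client_ip", "acc_username", "acc_secret",
//             "username_prompt", "password_prompt"};
//         bool ok = true;
//         for (const char* k : int_keys)
//             if (!j[k].isInt()) { EXLOGE("connection info: need `%s`.\n", k); ok = false; }
//         for (const char* k : str_keys)
//             if (!j[k].isString()) { EXLOGE("connection info: need `%s`.\n", k); ok = false; }
//         return ok;
//     }
// -----------------------------------------------------------------------------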
- - int user_id; - int host_id; - int acc_id; - ex_astr user_username;// 뱾ӵû - ex_astr host_ip;// ԶIPֱģʽremote_host_ipͬ - ex_astr conn_ip;// ҪӵԶIPǶ˿ӳģʽΪ·IP - int conn_port;// ҪӵԶĶ˿ڣǶ˿ӳģʽΪ·Ķ˿ڣ - ex_astr client_ip; - ex_astr acc_username; // Զ˺ - ex_astr acc_secret;// Զ˺ŵ루˽Կ - ex_astr username_prompt; - ex_astr password_prompt; - int protocol_type = 0; - int protocol_sub_type = 0; - int auth_type = 0; - int protocol_flag = 0; - int record_flag = 0; - bool _enc; - - user_id = _jret["user_id"].asInt(); - host_id = _jret["host_id"].asInt(); - acc_id = _jret["acc_id"].asInt(); - user_username = _jret["user_username"].asString(); - host_ip = _jret["host_ip"].asString(); - conn_ip = _jret["conn_ip"].asString(); - conn_port = _jret["conn_port"].asInt(); - client_ip = _jret["client_ip"].asString(); - acc_username = _jret["acc_username"].asString(); - acc_secret = _jret["acc_secret"].asString(); - username_prompt = _jret["username_prompt"].asString(); - password_prompt = _jret["password_prompt"].asString(); - protocol_type = _jret["protocol_type"].asInt(); - protocol_sub_type = _jret["protocol_sub_type"].asInt(); - protocol_flag = _jret["protocol_flag"].asUInt(); - record_flag = _jret["record_flag"].asUInt(); - auth_type = _jret["auth_type"].asInt(); - _enc = _jret["_enc"].asBool(); - - - // һжϲǷϷ - // ע⣬account_idΪ-1ʾһβӡ - if (user_id <= 0 || host_id <= 0 - || user_username.length() == 0 - || host_ip.length() == 0 || conn_ip.length() == 0 || client_ip.length() == 0 - || conn_port <= 0 || conn_port >= 65535 - || acc_username.length() == 0 || acc_secret.length() == 0 - || !(protocol_type == TP_PROTOCOL_TYPE_RDP || protocol_type == TP_PROTOCOL_TYPE_SSH || protocol_type == TP_PROTOCOL_TYPE_TELNET) - || !(auth_type == TP_AUTH_TYPE_NONE || auth_type == TP_AUTH_TYPE_PASSWORD || auth_type == TP_AUTH_TYPE_PRIVATE_KEY) - ) - { - return TPE_PARAM; - } - - if (_enc) { - ex_astr _auth; - if (!ts_db_field_decrypt(acc_secret, _auth)) - return TPE_FAILED; - - acc_secret = _auth; - } - - info.user_id = user_id; - info.host_id = host_id; - info.acc_id = acc_id; - info.user_username = user_username; - info.host_ip = host_ip; - info.conn_ip = conn_ip; - info.conn_port = conn_port; - info.client_ip = client_ip; - info.acc_username = acc_username; - info.acc_secret = acc_secret; - info.username_prompt = username_prompt; - info.password_prompt = password_prompt; - info.protocol_type = protocol_type; - info.protocol_sub_type = protocol_sub_type; - info.auth_type = auth_type; - info.protocol_flag = protocol_flag; - info.record_flag = record_flag; - - return TPE_OK; -} - -bool ts_web_rpc_session_begin(TS_CONNECT_INFO& info, int& record_id) -{ - Json::FastWriter json_writer; - Json::Value jreq; - - jreq["method"] = "session_begin"; - jreq["param"]["sid"] = info.sid.c_str(); - jreq["param"]["user_id"] = info.user_id; - jreq["param"]["host_id"] = info.host_id; - jreq["param"]["acc_id"] = info.acc_id; - jreq["param"]["user_username"] = info.user_username.c_str(); - jreq["param"]["acc_username"] = info.acc_username.c_str(); - jreq["param"]["host_ip"] = info.host_ip.c_str(); - jreq["param"]["conn_ip"] = info.conn_ip.c_str(); - jreq["param"]["client_ip"] = info.client_ip.c_str(); - //jreq["param"]["sys_type"] = info.sys_type; - jreq["param"]["conn_port"] = info.conn_port; - jreq["param"]["auth_type"] = info.auth_type; - jreq["param"]["protocol_type"] = info.protocol_type; - jreq["param"]["protocol_sub_type"] = info.protocol_sub_type; - - ex_astr json_param; - json_param = json_writer.write(jreq); - - ex_astr param; - 
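// ---- editor's note ----------------------------------------------------------
// Every helper in this file uses the same transport: serialize the request,
// url-encode it, and issue an HTTP GET against the web server with the JSON as
// the query string. Factored out as a sketch (hypothetical helper; ts_url_encode
// and ts_http_get are this project's own utilities declared in its headers):
//
//     static bool web_rpc_call(const Json::Value& jreq, ex_astr& body)
//     {
//         Json::StreamWriterBuilder jwb;
//         ex_astr json_param = Json::writeString(jwb, jreq);
//         ex_astr param;
//         ts_url_encode(json_param.c_str(), param);
//         ex_astr url = g_env.web_server_rpc;
//         url += "?";
//         url += param;
//         return ts_http_get(url, body);
//     }
// -----------------------------------------------------------------------------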
ts_url_encode(json_param.c_str(), param); - - ex_astr url = g_env.web_server_rpc; - url += "?"; - url += param; - - ex_astr body; - if (!ts_http_get(url, body)) - { - // EXLOGV("request `rpc::session_begin` from web return: "); - // EXLOGV(body.c_str()); - // EXLOGV("\n"); - return false; - } - - Json::Reader jreader; - Json::Value jret; - - if (!jreader.parse(body.c_str(), jret)) - return false; - if (!jret.isObject()) - return false; - if (!jret["data"].isObject()) - return false; - if (!jret["data"]["rid"].isUInt()) - return false; - - record_id = jret["data"]["rid"].asUInt(); - - return true; -} - -bool ts_web_rpc_session_update(int record_id, int protocol_sub_type, int state) { - Json::FastWriter json_writer; - Json::Value jreq; - jreq["method"] = "session_update"; - jreq["param"]["rid"] = record_id; - jreq["param"]["protocol_sub_type"] = protocol_sub_type; - jreq["param"]["code"] = state; - - ex_astr json_param; - json_param = json_writer.write(jreq); - - ex_astr param; - ts_url_encode(json_param.c_str(), param); - - ex_astr url = g_env.web_server_rpc; - url += "?"; - url += param; - - ex_astr body; - return ts_http_get(url, body); -} - - -//session -bool ts_web_rpc_session_end(const char* sid, int record_id, int ret_code) -{ - // TODO: ָsidصĻỰüһ0ʱ٣ - - Json::FastWriter json_writer; - Json::Value jreq; - jreq["method"] = "session_end"; - jreq["param"]["rid"] = record_id; - jreq["param"]["code"] = ret_code; - - ex_astr json_param; - json_param = json_writer.write(jreq); - - ex_astr param; - ts_url_encode(json_param.c_str(), param); - - ex_astr url = g_env.web_server_rpc; - url += "?"; - url += param; - - ex_astr body; - return ts_http_get(url, body); -} +#include "ts_web_rpc.h" +#include "ts_env.h" +#include "ts_crypto.h" +#include "ts_http_client.h" + +#include "../common/ts_const.h" + +#include +#include + +bool ts_web_rpc_register_core() +{ + //Json::FastWriter json_writer; + Json::Value jreq; + jreq["method"] = "register_core"; + jreq["param"]["rpc"] = g_env.core_server_rpc; + + ex_astr json_param; + //json_param = json_writer.write(jreq); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jreq, &os); + json_param = os.str(); + + ex_astr param; + ts_url_encode(json_param.c_str(), param); + + ex_astr url = g_env.web_server_rpc; + url += "?"; + url += param; + + ex_astr body; + return ts_http_get(url, body); +} + +int ts_web_rpc_get_conn_info(int conn_id, TS_CONNECT_INFO& info) +{ + //Json::FastWriter json_writer; + Json::Value jreq; + jreq["method"] = "get_conn_info"; + jreq["param"]["conn_id"] = conn_id; + + ex_astr json_param; + //json_param = json_writer.write(jreq); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jreq, &os); + json_param = os.str(); + + ex_astr param; + ts_url_encode(json_param.c_str(), param); + + ex_astr url = g_env.web_server_rpc; + url += "?"; + url += param; + + ex_astr body; + if (!ts_http_get(url, body)) + { + EXLOGE("[core] get conn info from web-server failed: can not connect to web-server.\n"); + return TPE_NETWORK; + } + if (body.length() == 0) { + EXLOGE("[core] get conn info from web-server failed: got nothing.\n"); + return TPE_NETWORK; + } + + //Json::Reader jreader; + Json::Value jret; + + //if (!jreader.parse(body.c_str(), jret)) + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = body.c_str(); + ex_astr err; + + //if (!jreader.parse(func_args.c_str(), 
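// ---- editor's note ----------------------------------------------------------
// Read-side counterpart of the writer pattern: the Json::Reader calls removed
// in this patch become CharReaderBuilder/CharReader. (The commented-out line
// just above still mentions `func_args`/`jsRoot`, names carried over from a
// different call site.) A reusable sketch (hypothetical helper; needs <memory>):
//
//     static bool parse_json(const ex_astr& text, Json::Value& out)
//     {
//         Json::CharReaderBuilder jcrb;
//         std::unique_ptr<Json::CharReader> reader(jcrb.newCharReader());
//         ex_astr err;
//         const char* begin = text.c_str();
//         return reader->parse(begin, begin + text.length(), &out, &err);
//     }
// -----------------------------------------------------------------------------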
jsRoot)) { + if (!jreader->parse(str_json_begin, str_json_begin + body.length(), &jret, &err)) + return TPE_PARAM; + if (!jret.isObject()) + return TPE_PARAM; + if (!jret["data"].isObject()) + return TPE_PARAM; + + Json::Value& _jret = jret["data"]; + + if(!_jret["user_id"].isInt()) + EXLOGE("connection info: need `user_id`.\n"); + if(!_jret["host_id"].isInt()) + EXLOGE("connection info: need `host_id`.\n"); + if(!_jret["acc_id"].isInt()) + EXLOGE("connection info: need `acc_id`.\n"); + if(!_jret["conn_port"].isInt()) + EXLOGE("connection info: need `conn_port`.\n"); + if(!_jret["protocol_type"].isInt()) + EXLOGE("connection info: need `protocol_type`.\n"); + if(!_jret["protocol_sub_type"].isInt()) + EXLOGE("connection info: need `protocol_sub_type`.\n"); + if(!_jret["auth_type"].isInt()) + EXLOGE("connection info: need `auth_type`.\n"); + if (!_jret["protocol_flag"].isUInt()) + EXLOGE("connection info: need `protocol_flag`.\n"); + if (!_jret["record_flag"].isUInt()) + EXLOGE("connection info: need `record_flag`.\n"); + if (!_jret["_enc"].isInt()) + EXLOGE("connection info: need `_enc`.\n"); + if(!_jret["user_username"].isString()) + EXLOGE("connection info: need `user_username`.\n"); + if(!_jret["host_ip"].isString()) + EXLOGE("connection info: need `host_ip`.\n"); + if(!_jret["conn_ip"].isString()) + EXLOGE("connection info: need `conn_ip`.\n"); + if(!_jret["client_ip"].isString()) + EXLOGE("connection info: need `client_ip`.\n"); + if(!_jret["acc_username"].isString()) + EXLOGE("connection info: need `acc_username`.\n"); + if(!_jret["acc_secret"].isString()) + EXLOGE("connection info: need `acc_secret`.\n"); + if(!_jret["username_prompt"].isString()) + EXLOGE("connection info: need `username_prompt`.\n"); + if(!_jret["password_prompt"].isString()) + EXLOGE("connection info: need `password_prompt`.\n"); + + if ( + !_jret["user_id"].isInt() + || !_jret["host_id"].isInt() + || !_jret["acc_id"].isInt() + || !_jret["conn_port"].isInt() + || !_jret["protocol_type"].isInt() + || !_jret["protocol_sub_type"].isInt() + || !_jret["auth_type"].isInt() + || !_jret["protocol_flag"].isUInt() + || !_jret["record_flag"].isUInt() + || !_jret["_enc"].isInt() + + || !_jret["user_username"].isString() + || !_jret["host_ip"].isString() + || !_jret["conn_ip"].isString() + || !_jret["client_ip"].isString() + || !_jret["acc_username"].isString() + || !_jret["acc_secret"].isString() + || !_jret["username_prompt"].isString() + || !_jret["password_prompt"].isString() + ) + { + EXLOGE("got connection info from web-server, but not all info valid.\n"); + return TPE_PARAM; + } + + int user_id; + int host_id; + int acc_id; + ex_astr user_username;// 申请本次连接的用户名 + ex_astr host_ip;// 真正的远程主机IP(如果是直接连接模式,则与remote_host_ip相同) + ex_astr conn_ip;// 要连接的远程主机的IP(如果是端口映射模式,则为路由主机的IP) + int conn_port;// 要连接的远程主机的端口(如果是端口映射模式,则为路由主机的端口) + ex_astr client_ip; + ex_astr acc_username; // 远程主机的账号 + ex_astr acc_secret;// 远程主机账号的密码(或者私钥) + ex_astr username_prompt; + ex_astr password_prompt; + int protocol_type = 0; + int protocol_sub_type = 0; + int auth_type = 0; + int protocol_flag = 0; + int record_flag = 0; + bool _enc; + + user_id = _jret["user_id"].asInt(); + host_id = _jret["host_id"].asInt(); + acc_id = _jret["acc_id"].asInt(); + user_username = _jret["user_username"].asString(); + host_ip = _jret["host_ip"].asString(); + conn_ip = _jret["conn_ip"].asString(); + conn_port = _jret["conn_port"].asInt(); + client_ip = _jret["client_ip"].asString(); + acc_username = _jret["acc_username"].asString(); + acc_secret = 
_jret["acc_secret"].asString(); + username_prompt = _jret["username_prompt"].asString(); + password_prompt = _jret["password_prompt"].asString(); + protocol_type = _jret["protocol_type"].asInt(); + protocol_sub_type = _jret["protocol_sub_type"].asInt(); + protocol_flag = _jret["protocol_flag"].asUInt(); + record_flag = _jret["record_flag"].asUInt(); + auth_type = _jret["auth_type"].asInt(); + _enc = _jret["_enc"].asBool(); + + + // 进一步判断参数是否合法 + // 注意,account_id可以为-1,表示这是一次测试连接。 + if (user_id <= 0 || host_id <= 0 + || user_username.length() == 0 + || host_ip.length() == 0 || conn_ip.length() == 0 || client_ip.length() == 0 + || conn_port <= 0 || conn_port >= 65535 + || acc_username.length() == 0 || acc_secret.length() == 0 + || !(protocol_type == TP_PROTOCOL_TYPE_RDP || protocol_type == TP_PROTOCOL_TYPE_SSH || protocol_type == TP_PROTOCOL_TYPE_TELNET) + || !(auth_type == TP_AUTH_TYPE_NONE || auth_type == TP_AUTH_TYPE_PASSWORD || auth_type == TP_AUTH_TYPE_PRIVATE_KEY) + ) + { + return TPE_PARAM; + } + + if (_enc) { + ex_astr _auth; + if (!ts_db_field_decrypt(acc_secret, _auth)) + return TPE_FAILED; + + acc_secret = _auth; + } + + info.user_id = user_id; + info.host_id = host_id; + info.acc_id = acc_id; + info.user_username = user_username; + info.host_ip = host_ip; + info.conn_ip = conn_ip; + info.conn_port = conn_port; + info.client_ip = client_ip; + info.acc_username = acc_username; + info.acc_secret = acc_secret; + info.username_prompt = username_prompt; + info.password_prompt = password_prompt; + info.protocol_type = protocol_type; + info.protocol_sub_type = protocol_sub_type; + info.auth_type = auth_type; + info.protocol_flag = protocol_flag; + info.record_flag = record_flag; + + return TPE_OK; +} + +bool ts_web_rpc_session_begin(TS_CONNECT_INFO& info, int& record_id) +{ + //Json::FastWriter json_writer; + Json::Value jreq; + + jreq["method"] = "session_begin"; + jreq["param"]["sid"] = info.sid.c_str(); + jreq["param"]["user_id"] = info.user_id; + jreq["param"]["host_id"] = info.host_id; + jreq["param"]["acc_id"] = info.acc_id; + jreq["param"]["user_username"] = info.user_username.c_str(); + jreq["param"]["acc_username"] = info.acc_username.c_str(); + jreq["param"]["host_ip"] = info.host_ip.c_str(); + jreq["param"]["conn_ip"] = info.conn_ip.c_str(); + jreq["param"]["client_ip"] = info.client_ip.c_str(); + //jreq["param"]["sys_type"] = info.sys_type; + jreq["param"]["conn_port"] = info.conn_port; + jreq["param"]["auth_type"] = info.auth_type; + jreq["param"]["protocol_type"] = info.protocol_type; + jreq["param"]["protocol_sub_type"] = info.protocol_sub_type; + + ex_astr json_param; + //json_param = json_writer.write(jreq); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jreq, &os); + json_param = os.str(); + + ex_astr param; + ts_url_encode(json_param.c_str(), param); + + ex_astr url = g_env.web_server_rpc; + url += "?"; + url += param; + + ex_astr body; + if (!ts_http_get(url, body)) + { + // EXLOGV("request `rpc::session_begin` from web return: "); + // EXLOGV(body.c_str()); + // EXLOGV("\n"); + return false; + } + + //Json::Reader jreader; + Json::Value jret; + + //if (!jreader.parse(body.c_str(), jret)) + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = body.c_str(); + ex_astr err; + + //if (!jreader.parse(func_args.c_str(), jsRoot)) { + if (!jreader->parse(str_json_begin, str_json_begin + body.length(), &jret, &err)) + return false; + if 
(!jret.isObject()) + return false; + if (!jret["data"].isObject()) + return false; + if (!jret["data"]["rid"].isUInt()) + return false; + + record_id = jret["data"]["rid"].asUInt(); + + return true; +} + +bool ts_web_rpc_session_update(int record_id, int protocol_sub_type, int state) { + //Json::FastWriter json_writer; + Json::Value jreq; + jreq["method"] = "session_update"; + jreq["param"]["rid"] = record_id; + jreq["param"]["protocol_sub_type"] = protocol_sub_type; + jreq["param"]["code"] = state; + + ex_astr json_param; + //json_param = json_writer.write(jreq); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jreq, &os); + json_param = os.str(); + + ex_astr param; + ts_url_encode(json_param.c_str(), param); + + ex_astr url = g_env.web_server_rpc; + url += "?"; + url += param; + + ex_astr body; + return ts_http_get(url, body); +} + + +//session 结束 +bool ts_web_rpc_session_end(const char* sid, int record_id, int ret_code) +{ + // TODO: 对指定的sid相关的会话的引用计数减一(但减到0时销毁) + + //Json::FastWriter json_writer; + Json::Value jreq; + jreq["method"] = "session_end"; + jreq["param"]["rid"] = record_id; + jreq["param"]["code"] = ret_code; + + ex_astr json_param; + //json_param = json_writer.write(jreq); + Json::StreamWriterBuilder jwb; + std::unique_ptr jwriter(jwb.newStreamWriter()); + ex_aoss os; + jwriter->write(jreq, &os); + json_param = os.str(); + + ex_astr param; + ts_url_encode(json_param.c_str(), param); + + ex_astr url = g_env.web_server_rpc; + url += "?"; + url += param; + + ex_astr body; + return ts_http_get(url, body); +} diff --git a/server/tp_core/protocol/ssh/CMakeLists.txt b/server/tp_core/protocol/ssh/CMakeLists.txt index 98f736b..91934b4 100644 --- a/server/tp_core/protocol/ssh/CMakeLists.txt +++ b/server/tp_core/protocol/ssh/CMakeLists.txt @@ -3,13 +3,12 @@ cmake_minimum_required(VERSION 3.5) MESSAGE(STATUS "=======================================================") MESSAGE(STATUS " libtpssh") MESSAGE(STATUS "=======================================================") -#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}") -#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}") include(../../../../CMakeCfg.txt) -set(CMAKE_CXX_FLAGS "-fPIC") -set(CMAKE_C_FLAGS "-fPIC") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC") + aux_source_directory(. 
DIR_SSH_SRCS) aux_source_directory(../../common DIR_SSH_SRCS) @@ -26,16 +25,31 @@ include_directories( ../../../../external/jsoncpp/include ) -include_directories( - ${TP_EXTERNAL_RELEASE_DIR}/include -) -link_directories(${TP_EXTERNAL_RELEASE_DIR}/lib) +if (OS_LINUX) + include_directories( + ${TP_EXTERNAL_RELEASE_DIR}/include + ) + link_directories( + ${TP_EXTERNAL_RELEASE_DIR}/lib + ${TP_EXTERNAL_RELEASE_DIR}/lib64 + ) +elseif (OS_MACOS) + include_directories( + /usr/local/opt/openssl/include + ${TP_EXTERNAL_RELEASE_DIR}/include + ) + link_directories( + /usr/local/opt/openssl/lib + ${TP_EXTERNAL_RELEASE_DIR}/lib + ) +endif () + add_library(tpssh SHARED ${DIR_SSH_SRCS}) if (OS_LINUX) - target_link_libraries(tpssh ssh ssl crypto mbedx509 mbedtls mbedcrypto dl pthread rt util) + target_link_libraries(tpssh ssh ssl crypto mbedx509 mbedtls mbedcrypto z dl pthread rt util) elseif (OS_MACOS) - target_link_libraries(tpssh ssh ssl crypto mbedx509 mbedtls mbedcrypto dl pthread util) + target_link_libraries(tpssh ssh ssl crypto mbedx509 mbedtls mbedcrypto z dl pthread util) endif() diff --git a/server/tp_core/protocol/ssh/ssh_proxy.cpp b/server/tp_core/protocol/ssh/ssh_proxy.cpp index 3a8f4d4..330ccaf 100644 --- a/server/tp_core/protocol/ssh/ssh_proxy.cpp +++ b/server/tp_core/protocol/ssh/ssh_proxy.cpp @@ -86,7 +86,7 @@ void SshProxy::kill_sessions(const ex_astrs &sessions) { for (size_t i = 0; i < sessions.size(); ++i) { if (it->first->sid() == sessions[i]) { EXLOGW("[ssh] try to kill %s\n", sessions[i].c_str()); - it->first->check_noop_timeout(0, 0); // + it->first->check_noop_timeout(0, 0); // 立即结束 } } } @@ -96,13 +96,15 @@ void SshProxy::_thread_loop() { EXLOGI("[ssh] TeleportServer-SSH ready on %s:%d\n", m_host_ip.c_str(), m_host_port); for (;;) { - // ע⣬ssh_new()ָ룬ֹͣ־ڲͷˣָ뽻SshSessionʵʱͷš + // 注意,ssh_new()出来的指针,如果遇到停止标志,本函数内部就释放了,否则这个指针交给了SshSession类实例管理,其析构时会释放。 ssh_session sess_to_client = ssh_new(); - // int flag = SSH_LOG_FUNCTIONS; - // ssh_options_set(sess_to_client, SSH_OPTIONS_LOG_VERBOSITY, &flag); +// #ifdef EX_DEBUG +// int flag = SSH_LOG_FUNCTIONS; +// ssh_options_set(sess_to_client, SSH_OPTIONS_LOG_VERBOSITY, &flag); +// #endif - ssh_set_blocking(sess_to_client, 1); + //ssh_set_blocking(sess_to_client, 1); struct sockaddr_storage sock_client; char ip[32] = {0}; @@ -145,14 +147,14 @@ void SshProxy::_thread_loop() { sess->start(); } - // ȴй߳˳ + // 等待所有工作线程退出 //m_thread_mgr.stop_all(); { ExThreadSmartLock locker(m_lock); ts_ssh_sessions::iterator it = m_sessions.begin(); for (; it != m_sessions.end(); ++it) { - it->first->check_noop_timeout(0, 0); // + it->first->check_noop_timeout(0, 0); // 立即结束 } } @@ -173,7 +175,7 @@ void SshProxy::_on_stop() { ExThreadBase::_on_stop(); if (m_is_running) { - // һͨķʽеļһ + // 用一个变通的方式来结束阻塞中的监听,就是连接一下它。 ex_astr host_ip = m_host_ip; if (host_ip == "0.0.0.0") host_ip = "127.0.0.1"; @@ -185,14 +187,17 @@ void SshProxy::_on_stop() { int _timeout_us = 10; ssh_options_set(_session, SSH_OPTIONS_TIMEOUT, &_timeout_us); ssh_connect(_session); + ssh_disconnect(_session); ssh_free(_session); + + ex_sleep_ms(100); } // m_thread_mgr.stop_all(); } void SshProxy::session_finished(SshSession *sess) { - // TODO: ģ㱨˻ỰֹԼٶӦϢü + // TODO: 向核心模块汇报此会话终止,以减少对应连接信息的引用计数 ExThreadSmartLock locker(m_lock); ts_ssh_sessions::iterator it = m_sessions.find(sess); diff --git a/server/tp_core/protocol/ssh/ssh_recorder.cpp b/server/tp_core/protocol/ssh/ssh_recorder.cpp index 9d66b86..0190493 100644 --- a/server/tp_core/protocol/ssh/ssh_recorder.cpp +++ 
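// ---- editor's note ----------------------------------------------------------
// ssh_proxy.cpp above stops its blocking accept loop by briefly connecting to
// its own listener (now followed by ssh_disconnect() and a 100 ms grace sleep).
// The same wake-up trick with plain BSD sockets, for reference (POSIX-only
// sketch, error handling omitted):
//
//     #include <sys/socket.h>
//     #include <netinet/in.h>
//     #include <arpa/inet.h>
//     #include <unistd.h>
//
//     static void poke_listener(const char* ip, int port)
//     {
//         int s = socket(AF_INET, SOCK_STREAM, 0);
//         sockaddr_in sa = {};
//         sa.sin_family = AF_INET;
//         sa.sin_port = htons((uint16_t)port);
//         inet_pton(AF_INET, ip, &sa.sin_addr);
//         connect(s, (sockaddr*)&sa, sizeof(sa));  // unblocks the accept()
//         close(s);
//     }
// -----------------------------------------------------------------------------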
b/server/tp_core/protocol/ssh/ssh_recorder.cpp @@ -1,4 +1,4 @@ -#include "ssh_recorder.h" +#include "ssh_recorder.h" //#include static ex_u8 TPP_RECORD_MAGIC[4] = {'T', 'P', 'P', 'R'}; @@ -8,7 +8,8 @@ TppSshRec::TppSshRec() { memset(&m_head, 0, sizeof(TS_RECORD_HEADER)); memcpy((ex_u8 *) (&m_head.info.magic), TPP_RECORD_MAGIC, sizeof(ex_u32)); - m_head.info.ver = 0x03; + m_head.info.ver = 0x04; + m_head.info.type = TS_TPPR_TYPE_SSH; m_header_changed = false; m_save_full_header = false; @@ -40,7 +41,7 @@ bool TppSshRec::_on_begin(const TPP_CONNECT_INFO *info) { } bool TppSshRec::_on_end() { - // ʣδдݣдļС + // 如果还有剩下未写入的数据,写入文件中。 save_record(); if (m_file_info != NULL) @@ -73,13 +74,14 @@ void TppSshRec::record(ex_u8 type, const ex_u8 *data, size_t size) { if (m_start_time > 0) { pkg.time_ms = (ex_u32) (ex_get_tick_count() - m_start_time); m_head.info.time_ms = pkg.time_ms; + m_header_changed = true; } m_cache.append((ex_u8 *) &pkg, sizeof(TS_RECORD_PKG)); m_cache.append(data, size); - m_head.info.packages++; - m_header_changed = true; + //m_head.info.packages++; + //m_header_changed = true; } void TppSshRec::record_win_size_startup(int width, int height) { @@ -95,7 +97,7 @@ void TppSshRec::record_win_size_change(int width, int height) { record(TS_RECORD_TYPE_SSH_TERM_SIZE, (ex_u8 *) &pkg, sizeof(TS_RECORD_WIN_SIZE)); } -// Ϊ¼طźʷܹӦֱ¼ĶӦʱ㣩¼ݰķʽ¼ʱƫƣǾʱ䡣 +// 为了录像回放和命令历史能够对应(比如点击命令直接跳到录像的对应时点),仿照录像数据包的方式记录相对时间偏移,而不是绝对时间。 void TppSshRec::record_command(int flag, const ex_astr &cmd) { char szTime[100] = {0}; #ifdef EX_OS_WIN32 diff --git a/server/tp_core/protocol/ssh/ssh_session.cpp b/server/tp_core/protocol/ssh/ssh_session.cpp index 0585442..8b35d89 100644 --- a/server/tp_core/protocol/ssh/ssh_session.cpp +++ b/server/tp_core/protocol/ssh/ssh_session.cpp @@ -129,7 +129,7 @@ void SshSession::_record_end(TP_SSH_CHANNEL_PAIR *cp) { if (cp->db_id > 0) { //EXLOGD("[ssh] [channel:%d] channel end with code: %d\n", cp->channel_id, cp->state); - // Ựûз״̬Ϊ¼´ֵ + // 如果会话过程中没有发生错误,则将其状态改为结束,否则记录下错误值 if (cp->state == TP_SESS_STAT_RUNNING || cp->state == TP_SESS_STAT_STARTED) cp->state = TP_SESS_STAT_END; @@ -257,7 +257,7 @@ void SshSession::_run(void) { int err = SSH_OK; - // ȫӣԿ + // 安全连接(密钥交换) err = ssh_handle_key_exchange(m_cli_session); if (err != SSH_OK) { EXLOGE("[ssh] key exchange with client failed: %s\n", ssh_get_error(m_cli_session)); @@ -275,7 +275,7 @@ void SshSession::_run(void) { return; } - // ֤һͨ + // 认证,并打开一个通道 while (!(m_is_logon && !m_channels.empty())) { if (m_have_error) break; @@ -296,7 +296,7 @@ void SshSession::_run(void) { EXLOGW("[ssh] authenticated and got a channel.\n"); - // ˫Ѿˣʼת + // 现在双方的连接已经建立好了,开始转发 ssh_event_add_session(event_loop, m_srv_session); do { //err = ssh_event_dopoll(event_loop, 5000); @@ -333,11 +333,11 @@ void SshSession::_run(void) { ssh_event_free(event_loop); - // һSSHv1һSSHv2ͬһevent_loopʱSSHv1ղݣŵѭʱSSHv2ò - // ԣSSHv1ԶӺ󣬵shell֮󣬾ͽһֶȡݵѭʹssh_event_dopoll()ˡ + // 如果一边是走SSHv1,另一边是SSHv2,放在同一个event_loop时,SSHv1会收不到数据,放到循环中时,SSHv2得不到数据 + // 所以,当SSHv1的远程主机连接后,到建立好shell环境之后,就进入另一种读取数据的循环,不再使用ssh_event_dopoll()了。 if (m_ssh_ver == 1) { - tp_channels::iterator it = m_channels.begin(); // SSHv1ֻܴһchannel + tp_channels::iterator it = m_channels.begin(); // SSHv1只能打开一个channel ssh_channel cli = (*it)->cli_channel; ssh_channel srv = (*it)->srv_channel; @@ -447,7 +447,7 @@ int SshSession::_on_auth_password_request(ssh_session session, const char *user, EXLOGV("[ssh] try to connect to real SSH server %s:%d\n", _this->m_conn_ip.c_str(), _this->m_conn_port); _this->m_srv_session = 
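// ---- editor's note ----------------------------------------------------------
// Implied layout of a session record file, from the TppSshRec changes above
// (the struct fields live in the project's shared headers, not shown in this
// patch): a fixed TS_RECORD_HEADER opening with the magic "TPPR", info.ver
// (bumped 0x03 -> 0x04) and the new info.type = TS_TPPR_TYPE_SSH, followed by
// a stream of [TS_RECORD_PKG | payload], where pkg.time_ms is a millisecond
// offset from session start. The per-package counter is dropped; instead the
// header's running time_ms is refreshed (m_header_changed) on every record()
// call, which keeps replay and the command history aligned on the same clock.
// -----------------------------------------------------------------------------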
ssh_new(); - ssh_set_blocking(_this->m_srv_session, 1); +// ssh_set_blocking(_this->m_srv_session, 1); ssh_options_set(_this->m_srv_session, SSH_OPTIONS_HOST, _this->m_conn_ip.c_str()); int port = (int) _this->m_conn_port; @@ -460,12 +460,16 @@ int SshSession::_on_auth_password_request(ssh_session session, const char *user, // ssh_options_set(_this->m_srv_session, SSH_OPTIONS_LOG_VERBOSITY, &flag); //#endif + int _timeout_cli = 120; // 120 sec. + ssh_options_set(_this->m_cli_session, SSH_OPTIONS_TIMEOUT, &_timeout_cli); + if (_this->m_auth_type != TP_AUTH_TYPE_NONE) ssh_options_set(_this->m_srv_session, SSH_OPTIONS_USER, _this->m_acc_name.c_str()); - // default timeout is 10 seconds, it is too short for connect progress, so set it to 60 sec. - int _timeout = 60; // 60 sec. + // default timeout is 10 seconds, it is too short for connect progress, so set it to 120 sec. + // usually when sshd config to UseDNS. + int _timeout = 120; // 120 sec. ssh_options_set(_this->m_srv_session, SSH_OPTIONS_TIMEOUT, &_timeout); int rc = 0; @@ -477,14 +481,19 @@ int SshSession::_on_auth_password_request(ssh_session session, const char *user, return SSH_AUTH_ERROR; } + if(ssh_is_blocking(_this->m_cli_session)) + EXLOGD("[ssh] client session is blocking.\n"); + if(ssh_is_blocking(_this->m_srv_session)) + EXLOGD("[ssh] server session is blocking.\n"); + // once the server are connected, change the timeout back to default. - _timeout = 30; // in seconds. + _timeout = 120; // in seconds. ssh_options_set(_this->m_srv_session, SSH_OPTIONS_TIMEOUT, &_timeout); // get ssh version of host, v1 or v2 // TODO: libssh-0.8.5 does not support sshv1 anymore. _this->m_ssh_ver = ssh_get_version(_this->m_srv_session); - EXLOGW("[ssh] real host is SSHv%d\n", _this->m_ssh_ver); + //EXLOGW("[ssh] real host is SSHv%d\n", _this->m_ssh_ver); #if 0 // check supported auth type by host @@ -634,13 +643,13 @@ int SshSession::_on_auth_password_request(ssh_session session, const char *user, } ssh_channel SshSession::_on_new_channel_request(ssh_session session, void *userdata) { - // ͻ˳ԴһͨȻͨͨշݣ + // 客户端尝试打开一个通道(然后才能通过这个通道发控制命令或者收发数据) EXLOGV("[ssh] client open channel\n"); SshSession *_this = (SshSession *) userdata; - // TODO: ͻTPʹõSSHv2Э飬Ϊʼʱ֪ԶDzSSHv1 - // ˴˴ΪͻֱԶЩһֱʱSecureCRTĿ¡ỰܻΪΪӵSSHv1ԶӣǴͨ + // TODO: 客户端与TP连接使用的总是SSHv2协议,因为最开始连接时还不知道真正的远程主机是不是SSHv1。 + // 因此此处行为与客户端直连远程主机有些不一样。直连时,SecureCRT的克隆会话功能会因为以为连接的是SSHv1而自动重新连接,而不是打开新通道。 if (_this->m_ssh_ver == 1 && _this->m_channels.size() != 0) { EXLOGE("[ssh] SSH1 supports only one execution channel. 
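// ---- editor's note ----------------------------------------------------------
// On the timeout changes above: libssh documents SSH_OPTIONS_TIMEOUT as a
// pointer to a `long` count of seconds, while this file passes int; worth
// verifying on LP64 targets. The jump from 60 s to 120 s is deliberate: an
// sshd configured with `UseDNS yes` can stall through a full reverse-DNS
// timeout before answering. Sketch of the safer form (srv_session standing in
// for _this->m_srv_session):
//
//     long timeout_sec = 120;
//     ssh_options_set(srv_session, SSH_OPTIONS_TIMEOUT, &timeout_sec);
// -----------------------------------------------------------------------------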
One has already been opened.\n"); return NULL; @@ -653,7 +662,7 @@ ssh_channel SshSession::_on_new_channel_request(ssh_session session, void *userd } ssh_set_channel_callbacks(cli_channel, &_this->m_cli_channel_cb); - // ҲҪķһͨת + // 我们也要向真正的服务器申请打开一个通道,来进行转发 ssh_channel srv_channel = ssh_channel_new(_this->m_srv_session); if (srv_channel == NULL) { EXLOGE("[ssh] can not create channel for server.\n"); @@ -682,7 +691,7 @@ ssh_channel SshSession::_on_new_channel_request(ssh_session session, void *userd return NULL; } - // ͻ˺ͷ˵ͨ + // 将客户端和服务端的通道关联起来 { ExThreadSmartLock locker(_this->m_lock); _this->m_channels.push_back(cp); @@ -794,7 +803,7 @@ int SshSession::_on_client_channel_data(ssh_session session, ssh_channel channel SshSession *_this = (SshSession *) userdata; - // ǰ߳ڽշ˷صݣֱӷأŻٷʹݵ + // 当前线程正在接收服务端返回的数据,因此我们直接返回,这样紧跟着会重新再发送此数据的 if (_this->m_recving_from_srv) { // EXLOGD("recving from srv...try again later...\n"); return 0; @@ -815,14 +824,14 @@ int SshSession::_on_client_channel_data(ssh_session session, ssh_channel channel int _len = len; if (cp->type == TS_SSH_CHANNEL_TYPE_SHELL) { - // ȡֱʾʾ֮ǰͿͻݵˣ־¼ҡ + // 在收取服务端数据直到显示命令行提示符之前,不允许发送客户端数据到服务端,避免日志记录混乱。 if (!cp->server_ready) { _this->m_recving_from_cli = false; return 0; } - // Բִ֣ rz - // xxxx ûճıǽΪÿһзһݰ + // 不可以拆分!!否则执行 rz 命令会出错! + // xxxx 如果用户复制粘贴多行文本,我们将其拆分为每一行发送一次数据包 // for (unsigned int i = 0; i < len; ++i) { // if (((ex_u8 *) data)[i] == 0x0d) { // _len = i + 1; @@ -889,7 +898,7 @@ int SshSession::_on_client_channel_subsystem_request(ssh_session session, ssh_ch cp->last_access_timestamp = (ex_u32) time(NULL); - // Ŀǰֻ֧SFTPϵͳ + // 目前只支持SFTP子系统 if (strcmp(subsystem, "sftp") != 0) { EXLOGE("[ssh] support `sftp` subsystem only, but got `%s`.\n", subsystem); cp->state = TP_SESS_STAT_ERR_UNSUPPORT_PROTOCOL; @@ -961,7 +970,7 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel int ret = 0; - // յһ˷صʱ֮ǰʾһЩԶϢ + // 收到第一包服务端返回的数据时,在输出数据之前显示一些自定义的信息 #if 1 if (!is_stderr && cp->is_first_server_data) { cp->is_first_server_data = false; @@ -988,13 +997,15 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel "\r\n"\ "%s\r\n"\ "Teleport SSH Bastion Server...\r\n"\ - " - teleport to %s:%d\r\n"\ + " - teleport to %s:%d [%d]\r\n"\ " - authroized by %s\r\n"\ "%s\r\n"\ "\r\n\r\n", line.c_str(), _this->m_conn_ip.c_str(), - _this->m_conn_port, auth_mode, + _this->m_conn_port, + cp->db_id, + auth_mode, line.c_str() ); @@ -1013,15 +1024,45 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel #endif #if 1 - // ֱתݵͻ - if (is_stderr) - ret = ssh_channel_write_stderr(cp->cli_channel, data, len); - else - ret = ssh_channel_write(cp->cli_channel, data, len); + //static int idx = 0; + + ssh_set_blocking(_this->m_cli_session, 0); + + int xx = 0; + for(xx = 0; xx < 10; ++xx) { + +// idx++; +// EXLOGD(">>>>> %d . %d\n", cp->db_id, idx); + + // 直接转发数据到客户端 + if (is_stderr) + ret = ssh_channel_write_stderr(cp->cli_channel, data, len); + else + ret = ssh_channel_write(cp->cli_channel, data, len); + +// EXLOGD("<<<<< %d . 
%d\n", cp->db_id, idx); + + if(ret == SSH_OK) { +// EXLOGD("ssh_channel_write() ok.\n"); + break; + } + else if(ret == SSH_AGAIN) { +// EXLOGD("ssh_channel_write() need again, %d.\n", xx); + ex_sleep_ms(500); + continue; + } + else { +// EXLOGD("ssh_channel_write() failed.\n"); + break; + } + } + + ssh_set_blocking(_this->m_cli_session, 1); + #else - // յķݰ \033]0;AABB\007 ݣͻ˻ݴ˸ı䴰ڱ - // Ҫ滻ⲿݣʹ֮ʾ \033]0;TP#ssh://remote-ip\007 ı⡣ - // ήһЩܣĿǰã˲ִ뱸á + // 分析收到的服务端数据包,如果包含类似 \033]0;AABB\007 这样的数据,客户端会根据此改变窗口标题 + // 我们需要替换这部分数据,使之显示类似 \033]0;TP#ssh://remote-ip\007 这样的标题。 + // 但是这样会降低一些性能,因此目前不启用,保留此部分代码备用。 if (is_stderr) { ret = ssh_channel_write_stderr(cp->cli_channel, data, len); } @@ -1038,7 +1079,7 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel { _end++; - // киıݣ⻻ΪҪ + // 这个包中含有改变标题的数据,将标题换为我们想要的 EXLOGD("-- found title\n"); size_t len_end = len - (_end - (const ex_u8*)data); MemBuffer mbuf; @@ -1060,7 +1101,7 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel if (ret == SSH_ERROR) break; if (ret == mbuf.size()) { - ret = len; // ʾѾеˡ + ret = len; // 表示我们已经处理了所有的数据了。 break; } else { @@ -1139,7 +1180,7 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e if (TP_SSH_CLIENT_SIDE == from) { if (len >= 2) { if (((ex_u8 *) data)[len - 1] == 0x0d) { - // ƸճһִУ¼־ļ + // 疑似复制粘贴多行命令一次性执行,将其记录到日志文件中 ex_astr str((const char *) data, len - 1); cp->rec.record_command(1, str); @@ -1148,13 +1189,13 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e } } - // ͻسʱʱִһҪݷ˷صݽнһж + // 客户端输入回车时,可能时执行了一条命令,需要根据服务端返回的数据进行进一步判断 cp->maybe_cmd = (data[len - 1] == 0x0d); // if (cp->maybe_cmd) // EXLOGD("[ssh] maybe cmd.\n"); - // ʱִtop£һĸ'q'˳ûسܻᵼº¼ʱصʾΪ - // ¼ˣҪųķʽǣͻ˵ĸյǿ 1b 5b xx xxͲ + // 有时在执行类似top命令的情况下,输入一个字母'q'就退出程序,没有输入回车,可能会导致后续记录命令时将返回的命令行提示符作为命令 + // 记录下来了,要避免这种情况,排除的方式是:客户端单个字母,后续服务端如果收到的是控制序列 1b 5b xx xx,就不计做命令。 cp->client_single_char = (len == 1 && isprint(data[0])); cp->process_srv = true; @@ -1193,15 +1234,15 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e case 0x4b: { // 'K' if (0 == esc_arg) { - // ɾ굽βַ + // 删除光标到行尾的字符串 cp->cmd_char_list.erase(cp->cmd_char_pos, cp->cmd_char_list.end()); cp->cmd_char_pos = cp->cmd_char_list.end(); } else if (1 == esc_arg) { - // ɾӿʼ괦ַ + // 删除从开始到光标处的字符串 cp->cmd_char_list.erase(cp->cmd_char_list.begin(), cp->cmd_char_pos); cp->cmd_char_pos = cp->cmd_char_list.end(); } else if (2 == esc_arg) { - // ɾ + // 删除整行 cp->cmd_char_list.clear(); cp->cmd_char_pos = cp->cmd_char_list.begin(); } @@ -1210,7 +1251,7 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e break; } case 0x43: {// ^[C - // + // 光标右移 if (esc_arg == 0) esc_arg = 1; for (int j = 0; j < esc_arg; ++j) { @@ -1221,7 +1262,7 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e break; } case 0x44: { // ^[D - // + // 光标左移 if (esc_arg == 0) esc_arg = 1; for (int j = 0; j < esc_arg; ++j) { @@ -1233,7 +1274,7 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e break; } - case 0x50: {// 'P' ɾַָ + case 0x50: {// 'P' 删除指定数量的字符 if (esc_arg == 0) esc_arg = 1; @@ -1245,7 +1286,7 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR *cp, int from, const e break; } - case 0x40: { // '@' ָĿհַ + case 0x40: { // '@' 插入指定数量的空白字符 if (esc_arg == 0) esc_arg = 1; for (int j = 0; j < esc_arg; ++j) @@ -1267,10 +1308,10 @@ void SshSession::_process_ssh_command(TP_SSH_CHANNEL_PAIR 
*cp, int from, const e switch (ch) { case 0x07: - // + // 响铃 break; case 0x08: { - // + // 光标左移 if (cp->cmd_char_pos != cp->cmd_char_list.begin()) cp->cmd_char_pos--; break; @@ -1343,10 +1384,10 @@ void SshSession::_process_sftp_command(TP_SSH_CHANNEL_PAIR *cp, const ex_u8 *dat // SFTP protocol: https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13 //EXLOG_BIN(data, len, "[sftp] client channel data"); - // TODO: ݿͻ˵ͷ˵ķأԽһжûβļģдȵȣԼĽdzɹʧܡ - // ¼ʽ time-offset,flag,action,result,file-path,[file-path] - // УflagĿǰΪ0ԺԣΪ֤ssh-cmdʽһ£time-offset/action/result - // file-pathDZĶ󣬹Ϊ :ʵݣ磬 13:/root/abc.txt + // TODO: 根据客户端的请求和服务端的返回,可以进一步判断用户是如何操作文件的,比如读、写等等,以及操作的结果是成功还是失败。 + // 记录格式: time-offset,flag,action,result,file-path,[file-path] + // 其中,flag目前总是为0,可以忽略(为保证与ssh-cmd格式一致),time-offset/action/result 都是数字 + // file-path是被操作的对象,规格为 长度:实际内容,例如, 13:/root/abc.txt if (len < 9) @@ -1364,7 +1405,7 @@ void SshSession::_process_sftp_command(TP_SSH_CHANNEL_PAIR *cp, const ex_u8 *dat return; } - // Ҫ14ֽ + // 需要的数据至少14字节 // uint32 + byte + uint32 + (uint32 + char + ...) // pkg_len + cmd + req_id + string( length + content...) if (len < 14) @@ -1397,13 +1438,13 @@ void SshSession::_process_sftp_command(TP_SSH_CHANNEL_PAIR *cp, const ex_u8 *dat break; case 0x12: // 0x12 = 18 = SSH_FXP_RENAME - // renameаַ + // rename操作数据中包含两个字符串 str2_ptr = str1_ptr + str1_len + 4; str2_len = (int) ((str2_ptr[0] << 24) | (str2_ptr[1] << 16) | (str2_ptr[2] << 8) | str2_ptr[3]); break; case 0x15: // 0x15 = 21 = SSH_FXP_LINK - // linkаַǰµļбӵļ + // link操作数据中包含两个字符串,前者是新的链接文件名,后者是现有被链接的文件名 str2_ptr = str1_ptr + str1_len + 4; str2_len = (int) ((str2_ptr[0] << 24) | (str2_ptr[1] << 16) | (str2_ptr[2] << 8) | str2_ptr[3]); break; diff --git a/server/tp_core/protocol/ssh/stdafx.cpp b/server/tp_core/protocol/ssh/stdafx.cpp index 1eab518..faf90a3 100644 --- a/server/tp_core/protocol/ssh/stdafx.cpp +++ b/server/tp_core/protocol/ssh/stdafx.cpp @@ -1,4 +1,4 @@ -// stdafx.cpp : source file that includes just the standard includes +// stdafx.cpp : source file that includes just the standard includes // tpssh.pch will be the pre-compiled header // stdafx.obj will contain the pre-compiled type information @@ -11,11 +11,17 @@ #ifdef EX_OS_WIN32 # ifdef EX_DEBUG -# pragma comment(lib, "..\\..\\..\\..\\external\\libssh\\build\\src\\Debug\\ssh.lib") +# pragma comment(lib, "debug/ssh.lib") +# pragma comment(lib, "libcrypto32MTd.lib") +# pragma comment(lib, "libssl32MTd.lib") # else -# pragma comment(lib, "..\\..\\..\\..\\external\\libssh\\build\\src\\Release\\ssh.lib") +# pragma comment(lib, "release/ssh.lib") +# pragma comment(lib, "libcrypto32MT.lib") +# pragma comment(lib, "libssl32MT.lib") # endif -# pragma comment(lib, "libeay32.lib") +// # pragma comment(lib, "libcrypto.lib") +// # pragma comment(lib, "libeay32.lib") # pragma comment(lib, "ws2_32.lib") +# pragma comment(lib, "crypt32.lib") #endif diff --git a/server/tp_core/protocol/ssh/tpssh.cpp b/server/tp_core/protocol/ssh/tpssh.cpp index 5bfa7d1..a605635 100644 --- a/server/tp_core/protocol/ssh/tpssh.cpp +++ b/server/tp_core/protocol/ssh/tpssh.cpp @@ -1,4 +1,4 @@ -#include "ssh_proxy.h" +#include "ssh_proxy.h" #include "tpp_env.h" #include @@ -46,10 +46,15 @@ TPP_API void tpp_timer(void) { static ex_rv _set_runtime_config(const char* param) { Json::Value jp; - Json::Reader jreader; + //Json::Reader jreader; + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = param; + ex_astr err; - if (!jreader.parse(param, jp)) - return 
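// ---- editor's note ----------------------------------------------------------
// The SFTP framing parsed in _process_sftp_command() above follows
// draft-ietf-secsh-filexfer: every integer is big-endian, and a request is
// uint32 pkg_len | byte cmd | uint32 req_id | uint32 str_len | str_len bytes.
// A helper for the repeated shift-and-or reads (sketch):
//
//     static inline uint32_t be32(const ex_u8* p)
//     {
//         return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16)
//              | ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
//     }
//
//     // e.g. str1_len = be32(data + 9); the first string body starts at
//     // data + 13, which is why the code above requires at least 14 bytes.
// -----------------------------------------------------------------------------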
TPE_JSON_FORMAT; + //if (!jreader.parse(param, jp)) + if (!jreader->parse(str_json_begin, param + strlen(param), &jp, &err)) + return TPE_JSON_FORMAT; if (!jp.isObject()) return TPE_PARAM; @@ -68,10 +73,16 @@ static ex_rv _set_runtime_config(const char* param) { static ex_rv _kill_sessions(const char* param) { Json::Value jp; - Json::Reader jreader; +// Json::Reader jreader; +// if (!jreader.parse(param, jp)) + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); + const char *str_json_begin = param; + ex_astr err; - if (!jreader.parse(param, jp)) - return TPE_JSON_FORMAT; + //if (!jreader.parse(param, jp)) + if (!jreader->parse(str_json_begin, param + strlen(param), &jp, &err)) + return TPE_JSON_FORMAT; if (!jp.isArray()) return TPE_PARAM; diff --git a/server/tp_core/protocol/ssh/tpssh.vs2015.sln b/server/tp_core/protocol/ssh/tpssh.vs2015.sln deleted file mode 100644 index b4a60e7..0000000 --- a/server/tp_core/protocol/ssh/tpssh.vs2015.sln +++ /dev/null @@ -1,22 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.23107.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tpssh", "tpssh.vs2015.vcxproj", "{FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|x86 = Debug|x86 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Debug|x86.ActiveCfg = Debug|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Debug|x86.Build.0 = Debug|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Release|x86.ActiveCfg = Release|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Release|x86.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj b/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj deleted file mode 100644 index afc56b1..0000000 --- a/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj +++ /dev/null @@ -1,162 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - Debug - x64 - - - Release - x64 - - - - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531} - Win32Proj - tpssh - 8.1 - tpssh - - - - DynamicLibrary - true - v140_xp - Unicode - - - DynamicLibrary - false - v140_xp - true - Unicode - - - - - - - - - - - - - - - true - ..\..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - - - - Level3 - Disabled - WIN32;_DEBUG;_WINDOWS;_USRDLL;TPP_EXPORTS;LIBSSH_STATIC;%(PreprocessorDefinitions) - ../../../../common/teleport;../../../../common/libex/include;../../../../external/jsoncpp/include;../../../../external/libssh/include;%(AdditionalIncludeDirectories) - MultiThreadedDebug - - - Windows - Debug - ..\..\..\..\external\libssh-win-static\lib;..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) - - - - - Level3 - - - MaxSpeed - true - true - WIN32;NDEBUG;_WINDOWS;_USRDLL;TPP_EXPORTS;LIBSSH_STATIC;%(PreprocessorDefinitions) - ../../../../common/teleport;../../../../common/libex/include;../../../../external/jsoncpp/include;../../../../external/libssh/include;%(AdditionalIncludeDirectories) - MultiThreaded - - - 
Windows - true - true - true - ..\..\..\..\external\libssh-win-static\lib;..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - false - - - false - - - - - - - - - - - - - - \ No newline at end of file diff --git a/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj.filters b/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj.filters deleted file mode 100644 index 18936c4..0000000 --- a/server/tp_core/protocol/ssh/tpssh.vs2015.vcxproj.filters +++ /dev/null @@ -1,182 +0,0 @@ - - - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {73f964a0-2430-47c6-b083-956809018ec2} - - - {68aa1482-4f0d-43b5-9bbd-d7857e9b540b} - - - {e6941d07-304a-4ba8-af5a-c5a09dae61b4} - - - {4a7ed97e-93c1-4513-b813-3399eaeb8a2f} - - - {b95409eb-4905-440f-8537-ee892e96b49a} - - - {4FC737F1-C7A5-4376-A066-2A32D752A2FF} - cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - - {f1fea9ae-123c-4aa8-a152-e88d1d0a29fd} - - - {022b0a3d-47c2-40d8-96d9-dceea02e7eef} - - - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - win32 - - - win32 - - - main app - - - main app - - - common - - - common - - - common - - - common - - - main app - - - common - - - main app - - - libssh - - - libssh - - - libssh - - - libssh - - - jsoncpp - - - common - - - - - main app - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - win32 - - - win32 - - - main app - - - main app - - - common - - - common - - - main app - - - common - - - main app - - - jsoncpp - - - jsoncpp - - - jsoncpp - - - \ No newline at end of file diff --git a/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj b/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj index 250ef8c..cd5eb27 100644 --- a/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj +++ b/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj @@ -66,7 +66,7 @@ Windows Debug - ..\..\..\..\external\libssh-win-static\lib;..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) + ..\..\..\..\external\libssh\build\src;..\..\..\..\external\openssl\lib\VC\static;%(AdditionalLibraryDirectories) @@ -86,7 +86,7 @@ true true true - ..\..\..\..\external\libssh-win-static\lib;..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) + ..\..\..\..\external\libssh\build\src;..\..\..\..\external\openssl\lib\VC\static;%(AdditionalLibraryDirectories) @@ -103,10 +103,10 @@ - - - - + + + + diff --git a/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj.filters b/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj.filters index 18936c4..6850888 100644 --- a/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj.filters +++ b/server/tp_core/protocol/ssh/tpssh.vs2017.vcxproj.filters @@ -98,24 +98,24 @@ main app - - libssh - - - libssh - - - libssh - - - libssh - jsoncpp common + + libssh + + + libssh + + + libssh + + + libssh + diff --git a/server/tp_core/protocol/telnet/telnet_recorder.cpp b/server/tp_core/protocol/telnet/telnet_recorder.cpp index 985e896..c1700ee 100644 --- a/server/tp_core/protocol/telnet/telnet_recorder.cpp +++ b/server/tp_core/protocol/telnet/telnet_recorder.cpp @@ -8,7 +8,7 @@ TppTelnetRec::TppTelnetRec() memset(&m_head, 0, sizeof(TS_RECORD_HEADER)); memcpy((ex_u8*)(&m_head.info.magic), TPP_RECORD_MAGIC, 
sizeof(ex_u32)); - m_head.info.ver = 0x03; + m_head.info.ver = 0x04; m_header_changed = false; m_save_full_header = false; @@ -42,7 +42,7 @@ bool TppTelnetRec::_on_begin(const TPP_CONNECT_INFO* info) bool TppTelnetRec::_on_end() { - // ʣδдݣдļС + // If there is any remaining unwritten data, write it out to the file. save_record(); if (m_file_info != NULL) @@ -76,13 +76,14 @@ void TppTelnetRec::record(ex_u8 type, const ex_u8* data, size_t size) { pkg.time_ms = (ex_u32)(ex_get_tick_count() - m_start_time); m_head.info.time_ms = pkg.time_ms; + m_header_changed = true; } m_cache.append((ex_u8*)&pkg, sizeof(TS_RECORD_PKG)); m_cache.append(data, size); - m_head.info.packages++; - m_header_changed = true; + // m_head.info.packages++; + // m_header_changed = true; } // void TppTelnetRec::record_win_size(int width, int height) // { // m_header_changed = true; // } @@ -93,20 +94,20 @@ void TppTelnetRec::record(ex_u8 type, const ex_u8* data, size_t size) -void TppTelnetRec::record_win_size_startup(int width, int height) -{ - m_head.basic.width = (ex_u16)width; - m_head.basic.height = (ex_u16)height; - m_save_full_header = true; -} - -void TppTelnetRec::record_win_size_change(int width, int height) -{ - TS_RECORD_WIN_SIZE pkg = { 0 }; - pkg.width = (ex_u16)width; - pkg.height = (ex_u16)height; - record(TS_RECORD_TYPE_TELNET_TERM_SIZE, (ex_u8*)&pkg, sizeof(TS_RECORD_WIN_SIZE)); -} +void TppTelnetRec::record_win_size_startup(int width, int height) +{ + m_head.basic.width = (ex_u16)width; + m_head.basic.height = (ex_u16)height; + m_save_full_header = true; +} + +void TppTelnetRec::record_win_size_change(int width, int height) +{ + TS_RECORD_WIN_SIZE pkg = { 0 }; + pkg.width = (ex_u16)width; + pkg.height = (ex_u16)height; + record(TS_RECORD_TYPE_TELNET_TERM_SIZE, (ex_u8*)&pkg, sizeof(TS_RECORD_WIN_SIZE)); +} bool TppTelnetRec::_save_to_info_file() { if (!m_header_changed) diff --git a/server/tp_core/protocol/telnet/tptelnet.cpp b/server/tp_core/protocol/telnet/tptelnet.cpp index 70cf6ed..4ec8976 100644 --- a/server/tp_core/protocol/telnet/tptelnet.cpp +++ b/server/tp_core/protocol/telnet/tptelnet.cpp @@ -1,60 +1,66 @@ -#include "telnet_proxy.h" -#include "tpp_env.h" - -#include -#include - -TPP_API ex_rv tpp_init(TPP_INIT_ARGS* init_args) -{ - if (!g_telnet_env.init(init_args)) - return TPE_FAILED; - - return 0; -} - -TPP_API ex_rv tpp_start(void) -{ - if (!g_telnet_proxy.init()) - return TPE_FAILED; - if (!g_telnet_proxy.start()) - return TPE_FAILED; - - return 0; -} - -TPP_API ex_rv tpp_stop(void) -{ - g_telnet_proxy.stop(); - return 0; -} - -TPP_API void tpp_timer(void) { - // be called per one second. 
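[Editor's aside] tptelnet.cpp below migrates from the long-deprecated Json::Reader to jsoncpp's Json::CharReaderBuilder API, the same change already applied to tpssh.cpp above. A minimal self-contained sketch of the new parsing pattern follows; the helper name and sample document are illustrative, not part of the project:

#include <json/json.h>   // jsoncpp
#include <cstdio>
#include <cstring>
#include <memory>
#include <string>

// Parse a JSON document with the CharReaderBuilder API.
// Returns true on success; on failure 'err' holds the parser message.
static bool parse_json(const char* param, Json::Value& root, std::string& err) {
    Json::CharReaderBuilder jcrb;
    std::unique_ptr<Json::CharReader> const jreader(jcrb.newCharReader());
    // CharReader::parse() takes [begin, end) pointers instead of a std::string.
    return jreader->parse(param, param + strlen(param), &root, &err);
}

int main() {
    Json::Value jp;
    std::string err;
    if (!parse_json("{\"noop_timeout\": 15}", jp, err) || !jp.isObject()) {
        printf("bad json: %s\n", err.c_str());
        return 1;
    }
    printf("noop_timeout = %u\n", jp["noop_timeout"].asUInt());
    return 0;
}

Unlike the old Json::Reader::parse(), the new call reports errors through the output string rather than a separate getter, which is why both _set_runtime_config and _kill_sessions now declare an ex_astr err alongside the reader.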
+ g_telnet_proxy.timer(); +} + + +// TPP_API void tpp_set_cfg(TPP_SET_CFG_ARGS* cfg_args) { +// g_telnet_proxy.set_cfg(cfg_args); +// } -// TPP_API void tpp_set_cfg(TPP_SET_CFG_ARGS* cfg_args) { -// g_telnet_proxy.set_cfg(cfg_args); -// } - static ex_rv _set_runtime_config(const char* param) { - Json::Value jp; - Json::Reader jreader; +// Json::Value jp; +// Json::Reader jreader; +// +// if (!jreader.parse(param, jp)) + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); - if (!jreader.parse(param, jp)) - return TPE_JSON_FORMAT; + ex_astr err; + Json::Value jp; + if (!jreader->parse(param, param + strlen(param), &jp, &err)) + return TPE_JSON_FORMAT; if (!jp.isObject()) return TPE_PARAM; - if (jp["noop_timeout"].isNull() || !jp["noop_timeout"].isUInt()) - return TPE_PARAM; - - ex_u32 noop_timeout = jp["noop_timeout"].asUInt(); - if (noop_timeout == 0) - return TPE_PARAM; + if (jp["noop_timeout"].isNull() || !jp["noop_timeout"].isUInt()) + return TPE_PARAM; + + ex_u32 noop_timeout = jp["noop_timeout"].asUInt(); + if (noop_timeout == 0) + return TPE_PARAM; g_telnet_proxy.set_cfg(noop_timeout * 60); @@ -62,25 +68,31 @@ static ex_rv _set_runtime_config(const char* param) { } static ex_rv _kill_sessions(const char* param) { - Json::Value jp; - Json::Reader jreader; +// Json::Value jp; +// Json::Reader jreader; +// +// if (!jreader.parse(param, jp)) + Json::CharReaderBuilder jcrb; + std::unique_ptr const jreader(jcrb.newCharReader()); - if (!jreader.parse(param, jp)) - return TPE_JSON_FORMAT; + ex_astr err; + Json::Value jp; + if (!jreader->parse(param, param + strlen(param), &jp, &err)) + return TPE_JSON_FORMAT; if (!jp.isArray()) return TPE_PARAM; ex_astrs ss; - int cnt = jp.size(); - for (int i = 0; i < cnt; ++i) - { - if (!jp[i].isString()) { - return TPE_PARAM; - } - - ss.push_back(jp[i].asString()); - } + int cnt = jp.size(); + for (int i = 0; i < cnt; ++i) + { + if (!jp[i].isString()) { + return TPE_PARAM; + } + + ss.push_back(jp[i].asString()); + } g_telnet_proxy.kill_sessions(ss); diff --git a/server/tp_core/protocol/telnet/tptelnet.vs2015.sln b/server/tp_core/protocol/telnet/tptelnet.vs2015.sln deleted file mode 100644 index 89b3573..0000000 --- a/server/tp_core/protocol/telnet/tptelnet.vs2015.sln +++ /dev/null @@ -1,22 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.23107.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tptelnet", "tptelnet.vs2015.vcxproj", "{FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|x86 = Debug|x86 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Debug|x86.ActiveCfg = Debug|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Debug|x86.Build.0 = Debug|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Release|x86.ActiveCfg = Release|Win32 - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531}.Release|x86.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj b/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj deleted file mode 100644 index a5f9255..0000000 --- a/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj +++ /dev/null @@ -1,197 +0,0 @@ - - - - - Debug - Win32 - - - Release - 
Win32 - - - - {FDA16D20-09B7-45AF-ADF1-DAF3EF2C0531} - Win32Proj - tptelnet - 8.1 - tptelnet - - - - DynamicLibrary - true - Unicode - v140_xp - - - DynamicLibrary - false - v140_xp - true - Unicode - - - - - - - - - - - - - - - true - ..\..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - - - - Level3 - Disabled - WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_USRDLL;TPP_EXPORTS;_WIN32_WINNT=0x0600;%(PreprocessorDefinitions) - ../../../../common/teleport;../../../../common/libex/include;../../../../external/jsoncpp/include;../../../../external/mbedtls/include;../../../../external/libuv/include;../../../../external/libuv/src;%(AdditionalIncludeDirectories) - MultiThreadedDebug - - - Windows - ..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) - Debug - - - - - Level3 - - - MaxSpeed - true - true - WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;_USRDLL;TPP_EXPORTS;_WIN32_WINNT=0x0600;%(PreprocessorDefinitions) - ../../../../common/teleport;../../../../common/libex/include;../../../../external/jsoncpp/include;../../../../external/mbedtls/include;../../../../external/libuv/include;../../../../external/libuv/src;%(AdditionalIncludeDirectories) - MultiThreaded - - - Windows - true - true - true - ..\..\..\..\external\openssl\out32;%(AdditionalLibraryDirectories) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - false - - - false - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj.filters b/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj.filters deleted file mode 100644 index cf00123..0000000 --- a/server/tp_core/protocol/telnet/tptelnet.vs2015.vcxproj.filters +++ /dev/null @@ -1,320 +0,0 @@ - - - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {73f964a0-2430-47c6-b083-956809018ec2} - - - {68aa1482-4f0d-43b5-9bbd-d7857e9b540b} - - - {e6941d07-304a-4ba8-af5a-c5a09dae61b4} - - - {4a7ed97e-93c1-4513-b813-3399eaeb8a2f} - - - {b95409eb-4905-440f-8537-ee892e96b49a} - - - {4FC737F1-C7A5-4376-A066-2A32D752A2FF} - cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - - {c33b46b3-8d5d-4dff-851b-4a9394b198d0} - - - {e3918a55-cbc1-4d69-bed8-c0146f2f4ce2} - - - {c248e06b-21a4-4f6d-b0ca-0f1b2a5783bd} - - - {89181d75-3db3-45a5-a35d-9083fb349de3} - - - {fae1b562-5e3f-4b9b-9a5d-41bb15ae2223} - - - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - win32 - - - win32 - - - main app - - - main app - - - common - - - common - - - common - - - common - - - main app - - - common - - - main app - - - main app - - - common - - - libuv\header - - - libuv\header - - - libuv\header - - - libuv\header - - - libuv\header - - - libuv\header - - - libuv\src - - - libuv\src - - - libuv\src - - - libuv\src\win - - - libuv\src\win - - - common - - - jsoncpp - - - - - main app - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - win32 - - - win32 - - - main app - - - main app - 
- - common - - - common - - - main app - - - common - - - main app - - - main app - - - common - - - libuv\src - - - libuv\src - - - libuv\src - - - libuv\src - - - libuv\src - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - libuv\src\win - - - jsoncpp - - - jsoncpp - - - jsoncpp - - - libuv - - - \ No newline at end of file diff --git a/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj b/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj index 47833b1..153a0ff 100644 --- a/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj +++ b/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj @@ -140,7 +140,9 @@ + + @@ -160,7 +162,6 @@ - diff --git a/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj.filters b/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj.filters index cf00123..caaf634 100644 --- a/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj.filters +++ b/server/tp_core/protocol/telnet/tptelnet.vs2017.vcxproj.filters @@ -226,9 +226,6 @@ libuv\src - - libuv\src\win - libuv\src\win @@ -314,7 +311,13 @@ jsoncpp - libuv + libuv\src + + + libuv\src + + + libuv\src \ No newline at end of file diff --git a/server/tp_core/testssh/CMakeLists.txt b/server/tp_core/testssh/CMakeLists.txt new file mode 100644 index 0000000..f78a3a3 --- /dev/null +++ b/server/tp_core/testssh/CMakeLists.txt @@ -0,0 +1,44 @@ +cmake_minimum_required(VERSION 3.5) + +MESSAGE(STATUS "=======================================================") +MESSAGE(STATUS " testssh") +MESSAGE(STATUS "=======================================================") +include(../../../CMakeCfg.txt) + +aux_source_directory(. DIR_SRCS) +aux_source_directory(../../../common/libex/src DIR_SRCS) + +list(REMOVE_ITEM DIR_SRCS "./stdafx.cpp") + +include_directories( + ../../../common/libex/include +) + +if (OS_LINUX) + set(CMAKE_EXE_LINKER_FLAGS "-export-dynamic") + include_directories( + ${TP_EXTERNAL_RELEASE_DIR}/include + ) + link_directories( + ${TP_EXTERNAL_RELEASE_DIR}/lib + ${TP_EXTERNAL_RELEASE_DIR}/lib64 + ) +elseif (OS_MACOS) + include_directories( + /usr/local/opt/openssl/include + ${TP_EXTERNAL_RELEASE_DIR}/include + ) + link_directories( + /usr/local/opt/openssl/lib + ${TP_EXTERNAL_RELEASE_DIR}/lib + ) +endif () + + +add_executable(testssh ${DIR_SRCS}) + +if (OS_LINUX) + target_link_libraries(testssh ssh ssl z crypto dl pthread rt util) +elseif (OS_MACOS) + target_link_libraries(testssh ssh ssl z crypto dl pthread util) +endif () diff --git a/server/tp_core/testssh/readme.md b/server/tp_core/testssh/readme.md new file mode 100644 index 0000000..eb8f4cb --- /dev/null +++ b/server/tp_core/testssh/readme.md @@ -0,0 +1,40 @@ +======================================================================== + CONSOLE APPLICATION : testssh Project Overview +======================================================================== + +AppWizard has created this testssh application for you. + +This file contains a summary of what you will find in each of the files that +make up your testssh application. 
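[Editor's aside] The new CMakeLists.txt above links the testssh executable directly against libssh, OpenSSL, and zlib. As a quick link-check, a sketch like the following (assuming nothing beyond libssh's ssh_version() helper) prints the library version when the target compiles and links correctly:

#include <libssh/libssh.h>
#include <cstdio>

int main() {
    // ssh_version(0) returns the runtime libssh version string;
    // printing it confirms the testssh target resolved the libssh symbols.
    printf("linked against libssh %s\n", ssh_version(0));
    return 0;
}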
+ + +testssh.vcxproj + This is the main project file for VC++ projects generated using an Application Wizard. + It contains information about the version of Visual C++ that generated the file, and + information about the platforms, configurations, and project features selected with the + Application Wizard. + +testssh.vcxproj.filters + This is the filters file for VC++ projects generated using an Application Wizard. + It contains information about the association between the files in your project + and the filters. This association is used in the IDE to show grouping of files with + similar extensions under a specific node (for e.g. ".cpp" files are associated with the + "Source Files" filter). + +testssh.cpp + This is the main application source file. + +///////////////////////////////////////////////////////////////////////////// +Other standard files: + +StdAfx.h, StdAfx.cpp + These files are used to build a precompiled header (PCH) file + named testssh.pch and a precompiled types file named StdAfx.obj. + +///////////////////////////////////////////////////////////////////////////// +Other notes: + +AppWizard uses "TODO:" comments to indicate parts of the source code you +should add to or customize. + +///////////////////////////////////////////////////////////////////////////// diff --git a/server/tp_core/testssh/stdafx.cpp b/server/tp_core/testssh/stdafx.cpp new file mode 100644 index 0000000..68931a3 --- /dev/null +++ b/server/tp_core/testssh/stdafx.cpp @@ -0,0 +1,24 @@ +// stdafx.cpp : source file that includes just the standard includes +// testssh.pch will be the pre-compiled header +// stdafx.obj will contain the pre-compiled type information + +#include "stdafx.h" +#include + +// TODO: reference any additional headers you need in STDAFX.H +// and not in this file + +#ifdef _DEBUG +# pragma comment(lib, "debug/ssh.lib") +# pragma comment(lib, "libcrypto32MTd.lib") +# pragma comment(lib, "libssl32MTd.lib") +#else +# pragma comment(lib, "release/ssh.lib") +# pragma comment(lib, "libcrypto32MT.lib") +# pragma comment(lib, "libssl32MT.lib") +#endif +// #pragma comment(lib, "libeay32.lib") +// #pragma comment(lib, "libcrypto.lib") +#pragma comment(lib, "ws2_32.lib") +#pragma comment(lib, "crypt32.lib") + diff --git a/server/tp_core/testssh/stdafx.h b/server/tp_core/testssh/stdafx.h new file mode 100644 index 0000000..47a0d02 --- /dev/null +++ b/server/tp_core/testssh/stdafx.h @@ -0,0 +1,15 @@ +// stdafx.h : include file for standard system include files, +// or project specific include files that are used frequently, but +// are changed infrequently +// + +#pragma once + +#include "targetver.h" + +#include +#include + + + +// TODO: reference additional headers your program requires here diff --git a/server/tp_core/testssh/targetver.h b/server/tp_core/testssh/targetver.h new file mode 100644 index 0000000..90e767b --- /dev/null +++ b/server/tp_core/testssh/targetver.h @@ -0,0 +1,8 @@ +#pragma once + +// Including SDKDDKVer.h defines the highest available Windows platform. + +// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and +// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h. + +#include diff --git a/server/tp_core/testssh/testssh.cpp b/server/tp_core/testssh/testssh.cpp new file mode 100644 index 0000000..42fa101 --- /dev/null +++ b/server/tp_core/testssh/testssh.cpp @@ -0,0 +1,172 @@ +// testssh.cpp : Defines the entry point for the console application. 
+// + +#include +#include + +void show_usage() { + printf("Usage:\n"); + printf(" testssh USERNAME PASSWORD IP PORT\n"); +} + +int main(int argc, char** argv) +{ + if (argc != 5) { + show_usage(); + return -1; + } + + ssh_init(); + + ssh_session sess = ssh_new(); + ssh_set_blocking(sess, 1); + + char* username = argv[1]; + char* password = argv[2]; + + char* ip = argv[3]; + ssh_options_set(sess, SSH_OPTIONS_HOST, ip); + + int port = atoi(argv[4]); + ssh_options_set(sess, SSH_OPTIONS_PORT, &port); + + int flag = SSH_LOG_FUNCTIONS; + ssh_options_set(sess, SSH_OPTIONS_LOG_VERBOSITY, &flag); + + int val = 0; + ssh_options_set(sess, SSH_OPTIONS_STRICTHOSTKEYCHECK, &val); + + ssh_options_set(sess, SSH_OPTIONS_USER, username); + + int _timeout = 120; // 120 sec. + ssh_options_set(sess, SSH_OPTIONS_TIMEOUT, &_timeout); + + // connect to real SSH host. + int rc = 0; + rc = ssh_connect(sess); + if (rc != SSH_OK) { + printf("[ERROR] can not connect to SSH server %s:%d. [%d] %s\n", ip, port, rc, ssh_get_error(sess)); + ssh_free(sess); + return -1; + } + + _timeout = 120; // 120 sec. + ssh_options_set(sess, SSH_OPTIONS_TIMEOUT, &_timeout); + + // get version of SSH server. + int ver = ssh_get_version(sess); + printf("[INFO] host is SSHv%d\n", ver); + + // get supported auth-type of SSH server. + //ssh_userauth_none(sess, username); + rc = ssh_userauth_none(sess, NULL); + if (rc == SSH_AUTH_ERROR) { + printf("[ERROR] can not get the auth types supported by the SSH server.\n"); + ssh_free(sess); + return -1; + } + + int auth_methods = ssh_userauth_list(sess, username); + printf("[INFO] supported auth-type: 0x%08x\n", auth_methods); + if(auth_methods == SSH_AUTH_METHOD_UNKNOWN) { +// auth_methods = SSH_AUTH_METHOD_PASSWORD|SSH_AUTH_METHOD_INTERACTIVE; +// printf("[WRN] unknown auth-type, try PASSWORD and INTERACTIVE\n"); + auth_methods = SSH_AUTH_METHOD_PASSWORD; + printf("[WRN] unknown auth-type, try PASSWORD mode.\n"); + } + + // get banner. + const char* banner = ssh_get_issue_banner(sess); + if (banner != NULL) { + printf("[INFO] server issue banner: %s\n", banner); + } + + // try auth. + bool ok = false; + int retry_count = 0; + + // first try interactive login mode if the server allows it. + if (!ok && (auth_methods & SSH_AUTH_METHOD_INTERACTIVE) == SSH_AUTH_METHOD_INTERACTIVE) { + retry_count = 0; + rc = ssh_userauth_kbdint(sess, NULL, NULL); + for (;;) { + if (rc == SSH_AUTH_SUCCESS) { + ok = true; + break; + } + + if (rc == SSH_AUTH_AGAIN) { + retry_count += 1; + if (retry_count >= 5) + break; + ex_sleep_ms(500); + // Sleep(500); + rc = ssh_userauth_kbdint(sess, NULL, NULL); + continue; + } + + if (rc != SSH_AUTH_INFO) + break; + + int nprompts = ssh_userauth_kbdint_getnprompts(sess); + if (0 == nprompts) { + rc = ssh_userauth_kbdint(sess, NULL, NULL); + continue; + } + + for (int iprompt = 0; iprompt < nprompts; ++iprompt) { + char echo = 0; + const char *prompt = ssh_userauth_kbdint_getprompt(sess, iprompt, &echo); + printf("[INFO] interactive login prompt: %s\n", prompt); + + rc = ssh_userauth_kbdint_setanswer(sess, iprompt, password); + if (rc < 0) { + printf("[ERROR] invalid password for interactive mode to login to SSH server.\n"); + ssh_free(sess); + return -1; + } + } + + rc = ssh_userauth_kbdint(sess, NULL, NULL); + } + } + + // and then try password login mode if the server allows it. 
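[Editor's aside] The keyboard-interactive loop above and the password loop that follows share one contract: retry on SSH_AUTH_AGAIN a bounded number of times, answer every kbdint prompt with the password, and fall back from one method to the next. A condensed sketch of that fallback as a reusable helper, using only libssh calls that already appear in this file (the helper name is illustrative, and the AGAIN/backoff handling is omitted for brevity):

#include <libssh/libssh.h>

// Try keyboard-interactive first, then plain password.
// Returns true when the server accepts the credentials.
static bool try_auth(ssh_session sess, const char* password) {
    int methods = ssh_userauth_list(sess, NULL);

    if (methods & SSH_AUTH_METHOD_INTERACTIVE) {
        int rc = ssh_userauth_kbdint(sess, NULL, NULL);
        while (rc == SSH_AUTH_INFO) {
            int nprompts = ssh_userauth_kbdint_getnprompts(sess);
            for (int i = 0; i < nprompts; ++i) {
                char echo = 0;
                ssh_userauth_kbdint_getprompt(sess, i, &echo);
                if (ssh_userauth_kbdint_setanswer(sess, i, password) < 0)
                    return false;               // answer rejected outright
            }
            rc = ssh_userauth_kbdint(sess, NULL, NULL);  // submit the answers
        }
        if (rc == SSH_AUTH_SUCCESS)
            return true;
    }

    if (methods & SSH_AUTH_METHOD_PASSWORD)
        return ssh_userauth_password(sess, NULL, password) == SSH_AUTH_SUCCESS;

    return false;
}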
+ if (!ok && (auth_methods & SSH_AUTH_METHOD_PASSWORD) == SSH_AUTH_METHOD_PASSWORD) { + retry_count = 0; + rc = ssh_userauth_password(sess, NULL, password); + for (;;) { + if (rc == SSH_AUTH_AGAIN) { + retry_count += 1; + if (retry_count >= 3) + break; + ex_sleep_ms(100); + // Sleep(100); + rc = ssh_userauth_password(sess, NULL, password); + continue; + } + if (rc == SSH_AUTH_SUCCESS) { + ok = true; + printf("[INFO] login with password mode OK.\n"); + break; + } else { + printf("[ERROR] failed to login with password mode, got %d.\n", rc); + break; + } + } + } + + if (!ok) { + printf("[ERROR] can not use password mode or interactive mode to login to SSH server.\n"); + } + else { + printf("[INFO] login success.\n"); + } + + ssh_disconnect(sess); + ssh_free(sess); + ssh_finalize(); + + return 0; +} + diff --git a/server/tp_core/testssh/testssh.sln b/server/tp_core/testssh/testssh.sln new file mode 100644 index 0000000..7819b63 --- /dev/null +++ b/server/tp_core/testssh/testssh.sln @@ -0,0 +1,25 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.28307.168 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "testssh", "testssh.vcxproj", "{27998EAA-69B4-4AA5-9D91-54CE740181A9}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x86 = Debug|x86 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {27998EAA-69B4-4AA5-9D91-54CE740181A9}.Debug|x86.ActiveCfg = Debug|Win32 + {27998EAA-69B4-4AA5-9D91-54CE740181A9}.Debug|x86.Build.0 = Debug|Win32 + {27998EAA-69B4-4AA5-9D91-54CE740181A9}.Release|x86.ActiveCfg = Release|Win32 + {27998EAA-69B4-4AA5-9D91-54CE740181A9}.Release|x86.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {0FAE3FD8-A9E1-4BF8-9A6C-A9AC50174CDF} + EndGlobalSection +EndGlobal diff --git a/server/tp_core/testssh/testssh.vcxproj b/server/tp_core/testssh/testssh.vcxproj new file mode 100644 index 0000000..73f5575 --- /dev/null +++ b/server/tp_core/testssh/testssh.vcxproj @@ -0,0 +1,127 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + + {27998EAA-69B4-4AA5-9D91-54CE740181A9} + Win32Proj + testssh + 8.1 + + + + Application + true + v141 + Unicode + + + Application + false + v141 + true + Unicode + + + + + + + + + + + + + + + true + + + false + + + + Use + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;LIBSSH_STATIC;%(PreprocessorDefinitions) + true + ..\..\..\external\libssh\include;..\..\..\common\libex\include;%(AdditionalIncludeDirectories) + MultiThreadedDebug + + + Console + true + ..\..\..\external\libssh\build\src;..\..\..\external\openssl\lib\VC\static;%(AdditionalLibraryDirectories) + + + + + Level3 + Use + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;LIBSSH_STATIC;%(PreprocessorDefinitions) + true + ..\..\..\external\libssh\include;..\..\..\common\libex\include;%(AdditionalIncludeDirectories) + MultiThreaded + + + Console + true + true + true + ..\..\..\external\libssh\build\src;..\..\..\external\openssl\lib\VC\static;C:\Program Files (x86)\Visual Leak Detector\lib\Win32;%(AdditionalLibraryDirectories) + + + + + + + + + + + + NotUsing + NotUsing + + + NotUsing + NotUsing + + + NotUsing + NotUsing + + + NotUsing + NotUsing + + + NotUsing + NotUsing + + + Create + Create + + + NotUsing + NotUsing + + + + + + \ No newline at 
end of file diff --git a/server/tp_core/testssh/testssh.vcxproj.filters b/server/tp_core/testssh/testssh.vcxproj.filters new file mode 100644 index 0000000..0388b1a --- /dev/null +++ b/server/tp_core/testssh/testssh.vcxproj.filters @@ -0,0 +1,54 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + {d9087583-83c7-4d47-bba3-fd86b0cb3901} + + + + + + + + Header Files + + + Header Files + + + + + Source Files + + + Source Files + + + libex + + + libex + + + libex + + + libex + + + libex + + + \ No newline at end of file diff --git a/server/tp_web/src/CMakeLists.txt b/server/tp_web/src/CMakeLists.txt index 33f81ea..c8ba2fb 100644 --- a/server/tp_web/src/CMakeLists.txt +++ b/server/tp_web/src/CMakeLists.txt @@ -1,15 +1,13 @@ cmake_minimum_required(VERSION 3.5) MESSAGE(STATUS "=======================================================") -MESSAGE(STATUS " libtptelnet") +MESSAGE(STATUS " tp_web") MESSAGE(STATUS "=======================================================") #MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}") #MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}") include(../../../CMakeCfg.txt) -#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") - set(CMAKE_EXE_LINKER_FLAGS "-export-dynamic") aux_source_directory(. DIR_SRCS) @@ -23,7 +21,7 @@ include_directories( include_directories( ${TP_EXTERNAL_RELEASE_DIR}/include - ${TP_EXTERNAL_RELEASE_DIR}/include/python + ${TP_EXTERNAL_RELEASE_DIR}/include/python3.7m ) link_directories(${TP_EXTERNAL_RELEASE_DIR}/lib) diff --git a/server/tp_web/src/tp_web.vs2015.sln b/server/tp_web/src/tp_web.vs2015.sln deleted file mode 100644 index a8e2dc4..0000000 --- a/server/tp_web/src/tp_web.vs2015.sln +++ /dev/null @@ -1,22 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.23107.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tp_web", "tp_web.vs2015.vcxproj", "{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|x86 = Debug|x86 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.ActiveCfg = Debug|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.Build.0 = Debug|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.ActiveCfg = Release|Win32 - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/server/tp_web/src/tp_web.vs2015.vcxproj b/server/tp_web/src/tp_web.vs2015.vcxproj deleted file mode 100644 index 50c13f8..0000000 --- a/server/tp_web/src/tp_web.vs2015.vcxproj +++ /dev/null @@ -1,212 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - Debug - x64 - - - Release - x64 - - - - {6548CB1D-A7BA-4A68-9B3F-A5129F77868B} - Win32Proj - tp_web - 8.1 - tp_web - - - - Application - true - v140_xp - Unicode - - - Application - false - v140_xp - true - Unicode - - - Application - true - v140 - Unicode - - - Application - false - v140 - true - Unicode - - - - - - - - - - - - - - - - - - - - - true - 
..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - D:\apps\vld\include;$(IncludePath) - D:\apps\vld\lib\Win32;$(LibraryPath) - - - true - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\..\out\server\$(PlatformTarget)\$(Configuration)\ - ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - false - ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ - - - - - - Level3 - Disabled - WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;%(PreprocessorDefinitions) - true - ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include - MultiThreadedDebug - - - Console - - - libcmt.lib - - - - - - - Level3 - Disabled - _DEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include - - - Windows - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - - - - Level3 - - - MaxSpeed - true - true - WIN32;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;%(PreprocessorDefinitions) - true - ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include - MultiThreaded - - - Console - true - true - - - - - - - Level3 - - - MaxSpeed - true - true - NDEBUG;_WINDOWS;%(PreprocessorDefinitions) - true - ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include - - - Windows - true - true - true - ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/server/tp_web/src/tp_web.vs2015.vcxproj.filters b/server/tp_web/src/tp_web.vs2015.vcxproj.filters deleted file mode 100644 index df7b24a..0000000 --- a/server/tp_web/src/tp_web.vs2015.vcxproj.filters +++ /dev/null @@ -1,131 +0,0 @@ - - - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {0155895f-d6be-4e0f-970d-9b6b5c759502} - - - {0da131e6-c187-4632-a82b-c9b84238b97a} - - - {ffe9fc8a-0268-4a71-8681-ab835e44fd83} - - - {f9606240-3c34-4d3d-8623-7913fe36b8b4} - - - {465c4847-7106-4020-ae5f-bcc649ae7ca9} - - - {4a9f6402-c1c7-4c13-a390-794b6ac77697} - - - {5696c8d5-f56a-429d-b058-cbe79a1a17ca} - - - - - main app - - - main app - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - libex\src - - - pyshell\src - - - pyshell\src - - - pyshell\src - - - - - main app - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - main app - - - libex\header - - - libex\header - - - libex\header - - - libex\header - - - pyshell\src - - - pyshell\src - - - pyshell\header - - - Resource Files - - - - - Resource Files - - - - - Resource Files - - - \ No newline at end of file diff --git a/server/www/packages/packages-darwin/x64/psutil/__init__.py b/server/www/packages/packages-darwin/x64/psutil/__init__.py index e129965..b400ec8 100644 --- a/server/www/packages/packages-darwin/x64/psutil/__init__.py +++ 
b/server/www/packages/packages-darwin/x64/psutil/__init__.py @@ -17,7 +17,7 @@ sensors) in Python. Supported platforms: - Sun Solaris - AIX -Works with Python versions from 2.6 to 3.X. +Works with Python versions from 2.6 to 3.4+. """ from __future__ import division @@ -31,8 +31,8 @@ import os import signal import subprocess import sys +import threading import time -import traceback try: import pwd except ImportError: @@ -87,12 +87,6 @@ from ._common import POSIX # NOQA from ._common import SUNOS from ._common import WINDOWS -from ._exceptions import AccessDenied -from ._exceptions import Error -from ._exceptions import NoSuchProcess -from ._exceptions import TimeoutExpired -from ._exceptions import ZombieProcess - if LINUX: # This is public API and it will be retrieved from _pslinux.py # via sys.modules. @@ -152,6 +146,10 @@ elif WINDOWS: from ._psutil_windows import NORMAL_PRIORITY_CLASS # NOQA from ._psutil_windows import REALTIME_PRIORITY_CLASS # NOQA from ._pswindows import CONN_DELETE_TCB # NOQA + from ._pswindows import IOPRIO_VERYLOW # NOQA + from ._pswindows import IOPRIO_LOW # NOQA + from ._pswindows import IOPRIO_NORMAL # NOQA + from ._pswindows import IOPRIO_HIGH # NOQA elif MACOS: from . import _psosx as _psplatform @@ -212,23 +210,26 @@ __all__ = [ "pid_exists", "pids", "process_iter", "wait_procs", # proc "virtual_memory", "swap_memory", # memory "cpu_times", "cpu_percent", "cpu_times_percent", "cpu_count", # cpu - "cpu_stats", # "cpu_freq", + "cpu_stats", # "cpu_freq", "getloadavg" "net_io_counters", "net_connections", "net_if_addrs", # network "net_if_stats", "disk_io_counters", "disk_partitions", "disk_usage", # disk # "sensors_temperatures", "sensors_battery", "sensors_fans" # sensors "users", "boot_time", # others ] + + __all__.extend(_psplatform.__extra__all__) __author__ = "Giampaolo Rodola'" -__version__ = "5.4.7" +__version__ = "5.6.3" version_info = tuple([int(num) for num in __version__.split('.')]) + +_timer = getattr(time, 'monotonic', time.time) AF_LINK = _psplatform.AF_LINK POWER_TIME_UNLIMITED = _common.POWER_TIME_UNLIMITED POWER_TIME_UNKNOWN = _common.POWER_TIME_UNKNOWN _TOTAL_PHYMEM = None -_timer = getattr(time, 'monotonic', time.time) - +_LOWEST_PID = None # Sanity check in case the user messed up with psutil installation # or did something weird with sys.path. In this case we might end @@ -252,6 +253,112 @@ if (int(__version__.replace('.', '')) != raise ImportError(msg) +# ===================================================================== +# --- Exceptions +# ===================================================================== + + +class Error(Exception): + """Base exception class. All other psutil exceptions inherit + from this one. + """ + + def __init__(self, msg=""): + Exception.__init__(self, msg) + self.msg = msg + + def __repr__(self): + ret = "psutil.%s %s" % (self.__class__.__name__, self.msg) + return ret.strip() + + __str__ = __repr__ + + +class NoSuchProcess(Error): + """Exception raised when a process with a certain PID doesn't + or no longer exists. + """ + + def __init__(self, pid, name=None, msg=None): + Error.__init__(self, msg) + self.pid = pid + self.name = name + self.msg = msg + if msg is None: + if name: + details = "(pid=%s, name=%s)" % (self.pid, repr(self.name)) + else: + details = "(pid=%s)" % self.pid + self.msg = "process no longer exists " + details + + +class ZombieProcess(NoSuchProcess): + """Exception raised when querying a zombie process. 
This is + raised on macOS, BSD and Solaris only, and not always: depending + on the query the OS may be able to succeed anyway. + On Linux all zombie processes are querable (hence this is never + raised). Windows doesn't have zombie processes. + """ + + def __init__(self, pid, name=None, ppid=None, msg=None): + NoSuchProcess.__init__(self, msg) + self.pid = pid + self.ppid = ppid + self.name = name + self.msg = msg + if msg is None: + args = ["pid=%s" % pid] + if name: + args.append("name=%s" % repr(self.name)) + if ppid: + args.append("ppid=%s" % self.ppid) + details = "(%s)" % ", ".join(args) + self.msg = "process still exists but it's a zombie " + details + + +class AccessDenied(Error): + """Exception raised when permission to perform an action is denied.""" + + def __init__(self, pid=None, name=None, msg=None): + Error.__init__(self, msg) + self.pid = pid + self.name = name + self.msg = msg + if msg is None: + if (pid is not None) and (name is not None): + self.msg = "(pid=%s, name=%s)" % (pid, repr(name)) + elif (pid is not None): + self.msg = "(pid=%s)" % self.pid + else: + self.msg = "" + + +class TimeoutExpired(Error): + """Raised on Process.wait(timeout) if timeout expires and process + is still alive. + """ + + def __init__(self, seconds, pid=None, name=None): + Error.__init__(self, "timeout after %s seconds" % seconds) + self.seconds = seconds + self.pid = pid + self.name = name + if (pid is not None) and (name is not None): + self.msg += " (pid=%s, name=%s)" % (pid, repr(name)) + elif (pid is not None): + self.msg += " (pid=%s)" % self.pid + + +# Push exception classes into platform specific module namespace. +_psplatform.NoSuchProcess = NoSuchProcess +_psplatform.ZombieProcess = ZombieProcess +_psplatform.AccessDenied = AccessDenied +_psplatform.TimeoutExpired = TimeoutExpired +if POSIX: + from . import _psposix + _psposix.TimeoutExpired = TimeoutExpired + + # ===================================================================== # --- Utils # ===================================================================== @@ -353,7 +460,7 @@ class Process(object): self._create_time = None self._gone = False self._hash = None - self._oneshot_inctx = False + self._lock = threading.RLock() # used for caching on Windows only (on POSIX ppid may change) self._ppid = None # platform-specific modules define an _psplatform.Process @@ -457,40 +564,45 @@ class Process(object): ... >>> """ - if self._oneshot_inctx: - # NOOP: this covers the use case where the user enters the - # context twice. Since as_dict() internally uses oneshot() - # I expect that the code below will be a pretty common - # "mistake" that the user will make, so let's guard - # against that: - # - # >>> with p.oneshot(): - # ... p.as_dict() - # ... - yield - else: - self._oneshot_inctx = True - try: - # cached in case cpu_percent() is used - self.cpu_times.cache_activate() - # cached in case memory_percent() is used - self.memory_info.cache_activate() - # cached in case parent() is used - self.ppid.cache_activate() - # cached in case username() is used - if POSIX: - self.uids.cache_activate() - # specific implementation cache - self._proc.oneshot_enter() + with self._lock: + if hasattr(self, "_cache"): + # NOOP: this covers the use case where the user enters the + # context twice: + # + # >>> with p.oneshot(): + # ... with p.oneshot(): + # ... 
+ # + # Also, since as_dict() internally uses oneshot() + # I expect that the code below will be a pretty common + # "mistake" that the user will make, so let's guard + # against that: + # + # >>> with p.oneshot(): + # ... p.as_dict() + # ... yield - finally: - self.cpu_times.cache_deactivate() - self.memory_info.cache_deactivate() - self.ppid.cache_deactivate() - if POSIX: - self.uids.cache_deactivate() - self._proc.oneshot_exit() - self._oneshot_inctx = False + else: + try: + # cached in case cpu_percent() is used + self.cpu_times.cache_activate(self) + # cached in case memory_percent() is used + self.memory_info.cache_activate(self) + # cached in case parent() is used + self.ppid.cache_activate(self) + # cached in case username() is used + if POSIX: + self.uids.cache_activate(self) + # specific implementation cache + self._proc.oneshot_enter() + yield + finally: + self.cpu_times.cache_deactivate(self) + self.memory_info.cache_deactivate(self) + self.ppid.cache_deactivate(self) + if POSIX: + self.uids.cache_deactivate(self) + self._proc.oneshot_exit() def as_dict(self, attrs=None, ad_value=None): """Utility method returning process information as a @@ -541,6 +653,9 @@ class Process(object): checking whether PID has been reused. If no parent is known return None. """ + lowest_pid = _LOWEST_PID if _LOWEST_PID is not None else pids()[0] + if self.pid == lowest_pid: + return None ppid = self.ppid() if ppid is not None: ctime = self.create_time() @@ -552,6 +667,17 @@ class Process(object): except NoSuchProcess: pass + def parents(self): + """Return the parents of this process as a list of Process + instances. If no parents are known return an empty list. + """ + parents = [] + proc = self.parent() + while proc is not None: + parents.append(proc) + proc = proc.parent() + return parents + def is_running(self): """Return whether this process is running. It also checks if PID has been reused by another process in @@ -800,9 +926,6 @@ class Process(object): (and set). (Windows, Linux and BSD only). """ - # Automatically remove duplicates both on get and - # set (for get it's not really necessary, it's - # just for extra safety). if cpus is None: return list(set(self._proc.cpu_affinity_get())) else: @@ -826,7 +949,7 @@ class Process(object): """ return self._proc.cpu_num() - # Linux, macOS and Windows only + # Linux, macOS, Windows, Solaris, AIX if hasattr(_psplatform.Process, "environ"): def environ(self): @@ -1096,7 +1219,6 @@ class Process(object): return (value / float(total_phymem)) * 100 if hasattr(_psplatform.Process, "memory_maps"): - # Available everywhere except OpenBSD and NetBSD. def memory_maps(self, grouped=True): """Return process' mapped memory regions as a list of namedtuples whose fields are variable depending on the platform. @@ -1299,7 +1421,7 @@ class Popen(Process): http://bugs.python.org/issue6973. 
For a complete documentation refer to: - http://docs.python.org/library/subprocess.html + http://docs.python.org/3/library/subprocess.html """ def __init__(self, *args, **kwargs): @@ -1355,7 +1477,7 @@ class Popen(Process): _as_dict_attrnames = set( [x for x in dir(Process) if not x.startswith('_') and x not in ['send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait', - 'is_running', 'as_dict', 'parent', 'children', 'rlimit', + 'is_running', 'as_dict', 'parent', 'parents', 'children', 'rlimit', 'memory_info_ex', 'oneshot']]) @@ -1366,7 +1488,10 @@ _as_dict_attrnames = set( def pids(): """Return a list of current running PIDs.""" - return _psplatform.pids() + global _LOWEST_PID + ret = sorted(_psplatform.pids()) + _LOWEST_PID = ret[0] + return ret def pid_exists(pid): @@ -1388,6 +1513,7 @@ def pid_exists(pid): _pmap = {} +_lock = threading.Lock() def process_iter(attrs=None, ad_value=None): @@ -1415,21 +1541,26 @@ def process_iter(attrs=None, ad_value=None): proc = Process(pid) if attrs is not None: proc.info = proc.as_dict(attrs=attrs, ad_value=ad_value) - _pmap[proc.pid] = proc + with _lock: + _pmap[proc.pid] = proc return proc def remove(pid): - _pmap.pop(pid, None) + with _lock: + _pmap.pop(pid, None) a = set(pids()) b = set(_pmap.keys()) new_pids = a - b gone_pids = b - a - for pid in gone_pids: remove(pid) - for pid, proc in sorted(list(_pmap.items()) + - list(dict.fromkeys(new_pids).items())): + + with _lock: + ls = sorted(list(_pmap.items()) + + list(dict.fromkeys(new_pids).items())) + + for pid, proc in ls: try: if proc is None: # new process yield add(pid) @@ -1609,14 +1740,12 @@ try: except Exception: # Don't want to crash at import time. _last_cpu_times = None - traceback.print_exc() try: _last_per_cpu_times = cpu_times(percpu=True) except Exception: # Don't want to crash at import time. _last_per_cpu_times = None - traceback.print_exc() def _cpu_tot_time(times): @@ -1864,18 +1993,41 @@ if hasattr(_psplatform, "cpu_freq"): return ret[0] else: currs, mins, maxs = 0.0, 0.0, 0.0 + set_none = False for cpu in ret: currs += cpu.current + # On Linux if /proc/cpuinfo is used min/max are set + # to None. + if LINUX and cpu.min is None: + set_none = True + continue mins += cpu.min maxs += cpu.max + current = currs / num_cpus - min_ = mins / num_cpus - max_ = maxs / num_cpus + + if set_none: + min_ = max_ = None + else: + min_ = mins / num_cpus + max_ = maxs / num_cpus + return _common.scpufreq(current, min_, max_) __all__.append("cpu_freq") +if hasattr(os, "getloadavg") or hasattr(_psplatform, "getloadavg"): + # Perform this hasattr check once on import time to either use the + # platform based code or proxy straight from the os module. + if hasattr(os, "getloadavg"): + getloadavg = os.getloadavg + else: + getloadavg = _psplatform.getloadavg + + __all__.append("getloadavg") + + # ===================================================================== # --- system memory related functions # ===================================================================== @@ -1901,7 +2053,7 @@ def virtual_memory(): - used: memory used, calculated differently depending on the platform and designed for informational purposes only: - macOS: active + inactive + wired + macOS: active + wired BSD: active + wired + cached Linux: total - free @@ -2297,19 +2449,16 @@ if WINDOWS: def test(): # pragma: no cover - """List info of all currently running processes emulating ps aux - output. 
- """ + from ._common import bytes2human + from ._compat import get_terminal_size + today_day = datetime.date.today() - templ = "%-10s %5s %4s %7s %7s %-13s %5s %7s %s" - attrs = ['pid', 'memory_percent', 'name', 'cpu_times', 'create_time', - 'memory_info'] - if POSIX: - attrs.append('uids') - attrs.append('terminal') - print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "TTY", "START", "TIME", - "COMMAND")) - for p in process_iter(attrs=attrs, ad_value=''): + templ = "%-10s %5s %5s %7s %7s %5s %6s %6s %6s %s" + attrs = ['pid', 'memory_percent', 'name', 'cmdline', 'cpu_times', + 'create_time', 'memory_info', 'status', 'nice', 'username'] + print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "NICE", + "STATUS", "START", "TIME", "CMDLINE")) + for p in process_iter(attrs, ad_value=None): if p.info['create_time']: ctime = datetime.datetime.fromtimestamp(p.info['create_time']) if ctime.date() == today_day: @@ -2318,30 +2467,46 @@ def test(): # pragma: no cover ctime = ctime.strftime("%b%d") else: ctime = '' - cputime = time.strftime("%M:%S", - time.localtime(sum(p.info['cpu_times']))) - try: - user = p.username() - except Error: - user = '' - if WINDOWS and '\\' in user: + if p.info['cpu_times']: + cputime = time.strftime("%M:%S", + time.localtime(sum(p.info['cpu_times']))) + else: + cputime = '' + + user = p.info['username'] or '' + if not user and POSIX: + try: + user = p.uids()[0] + except Error: + pass + if user and WINDOWS and '\\' in user: user = user.split('\\')[1] - vms = p.info['memory_info'] and \ - int(p.info['memory_info'].vms / 1024) or '?' - rss = p.info['memory_info'] and \ - int(p.info['memory_info'].rss / 1024) or '?' - memp = p.info['memory_percent'] and \ - round(p.info['memory_percent'], 1) or '?' - print(templ % ( + user = user[:9] + vms = bytes2human(p.info['memory_info'].vms) if \ + p.info['memory_info'] is not None else '' + rss = bytes2human(p.info['memory_info'].rss) if \ + p.info['memory_info'] is not None else '' + memp = round(p.info['memory_percent'], 1) if \ + p.info['memory_percent'] is not None else '' + nice = int(p.info['nice']) if p.info['nice'] else '' + if p.info['cmdline']: + cmdline = ' '.join(p.info['cmdline']) + else: + cmdline = p.info['name'] + status = p.info['status'][:5] if p.info['status'] else '' + + line = templ % ( user[:10], p.info['pid'], memp, vms, rss, - p.info.get('terminal', '') or '?', + nice, + status, ctime, cputime, - p.info['name'].strip() or '?')) + cmdline) + print(line[:get_terminal_size()[0]]) del memoize, memoize_when_activated, division, deprecated_method diff --git a/server/www/packages/packages-darwin/x64/psutil/_common.py b/server/www/packages/packages-darwin/x64/psutil/_common.py index 2cc3939..e3b4541 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_common.py +++ b/server/www/packages/packages-darwin/x64/psutil/_common.py @@ -64,6 +64,7 @@ __all__ = [ 'conn_tmap', 'deprecated_method', 'isfile_strict', 'memoize', 'parse_environ_block', 'path_exists_strict', 'usage_percent', 'supports_ipv6', 'sockfam_to_enum', 'socktype_to_enum', "wrap_numbers", + 'bytes2human', ] @@ -327,7 +328,7 @@ def memoize_when_activated(fun): 1 >>> >>> # activated - >>> foo.cache_activate() + >>> foo.cache_activate(self) >>> foo() 1 >>> foo() @@ -336,26 +337,30 @@ def memoize_when_activated(fun): """ @functools.wraps(fun) def wrapper(self): - if not wrapper.cache_activated: + try: + # case 1: we previously entered oneshot() ctx + ret = self._cache[fun] + except AttributeError: + # case 2: we never entered oneshot() ctx return fun(self) - 
else: - try: - ret = cache[fun] - except KeyError: - ret = cache[fun] = fun(self) - return ret + except KeyError: + # case 3: we entered oneshot() ctx but there's no cache + # for this entry yet + ret = self._cache[fun] = fun(self) + return ret - def cache_activate(): - """Activate cache.""" - wrapper.cache_activated = True + def cache_activate(proc): + """Activate cache. Expects a Process instance. Cache will be + stored as a "_cache" instance attribute.""" + proc._cache = {} - def cache_deactivate(): + def cache_deactivate(proc): """Deactivate and clear cache.""" - wrapper.cache_activated = False - cache.clear() + try: + del proc._cache + except AttributeError: + pass - cache = {} - wrapper.cache_activated = False wrapper.cache_activate = cache_activate wrapper.cache_deactivate = cache_deactivate return wrapper @@ -471,7 +476,7 @@ def deprecated_method(replacement): @functools.wraps(fun) def inner(self, *args, **kwargs): - warnings.warn(msg, category=FutureWarning, stacklevel=2) + warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return getattr(self, replacement)(*args, **kwargs) return inner return outer @@ -576,3 +581,54 @@ def wrap_numbers(input_dict, name): _wn = _WrapNumbers() wrap_numbers.cache_clear = _wn.cache_clear wrap_numbers.cache_info = _wn.cache_info + + +def open_binary(fname, **kwargs): + return open(fname, "rb", **kwargs) + + +def open_text(fname, **kwargs): + """On Python 3 opens a file in text mode by using fs encoding and + a proper en/decoding errors handler. + On Python 2 this is just an alias for open(name, 'rt'). + """ + if PY3: + # See: + # https://github.com/giampaolo/psutil/issues/675 + # https://github.com/giampaolo/psutil/pull/733 + kwargs.setdefault('encoding', ENCODING) + kwargs.setdefault('errors', ENCODING_ERRS) + return open(fname, "rt", **kwargs) + + +def bytes2human(n, format="%(value).1f%(symbol)s"): + """Used by various scripts. See: + http://goo.gl/zeJZl + + >>> bytes2human(10000) + '9.8K' + >>> bytes2human(100001221) + '95.4M' + """ + symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + prefix = {} + for i, s in enumerate(symbols[1:]): + prefix[s] = 1 << (i + 1) * 10 + for symbol in reversed(symbols[1:]): + if n >= prefix[symbol]: + value = float(n) / prefix[symbol] + return format % locals() + return format % dict(symbol=symbols[0], value=n) + + +def get_procfs_path(): + """Return updated psutil.PROCFS_PATH constant.""" + return sys.modules['psutil'].PROCFS_PATH + + +if PY3: + def decode(s): + return s.decode(encoding=ENCODING, errors=ENCODING_ERRS) +else: + def decode(s): + return s diff --git a/server/www/packages/packages-darwin/x64/psutil/_compat.py b/server/www/packages/packages-darwin/x64/psutil/_compat.py index 08aefe4..c772f61 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_compat.py +++ b/server/www/packages/packages-darwin/x64/psutil/_compat.py @@ -10,7 +10,7 @@ import os import sys __all__ = ["PY3", "long", "xrange", "unicode", "basestring", "u", "b", - "lru_cache", "which"] + "lru_cache", "which", "get_terminal_size"] PY3 = sys.version_info[0] == 3 @@ -239,3 +239,24 @@ except ImportError: if _access_check(name, mode): return name return None + + +# python 3.3 +try: + from shutil import get_terminal_size +except ImportError: + def get_terminal_size(fallback=(80, 24)): + try: + import fcntl + import termios + import struct + except ImportError: + return fallback + else: + try: + # This should work on Linux. 
+ res = struct.unpack( + 'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '1234')) + return (res[1], res[0]) + except Exception: + return fallback diff --git a/server/www/packages/packages-darwin/x64/psutil/_exceptions.py b/server/www/packages/packages-darwin/x64/psutil/_exceptions.py deleted file mode 100644 index 6dbbd28..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/_exceptions.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -class Error(Exception): - """Base exception class. All other psutil exceptions inherit - from this one. - """ - - def __init__(self, msg=""): - Exception.__init__(self, msg) - self.msg = msg - - def __repr__(self): - ret = "psutil.%s %s" % (self.__class__.__name__, self.msg) - return ret.strip() - - __str__ = __repr__ - - -class NoSuchProcess(Error): - """Exception raised when a process with a certain PID doesn't - or no longer exists. - """ - - def __init__(self, pid, name=None, msg=None): - Error.__init__(self, msg) - self.pid = pid - self.name = name - self.msg = msg - if msg is None: - if name: - details = "(pid=%s, name=%s)" % (self.pid, repr(self.name)) - else: - details = "(pid=%s)" % self.pid - self.msg = "process no longer exists " + details - - -class ZombieProcess(NoSuchProcess): - """Exception raised when querying a zombie process. This is - raised on macOS, BSD and Solaris only, and not always: depending - on the query the OS may be able to succeed anyway. - On Linux all zombie processes are querable (hence this is never - raised). Windows doesn't have zombie processes. - """ - - def __init__(self, pid, name=None, ppid=None, msg=None): - NoSuchProcess.__init__(self, msg) - self.pid = pid - self.ppid = ppid - self.name = name - self.msg = msg - if msg is None: - args = ["pid=%s" % pid] - if name: - args.append("name=%s" % repr(self.name)) - if ppid: - args.append("ppid=%s" % self.ppid) - details = "(%s)" % ", ".join(args) - self.msg = "process still exists but it's a zombie " + details - - -class AccessDenied(Error): - """Exception raised when permission to perform an action is denied.""" - - def __init__(self, pid=None, name=None, msg=None): - Error.__init__(self, msg) - self.pid = pid - self.name = name - self.msg = msg - if msg is None: - if (pid is not None) and (name is not None): - self.msg = "(pid=%s, name=%s)" % (pid, repr(name)) - elif (pid is not None): - self.msg = "(pid=%s)" % self.pid - else: - self.msg = "" - - -class TimeoutExpired(Error): - """Raised on Process.wait(timeout) if timeout expires and process - is still alive. - """ - - def __init__(self, seconds, pid=None, name=None): - Error.__init__(self, "timeout after %s seconds" % seconds) - self.seconds = seconds - self.pid = pid - self.name = name - if (pid is not None) and (name is not None): - self.msg += " (pid=%s, name=%s)" % (pid, repr(name)) - elif (pid is not None): - self.msg += " (pid=%s)" % self.pid diff --git a/server/www/packages/packages-darwin/x64/psutil/_psaix.py b/server/www/packages/packages-darwin/x64/psutil/_psaix.py index 7ba212d..b24325d 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_psaix.py +++ b/server/www/packages/packages-darwin/x64/psutil/_psaix.py @@ -7,6 +7,7 @@ """AIX platform implementation.""" import errno +import functools import glob import os import re @@ -20,6 +21,7 @@ from . import _psposix from . import _psutil_aix as cext from . 
import _psutil_posix as cext_posix from ._common import AF_INET6 +from ._common import get_procfs_path from ._common import memoize_when_activated from ._common import NIC_DUPLEX_FULL from ._common import NIC_DUPLEX_HALF @@ -28,9 +30,6 @@ from ._common import sockfam_to_enum from ._common import socktype_to_enum from ._common import usage_percent from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = ["PROCFS_PATH"] @@ -42,6 +41,8 @@ __extra__all__ = ["PROCFS_PATH"] HAS_THREADS = hasattr(cext, "proc_threads") +HAS_NET_IO_COUNTERS = hasattr(cext, "net_io_counters") +HAS_PROC_IO_COUNTERS = hasattr(cext, "proc_io_counters") PAGE_SIZE = os.sysconf('SC_PAGE_SIZE') AF_LINK = cext_posix.AF_LINK @@ -79,6 +80,13 @@ proc_info_map = dict( status=6, ttynr=7) +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + # ===================================================================== # --- named tuples @@ -93,21 +101,6 @@ pfullmem = pmem scputimes = namedtuple('scputimes', ['user', 'system', 'idle', 'iowait']) # psutil.virtual_memory() svmem = namedtuple('svmem', ['total', 'available', 'percent', 'used', 'free']) -# psutil.Process.memory_maps(grouped=True) -pmmap_grouped = namedtuple('pmmap_grouped', ['path', 'rss', 'anon', 'locked']) -# psutil.Process.memory_maps(grouped=False) -pmmap_ext = namedtuple( - 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) - - -# ===================================================================== -# --- utils -# ===================================================================== - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH # ===================================================================== @@ -212,7 +205,9 @@ def disk_partitions(all=False): net_if_addrs = cext_posix.net_if_addrs -net_io_counters = cext.net_io_counters + +if HAS_NET_IO_COUNTERS: + net_io_counters = cext.net_io_counters def net_connections(kind, _pid=-1): @@ -328,7 +323,7 @@ def wrap_exceptions(fun): """Call callable into a try/except clause and translate ENOENT, EACCES and EPERM in NoSuchProcess or AccessDenied exceptions. 
""" - + @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) @@ -354,7 +349,7 @@ def wrap_exceptions(fun): class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid", "_procfs_path"] + __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"] def __init__(self, pid): self.pid = pid @@ -363,23 +358,19 @@ class Process(object): self._procfs_path = get_procfs_path() def oneshot_enter(self): - self._proc_name_and_args.cache_activate() - self._proc_basic_info.cache_activate() - self._proc_cred.cache_activate() + self._proc_basic_info.cache_activate(self) + self._proc_cred.cache_activate(self) def oneshot_exit(self): - self._proc_name_and_args.cache_deactivate() - self._proc_basic_info.cache_deactivate() - self._proc_cred.cache_deactivate() - - @memoize_when_activated - def _proc_name_and_args(self): - return cext.proc_name_and_args(self.pid, self._procfs_path) + self._proc_basic_info.cache_deactivate(self) + self._proc_cred.cache_deactivate(self) + @wrap_exceptions @memoize_when_activated def _proc_basic_info(self): return cext.proc_basic_info(self.pid, self._procfs_path) + @wrap_exceptions @memoize_when_activated def _proc_cred(self): return cext.proc_cred(self.pid, self._procfs_path) @@ -388,22 +379,25 @@ class Process(object): def name(self): if self.pid == 0: return "swapper" - # note: this is limited to 15 characters - return self._proc_name_and_args()[0].rstrip("\x00") + # note: max 16 characters + return cext.proc_name(self.pid, self._procfs_path).rstrip("\x00") @wrap_exceptions def exe(self): # there is no way to get executable path in AIX other than to guess, # and guessing is more complex than what's in the wrapping class - exe = self.cmdline()[0] + cmdline = self.cmdline() + if not cmdline: + return '' + exe = cmdline[0] if os.path.sep in exe: # relative or absolute path if not os.path.isabs(exe): # if cwd has changed, we're out of luck - this may be wrong! 
exe = os.path.abspath(os.path.join(self.cwd(), exe)) if (os.path.isabs(exe) and - os.path.isfile(exe) and - os.access(exe, os.X_OK)): + os.path.isfile(exe) and + os.access(exe, os.X_OK)): return exe # not found, move to search in PATH using basename only exe = os.path.basename(exe) @@ -411,13 +405,17 @@ class Process(object): for path in os.environ["PATH"].split(":"): possible_exe = os.path.abspath(os.path.join(path, exe)) if (os.path.isfile(possible_exe) and - os.access(possible_exe, os.X_OK)): + os.access(possible_exe, os.X_OK)): return possible_exe return '' @wrap_exceptions def cmdline(self): - return self._proc_name_and_args()[1].split(' ') + return cext.proc_args(self.pid) + + @wrap_exceptions + def environ(self): + return cext.proc_environ(self.pid) @wrap_exceptions def create_time(self): @@ -561,14 +559,15 @@ class Process(object): def wait(self, timeout=None): return _psposix.wait_pid(self.pid, timeout, self._name) - @wrap_exceptions - def io_counters(self): - try: - rc, wc, rb, wb = cext.proc_io_counters(self.pid) - except OSError: - # if process is terminated, proc_io_counters returns OSError - # instead of NSP - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - raise - return _common.pio(rc, wc, rb, wb) + if HAS_PROC_IO_COUNTERS: + @wrap_exceptions + def io_counters(self): + try: + rc, wc, rb, wb = cext.proc_io_counters(self.pid) + except OSError: + # if process is terminated, proc_io_counters returns OSError + # instead of NSP + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + raise + return _common.pio(rc, wc, rb, wb) diff --git a/server/www/packages/packages-darwin/x64/psutil/_psbsd.py b/server/www/packages/packages-darwin/x64/psutil/_psbsd.py index 7f4bcb6..3d9dfda 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_psbsd.py +++ b/server/www/packages/packages-darwin/x64/psutil/_psbsd.py @@ -11,6 +11,7 @@ import os import xml.etree.ElementTree as ET from collections import namedtuple from socket import AF_INET +from collections import defaultdict from . import _common from . import _psposix @@ -27,9 +28,6 @@ from ._common import sockfam_to_enum from ._common import socktype_to_enum from ._common import usage_percent from ._compat import which -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = [] @@ -103,6 +101,11 @@ else: PAGESIZE = os.sysconf("SC_PAGE_SIZE") AF_LINK = cext_posix.AF_LINK +HAS_PER_CPU_TIMES = hasattr(cext, "per_cpu_times") +HAS_PROC_NUM_THREADS = hasattr(cext, "proc_num_threads") +HAS_PROC_OPEN_FILES = hasattr(cext, 'proc_open_files') +HAS_PROC_NUM_FDS = hasattr(cext, 'proc_num_fds') + kinfo_proc_map = dict( ppid=0, status=1, @@ -131,6 +134,13 @@ kinfo_proc_map = dict( name=24, ) +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + # ===================================================================== # --- named tuples @@ -211,7 +221,7 @@ def cpu_times(): return scputimes(user, nice, system, idle, irq) -if hasattr(cext, "per_cpu_times"): +if HAS_PER_CPU_TIMES: def per_cpu_times(): """Return system CPU times as a namedtuple""" ret = [] @@ -432,6 +442,47 @@ if FREEBSD: secsleft = minsleft * 60 return _common.sbattery(percent, secsleft, power_plugged) + def sensors_temperatures(): + "Return CPU cores temperatures if available, else an empty dict." 
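# The HAS_* flags above (HAS_PROC_IO_COUNTERS, HAS_PER_CPU_TIMES, ...)
# turn repeated hasattr() probes on the C extension into one module-level
# constant, and the public method is only defined when the primitive
# exists. A generic sketch of that pattern with a fake extension module:
class _fake_cext(object):            # stand-in for _psutil_aix & friends
    @staticmethod
    def proc_io_counters(pid):
        return (1, 2, 3, 4)

HAS_PROC_IO_COUNTERS = hasattr(_fake_cext, "proc_io_counters")

class Process(object):
    def __init__(self, pid):
        self.pid = pid

    if HAS_PROC_IO_COUNTERS:
        def io_counters(self):
            return _fake_cext.proc_io_counters(self.pid)
    # else: the attribute is simply absent, so callers feature-test with
    # hasattr(Process, "io_counters") instead of catching exceptions.

assert hasattr(Process, "io_counters") is HAS_PROC_IO_COUNTERS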
+ ret = defaultdict(list) + num_cpus = cpu_count_logical() + for cpu in range(num_cpus): + try: + current, high = cext.sensors_cpu_temperature(cpu) + if high <= 0: + high = None + name = "Core %s" % cpu + ret["coretemp"].append( + _common.shwtemp(name, current, high, high)) + except NotImplementedError: + pass + + return ret + + def cpu_freq(): + """Return frequency metrics for CPUs. As of Dec 2018 only + CPU 0 appears to be supported by FreeBSD and all other cores + match the frequency of CPU 0. + """ + ret = [] + num_cpus = cpu_count_logical() + for cpu in range(num_cpus): + try: + current, available_freq = cext.cpu_frequency(cpu) + except NotImplementedError: + continue + if available_freq: + try: + min_freq = int(available_freq.split(" ")[-1].split("/")[0]) + except(IndexError, ValueError): + min_freq = None + try: + max_freq = int(available_freq.split(" ")[0].split("/")[0]) + except(IndexError, ValueError): + max_freq = None + ret.append(_common.scpufreq(current, min_freq, max_freq)) + return ret + # ===================================================================== # --- other system functions @@ -547,13 +598,20 @@ def wrap_exceptions_procfs(inst): class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid self._name = None self._ppid = None + def _assert_alive(self): + """Raise NSP if the process disappeared on us.""" + # For those C function who do not raise NSP, possibly returning + # incorrect or incomplete result. + cext.proc_name(self.pid) + + @wrap_exceptions @memoize_when_activated def oneshot(self): """Retrieves multiple process info in one shot as a raw tuple.""" @@ -562,10 +620,10 @@ class Process(object): return ret def oneshot_enter(self): - self.oneshot.cache_activate() + self.oneshot.cache_activate(self) def oneshot_exit(self): - self.oneshot.cache_deactivate() + self.oneshot.cache_deactivate(self) @wrap_exceptions def name(self): @@ -678,7 +736,7 @@ class Process(object): @wrap_exceptions def num_threads(self): - if hasattr(cext, "proc_num_threads"): + if HAS_PROC_NUM_THREADS: # FreeBSD return cext.proc_num_threads(self.pid) else: @@ -700,10 +758,7 @@ class Process(object): ntuple = _common.pthread(thread_id, utime, stime) retlist.append(ntuple) if OPENBSD: - # On OpenBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). - self.name() # raise NSP if the process disappeared on us + self._assert_alive() return retlist @wrap_exceptions @@ -733,10 +788,7 @@ class Process(object): type = socktype_to_enum(type) nt = _common.pconn(fd, fam, type, laddr, raddr, status) ret.add(nt) - # On NetBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). - self.name() # raise NSP if the process disappeared on us + self._assert_alive() return list(ret) families, types = conn_tmap[kind] @@ -755,10 +807,7 @@ class Process(object): nt = _common.pconn(fd, fam, type, laddr, raddr, status) ret.append(nt) if OPENBSD: - # On OpenBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). 
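# The FreeBSD cpu_freq() above slices an "available frequencies" string
# whose whitespace-separated tokens look like freq/power pairs, highest
# frequency first. Illustration of the split logic (the sample string is
# made up, in the shape the code above assumes):
available_freq = "2400/40000 2200/35000 1800/25000 800/10000"
max_freq = int(available_freq.split(" ")[0].split("/")[0])    # 2400
min_freq = int(available_freq.split(" ")[-1].split("/")[0])   # 800
assert (min_freq, max_freq) == (800, 2400)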
- self.name() # raise NSP if the process disappeared on us + self._assert_alive() return ret @wrap_exceptions @@ -798,7 +847,7 @@ class Process(object): elif NETBSD: with wrap_exceptions_procfs(self): return os.readlink("/proc/%s/cwd" % self.pid) - elif hasattr(cext, 'proc_open_files'): + elif HAS_PROC_OPEN_FILES: # FreeBSD < 8 does not support functions based on # kinfo_getfile() and kinfo_getvmmap() return cext.proc_cwd(self.pid) or None @@ -817,7 +866,7 @@ class Process(object): # FreeBSD < 8 does not support functions based on kinfo_getfile() # and kinfo_getvmmap() - if hasattr(cext, 'proc_open_files'): + if HAS_PROC_OPEN_FILES: @wrap_exceptions def open_files(self): """Return files opened by process as a list of namedtuples.""" @@ -828,15 +877,13 @@ class Process(object): # FreeBSD < 8 does not support functions based on kinfo_getfile() # and kinfo_getvmmap() - if hasattr(cext, 'proc_num_fds'): + if HAS_PROC_NUM_FDS: @wrap_exceptions def num_fds(self): """Return the number of file descriptors opened by this process.""" ret = cext.proc_num_fds(self.pid) if NETBSD: - # On NetBSD the underlying C function does not raise NSP - # in case the process is gone. - self.name() # raise NSP if the process disappeared on us + self._assert_alive() return ret else: num_fds = _not_implemented diff --git a/server/www/packages/packages-darwin/x64/psutil/_pslinux.py b/server/www/packages/packages-darwin/x64/psutil/_pslinux.py index df624de..e4bc7d7 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_pslinux.py +++ b/server/www/packages/packages-darwin/x64/psutil/_pslinux.py @@ -25,25 +25,23 @@ from . import _common from . import _psposix from . import _psutil_linux as cext from . import _psutil_posix as cext_posix -from ._common import ENCODING -from ._common import ENCODING_ERRS +from ._common import decode +from ._common import get_procfs_path from ._common import isfile_strict from ._common import memoize from ._common import memoize_when_activated from ._common import NIC_DUPLEX_FULL from ._common import NIC_DUPLEX_HALF from ._common import NIC_DUPLEX_UNKNOWN +from ._common import open_binary +from ._common import open_text from ._common import parse_environ_block from ._common import path_exists_strict from ._common import supports_ipv6 from ._common import usage_percent from ._compat import b from ._compat import basestring -from ._compat import long from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess if sys.version_info >= (3, 4): import enum @@ -71,6 +69,7 @@ __extra__all__ = [ POWER_SUPPLY_PATH = "/sys/class/power_supply" HAS_SMAPS = os.path.exists('/proc/%s/smaps' % os.getpid()) HAS_PRLIMIT = hasattr(cext, "linux_prlimit") +HAS_PROC_IO_PRIORITY = hasattr(cext, "proc_ioprio_get") _DEFAULT = object() # RLIMIT_* constants, not guaranteed to be present on all kernels @@ -158,6 +157,13 @@ TCP_STATUSES = { "0B": _common.CONN_CLOSING } +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + # ===================================================================== # --- named tuples @@ -201,37 +207,6 @@ pio = namedtuple('pio', ['read_count', 'write_count', # ===================================================================== -def open_binary(fname, **kwargs): - return open(fname, "rb", **kwargs) - - -def open_text(fname, **kwargs): - """On 
Python 3 opens a file in text mode by using fs encoding and - a proper en/decoding errors handler. - On Python 2 this is just an alias for open(name, 'rt'). - """ - if PY3: - # See: - # https://github.com/giampaolo/psutil/issues/675 - # https://github.com/giampaolo/psutil/pull/733 - kwargs.setdefault('encoding', ENCODING) - kwargs.setdefault('errors', ENCODING_ERRS) - return open(fname, "rt", **kwargs) - - -if PY3: - def decode(s): - return s.decode(encoding=ENCODING, errors=ENCODING_ERRS) -else: - def decode(s): - return s - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH - - def readlink(path): """Wrapper around os.readlink().""" assert isinstance(path, basestring), path @@ -638,6 +613,17 @@ def cpu_count_logical(): def cpu_count_physical(): """Return the number of physical cores in the system.""" + # Method #1 + core_ids = set() + for path in glob.glob( + "/sys/devices/system/cpu/cpu[0-9]*/topology/core_id"): + with open_binary(path) as f: + core_ids.add(int(f.read())) + result = len(core_ids) + if result != 0: + return result + + # Method #2 mapping = {} current_info = {} with open_binary('%s/cpuinfo' % get_procfs_path()) as f: @@ -657,8 +643,8 @@ def cpu_count_physical(): key, value = line.split(b'\t:', 1) current_info[key] = int(value) - # mimic os.cpu_count() - return sum(mapping.values()) or None + result = sum(mapping.values()) + return result or None # mimic os.cpu_count() def cpu_stats(): @@ -682,30 +668,26 @@ def cpu_stats(): ctx_switches, interrupts, soft_interrupts, syscalls) -if os.path.exists("/sys/devices/system/cpu/cpufreq") or \ +if os.path.exists("/sys/devices/system/cpu/cpufreq/policy0") or \ os.path.exists("/sys/devices/system/cpu/cpu0/cpufreq"): def cpu_freq(): """Return frequency metrics for all CPUs. Contrarily to other OSes, Linux updates these values in real-time. """ - # scaling_* files seem preferable to cpuinfo_*, see: - # http://unix.stackexchange.com/a/87537/168884 - ret = [] - ls = glob.glob("/sys/devices/system/cpu/cpufreq/policy*") - if ls: - # Sort the list so that '10' comes after '2'. This should - # ensure the CPU order is consistent with other CPU functions - # having a 'percpu' argument and returning results for multiple - # CPUs (cpu_times(), cpu_percent(), cpu_times_percent()). - ls.sort(key=lambda x: int(os.path.basename(x)[6:])) - else: - # https://github.com/giampaolo/psutil/issues/981 - ls = glob.glob("/sys/devices/system/cpu/cpu[0-9]*/cpufreq") - ls.sort(key=lambda x: int(re.search('[0-9]+', x).group(0))) + def get_path(num): + for p in ("/sys/devices/system/cpu/cpufreq/policy%s" % num, + "/sys/devices/system/cpu/cpu%s/cpufreq" % num): + if os.path.exists(p): + return p - pjoin = os.path.join - for path in ls: + ret = [] + for n in range(cpu_count_logical()): + path = get_path(n) + if not path: + continue + + pjoin = os.path.join curr = cat(pjoin(path, "scaling_cur_freq"), fallback=None) if curr is None: # Likely an old RedHat, see: @@ -720,6 +702,25 @@ if os.path.exists("/sys/devices/system/cpu/cpufreq") or \ ret.append(_common.scpufreq(curr, min_, max_)) return ret +elif os.path.exists("/proc/cpuinfo"): + def cpu_freq(): + """Alternate implementation using /proc/cpuinfo. + min and max frequencies are not available and are set to None. 
+ """ + ret = [] + with open_binary('%s/cpuinfo' % get_procfs_path()) as f: + for line in f: + if line.lower().startswith(b'cpu mhz'): + key, value = line.split(b'\t:', 1) + ret.append(_common.scpufreq(float(value), 0., 0.)) + return ret + +else: + def cpu_freq(): + """Dummy implementation when none of the above files are present. + """ + return [] + # ===================================================================== # --- network @@ -1062,6 +1063,8 @@ def disk_io_counters(perdisk=False): # ...unless (Linux 2.6) the line refers to a partition instead # of a disk, in which case the line has less fields (7): # "3 1 hda1 8 8 8 8" + # 4.18+ has 4 fields added: + # "3 0 hda 8 8 8 8 8 8 8 8 8 8 8 0 0 0 0" # See: # https://www.kernel.org/doc/Documentation/iostats.txt # https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats @@ -1076,7 +1079,7 @@ def disk_io_counters(perdisk=False): reads = int(fields[2]) (reads_merged, rbytes, rtime, writes, writes_merged, wbytes, wtime, _, busy_time, _) = map(int, fields[4:14]) - elif flen == 14: + elif flen == 14 or flen == 18: # Linux 2.6+, line referring to a disk name = fields[2] (reads, reads_merged, rbytes, rtime, writes, writes_merged, @@ -1142,7 +1145,8 @@ def disk_io_counters(perdisk=False): def disk_partitions(all=False): """Return mounted disk partitions as a list of namedtuples.""" fstypes = set() - with open_text("%s/filesystems" % get_procfs_path()) as f: + procfs_path = get_procfs_path() + with open_text("%s/filesystems" % procfs_path) as f: for line in f: line = line.strip() if not line.startswith("nodev"): @@ -1153,8 +1157,14 @@ def disk_partitions(all=False): if fstype == "zfs": fstypes.add("zfs") + # See: https://github.com/giampaolo/psutil/issues/1307 + if procfs_path == "/proc" and os.path.isfile('/etc/mtab'): + mounts_path = os.path.realpath("/etc/mtab") + else: + mounts_path = os.path.realpath("%s/self/mounts" % procfs_path) + retlist = [] - partitions = cext.disk_partitions() + partitions = cext.disk_partitions(mounts_path) for partition in partitions: device, mountpoint, fstype, opts = partition if device == 'none': @@ -1229,7 +1239,51 @@ def sensors_temperatures(): ret[unit_name].append((label, current, high, critical)) - return ret + # Indication that no sensors were detected in /sys/class/hwmon/ + if not basenames: + basenames = glob.glob('/sys/class/thermal/thermal_zone*') + basenames = sorted(set(basenames)) + + for base in basenames: + try: + path = os.path.join(base, 'temp') + current = float(cat(path)) / 1000.0 + path = os.path.join(base, 'type') + unit_name = cat(path, binary=False) + except (IOError, OSError, ValueError) as err: + warnings.warn("ignoring %r for file %r" % (err, path), + RuntimeWarning) + continue + + trip_paths = glob.glob(base + '/trip_point*') + trip_points = set(['_'.join( + os.path.basename(p).split('_')[0:3]) for p in trip_paths]) + critical = None + high = None + for trip_point in trip_points: + path = os.path.join(base, trip_point + "_type") + trip_type = cat(path, fallback='', binary=False) + if trip_type == 'critical': + critical = cat(os.path.join(base, trip_point + "_temp"), + fallback=None) + elif trip_type == 'high': + high = cat(os.path.join(base, trip_point + "_temp"), + fallback=None) + + if high is not None: + try: + high = float(high) / 1000.0 + except ValueError: + high = None + if critical is not None: + try: + critical = float(critical) / 1000.0 + except ValueError: + critical = None + + ret[unit_name].append(('', current, high, critical)) + + return dict(ret) def 
sensors_fans(): @@ -1477,7 +1531,7 @@ def wrap_exceptions(fun): class Process(object): """Linux process implementation.""" - __slots__ = ["pid", "_name", "_ppid", "_procfs_path"] + __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"] def __init__(self, pid): self.pid = pid @@ -1485,13 +1539,20 @@ class Process(object): self._ppid = None self._procfs_path = get_procfs_path() + def _assert_alive(self): + """Raise NSP if the process disappeared on us.""" + # For those C function who do not raise NSP, possibly returning + # incorrect or incomplete result. + os.stat('%s/%s' % (self._procfs_path, self.pid)) + + @wrap_exceptions @memoize_when_activated def _parse_stat_file(self): - """Parse /proc/{pid}/stat file. Return a list of fields where - process name is in position 0. + """Parse /proc/{pid}/stat file and return a dict with various + process info. Using "man proc" as a reference: where "man proc" refers to - position N, always substract 2 (e.g starttime pos 22 in - 'man proc' == pos 20 in the list returned here). + position N always substract 3 (e.g ppid position 4 in + 'man proc' == position 1 in here). The return value is cached in case oneshot() ctx manager is in use. """ @@ -1502,9 +1563,23 @@ class Process(object): # the first occurrence of "(" and the last occurence of ")". rpar = data.rfind(b')') name = data[data.find(b'(') + 1:rpar] - others = data[rpar + 2:].split() - return [name] + others + fields = data[rpar + 2:].split() + ret = {} + ret['name'] = name + ret['status'] = fields[0] + ret['ppid'] = fields[1] + ret['ttynr'] = fields[4] + ret['utime'] = fields[11] + ret['stime'] = fields[12] + ret['children_utime'] = fields[13] + ret['children_stime'] = fields[14] + ret['create_time'] = fields[19] + ret['cpu_num'] = fields[36] + + return ret + + @wrap_exceptions @memoize_when_activated def _read_status_file(self): """Read /proc/{pid}/stat file and return its content. 
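# Stand-alone illustration of the _parse_stat_file() rewrite above: the
# process name is taken between the first '(' and the *last* ')' because
# the name itself may contain parentheses, and after the split a
# 'man proc' field at position N lands at fields[N - 3]. Sample line is
# fake but has the /proc/<pid>/stat shape.
data = b"42 (my (odd) proc) S 1 42 42 34816 " + b"0 " * 40
rpar = data.rfind(b')')
name = data[data.find(b'(') + 1:rpar]        # b'my (odd) proc'
fields = data[rpar + 2:].split()
info = {'name': name,
        'status': fields[0],                 # 'man proc' field 3
        'ppid': fields[1]}                   # 'man proc' field 4
assert info['ppid'] == b'1'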
@@ -1514,6 +1589,7 @@ class Process(object): with open_binary("%s/%s/status" % (self._procfs_path, self.pid)) as f: return f.read() + @wrap_exceptions @memoize_when_activated def _read_smaps_file(self): with open_binary("%s/%s/smaps" % (self._procfs_path, self.pid), @@ -1521,18 +1597,18 @@ class Process(object): return f.read().strip() def oneshot_enter(self): - self._parse_stat_file.cache_activate() - self._read_status_file.cache_activate() - self._read_smaps_file.cache_activate() + self._parse_stat_file.cache_activate(self) + self._read_status_file.cache_activate(self) + self._read_smaps_file.cache_activate(self) def oneshot_exit(self): - self._parse_stat_file.cache_deactivate() - self._read_status_file.cache_deactivate() - self._read_smaps_file.cache_deactivate() + self._parse_stat_file.cache_deactivate(self) + self._read_status_file.cache_deactivate(self) + self._read_smaps_file.cache_deactivate(self) @wrap_exceptions def name(self): - name = self._parse_stat_file()[0] + name = self._parse_stat_file()['name'] if PY3: name = decode(name) # XXX - gets changed later and probably needs refactoring @@ -1574,7 +1650,7 @@ class Process(object): sep = '\x00' if data.endswith('\x00') else ' ' if data.endswith(sep): data = data[:-1] - return [x for x in data.split(sep)] + return data.split(sep) @wrap_exceptions def environ(self): @@ -1584,13 +1660,14 @@ class Process(object): @wrap_exceptions def terminal(self): - tty_nr = int(self._parse_stat_file()[5]) + tty_nr = int(self._parse_stat_file()['ttynr']) tmap = _psposix.get_terminal_map() try: return tmap[tty_nr] except KeyError: return None + # May not be available on old kernels. if os.path.exists('/proc/%s/io' % os.getpid()): @wrap_exceptions def io_counters(self): @@ -1601,36 +1678,41 @@ class Process(object): # https://github.com/giampaolo/psutil/issues/1004 line = line.strip() if line: - name, value = line.split(b': ') - fields[name] = int(value) + try: + name, value = line.split(b': ') + except ValueError: + # https://github.com/giampaolo/psutil/issues/1004 + continue + else: + fields[name] = int(value) if not fields: raise RuntimeError("%s file was empty" % fname) - return pio( - fields[b'syscr'], # read syscalls - fields[b'syscw'], # write syscalls - fields[b'read_bytes'], # read bytes - fields[b'write_bytes'], # write bytes - fields[b'rchar'], # read chars - fields[b'wchar'], # write chars - ) - else: - def io_counters(self): - raise NotImplementedError("couldn't find /proc/%s/io (kernel " - "too old?)" % self.pid) + try: + return pio( + fields[b'syscr'], # read syscalls + fields[b'syscw'], # write syscalls + fields[b'read_bytes'], # read bytes + fields[b'write_bytes'], # write bytes + fields[b'rchar'], # read chars + fields[b'wchar'], # write chars + ) + except KeyError as err: + raise ValueError("%r field was not found in %s; found fields " + "are %r" % (err[0], fname, fields)) @wrap_exceptions def cpu_times(self): values = self._parse_stat_file() - utime = float(values[12]) / CLOCK_TICKS - stime = float(values[13]) / CLOCK_TICKS - children_utime = float(values[14]) / CLOCK_TICKS - children_stime = float(values[15]) / CLOCK_TICKS + utime = float(values['utime']) / CLOCK_TICKS + stime = float(values['stime']) / CLOCK_TICKS + children_utime = float(values['children_utime']) / CLOCK_TICKS + children_stime = float(values['children_stime']) / CLOCK_TICKS return _common.pcputimes(utime, stime, children_utime, children_stime) @wrap_exceptions def cpu_num(self): """What CPU the process is on.""" - return int(self._parse_stat_file()[37]) + 
return int(self._parse_stat_file()['cpu_num']) @wrap_exceptions def wait(self, timeout=None): @@ -1638,14 +1720,14 @@ class Process(object): @wrap_exceptions def create_time(self): - values = self._parse_stat_file() + ctime = float(self._parse_stat_file()['create_time']) # According to documentation, starttime is in field 21 and the # unit is jiffies (clock ticks). # We first divide it for clock ticks and then add uptime returning # seconds since the epoch, in UTC. # Also use cached value if available. bt = BOOT_TIME or boot_time() - return (float(values[20]) / CLOCK_TICKS) + bt + return (ctime / CLOCK_TICKS) + bt @wrap_exceptions def memory_info(self): @@ -1707,6 +1789,9 @@ class Process(object): """Return process's mapped memory regions as a list of named tuples. Fields are explained in 'man proc'; here is an updated (Apr 2012) version: http://goo.gl/fmebo + + /proc/{PID}/smaps does not exist on kernels < 2.6.14 or if + CONFIG_MMU kernel configuration option is not enabled. """ def get_blocks(lines, current_block): data = {} @@ -1767,13 +1852,6 @@ class Process(object): )) return ls - else: # pragma: no cover - def memory_maps(self): - raise NotImplementedError( - "/proc/%s/smaps does not exist on kernels < 2.6.14 or " - "if CONFIG_MMU kernel configuration option is not " - "enabled." % self.pid) - @wrap_exceptions def cwd(self): try: @@ -1836,8 +1914,7 @@ class Process(object): ntuple = _common.pthread(int(thread_id), utime, stime) retlist.append(ntuple) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions @@ -1887,7 +1964,7 @@ class Process(object): raise # only starting from kernel 2.6.13 - if hasattr(cext, "proc_ioprio_get"): + if HAS_PROC_IO_PRIORITY: @wrap_exceptions def ionice_get(self): @@ -1898,38 +1975,16 @@ class Process(object): @wrap_exceptions def ionice_set(self, ioclass, value): - if value is not None: - if not PY3 and not isinstance(value, (int, long)): - msg = "value argument is not an integer (gor %r)" % value - raise TypeError(msg) - if not 0 <= value <= 7: - raise ValueError( - "value argument range expected is between 0 and 7") - - if ioclass in (IOPRIO_CLASS_NONE, None): - if value: - msg = "can't specify value with IOPRIO_CLASS_NONE " \ - "(got %r)" % value - raise ValueError(msg) - ioclass = IOPRIO_CLASS_NONE + if value is None: value = 0 - elif ioclass == IOPRIO_CLASS_IDLE: - if value: - msg = "can't specify value with IOPRIO_CLASS_IDLE " \ - "(got %r)" % value - raise ValueError(msg) - value = 0 - elif ioclass in (IOPRIO_CLASS_RT, IOPRIO_CLASS_BE): - if value is None: - # TODO: add comment explaining why this is 4 (?) - value = 4 - else: - # otherwise we would get OSError(EVINAL) - raise ValueError("invalid ioclass argument %r" % ioclass) - + if value and ioclass in (IOPRIO_CLASS_IDLE, IOPRIO_CLASS_NONE): + raise ValueError("%r ioclass accepts no value" % ioclass) + if value < 0 or value > 7: + raise ValueError("value not in 0-7 range") return cext.proc_ioprio_set(self.pid, ioclass, value) if HAS_PRLIMIT: + @wrap_exceptions def rlimit(self, resource, limits=None): # If pid is 0 prlimit() applies to the calling process and @@ -1959,7 +2014,7 @@ class Process(object): @wrap_exceptions def status(self): - letter = self._parse_stat_file()[1] + letter = self._parse_stat_file()['status'] if PY3: letter = letter.decode() # XXX is '?' legit? 
(we're not supposed to return it anyway) @@ -1999,9 +2054,8 @@ class Process(object): flags = int(f.readline().split()[1], 8) except IOError as err: if err.errno == errno.ENOENT: - # fd gone in the meantime; does not - # necessarily mean the process disappeared - # on us. + # fd gone in the meantime; process may + # still be alive hit_enoent = True else: raise @@ -2011,15 +2065,13 @@ class Process(object): path, int(fd), int(pos), mode, flags) retlist.append(ntuple) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions def connections(self, kind='inet'): ret = _connections.retrieve(kind, self.pid) - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return ret @wrap_exceptions @@ -2028,7 +2080,7 @@ class Process(object): @wrap_exceptions def ppid(self): - return int(self._parse_stat_file()[2]) + return int(self._parse_stat_file()['ppid']) @wrap_exceptions def uids(self, _uids_re=re.compile(br'Uid:\t(\d+)\t(\d+)\t(\d+)')): diff --git a/server/www/packages/packages-darwin/x64/psutil/_psosx.py b/server/www/packages/packages-darwin/x64/psutil/_psosx.py index fbfedf3..7459a0f 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_psosx.py +++ b/server/www/packages/packages-darwin/x64/psutil/_psosx.py @@ -23,9 +23,6 @@ from ._common import parse_environ_block from ._common import sockfam_to_enum from ._common import socktype_to_enum from ._common import usage_percent -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = [] @@ -87,6 +84,13 @@ pidtaskinfo_map = dict( volctxsw=7, ) +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + # ===================================================================== # --- named tuples @@ -103,13 +107,6 @@ svmem = namedtuple( pmem = namedtuple('pmem', ['rss', 'vms', 'pfaults', 'pageins']) # psutil.Process.memory_full_info() pfullmem = namedtuple('pfullmem', pmem._fields + ('uss', )) -# psutil.Process.memory_maps(grouped=True) -pmmap_grouped = namedtuple( - 'pmmap_grouped', - 'path rss private swapped dirtied ref_count shadow_depth') -# psutil.Process.memory_maps(grouped=False) -pmmap_ext = namedtuple( - 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) # ===================================================================== @@ -119,9 +116,16 @@ pmmap_ext = namedtuple( def virtual_memory(): """System virtual memory as a namedtuple.""" - total, active, inactive, wired, free = cext.virtual_mem() + total, active, inactive, wired, free, speculative = cext.virtual_mem() + # This is how Zabbix calculate avail and used mem: + # https://github.com/zabbix/zabbix/blob/trunk/src/libs/zbxsysinfo/ + # osx/memory.c + # Also see: https://github.com/giampaolo/psutil/issues/1277 avail = inactive + free - used = active + inactive + wired + used = active + wired + # This is NOT how Zabbix calculates free mem but it matches "free" + # cmdline utility. 
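# Worked example of the macOS virtual_memory() arithmetic above, with
# made-up byte counts (the next added line subtracts the speculative
# pages so "free" matches the free(1)-style tools):
total, active, inactive, wired, free, speculative = (
    17179869184, 4294967296, 2147483648, 1073741824,
    8589934592, 536870912)
avail = inactive + free                       # 10737418240
used = active + wired                         # 5368709120
free -= speculative                           # 8053063680
percent = round((total - avail) * 100.0 / total, 1)   # 37.5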
+ free -= speculative percent = usage_percent((total - avail), total, round_=1) return svmem(total, avail, percent, used, free, active, inactive, wired) @@ -373,13 +377,14 @@ def catch_zombie(proc): class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid self._name = None self._ppid = None + @wrap_exceptions @memoize_when_activated def _get_kinfo_proc(self): # Note: should work with all PIDs without permission issues. @@ -387,6 +392,7 @@ class Process(object): assert len(ret) == len(kinfo_proc_map) return ret + @wrap_exceptions @memoize_when_activated def _get_pidtaskinfo(self): # Note: should work for PIDs owned by user only. @@ -396,12 +402,12 @@ class Process(object): return ret def oneshot_enter(self): - self._get_kinfo_proc.cache_activate() - self._get_pidtaskinfo.cache_activate() + self._get_kinfo_proc.cache_activate(self) + self._get_pidtaskinfo.cache_activate(self) def oneshot_exit(self): - self._get_kinfo_proc.cache_deactivate() - self._get_pidtaskinfo.cache_deactivate() + self._get_kinfo_proc.cache_deactivate(self) + self._get_pidtaskinfo.cache_deactivate(self) @wrap_exceptions def name(self): @@ -570,7 +576,3 @@ class Process(object): ntuple = _common.pthread(thread_id, utime, stime) retlist.append(ntuple) return retlist - - @wrap_exceptions - def memory_maps(self): - return cext.proc_memory_maps(self.pid) diff --git a/server/www/packages/packages-darwin/x64/psutil/_psposix.py b/server/www/packages/packages-darwin/x64/psutil/_psposix.py index 9c3fac2..d362143 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_psposix.py +++ b/server/www/packages/packages-darwin/x64/psutil/_psposix.py @@ -15,12 +15,16 @@ from ._common import sdiskusage from ._common import usage_percent from ._compat import PY3 from ._compat import unicode -from ._exceptions import TimeoutExpired __all__ = ['pid_exists', 'wait_pid', 'disk_usage', 'get_terminal_map'] +# This object gets set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +TimeoutExpired = None + + def pid_exists(pid): """Check whether pid exists in the current process table.""" if pid == 0: diff --git a/server/www/packages/packages-darwin/x64/psutil/_pssunos.py b/server/www/packages/packages-darwin/x64/psutil/_pssunos.py index e2f33a3..6d7fda8 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_pssunos.py +++ b/server/www/packages/packages-darwin/x64/psutil/_pssunos.py @@ -5,6 +5,7 @@ """Sun OS Solaris platform implementation.""" import errno +import functools import os import socket import subprocess @@ -17,6 +18,7 @@ from . import _psposix from . import _psutil_posix as cext_posix from . 
import _psutil_sunos as cext from ._common import AF_INET6 +from ._common import get_procfs_path from ._common import isfile_strict from ._common import memoize_when_activated from ._common import sockfam_to_enum @@ -24,9 +26,6 @@ from ._common import socktype_to_enum from ._common import usage_percent from ._compat import b from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = ["CONN_IDLE", "CONN_BOUND", "PROCFS_PATH"] @@ -85,6 +84,13 @@ proc_info_map = dict( gid=10, egid=11) +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + # ===================================================================== # --- named tuples @@ -109,16 +115,6 @@ pmmap_ext = namedtuple( 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) -# ===================================================================== -# --- utils -# ===================================================================== - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH - - # ===================================================================== # --- memory # ===================================================================== @@ -341,7 +337,7 @@ def wrap_exceptions(fun): """Call callable into a try/except clause and translate ENOENT, EACCES and EPERM in NoSuchProcess or AccessDenied exceptions. """ - + @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) @@ -368,7 +364,7 @@ def wrap_exceptions(fun): class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid", "_procfs_path"] + __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"] def __init__(self, pid): self.pid = pid @@ -376,32 +372,38 @@ class Process(object): self._ppid = None self._procfs_path = get_procfs_path() + def _assert_alive(self): + """Raise NSP if the process disappeared on us.""" + # For those C function who do not raise NSP, possibly returning + # incorrect or incomplete result. 
+ os.stat('%s/%s' % (self._procfs_path, self.pid)) + def oneshot_enter(self): - self._proc_name_and_args.cache_activate() - self._proc_basic_info.cache_activate() - self._proc_cred.cache_activate() + self._proc_name_and_args.cache_activate(self) + self._proc_basic_info.cache_activate(self) + self._proc_cred.cache_activate(self) def oneshot_exit(self): - self._proc_name_and_args.cache_deactivate() - self._proc_basic_info.cache_deactivate() - self._proc_cred.cache_deactivate() + self._proc_name_and_args.cache_deactivate(self) + self._proc_basic_info.cache_deactivate(self) + self._proc_cred.cache_deactivate(self) + @wrap_exceptions @memoize_when_activated def _proc_name_and_args(self): return cext.proc_name_and_args(self.pid, self._procfs_path) + @wrap_exceptions @memoize_when_activated def _proc_basic_info(self): ret = cext.proc_basic_info(self.pid, self._procfs_path) assert len(ret) == len(proc_info_map) return ret + @wrap_exceptions @memoize_when_activated def _proc_cred(self): - @wrap_exceptions - def proc_cred(self): - return cext.proc_cred(self.pid, self._procfs_path) - return proc_cred(self) + return cext.proc_cred(self.pid, self._procfs_path) @wrap_exceptions def name(self): @@ -518,8 +520,7 @@ class Process(object): continue raise if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() @wrap_exceptions def cwd(self): @@ -581,8 +582,7 @@ class Process(object): nt = _common.pthread(tid, utime, stime) ret.append(nt) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() return ret @wrap_exceptions @@ -606,8 +606,7 @@ class Process(object): if isfile_strict(file): retlist.append(_common.popenfile(file, int(fd))) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() return retlist def _get_unix_sockets(self, pid): @@ -707,8 +706,7 @@ class Process(object): raise retlist.append((addr, perm, name, rss, anon, locked)) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions diff --git a/server/www/packages/packages-darwin/x64/psutil/_psutil_osx.cpython-37m-darwin.so b/server/www/packages/packages-darwin/x64/psutil/_psutil_osx.cpython-37m-darwin.so index 9d038ca..7eec7af 100755 Binary files a/server/www/packages/packages-darwin/x64/psutil/_psutil_osx.cpython-37m-darwin.so and b/server/www/packages/packages-darwin/x64/psutil/_psutil_osx.cpython-37m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/psutil/_psutil_posix.cpython-37m-darwin.so b/server/www/packages/packages-darwin/x64/psutil/_psutil_posix.cpython-37m-darwin.so index ccc4304..3f69a2f 100755 Binary files a/server/www/packages/packages-darwin/x64/psutil/_psutil_posix.cpython-37m-darwin.so and b/server/www/packages/packages-darwin/x64/psutil/_psutil_posix.cpython-37m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/psutil/_pswindows.py b/server/www/packages/packages-darwin/x64/psutil/_pswindows.py index 18651d6..3f13198 100644 --- a/server/www/packages/packages-darwin/x64/psutil/_pswindows.py +++ b/server/www/packages/packages-darwin/x64/psutil/_pswindows.py @@ -27,8 +27,7 @@ except ImportError as err: # but if we get here it means this this was a wheel (or exe). 
msg = "this Windows version is too old (< Windows Vista); " msg += "psutil 3.4.2 is the latest version which supports Windows " - msg += "2000, XP and 2003 server; it may be possible that psutil " - msg += "will work if compiled from sources though" + msg += "2000, XP and 2003 server" raise RuntimeError(msg) else: raise @@ -37,6 +36,7 @@ from ._common import conn_tmap from ._common import ENCODING from ._common import ENCODING_ERRS from ._common import isfile_strict +from ._common import memoize from ._common import memoize_when_activated from ._common import parse_environ_block from ._common import sockfam_to_enum @@ -47,9 +47,6 @@ from ._compat import lru_cache from ._compat import PY3 from ._compat import unicode from ._compat import xrange -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import TimeoutExpired from ._psutil_windows import ABOVE_NORMAL_PRIORITY_CLASS from ._psutil_windows import BELOW_NORMAL_PRIORITY_CLASS from ._psutil_windows import HIGH_PRIORITY_CLASS @@ -66,11 +63,14 @@ else: # http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx __extra__all__ = [ "win_service_iter", "win_service_get", + # Process priority "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS", - "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", - "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS", - "CONN_DELETE_TCB", - "AF_LINK", + "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", "NORMAL_PRIORITY_CLASS", + "REALTIME_PRIORITY_CLASS", + # IO priority + "IOPRIO_VERYLOW", "IOPRIO_LOW", "IOPRIO_NORMAL", "IOPRIO_HIGH", + # others + "CONN_DELETE_TCB", "AF_LINK", ] @@ -79,10 +79,8 @@ __extra__all__ = [ # ===================================================================== CONN_DELETE_TCB = "DELETE_TCB" -ACCESS_DENIED_ERRSET = frozenset([errno.EPERM, errno.EACCES, - cext.ERROR_ACCESS_DENIED]) -NO_SUCH_SERVICE_ERRSET = frozenset([cext.ERROR_INVALID_NAME, - cext.ERROR_SERVICE_DOES_NOT_EXIST]) +HAS_PROC_IO_PRIORITY = hasattr(cext, "proc_io_priority_get") +HAS_GETLOADAVG = hasattr(cext, "getloadavg") if enum is None: @@ -118,6 +116,19 @@ if enum is not None: globals().update(Priority.__members__) +if enum is None: + IOPRIO_VERYLOW = 0 + IOPRIO_LOW = 1 + IOPRIO_NORMAL = 2 + IOPRIO_HIGH = 3 +else: + class IOPriority(enum.IntEnum): + IOPRIO_VERYLOW = 0 + IOPRIO_LOW = 1 + IOPRIO_NORMAL = 2 + IOPRIO_HIGH = 3 + globals().update(IOPriority.__members__) + pinfo_map = dict( num_handles=0, ctx_switches=1, @@ -143,6 +154,35 @@ pinfo_map = dict( mem_private=21, ) +# These objects get set on "import psutil" from the __init__.py +# file, see: https://github.com/giampaolo/psutil/issues/1402 +NoSuchProcess = None +ZombieProcess = None +AccessDenied = None +TimeoutExpired = None + +# More values at: https://stackoverflow.com/a/20804735/376587 +WIN_10 = (10, 0) +WIN_8 = (6, 2) +WIN_7 = (6, 1) +WIN_SERVER_2008 = (6, 0) +WIN_VISTA = (6, 0) +WIN_SERVER_2003 = (5, 2) +WIN_XP = (5, 1) + + +@lru_cache() +def get_winver(): + """Usage: + >>> if get_winver() <= WIN_VISTA: + ... ... 
+ """ + wv = sys.getwindowsversion() + return (wv.major, wv.minor) + + +IS_WIN_XP = get_winver() < WIN_VISTA + # ===================================================================== # --- named tuples @@ -203,6 +243,11 @@ def py2_strencode(s): return s.encode(ENCODING, ENCODING_ERRS) +@memoize +def getpagesize(): + return cext.getpagesize() + + # ===================================================================== # --- memory # ===================================================================== @@ -309,6 +354,23 @@ def cpu_freq(): return [_common.scpufreq(float(curr), min_, float(max_))] +if HAS_GETLOADAVG: + _loadavg_inititialized = False + + def getloadavg(): + """Return the number of processes in the system run queue averaged + over the last 1, 5, and 15 minutes respectively as a tuple""" + global _loadavg_inititialized + + if not _loadavg_inititialized: + cext.init_loadavg_counter() + _loadavg_inititialized = True + + # Drop to 2 decimal points which is what Linux does + raw_loads = cext.getloadavg() + return tuple([round(load, 2) for load in raw_loads]) + + # ===================================================================== # --- network # ===================================================================== @@ -501,14 +563,14 @@ class WindowsService(object): """ try: yield - except WindowsError as err: - if err.errno in ACCESS_DENIED_ERRSET: + except OSError as err: + if is_permission_err(err): raise AccessDenied( pid=None, name=self._name, msg="service %r is not querable (not enough privileges)" % self._name) - elif err.errno in NO_SUCH_SERVICE_ERRSET or \ - err.winerror in NO_SUCH_SERVICE_ERRSET: + elif err.winerror in (cext.ERROR_INVALID_NAME, + cext.ERROR_SERVICE_DOES_NOT_EXIST): raise NoSuchProcess( pid=None, name=self._name, msg="service %r does not exist)" % self._name) @@ -625,27 +687,42 @@ pid_exists = cext.pid_exists ppid_map = cext.ppid_map # used internally by Process.children() +def is_permission_err(exc): + """Return True if this is a permission error.""" + assert isinstance(exc, OSError), exc + # On Python 2 OSError doesn't always have 'winerror'. Sometimes + # it does, in which case the original exception was WindowsError + # (which is a subclass of OSError). + return exc.errno in (errno.EPERM, errno.EACCES) or \ + getattr(exc, "winerror", -1) in (cext.ERROR_ACCESS_DENIED, + cext.ERROR_PRIVILEGE_NOT_HELD) + + +def convert_oserror(exc, pid=None, name=None): + """Convert OSError into NoSuchProcess or AccessDenied.""" + assert isinstance(exc, OSError), exc + if is_permission_err(exc): + return AccessDenied(pid=pid, name=name) + if exc.errno == errno.ESRCH: + return NoSuchProcess(pid=pid, name=name) + raise exc + + def wrap_exceptions(fun): - """Decorator which translates bare OSError and WindowsError - exceptions into NoSuchProcess and AccessDenied. 
- """ + """Decorator which converts OSError into NoSuchProcess or AccessDenied.""" @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - raise AccessDenied(self.pid, self._name) - if err.errno == errno.ESRCH: - raise NoSuchProcess(self.pid, self._name) - raise + raise convert_oserror(err, pid=self.pid, name=self._name) return wrapper class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid @@ -655,11 +732,12 @@ class Process(object): # --- oneshot() stuff def oneshot_enter(self): - self.oneshot_info.cache_activate() + self.oneshot_info.cache_activate(self) def oneshot_exit(self): - self.oneshot_info.cache_deactivate() + self.oneshot_info.cache_deactivate(self) + @wrap_exceptions @memoize_when_activated def oneshot_info(self): """Return multiple information about this process as a @@ -690,19 +768,33 @@ class Process(object): @wrap_exceptions def exe(self): - # Note: os.path.exists(path) may return False even if the file - # is there, see: - # http://stackoverflow.com/questions/3112546/os-path-exists-lies - - # see https://github.com/giampaolo/psutil/issues/414 - # see https://github.com/giampaolo/psutil/issues/528 - if self.pid in (0, 4): - raise AccessDenied(self.pid, self._name) - return py2_strencode(convert_dos_path(cext.proc_exe(self.pid))) + # Dual implementation, see: + # https://github.com/giampaolo/psutil/pull/1413 + if not IS_WIN_XP: + exe = cext.proc_exe(self.pid) + else: + if self.pid in (0, 4): + # https://github.com/giampaolo/psutil/issues/414 + # https://github.com/giampaolo/psutil/issues/528 + raise AccessDenied(self.pid, self._name) + exe = cext.proc_exe(self.pid) + exe = convert_dos_path(exe) + return py2_strencode(exe) @wrap_exceptions def cmdline(self): - ret = cext.proc_cmdline(self.pid) + if cext.WINVER >= cext.WINDOWS_8_1: + # PEB method detects cmdline changes but requires more + # privileges: https://github.com/giampaolo/psutil/pull/1398 + try: + ret = cext.proc_cmdline(self.pid, use_peb=True) + except OSError as err: + if is_permission_err(err): + ret = cext.proc_cmdline(self.pid, use_peb=False) + else: + raise + else: + ret = cext.proc_cmdline(self.pid, use_peb=True) if PY3: return ret else: @@ -725,7 +817,7 @@ class Process(object): try: return cext.proc_memory_info(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: + if is_permission_err(err): # TODO: the C ext can probably be refactored in order # to get this from cext.proc_info() info = self.oneshot_info() @@ -757,6 +849,7 @@ class Process(object): def memory_full_info(self): basic_mem = self.memory_info() uss = cext.proc_memory_uss(self.pid) + uss *= getpagesize() return pfullmem(*basic_mem + (uss, )) def memory_maps(self): @@ -765,11 +858,7 @@ class Process(object): except OSError as err: # XXX - can't use wrap_exceptions decorator as we're # returning a generator; probably needs refactoring. 
- if err.errno in ACCESS_DENIED_ERRSET: - raise AccessDenied(self.pid, self._name) - if err.errno == errno.ESRCH: - raise NoSuchProcess(self.pid, self._name) - raise + raise convert_oserror(err, self.pid, self._name) else: for addr, perm, path, rss in raw: path = convert_dos_path(path) @@ -845,7 +934,7 @@ class Process(object): try: return cext.proc_create_time(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: + if is_permission_err(err): return self.oneshot_info()[pinfo_map['create_time']] raise @@ -867,22 +956,21 @@ class Process(object): try: user, system = cext.proc_cpu_times(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - info = self.oneshot_info() - user = info[pinfo_map['user_time']] - system = info[pinfo_map['kernel_time']] - else: + if not is_permission_err(err): raise + info = self.oneshot_info() + user = info[pinfo_map['user_time']] + system = info[pinfo_map['kernel_time']] # Children user/system times are not retrievable (set to 0). return _common.pcputimes(user, system, 0.0, 0.0) @wrap_exceptions def suspend(self): - return cext.proc_suspend(self.pid) + cext.proc_suspend_or_resume(self.pid, True) @wrap_exceptions def resume(self): - return cext.proc_resume(self.pid) + cext.proc_suspend_or_resume(self.pid, False) @wrap_exceptions def cwd(self): @@ -928,38 +1016,39 @@ class Process(object): return cext.proc_priority_set(self.pid, value) # available on Windows >= Vista - if hasattr(cext, "proc_io_priority_get"): + if HAS_PROC_IO_PRIORITY: @wrap_exceptions def ionice_get(self): - return cext.proc_io_priority_get(self.pid) + ret = cext.proc_io_priority_get(self.pid) + if enum is not None: + ret = IOPriority(ret) + return ret @wrap_exceptions - def ionice_set(self, value, _): - if _: - raise TypeError("set_proc_ionice() on Windows takes only " - "1 argument (2 given)") - if value not in (2, 1, 0): - raise ValueError("value must be 2 (normal), 1 (low) or 0 " - "(very low); got %r" % value) - return cext.proc_io_priority_set(self.pid, value) + def ionice_set(self, ioclass, value): + if value: + raise TypeError("value argument not accepted on Windows") + if ioclass not in (IOPRIO_VERYLOW, IOPRIO_LOW, IOPRIO_NORMAL, + IOPRIO_HIGH): + raise ValueError("%s is not a valid priority" % ioclass) + cext.proc_io_priority_set(self.pid, ioclass) @wrap_exceptions def io_counters(self): try: ret = cext.proc_io_counters(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - info = self.oneshot_info() - ret = ( - info[pinfo_map['io_rcount']], - info[pinfo_map['io_wcount']], - info[pinfo_map['io_rbytes']], - info[pinfo_map['io_wbytes']], - info[pinfo_map['io_count_others']], - info[pinfo_map['io_bytes_others']], - ) - else: + if not is_permission_err(err): raise + info = self.oneshot_info() + ret = ( + info[pinfo_map['io_rcount']], + info[pinfo_map['io_wcount']], + info[pinfo_map['io_rbytes']], + info[pinfo_map['io_wbytes']], + info[pinfo_map['io_count_others']], + info[pinfo_map['io_bytes_others']], + ) return pio(*ret) @wrap_exceptions @@ -1007,7 +1096,7 @@ class Process(object): try: return cext.proc_num_handles(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: + if is_permission_err(err): return self.oneshot_info()[pinfo_map['num_handles']] raise diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/__init__.py b/server/www/packages/packages-darwin/x64/psutil/tests/__init__.py deleted file mode 100644 index a483eca..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/__init__.py 
+++ /dev/null @@ -1,1241 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Test utilities. -""" - -from __future__ import print_function - -import atexit -import contextlib -import ctypes -import errno -import functools -import os -import random -import re -import select -import shutil -import socket -import stat -import subprocess -import sys -import tempfile -import textwrap -import threading -import time -import traceback -import warnings -from socket import AF_INET -from socket import AF_INET6 -from socket import SOCK_DGRAM -from socket import SOCK_STREAM - -import psutil -from psutil import MACOS -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._common import supports_ipv6 -from psutil._compat import PY3 -from psutil._compat import u -from psutil._compat import unicode -from psutil._compat import which - -if sys.version_info < (2, 7): - import unittest2 as unittest # requires "pip install unittest2" -else: - import unittest - -try: - from unittest import mock # py3 -except ImportError: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - import mock # NOQA - requires "pip install mock" - -if sys.version_info >= (3, 4): - import enum -else: - enum = None - - -__all__ = [ - # constants - 'APPVEYOR', 'DEVNULL', 'GLOBAL_TIMEOUT', 'MEMORY_TOLERANCE', 'NO_RETRIES', - 'PYPY', 'PYTHON_EXE', 'ROOT_DIR', 'SCRIPTS_DIR', 'TESTFILE_PREFIX', - 'TESTFN', 'TESTFN_UNICODE', 'TOX', 'TRAVIS', 'VALID_PROC_STATUSES', - 'VERBOSITY', - "HAS_CPU_AFFINITY", "HAS_CPU_FREQ", "HAS_ENVIRON", "HAS_PROC_IO_COUNTERS", - "HAS_IONICE", "HAS_MEMORY_MAPS", "HAS_PROC_CPU_NUM", "HAS_RLIMIT", - "HAS_SENSORS_BATTERY", "HAS_BATTERY", "HAS_SENSORS_FANS", - "HAS_SENSORS_TEMPERATURES", "HAS_MEMORY_FULL_INFO", - # subprocesses - 'pyrun', 'reap_children', 'get_test_subprocess', 'create_zombie_proc', - 'create_proc_children_pair', - # threads - 'ThreadTask' - # test utils - 'unittest', 'skip_on_access_denied', 'skip_on_not_implemented', - 'retry_before_failing', 'run_test_module_by_name', 'get_suite', - 'run_suite', - # install utils - 'install_pip', 'install_test_deps', - # fs utils - 'chdir', 'safe_rmpath', 'create_exe', 'decode_path', 'encode_path', - 'unique_filename', - # os - 'get_winver', 'get_kernel_version', - # sync primitives - 'call_until', 'wait_for_pid', 'wait_for_file', - # network - 'check_connection_ntuple', 'check_net_address', - 'get_free_port', 'unix_socket_path', 'bind_socket', 'bind_unix_socket', - 'tcp_socketpair', 'unix_socketpair', 'create_sockets', - # compat - 'reload_module', 'import_module_by_path', - # others - 'warn', 'copyload_shared_lib', 'is_namedtuple', -] - - -# =================================================================== -# --- constants -# =================================================================== - -# --- platforms - -TOX = os.getenv('TOX') or '' in ('1', 'true') -PYPY = '__pypy__' in sys.builtin_module_names -WIN_VISTA = (6, 0, 0) if WINDOWS else None -# whether we're running this test suite on Travis (https://travis-ci.org/) -TRAVIS = bool(os.environ.get('TRAVIS')) -# whether we're running this test suite on Appveyor for Windows -# (http://www.appveyor.com/) -APPVEYOR = bool(os.environ.get('APPVEYOR')) - -# --- configurable defaults - -# how many times retry_before_failing() decorator will retry -NO_RETRIES = 10 -# bytes tolerance for system-wide memory 
related tests -MEMORY_TOLERANCE = 500 * 1024 # 500KB -# the timeout used in functions which have to wait -GLOBAL_TIMEOUT = 3 if TRAVIS or APPVEYOR else 0.5 -# test output verbosity -VERBOSITY = 1 if os.getenv('SILENT') or TOX else 2 -# be more tolerant if we're on travis / appveyor in order to avoid -# false positives -if TRAVIS or APPVEYOR: - NO_RETRIES *= 3 - GLOBAL_TIMEOUT *= 3 - -# --- files - -TESTFILE_PREFIX = '$testfn' -if os.name == 'java': - # Jython disallows @ in module names - TESTFILE_PREFIX = '$psutil-test-' -else: - TESTFILE_PREFIX = '@psutil-test-' -TESTFN = os.path.join(os.path.realpath(os.getcwd()), TESTFILE_PREFIX) -# Disambiguate TESTFN for parallel testing, while letting it remain a valid -# module name. -TESTFN = TESTFN + str(os.getpid()) - -_TESTFN = TESTFN + '-internal' -TESTFN_UNICODE = TESTFN + u("-ƒőő") -ASCII_FS = sys.getfilesystemencoding().lower() in ('ascii', 'us-ascii') - -# --- paths - -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) -SCRIPTS_DIR = os.path.join(ROOT_DIR, 'scripts') -HERE = os.path.abspath(os.path.dirname(__file__)) - -# --- support - -HAS_CPU_AFFINITY = hasattr(psutil.Process, "cpu_affinity") -HAS_CPU_FREQ = hasattr(psutil, "cpu_freq") -HAS_CONNECTIONS_UNIX = POSIX and not SUNOS -HAS_ENVIRON = hasattr(psutil.Process, "environ") -HAS_PROC_IO_COUNTERS = hasattr(psutil.Process, "io_counters") -HAS_IONICE = hasattr(psutil.Process, "ionice") -HAS_MEMORY_FULL_INFO = 'uss' in psutil.Process().memory_full_info()._fields -HAS_MEMORY_MAPS = hasattr(psutil.Process, "memory_maps") -HAS_PROC_CPU_NUM = hasattr(psutil.Process, "cpu_num") -HAS_RLIMIT = hasattr(psutil.Process, "rlimit") -HAS_THREADS = hasattr(psutil.Process, "threads") -HAS_SENSORS_BATTERY = hasattr(psutil, "sensors_battery") -HAS_BATTERY = HAS_SENSORS_BATTERY and bool(psutil.sensors_battery()) -HAS_SENSORS_FANS = hasattr(psutil, "sensors_fans") -HAS_SENSORS_TEMPERATURES = hasattr(psutil, "sensors_temperatures") - -# --- misc - - -def _get_py_exe(): - def attempt(exe): - try: - subprocess.check_call( - [exe, "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except Exception: - return None - else: - return exe - - if MACOS: - exe = \ - attempt(sys.executable) or \ - attempt(os.path.realpath(sys.executable)) or \ - attempt(which("python%s.%s" % sys.version_info[:2])) or \ - attempt(psutil.Process().exe()) - if not exe: - raise ValueError("can't find python exe real abspath") - return exe - else: - exe = os.path.realpath(sys.executable) - assert os.path.exists(exe), exe - return exe - - -PYTHON_EXE = _get_py_exe() -DEVNULL = open(os.devnull, 'r+') -VALID_PROC_STATUSES = [getattr(psutil, x) for x in dir(psutil) - if x.startswith('STATUS_')] -AF_UNIX = getattr(socket, "AF_UNIX", object()) -SOCK_SEQPACKET = getattr(socket, "SOCK_SEQPACKET", object()) - -_subprocesses_started = set() -_pids_started = set() -_testfiles_created = set() - - -def logstderr(s): - print(s, file=sys.stderr) - - -@atexit.register -def cleanup_test_files(): - logstderr("executing cleanup_test_files() atexit function") - DEVNULL.close() - for name in os.listdir(u('.')): - if isinstance(name, unicode): - prefix = u(TESTFILE_PREFIX) - else: - prefix = TESTFILE_PREFIX - if name.startswith(prefix): - logstderr("removing temporary test file %r" % name) - try: - safe_rmpath(name) - except Exception: - traceback.print_exc() - for path in _testfiles_created: - logstderr("removing temporary test file %r" % path) - try: - safe_rmpath(path) - except Exception: - traceback.print_exc() - - -# this 
is executed first
-@atexit.register
-def cleanup_test_procs():
-    logstderr("executing cleanup_test_procs() atexit function")
-    reap_children(recursive=True)
-
-
-# ===================================================================
-# --- threads
-# ===================================================================
-
-
-class ThreadTask(threading.Thread):
-    """A thread task which does nothing except staying alive."""
-
-    def __init__(self):
-        threading.Thread.__init__(self)
-        self._running = False
-        self._interval = 0.001
-        self._flag = threading.Event()
-
-    def __repr__(self):
-        name = self.__class__.__name__
-        return '<%s running=%s at %#x>' % (name, self._running, id(self))
-
-    def __enter__(self):
-        self.start()
-        return self
-
-    def __exit__(self, *args, **kwargs):
-        self.stop()
-
-    def start(self):
-        """Start thread and keep it running until an explicit
-        stop() request. Polls for shutdown every 'interval' seconds.
-        """
-        if self._running:
-            raise ValueError("already started")
-        threading.Thread.start(self)
-        self._flag.wait()
-
-    def run(self):
-        self._running = True
-        self._flag.set()
-        while self._running:
-            time.sleep(self._interval)
-
-    def stop(self):
-        """Stop thread execution and wait until it is stopped."""
-        if not self._running:
-            raise ValueError("already stopped")
-        self._running = False
-        self.join()
-
-
-# ===================================================================
-# --- subprocesses
-# ===================================================================
-
-
-def _reap_children_on_err(fun):
-    @functools.wraps(fun)
-    def wrapper(*args, **kwargs):
-        try:
-            return fun(*args, **kwargs)
-        except Exception:
-            reap_children()
-            raise
-    return wrapper
-
-
-@_reap_children_on_err
-def get_test_subprocess(cmd=None, **kwds):
-    """Creates a python subprocess which does nothing for 60 secs and
-    returns it as a subprocess.Popen instance.
-    If "cmd" is specified that is used instead of python.
-    By default stdin and stdout are redirected to /dev/null.
-    It also attempts to make sure the process is in a reasonably
-    initialized state.
-    The process is registered for cleanup on reap_children().
-    """
-    kwds.setdefault("stdin", DEVNULL)
-    kwds.setdefault("stdout", DEVNULL)
-    kwds.setdefault("cwd", os.getcwd())
-    kwds.setdefault("env", os.environ)
-    if WINDOWS:
-        # Prevents the subprocess from opening error dialogs.
-        kwds.setdefault("creationflags", 0x8000000)  # CREATE_NO_WINDOW
-    if cmd is None:
-        safe_rmpath(_TESTFN)
-        pyline = "from time import sleep;" \
-                 "open(r'%s', 'w').close();" \
-                 "sleep(60);" % _TESTFN
-        cmd = [PYTHON_EXE, "-c", pyline]
-        sproc = subprocess.Popen(cmd, **kwds)
-        _subprocesses_started.add(sproc)
-        wait_for_file(_TESTFN, delete=True, empty=True)
-    else:
-        sproc = subprocess.Popen(cmd, **kwds)
-        _subprocesses_started.add(sproc)
-        wait_for_pid(sproc.pid)
-    return sproc
-
-
-@_reap_children_on_err
-def create_proc_children_pair():
-    """Create a subprocess which creates another one as in:
-    A (us) -> B (child) -> C (grandchild).
-    Return a (child, grandchild) tuple.
-    The 2 processes are fully initialized and will live for 60 secs
-    and are registered for cleanup on reap_children().
- """ - _TESTFN2 = os.path.basename(_TESTFN) + '2' # need to be relative - s = textwrap.dedent("""\ - import subprocess, os, sys, time - s = "import os, time;" - s += "f = open('%s', 'w');" - s += "f.write(str(os.getpid()));" - s += "f.close();" - s += "time.sleep(60);" - subprocess.Popen(['%s', '-c', s]) - time.sleep(60) - """ % (_TESTFN2, PYTHON_EXE)) - # On Windows if we create a subprocess with CREATE_NO_WINDOW flag - # set (which is the default) a "conhost.exe" extra process will be - # spawned as a child. We don't want that. - if WINDOWS: - subp = pyrun(s, creationflags=0) - else: - subp = pyrun(s) - child1 = psutil.Process(subp.pid) - data = wait_for_file(_TESTFN2, delete=False, empty=False) - safe_rmpath(_TESTFN2) - child2_pid = int(data) - _pids_started.add(child2_pid) - child2 = psutil.Process(child2_pid) - return (child1, child2) - - -def create_zombie_proc(): - """Create a zombie process and return its PID.""" - assert psutil.POSIX - unix_file = tempfile.mktemp(prefix=TESTFILE_PREFIX) if MACOS else TESTFN - src = textwrap.dedent("""\ - import os, sys, time, socket, contextlib - child_pid = os.fork() - if child_pid > 0: - time.sleep(3000) - else: - # this is the zombie process - s = socket.socket(socket.AF_UNIX) - with contextlib.closing(s): - s.connect('%s') - if sys.version_info < (3, ): - pid = str(os.getpid()) - else: - pid = bytes(str(os.getpid()), 'ascii') - s.sendall(pid) - """ % unix_file) - with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock: - sock.settimeout(GLOBAL_TIMEOUT) - sock.bind(unix_file) - sock.listen(1) - pyrun(src) - conn, _ = sock.accept() - try: - select.select([conn.fileno()], [], [], GLOBAL_TIMEOUT) - zpid = int(conn.recv(1024)) - _pids_started.add(zpid) - zproc = psutil.Process(zpid) - call_until(lambda: zproc.status(), "ret == psutil.STATUS_ZOMBIE") - return zpid - finally: - conn.close() - - -@_reap_children_on_err -def pyrun(src, **kwds): - """Run python 'src' code string in a separate interpreter. - Returns a subprocess.Popen instance. - """ - kwds.setdefault("stdout", None) - kwds.setdefault("stderr", None) - with tempfile.NamedTemporaryFile( - prefix=TESTFILE_PREFIX, mode="wt", delete=False) as f: - _testfiles_created.add(f.name) - f.write(src) - f.flush() - subp = get_test_subprocess([PYTHON_EXE, f.name], **kwds) - wait_for_pid(subp.pid) - return subp - - -@_reap_children_on_err -def sh(cmd, **kwds): - """run cmd in a subprocess and return its output. - raises RuntimeError on error. - """ - shell = True if isinstance(cmd, (str, unicode)) else False - # Prevents subprocess to open error dialogs in case of error. - flags = 0x8000000 if WINDOWS and shell else 0 - kwds.setdefault("shell", shell) - kwds.setdefault("stdout", subprocess.PIPE) - kwds.setdefault("stderr", subprocess.PIPE) - kwds.setdefault("universal_newlines", True) - kwds.setdefault("creationflags", flags) - p = subprocess.Popen(cmd, **kwds) - _subprocesses_started.add(p) - stdout, stderr = p.communicate() - if p.returncode != 0: - raise RuntimeError(stderr) - if stderr: - warn(stderr) - if stdout.endswith('\n'): - stdout = stdout[:-1] - return stdout - - -def reap_children(recursive=False): - """Terminate and wait() any subprocess started by this test suite - and ensure that no zombies stick around to hog resources and - create problems when looking for refleaks. - - If resursive is True it also tries to terminate and wait() - all grandchildren started by this process. 
- """ - # This is here to make sure wait_procs() behaves properly and - # investigate: - # https://ci.appveyor.com/project/giampaolo/psutil/build/job/ - # jiq2cgd6stsbtn60 - def assert_gone(pid): - assert not psutil.pid_exists(pid), pid - assert pid not in psutil.pids(), pid - try: - p = psutil.Process(pid) - assert not p.is_running(), pid - except psutil.NoSuchProcess: - pass - else: - assert 0, "pid %s is not gone" % pid - - # Get the children here, before terminating the children sub - # processes as we don't want to lose the intermediate reference - # in case of grandchildren. - if recursive: - children = set(psutil.Process().children(recursive=True)) - else: - children = set() - - # Terminate subprocess.Popen instances "cleanly" by closing their - # fds and wiat()ing for them in order to avoid zombies. - while _subprocesses_started: - subp = _subprocesses_started.pop() - _pids_started.add(subp.pid) - try: - subp.terminate() - except OSError as err: - if WINDOWS and err.errno == 6: # "invalid handle" - pass - elif err.errno != errno.ESRCH: - raise - if subp.stdout: - subp.stdout.close() - if subp.stderr: - subp.stderr.close() - try: - # Flushing a BufferedWriter may raise an error. - if subp.stdin: - subp.stdin.close() - finally: - # Wait for the process to terminate, to avoid zombies. - try: - subp.wait() - except OSError as err: - if err.errno != errno.ECHILD: - raise - - # Terminate started pids. - while _pids_started: - pid = _pids_started.pop() - try: - p = psutil.Process(pid) - except psutil.NoSuchProcess: - assert_gone(pid) - else: - children.add(p) - - # Terminate children. - if children: - for p in children: - try: - p.terminate() - except psutil.NoSuchProcess: - pass - gone, alive = psutil.wait_procs(children, timeout=GLOBAL_TIMEOUT) - for p in alive: - warn("couldn't terminate process %r; attempting kill()" % p) - try: - p.kill() - except psutil.NoSuchProcess: - pass - gone, alive = psutil.wait_procs(alive, timeout=GLOBAL_TIMEOUT) - if alive: - for p in alive: - warn("process %r survived kill()" % p) - - for p in children: - assert_gone(p.pid) - - -# =================================================================== -# --- OS -# =================================================================== - - -def get_kernel_version(): - """Return a tuple such as (2, 6, 36).""" - if not POSIX: - raise NotImplementedError("not POSIX") - s = "" - uname = os.uname()[2] - for c in uname: - if c.isdigit() or c == '.': - s += c - else: - break - if not s: - raise ValueError("can't parse %r" % uname) - minor = 0 - micro = 0 - nums = s.split('.') - major = int(nums[0]) - if len(nums) >= 2: - minor = int(nums[1]) - if len(nums) >= 3: - micro = int(nums[2]) - return (major, minor, micro) - - -def get_winver(): - if not WINDOWS: - raise NotImplementedError("not WINDOWS") - wv = sys.getwindowsversion() - if hasattr(wv, 'service_pack_major'): # python >= 2.7 - sp = wv.service_pack_major or 0 - else: - r = re.search(r"\s\d$", wv[4]) - if r: - sp = int(r.group(0)) - else: - sp = 0 - return (wv[0], wv[1], sp) - - -# =================================================================== -# --- sync primitives -# =================================================================== - - -class retry(object): - """A retry decorator.""" - - def __init__(self, - exception=Exception, - timeout=None, - retries=None, - interval=0.001, - logfun=lambda s: print(s, file=sys.stderr), - ): - if timeout and retries: - raise ValueError("timeout and retries args are mutually exclusive") - self.exception = exception - 
self.timeout = timeout - self.retries = retries - self.interval = interval - self.logfun = logfun - - def __iter__(self): - if self.timeout: - stop_at = time.time() + self.timeout - while time.time() < stop_at: - yield - elif self.retries: - for _ in range(self.retries): - yield - else: - while True: - yield - - def sleep(self): - if self.interval is not None: - time.sleep(self.interval) - - def __call__(self, fun): - @functools.wraps(fun) - def wrapper(*args, **kwargs): - exc = None - for _ in self: - try: - return fun(*args, **kwargs) - except self.exception as _: - exc = _ - if self.logfun is not None: - self.logfun(exc) - self.sleep() - continue - if PY3: - raise exc - else: - raise - - # This way the user of the decorated function can change config - # parameters. - wrapper.decorator = self - return wrapper - - -@retry(exception=psutil.NoSuchProcess, logfun=None, timeout=GLOBAL_TIMEOUT, - interval=0.001) -def wait_for_pid(pid): - """Wait for pid to show up in the process list then return. - Used in the test suite to give time the sub process to initialize. - """ - psutil.Process(pid) - if WINDOWS: - # give it some more time to allow better initialization - time.sleep(0.01) - - -@retry(exception=(EnvironmentError, AssertionError), logfun=None, - timeout=GLOBAL_TIMEOUT, interval=0.001) -def wait_for_file(fname, delete=True, empty=False): - """Wait for a file to be written on disk with some content.""" - with open(fname, "rb") as f: - data = f.read() - if not empty: - assert data - if delete: - safe_rmpath(fname) - return data - - -@retry(exception=AssertionError, logfun=None, timeout=GLOBAL_TIMEOUT, - interval=0.001) -def call_until(fun, expr): - """Keep calling function for timeout secs and exit if eval() - expression is True. - """ - ret = fun() - assert eval(expr) - return ret - - -# =================================================================== -# --- fs -# =================================================================== - - -def safe_rmpath(path): - "Convenience function for removing temporary test files or dirs" - def retry_fun(fun): - # On Windows it could happen that the file or directory has - # open handles or references preventing the delete operation - # to succeed immediately, so we retry for a while. See: - # https://bugs.python.org/issue33240 - stop_at = time.time() + 1 - while time.time() < stop_at: - try: - return fun() - except WindowsError as _: - err = _ - if err.errno != errno.ENOENT: - raise - else: - warn("ignoring %s" % (str(err))) - time.sleep(0.01) - raise err - - try: - st = os.stat(path) - if stat.S_ISDIR(st.st_mode): - fun = functools.partial(shutil.rmtree, path) - else: - fun = functools.partial(os.remove, path) - if POSIX: - fun() - else: - retry_fun(fun) - except OSError as err: - if err.errno != errno.ENOENT: - raise - - -def safe_mkdir(dir): - "Convenience function for creating a directory" - try: - os.mkdir(dir) - except OSError as err: - if err.errno != errno.EEXIST: - raise - - -@contextlib.contextmanager -def chdir(dirname): - "Context manager which temporarily changes the current directory." 
- curdir = os.getcwd() - try: - os.chdir(dirname) - yield - finally: - os.chdir(curdir) - - -def create_exe(outpath, c_code=None): - """Creates an executable file in the given location.""" - assert not os.path.exists(outpath), outpath - if c_code: - if not which("gcc"): - raise ValueError("gcc is not installed") - if isinstance(c_code, bool): # c_code is True - c_code = textwrap.dedent( - """ - #include - int main() { - pause(); - return 1; - } - """) - assert isinstance(c_code, str), c_code - with tempfile.NamedTemporaryFile( - suffix='.c', delete=False, mode='wt') as f: - f.write(c_code) - try: - subprocess.check_call(["gcc", f.name, "-o", outpath]) - finally: - safe_rmpath(f.name) - else: - # copy python executable - shutil.copyfile(PYTHON_EXE, outpath) - if POSIX: - st = os.stat(outpath) - os.chmod(outpath, st.st_mode | stat.S_IEXEC) - - -def unique_filename(prefix=TESTFILE_PREFIX, suffix=""): - return tempfile.mktemp(prefix=prefix, suffix=suffix) - - -# =================================================================== -# --- testing -# =================================================================== - - -class TestCase(unittest.TestCase): - - # Print a full path representation of the single unit tests - # being run. - def __str__(self): - return "%s.%s.%s" % ( - self.__class__.__module__, self.__class__.__name__, - self._testMethodName) - - # assertRaisesRegexp renamed to assertRaisesRegex in 3.3; - # add support for the new name. - if not hasattr(unittest.TestCase, 'assertRaisesRegex'): - assertRaisesRegex = unittest.TestCase.assertRaisesRegexp - - -# override default unittest.TestCase -unittest.TestCase = TestCase - - -def _setup_tests(): - if 'PSUTIL_TESTING' not in os.environ: - # This won't work on Windows but set_testing() below will do it. - os.environ['PSUTIL_TESTING'] = '1' - psutil._psplatform.cext.set_testing() - - -def get_suite(): - testmods = [os.path.splitext(x)[0] for x in os.listdir(HERE) - if x.endswith('.py') and x.startswith('test_') and not - x.startswith('test_memory_leaks')] - if "WHEELHOUSE_UPLOADER_USERNAME" in os.environ: - testmods = [x for x in testmods if not x.endswith(( - "osx", "posix", "linux"))] - suite = unittest.TestSuite() - for tm in testmods: - # ...so that the full test paths are printed on screen - tm = "psutil.tests.%s" % tm - suite.addTest(unittest.defaultTestLoader.loadTestsFromName(tm)) - return suite - - -def run_suite(): - _setup_tests() - result = unittest.TextTestRunner(verbosity=VERBOSITY).run(get_suite()) - success = result.wasSuccessful() - sys.exit(0 if success else 1) - - -def run_test_module_by_name(name): - # testmodules = [os.path.splitext(x)[0] for x in os.listdir(HERE) - # if x.endswith('.py') and x.startswith('test_')] - _setup_tests() - name = os.path.splitext(os.path.basename(name))[0] - suite = unittest.TestSuite() - suite.addTest(unittest.defaultTestLoader.loadTestsFromName(name)) - result = unittest.TextTestRunner(verbosity=VERBOSITY).run(suite) - success = result.wasSuccessful() - sys.exit(0 if success else 1) - - -def retry_before_failing(retries=NO_RETRIES): - """Decorator which runs a test function and retries N times before - actually failing. 
- """ - return retry(exception=AssertionError, timeout=None, retries=retries) - - -def skip_on_access_denied(only_if=None): - """Decorator to Ignore AccessDenied exceptions.""" - def decorator(fun): - @functools.wraps(fun) - def wrapper(*args, **kwargs): - try: - return fun(*args, **kwargs) - except psutil.AccessDenied: - if only_if is not None: - if not only_if: - raise - raise unittest.SkipTest("raises AccessDenied") - return wrapper - return decorator - - -def skip_on_not_implemented(only_if=None): - """Decorator to Ignore NotImplementedError exceptions.""" - def decorator(fun): - @functools.wraps(fun) - def wrapper(*args, **kwargs): - try: - return fun(*args, **kwargs) - except NotImplementedError: - if only_if is not None: - if not only_if: - raise - msg = "%r was skipped because it raised NotImplementedError" \ - % fun.__name__ - raise unittest.SkipTest(msg) - return wrapper - return decorator - - -# =================================================================== -# --- network -# =================================================================== - - -def get_free_port(host='127.0.0.1'): - """Return an unused TCP port.""" - with contextlib.closing(socket.socket()) as sock: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind((host, 0)) - return sock.getsockname()[1] - - -@contextlib.contextmanager -def unix_socket_path(suffix=""): - """A context manager which returns a non-existent file name - and tries to delete it on exit. - """ - assert psutil.POSIX - path = unique_filename(suffix=suffix) - try: - yield path - finally: - try: - os.unlink(path) - except OSError: - pass - - -def bind_socket(family=AF_INET, type=SOCK_STREAM, addr=None): - """Binds a generic socket.""" - if addr is None and family in (AF_INET, AF_INET6): - addr = ("", 0) - sock = socket.socket(family, type) - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(addr) - if type == socket.SOCK_STREAM: - sock.listen(10) - return sock - except Exception: - sock.close() - raise - - -def bind_unix_socket(name, type=socket.SOCK_STREAM): - """Bind a UNIX socket.""" - assert psutil.POSIX - assert not os.path.exists(name), name - sock = socket.socket(socket.AF_UNIX, type) - try: - sock.bind(name) - if type == socket.SOCK_STREAM: - sock.listen(10) - except Exception: - sock.close() - raise - return sock - - -def tcp_socketpair(family, addr=("", 0)): - """Build a pair of TCP sockets connected to each other. - Return a (server, client) tuple. - """ - with contextlib.closing(socket.socket(family, SOCK_STREAM)) as ll: - ll.bind(addr) - ll.listen(10) - addr = ll.getsockname() - c = socket.socket(family, SOCK_STREAM) - try: - c.connect(addr) - caddr = c.getsockname() - while True: - a, addr = ll.accept() - # check that we've got the correct client - if addr == caddr: - return (a, c) - a.close() - except OSError: - c.close() - raise - - -def unix_socketpair(name): - """Build a pair of UNIX sockets connected to each other through - the same UNIX file name. - Return a (server, client) tuple. 
- """ - assert psutil.POSIX - server = client = None - try: - server = bind_unix_socket(name, type=socket.SOCK_STREAM) - server.setblocking(0) - client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - client.setblocking(0) - client.connect(name) - # new = server.accept() - except Exception: - if server is not None: - server.close() - if client is not None: - client.close() - raise - return (server, client) - - -@contextlib.contextmanager -def create_sockets(): - """Open as many socket families / types as possible.""" - socks = [] - fname1 = fname2 = None - try: - socks.append(bind_socket(socket.AF_INET, socket.SOCK_STREAM)) - socks.append(bind_socket(socket.AF_INET, socket.SOCK_DGRAM)) - if supports_ipv6(): - socks.append(bind_socket(socket.AF_INET6, socket.SOCK_STREAM)) - socks.append(bind_socket(socket.AF_INET6, socket.SOCK_DGRAM)) - if POSIX and HAS_CONNECTIONS_UNIX: - fname1 = unix_socket_path().__enter__() - fname2 = unix_socket_path().__enter__() - s1, s2 = unix_socketpair(fname1) - s3 = bind_unix_socket(fname2, type=socket.SOCK_DGRAM) - # self.addCleanup(safe_rmpath, fname1) - # self.addCleanup(safe_rmpath, fname2) - for s in (s1, s2, s3): - socks.append(s) - yield socks - finally: - for s in socks: - s.close() - if fname1 is not None: - safe_rmpath(fname1) - if fname2 is not None: - safe_rmpath(fname2) - - -def check_net_address(addr, family): - """Check a net address validity. Supported families are IPv4, - IPv6 and MAC addresses. - """ - import ipaddress # python >= 3.3 / requires "pip install ipaddress" - if enum and PY3: - assert isinstance(family, enum.IntEnum), family - if family == socket.AF_INET: - octs = [int(x) for x in addr.split('.')] - assert len(octs) == 4, addr - for num in octs: - assert 0 <= num <= 255, addr - if not PY3: - addr = unicode(addr) - ipaddress.IPv4Address(addr) - elif family == socket.AF_INET6: - assert isinstance(addr, str), addr - if not PY3: - addr = unicode(addr) - ipaddress.IPv6Address(addr) - elif family == psutil.AF_LINK: - assert re.match(r'([a-fA-F0-9]{2}[:|\-]?){6}', addr) is not None, addr - else: - raise ValueError("unknown family %r", family) - - -def check_connection_ntuple(conn): - """Check validity of a connection namedtuple.""" - # check ntuple - assert len(conn) in (6, 7), conn - has_pid = len(conn) == 7 - has_fd = getattr(conn, 'fd', -1) != -1 - assert conn[0] == conn.fd - assert conn[1] == conn.family - assert conn[2] == conn.type - assert conn[3] == conn.laddr - assert conn[4] == conn.raddr - assert conn[5] == conn.status - if has_pid: - assert conn[6] == conn.pid - - # check fd - if has_fd: - assert conn.fd >= 0, conn - if hasattr(socket, 'fromfd') and not WINDOWS: - try: - dupsock = socket.fromfd(conn.fd, conn.family, conn.type) - except (socket.error, OSError) as err: - if err.args[0] != errno.EBADF: - raise - else: - with contextlib.closing(dupsock): - assert dupsock.family == conn.family - assert dupsock.type == conn.type - - # check family - assert conn.family in (AF_INET, AF_INET6, AF_UNIX), repr(conn.family) - if conn.family in (AF_INET, AF_INET6): - # actually try to bind the local socket; ignore IPv6 - # sockets as their address might be represented as - # an IPv4-mapped-address (e.g. 
"::127.0.0.1") - # and that's rejected by bind() - if conn.family == AF_INET: - s = socket.socket(conn.family, conn.type) - with contextlib.closing(s): - try: - s.bind((conn.laddr[0], 0)) - except socket.error as err: - if err.errno != errno.EADDRNOTAVAIL: - raise - elif conn.family == AF_UNIX: - assert conn.status == psutil.CONN_NONE, conn.status - - # check type (SOCK_SEQPACKET may happen in case of AF_UNIX socks) - assert conn.type in (SOCK_STREAM, SOCK_DGRAM, SOCK_SEQPACKET), \ - repr(conn.type) - if conn.type == SOCK_DGRAM: - assert conn.status == psutil.CONN_NONE, conn.status - - # check laddr (IP address and port sanity) - for addr in (conn.laddr, conn.raddr): - if conn.family in (AF_INET, AF_INET6): - assert isinstance(addr, tuple), addr - if not addr: - continue - assert isinstance(addr.port, int), addr.port - assert 0 <= addr.port <= 65535, addr.port - check_net_address(addr.ip, conn.family) - elif conn.family == AF_UNIX: - assert isinstance(addr, str), addr - - # check status - assert isinstance(conn.status, str), conn - valids = [getattr(psutil, x) for x in dir(psutil) if x.startswith('CONN_')] - assert conn.status in valids, conn - - -# =================================================================== -# --- compatibility -# =================================================================== - - -def reload_module(module): - """Backport of importlib.reload of Python 3.3+.""" - try: - import importlib - if not hasattr(importlib, 'reload'): # python <=3.3 - raise ImportError - except ImportError: - import imp - return imp.reload(module) - else: - return importlib.reload(module) - - -def import_module_by_path(path): - name = os.path.splitext(os.path.basename(path))[0] - if sys.version_info[0] == 2: - import imp - return imp.load_source(name, path) - elif sys.version_info[:2] <= (3, 4): - from importlib.machinery import SourceFileLoader - return SourceFileLoader(name, path).load_module() - else: - import importlib.util - spec = importlib.util.spec_from_file_location(name, path) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - return mod - - -# =================================================================== -# --- others -# =================================================================== - - -def warn(msg): - """Raise a warning msg.""" - warnings.warn(msg, UserWarning) - - -def is_namedtuple(x): - """Check if object is an instance of namedtuple.""" - t = type(x) - b = t.__bases__ - if len(b) != 1 or b[0] != tuple: - return False - f = getattr(t, '_fields', None) - if not isinstance(f, tuple): - return False - return all(type(n) == str for n in f) - - -if POSIX: - @contextlib.contextmanager - def copyload_shared_lib(dst_prefix=TESTFILE_PREFIX): - """Ctx manager which picks up a random shared CO lib used - by this process, copies it in another location and loads it - in memory via ctypes. Return the new absolutized path. - """ - ext = ".so" - dst = tempfile.mktemp(prefix=dst_prefix, suffix=ext) - libs = [x.path for x in psutil.Process().memory_maps() if - os.path.splitext(x.path)[1] == ext and - 'python' in x.path.lower()] - src = random.choice(libs) - shutil.copyfile(src, dst) - try: - ctypes.CDLL(dst) - yield dst - finally: - safe_rmpath(dst) -else: - @contextlib.contextmanager - def copyload_shared_lib(dst_prefix=TESTFILE_PREFIX): - """Ctx manager which picks up a random shared DLL lib used - by this process, copies it in another location and loads it - in memory via ctypes. - Return the new absolutized, normcased path. 
- """ - from ctypes import wintypes - from ctypes import WinError - ext = ".dll" - dst = tempfile.mktemp(prefix=dst_prefix, suffix=ext) - libs = [x.path for x in psutil.Process().memory_maps() if - os.path.splitext(x.path)[1].lower() == ext and - 'python' in os.path.basename(x.path).lower() and - 'wow64' not in x.path.lower()] - src = random.choice(libs) - shutil.copyfile(src, dst) - cfile = None - try: - cfile = ctypes.WinDLL(dst) - yield dst - finally: - # Work around OverflowError: - # - https://ci.appveyor.com/project/giampaolo/psutil/build/1207/ - # job/o53330pbnri9bcw7 - # - http://bugs.python.org/issue30286 - # - http://stackoverflow.com/questions/23522055 - if cfile is not None: - FreeLibrary = ctypes.windll.kernel32.FreeLibrary - FreeLibrary.argtypes = [wintypes.HMODULE] - ret = FreeLibrary(cfile._handle) - if ret == 0: - WinError() - safe_rmpath(dst) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/__main__.py b/server/www/packages/packages-darwin/x64/psutil/tests/__main__.py deleted file mode 100644 index 36554a1..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/__main__.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Run unit tests. This is invoked by: - -$ python -m psutil.tests -""" - -import contextlib -import optparse -import os -import sys -import tempfile -try: - from urllib.request import urlopen # py3 -except ImportError: - from urllib2 import urlopen - -from psutil.tests import PYTHON_EXE -from psutil.tests import run_suite - - -HERE = os.path.abspath(os.path.dirname(__file__)) -GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" -TEST_DEPS = [] -if sys.version_info[:2] == (2, 6): - TEST_DEPS.extend(["ipaddress", "unittest2", "argparse", "mock==1.0.1"]) -elif sys.version_info[:2] == (2, 7) or sys.version_info[:2] <= (3, 2): - TEST_DEPS.extend(["ipaddress", "mock"]) - - -def install_pip(): - try: - import pip # NOQA - except ImportError: - import ssl - f = tempfile.NamedTemporaryFile(suffix='.py') - with contextlib.closing(f): - print("downloading %s to %s" % (GET_PIP_URL, f.name)) - if hasattr(ssl, '_create_unverified_context'): - ctx = ssl._create_unverified_context() - else: - ctx = None - kwargs = dict(context=ctx) if ctx else {} - req = urlopen(GET_PIP_URL, **kwargs) - data = req.read() - f.write(data) - f.flush() - - print("installing pip") - code = os.system('%s %s --user' % (PYTHON_EXE, f.name)) - return code - - -def install_test_deps(deps=None): - """Install test dependencies via pip.""" - if deps is None: - deps = TEST_DEPS - deps = set(deps) - if deps: - is_venv = hasattr(sys, 'real_prefix') - opts = "--user" if not is_venv else "" - install_pip() - code = os.system('%s -m pip install %s --upgrade %s' % ( - PYTHON_EXE, opts, " ".join(deps))) - return code - - -def main(): - usage = "%s -m psutil.tests [opts]" % PYTHON_EXE - parser = optparse.OptionParser(usage=usage, description="run unit tests") - parser.add_option("-i", "--install-deps", - action="store_true", default=False, - help="don't print status messages to stdout") - - opts, args = parser.parse_args() - if opts.install_deps: - install_pip() - install_test_deps() - else: - for dep in TEST_DEPS: - try: - __import__(dep.split("==")[0]) - except ImportError: - sys.exit("%r lib is not installed; run %s -m psutil.tests " - "--install-deps" % (dep, PYTHON_EXE)) - run_suite() - - -main() 
diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_aix.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_aix.py deleted file mode 100644 index 7a8a4c3..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_aix.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola' -# Copyright (c) 2017, Arnon Yaari -# All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""AIX specific tests.""" - -import re - -from psutil import AIX -from psutil.tests import run_test_module_by_name -from psutil.tests import sh -from psutil.tests import unittest -import psutil - - -@unittest.skipIf(not AIX, "AIX only") -class AIXSpecificTestCase(unittest.TestCase): - - def test_virtual_memory(self): - out = sh('/usr/bin/svmon -O unit=KB') - re_pattern = "memory\s*" - for field in ("size inuse free pin virtual available mmode").split(): - re_pattern += "(?P<%s>\S+)\s+" % (field,) - matchobj = re.search(re_pattern, out) - - self.assertIsNotNone( - matchobj, "svmon command returned unexpected output") - - KB = 1024 - total = int(matchobj.group("size")) * KB - available = int(matchobj.group("available")) * KB - used = int(matchobj.group("inuse")) * KB - free = int(matchobj.group("free")) * KB - - psutil_result = psutil.virtual_memory() - - # MEMORY_TOLERANCE from psutil.tests is not enough. For some reason - # we're seeing differences of ~1.2 MB. 2 MB is still a good tolerance - # when compared to GBs. - MEMORY_TOLERANCE = 2 * KB * KB # 2 MB - self.assertEqual(psutil_result.total, total) - self.assertAlmostEqual( - psutil_result.used, used, delta=MEMORY_TOLERANCE) - self.assertAlmostEqual( - psutil_result.available, available, delta=MEMORY_TOLERANCE) - self.assertAlmostEqual( - psutil_result.free, free, delta=MEMORY_TOLERANCE) - - def test_swap_memory(self): - out = sh('/usr/sbin/lsps -a') - # From the man page, "The size is given in megabytes" so we assume - # we'll always have 'MB' in the result - # TODO maybe try to use "swap -l" to check "used" too, but its units - # are not guaranteed to be "MB" so parsing may not be consistent - matchobj = re.search("(?P\S+)\s+" - "(?P\S+)\s+" - "(?P\S+)\s+" - "(?P\d+)MB", out) - - self.assertIsNotNone( - matchobj, "lsps command returned unexpected output") - - total_mb = int(matchobj.group("size")) - MB = 1024 ** 2 - psutil_result = psutil.swap_memory() - # we divide our result by MB instead of multiplying the lsps value by - # MB because lsps may round down, so we round down too - self.assertEqual(int(psutil_result.total / MB), total_mb) - - def test_cpu_stats(self): - out = sh('/usr/bin/mpstat -a') - - re_pattern = "ALL\s*" - for field in ("min maj mpcs mpcr dev soft dec ph cs ics bound rq " - "push S3pull S3grd S0rd S1rd S2rd S3rd S4rd S5rd " - "sysc").split(): - re_pattern += "(?P<%s>\S+)\s+" % (field,) - matchobj = re.search(re_pattern, out) - - self.assertIsNotNone( - matchobj, "mpstat command returned unexpected output") - - # numbers are usually in the millions so 1000 is ok for tolerance - CPU_STATS_TOLERANCE = 1000 - psutil_result = psutil.cpu_stats() - self.assertAlmostEqual( - psutil_result.ctx_switches, - int(matchobj.group("cs")), - delta=CPU_STATS_TOLERANCE) - self.assertAlmostEqual( - psutil_result.syscalls, - int(matchobj.group("sysc")), - delta=CPU_STATS_TOLERANCE) - self.assertAlmostEqual( - psutil_result.interrupts, - int(matchobj.group("dev")), - delta=CPU_STATS_TOLERANCE) - 
self.assertAlmostEqual( - psutil_result.soft_interrupts, - int(matchobj.group("soft")), - delta=CPU_STATS_TOLERANCE) - - def test_cpu_count_logical(self): - out = sh('/usr/bin/mpstat -a') - mpstat_lcpu = int(re.search("lcpu=(\d+)", out).group(1)) - psutil_lcpu = psutil.cpu_count(logical=True) - self.assertEqual(mpstat_lcpu, psutil_lcpu) - - def test_net_if_addrs_names(self): - out = sh('/etc/ifconfig -l') - ifconfig_names = set(out.split()) - psutil_names = set(psutil.net_if_addrs().keys()) - self.assertSetEqual(ifconfig_names, psutil_names) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_bsd.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_bsd.py deleted file mode 100644 index 7846c1c..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_bsd.py +++ /dev/null @@ -1,519 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# TODO: (FreeBSD) add test for comparing connections with 'sockstat' cmd. - - -"""Tests specific to all BSD platforms.""" - - -import datetime -import os -import re -import time - -import psutil -from psutil import BSD -from psutil import FREEBSD -from psutil import NETBSD -from psutil import OPENBSD -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_BATTERY -from psutil.tests import MEMORY_TOLERANCE -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import sh -from psutil.tests import unittest -from psutil.tests import which - - -if BSD: - PAGESIZE = os.sysconf("SC_PAGE_SIZE") - if os.getuid() == 0: # muse requires root privileges - MUSE_AVAILABLE = which('muse') - else: - MUSE_AVAILABLE = False -else: - MUSE_AVAILABLE = False - - -def sysctl(cmdline): - """Expects a sysctl command with an argument and parse the result - returning only the value of interest. 
- """ - result = sh("sysctl " + cmdline) - if FREEBSD: - result = result[result.find(": ") + 2:] - elif OPENBSD or NETBSD: - result = result[result.find("=") + 1:] - try: - return int(result) - except ValueError: - return result - - -def muse(field): - """Thin wrapper around 'muse' cmdline utility.""" - out = sh('muse') - for line in out.split('\n'): - if line.startswith(field): - break - else: - raise ValueError("line not found") - return int(line.split()[1]) - - -# ===================================================================== -# --- All BSD* -# ===================================================================== - - -@unittest.skipIf(not BSD, "BSD only") -class BSDSpecificTestCase(unittest.TestCase): - """Generic tests common to all BSD variants.""" - - @classmethod - def setUpClass(cls): - cls.pid = get_test_subprocess().pid - - @classmethod - def tearDownClass(cls): - reap_children() - - @unittest.skipIf(NETBSD, "-o lstart doesn't work on NETBSD") - def test_process_create_time(self): - output = sh("ps -o lstart -p %s" % self.pid) - start_ps = output.replace('STARTED', '').strip() - start_psutil = psutil.Process(self.pid).create_time() - start_psutil = time.strftime("%a %b %e %H:%M:%S %Y", - time.localtime(start_psutil)) - self.assertEqual(start_ps, start_psutil) - - def test_disks(self): - # test psutil.disk_usage() and psutil.disk_partitions() - # against "df -a" - def df(path): - out = sh('df -k "%s"' % path).strip() - lines = out.split('\n') - lines.pop(0) - line = lines.pop(0) - dev, total, used, free = line.split()[:4] - if dev == 'none': - dev = '' - total = int(total) * 1024 - used = int(used) * 1024 - free = int(free) * 1024 - return dev, total, used, free - - for part in psutil.disk_partitions(all=False): - usage = psutil.disk_usage(part.mountpoint) - dev, total, used, free = df(part.mountpoint) - self.assertEqual(part.device, dev) - self.assertEqual(usage.total, total) - # 10 MB tollerance - if abs(usage.free - free) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.free, free)) - if abs(usage.used - used) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.used, used)) - - @unittest.skipIf(not which('sysctl'), "sysctl cmd not available") - def test_cpu_count_logical(self): - syst = sysctl("hw.ncpu") - self.assertEqual(psutil.cpu_count(logical=True), syst) - - @unittest.skipIf(not which('sysctl'), "sysctl cmd not available") - def test_virtual_memory_total(self): - num = sysctl('hw.physmem') - self.assertEqual(num, psutil.virtual_memory().total) - - def test_net_if_stats(self): - for name, stats in psutil.net_if_stats().items(): - try: - out = sh("ifconfig %s" % name) - except RuntimeError: - pass - else: - self.assertEqual(stats.isup, 'RUNNING' in out, msg=out) - if "mtu" in out: - self.assertEqual(stats.mtu, - int(re.findall(r'mtu (\d+)', out)[0])) - - -# ===================================================================== -# --- FreeBSD -# ===================================================================== - - -@unittest.skipIf(not FREEBSD, "FREEBSD only") -class FreeBSDSpecificTestCase(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.pid = get_test_subprocess().pid - - @classmethod - def tearDownClass(cls): - reap_children() - - @staticmethod - def parse_swapinfo(): - # the last line is always the total - output = sh("swapinfo -k").splitlines()[-1] - parts = re.split(r'\s+', output) - - if not parts: - raise ValueError("Can't parse swapinfo: %s" % output) - - # the size is in 1k units, so multiply by 1024 - total, used, free 
= (int(p) * 1024 for p in parts[1:4]) - return total, used, free - - @retry_before_failing() - def test_proc_memory_maps(self): - out = sh('procstat -v %s' % self.pid) - maps = psutil.Process(self.pid).memory_maps(grouped=False) - lines = out.split('\n')[1:] - while lines: - line = lines.pop() - fields = line.split() - _, start, stop, perms, res = fields[:5] - map = maps.pop() - self.assertEqual("%s-%s" % (start, stop), map.addr) - self.assertEqual(int(res), map.rss) - if not map.path.startswith('['): - self.assertEqual(fields[10], map.path) - - def test_proc_exe(self): - out = sh('procstat -b %s' % self.pid) - self.assertEqual(psutil.Process(self.pid).exe(), - out.split('\n')[1].split()[-1]) - - def test_proc_cmdline(self): - out = sh('procstat -c %s' % self.pid) - self.assertEqual(' '.join(psutil.Process(self.pid).cmdline()), - ' '.join(out.split('\n')[1].split()[2:])) - - def test_proc_uids_gids(self): - out = sh('procstat -s %s' % self.pid) - euid, ruid, suid, egid, rgid, sgid = out.split('\n')[1].split()[2:8] - p = psutil.Process(self.pid) - uids = p.uids() - gids = p.gids() - self.assertEqual(uids.real, int(ruid)) - self.assertEqual(uids.effective, int(euid)) - self.assertEqual(uids.saved, int(suid)) - self.assertEqual(gids.real, int(rgid)) - self.assertEqual(gids.effective, int(egid)) - self.assertEqual(gids.saved, int(sgid)) - - @retry_before_failing() - def test_proc_ctx_switches(self): - tested = [] - out = sh('procstat -r %s' % self.pid) - p = psutil.Process(self.pid) - for line in out.split('\n'): - line = line.lower().strip() - if ' voluntary context' in line: - pstat_value = int(line.split()[-1]) - psutil_value = p.num_ctx_switches().voluntary - self.assertEqual(pstat_value, psutil_value) - tested.append(None) - elif ' involuntary context' in line: - pstat_value = int(line.split()[-1]) - psutil_value = p.num_ctx_switches().involuntary - self.assertEqual(pstat_value, psutil_value) - tested.append(None) - if len(tested) != 2: - raise RuntimeError("couldn't find lines match in procstat out") - - @retry_before_failing() - def test_proc_cpu_times(self): - tested = [] - out = sh('procstat -r %s' % self.pid) - p = psutil.Process(self.pid) - for line in out.split('\n'): - line = line.lower().strip() - if 'user time' in line: - pstat_value = float('0.' + line.split()[-1].split('.')[-1]) - psutil_value = p.cpu_times().user - self.assertEqual(pstat_value, psutil_value) - tested.append(None) - elif 'system time' in line: - pstat_value = float('0.' 
+ line.split()[-1].split('.')[-1]) - psutil_value = p.cpu_times().system - self.assertEqual(pstat_value, psutil_value) - tested.append(None) - if len(tested) != 2: - raise RuntimeError("couldn't find lines match in procstat out") - - # --- virtual_memory(); tests against sysctl - - @retry_before_failing() - def test_vmem_active(self): - syst = sysctl("vm.stats.vm.v_active_count") * PAGESIZE - self.assertAlmostEqual(psutil.virtual_memory().active, syst, - delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_inactive(self): - syst = sysctl("vm.stats.vm.v_inactive_count") * PAGESIZE - self.assertAlmostEqual(psutil.virtual_memory().inactive, syst, - delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_wired(self): - syst = sysctl("vm.stats.vm.v_wire_count") * PAGESIZE - self.assertAlmostEqual(psutil.virtual_memory().wired, syst, - delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_cached(self): - syst = sysctl("vm.stats.vm.v_cache_count") * PAGESIZE - self.assertAlmostEqual(psutil.virtual_memory().cached, syst, - delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_free(self): - syst = sysctl("vm.stats.vm.v_free_count") * PAGESIZE - self.assertAlmostEqual(psutil.virtual_memory().free, syst, - delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_buffers(self): - syst = sysctl("vfs.bufspace") - self.assertAlmostEqual(psutil.virtual_memory().buffers, syst, - delta=MEMORY_TOLERANCE) - - # --- virtual_memory(); tests against muse - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - def test_muse_vmem_total(self): - num = muse('Total') - self.assertEqual(psutil.virtual_memory().total, num) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_active(self): - num = muse('Active') - self.assertAlmostEqual(psutil.virtual_memory().active, num, - delta=MEMORY_TOLERANCE) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_inactive(self): - num = muse('Inactive') - self.assertAlmostEqual(psutil.virtual_memory().inactive, num, - delta=MEMORY_TOLERANCE) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_wired(self): - num = muse('Wired') - self.assertAlmostEqual(psutil.virtual_memory().wired, num, - delta=MEMORY_TOLERANCE) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_cached(self): - num = muse('Cache') - self.assertAlmostEqual(psutil.virtual_memory().cached, num, - delta=MEMORY_TOLERANCE) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_free(self): - num = muse('Free') - self.assertAlmostEqual(psutil.virtual_memory().free, num, - delta=MEMORY_TOLERANCE) - - @unittest.skipIf(not MUSE_AVAILABLE, "muse not installed") - @retry_before_failing() - def test_muse_vmem_buffers(self): - num = muse('Buffer') - self.assertAlmostEqual(psutil.virtual_memory().buffers, num, - delta=MEMORY_TOLERANCE) - - def test_cpu_stats_ctx_switches(self): - self.assertAlmostEqual(psutil.cpu_stats().ctx_switches, - sysctl('vm.stats.sys.v_swtch'), delta=1000) - - def test_cpu_stats_interrupts(self): - self.assertAlmostEqual(psutil.cpu_stats().interrupts, - sysctl('vm.stats.sys.v_intr'), delta=1000) - - def test_cpu_stats_soft_interrupts(self): - self.assertAlmostEqual(psutil.cpu_stats().soft_interrupts, - sysctl('vm.stats.sys.v_soft'), delta=1000) - - def 
test_cpu_stats_syscalls(self): - self.assertAlmostEqual(psutil.cpu_stats().syscalls, - sysctl('vm.stats.sys.v_syscall'), delta=1000) - - # def test_cpu_stats_traps(self): - # self.assertAlmostEqual(psutil.cpu_stats().traps, - # sysctl('vm.stats.sys.v_trap'), delta=1000) - - # --- swap memory - - def test_swapmem_free(self): - total, used, free = self.parse_swapinfo() - self.assertAlmostEqual( - psutil.swap_memory().free, free, delta=MEMORY_TOLERANCE) - - def test_swapmem_used(self): - total, used, free = self.parse_swapinfo() - self.assertAlmostEqual( - psutil.swap_memory().used, used, delta=MEMORY_TOLERANCE) - - def test_swapmem_total(self): - total, used, free = self.parse_swapinfo() - self.assertAlmostEqual( - psutil.swap_memory().total, total, delta=MEMORY_TOLERANCE) - - # --- others - - def test_boot_time(self): - s = sysctl('sysctl kern.boottime') - s = s[s.find(" sec = ") + 7:] - s = s[:s.find(',')] - btime = int(s) - self.assertEqual(btime, psutil.boot_time()) - - # --- sensors_battery - - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_sensors_battery(self): - def secs2hours(secs): - m, s = divmod(secs, 60) - h, m = divmod(m, 60) - return "%d:%02d" % (h, m) - - out = sh("acpiconf -i 0") - fields = dict([(x.split('\t')[0], x.split('\t')[-1]) - for x in out.split("\n")]) - metrics = psutil.sensors_battery() - percent = int(fields['Remaining capacity:'].replace('%', '')) - remaining_time = fields['Remaining time:'] - self.assertEqual(metrics.percent, percent) - if remaining_time == 'unknown': - self.assertEqual(metrics.secsleft, psutil.POWER_TIME_UNLIMITED) - else: - self.assertEqual(secs2hours(metrics.secsleft), remaining_time) - - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_sensors_battery_against_sysctl(self): - self.assertEqual(psutil.sensors_battery().percent, - sysctl("hw.acpi.battery.life")) - self.assertEqual(psutil.sensors_battery().power_plugged, - sysctl("hw.acpi.acline") == 1) - secsleft = psutil.sensors_battery().secsleft - if secsleft < 0: - self.assertEqual(sysctl("hw.acpi.battery.time"), -1) - else: - self.assertEqual(secsleft, sysctl("hw.acpi.battery.time") * 60) - - @unittest.skipIf(HAS_BATTERY, "has battery") - def test_sensors_battery_no_battery(self): - # If no battery is present one of these calls is supposed - # to fail, see: - # https://github.com/giampaolo/psutil/issues/1074 - with self.assertRaises(RuntimeError): - sysctl("hw.acpi.battery.life") - sysctl("hw.acpi.battery.time") - sysctl("hw.acpi.acline") - self.assertIsNone(psutil.sensors_battery()) - - -# ===================================================================== -# --- OpenBSD -# ===================================================================== - - -@unittest.skipIf(not OPENBSD, "OPENBSD only") -class OpenBSDSpecificTestCase(unittest.TestCase): - - def test_boot_time(self): - s = sysctl('kern.boottime') - sys_bt = datetime.datetime.strptime(s, "%a %b %d %H:%M:%S %Y") - psutil_bt = datetime.datetime.fromtimestamp(psutil.boot_time()) - self.assertEqual(sys_bt, psutil_bt) - - -# ===================================================================== -# --- NetBSD -# ===================================================================== - - -@unittest.skipIf(not NETBSD, "NETBSD only") -class NetBSDSpecificTestCase(unittest.TestCase): - - @staticmethod - def parse_meminfo(look_for): - with open('/proc/meminfo', 'rb') as f: - for line in f: - if line.startswith(look_for): - return int(line.split()[1]) * 1024 - raise ValueError("can't find %s" % look_for) - - def 
test_vmem_total(self): - self.assertEqual( - psutil.virtual_memory().total, self.parse_meminfo("MemTotal:")) - - def test_vmem_free(self): - self.assertAlmostEqual( - psutil.virtual_memory().free, self.parse_meminfo("MemFree:"), - delta=MEMORY_TOLERANCE) - - def test_vmem_buffers(self): - self.assertAlmostEqual( - psutil.virtual_memory().buffers, self.parse_meminfo("Buffers:"), - delta=MEMORY_TOLERANCE) - - def test_vmem_shared(self): - self.assertAlmostEqual( - psutil.virtual_memory().shared, self.parse_meminfo("MemShared:"), - delta=MEMORY_TOLERANCE) - - def test_swapmem_total(self): - self.assertAlmostEqual( - psutil.swap_memory().total, self.parse_meminfo("SwapTotal:"), - delta=MEMORY_TOLERANCE) - - def test_swapmem_free(self): - self.assertAlmostEqual( - psutil.swap_memory().free, self.parse_meminfo("SwapFree:"), - delta=MEMORY_TOLERANCE) - - def test_swapmem_used(self): - smem = psutil.swap_memory() - self.assertEqual(smem.used, smem.total - smem.free) - - def test_cpu_stats_interrupts(self): - with open('/proc/stat', 'rb') as f: - for line in f: - if line.startswith(b'intr'): - interrupts = int(line.split()[1]) - break - else: - raise ValueError("couldn't find line") - self.assertAlmostEqual( - psutil.cpu_stats().interrupts, interrupts, delta=1000) - - def test_cpu_stats_ctx_switches(self): - with open('/proc/stat', 'rb') as f: - for line in f: - if line.startswith(b'ctxt'): - ctx_switches = int(line.split()[1]) - break - else: - raise ValueError("couldn't find line") - self.assertAlmostEqual( - psutil.cpu_stats().ctx_switches, ctx_switches, delta=1000) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_connections.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_connections.py deleted file mode 100644 index cba835e..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_connections.py +++ /dev/null @@ -1,525 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Tests for net_connections() and Process.connections() APIs.""" - -import os -import socket -import textwrap -from contextlib import closing -from socket import AF_INET -from socket import AF_INET6 -from socket import SOCK_DGRAM -from socket import SOCK_STREAM - -import psutil -from psutil import FREEBSD -from psutil import LINUX -from psutil import MACOS -from psutil import NETBSD -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._common import supports_ipv6 -from psutil._compat import PY3 -from psutil.tests import AF_UNIX -from psutil.tests import bind_socket -from psutil.tests import bind_unix_socket -from psutil.tests import check_connection_ntuple -from psutil.tests import create_sockets -from psutil.tests import get_free_port -from psutil.tests import HAS_CONNECTIONS_UNIX -from psutil.tests import pyrun -from psutil.tests import reap_children -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import skip_on_access_denied -from psutil.tests import tcp_socketpair -from psutil.tests import TESTFN -from psutil.tests import TRAVIS -from psutil.tests import unittest -from psutil.tests import unix_socket_path -from psutil.tests import unix_socketpair -from psutil.tests import wait_for_file - - -thisproc = psutil.Process() - - -class Base(object): - - def setUp(self): - if not NETBSD: - # NetBSD opens a UNIX socket to /var/log/run. - cons = thisproc.connections(kind='all') - assert not cons, cons - - def tearDown(self): - safe_rmpath(TESTFN) - reap_children() - if not NETBSD: - # Make sure we closed all resources. - # NetBSD opens a UNIX socket to /var/log/run. - cons = thisproc.connections(kind='all') - assert not cons, cons - - def get_conn_from_sock(self, sock): - cons = thisproc.connections(kind='all') - smap = dict([(c.fd, c) for c in cons]) - if NETBSD: - # NetBSD opens a UNIX socket to /var/log/run - # so there may be more connections. - return smap[sock.fileno()] - else: - self.assertEqual(len(cons), 1) - if cons[0].fd != -1: - self.assertEqual(smap[sock.fileno()].fd, sock.fileno()) - return cons[0] - - def check_socket(self, sock, conn=None): - """Given a socket, makes sure it matches the one obtained - via psutil. It assumes this process created one connection - only (the one supposed to be checked). - """ - if conn is None: - conn = self.get_conn_from_sock(sock) - check_connection_ntuple(conn) - - # fd, family, type - if conn.fd != -1: - self.assertEqual(conn.fd, sock.fileno()) - self.assertEqual(conn.family, sock.family) - # see: http://bugs.python.org/issue30204 - self.assertEqual( - conn.type, sock.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)) - - # local address - laddr = sock.getsockname() - if not laddr and PY3 and isinstance(laddr, bytes): - # See: http://bugs.python.org/issue30205 - laddr = laddr.decode() - if sock.family == AF_INET6: - laddr = laddr[:2] - if sock.family == AF_UNIX and OPENBSD: - # No addresses are set for UNIX sockets on OpenBSD. - pass - else: - self.assertEqual(conn.laddr, laddr) - - # XXX Solaris can't retrieve system-wide UNIX sockets - if sock.family == AF_UNIX and HAS_CONNECTIONS_UNIX: - cons = thisproc.connections(kind='all') - self.compare_procsys_connections(os.getpid(), cons) - return conn - - def compare_procsys_connections(self, pid, proc_cons, kind='all'): - """Given a process PID and its list of connections compare - those against system-wide connections retrieved via - psutil.net_connections. 
- """ - try: - sys_cons = psutil.net_connections(kind=kind) - except psutil.AccessDenied: - # On MACOS, system-wide connections are retrieved by iterating - # over all processes - if MACOS: - return - else: - raise - # Filter for this proc PID and exlucde PIDs from the tuple. - sys_cons = [c[:-1] for c in sys_cons if c.pid == pid] - sys_cons.sort() - proc_cons.sort() - self.assertEqual(proc_cons, sys_cons) - - -# ===================================================================== -# --- Test unconnected sockets -# ===================================================================== - - -class TestUnconnectedSockets(Base, unittest.TestCase): - """Tests sockets which are open but not connected to anything.""" - - def test_tcp_v4(self): - addr = ("127.0.0.1", get_free_port()) - with closing(bind_socket(AF_INET, SOCK_STREAM, addr=addr)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_LISTEN) - - @unittest.skipIf(not supports_ipv6(), "IPv6 not supported") - def test_tcp_v6(self): - addr = ("::1", get_free_port()) - with closing(bind_socket(AF_INET6, SOCK_STREAM, addr=addr)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_LISTEN) - - def test_udp_v4(self): - addr = ("127.0.0.1", get_free_port()) - with closing(bind_socket(AF_INET, SOCK_DGRAM, addr=addr)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_NONE) - - @unittest.skipIf(not supports_ipv6(), "IPv6 not supported") - def test_udp_v6(self): - addr = ("::1", get_free_port()) - with closing(bind_socket(AF_INET6, SOCK_DGRAM, addr=addr)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_NONE) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_unix_tcp(self): - with unix_socket_path() as name: - with closing(bind_unix_socket(name, type=SOCK_STREAM)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_NONE) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_unix_udp(self): - with unix_socket_path() as name: - with closing(bind_unix_socket(name, type=SOCK_STREAM)) as sock: - conn = self.check_socket(sock) - assert not conn.raddr - self.assertEqual(conn.status, psutil.CONN_NONE) - - -# ===================================================================== -# --- Test connected sockets -# ===================================================================== - - -class TestConnectedSocketPairs(Base, unittest.TestCase): - """Test socket pairs which are are actually connected to - each other. - """ - - # On SunOS, even after we close() it, the server socket stays around - # in TIME_WAIT state. - @unittest.skipIf(SUNOS, "unreliable on SUONS") - def test_tcp(self): - addr = ("127.0.0.1", get_free_port()) - assert not thisproc.connections(kind='tcp4') - server, client = tcp_socketpair(AF_INET, addr=addr) - try: - cons = thisproc.connections(kind='tcp4') - self.assertEqual(len(cons), 2) - self.assertEqual(cons[0].status, psutil.CONN_ESTABLISHED) - self.assertEqual(cons[1].status, psutil.CONN_ESTABLISHED) - # May not be fast enough to change state so it stays - # commenteed. 
- # client.close() - # cons = thisproc.connections(kind='all') - # self.assertEqual(len(cons), 1) - # self.assertEqual(cons[0].status, psutil.CONN_CLOSE_WAIT) - finally: - server.close() - client.close() - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_unix(self): - with unix_socket_path() as name: - server, client = unix_socketpair(name) - try: - cons = thisproc.connections(kind='unix') - assert not (cons[0].laddr and cons[0].raddr) - assert not (cons[1].laddr and cons[1].raddr) - if NETBSD: - # On NetBSD creating a UNIX socket will cause - # a UNIX connection to /var/run/log. - cons = [c for c in cons if c.raddr != '/var/run/log'] - self.assertEqual(len(cons), 2) - if LINUX or FREEBSD or SUNOS: - # remote path is never set - self.assertEqual(cons[0].raddr, "") - self.assertEqual(cons[1].raddr, "") - # one local address should though - self.assertEqual(name, cons[0].laddr or cons[1].laddr) - elif OPENBSD: - # No addresses whatsoever here. - for addr in (cons[0].laddr, cons[0].raddr, - cons[1].laddr, cons[1].raddr): - self.assertEqual(addr, "") - else: - # On other systems either the laddr or raddr - # of both peers are set. - self.assertEqual(cons[0].laddr or cons[1].laddr, name) - self.assertEqual(cons[0].raddr or cons[1].raddr, name) - finally: - server.close() - client.close() - - @skip_on_access_denied(only_if=MACOS) - def test_combos(self): - def check_conn(proc, conn, family, type, laddr, raddr, status, kinds): - all_kinds = ("all", "inet", "inet4", "inet6", "tcp", "tcp4", - "tcp6", "udp", "udp4", "udp6") - check_connection_ntuple(conn) - self.assertEqual(conn.family, family) - self.assertEqual(conn.type, type) - self.assertEqual(conn.laddr, laddr) - self.assertEqual(conn.raddr, raddr) - self.assertEqual(conn.status, status) - for kind in all_kinds: - cons = proc.connections(kind=kind) - if kind in kinds: - assert cons - else: - assert not cons, cons - # compare against system-wide connections - # XXX Solaris can't retrieve system-wide UNIX - # sockets. 
- if HAS_CONNECTIONS_UNIX: - self.compare_procsys_connections(proc.pid, [conn]) - - tcp_template = textwrap.dedent(""" - import socket, time - s = socket.socket($family, socket.SOCK_STREAM) - s.bind(('$addr', 0)) - s.listen(1) - with open('$testfn', 'w') as f: - f.write(str(s.getsockname()[:2])) - time.sleep(60) - """) - - udp_template = textwrap.dedent(""" - import socket, time - s = socket.socket($family, socket.SOCK_DGRAM) - s.bind(('$addr', 0)) - with open('$testfn', 'w') as f: - f.write(str(s.getsockname()[:2])) - time.sleep(60) - """) - - from string import Template - testfile = os.path.basename(TESTFN) - tcp4_template = Template(tcp_template).substitute( - family=int(AF_INET), addr="127.0.0.1", testfn=testfile) - udp4_template = Template(udp_template).substitute( - family=int(AF_INET), addr="127.0.0.1", testfn=testfile) - tcp6_template = Template(tcp_template).substitute( - family=int(AF_INET6), addr="::1", testfn=testfile) - udp6_template = Template(udp_template).substitute( - family=int(AF_INET6), addr="::1", testfn=testfile) - - # launch various subprocess instantiating a socket of various - # families and types to enrich psutil results - tcp4_proc = pyrun(tcp4_template) - tcp4_addr = eval(wait_for_file(testfile)) - udp4_proc = pyrun(udp4_template) - udp4_addr = eval(wait_for_file(testfile)) - if supports_ipv6(): - tcp6_proc = pyrun(tcp6_template) - tcp6_addr = eval(wait_for_file(testfile)) - udp6_proc = pyrun(udp6_template) - udp6_addr = eval(wait_for_file(testfile)) - else: - tcp6_proc = None - udp6_proc = None - tcp6_addr = None - udp6_addr = None - - for p in thisproc.children(): - cons = p.connections() - self.assertEqual(len(cons), 1) - for conn in cons: - # TCP v4 - if p.pid == tcp4_proc.pid: - check_conn(p, conn, AF_INET, SOCK_STREAM, tcp4_addr, (), - psutil.CONN_LISTEN, - ("all", "inet", "inet4", "tcp", "tcp4")) - # UDP v4 - elif p.pid == udp4_proc.pid: - check_conn(p, conn, AF_INET, SOCK_DGRAM, udp4_addr, (), - psutil.CONN_NONE, - ("all", "inet", "inet4", "udp", "udp4")) - # TCP v6 - elif p.pid == getattr(tcp6_proc, "pid", None): - check_conn(p, conn, AF_INET6, SOCK_STREAM, tcp6_addr, (), - psutil.CONN_LISTEN, - ("all", "inet", "inet6", "tcp", "tcp6")) - # UDP v6 - elif p.pid == getattr(udp6_proc, "pid", None): - check_conn(p, conn, AF_INET6, SOCK_DGRAM, udp6_addr, (), - psutil.CONN_NONE, - ("all", "inet", "inet6", "udp", "udp6")) - - # err - self.assertRaises(ValueError, p.connections, kind='???') - - def test_multi_sockets_filtering(self): - with create_sockets() as socks: - cons = thisproc.connections(kind='all') - self.assertEqual(len(cons), len(socks)) - # tcp - cons = thisproc.connections(kind='tcp') - self.assertEqual(len(cons), 2 if supports_ipv6() else 1) - for conn in cons: - self.assertIn(conn.family, (AF_INET, AF_INET6)) - self.assertEqual(conn.type, SOCK_STREAM) - # tcp4 - cons = thisproc.connections(kind='tcp4') - self.assertEqual(len(cons), 1) - self.assertEqual(cons[0].family, AF_INET) - self.assertEqual(cons[0].type, SOCK_STREAM) - # tcp6 - if supports_ipv6(): - cons = thisproc.connections(kind='tcp6') - self.assertEqual(len(cons), 1) - self.assertEqual(cons[0].family, AF_INET6) - self.assertEqual(cons[0].type, SOCK_STREAM) - # udp - cons = thisproc.connections(kind='udp') - self.assertEqual(len(cons), 2 if supports_ipv6() else 1) - for conn in cons: - self.assertIn(conn.family, (AF_INET, AF_INET6)) - self.assertEqual(conn.type, SOCK_DGRAM) - # udp4 - cons = thisproc.connections(kind='udp4') - self.assertEqual(len(cons), 1) - 
self.assertEqual(cons[0].family, AF_INET) - self.assertEqual(cons[0].type, SOCK_DGRAM) - # udp6 - if supports_ipv6(): - cons = thisproc.connections(kind='udp6') - self.assertEqual(len(cons), 1) - self.assertEqual(cons[0].family, AF_INET6) - self.assertEqual(cons[0].type, SOCK_DGRAM) - # inet - cons = thisproc.connections(kind='inet') - self.assertEqual(len(cons), 4 if supports_ipv6() else 2) - for conn in cons: - self.assertIn(conn.family, (AF_INET, AF_INET6)) - self.assertIn(conn.type, (SOCK_STREAM, SOCK_DGRAM)) - # inet6 - if supports_ipv6(): - cons = thisproc.connections(kind='inet6') - self.assertEqual(len(cons), 2) - for conn in cons: - self.assertEqual(conn.family, AF_INET6) - self.assertIn(conn.type, (SOCK_STREAM, SOCK_DGRAM)) - # unix - if HAS_CONNECTIONS_UNIX: - cons = thisproc.connections(kind='unix') - self.assertEqual(len(cons), 3) - for conn in cons: - self.assertEqual(conn.family, AF_UNIX) - self.assertIn(conn.type, (SOCK_STREAM, SOCK_DGRAM)) - - -# ===================================================================== -# --- Miscellaneous tests -# ===================================================================== - - -class TestSystemWideConnections(Base, unittest.TestCase): - """Tests for net_connections().""" - - @skip_on_access_denied() - def test_it(self): - def check(cons, families, types_): - AF_UNIX = getattr(socket, 'AF_UNIX', object()) - for conn in cons: - self.assertIn(conn.family, families, msg=conn) - if conn.family != AF_UNIX: - self.assertIn(conn.type, types_, msg=conn) - check_connection_ntuple(conn) - - with create_sockets(): - from psutil._common import conn_tmap - for kind, groups in conn_tmap.items(): - # XXX: SunOS does not retrieve UNIX sockets. - if kind == 'unix' and not HAS_CONNECTIONS_UNIX: - continue - families, types_ = groups - cons = psutil.net_connections(kind) - self.assertEqual(len(cons), len(set(cons))) - check(cons, families, types_) - - self.assertRaises(ValueError, psutil.net_connections, kind='???') - - @skip_on_access_denied() - def test_multi_socks(self): - with create_sockets() as socks: - cons = [x for x in psutil.net_connections(kind='all') - if x.pid == os.getpid()] - self.assertEqual(len(cons), len(socks)) - - @skip_on_access_denied() - # See: https://travis-ci.org/giampaolo/psutil/jobs/237566297 - @unittest.skipIf(MACOS and TRAVIS, "unreliable on MACOS + TRAVIS") - def test_multi_sockets_procs(self): - # Creates multiple sub processes, each creating different - # sockets. For each process check that proc.connections() - # and net_connections() return the same results. 
- # This is done mainly to check whether net_connections()'s - # pid is properly set, see: - # https://github.com/giampaolo/psutil/issues/1013 - with create_sockets() as socks: - expected = len(socks) - pids = [] - times = 10 - for i in range(times): - fname = os.path.realpath(TESTFN) + str(i) - src = textwrap.dedent("""\ - import time, os - from psutil.tests import create_sockets - with create_sockets(): - with open('%s', 'w') as f: - f.write(str(os.getpid())) - time.sleep(60) - """ % fname) - sproc = pyrun(src) - pids.append(sproc.pid) - self.addCleanup(safe_rmpath, fname) - - # sync - for i in range(times): - fname = TESTFN + str(i) - wait_for_file(fname) - - syscons = [x for x in psutil.net_connections(kind='all') if x.pid - in pids] - for pid in pids: - self.assertEqual(len([x for x in syscons if x.pid == pid]), - expected) - p = psutil.Process(pid) - self.assertEqual(len(p.connections('all')), expected) - - -# ===================================================================== -# --- Miscellaneous tests -# ===================================================================== - - -class TestMisc(unittest.TestCase): - - def test_connection_constants(self): - ints = [] - strs = [] - for name in dir(psutil): - if name.startswith('CONN_'): - num = getattr(psutil, name) - str_ = str(num) - assert str_.isupper(), str_ - self.assertNotIn(str, strs) - self.assertNotIn(num, ints) - ints.append(num) - strs.append(str_) - if SUNOS: - psutil.CONN_IDLE - psutil.CONN_BOUND - if WINDOWS: - psutil.CONN_DELETE_TCB - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_contracts.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_contracts.py deleted file mode 100644 index 877a5c0..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_contracts.py +++ /dev/null @@ -1,642 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Contracts tests. These tests mainly check API sanity in terms of -returned types and APIs availability. 
-Some of these are duplicates of tests test_system.py and test_process.py -""" - -import errno -import os -import stat -import time -import traceback -import warnings -from contextlib import closing - -from psutil import AIX -from psutil import BSD -from psutil import FREEBSD -from psutil import LINUX -from psutil import MACOS -from psutil import NETBSD -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._compat import long -from psutil.tests import bind_unix_socket -from psutil.tests import check_connection_ntuple -from psutil.tests import get_kernel_version -from psutil.tests import HAS_CONNECTIONS_UNIX -from psutil.tests import HAS_RLIMIT -from psutil.tests import HAS_SENSORS_FANS -from psutil.tests import HAS_SENSORS_TEMPERATURES -from psutil.tests import is_namedtuple -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import skip_on_access_denied -from psutil.tests import TESTFN -from psutil.tests import unittest -from psutil.tests import unix_socket_path -from psutil.tests import VALID_PROC_STATUSES -from psutil.tests import warn -import psutil - - -# =================================================================== -# --- APIs availability -# =================================================================== - - -class TestAvailability(unittest.TestCase): - """Make sure code reflects what doc promises in terms of APIs - availability. - """ - - def test_cpu_affinity(self): - hasit = LINUX or WINDOWS or FREEBSD - self.assertEqual(hasattr(psutil.Process, "cpu_affinity"), hasit) - - def test_win_service(self): - self.assertEqual(hasattr(psutil, "win_service_iter"), WINDOWS) - self.assertEqual(hasattr(psutil, "win_service_get"), WINDOWS) - - def test_PROCFS_PATH(self): - self.assertEqual(hasattr(psutil, "PROCFS_PATH"), - LINUX or SUNOS or AIX) - - def test_win_priority(self): - ae = self.assertEqual - ae(hasattr(psutil, "ABOVE_NORMAL_PRIORITY_CLASS"), WINDOWS) - ae(hasattr(psutil, "BELOW_NORMAL_PRIORITY_CLASS"), WINDOWS) - ae(hasattr(psutil, "HIGH_PRIORITY_CLASS"), WINDOWS) - ae(hasattr(psutil, "IDLE_PRIORITY_CLASS"), WINDOWS) - ae(hasattr(psutil, "NORMAL_PRIORITY_CLASS"), WINDOWS) - ae(hasattr(psutil, "REALTIME_PRIORITY_CLASS"), WINDOWS) - - def test_linux_ioprio(self): - ae = self.assertEqual - ae(hasattr(psutil, "IOPRIO_CLASS_NONE"), LINUX) - ae(hasattr(psutil, "IOPRIO_CLASS_RT"), LINUX) - ae(hasattr(psutil, "IOPRIO_CLASS_BE"), LINUX) - ae(hasattr(psutil, "IOPRIO_CLASS_IDLE"), LINUX) - - def test_linux_rlimit(self): - ae = self.assertEqual - hasit = LINUX and get_kernel_version() >= (2, 6, 36) - ae(hasattr(psutil.Process, "rlimit"), hasit) - ae(hasattr(psutil, "RLIM_INFINITY"), hasit) - ae(hasattr(psutil, "RLIMIT_AS"), hasit) - ae(hasattr(psutil, "RLIMIT_CORE"), hasit) - ae(hasattr(psutil, "RLIMIT_CPU"), hasit) - ae(hasattr(psutil, "RLIMIT_DATA"), hasit) - ae(hasattr(psutil, "RLIMIT_FSIZE"), hasit) - ae(hasattr(psutil, "RLIMIT_LOCKS"), hasit) - ae(hasattr(psutil, "RLIMIT_MEMLOCK"), hasit) - ae(hasattr(psutil, "RLIMIT_NOFILE"), hasit) - ae(hasattr(psutil, "RLIMIT_NPROC"), hasit) - ae(hasattr(psutil, "RLIMIT_RSS"), hasit) - ae(hasattr(psutil, "RLIMIT_STACK"), hasit) - - hasit = LINUX and get_kernel_version() >= (3, 0) - ae(hasattr(psutil, "RLIMIT_MSGQUEUE"), hasit) - ae(hasattr(psutil, "RLIMIT_NICE"), hasit) - ae(hasattr(psutil, "RLIMIT_RTPRIO"), hasit) - ae(hasattr(psutil, "RLIMIT_RTTIME"), hasit) - ae(hasattr(psutil, "RLIMIT_SIGPENDING"), hasit) - - def 
test_cpu_freq(self): - linux = (LINUX and - (os.path.exists("/sys/devices/system/cpu/cpufreq") or - os.path.exists("/sys/devices/system/cpu/cpu0/cpufreq"))) - self.assertEqual(hasattr(psutil, "cpu_freq"), - linux or MACOS or WINDOWS) - - def test_sensors_temperatures(self): - self.assertEqual( - hasattr(psutil, "sensors_temperatures"), LINUX) - - def test_sensors_fans(self): - self.assertEqual(hasattr(psutil, "sensors_fans"), LINUX) - - def test_battery(self): - self.assertEqual(hasattr(psutil, "sensors_battery"), - LINUX or WINDOWS or FREEBSD or MACOS) - - def test_proc_environ(self): - self.assertEqual(hasattr(psutil.Process, "environ"), - LINUX or MACOS or WINDOWS) - - def test_proc_uids(self): - self.assertEqual(hasattr(psutil.Process, "uids"), POSIX) - - def test_proc_gids(self): - self.assertEqual(hasattr(psutil.Process, "uids"), POSIX) - - def test_proc_terminal(self): - self.assertEqual(hasattr(psutil.Process, "terminal"), POSIX) - - def test_proc_ionice(self): - self.assertEqual(hasattr(psutil.Process, "ionice"), LINUX or WINDOWS) - - def test_proc_rlimit(self): - self.assertEqual(hasattr(psutil.Process, "rlimit"), LINUX) - - def test_proc_io_counters(self): - hasit = hasattr(psutil.Process, "io_counters") - self.assertEqual(hasit, False if MACOS or SUNOS else True) - - def test_proc_num_fds(self): - self.assertEqual(hasattr(psutil.Process, "num_fds"), POSIX) - - def test_proc_num_handles(self): - self.assertEqual(hasattr(psutil.Process, "num_handles"), WINDOWS) - - def test_proc_cpu_affinity(self): - self.assertEqual(hasattr(psutil.Process, "cpu_affinity"), - LINUX or WINDOWS or FREEBSD) - - def test_proc_cpu_num(self): - self.assertEqual(hasattr(psutil.Process, "cpu_num"), - LINUX or FREEBSD or SUNOS) - - def test_proc_memory_maps(self): - hasit = hasattr(psutil.Process, "memory_maps") - self.assertEqual(hasit, False if OPENBSD or NETBSD or AIX else True) - - -# =================================================================== -# --- Test deprecations -# =================================================================== - - -class TestDeprecations(unittest.TestCase): - - def test_memory_info_ex(self): - with warnings.catch_warnings(record=True) as ws: - psutil.Process().memory_info_ex() - w = ws[0] - self.assertIsInstance(w.category(), FutureWarning) - self.assertIn("memory_info_ex() is deprecated", str(w.message)) - self.assertIn("use memory_info() instead", str(w.message)) - - -# =================================================================== -# --- System API types -# =================================================================== - - -class TestSystem(unittest.TestCase): - """Check the return types of system related APIs. - Mainly we want to test we never return unicode on Python 2, see: - https://github.com/giampaolo/psutil/issues/1039 - """ - - @classmethod - def setUpClass(cls): - cls.proc = psutil.Process() - - def tearDown(self): - safe_rmpath(TESTFN) - - def test_cpu_times(self): - # Duplicate of test_system.py. Keep it anyway. - ret = psutil.cpu_times() - assert is_namedtuple(ret) - for n in ret: - self.assertIsInstance(n, float) - self.assertGreaterEqual(n, 0) - - def test_io_counters(self): - # Duplicate of test_system.py. Keep it anyway. - for k in psutil.disk_io_counters(perdisk=True): - self.assertIsInstance(k, str) - - def test_disk_partitions(self): - # Duplicate of test_system.py. Keep it anyway. 
- for disk in psutil.disk_partitions(): - self.assertIsInstance(disk.device, str) - self.assertIsInstance(disk.mountpoint, str) - self.assertIsInstance(disk.fstype, str) - self.assertIsInstance(disk.opts, str) - - @unittest.skipIf(not POSIX, 'POSIX only') - @unittest.skipIf(not HAS_CONNECTIONS_UNIX, "can't list UNIX sockets") - @skip_on_access_denied(only_if=MACOS) - def test_net_connections(self): - with unix_socket_path() as name: - with closing(bind_unix_socket(name)): - cons = psutil.net_connections(kind='unix') - assert cons - for conn in cons: - self.assertIsInstance(conn.laddr, str) - - def test_net_if_addrs(self): - # Duplicate of test_system.py. Keep it anyway. - for ifname, addrs in psutil.net_if_addrs().items(): - self.assertIsInstance(ifname, str) - for addr in addrs: - self.assertIsInstance(addr.address, str) - self.assertIsInstance(addr.netmask, (str, type(None))) - self.assertIsInstance(addr.broadcast, (str, type(None))) - - def test_net_if_stats(self): - # Duplicate of test_system.py. Keep it anyway. - for ifname, _ in psutil.net_if_stats().items(): - self.assertIsInstance(ifname, str) - - def test_net_io_counters(self): - # Duplicate of test_system.py. Keep it anyway. - for ifname, _ in psutil.net_io_counters(pernic=True).items(): - self.assertIsInstance(ifname, str) - - @unittest.skipIf(not HAS_SENSORS_FANS, "not supported") - def test_sensors_fans(self): - # Duplicate of test_system.py. Keep it anyway. - for name, units in psutil.sensors_fans().items(): - self.assertIsInstance(name, str) - for unit in units: - self.assertIsInstance(unit.label, str) - - @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported") - def test_sensors_temperatures(self): - # Duplicate of test_system.py. Keep it anyway. - for name, units in psutil.sensors_temperatures().items(): - self.assertIsInstance(name, str) - for unit in units: - self.assertIsInstance(unit.label, str) - - def test_users(self): - # Duplicate of test_system.py. Keep it anyway. - for user in psutil.users(): - self.assertIsInstance(user.name, str) - self.assertIsInstance(user.terminal, (str, type(None))) - self.assertIsInstance(user.host, (str, type(None))) - self.assertIsInstance(user.pid, (int, type(None))) - - -# =================================================================== -# --- Fetch all processes test -# =================================================================== - - -class TestFetchAllProcesses(unittest.TestCase): - """Test which iterates over all running processes and performs - some sanity checks against Process API's returned values.
- """ - - def test_fetch_all(self): - valid_procs = 0 - excluded_names = set([ - 'send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait', - 'as_dict', 'parent', 'children', 'memory_info_ex', 'oneshot', - ]) - if LINUX and not HAS_RLIMIT: - excluded_names.add('rlimit') - attrs = [] - for name in dir(psutil.Process): - if name.startswith("_"): - continue - if name in excluded_names: - continue - attrs.append(name) - - default = object() - failures = [] - for p in psutil.process_iter(): - with p.oneshot(): - for name in attrs: - ret = default - try: - args = () - kwargs = {} - attr = getattr(p, name, None) - if attr is not None and callable(attr): - if name == 'rlimit': - args = (psutil.RLIMIT_NOFILE,) - elif name == 'memory_maps': - kwargs = {'grouped': False} - ret = attr(*args, **kwargs) - else: - ret = attr - valid_procs += 1 - except NotImplementedError: - msg = "%r was skipped because not implemented" % ( - self.__class__.__name__ + '.test_' + name) - warn(msg) - except (psutil.NoSuchProcess, psutil.AccessDenied) as err: - self.assertEqual(err.pid, p.pid) - if err.name: - # make sure exception's name attr is set - # with the actual process name - self.assertEqual(err.name, p.name()) - assert str(err) - assert err.msg - except Exception as err: - s = '\n' + '=' * 70 + '\n' - s += "FAIL: test_%s (proc=%s" % (name, p) - if ret != default: - s += ", ret=%s)" % repr(ret) - s += ')\n' - s += '-' * 70 - s += "\n%s" % traceback.format_exc() - s = "\n".join((" " * 4) + i for i in s.splitlines()) - s += '\n' - failures.append(s) - break - else: - if ret not in (0, 0.0, [], None, '', {}): - assert ret, ret - meth = getattr(self, name) - meth(ret, p) - - if failures: - self.fail(''.join(failures)) - - # we should always have a non-empty list, not including PID 0 etc. - # special cases. - assert valid_procs - - def cmdline(self, ret, proc): - self.assertIsInstance(ret, list) - for part in ret: - self.assertIsInstance(part, str) - - def exe(self, ret, proc): - self.assertIsInstance(ret, (str, type(None))) - if not ret: - self.assertEqual(ret, '') - else: - assert os.path.isabs(ret), ret - # Note: os.stat() may return False even if the file is there - # hence we skip the test, see: - # http://stackoverflow.com/questions/3112546/os-path-exists-lies - if POSIX and os.path.isfile(ret): - if hasattr(os, 'access') and hasattr(os, "X_OK"): - # XXX may fail on MACOS - assert os.access(ret, os.X_OK) - - def pid(self, ret, proc): - self.assertIsInstance(ret, int) - self.assertGreaterEqual(ret, 0) - - def ppid(self, ret, proc): - self.assertIsInstance(ret, (int, long)) - self.assertGreaterEqual(ret, 0) - - def name(self, ret, proc): - self.assertIsInstance(ret, str) - # on AIX, "" processes don't have names - if not AIX: - assert ret - - def create_time(self, ret, proc): - self.assertIsInstance(ret, float) - try: - self.assertGreaterEqual(ret, 0) - except AssertionError: - # XXX - if OPENBSD and proc.status() == psutil.STATUS_ZOMBIE: - pass - else: - raise - # this can't be taken for granted on all platforms - # self.assertGreaterEqual(ret, psutil.boot_time()) - # make sure returned value can be pretty printed - # with strftime - time.strftime("%Y %m %d %H:%M:%S", time.localtime(ret)) - - def uids(self, ret, proc): - assert is_namedtuple(ret) - for uid in ret: - self.assertIsInstance(uid, int) - self.assertGreaterEqual(uid, 0) - - def gids(self, ret, proc): - assert is_namedtuple(ret) - # note: testing all gids as above seems not to be reliable for - # gid == 30 (nodoby); not sure why. 
- for gid in ret: - self.assertIsInstance(gid, int) - if not MACOS and not NETBSD: - self.assertGreaterEqual(gid, 0) - - def username(self, ret, proc): - self.assertIsInstance(ret, str) - assert ret - - def status(self, ret, proc): - self.assertIsInstance(ret, str) - assert ret - self.assertNotEqual(ret, '?') # XXX - self.assertIn(ret, VALID_PROC_STATUSES) - - def io_counters(self, ret, proc): - assert is_namedtuple(ret) - for field in ret: - self.assertIsInstance(field, (int, long)) - if field != -1: - self.assertGreaterEqual(field, 0) - - def ionice(self, ret, proc): - if POSIX: - assert is_namedtuple(ret) - for field in ret: - self.assertIsInstance(field, int) - if LINUX: - self.assertGreaterEqual(ret.ioclass, 0) - self.assertGreaterEqual(ret.value, 0) - else: - self.assertGreaterEqual(ret, 0) - self.assertIn(ret, (0, 1, 2)) - - def num_threads(self, ret, proc): - self.assertIsInstance(ret, int) - self.assertGreaterEqual(ret, 1) - - def threads(self, ret, proc): - self.assertIsInstance(ret, list) - for t in ret: - assert is_namedtuple(t) - self.assertGreaterEqual(t.id, 0) - self.assertGreaterEqual(t.user_time, 0) - self.assertGreaterEqual(t.system_time, 0) - for field in t: - self.assertIsInstance(field, (int, float)) - - def cpu_times(self, ret, proc): - assert is_namedtuple(ret) - for n in ret: - self.assertIsInstance(n, float) - self.assertGreaterEqual(n, 0) - # TODO: check ntuple fields - - def cpu_percent(self, ret, proc): - self.assertIsInstance(ret, float) - assert 0.0 <= ret <= 100.0, ret - - def cpu_num(self, ret, proc): - self.assertIsInstance(ret, int) - if FREEBSD and ret == -1: - return - self.assertGreaterEqual(ret, 0) - if psutil.cpu_count() == 1: - self.assertEqual(ret, 0) - self.assertIn(ret, list(range(psutil.cpu_count()))) - - def memory_info(self, ret, proc): - assert is_namedtuple(ret) - for value in ret: - self.assertIsInstance(value, (int, long)) - self.assertGreaterEqual(value, 0) - if POSIX and not AIX and ret.vms != 0: - # VMS is always supposed to be the highest - for name in ret._fields: - if name != 'vms': - value = getattr(ret, name) - self.assertGreater(ret.vms, value, msg=ret) - elif WINDOWS: - self.assertGreaterEqual(ret.peak_wset, ret.wset) - self.assertGreaterEqual(ret.peak_paged_pool, ret.paged_pool) - self.assertGreaterEqual(ret.peak_nonpaged_pool, ret.nonpaged_pool) - self.assertGreaterEqual(ret.peak_pagefile, ret.pagefile) - - def memory_full_info(self, ret, proc): - assert is_namedtuple(ret) - total = psutil.virtual_memory().total - for name in ret._fields: - value = getattr(ret, name) - self.assertIsInstance(value, (int, long)) - self.assertGreaterEqual(value, 0, msg=(name, value)) - if LINUX and name in ('vms', 'data'): - # On Linux there are processes (e.g. 'goa-daemon') whose - # VMS is incredibly high for some reason. 
- continue - self.assertLessEqual(value, total, msg=(name, value, total)) - - if LINUX: - self.assertGreaterEqual(ret.pss, ret.uss) - - def open_files(self, ret, proc): - self.assertIsInstance(ret, list) - for f in ret: - self.assertIsInstance(f.fd, int) - self.assertIsInstance(f.path, str) - if WINDOWS: - self.assertEqual(f.fd, -1) - elif LINUX: - self.assertIsInstance(f.position, int) - self.assertIsInstance(f.mode, str) - self.assertIsInstance(f.flags, int) - self.assertGreaterEqual(f.position, 0) - self.assertIn(f.mode, ('r', 'w', 'a', 'r+', 'a+')) - self.assertGreater(f.flags, 0) - elif BSD and not f.path: - # XXX see: https://github.com/giampaolo/psutil/issues/595 - continue - assert os.path.isabs(f.path), f - assert os.path.isfile(f.path), f - - def num_fds(self, ret, proc): - self.assertIsInstance(ret, int) - self.assertGreaterEqual(ret, 0) - - def connections(self, ret, proc): - self.assertEqual(len(ret), len(set(ret))) - for conn in ret: - check_connection_ntuple(conn) - - def cwd(self, ret, proc): - if ret: # 'ret' can be None or empty - self.assertIsInstance(ret, str) - assert os.path.isabs(ret), ret - try: - st = os.stat(ret) - except OSError as err: - if WINDOWS and err.errno in \ - psutil._psplatform.ACCESS_DENIED_SET: - pass - # directory has been removed in mean time - elif err.errno != errno.ENOENT: - raise - else: - assert stat.S_ISDIR(st.st_mode) - - def memory_percent(self, ret, proc): - self.assertIsInstance(ret, float) - assert 0 <= ret <= 100, ret - - def is_running(self, ret, proc): - self.assertIsInstance(ret, bool) - - def cpu_affinity(self, ret, proc): - self.assertIsInstance(ret, list) - assert ret != [], ret - cpus = range(psutil.cpu_count()) - for n in ret: - self.assertIsInstance(n, int) - self.assertIn(n, cpus) - - def terminal(self, ret, proc): - self.assertIsInstance(ret, (str, type(None))) - if ret is not None: - assert os.path.isabs(ret), ret - assert os.path.exists(ret), ret - - def memory_maps(self, ret, proc): - for nt in ret: - self.assertIsInstance(nt.addr, str) - self.assertIsInstance(nt.perms, str) - self.assertIsInstance(nt.path, str) - for fname in nt._fields: - value = getattr(nt, fname) - if fname == 'path': - if not value.startswith('['): - assert os.path.isabs(nt.path), nt.path - # commented as on Linux we might get - # '/foo/bar (deleted)' - # assert os.path.exists(nt.path), nt.path - elif fname in ('addr', 'perms'): - assert value - else: - self.assertIsInstance(value, (int, long)) - self.assertGreaterEqual(value, 0) - - def num_handles(self, ret, proc): - self.assertIsInstance(ret, int) - self.assertGreaterEqual(ret, 0) - - def nice(self, ret, proc): - self.assertIsInstance(ret, int) - if POSIX: - assert -20 <= ret <= 20, ret - else: - priorities = [getattr(psutil, x) for x in dir(psutil) - if x.endswith('_PRIORITY_CLASS')] - self.assertIn(ret, priorities) - - def num_ctx_switches(self, ret, proc): - assert is_namedtuple(ret) - for value in ret: - self.assertIsInstance(value, (int, long)) - self.assertGreaterEqual(value, 0) - - def rlimit(self, ret, proc): - self.assertIsInstance(ret, tuple) - self.assertEqual(len(ret), 2) - self.assertGreaterEqual(ret[0], -1) - self.assertGreaterEqual(ret[1], -1) - - def environ(self, ret, proc): - self.assertIsInstance(ret, dict) - for k, v in ret.items(): - self.assertIsInstance(k, str) - self.assertIsInstance(v, str) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_linux.py 
b/server/www/packages/packages-darwin/x64/psutil/tests/test_linux.py deleted file mode 100644 index 4e652a9..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_linux.py +++ /dev/null @@ -1,2005 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Linux specific tests.""" - -from __future__ import division -import collections -import contextlib -import errno -import glob -import io -import os -import re -import shutil -import socket -import struct -import tempfile -import textwrap -import time -import warnings - -import psutil -from psutil import LINUX -from psutil._compat import basestring -from psutil._compat import PY3 -from psutil._compat import u -from psutil.tests import call_until -from psutil.tests import HAS_BATTERY -from psutil.tests import HAS_CPU_FREQ -from psutil.tests import HAS_RLIMIT -from psutil.tests import MEMORY_TOLERANCE -from psutil.tests import mock -from psutil.tests import PYPY -from psutil.tests import pyrun -from psutil.tests import reap_children -from psutil.tests import reload_module -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import sh -from psutil.tests import skip_on_not_implemented -from psutil.tests import TESTFN -from psutil.tests import ThreadTask -from psutil.tests import TRAVIS -from psutil.tests import unittest -from psutil.tests import which - - -HERE = os.path.abspath(os.path.dirname(__file__)) -SIOCGIFADDR = 0x8915 -SIOCGIFCONF = 0x8912 -SIOCGIFHWADDR = 0x8927 -if LINUX: - SECTOR_SIZE = 512 - - -# ===================================================================== -# --- utils -# ===================================================================== - - -def get_ipv4_address(ifname): - import fcntl - ifname = ifname[:15] - if PY3: - ifname = bytes(ifname, 'ascii') - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - with contextlib.closing(s): - return socket.inet_ntoa( - fcntl.ioctl(s.fileno(), - SIOCGIFADDR, - struct.pack('256s', ifname))[20:24]) - - -def get_mac_address(ifname): - import fcntl - ifname = ifname[:15] - if PY3: - ifname = bytes(ifname, 'ascii') - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - with contextlib.closing(s): - info = fcntl.ioctl( - s.fileno(), SIOCGIFHWADDR, struct.pack('256s', ifname)) - if PY3: - def ord(x): - return x - else: - import __builtin__ - ord = __builtin__.ord - return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1] - - -def free_swap(): - """Parse 'free' cmd and return swap memory's total, used and free - values. - """ - out = sh('free -b') - lines = out.split('\n') - for line in lines: - if line.startswith('Swap'): - _, total, used, free = line.split() - nt = collections.namedtuple('free', 'total used free') - return nt(int(total), int(used), int(free)) - raise ValueError( - "can't find 'Swap' in 'free' output:\n%s" % '\n'.join(lines)) - - -def free_physmem(): - """Parse 'free' cmd and return physical memory's total, used - and free values. - """ - # Note: free can have 2 different formats, invalidating 'shared' - # and 'cached' memory which may have different positions so we - # do not return them.
- # https://github.com/giampaolo/psutil/issues/538#issuecomment-57059946 - out = sh('free -b') - lines = out.split('\n') - for line in lines: - if line.startswith('Mem'): - total, used, free, shared = \ - [int(x) for x in line.split()[1:5]] - nt = collections.namedtuple( - 'free', 'total used free shared output') - return nt(total, used, free, shared, out) - raise ValueError( - "can't find 'Mem' in 'free' output:\n%s" % '\n'.join(lines)) - - -def vmstat(stat): - out = sh("vmstat -s") - for line in out.split("\n"): - line = line.strip() - if stat in line: - return int(line.split(' ')[0]) - raise ValueError("can't find %r in 'vmstat' output" % stat) - - -def get_free_version_info(): - out = sh("free -V").strip() - return tuple(map(int, out.split()[-1].split('.'))) - - -@contextlib.contextmanager -def mock_open_content(for_path, content): - """Mock open() builtin and forces it to return a certain `content` - on read() if the path being opened matches `for_path`. - """ - def open_mock(name, *args, **kwargs): - if name == for_path: - if PY3: - if isinstance(content, basestring): - return io.StringIO(content) - else: - return io.BytesIO(content) - else: - return io.BytesIO(content) - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, create=True, side_effect=open_mock) as m: - yield m - - -@contextlib.contextmanager -def mock_open_exception(for_path, exc): - """Mock open() builtin and raises `exc` if the path being opened - matches `for_path`. - """ - def open_mock(name, *args, **kwargs): - if name == for_path: - raise exc - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, create=True, side_effect=open_mock) as m: - yield m - - -# ===================================================================== -# --- system virtual memory -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemVirtualMemory(unittest.TestCase): - - def test_total(self): - # free_value = free_physmem().total - # psutil_value = psutil.virtual_memory().total - # self.assertEqual(free_value, psutil_value) - vmstat_value = vmstat('total memory') * 1024 - psutil_value = psutil.virtual_memory().total - self.assertAlmostEqual(vmstat_value, psutil_value) - - # Older versions of procps used slab memory to calculate used memory. 
- # This got changed in: - # https://gitlab.com/procps-ng/procps/commit/ - # 05d751c4f076a2f0118b914c5e51cfbb4762ad8e - @unittest.skipIf(LINUX and get_free_version_info() < (3, 3, 12), - "old free version") - @retry_before_failing() - def test_used(self): - free = free_physmem() - free_value = free.used - psutil_value = psutil.virtual_memory().used - self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE, - msg='%s %s \n%s' % (free_value, psutil_value, free.output)) - - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - @retry_before_failing() - def test_free(self): - # _, _, free_value, _ = free_physmem() - # psutil_value = psutil.virtual_memory().free - # self.assertAlmostEqual( - # free_value, psutil_value, delta=MEMORY_TOLERANCE) - vmstat_value = vmstat('free memory') * 1024 - psutil_value = psutil.virtual_memory().free - self.assertAlmostEqual( - vmstat_value, psutil_value, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_buffers(self): - vmstat_value = vmstat('buffer memory') * 1024 - psutil_value = psutil.virtual_memory().buffers - self.assertAlmostEqual( - vmstat_value, psutil_value, delta=MEMORY_TOLERANCE) - - # https://travis-ci.org/giampaolo/psutil/jobs/226719664 - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - @retry_before_failing() - def test_active(self): - vmstat_value = vmstat('active memory') * 1024 - psutil_value = psutil.virtual_memory().active - self.assertAlmostEqual( - vmstat_value, psutil_value, delta=MEMORY_TOLERANCE) - - # https://travis-ci.org/giampaolo/psutil/jobs/227242952 - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - @retry_before_failing() - def test_inactive(self): - vmstat_value = vmstat('inactive memory') * 1024 - psutil_value = psutil.virtual_memory().inactive - self.assertAlmostEqual( - vmstat_value, psutil_value, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_shared(self): - free = free_physmem() - free_value = free.shared - if free_value == 0: - raise unittest.SkipTest("free does not support 'shared' column") - psutil_value = psutil.virtual_memory().shared - self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE, - msg='%s %s \n%s' % (free_value, psutil_value, free.output)) - - @retry_before_failing() - def test_available(self): - # "free" output format has changed at some point: - # https://github.com/giampaolo/psutil/issues/538#issuecomment-147192098 - out = sh("free -b") - lines = out.split('\n') - if 'available' not in lines[0]: - raise unittest.SkipTest("free does not support 'available' column") - else: - free_value = int(lines[1].split()[-1]) - psutil_value = psutil.virtual_memory().available - self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE, - msg='%s %s \n%s' % (free_value, psutil_value, out)) - - def test_warnings_on_misses(self): - # Emulate a case where /proc/meminfo provides little info. - # psutil is supposed to set the missing fields to 0 and - # raise a warning.
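The emulation in the test body that follows is driven by the mock_open_content helper defined near the top of this file. A standalone equivalent of the same trick, for Python 3 on Linux (canned content abbreviated):

    import io
    import psutil
    from unittest import mock

    real_open = open

    def fake_open(name, *args, **kwargs):
        if name == '/proc/meminfo':
            return io.BytesIO(b"MemTotal: 100 kB\nMemFree: 2 kB\n")  # canned content
        return real_open(name, *args, **kwargs)  # everything else passes through

    with mock.patch('builtins.open', create=True, side_effect=fake_open):
        print(psutil.virtual_memory())  # parsed from the canned /proc/meminfo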
- with mock_open_content( - '/proc/meminfo', - textwrap.dedent("""\ - Active(anon): 6145416 kB - Active(file): 2950064 kB - Inactive(anon): 574764 kB - Inactive(file): 1567648 kB - MemAvailable: -1 kB - MemFree: 2057400 kB - MemTotal: 16325648 kB - SReclaimable: 346648 kB - """).encode()) as m: - with warnings.catch_warnings(record=True) as ws: - warnings.simplefilter("always") - ret = psutil.virtual_memory() - assert m.called - self.assertEqual(len(ws), 1) - w = ws[0] - assert w.filename.endswith('psutil/_pslinux.py') - self.assertIn( - "memory stats couldn't be determined", str(w.message)) - self.assertIn("cached", str(w.message)) - self.assertIn("shared", str(w.message)) - self.assertIn("active", str(w.message)) - self.assertIn("inactive", str(w.message)) - self.assertIn("buffers", str(w.message)) - self.assertIn("available", str(w.message)) - self.assertEqual(ret.cached, 0) - self.assertEqual(ret.active, 0) - self.assertEqual(ret.inactive, 0) - self.assertEqual(ret.shared, 0) - self.assertEqual(ret.buffers, 0) - self.assertEqual(ret.available, 0) - self.assertEqual(ret.slab, 0) - - def test_avail_old_percent(self): - # Make sure that our calculation of avail mem for old kernels - # is off by max 10%. - from psutil._pslinux import calculate_avail_vmem - from psutil._pslinux import open_binary - - mems = {} - with open_binary('/proc/meminfo') as f: - for line in f: - fields = line.split() - mems[fields[0]] = int(fields[1]) * 1024 - - a = calculate_avail_vmem(mems) - if b'MemAvailable:' in mems: - b = mems[b'MemAvailable:'] - diff_percent = abs(a - b) / a * 100 - self.assertLess(diff_percent, 10) - - def test_avail_old_comes_from_kernel(self): - # Make sure "MemAvailable:" column is used instead of relying - # on our internal algorithm to calculate avail mem. - with mock_open_content( - '/proc/meminfo', - textwrap.dedent("""\ - Active: 9444728 kB - Active(anon): 6145416 kB - Active(file): 2950064 kB - Buffers: 287952 kB - Cached: 4818144 kB - Inactive(file): 1578132 kB - Inactive(anon): 574764 kB - Inactive(file): 1567648 kB - MemAvailable: 6574984 kB - MemFree: 2057400 kB - MemTotal: 16325648 kB - Shmem: 577588 kB - SReclaimable: 346648 kB - """).encode()) as m: - with warnings.catch_warnings(record=True) as ws: - ret = psutil.virtual_memory() - assert m.called - self.assertEqual(ret.available, 6574984 * 1024) - w = ws[0] - self.assertIn( - "inactive memory stats couldn't be determined", str(w.message)) - - def test_avail_old_missing_fields(self): - # Remove Active(file), Inactive(file) and SReclaimable - # from /proc/meminfo and make sure the fallback is used - # (free + cached). - with mock_open_content( - "/proc/meminfo", - textwrap.dedent("""\ - Active: 9444728 kB - Active(anon): 6145416 kB - Buffers: 287952 kB - Cached: 4818144 kB - Inactive(file): 1578132 kB - Inactive(anon): 574764 kB - MemFree: 2057400 kB - MemTotal: 16325648 kB - Shmem: 577588 kB - """).encode()) as m: - with warnings.catch_warnings(record=True) as ws: - ret = psutil.virtual_memory() - assert m.called - self.assertEqual(ret.available, 2057400 * 1024 + 4818144 * 1024) - w = ws[0] - self.assertIn( - "inactive memory stats couldn't be determined", str(w.message)) - - def test_avail_old_missing_zoneinfo(self): - # Remove /proc/zoneinfo file. Make sure fallback is used - # (free + cached).
- with mock_open_content( - "/proc/meminfo", - textwrap.dedent("""\ - Active: 9444728 kB - Active(anon): 6145416 kB - Active(file): 2950064 kB - Buffers: 287952 kB - Cached: 4818144 kB - Inactive(file): 1578132 kB - Inactive(anon): 574764 kB - Inactive(file): 1567648 kB - MemFree: 2057400 kB - MemTotal: 16325648 kB - Shmem: 577588 kB - SReclaimable: 346648 kB - """).encode()): - with mock_open_exception( - "/proc/zoneinfo", - IOError(errno.ENOENT, 'no such file or directory')): - with warnings.catch_warnings(record=True) as ws: - ret = psutil.virtual_memory() - self.assertEqual( - ret.available, 2057400 * 1024 + 4818144 * 1024) - w = ws[0] - self.assertIn( - "inactive memory stats couldn't be determined", - str(w.message)) - - def test_virtual_memory_mocked(self): - # Emulate /proc/meminfo because neither vmstat nor free return slab. - def open_mock(name, *args, **kwargs): - if name == '/proc/meminfo': - return io.BytesIO(textwrap.dedent("""\ - MemTotal: 100 kB - MemFree: 2 kB - MemAvailable: 3 kB - Buffers: 4 kB - Cached: 5 kB - SwapCached: 6 kB - Active: 7 kB - Inactive: 8 kB - Active(anon): 9 kB - Inactive(anon): 10 kB - Active(file): 11 kB - Inactive(file): 12 kB - Unevictable: 13 kB - Mlocked: 14 kB - SwapTotal: 15 kB - SwapFree: 16 kB - Dirty: 17 kB - Writeback: 18 kB - AnonPages: 19 kB - Mapped: 20 kB - Shmem: 21 kB - Slab: 22 kB - SReclaimable: 23 kB - SUnreclaim: 24 kB - KernelStack: 25 kB - PageTables: 26 kB - NFS_Unstable: 27 kB - Bounce: 28 kB - WritebackTmp: 29 kB - CommitLimit: 30 kB - Committed_AS: 31 kB - VmallocTotal: 32 kB - VmallocUsed: 33 kB - VmallocChunk: 34 kB - HardwareCorrupted: 35 kB - AnonHugePages: 36 kB - ShmemHugePages: 37 kB - ShmemPmdMapped: 38 kB - CmaTotal: 39 kB - CmaFree: 40 kB - HugePages_Total: 41 kB - HugePages_Free: 42 kB - HugePages_Rsvd: 43 kB - HugePages_Surp: 44 kB - Hugepagesize: 45 kB - DirectMap46k: 46 kB - DirectMap47M: 47 kB - DirectMap48G: 48 kB - """).encode()) - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, create=True, side_effect=open_mock) as m: - mem = psutil.virtual_memory() - assert m.called - self.assertEqual(mem.total, 100 * 1024) - self.assertEqual(mem.free, 2 * 1024) - self.assertEqual(mem.buffers, 4 * 1024) - # cached mem also includes reclaimable memory - self.assertEqual(mem.cached, (5 + 23) * 1024) - self.assertEqual(mem.shared, 21 * 1024) - self.assertEqual(mem.active, 7 * 1024) - self.assertEqual(mem.inactive, 8 * 1024) - self.assertEqual(mem.slab, 22 * 1024) - self.assertEqual(mem.available, 3 * 1024) - - -# ===================================================================== -# --- system swap memory -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemSwapMemory(unittest.TestCase): - - @staticmethod - def meminfo_has_swap_info(): - """Return True if /proc/meminfo provides swap metrics.""" - with open("/proc/meminfo") as f: - data = f.read() - return 'SwapTotal:' in data and 'SwapFree:' in data - - def test_total(self): - free_value = free_swap().total - psutil_value = psutil.swap_memory().total - return self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_used(self): - free_value = free_swap().used - psutil_value = psutil.swap_memory().used - return self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def 
test_free(self): - free_value = free_swap().free - psutil_value = psutil.swap_memory().free - return self.assertAlmostEqual( - free_value, psutil_value, delta=MEMORY_TOLERANCE) - - def test_missing_sin_sout(self): - with mock.patch('psutil._pslinux.open', create=True) as m: - with warnings.catch_warnings(record=True) as ws: - warnings.simplefilter("always") - ret = psutil.swap_memory() - assert m.called - self.assertEqual(len(ws), 1) - w = ws[0] - assert w.filename.endswith('psutil/_pslinux.py') - self.assertIn( - "'sin' and 'sout' swap memory stats couldn't " - "be determined", str(w.message)) - self.assertEqual(ret.sin, 0) - self.assertEqual(ret.sout, 0) - - def test_no_vmstat_mocked(self): - # see https://github.com/giampaolo/psutil/issues/722 - with mock_open_exception( - "/proc/vmstat", - IOError(errno.ENOENT, 'no such file or directory')) as m: - with warnings.catch_warnings(record=True) as ws: - warnings.simplefilter("always") - ret = psutil.swap_memory() - assert m.called - self.assertEqual(len(ws), 1) - w = ws[0] - assert w.filename.endswith('psutil/_pslinux.py') - self.assertIn( - "'sin' and 'sout' swap memory stats couldn't " - "be determined and were set to 0", - str(w.message)) - self.assertEqual(ret.sin, 0) - self.assertEqual(ret.sout, 0) - - def test_meminfo_against_sysinfo(self): - # Make sure the content of /proc/meminfo about swap memory - # matches sysinfo() syscall, see: - # https://github.com/giampaolo/psutil/issues/1015 - if not self.meminfo_has_swap_info(): - return unittest.skip("/proc/meminfo has no swap metrics") - with mock.patch('psutil._pslinux.cext.linux_sysinfo') as m: - swap = psutil.swap_memory() - assert not m.called - import psutil._psutil_linux as cext - _, _, _, _, total, free, unit_multiplier = cext.linux_sysinfo() - total *= unit_multiplier - free *= unit_multiplier - self.assertEqual(swap.total, total) - self.assertEqual(swap.free, free) - - def test_emulate_meminfo_has_no_metrics(self): - # Emulate a case where /proc/meminfo provides no swap metrics - # in which case sysinfo() syscall is supposed to be used - # as a fallback. 
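The test_meminfo_against_sysinfo cross-check above can also be run by hand through psutil's private C extension; a sketch (internal API, Linux only, unpacking mirrors the removed test):

    import psutil
    import psutil._psutil_linux as cext

    # Last three fields are swap total, swap free, and a unit multiplier;
    # the leading four are unused here (same unpacking as the test above).
    _, _, _, _, total, free, unit = cext.linux_sysinfo()
    print(total * unit, psutil.swap_memory().total)  # expected to agree
    print(free * unit, psutil.swap_memory().free)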
- with mock_open_content("/proc/meminfo", b"") as m: - psutil.swap_memory() - assert m.called - - -# ===================================================================== -# --- system CPU -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemCPU(unittest.TestCase): - - @unittest.skipIf(TRAVIS, "unknown failure on travis") - def test_cpu_times(self): - fields = psutil.cpu_times()._fields - kernel_ver = re.findall(r'\d+\.\d+\.\d+', os.uname()[2])[0] - kernel_ver_info = tuple(map(int, kernel_ver.split('.'))) - if kernel_ver_info >= (2, 6, 11): - self.assertIn('steal', fields) - else: - self.assertNotIn('steal', fields) - if kernel_ver_info >= (2, 6, 24): - self.assertIn('guest', fields) - else: - self.assertNotIn('guest', fields) - if kernel_ver_info >= (3, 2, 0): - self.assertIn('guest_nice', fields) - else: - self.assertNotIn('guest_nice', fields) - - @unittest.skipIf(not os.path.exists("/sys/devices/system/cpu/online"), - "/sys/devices/system/cpu/online does not exist") - def test_cpu_count_logical_w_sysdev_cpu_online(self): - with open("/sys/devices/system/cpu/online") as f: - value = f.read().strip() - if "-" in str(value): - value = int(value.split('-')[1]) + 1 - self.assertEqual(psutil.cpu_count(), value) - - @unittest.skipIf(not os.path.exists("/sys/devices/system/cpu"), - "/sys/devices/system/cpu does not exist") - def test_cpu_count_logical_w_sysdev_cpu_num(self): - ls = os.listdir("/sys/devices/system/cpu") - count = len([x for x in ls if re.search(r"cpu\d+$", x) is not None]) - self.assertEqual(psutil.cpu_count(), count) - - @unittest.skipIf(not which("nproc"), "nproc utility not available") - def test_cpu_count_logical_w_nproc(self): - num = int(sh("nproc --all")) - self.assertEqual(psutil.cpu_count(logical=True), num) - - @unittest.skipIf(not which("lscpu"), "lscpu utility not available") - def test_cpu_count_logical_w_lscpu(self): - out = sh("lscpu -p") - num = len([x for x in out.split('\n') if not x.startswith('#')]) - self.assertEqual(psutil.cpu_count(logical=True), num) - - def test_cpu_count_logical_mocked(self): - import psutil._pslinux - original = psutil._pslinux.cpu_count_logical() - # Here we want to mock os.sysconf("SC_NPROCESSORS_ONLN") in - # order to cause the parsing of /proc/cpuinfo and /proc/stat. - with mock.patch( - 'psutil._pslinux.os.sysconf', side_effect=ValueError) as m: - self.assertEqual(psutil._pslinux.cpu_count_logical(), original) - assert m.called - - # Let's have open() return emtpy data and make sure None is - # returned ('cause we mimick os.cpu_count()). - with mock.patch('psutil._pslinux.open', create=True) as m: - self.assertIsNone(psutil._pslinux.cpu_count_logical()) - self.assertEqual(m.call_count, 2) - # /proc/stat should be the last one - self.assertEqual(m.call_args[0][0], '/proc/stat') - - # Let's push this a bit further and make sure /proc/cpuinfo - # parsing works as expected. 
- with open('/proc/cpuinfo', 'rb') as f: - cpuinfo_data = f.read() - fake_file = io.BytesIO(cpuinfo_data) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m: - self.assertEqual(psutil._pslinux.cpu_count_logical(), original) - - # Finally, let's make /proc/cpuinfo return meaningless data; - # this way we'll fall back on relying on /proc/stat - with mock_open_content('/proc/cpuinfo', b"") as m: - self.assertEqual(psutil._pslinux.cpu_count_logical(), original) - m.called - - def test_cpu_count_physical_mocked(self): - # Have open() return emtpy data and make sure None is returned - # ('cause we want to mimick os.cpu_count()) - with mock.patch('psutil._pslinux.open', create=True) as m: - self.assertIsNone(psutil._pslinux.cpu_count_physical()) - assert m.called - - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq_no_result(self): - with mock.patch("psutil._pslinux.glob.glob", return_value=[]): - self.assertIsNone(psutil.cpu_freq()) - - @unittest.skipIf(TRAVIS, "fails on Travis") - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq_use_second_file(self): - # https://github.com/giampaolo/psutil/issues/981 - def glob_mock(pattern): - if pattern.startswith("/sys/devices/system/cpu/cpufreq/policy"): - flags.append(None) - return [] - else: - flags.append(None) - return orig_glob(pattern) - - flags = [] - orig_glob = glob.glob - with mock.patch("psutil._pslinux.glob.glob", side_effect=glob_mock, - create=True): - assert psutil.cpu_freq() - self.assertEqual(len(flags), 2) - - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq_emulate_data(self): - def open_mock(name, *args, **kwargs): - if name.endswith('/scaling_cur_freq'): - return io.BytesIO(b"500000") - elif name.endswith('/scaling_min_freq'): - return io.BytesIO(b"600000") - elif name.endswith('/scaling_max_freq'): - return io.BytesIO(b"700000") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch( - 'glob.glob', - return_value=['/sys/devices/system/cpu/cpufreq/policy0']): - freq = psutil.cpu_freq() - self.assertEqual(freq.current, 500.0) - self.assertEqual(freq.min, 600.0) - self.assertEqual(freq.max, 700.0) - - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq_emulate_multi_cpu(self): - def open_mock(name, *args, **kwargs): - if name.endswith('/scaling_cur_freq'): - return io.BytesIO(b"100000") - elif name.endswith('/scaling_min_freq'): - return io.BytesIO(b"200000") - elif name.endswith('/scaling_max_freq'): - return io.BytesIO(b"300000") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - policies = ['/sys/devices/system/cpu/cpufreq/policy0', - '/sys/devices/system/cpu/cpufreq/policy1', - '/sys/devices/system/cpu/cpufreq/policy2'] - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch('glob.glob', return_value=policies): - freq = psutil.cpu_freq() - self.assertEqual(freq.current, 100.0) - self.assertEqual(freq.min, 200.0) - self.assertEqual(freq.max, 300.0) - - @unittest.skipIf(TRAVIS, "fails on Travis") - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq_no_scaling_cur_freq_file(self): - # See: https://github.com/giampaolo/psutil/issues/1071 - def open_mock(name, *args, **kwargs): - if name.endswith('/scaling_cur_freq'): - raise 
IOError(errno.ENOENT, "") - elif name.endswith('/cpuinfo_cur_freq'): - return io.BytesIO(b"200000") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - policies = ['/sys/devices/system/cpu/cpufreq/policy0', - '/sys/devices/system/cpu/cpufreq/policy1', - '/sys/devices/system/cpu/cpufreq/policy2'] - - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch('glob.glob', return_value=policies): - freq = psutil.cpu_freq() - self.assertEqual(freq.current, 200) - - # Also test that NotImplementedError is raised in case no - # current freq file is present. - - def open_mock(name, *args, **kwargs): - if name.endswith('/scaling_cur_freq'): - raise IOError(errno.ENOENT, "") - elif name.endswith('/cpuinfo_cur_freq'): - raise IOError(errno.ENOENT, "") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch('glob.glob', return_value=policies): - self.assertRaises(NotImplementedError, psutil.cpu_freq) - - -# ===================================================================== -# --- system CPU stats -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemCPUStats(unittest.TestCase): - - @unittest.skipIf(TRAVIS, "fails on Travis") - def test_ctx_switches(self): - vmstat_value = vmstat("context switches") - psutil_value = psutil.cpu_stats().ctx_switches - self.assertAlmostEqual(vmstat_value, psutil_value, delta=500) - - @unittest.skipIf(TRAVIS, "fails on Travis") - def test_interrupts(self): - vmstat_value = vmstat("interrupts") - psutil_value = psutil.cpu_stats().interrupts - self.assertAlmostEqual(vmstat_value, psutil_value, delta=500) - - -# ===================================================================== -# --- system network -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemNetwork(unittest.TestCase): - - def test_net_if_addrs_ips(self): - for name, addrs in psutil.net_if_addrs().items(): - for addr in addrs: - if addr.family == psutil.AF_LINK: - self.assertEqual(addr.address, get_mac_address(name)) - elif addr.family == socket.AF_INET: - self.assertEqual(addr.address, get_ipv4_address(name)) - # TODO: test for AF_INET6 family - - def test_net_if_stats(self): - for name, stats in psutil.net_if_stats().items(): - try: - out = sh("ifconfig %s" % name) - except RuntimeError: - pass - else: - # Not always reliable. 
- # self.assertEqual(stats.isup, 'RUNNING' in out, msg=out) - self.assertEqual(stats.mtu, - int(re.findall(r'(?i)MTU[: ](\d+)', out)[0])) - - @retry_before_failing() - def test_net_io_counters(self): - def ifconfig(nic): - ret = {} - out = sh("ifconfig %s" % name) - ret['packets_recv'] = int( - re.findall(r'RX packets[: ](\d+)', out)[0]) - ret['packets_sent'] = int( - re.findall(r'TX packets[: ](\d+)', out)[0]) - ret['errin'] = int(re.findall(r'errors[: ](\d+)', out)[0]) - ret['errout'] = int(re.findall(r'errors[: ](\d+)', out)[1]) - ret['dropin'] = int(re.findall(r'dropped[: ](\d+)', out)[0]) - ret['dropout'] = int(re.findall(r'dropped[: ](\d+)', out)[1]) - ret['bytes_recv'] = int( - re.findall(r'RX (?:packets \d+ +)?bytes[: ](\d+)', out)[0]) - ret['bytes_sent'] = int( - re.findall(r'TX (?:packets \d+ +)?bytes[: ](\d+)', out)[0]) - return ret - - nio = psutil.net_io_counters(pernic=True, nowrap=False) - for name, stats in nio.items(): - try: - ifconfig_ret = ifconfig(name) - except RuntimeError: - continue - self.assertAlmostEqual( - stats.bytes_recv, ifconfig_ret['bytes_recv'], delta=1024 * 5) - self.assertAlmostEqual( - stats.bytes_sent, ifconfig_ret['bytes_sent'], delta=1024 * 5) - self.assertAlmostEqual( - stats.packets_recv, ifconfig_ret['packets_recv'], delta=1024) - self.assertAlmostEqual( - stats.packets_sent, ifconfig_ret['packets_sent'], delta=1024) - self.assertAlmostEqual( - stats.errin, ifconfig_ret['errin'], delta=10) - self.assertAlmostEqual( - stats.errout, ifconfig_ret['errout'], delta=10) - self.assertAlmostEqual( - stats.dropin, ifconfig_ret['dropin'], delta=10) - self.assertAlmostEqual( - stats.dropout, ifconfig_ret['dropout'], delta=10) - - # XXX - not reliable when having virtual NICs installed by Docker. - # @unittest.skipIf(not which('ip'), "'ip' utility not available") - # @unittest.skipIf(TRAVIS, "skipped on Travis") - # def test_net_if_names(self): - # out = sh("ip addr").strip() - # nics = [x for x in psutil.net_if_addrs().keys() if ':' not in x] - # found = 0 - # for line in out.split('\n'): - # line = line.strip() - # if re.search(r"^\d+:", line): - # found += 1 - # name = line.split(':')[1].strip() - # self.assertIn(name, nics) - # self.assertEqual(len(nics), found, msg="%s\n---\n%s" % ( - # pprint.pformat(nics), out)) - - @mock.patch('psutil._pslinux.socket.inet_ntop', side_effect=ValueError) - @mock.patch('psutil._pslinux.supports_ipv6', return_value=False) - def test_net_connections_ipv6_unsupported(self, supports_ipv6, inet_ntop): - # see: https://github.com/giampaolo/psutil/issues/623 - try: - s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) - self.addCleanup(s.close) - s.bind(("::1", 0)) - except socket.error: - pass - psutil.net_connections(kind='inet6') - - def test_net_connections_mocked(self): - with mock_open_content( - '/proc/net/unix', - textwrap.dedent("""\ - 0: 00000003 000 000 0001 03 462170 @/tmp/dbus-Qw2hMPIU3n - 0: 00000003 000 000 0001 03 35010 @/tmp/dbus-tB2X8h69BQ - 0: 00000003 000 000 0001 03 34424 @/tmp/dbus-cHy80Y8O - 000000000000000000000000000000000000000000000000000000 - """)) as m: - psutil.net_connections(kind='unix') - assert m.called - - -# ===================================================================== -# --- system disk -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSystemDisks(unittest.TestCase): - - @unittest.skipIf(not hasattr(os, 'statvfs'), "os.statvfs() not available") - @skip_on_not_implemented() - def 
test_disk_partitions_and_usage(self): - # test psutil.disk_usage() and psutil.disk_partitions() - # against "df -a" - def df(path): - out = sh('df -P -B 1 "%s"' % path).strip() - lines = out.split('\n') - lines.pop(0) - line = lines.pop(0) - dev, total, used, free = line.split()[:4] - if dev == 'none': - dev = '' - total, used, free = int(total), int(used), int(free) - return dev, total, used, free - - for part in psutil.disk_partitions(all=False): - usage = psutil.disk_usage(part.mountpoint) - dev, total, used, free = df(part.mountpoint) - self.assertEqual(usage.total, total) - # 10 MB tollerance - if abs(usage.free - free) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.free, free)) - if abs(usage.used - used) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.used, used)) - - def test_disk_partitions_mocked(self): - # Test that ZFS partitions are returned. - with open("/proc/filesystems", "r") as f: - data = f.read() - if 'zfs' in data: - for part in psutil.disk_partitions(): - if part.fstype == 'zfs': - break - else: - self.fail("couldn't find any ZFS partition") - else: - # No ZFS partitions on this system. Let's fake one. - fake_file = io.StringIO(u("nodev\tzfs\n")) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m1: - with mock.patch( - 'psutil._pslinux.cext.disk_partitions', - return_value=[('/dev/sdb3', '/', 'zfs', 'rw')]) as m2: - ret = psutil.disk_partitions() - assert m1.called - assert m2.called - assert ret - self.assertEqual(ret[0].fstype, 'zfs') - - def test_disk_io_counters_kernel_2_4_mocked(self): - # Tests /proc/diskstats parsing format for 2.4 kernels, see: - # https://github.com/giampaolo/psutil/issues/767 - with mock_open_content( - '/proc/diskstats', - " 3 0 1 hda 2 3 4 5 6 7 8 9 10 11 12"): - with mock.patch('psutil._pslinux.is_storage_device', - return_value=True): - ret = psutil.disk_io_counters(nowrap=False) - self.assertEqual(ret.read_count, 1) - self.assertEqual(ret.read_merged_count, 2) - self.assertEqual(ret.read_bytes, 3 * SECTOR_SIZE) - self.assertEqual(ret.read_time, 4) - self.assertEqual(ret.write_count, 5) - self.assertEqual(ret.write_merged_count, 6) - self.assertEqual(ret.write_bytes, 7 * SECTOR_SIZE) - self.assertEqual(ret.write_time, 8) - self.assertEqual(ret.busy_time, 10) - - def test_disk_io_counters_kernel_2_6_full_mocked(self): - # Tests /proc/diskstats parsing format for 2.6 kernels, - # lines reporting all metrics: - # https://github.com/giampaolo/psutil/issues/767 - with mock_open_content( - '/proc/diskstats', - " 3 0 hda 1 2 3 4 5 6 7 8 9 10 11"): - with mock.patch('psutil._pslinux.is_storage_device', - return_value=True): - ret = psutil.disk_io_counters(nowrap=False) - self.assertEqual(ret.read_count, 1) - self.assertEqual(ret.read_merged_count, 2) - self.assertEqual(ret.read_bytes, 3 * SECTOR_SIZE) - self.assertEqual(ret.read_time, 4) - self.assertEqual(ret.write_count, 5) - self.assertEqual(ret.write_merged_count, 6) - self.assertEqual(ret.write_bytes, 7 * SECTOR_SIZE) - self.assertEqual(ret.write_time, 8) - self.assertEqual(ret.busy_time, 10) - - def test_disk_io_counters_kernel_2_6_limited_mocked(self): - # Tests /proc/diskstats parsing format for 2.6 kernels, - # where one line of /proc/partitions return a limited - # amount of metrics when it bumps into a partition - # (instead of a disk). 
See: - # https://github.com/giampaolo/psutil/issues/767 - with mock_open_content( - '/proc/diskstats', - " 3 1 hda 1 2 3 4"): - with mock.patch('psutil._pslinux.is_storage_device', - return_value=True): - ret = psutil.disk_io_counters(nowrap=False) - self.assertEqual(ret.read_count, 1) - self.assertEqual(ret.read_bytes, 2 * SECTOR_SIZE) - self.assertEqual(ret.write_count, 3) - self.assertEqual(ret.write_bytes, 4 * SECTOR_SIZE) - - self.assertEqual(ret.read_merged_count, 0) - self.assertEqual(ret.read_time, 0) - self.assertEqual(ret.write_merged_count, 0) - self.assertEqual(ret.write_time, 0) - self.assertEqual(ret.busy_time, 0) - - def test_disk_io_counters_include_partitions(self): - # Make sure that when perdisk=True disk partitions are returned, - # see: - # https://github.com/giampaolo/psutil/pull/1313#issuecomment-408626842 - with mock_open_content( - '/proc/diskstats', - textwrap.dedent("""\ - 3 0 nvme0n1 1 2 3 4 5 6 7 8 9 10 11 - 3 0 nvme0n1p1 1 2 3 4 5 6 7 8 9 10 11 - """)): - with mock.patch('psutil._pslinux.is_storage_device', - return_value=False): - ret = psutil.disk_io_counters(perdisk=True, nowrap=False) - self.assertEqual(len(ret), 2) - self.assertEqual(ret['nvme0n1'].read_count, 1) - self.assertEqual(ret['nvme0n1p1'].read_count, 1) - self.assertEqual(ret['nvme0n1'].write_count, 5) - self.assertEqual(ret['nvme0n1p1'].write_count, 5) - - def test_disk_io_counters_exclude_partitions(self): - # Make sure that when perdisk=False partitions (e.g. 'sda1', - # 'nvme0n1p1') are skipped and not included in the total count. - # https://github.com/giampaolo/psutil/pull/1313#issuecomment-408626842 - with mock_open_content( - '/proc/diskstats', - textwrap.dedent("""\ - 3 0 nvme0n1 1 2 3 4 5 6 7 8 9 10 11 - 3 0 nvme0n1p1 1 2 3 4 5 6 7 8 9 10 11 - """)): - with mock.patch('psutil._pslinux.is_storage_device', - return_value=False): - ret = psutil.disk_io_counters(perdisk=False, nowrap=False) - self.assertIsNone(ret) - - # - def is_storage_device(name): - return name == 'nvme0n1' - - with mock_open_content( - '/proc/diskstats', - textwrap.dedent("""\ - 3 0 nvme0n1 1 2 3 4 5 6 7 8 9 10 11 - 3 0 nvme0n1p1 1 2 3 4 5 6 7 8 9 10 11 - """)): - with mock.patch('psutil._pslinux.is_storage_device', - create=True, side_effect=is_storage_device): - ret = psutil.disk_io_counters(perdisk=False, nowrap=False) - self.assertEqual(ret.read_count, 1) - self.assertEqual(ret.write_count, 5) - - def test_disk_io_counters_sysfs(self): - def exists(path): - if path == '/proc/diskstats': - return False - return True - - wprocfs = psutil.disk_io_counters(perdisk=True) - with mock.patch('psutil._pslinux.os.path.exists', - create=True, side_effect=exists): - wsysfs = psutil.disk_io_counters(perdisk=True) - self.assertEqual(len(wprocfs), len(wsysfs)) - - def test_disk_io_counters_not_impl(self): - def exists(path): - return False - - with mock.patch('psutil._pslinux.os.path.exists', - create=True, side_effect=exists): - self.assertRaises(NotImplementedError, psutil.disk_io_counters) - - -# ===================================================================== -# --- misc -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestMisc(unittest.TestCase): - - def test_boot_time(self): - vmstat_value = vmstat('boot time') - psutil_value = psutil.boot_time() - self.assertEqual(int(vmstat_value), int(psutil_value)) - - @mock.patch('psutil.traceback.print_exc') - def test_no_procfs_on_import(self, tb): - my_procfs = tempfile.mkdtemp() - - with 
open(os.path.join(my_procfs, 'stat'), 'w') as f: - f.write('cpu 0 0 0 0 0 0 0 0 0 0\n') - f.write('cpu0 0 0 0 0 0 0 0 0 0 0\n') - f.write('cpu1 0 0 0 0 0 0 0 0 0 0\n') - - try: - orig_open = open - - def open_mock(name, *args, **kwargs): - if name.startswith('/proc'): - raise IOError(errno.ENOENT, 'rejecting access for test') - return orig_open(name, *args, **kwargs) - - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock): - reload_module(psutil) - assert tb.called - - self.assertRaises(IOError, psutil.cpu_times) - self.assertRaises(IOError, psutil.cpu_times, percpu=True) - self.assertRaises(IOError, psutil.cpu_percent) - self.assertRaises(IOError, psutil.cpu_percent, percpu=True) - self.assertRaises(IOError, psutil.cpu_times_percent) - self.assertRaises( - IOError, psutil.cpu_times_percent, percpu=True) - - psutil.PROCFS_PATH = my_procfs - - self.assertEqual(psutil.cpu_percent(), 0) - self.assertEqual(sum(psutil.cpu_times_percent()), 0) - - # since we don't know the number of CPUs at import time, - # we awkwardly say there are none until the second call - per_cpu_percent = psutil.cpu_percent(percpu=True) - self.assertEqual(sum(per_cpu_percent), 0) - - # ditto awkward length - per_cpu_times_percent = psutil.cpu_times_percent(percpu=True) - self.assertEqual(sum(map(sum, per_cpu_times_percent)), 0) - - # much user, very busy - with open(os.path.join(my_procfs, 'stat'), 'w') as f: - f.write('cpu 1 0 0 0 0 0 0 0 0 0\n') - f.write('cpu0 1 0 0 0 0 0 0 0 0 0\n') - f.write('cpu1 1 0 0 0 0 0 0 0 0 0\n') - - self.assertNotEqual(psutil.cpu_percent(), 0) - self.assertNotEqual( - sum(psutil.cpu_percent(percpu=True)), 0) - self.assertNotEqual(sum(psutil.cpu_times_percent()), 0) - self.assertNotEqual( - sum(map(sum, psutil.cpu_times_percent(percpu=True))), 0) - finally: - shutil.rmtree(my_procfs) - reload_module(psutil) - - self.assertEqual(psutil.PROCFS_PATH, '/proc') - - def test_cpu_steal_decrease(self): - # Test cumulative cpu stats decrease. We should ignore this. - # See issue #1210. - with mock_open_content( - "/proc/stat", - textwrap.dedent("""\ - cpu 0 0 0 0 0 0 0 1 0 0 - cpu0 0 0 0 0 0 0 0 1 0 0 - cpu1 0 0 0 0 0 0 0 1 0 0 - """).encode()) as m: - # first call to "percent" functions should read the new stat file - # and compare to the "real" file read at import time - so the - # values are meaningless - psutil.cpu_percent() - assert m.called - psutil.cpu_percent(percpu=True) - psutil.cpu_times_percent() - psutil.cpu_times_percent(percpu=True) - - with mock_open_content( - "/proc/stat", - textwrap.dedent("""\ - cpu 1 0 0 0 0 0 0 0 0 0 - cpu0 1 0 0 0 0 0 0 0 0 0 - cpu1 1 0 0 0 0 0 0 0 0 0 - """).encode()) as m: - # Increase "user" while steal goes "backwards" to zero. 
- cpu_percent = psutil.cpu_percent() - assert m.called - cpu_percent_percpu = psutil.cpu_percent(percpu=True) - cpu_times_percent = psutil.cpu_times_percent() - cpu_times_percent_percpu = psutil.cpu_times_percent(percpu=True) - self.assertNotEqual(cpu_percent, 0) - self.assertNotEqual(sum(cpu_percent_percpu), 0) - self.assertNotEqual(sum(cpu_times_percent), 0) - self.assertNotEqual(sum(cpu_times_percent), 100.0) - self.assertNotEqual(sum(map(sum, cpu_times_percent_percpu)), 0) - self.assertNotEqual(sum(map(sum, cpu_times_percent_percpu)), 100.0) - self.assertEqual(cpu_times_percent.steal, 0) - self.assertNotEqual(cpu_times_percent.user, 0) - - def test_boot_time_mocked(self): - with mock.patch('psutil._pslinux.open', create=True) as m: - self.assertRaises( - RuntimeError, - psutil._pslinux.boot_time) - assert m.called - - def test_users_mocked(self): - # Make sure ':0' and ':0.0' (returned by C ext) are converted - # to 'localhost'. - with mock.patch('psutil._pslinux.cext.users', - return_value=[('giampaolo', 'pts/2', ':0', - 1436573184.0, True, 2)]) as m: - self.assertEqual(psutil.users()[0].host, 'localhost') - assert m.called - with mock.patch('psutil._pslinux.cext.users', - return_value=[('giampaolo', 'pts/2', ':0.0', - 1436573184.0, True, 2)]) as m: - self.assertEqual(psutil.users()[0].host, 'localhost') - assert m.called - # ...otherwise it should be returned as-is - with mock.patch('psutil._pslinux.cext.users', - return_value=[('giampaolo', 'pts/2', 'foo', - 1436573184.0, True, 2)]) as m: - self.assertEqual(psutil.users()[0].host, 'foo') - assert m.called - - def test_procfs_path(self): - tdir = tempfile.mkdtemp() - try: - psutil.PROCFS_PATH = tdir - self.assertRaises(IOError, psutil.virtual_memory) - self.assertRaises(IOError, psutil.cpu_times) - self.assertRaises(IOError, psutil.cpu_times, percpu=True) - self.assertRaises(IOError, psutil.boot_time) - # self.assertRaises(IOError, psutil.pids) - self.assertRaises(IOError, psutil.net_connections) - self.assertRaises(IOError, psutil.net_io_counters) - self.assertRaises(IOError, psutil.net_if_stats) - # self.assertRaises(IOError, psutil.disk_io_counters) - self.assertRaises(IOError, psutil.disk_partitions) - self.assertRaises(psutil.NoSuchProcess, psutil.Process) - finally: - psutil.PROCFS_PATH = "/proc" - os.rmdir(tdir) - - def test_issue_687(self): - # In case of thread ID: - # - pid_exists() is supposed to return False - # - Process(tid) is supposed to work - # - pids() should not return the TID - # See: https://github.com/giampaolo/psutil/issues/687 - t = ThreadTask() - t.start() - try: - p = psutil.Process() - tid = p.threads()[1].id - assert not psutil.pid_exists(tid), tid - pt = psutil.Process(tid) - pt.as_dict() - self.assertNotIn(tid, psutil.pids()) - finally: - t.stop() - - def test_pid_exists_no_proc_status(self): - # Internally pid_exists relies on /proc/{pid}/status. - # Emulate a case where this file is empty in which case - # psutil is supposed to fall back on using pids(). 
- with mock_open_content("/proc/%s/status", "") as m: - assert psutil.pid_exists(os.getpid()) - assert m.called - - -# ===================================================================== -# --- sensors -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -@unittest.skipIf(not HAS_BATTERY, "no battery") -class TestSensorsBattery(unittest.TestCase): - - @unittest.skipIf(not which("acpi"), "acpi utility not available") - def test_percent(self): - out = sh("acpi -b") - acpi_value = int(out.split(",")[1].strip().replace('%', '')) - psutil_value = psutil.sensors_battery().percent - self.assertAlmostEqual(acpi_value, psutil_value, delta=1) - - @unittest.skipIf(not which("acpi"), "acpi utility not available") - def test_power_plugged(self): - out = sh("acpi -b") - if 'unknown' in out.lower(): - return unittest.skip("acpi output not reliable") - if 'discharging at zero rate' in out: - plugged = True - else: - plugged = "Charging" in out.split('\n')[0] - self.assertEqual(psutil.sensors_battery().power_plugged, plugged) - - def test_emulate_power_plugged(self): - # Pretend the AC power cable is connected. - def open_mock(name, *args, **kwargs): - if name.endswith("AC0/online") or name.endswith("AC/online"): - return io.BytesIO(b"1") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - self.assertEqual(psutil.sensors_battery().power_plugged, True) - self.assertEqual( - psutil.sensors_battery().secsleft, psutil.POWER_TIME_UNLIMITED) - assert m.called - - def test_emulate_power_plugged_2(self): - # Same as above but pretend /AC0/online does not exist in which - # case code relies on /status file. - def open_mock(name, *args, **kwargs): - if name.endswith("AC0/online") or name.endswith("AC/online"): - raise IOError(errno.ENOENT, "") - elif name.endswith("/status"): - return io.StringIO(u("charging")) - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - self.assertEqual(psutil.sensors_battery().power_plugged, True) - assert m.called - - def test_emulate_power_not_plugged(self): - # Pretend the AC power cable is not connected. - def open_mock(name, *args, **kwargs): - if name.endswith("AC0/online") or name.endswith("AC/online"): - return io.BytesIO(b"0") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - self.assertEqual(psutil.sensors_battery().power_plugged, False) - assert m.called - - def test_emulate_power_not_plugged_2(self): - # Same as above but pretend /AC0/online does not exist in which - # case code relies on /status file. 
- def open_mock(name, *args, **kwargs): - if name.endswith("AC0/online") or name.endswith("AC/online"): - raise IOError(errno.ENOENT, "") - elif name.endswith("/status"): - return io.StringIO(u("discharging")) - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - self.assertEqual(psutil.sensors_battery().power_plugged, False) - assert m.called - - def test_emulate_power_undetermined(self): - # Pretend we can't know whether the AC power cable not - # connected (assert fallback to False). - def open_mock(name, *args, **kwargs): - if name.startswith("/sys/class/power_supply/AC0/online") or \ - name.startswith("/sys/class/power_supply/AC/online"): - raise IOError(errno.ENOENT, "") - elif name.startswith("/sys/class/power_supply/BAT0/status"): - return io.BytesIO(b"???") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - self.assertIsNone(psutil.sensors_battery().power_plugged) - assert m.called - - def test_emulate_no_base_files(self): - # Emulate a case where base metrics files are not present, - # in which case we're supposed to get None. - with mock_open_exception( - "/sys/class/power_supply/BAT0/energy_now", - IOError(errno.ENOENT, "")): - with mock_open_exception( - "/sys/class/power_supply/BAT0/charge_now", - IOError(errno.ENOENT, "")): - self.assertIsNone(psutil.sensors_battery()) - - def test_emulate_energy_full_0(self): - # Emulate a case where energy_full files returns 0. - with mock_open_content( - "/sys/class/power_supply/BAT0/energy_full", b"0") as m: - self.assertEqual(psutil.sensors_battery().percent, 0) - assert m.called - - def test_emulate_energy_full_not_avail(self): - # Emulate a case where energy_full file does not exist. - # Expected fallback on /capacity. - with mock_open_exception( - "/sys/class/power_supply/BAT0/energy_full", - IOError(errno.ENOENT, "")): - with mock_open_exception( - "/sys/class/power_supply/BAT0/charge_full", - IOError(errno.ENOENT, "")): - with mock_open_content( - "/sys/class/power_supply/BAT0/capacity", b"88"): - self.assertEqual(psutil.sensors_battery().percent, 88) - - def test_emulate_no_power(self): - # Emulate a case where /AC0/online file nor /BAT0/status exist. 
- with mock_open_exception( - "/sys/class/power_supply/AC/online", - IOError(errno.ENOENT, "")): - with mock_open_exception( - "/sys/class/power_supply/AC0/online", - IOError(errno.ENOENT, "")): - with mock_open_exception( - "/sys/class/power_supply/BAT0/status", - IOError(errno.ENOENT, "")): - self.assertIsNone(psutil.sensors_battery().power_plugged) - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSensorsTemperatures(unittest.TestCase): - - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - def test_emulate_eio_error(self): - def open_mock(name, *args, **kwargs): - if name.endswith("_input"): - raise OSError(errno.EIO, "") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - with warnings.catch_warnings(record=True) as ws: - self.assertEqual(psutil.sensors_temperatures(), {}) - assert m.called - self.assertIn("ignoring", str(ws[0].message)) - - def test_emulate_data(self): - def open_mock(name, *args, **kwargs): - if name.endswith('/name'): - return io.StringIO(u("name")) - elif name.endswith('/temp1_label'): - return io.StringIO(u("label")) - elif name.endswith('/temp1_input'): - return io.BytesIO(b"30000") - elif name.endswith('/temp1_max'): - return io.BytesIO(b"40000") - elif name.endswith('/temp1_crit'): - return io.BytesIO(b"50000") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch('glob.glob', - return_value=['/sys/class/hwmon/hwmon0/temp1']): - temp = psutil.sensors_temperatures()['name'][0] - self.assertEqual(temp.label, 'label') - self.assertEqual(temp.current, 30.0) - self.assertEqual(temp.high, 40.0) - self.assertEqual(temp.critical, 50.0) - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestSensorsFans(unittest.TestCase): - - def test_emulate_data(self): - def open_mock(name, *args, **kwargs): - if name.endswith('/name'): - return io.StringIO(u("name")) - elif name.endswith('/fan1_label'): - return io.StringIO(u("label")) - elif name.endswith('/fan1_input'): - return io.StringIO(u("2000")) - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock): - with mock.patch('glob.glob', - return_value=['/sys/class/hwmon/hwmon2/fan1']): - fan = psutil.sensors_fans()['name'][0] - self.assertEqual(fan.label, 'label') - self.assertEqual(fan.current, 2000) - - -# ===================================================================== -# --- test process -# ===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestProcess(unittest.TestCase): - - def setUp(self): - safe_rmpath(TESTFN) - - tearDown = setUp - - def test_memory_full_info(self): - src = textwrap.dedent(""" - import time - with open("%s", "w") as f: - time.sleep(10) - """ % TESTFN) - sproc = pyrun(src) - self.addCleanup(reap_children) - call_until(lambda: os.listdir('.'), "'%s' not in ret" % TESTFN) - p = psutil.Process(sproc.pid) - time.sleep(.1) - mem = p.memory_full_info() - maps = p.memory_maps(grouped=False) - self.assertAlmostEqual( - mem.uss, sum([x.private_dirty + x.private_clean for x in maps]), - delta=4096) - self.assertAlmostEqual( - mem.pss, sum([x.pss for x in maps]), delta=4096) - self.assertAlmostEqual( - 
mem.swap, sum([x.swap for x in maps]), delta=4096) - - def test_memory_full_info_mocked(self): - # See: https://github.com/giampaolo/psutil/issues/1222 - with mock_open_content( - "/proc/%s/smaps" % os.getpid(), - textwrap.dedent("""\ - fffff0 r-xp 00000000 00:00 0 [vsyscall] - Size: 1 kB - Rss: 2 kB - Pss: 3 kB - Shared_Clean: 4 kB - Shared_Dirty: 5 kB - Private_Clean: 6 kB - Private_Dirty: 7 kB - Referenced: 8 kB - Anonymous: 9 kB - LazyFree: 10 kB - AnonHugePages: 11 kB - ShmemPmdMapped: 12 kB - Shared_Hugetlb: 13 kB - Private_Hugetlb: 14 kB - Swap: 15 kB - SwapPss: 16 kB - KernelPageSize: 17 kB - MMUPageSize: 18 kB - Locked: 19 kB - VmFlags: rd ex - """).encode()) as m: - p = psutil.Process() - mem = p.memory_full_info() - assert m.called - self.assertEqual(mem.uss, (6 + 7 + 14) * 1024) - self.assertEqual(mem.pss, 3 * 1024) - self.assertEqual(mem.swap, 15 * 1024) - - # On PYPY file descriptors are not closed fast enough. - @unittest.skipIf(PYPY, "unreliable on PYPY") - def test_open_files_mode(self): - def get_test_file(): - p = psutil.Process() - giveup_at = time.time() + 2 - while True: - for file in p.open_files(): - if file.path == os.path.abspath(TESTFN): - return file - elif time.time() > giveup_at: - break - raise RuntimeError("timeout looking for test file") - - # - with open(TESTFN, "w"): - self.assertEqual(get_test_file().mode, "w") - with open(TESTFN, "r"): - self.assertEqual(get_test_file().mode, "r") - with open(TESTFN, "a"): - self.assertEqual(get_test_file().mode, "a") - # - with open(TESTFN, "r+"): - self.assertEqual(get_test_file().mode, "r+") - with open(TESTFN, "w+"): - self.assertEqual(get_test_file().mode, "r+") - with open(TESTFN, "a+"): - self.assertEqual(get_test_file().mode, "a+") - # note: "x" bit is not supported - if PY3: - safe_rmpath(TESTFN) - with open(TESTFN, "x"): - self.assertEqual(get_test_file().mode, "w") - safe_rmpath(TESTFN) - with open(TESTFN, "x+"): - self.assertEqual(get_test_file().mode, "r+") - - def test_open_files_file_gone(self): - # simulates a file which gets deleted during open_files() - # execution - p = psutil.Process() - files = p.open_files() - with tempfile.NamedTemporaryFile(): - # give the kernel some time to see the new file - call_until(p.open_files, "len(ret) != %i" % len(files)) - with mock.patch('psutil._pslinux.os.readlink', - side_effect=OSError(errno.ENOENT, "")) as m: - files = p.open_files() - assert not files - assert m.called - # also simulate the case where os.readlink() returns EINVAL - # in which case psutil is supposed to 'continue' - with mock.patch('psutil._pslinux.os.readlink', - side_effect=OSError(errno.EINVAL, "")) as m: - self.assertEqual(p.open_files(), []) - assert m.called - - def test_open_files_fd_gone(self): - # Simulate a case where /proc/{pid}/fdinfo/{fd} disappears - # while iterating through fds. 
- # https://travis-ci.org/giampaolo/psutil/jobs/225694530 - p = psutil.Process() - files = p.open_files() - with tempfile.NamedTemporaryFile(): - # give the kernel some time to see the new file - call_until(p.open_files, "len(ret) != %i" % len(files)) - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, - side_effect=IOError(errno.ENOENT, "")) as m: - files = p.open_files() - assert not files - assert m.called - - # --- mocked tests - - def test_terminal_mocked(self): - with mock.patch('psutil._pslinux._psposix.get_terminal_map', - return_value={}) as m: - self.assertIsNone(psutil._pslinux.Process(os.getpid()).terminal()) - assert m.called - - # TODO: re-enable this test. - # def test_num_ctx_switches_mocked(self): - # with mock.patch('psutil._pslinux.open', create=True) as m: - # self.assertRaises( - # NotImplementedError, - # psutil._pslinux.Process(os.getpid()).num_ctx_switches) - # assert m.called - - def test_cmdline_mocked(self): - # see: https://github.com/giampaolo/psutil/issues/639 - p = psutil.Process() - fake_file = io.StringIO(u('foo\x00bar\x00')) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m: - self.assertEqual(p.cmdline(), ['foo', 'bar']) - assert m.called - fake_file = io.StringIO(u('foo\x00bar\x00\x00')) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m: - self.assertEqual(p.cmdline(), ['foo', 'bar', '']) - assert m.called - - def test_cmdline_spaces_mocked(self): - # see: https://github.com/giampaolo/psutil/issues/1179 - p = psutil.Process() - fake_file = io.StringIO(u('foo bar ')) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m: - self.assertEqual(p.cmdline(), ['foo', 'bar']) - assert m.called - fake_file = io.StringIO(u('foo bar ')) - with mock.patch('psutil._pslinux.open', - return_value=fake_file, create=True) as m: - self.assertEqual(p.cmdline(), ['foo', 'bar', '']) - assert m.called - - def test_readlink_path_deleted_mocked(self): - with mock.patch('psutil._pslinux.os.readlink', - return_value='/home/foo (deleted)'): - self.assertEqual(psutil.Process().exe(), "/home/foo") - self.assertEqual(psutil.Process().cwd(), "/home/foo") - - def test_threads_mocked(self): - # Test the case where os.listdir() returns a file (thread) - # which no longer exists by the time we open() it (race - # condition). threads() is supposed to ignore that instead - # of raising NSP. - def open_mock(name, *args, **kwargs): - if name.startswith('/proc/%s/task' % os.getpid()): - raise IOError(errno.ENOENT, "") - else: - return orig_open(name, *args, **kwargs) - - orig_open = open - patch_point = 'builtins.open' if PY3 else '__builtin__.open' - with mock.patch(patch_point, side_effect=open_mock) as m: - ret = psutil.Process().threads() - assert m.called - self.assertEqual(ret, []) - - # ...but if it bumps into something != ENOENT we want an - # exception. 
- def open_mock(name, *args, **kwargs): - if name.startswith('/proc/%s/task' % os.getpid()): - raise IOError(errno.EPERM, "") - else: - return orig_open(name, *args, **kwargs) - - with mock.patch(patch_point, side_effect=open_mock): - self.assertRaises(psutil.AccessDenied, psutil.Process().threads) - - def test_exe_mocked(self): - with mock.patch('psutil._pslinux.readlink', - side_effect=OSError(errno.ENOENT, "")) as m1: - with mock.patch('psutil.Process.cmdline', - side_effect=psutil.AccessDenied(0, "")) as m2: - # No such file error; might be raised also if /proc/pid/exe - # path actually exists for system processes with low pids - # (about 0-20). In this case psutil is supposed to return - # an empty string. - ret = psutil.Process().exe() - assert m1.called - assert m2.called - self.assertEqual(ret, "") - - # ...but if /proc/pid no longer exist we're supposed to treat - # it as an alias for zombie process - with mock.patch('psutil._pslinux.os.path.lexists', - return_value=False): - self.assertRaises( - psutil.ZombieProcess, psutil.Process().exe) - - def test_issue_1014(self): - # Emulates a case where smaps file does not exist. In this case - # wrap_exception decorator should not raise NoSuchProcess. - with mock_open_exception( - '/proc/%s/smaps' % os.getpid(), - IOError(errno.ENOENT, "")) as m: - p = psutil.Process() - with self.assertRaises(IOError) as err: - p.memory_maps() - self.assertEqual(err.exception.errno, errno.ENOENT) - assert m.called - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_zombie(self): - # Emulate a case where rlimit() raises ENOSYS, which may - # happen in case of zombie process: - # https://travis-ci.org/giampaolo/psutil/jobs/51368273 - with mock.patch("psutil._pslinux.cext.linux_prlimit", - side_effect=OSError(errno.ENOSYS, "")) as m: - p = psutil.Process() - p.name() - with self.assertRaises(psutil.ZombieProcess) as exc: - p.rlimit(psutil.RLIMIT_NOFILE) - assert m.called - self.assertEqual(exc.exception.pid, p.pid) - self.assertEqual(exc.exception.name, p.name()) - - def test_cwd_zombie(self): - with mock.patch("psutil._pslinux.os.readlink", - side_effect=OSError(errno.ENOENT, "")) as m: - p = psutil.Process() - p.name() - with self.assertRaises(psutil.ZombieProcess) as exc: - p.cwd() - assert m.called - self.assertEqual(exc.exception.pid, p.pid) - self.assertEqual(exc.exception.name, p.name()) - - def test_stat_file_parsing(self): - from psutil._pslinux import CLOCK_TICKS - - args = [ - "0", # pid - "(cat)", # name - "Z", # status - "1", # ppid - "0", # pgrp - "0", # session - "0", # tty - "0", # tpgid - "0", # flags - "0", # minflt - "0", # cminflt - "0", # majflt - "0", # cmajflt - "2", # utime - "3", # stime - "4", # cutime - "5", # cstime - "0", # priority - "0", # nice - "0", # num_threads - "0", # itrealvalue - "6", # starttime - "0", # vsize - "0", # rss - "0", # rsslim - "0", # startcode - "0", # endcode - "0", # startstack - "0", # kstkesp - "0", # kstkeip - "0", # signal - "0", # blocked - "0", # sigignore - "0", # sigcatch - "0", # wchan - "0", # nswap - "0", # cnswap - "0", # exit_signal - "6", # processor - ] - content = " ".join(args).encode() - with mock_open_content('/proc/%s/stat' % os.getpid(), content): - p = psutil.Process() - self.assertEqual(p.name(), 'cat') - self.assertEqual(p.status(), psutil.STATUS_ZOMBIE) - self.assertEqual(p.ppid(), 1) - self.assertEqual( - p.create_time(), 6 / CLOCK_TICKS + psutil.boot_time()) - cpu = p.cpu_times() - self.assertEqual(cpu.user, 2 / CLOCK_TICKS) - self.assertEqual(cpu.system, 
3 / CLOCK_TICKS) - self.assertEqual(cpu.children_user, 4 / CLOCK_TICKS) - self.assertEqual(cpu.children_system, 5 / CLOCK_TICKS) - self.assertEqual(p.cpu_num(), 6) - - def test_status_file_parsing(self): - with mock_open_content( - '/proc/%s/status' % os.getpid(), - textwrap.dedent("""\ - Uid:\t1000\t1001\t1002\t1003 - Gid:\t1004\t1005\t1006\t1007 - Threads:\t66 - Cpus_allowed:\tf - Cpus_allowed_list:\t0-7 - voluntary_ctxt_switches:\t12 - nonvoluntary_ctxt_switches:\t13""").encode()): - p = psutil.Process() - self.assertEqual(p.num_ctx_switches().voluntary, 12) - self.assertEqual(p.num_ctx_switches().involuntary, 13) - self.assertEqual(p.num_threads(), 66) - uids = p.uids() - self.assertEqual(uids.real, 1000) - self.assertEqual(uids.effective, 1001) - self.assertEqual(uids.saved, 1002) - gids = p.gids() - self.assertEqual(gids.real, 1004) - self.assertEqual(gids.effective, 1005) - self.assertEqual(gids.saved, 1006) - self.assertEqual(p._proc._get_eligible_cpus(), list(range(0, 8))) - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestProcessAgainstStatus(unittest.TestCase): - """/proc/pid/stat and /proc/pid/status have many values in common. - Whenever possible, psutil uses /proc/pid/stat (it's faster). - For all those cases we check that the value found in - /proc/pid/stat (by psutil) matches the one found in - /proc/pid/status. - """ - - @classmethod - def setUpClass(cls): - cls.proc = psutil.Process() - - def read_status_file(self, linestart): - with psutil._psplatform.open_text( - '/proc/%s/status' % self.proc.pid) as f: - for line in f: - line = line.strip() - if line.startswith(linestart): - value = line.partition('\t')[2] - try: - return int(value) - except ValueError: - return value - raise ValueError("can't find %r" % linestart) - - def test_name(self): - value = self.read_status_file("Name:") - self.assertEqual(self.proc.name(), value) - - def test_status(self): - value = self.read_status_file("State:") - value = value[value.find('(') + 1:value.rfind(')')] - value = value.replace(' ', '-') - self.assertEqual(self.proc.status(), value) - - def test_ppid(self): - value = self.read_status_file("PPid:") - self.assertEqual(self.proc.ppid(), value) - - def test_num_threads(self): - value = self.read_status_file("Threads:") - self.assertEqual(self.proc.num_threads(), value) - - def test_uids(self): - value = self.read_status_file("Uid:") - value = tuple(map(int, value.split()[1:4])) - self.assertEqual(self.proc.uids(), value) - - def test_gids(self): - value = self.read_status_file("Gid:") - value = tuple(map(int, value.split()[1:4])) - self.assertEqual(self.proc.gids(), value) - - @retry_before_failing() - def test_num_ctx_switches(self): - value = self.read_status_file("voluntary_ctxt_switches:") - self.assertEqual(self.proc.num_ctx_switches().voluntary, value) - value = self.read_status_file("nonvoluntary_ctxt_switches:") - self.assertEqual(self.proc.num_ctx_switches().involuntary, value) - - def test_cpu_affinity(self): - value = self.read_status_file("Cpus_allowed_list:") - if '-' in str(value): - min_, max_ = map(int, value.split('-')) - self.assertEqual( - self.proc.cpu_affinity(), list(range(min_, max_ + 1))) - - def test_cpu_affinity_eligible_cpus(self): - value = self.read_status_file("Cpus_allowed_list:") - with mock.patch("psutil._pslinux.per_cpu_times") as m: - self.proc._proc._get_eligible_cpus() - if '-' in str(value): - assert not m.called - else: - assert m.called - - -# ===================================================================== -# --- test utils -# 
===================================================================== - - -@unittest.skipIf(not LINUX, "LINUX only") -class TestUtils(unittest.TestCase): - - def test_open_text(self): - with psutil._psplatform.open_text(__file__) as f: - self.assertEqual(f.mode, 'rt') - - def test_open_binary(self): - with psutil._psplatform.open_binary(__file__) as f: - self.assertEqual(f.mode, 'rb') - - def test_readlink(self): - with mock.patch("os.readlink", return_value="foo (deleted)") as m: - self.assertEqual(psutil._psplatform.readlink("bar"), "foo") - assert m.called - - def test_cat(self): - fname = os.path.abspath(TESTFN) - with open(fname, "wt") as f: - f.write("foo ") - self.assertEqual(psutil._psplatform.cat(TESTFN, binary=False), "foo") - self.assertEqual(psutil._psplatform.cat(TESTFN, binary=True), b"foo") - self.assertEqual( - psutil._psplatform.cat(TESTFN + '??', fallback="bar"), "bar") - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_memory_leaks.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_memory_leaks.py deleted file mode 100644 index ce08245..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_memory_leaks.py +++ /dev/null @@ -1,599 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Tests for detecting function memory leaks (typically the ones -implemented in C). It does so by calling a function many times and -checking whether process memory usage keeps increasing between -calls or over time. -Note that this may produce false positives (especially on Windows -for some reason). 
-""" - -from __future__ import print_function -import errno -import functools -import gc -import os -import sys -import threading -import time - -import psutil -import psutil._common -from psutil import LINUX -from psutil import MACOS -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._compat import xrange -from psutil.tests import create_sockets -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_CPU_AFFINITY -from psutil.tests import HAS_CPU_FREQ -from psutil.tests import HAS_ENVIRON -from psutil.tests import HAS_IONICE -from psutil.tests import HAS_MEMORY_MAPS -from psutil.tests import HAS_PROC_CPU_NUM -from psutil.tests import HAS_PROC_IO_COUNTERS -from psutil.tests import HAS_RLIMIT -from psutil.tests import HAS_SENSORS_BATTERY -from psutil.tests import HAS_SENSORS_FANS -from psutil.tests import HAS_SENSORS_TEMPERATURES -from psutil.tests import reap_children -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import skip_on_access_denied -from psutil.tests import TESTFN -from psutil.tests import TRAVIS -from psutil.tests import unittest - - -LOOPS = 1000 -MEMORY_TOLERANCE = 4096 -RETRY_FOR = 3 - -SKIP_PYTHON_IMPL = True if TRAVIS else False -cext = psutil._psplatform.cext -thisproc = psutil.Process() -SKIP_PYTHON_IMPL = True if TRAVIS else False - - -# =================================================================== -# utils -# =================================================================== - - -def skip_if_linux(): - return unittest.skipIf(LINUX and SKIP_PYTHON_IMPL, - "worthless on LINUX (pure python)") - - -def bytes2human(n): - """ - http://code.activestate.com/recipes/578019 - >>> bytes2human(10000) - '9.8K' - >>> bytes2human(100001221) - '95.4M' - """ - symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') - prefix = {} - for i, s in enumerate(symbols): - prefix[s] = 1 << (i + 1) * 10 - for s in reversed(symbols): - if n >= prefix[s]: - value = float(n) / prefix[s] - return '%.2f%s' % (value, s) - return "%sB" % n - - -class TestMemLeak(unittest.TestCase): - """Base framework class which calls a function many times and - produces a failure if process memory usage keeps increasing - between calls or over time. - """ - tolerance = MEMORY_TOLERANCE - loops = LOOPS - retry_for = RETRY_FOR - - def setUp(self): - gc.collect() - - def execute(self, fun, *args, **kwargs): - """Test a callable.""" - def call_many_times(): - for x in xrange(loops): - self._call(fun, *args, **kwargs) - del x - gc.collect() - - tolerance = kwargs.pop('tolerance_', None) or self.tolerance - loops = kwargs.pop('loops_', None) or self.loops - retry_for = kwargs.pop('retry_for_', None) or self.retry_for - - # warm up - for x in range(10): - self._call(fun, *args, **kwargs) - self.assertEqual(gc.garbage, []) - self.assertEqual(threading.active_count(), 1) - self.assertEqual(thisproc.children(), []) - - # Get 2 distinct memory samples, before and after having - # called fun repeadetly. - # step 1 - call_many_times() - mem1 = self._get_mem() - # step 2 - call_many_times() - mem2 = self._get_mem() - - diff1 = mem2 - mem1 - if diff1 > tolerance: - # This doesn't necessarily mean we have a leak yet. - # At this point we assume that after having called the - # function so many times the memory usage is stabilized - # and if there are no leaks it should not increase - # anymore. - # Let's keep calling fun for 3 more seconds and fail if - # we notice any difference. 
- ncalls = 0 - stop_at = time.time() + retry_for - while time.time() <= stop_at: - self._call(fun, *args, **kwargs) - ncalls += 1 - - del stop_at - gc.collect() - mem3 = self._get_mem() - diff2 = mem3 - mem2 - - if mem3 > mem2: - # failure - extra_proc_mem = bytes2human(diff1 + diff2) - print("exta proc mem: %s" % extra_proc_mem, file=sys.stderr) - msg = "+%s after %s calls, +%s after another %s calls, " - msg += "+%s extra proc mem" - msg = msg % ( - bytes2human(diff1), loops, bytes2human(diff2), ncalls, - extra_proc_mem) - self.fail(msg) - - def execute_w_exc(self, exc, fun, *args, **kwargs): - """Convenience function which tests a callable raising - an exception. - """ - def call(): - self.assertRaises(exc, fun, *args, **kwargs) - - self.execute(call) - - @staticmethod - def _get_mem(): - # By using USS memory it seems it's less likely to bump - # into false positives. - if LINUX or WINDOWS or MACOS: - return thisproc.memory_full_info().uss - else: - return thisproc.memory_info().rss - - @staticmethod - def _call(fun, *args, **kwargs): - fun(*args, **kwargs) - - -# =================================================================== -# Process class -# =================================================================== - - -class TestProcessObjectLeaks(TestMemLeak): - """Test leaks of Process class methods.""" - - proc = thisproc - - def test_coverage(self): - skip = set(( - "pid", "as_dict", "children", "cpu_affinity", "cpu_percent", - "ionice", "is_running", "kill", "memory_info_ex", "memory_percent", - "nice", "oneshot", "parent", "rlimit", "send_signal", "suspend", - "terminate", "wait")) - for name in dir(psutil.Process): - if name.startswith('_'): - continue - if name in skip: - continue - self.assertTrue(hasattr(self, "test_" + name), msg=name) - - @skip_if_linux() - def test_name(self): - self.execute(self.proc.name) - - @skip_if_linux() - def test_cmdline(self): - self.execute(self.proc.cmdline) - - @skip_if_linux() - def test_exe(self): - self.execute(self.proc.exe) - - @skip_if_linux() - def test_ppid(self): - self.execute(self.proc.ppid) - - @unittest.skipIf(not POSIX, "POSIX only") - @skip_if_linux() - def test_uids(self): - self.execute(self.proc.uids) - - @unittest.skipIf(not POSIX, "POSIX only") - @skip_if_linux() - def test_gids(self): - self.execute(self.proc.gids) - - @skip_if_linux() - def test_status(self): - self.execute(self.proc.status) - - def test_nice_get(self): - self.execute(self.proc.nice) - - def test_nice_set(self): - niceness = thisproc.nice() - self.execute(self.proc.nice, niceness) - - @unittest.skipIf(not HAS_IONICE, "not supported") - def test_ionice_get(self): - self.execute(self.proc.ionice) - - @unittest.skipIf(not HAS_IONICE, "not supported") - def test_ionice_set(self): - if WINDOWS: - value = thisproc.ionice() - self.execute(self.proc.ionice, value) - else: - self.execute(self.proc.ionice, psutil.IOPRIO_CLASS_NONE) - fun = functools.partial(cext.proc_ioprio_set, os.getpid(), -1, 0) - self.execute_w_exc(OSError, fun) - - @unittest.skipIf(not HAS_PROC_IO_COUNTERS, "not supported") - @skip_if_linux() - def test_io_counters(self): - self.execute(self.proc.io_counters) - - @unittest.skipIf(POSIX, "worthless on POSIX") - def test_username(self): - self.execute(self.proc.username) - - @skip_if_linux() - def test_create_time(self): - self.execute(self.proc.create_time) - - @skip_if_linux() - @skip_on_access_denied(only_if=OPENBSD) - def test_num_threads(self): - self.execute(self.proc.num_threads) - - @unittest.skipIf(not WINDOWS, "WINDOWS only") - def 
test_num_handles(self): - self.execute(self.proc.num_handles) - - @unittest.skipIf(not POSIX, "POSIX only") - @skip_if_linux() - def test_num_fds(self): - self.execute(self.proc.num_fds) - - @skip_if_linux() - def test_num_ctx_switches(self): - self.execute(self.proc.num_ctx_switches) - - @skip_if_linux() - @skip_on_access_denied(only_if=OPENBSD) - def test_threads(self): - self.execute(self.proc.threads) - - @skip_if_linux() - def test_cpu_times(self): - self.execute(self.proc.cpu_times) - - @skip_if_linux() - @unittest.skipIf(not HAS_PROC_CPU_NUM, "not supported") - def test_cpu_num(self): - self.execute(self.proc.cpu_num) - - @skip_if_linux() - def test_memory_info(self): - self.execute(self.proc.memory_info) - - @skip_if_linux() - def test_memory_full_info(self): - self.execute(self.proc.memory_full_info) - - @unittest.skipIf(not POSIX, "POSIX only") - @skip_if_linux() - def test_terminal(self): - self.execute(self.proc.terminal) - - @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL, - "worthless on POSIX (pure python)") - def test_resume(self): - self.execute(self.proc.resume) - - @skip_if_linux() - def test_cwd(self): - self.execute(self.proc.cwd) - - @unittest.skipIf(not HAS_CPU_AFFINITY, "not supported") - def test_cpu_affinity_get(self): - self.execute(self.proc.cpu_affinity) - - @unittest.skipIf(not HAS_CPU_AFFINITY, "not supported") - def test_cpu_affinity_set(self): - affinity = thisproc.cpu_affinity() - self.execute(self.proc.cpu_affinity, affinity) - if not TRAVIS: - self.execute_w_exc(ValueError, self.proc.cpu_affinity, [-1]) - - @skip_if_linux() - def test_open_files(self): - safe_rmpath(TESTFN) # needed after UNIX socket test has run - with open(TESTFN, 'w'): - self.execute(self.proc.open_files) - - # MACOS implementation is unbelievably slow - @unittest.skipIf(MACOS, "too slow on MACOS") - @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported") - @skip_if_linux() - def test_memory_maps(self): - self.execute(self.proc.memory_maps) - - @unittest.skipIf(not LINUX, "LINUX only") - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_get(self): - self.execute(self.proc.rlimit, psutil.RLIMIT_NOFILE) - - @unittest.skipIf(not LINUX, "LINUX only") - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_set(self): - limit = thisproc.rlimit(psutil.RLIMIT_NOFILE) - self.execute(self.proc.rlimit, psutil.RLIMIT_NOFILE, limit) - self.execute_w_exc(OSError, self.proc.rlimit, -1) - - @skip_if_linux() - # Windows implementation is based on a single system-wide - # function (tested later). - @unittest.skipIf(WINDOWS, "worthless on WINDOWS") - def test_connections(self): - # TODO: UNIX sockets are temporarily implemented by parsing - # 'pfiles' cmd output; we don't want that part of the code to - # be executed. - with create_sockets(): - kind = 'inet' if SUNOS else 'all' - self.execute(self.proc.connections, kind) - - @unittest.skipIf(not HAS_ENVIRON, "not supported") - def test_environ(self): - self.execute(self.proc.environ) - - @unittest.skipIf(not WINDOWS, "WINDOWS only") - def test_proc_info(self): - self.execute(cext.proc_info, os.getpid()) - - -class TestTerminatedProcessLeaks(TestProcessObjectLeaks): - """Repeat the tests above looking for leaks occurring when dealing - with terminated processes raising NoSuchProcess exception. - The C functions are still invoked but will follow different code - paths. We'll check those code paths. 
- """ - - @classmethod - def setUpClass(cls): - super(TestTerminatedProcessLeaks, cls).setUpClass() - p = get_test_subprocess() - cls.proc = psutil.Process(p.pid) - cls.proc.kill() - cls.proc.wait() - - @classmethod - def tearDownClass(cls): - super(TestTerminatedProcessLeaks, cls).tearDownClass() - reap_children() - - def _call(self, fun, *args, **kwargs): - try: - fun(*args, **kwargs) - except psutil.NoSuchProcess: - pass - - if WINDOWS: - - def test_kill(self): - self.execute(self.proc.kill) - - def test_terminate(self): - self.execute(self.proc.terminate) - - def test_suspend(self): - self.execute(self.proc.suspend) - - def test_resume(self): - self.execute(self.proc.resume) - - def test_wait(self): - self.execute(self.proc.wait) - - def test_proc_info(self): - # test dual implementation - def call(): - try: - return cext.proc_info(self.proc.pid) - except OSError as err: - if err.errno != errno.ESRCH: - raise - - self.execute(call) - - -# =================================================================== -# system APIs -# =================================================================== - - -class TestModuleFunctionsLeaks(TestMemLeak): - """Test leaks of psutil module functions.""" - - def test_coverage(self): - skip = set(( - "version_info", "__version__", "process_iter", "wait_procs", - "cpu_percent", "cpu_times_percent", "cpu_count")) - for name in psutil.__all__: - if not name.islower(): - continue - if name in skip: - continue - self.assertTrue(hasattr(self, "test_" + name), msg=name) - - # --- cpu - - @skip_if_linux() - def test_cpu_count_logical(self): - self.execute(psutil.cpu_count, logical=True) - - @skip_if_linux() - def test_cpu_count_physical(self): - self.execute(psutil.cpu_count, logical=False) - - @skip_if_linux() - def test_cpu_times(self): - self.execute(psutil.cpu_times) - - @skip_if_linux() - def test_per_cpu_times(self): - self.execute(psutil.cpu_times, percpu=True) - - def test_cpu_stats(self): - self.execute(psutil.cpu_stats) - - @skip_if_linux() - @unittest.skipIf(not HAS_CPU_FREQ, "not supported") - def test_cpu_freq(self): - self.execute(psutil.cpu_freq) - - # --- mem - - def test_virtual_memory(self): - self.execute(psutil.virtual_memory) - - # TODO: remove this skip when this gets fixed - @unittest.skipIf(SUNOS, - "worthless on SUNOS (uses a subprocess)") - def test_swap_memory(self): - self.execute(psutil.swap_memory) - - @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL, - "worthless on POSIX (pure python)") - def test_pid_exists(self): - self.execute(psutil.pid_exists, os.getpid()) - - # --- disk - - @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL, - "worthless on POSIX (pure python)") - def test_disk_usage(self): - self.execute(psutil.disk_usage, '.') - - def test_disk_partitions(self): - self.execute(psutil.disk_partitions) - - @unittest.skipIf(LINUX and not os.path.exists('/proc/diskstats'), - '/proc/diskstats not available on this Linux version') - @skip_if_linux() - def test_disk_io_counters(self): - self.execute(psutil.disk_io_counters, nowrap=False) - - # --- proc - - @skip_if_linux() - def test_pids(self): - self.execute(psutil.pids) - - # --- net - - @skip_if_linux() - def test_net_io_counters(self): - self.execute(psutil.net_io_counters, nowrap=False) - - @unittest.skipIf(LINUX, - "worthless on Linux (pure python)") - @unittest.skipIf(MACOS and os.getuid() != 0, "need root access") - def test_net_connections(self): - with create_sockets(): - self.execute(psutil.net_connections) - - def test_net_if_addrs(self): - # Note: verified that on Windows this 
was a false positive. - self.execute(psutil.net_if_addrs, - tolerance_=80 * 1024 if WINDOWS else None) - - @unittest.skipIf(TRAVIS, "EPERM on travis") - def test_net_if_stats(self): - self.execute(psutil.net_if_stats) - - # --- sensors - - @skip_if_linux() - @unittest.skipIf(not HAS_SENSORS_BATTERY, "not supported") - def test_sensors_battery(self): - self.execute(psutil.sensors_battery) - - @skip_if_linux() - @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported") - def test_sensors_temperatures(self): - self.execute(psutil.sensors_temperatures) - - @skip_if_linux() - @unittest.skipIf(not HAS_SENSORS_FANS, "not supported") - def test_sensors_fans(self): - self.execute(psutil.sensors_fans) - - # --- others - - @skip_if_linux() - def test_boot_time(self): - self.execute(psutil.boot_time) - - # XXX - on Windows this produces a false positive - @unittest.skipIf(WINDOWS, "XXX produces a false positive on Windows") - def test_users(self): - self.execute(psutil.users) - - if WINDOWS: - - # --- win services - - def test_win_service_iter(self): - self.execute(cext.winservice_enumerate) - - def test_win_service_get(self): - pass - - def test_win_service_get_config(self): - name = next(psutil.win_service_iter()).name() - self.execute(cext.winservice_query_config, name) - - def test_win_service_get_status(self): - name = next(psutil.win_service_iter()).name() - self.execute(cext.winservice_query_status, name) - - def test_win_service_get_description(self): - name = next(psutil.win_service_iter()).name() - self.execute(cext.winservice_query_descr, name) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_misc.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_misc.py deleted file mode 100644 index 1d9067e..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_misc.py +++ /dev/null @@ -1,1046 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Miscellaneous tests. 
-""" - -import ast -import collections -import contextlib -import errno -import json -import os -import pickle -import socket -import stat - -from psutil import LINUX -from psutil import POSIX -from psutil import WINDOWS -from psutil._common import memoize -from psutil._common import memoize_when_activated -from psutil._common import supports_ipv6 -from psutil._common import wrap_numbers -from psutil._compat import PY3 -from psutil.tests import APPVEYOR -from psutil.tests import bind_socket -from psutil.tests import bind_unix_socket -from psutil.tests import call_until -from psutil.tests import chdir -from psutil.tests import create_proc_children_pair -from psutil.tests import create_sockets -from psutil.tests import create_zombie_proc -from psutil.tests import DEVNULL -from psutil.tests import get_free_port -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_BATTERY -from psutil.tests import HAS_CONNECTIONS_UNIX -from psutil.tests import HAS_MEMORY_FULL_INFO -from psutil.tests import HAS_MEMORY_MAPS -from psutil.tests import HAS_SENSORS_BATTERY -from psutil.tests import HAS_SENSORS_FANS -from psutil.tests import HAS_SENSORS_TEMPERATURES -from psutil.tests import import_module_by_path -from psutil.tests import is_namedtuple -from psutil.tests import mock -from psutil.tests import PYTHON_EXE -from psutil.tests import reap_children -from psutil.tests import reload_module -from psutil.tests import retry -from psutil.tests import ROOT_DIR -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_mkdir -from psutil.tests import safe_rmpath -from psutil.tests import SCRIPTS_DIR -from psutil.tests import sh -from psutil.tests import tcp_socketpair -from psutil.tests import TESTFN -from psutil.tests import TOX -from psutil.tests import TRAVIS -from psutil.tests import unittest -from psutil.tests import unix_socket_path -from psutil.tests import unix_socketpair -from psutil.tests import wait_for_file -from psutil.tests import wait_for_pid -import psutil -import psutil.tests - - -# =================================================================== -# --- Misc / generic tests. 
-# =================================================================== - - -class TestMisc(unittest.TestCase): - - def test_process__repr__(self, func=repr): - p = psutil.Process() - r = func(p) - self.assertIn("psutil.Process", r) - self.assertIn("pid=%s" % p.pid, r) - self.assertIn("name=", r) - self.assertIn(p.name(), r) - with mock.patch.object(psutil.Process, "name", - side_effect=psutil.ZombieProcess(os.getpid())): - p = psutil.Process() - r = func(p) - self.assertIn("pid=%s" % p.pid, r) - self.assertIn("zombie", r) - self.assertNotIn("name=", r) - with mock.patch.object(psutil.Process, "name", - side_effect=psutil.NoSuchProcess(os.getpid())): - p = psutil.Process() - r = func(p) - self.assertIn("pid=%s" % p.pid, r) - self.assertIn("terminated", r) - self.assertNotIn("name=", r) - with mock.patch.object(psutil.Process, "name", - side_effect=psutil.AccessDenied(os.getpid())): - p = psutil.Process() - r = func(p) - self.assertIn("pid=%s" % p.pid, r) - self.assertNotIn("name=", r) - - def test_process__str__(self): - self.test_process__repr__(func=str) - - def test_no_such_process__repr__(self, func=repr): - self.assertEqual( - repr(psutil.NoSuchProcess(321)), - "psutil.NoSuchProcess process no longer exists (pid=321)") - self.assertEqual( - repr(psutil.NoSuchProcess(321, name='foo')), - "psutil.NoSuchProcess process no longer exists (pid=321, " - "name='foo')") - self.assertEqual( - repr(psutil.NoSuchProcess(321, msg='foo')), - "psutil.NoSuchProcess foo") - - def test_zombie_process__repr__(self, func=repr): - self.assertEqual( - repr(psutil.ZombieProcess(321)), - "psutil.ZombieProcess process still exists but it's a zombie " - "(pid=321)") - self.assertEqual( - repr(psutil.ZombieProcess(321, name='foo')), - "psutil.ZombieProcess process still exists but it's a zombie " - "(pid=321, name='foo')") - self.assertEqual( - repr(psutil.ZombieProcess(321, name='foo', ppid=1)), - "psutil.ZombieProcess process still exists but it's a zombie " - "(pid=321, name='foo', ppid=1)") - self.assertEqual( - repr(psutil.ZombieProcess(321, msg='foo')), - "psutil.ZombieProcess foo") - - def test_access_denied__repr__(self, func=repr): - self.assertEqual( - repr(psutil.AccessDenied(321)), - "psutil.AccessDenied (pid=321)") - self.assertEqual( - repr(psutil.AccessDenied(321, name='foo')), - "psutil.AccessDenied (pid=321, name='foo')") - self.assertEqual( - repr(psutil.AccessDenied(321, msg='foo')), - "psutil.AccessDenied foo") - - def test_timeout_expired__repr__(self, func=repr): - self.assertEqual( - repr(psutil.TimeoutExpired(321)), - "psutil.TimeoutExpired timeout after 321 seconds") - self.assertEqual( - repr(psutil.TimeoutExpired(321, pid=111)), - "psutil.TimeoutExpired timeout after 321 seconds (pid=111)") - self.assertEqual( - repr(psutil.TimeoutExpired(321, pid=111, name='foo')), - "psutil.TimeoutExpired timeout after 321 seconds " - "(pid=111, name='foo')") - - def test_process__eq__(self): - p1 = psutil.Process() - p2 = psutil.Process() - self.assertEqual(p1, p2) - p2._ident = (0, 0) - self.assertNotEqual(p1, p2) - self.assertNotEqual(p1, 'foo') - - def test_process__hash__(self): - s = set([psutil.Process(), psutil.Process()]) - self.assertEqual(len(s), 1) - - def test__all__(self): - dir_psutil = dir(psutil) - for name in dir_psutil: - if name in ('callable', 'error', 'namedtuple', 'tests', - 'long', 'test', 'NUM_CPUS', 'BOOT_TIME', - 'TOTAL_PHYMEM'): - continue - if not name.startswith('_'): - try: - __import__(name) - except ImportError: - if name not in psutil.__all__: - fun = getattr(psutil, 
name) - if fun is None: - continue - if (fun.__doc__ is not None and - 'deprecated' not in fun.__doc__.lower()): - self.fail('%r not in psutil.__all__' % name) - - # Import 'star' will break if __all__ is inconsistent, see: - # https://github.com/giampaolo/psutil/issues/656 - # Can't do `from psutil import *` as it won't work on python 3 - # so we simply iterate over __all__. - for name in psutil.__all__: - self.assertIn(name, dir_psutil) - - def test_version(self): - self.assertEqual('.'.join([str(x) for x in psutil.version_info]), - psutil.__version__) - - def test_process_as_dict_no_new_names(self): - # See https://github.com/giampaolo/psutil/issues/813 - p = psutil.Process() - p.foo = '1' - self.assertNotIn('foo', p.as_dict()) - - def test_memoize(self): - @memoize - def foo(*args, **kwargs): - "foo docstring" - calls.append(None) - return (args, kwargs) - - calls = [] - # no args - for x in range(2): - ret = foo() - expected = ((), {}) - self.assertEqual(ret, expected) - self.assertEqual(len(calls), 1) - # with args - for x in range(2): - ret = foo(1) - expected = ((1, ), {}) - self.assertEqual(ret, expected) - self.assertEqual(len(calls), 2) - # with args + kwargs - for x in range(2): - ret = foo(1, bar=2) - expected = ((1, ), {'bar': 2}) - self.assertEqual(ret, expected) - self.assertEqual(len(calls), 3) - # clear cache - foo.cache_clear() - ret = foo() - expected = ((), {}) - self.assertEqual(ret, expected) - self.assertEqual(len(calls), 4) - # docstring - self.assertEqual(foo.__doc__, "foo docstring") - - def test_memoize_when_activated(self): - class Foo: - - @memoize_when_activated - def foo(self): - calls.append(None) - - f = Foo() - calls = [] - f.foo() - f.foo() - self.assertEqual(len(calls), 2) - - # activate - calls = [] - f.foo.cache_activate() - f.foo() - f.foo() - self.assertEqual(len(calls), 1) - - # deactivate - calls = [] - f.foo.cache_deactivate() - f.foo() - f.foo() - self.assertEqual(len(calls), 2) - - def test_parse_environ_block(self): - from psutil._common import parse_environ_block - - def k(s): - return s.upper() if WINDOWS else s - - self.assertEqual(parse_environ_block("a=1\0"), - {k("a"): "1"}) - self.assertEqual(parse_environ_block("a=1\0b=2\0\0"), - {k("a"): "1", k("b"): "2"}) - self.assertEqual(parse_environ_block("a=1\0b=\0\0"), - {k("a"): "1", k("b"): ""}) - # ignore everything after \0\0 - self.assertEqual(parse_environ_block("a=1\0b=2\0\0c=3\0"), - {k("a"): "1", k("b"): "2"}) - # ignore everything that is not an assignment - self.assertEqual(parse_environ_block("xxx\0a=1\0"), {k("a"): "1"}) - self.assertEqual(parse_environ_block("a=1\0=b=2\0"), {k("a"): "1"}) - # do not fail if the block is incomplete - self.assertEqual(parse_environ_block("a=1\0b=2"), {k("a"): "1"}) - - def test_supports_ipv6(self): - self.addCleanup(supports_ipv6.cache_clear) - if supports_ipv6(): - with mock.patch('psutil._common.socket') as s: - s.has_ipv6 = False - supports_ipv6.cache_clear() - assert not supports_ipv6() - - supports_ipv6.cache_clear() - with mock.patch('psutil._common.socket.socket', - side_effect=socket.error) as s: - assert not supports_ipv6() - assert s.called - - supports_ipv6.cache_clear() - with mock.patch('psutil._common.socket.socket', - side_effect=socket.gaierror) as s: - assert not supports_ipv6() - supports_ipv6.cache_clear() - assert s.called - - supports_ipv6.cache_clear() - with mock.patch('psutil._common.socket.socket.bind', - side_effect=socket.gaierror) as s: - assert not supports_ipv6() - supports_ipv6.cache_clear() - assert s.called - else: 
- with self.assertRaises(Exception): - sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) - sock.bind(("::1", 0)) - - def test_isfile_strict(self): - from psutil._common import isfile_strict - this_file = os.path.abspath(__file__) - assert isfile_strict(this_file) - assert not isfile_strict(os.path.dirname(this_file)) - with mock.patch('psutil._common.os.stat', - side_effect=OSError(errno.EPERM, "foo")): - self.assertRaises(OSError, isfile_strict, this_file) - with mock.patch('psutil._common.os.stat', - side_effect=OSError(errno.EACCES, "foo")): - self.assertRaises(OSError, isfile_strict, this_file) - with mock.patch('psutil._common.os.stat', - side_effect=OSError(errno.EINVAL, "foo")): - assert not isfile_strict(this_file) - with mock.patch('psutil._common.stat.S_ISREG', return_value=False): - assert not isfile_strict(this_file) - - def test_serialization(self): - def check(ret): - if json is not None: - json.loads(json.dumps(ret)) - a = pickle.dumps(ret) - b = pickle.loads(a) - self.assertEqual(ret, b) - - check(psutil.Process().as_dict()) - check(psutil.virtual_memory()) - check(psutil.swap_memory()) - check(psutil.cpu_times()) - check(psutil.cpu_times_percent(interval=0)) - check(psutil.net_io_counters()) - if LINUX and not os.path.exists('/proc/diskstats'): - pass - else: - if not APPVEYOR: - check(psutil.disk_io_counters()) - check(psutil.disk_partitions()) - check(psutil.disk_usage(os.getcwd())) - check(psutil.users()) - - def test_setup_script(self): - setup_py = os.path.join(ROOT_DIR, 'setup.py') - if TRAVIS and not os.path.exists(setup_py): - return self.skipTest("can't find setup.py") - module = import_module_by_path(setup_py) - self.assertRaises(SystemExit, module.setup) - self.assertEqual(module.get_version(), psutil.__version__) - - def test_ad_on_process_creation(self): - # We are supposed to be able to instantiate Process also in case - # of zombie processes or access denied. - with mock.patch.object(psutil.Process, 'create_time', - side_effect=psutil.AccessDenied) as meth: - psutil.Process() - assert meth.called - with mock.patch.object(psutil.Process, 'create_time', - side_effect=psutil.ZombieProcess(1)) as meth: - psutil.Process() - assert meth.called - with mock.patch.object(psutil.Process, 'create_time', - side_effect=ValueError) as meth: - with self.assertRaises(ValueError): - psutil.Process() - assert meth.called - - def test_sanity_version_check(self): - # see: https://github.com/giampaolo/psutil/issues/564 - with mock.patch( - "psutil._psplatform.cext.version", return_value="0.0.0"): - with self.assertRaises(ImportError) as cm: - reload_module(psutil) - self.assertIn("version conflict", str(cm.exception).lower()) - - -# =================================================================== -# --- Tests for wrap_numbers() function. 
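Kernel I/O counters are cumulative and may wrap or reset, so wrap_numbers() compensates to keep the totals psutil reports monotonic. A minimal sketch of the core idea for a single counter stream (illustrative only; the real function tracks many named tuples per cache key, as the tests below show):

class WrapCompensator:
    def __init__(self):
        self.last = None    # last raw value seen
        self.offset = 0     # value accumulated before wraps/resets

    def feed(self, raw):
        if self.last is not None and raw < self.last:
            # counter wrapped or reset: fold in everything seen so far
            self.offset += self.last
        self.last = raw
        return raw + self.offset

c = WrapCompensator()
assert c.feed(100) == 100
assert c.feed(10) == 110   # wrapped: 100 (old) + 10 (new)
assert c.feed(90) == 190
assert c.feed(20) == 210   # wrapped again: 190 + 20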
-# =================================================================== - - -nt = collections.namedtuple('foo', 'a b c') - - -class TestWrapNumbers(unittest.TestCase): - - def setUp(self): - wrap_numbers.cache_clear() - - tearDown = setUp - - def test_first_call(self): - input = {'disk1': nt(5, 5, 5)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - - def test_input_hasnt_changed(self): - input = {'disk1': nt(5, 5, 5)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - - def test_increase_but_no_wrap(self): - input = {'disk1': nt(5, 5, 5)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - input = {'disk1': nt(10, 15, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - input = {'disk1': nt(20, 25, 30)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - input = {'disk1': nt(20, 25, 30)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - - def test_wrap(self): - # let's say 100 is the threshold - input = {'disk1': nt(100, 100, 100)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - # first wrap restarts from 10 - input = {'disk1': nt(100, 100, 10)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(100, 100, 110)}) - # then it remains the same - input = {'disk1': nt(100, 100, 10)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(100, 100, 110)}) - # then it goes up - input = {'disk1': nt(100, 100, 90)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(100, 100, 190)}) - # then it wraps again - input = {'disk1': nt(100, 100, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(100, 100, 210)}) - # and remains the same - input = {'disk1': nt(100, 100, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(100, 100, 210)}) - # now wrap another num - input = {'disk1': nt(50, 100, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(150, 100, 210)}) - # and again - input = {'disk1': nt(40, 100, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(190, 100, 210)}) - # keep it the same - input = {'disk1': nt(40, 100, 20)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(190, 100, 210)}) - - def test_changing_keys(self): - # Emulate a case where the second call to disk_io() - # (or whatever) provides a new disk, then the new disk - # disappears on the third call. - input = {'disk1': nt(5, 5, 5)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - input = {'disk1': nt(5, 5, 5), - 'disk2': nt(7, 7, 7)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - input = {'disk1': nt(8, 8, 8)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - - def test_changing_keys_w_wrap(self): - input = {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 100)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - # disk 2 wraps - input = {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 10)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 110)}) - # disk 2 disappears - input = {'disk1': nt(50, 50, 50)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - - # then it appears again; the old wrap is supposed to be - # gone. 
- input = {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 100)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - # remains the same - input = {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 100)} - self.assertEqual(wrap_numbers(input, 'disk_io'), input) - # and then wraps again - input = {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 10)} - self.assertEqual(wrap_numbers(input, 'disk_io'), - {'disk1': nt(50, 50, 50), - 'disk2': nt(100, 100, 110)}) - - def test_real_data(self): - d = {'nvme0n1': (300, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048), - 'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8), - 'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28), - 'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)} - self.assertEqual(wrap_numbers(d, 'disk_io'), d) - self.assertEqual(wrap_numbers(d, 'disk_io'), d) - # decrease this ↓ - d = {'nvme0n1': (100, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048), - 'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8), - 'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28), - 'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)} - out = wrap_numbers(d, 'disk_io') - self.assertEqual(out['nvme0n1'][0], 400) - - # --- cache tests - - def test_cache_first_call(self): - input = {'disk1': nt(5, 5, 5)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - self.assertEqual(cache[1], {'disk_io': {}}) - self.assertEqual(cache[2], {'disk_io': {}}) - - def test_cache_call_twice(self): - input = {'disk1': nt(5, 5, 5)} - wrap_numbers(input, 'disk_io') - input = {'disk1': nt(10, 10, 10)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - self.assertEqual( - cache[1], - {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}}) - self.assertEqual(cache[2], {'disk_io': {}}) - - def test_cache_wrap(self): - # let's say 100 is the threshold - input = {'disk1': nt(100, 100, 100)} - wrap_numbers(input, 'disk_io') - - # first wrap restarts from 10 - input = {'disk1': nt(100, 100, 10)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - self.assertEqual( - cache[1], - {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 100}}) - self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}}) - - def assert_(): - cache = wrap_numbers.cache_info() - self.assertEqual( - cache[1], - {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, - ('disk1', 2): 100}}) - self.assertEqual(cache[2], - {'disk_io': {'disk1': set([('disk1', 2)])}}) - - # then it remains the same - input = {'disk1': nt(100, 100, 10)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - assert_() - - # then it goes up - input = {'disk1': nt(100, 100, 90)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - assert_() - - # then it wraps again - input = {'disk1': nt(100, 100, 20)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - self.assertEqual( - cache[1], - {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 190}}) - self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}}) - - def test_cache_changing_keys(self): - input = {'disk1': nt(5, 5, 5)} - wrap_numbers(input, 'disk_io') - input = {'disk1': nt(5, 5, 5), 
- 'disk2': nt(7, 7, 7)} - wrap_numbers(input, 'disk_io') - cache = wrap_numbers.cache_info() - self.assertEqual(cache[0], {'disk_io': input}) - self.assertEqual( - cache[1], - {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}}) - self.assertEqual(cache[2], {'disk_io': {}}) - - def test_cache_clear(self): - input = {'disk1': nt(5, 5, 5)} - wrap_numbers(input, 'disk_io') - wrap_numbers(input, 'disk_io') - wrap_numbers.cache_clear('disk_io') - self.assertEqual(wrap_numbers.cache_info(), ({}, {}, {})) - wrap_numbers.cache_clear('disk_io') - wrap_numbers.cache_clear('?!?') - - @unittest.skipIf( - not psutil.disk_io_counters() or not psutil.net_io_counters(), - "no disks or NICs available") - def test_cache_clear_public_apis(self): - psutil.disk_io_counters() - psutil.net_io_counters() - caches = wrap_numbers.cache_info() - for cache in caches: - self.assertIn('psutil.disk_io_counters', cache) - self.assertIn('psutil.net_io_counters', cache) - - psutil.disk_io_counters.cache_clear() - caches = wrap_numbers.cache_info() - for cache in caches: - self.assertIn('psutil.net_io_counters', cache) - self.assertNotIn('psutil.disk_io_counters', cache) - - psutil.net_io_counters.cache_clear() - caches = wrap_numbers.cache_info() - self.assertEqual(caches, ({}, {}, {})) - - -# =================================================================== -# --- Example script tests -# =================================================================== - - -@unittest.skipIf(TOX, "can't test on TOX") -# See: https://travis-ci.org/giampaolo/psutil/jobs/295224806 -@unittest.skipIf(TRAVIS and not os.path.exists(SCRIPTS_DIR), - "can't locate scripts directory") -class TestScripts(unittest.TestCase): - """Tests for scripts in the "scripts" directory.""" - - @staticmethod - def assert_stdout(exe, *args, **kwargs): - exe = '%s' % os.path.join(SCRIPTS_DIR, exe) - cmd = [PYTHON_EXE, exe] - for arg in args: - cmd.append(arg) - try: - out = sh(cmd, **kwargs).strip() - except RuntimeError as err: - if 'AccessDenied' in str(err): - return str(err) - else: - raise - assert out, out - return out - - @staticmethod - def assert_syntax(exe, args=None): - exe = os.path.join(SCRIPTS_DIR, exe) - if PY3: - f = open(exe, 'rt', encoding='utf8') - else: - f = open(exe, 'rt') - with f: - src = f.read() - ast.parse(src) - - def test_coverage(self): - # make sure all example scripts have a test method defined - meths = dir(self) - for name in os.listdir(SCRIPTS_DIR): - if name.endswith('.py'): - if 'test_' + os.path.splitext(name)[0] not in meths: - # self.assert_stdout(name) - self.fail('no test defined for %r script' - % os.path.join(SCRIPTS_DIR, name)) - - @unittest.skipIf(not POSIX, "POSIX only") - def test_executable(self): - for name in os.listdir(SCRIPTS_DIR): - if name.endswith('.py'): - path = os.path.join(SCRIPTS_DIR, name) - if not stat.S_IXUSR & os.stat(path)[stat.ST_MODE]: - self.fail('%r is not executable' % path) - - def test_disk_usage(self): - self.assert_stdout('disk_usage.py') - - def test_free(self): - self.assert_stdout('free.py') - - def test_meminfo(self): - self.assert_stdout('meminfo.py') - - def test_procinfo(self): - self.assert_stdout('procinfo.py', str(os.getpid())) - - # can't find users on APPVEYOR or TRAVIS - @unittest.skipIf(APPVEYOR or TRAVIS and not psutil.users(), - "unreliable on APPVEYOR or TRAVIS") - def test_who(self): - self.assert_stdout('who.py') - - def test_ps(self): - self.assert_stdout('ps.py') - - def test_pstree(self): - self.assert_stdout('pstree.py') - - def test_netstat(self): - 
self.assert_stdout('netstat.py') - - # permission denied on travis - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - def test_ifconfig(self): - self.assert_stdout('ifconfig.py') - - @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported") - def test_pmap(self): - self.assert_stdout('pmap.py', str(os.getpid())) - - @unittest.skipIf(not HAS_MEMORY_FULL_INFO, "not supported") - def test_procsmem(self): - self.assert_stdout('procsmem.py', stderr=DEVNULL) - - def test_killall(self): - self.assert_syntax('killall.py') - - def test_nettop(self): - self.assert_syntax('nettop.py') - - def test_top(self): - self.assert_syntax('top.py') - - def test_iotop(self): - self.assert_syntax('iotop.py') - - def test_pidof(self): - output = self.assert_stdout('pidof.py', psutil.Process().name()) - self.assertIn(str(os.getpid()), output) - - @unittest.skipIf(not WINDOWS, "WINDOWS only") - def test_winservices(self): - self.assert_stdout('winservices.py') - - def test_cpu_distribution(self): - self.assert_syntax('cpu_distribution.py') - - @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported") - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - def test_temperatures(self): - self.assert_stdout('temperatures.py') - - @unittest.skipIf(not HAS_SENSORS_FANS, "not supported") - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - def test_fans(self): - self.assert_stdout('fans.py') - - @unittest.skipIf(not HAS_SENSORS_BATTERY, "not supported") - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_battery(self): - self.assert_stdout('battery.py') - - def test_sensors(self): - self.assert_stdout('sensors.py') - - -# =================================================================== -# --- Unit tests for test utilities. -# =================================================================== - - -class TestRetryDecorator(unittest.TestCase): - - @mock.patch('time.sleep') - def test_retry_success(self, sleep): - # Fail 3 times out of 5; make sure the decorated fun returns. - - @retry(retries=5, interval=1, logfun=None) - def foo(): - while queue: - queue.pop() - 1 / 0 - return 1 - - queue = list(range(3)) - self.assertEqual(foo(), 1) - self.assertEqual(sleep.call_count, 3) - - @mock.patch('time.sleep') - def test_retry_failure(self, sleep): - # Fail 6 times out of 5; the function is supposed to raise exc.
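For reference while reading TestRetryDecorator: a decorator with the observable behavior these tests assert could look like the sketch below (psutil's real retry also supports a timeout argument; this version only covers exception, retries, interval, and logfun):

import functools
import time

def retry(exception=Exception, retries=5, interval=0.1, logfun=None):
    # Retry on `exception` up to `retries` times; other exceptions
    # propagate immediately, sleep is skipped when interval is None,
    # and the last exception is re-raised once retries are exhausted.
    def decorator(fun):
        @functools.wraps(fun)
        def wrapper(*args, **kwargs):
            for attempt in range(retries + 1):
                try:
                    return fun(*args, **kwargs)
                except exception:
                    if logfun is not None:
                        logfun("retrying %s" % fun.__name__)
                    if attempt == retries:
                        raise
                    if interval is not None:
                        time.sleep(interval)
        return wrapper
    return decorator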
- - @retry(retries=5, interval=1, logfun=None) - def foo(): - while queue: - queue.pop() - 1 / 0 - return 1 - - queue = list(range(6)) - self.assertRaises(ZeroDivisionError, foo) - self.assertEqual(sleep.call_count, 5) - - @mock.patch('time.sleep') - def test_exception_arg(self, sleep): - @retry(exception=ValueError, interval=1) - def foo(): - raise TypeError - - self.assertRaises(TypeError, foo) - self.assertEqual(sleep.call_count, 0) - - @mock.patch('time.sleep') - def test_no_interval_arg(self, sleep): - # if interval is not specified sleep is not supposed to be called - - @retry(retries=5, interval=None, logfun=None) - def foo(): - 1 / 0 - - self.assertRaises(ZeroDivisionError, foo) - self.assertEqual(sleep.call_count, 0) - - @mock.patch('time.sleep') - def test_retries_arg(self, sleep): - - @retry(retries=5, interval=1, logfun=None) - def foo(): - 1 / 0 - - self.assertRaises(ZeroDivisionError, foo) - self.assertEqual(sleep.call_count, 5) - - @mock.patch('time.sleep') - def test_retries_and_timeout_args(self, sleep): - self.assertRaises(ValueError, retry, retries=5, timeout=1) - - -class TestSyncTestUtils(unittest.TestCase): - - def tearDown(self): - safe_rmpath(TESTFN) - - def test_wait_for_pid(self): - wait_for_pid(os.getpid()) - nopid = max(psutil.pids()) + 99999 - with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])): - self.assertRaises(psutil.NoSuchProcess, wait_for_pid, nopid) - - def test_wait_for_file(self): - with open(TESTFN, 'w') as f: - f.write('foo') - wait_for_file(TESTFN) - assert not os.path.exists(TESTFN) - - def test_wait_for_file_empty(self): - with open(TESTFN, 'w'): - pass - wait_for_file(TESTFN, empty=True) - assert not os.path.exists(TESTFN) - - def test_wait_for_file_no_file(self): - with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])): - self.assertRaises(IOError, wait_for_file, TESTFN) - - def test_wait_for_file_no_delete(self): - with open(TESTFN, 'w') as f: - f.write('foo') - wait_for_file(TESTFN, delete=False) - assert os.path.exists(TESTFN) - - def test_call_until(self): - ret = call_until(lambda: 1, "ret == 1") - self.assertEqual(ret, 1) - - -class TestFSTestUtils(unittest.TestCase): - - def setUp(self): - safe_rmpath(TESTFN) - - tearDown = setUp - - def test_safe_mkdir(self): - safe_mkdir(TESTFN) - assert os.path.isdir(TESTFN) - safe_mkdir(TESTFN) - assert os.path.isdir(TESTFN) - - def test_safe_rmpath(self): - # test file is removed - open(TESTFN, 'w').close() - safe_rmpath(TESTFN) - assert not os.path.exists(TESTFN) - # test no exception if path does not exist - safe_rmpath(TESTFN) - # test dir is removed - os.mkdir(TESTFN) - safe_rmpath(TESTFN) - assert not os.path.exists(TESTFN) - # test other exceptions are raised - with mock.patch('psutil.tests.os.stat', - side_effect=OSError(errno.EINVAL, "")) as m: - with self.assertRaises(OSError): - safe_rmpath(TESTFN) - assert m.called - - def test_chdir(self): - base = os.getcwd() - os.mkdir(TESTFN) - with chdir(TESTFN): - self.assertEqual(os.getcwd(), os.path.join(base, TESTFN)) - self.assertEqual(os.getcwd(), base) - - -class TestProcessUtils(unittest.TestCase): - - def test_reap_children(self): - subp = get_test_subprocess() - p = psutil.Process(subp.pid) - assert p.is_running() - reap_children() - assert not p.is_running() - assert not psutil.tests._pids_started - assert not psutil.tests._subprocesses_started - - def test_create_proc_children_pair(self): - p1, p2 = create_proc_children_pair() - self.assertNotEqual(p1.pid, p2.pid) - assert p1.is_running() - assert 
p2.is_running() - children = psutil.Process().children(recursive=True) - self.assertEqual(len(children), 2) - self.assertIn(p1, children) - self.assertIn(p2, children) - self.assertEqual(p1.ppid(), os.getpid()) - self.assertEqual(p2.ppid(), p1.pid) - - # make sure both of them are cleaned up - reap_children() - assert not p1.is_running() - assert not p2.is_running() - assert not psutil.tests._pids_started - assert not psutil.tests._subprocesses_started - - @unittest.skipIf(not POSIX, "POSIX only") - def test_create_zombie_proc(self): - zpid = create_zombie_proc() - self.addCleanup(reap_children, recursive=True) - p = psutil.Process(zpid) - self.assertEqual(p.status(), psutil.STATUS_ZOMBIE) - - -class TestNetUtils(unittest.TestCase): - - def bind_socket(self): - port = get_free_port() - with contextlib.closing(bind_socket(addr=('', port))) as s: - self.assertEqual(s.getsockname()[1], port) - - @unittest.skipIf(not POSIX, "POSIX only") - def test_bind_unix_socket(self): - with unix_socket_path() as name: - sock = bind_unix_socket(name) - with contextlib.closing(sock): - self.assertEqual(sock.family, socket.AF_UNIX) - self.assertEqual(sock.type, socket.SOCK_STREAM) - self.assertEqual(sock.getsockname(), name) - assert os.path.exists(name) - assert stat.S_ISSOCK(os.stat(name).st_mode) - # UDP - with unix_socket_path() as name: - sock = bind_unix_socket(name, type=socket.SOCK_DGRAM) - with contextlib.closing(sock): - self.assertEqual(sock.type, socket.SOCK_DGRAM) - - def tcp_tcp_socketpair(self): - addr = ("127.0.0.1", get_free_port()) - server, client = tcp_socketpair(socket.AF_INET, addr=addr) - with contextlib.closing(server): - with contextlib.closing(client): - # Ensure they are connected and the positions are - # correct. - self.assertEqual(server.getsockname(), addr) - self.assertEqual(client.getpeername(), addr) - self.assertNotEqual(client.getsockname(), addr) - - @unittest.skipIf(not POSIX, "POSIX only") - def test_unix_socketpair(self): - p = psutil.Process() - num_fds = p.num_fds() - assert not p.connections(kind='unix') - with unix_socket_path() as name: - server, client = unix_socketpair(name) - try: - assert os.path.exists(name) - assert stat.S_ISSOCK(os.stat(name).st_mode) - self.assertEqual(p.num_fds() - num_fds, 2) - self.assertEqual(len(p.connections(kind='unix')), 2) - self.assertEqual(server.getsockname(), name) - self.assertEqual(client.getpeername(), name) - finally: - client.close() - server.close() - - def test_create_sockets(self): - with create_sockets() as socks: - fams = collections.defaultdict(int) - types = collections.defaultdict(int) - for s in socks: - fams[s.family] += 1 - # work around http://bugs.python.org/issue30204 - types[s.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)] += 1 - self.assertGreaterEqual(fams[socket.AF_INET], 2) - if supports_ipv6(): - self.assertGreaterEqual(fams[socket.AF_INET6], 2) - if POSIX and HAS_CONNECTIONS_UNIX: - self.assertGreaterEqual(fams[socket.AF_UNIX], 2) - self.assertGreaterEqual(types[socket.SOCK_STREAM], 2) - self.assertGreaterEqual(types[socket.SOCK_DGRAM], 2) - - -class TestOtherUtils(unittest.TestCase): - - def test_is_namedtuple(self): - assert is_namedtuple(collections.namedtuple('foo', 'a b c')(1, 2, 3)) - assert not is_namedtuple(tuple()) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_osx.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_osx.py deleted file mode 100644 index 557af9f..0000000 --- 
a/server/www/packages/packages-darwin/x64/psutil/tests/test_osx.py +++ /dev/null @@ -1,303 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""MACOS specific tests.""" - -import os -import re -import time - -import psutil -from psutil import MACOS -from psutil.tests import create_zombie_proc -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_BATTERY -from psutil.tests import MEMORY_TOLERANCE -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import sh -from psutil.tests import unittest - - -PAGESIZE = os.sysconf("SC_PAGE_SIZE") if MACOS else None - - -def sysctl(cmdline): - """Expects a sysctl command with an argument and parse the result - returning only the value of interest. - """ - out = sh(cmdline) - result = out.split()[1] - try: - return int(result) - except ValueError: - return result - - -def vm_stat(field): - """Wrapper around 'vm_stat' cmdline utility.""" - out = sh('vm_stat') - for line in out.split('\n'): - if field in line: - break - else: - raise ValueError("line not found") - return int(re.search(r'\d+', line).group(0)) * PAGESIZE - - -# http://code.activestate.com/recipes/578019/ -def human2bytes(s): - SYMBOLS = { - 'customary': ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'), - } - init = s - num = "" - while s and s[0:1].isdigit() or s[0:1] == '.': - num += s[0] - s = s[1:] - num = float(num) - letter = s.strip() - for name, sset in SYMBOLS.items(): - if letter in sset: - break - else: - if letter == 'k': - sset = SYMBOLS['customary'] - letter = letter.upper() - else: - raise ValueError("can't interpret %r" % init) - prefix = {sset[0]: 1} - for i, s in enumerate(sset[1:]): - prefix[s] = 1 << (i + 1) * 10 - return int(num * prefix[letter]) - - -@unittest.skipIf(not MACOS, "MACOS only") -class TestProcess(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.pid = get_test_subprocess().pid - - @classmethod - def tearDownClass(cls): - reap_children() - - def test_process_create_time(self): - output = sh("ps -o lstart -p %s" % self.pid) - start_ps = output.replace('STARTED', '').strip() - hhmmss = start_ps.split(' ')[-2] - year = start_ps.split(' ')[-1] - start_psutil = psutil.Process(self.pid).create_time() - self.assertEqual( - hhmmss, - time.strftime("%H:%M:%S", time.localtime(start_psutil))) - self.assertEqual( - year, - time.strftime("%Y", time.localtime(start_psutil))) - - -@unittest.skipIf(not MACOS, "MACOS only") -class TestZombieProcessAPIs(unittest.TestCase): - - @classmethod - def setUpClass(cls): - zpid = create_zombie_proc() - cls.p = psutil.Process(zpid) - - @classmethod - def tearDownClass(cls): - reap_children(recursive=True) - - def test_pidtask_info(self): - self.assertEqual(self.p.status(), psutil.STATUS_ZOMBIE) - self.p.ppid() - self.p.uids() - self.p.gids() - self.p.terminal() - self.p.create_time() - - def test_exe(self): - self.assertRaises(psutil.ZombieProcess, self.p.exe) - - def test_cmdline(self): - self.assertRaises(psutil.ZombieProcess, self.p.cmdline) - - def test_environ(self): - self.assertRaises(psutil.ZombieProcess, self.p.environ) - - def test_cwd(self): - self.assertRaises(psutil.ZombieProcess, self.p.cwd) - - def test_memory_full_info(self): - self.assertRaises(psutil.ZombieProcess, self.p.memory_full_info) - - def test_cpu_times(self): - 
self.assertRaises(psutil.ZombieProcess, self.p.cpu_times) - - def test_num_ctx_switches(self): - self.assertRaises(psutil.ZombieProcess, self.p.num_ctx_switches) - - def test_num_threads(self): - self.assertRaises(psutil.ZombieProcess, self.p.num_threads) - - def test_open_files(self): - self.assertRaises(psutil.ZombieProcess, self.p.open_files) - - def test_connections(self): - self.assertRaises(psutil.ZombieProcess, self.p.connections) - - def test_num_fds(self): - self.assertRaises(psutil.ZombieProcess, self.p.num_fds) - - def test_threads(self): - self.assertRaises((psutil.ZombieProcess, psutil.AccessDenied), - self.p.threads) - - def test_memory_maps(self): - self.assertRaises(psutil.ZombieProcess, self.p.memory_maps) - - -@unittest.skipIf(not MACOS, "MACOS only") -class TestSystemAPIs(unittest.TestCase): - - # --- disk - - def test_disks(self): - # test psutil.disk_usage() and psutil.disk_partitions() - # against "df -a" - def df(path): - out = sh('df -k "%s"' % path).strip() - lines = out.split('\n') - lines.pop(0) - line = lines.pop(0) - dev, total, used, free = line.split()[:4] - if dev == 'none': - dev = '' - total = int(total) * 1024 - used = int(used) * 1024 - free = int(free) * 1024 - return dev, total, used, free - - for part in psutil.disk_partitions(all=False): - usage = psutil.disk_usage(part.mountpoint) - dev, total, used, free = df(part.mountpoint) - self.assertEqual(part.device, dev) - self.assertEqual(usage.total, total) - # 10 MB tolerance - if abs(usage.free - free) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.free, free)) - if abs(usage.used - used) > 10 * 1024 * 1024: - self.fail("psutil=%s, df=%s" % (usage.used, used)) - - # --- cpu - - def test_cpu_count_logical(self): - num = sysctl("sysctl hw.logicalcpu") - self.assertEqual(num, psutil.cpu_count(logical=True)) - - def test_cpu_count_physical(self): - num = sysctl("sysctl hw.physicalcpu") - self.assertEqual(num, psutil.cpu_count(logical=False)) - - def test_cpu_freq(self): - freq = psutil.cpu_freq() - self.assertEqual( - freq.current * 1000 * 1000, sysctl("sysctl hw.cpufrequency")) - self.assertEqual( - freq.min * 1000 * 1000, sysctl("sysctl hw.cpufrequency_min")) - self.assertEqual( - freq.max * 1000 * 1000, sysctl("sysctl hw.cpufrequency_max")) - - # --- virtual mem - - def test_vmem_total(self): - sysctl_hwphymem = sysctl('sysctl hw.memsize') - self.assertEqual(sysctl_hwphymem, psutil.virtual_memory().total) - - @retry_before_failing() - def test_vmem_free(self): - vmstat_val = vm_stat("free") - psutil_val = psutil.virtual_memory().free - self.assertAlmostEqual(psutil_val, vmstat_val, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_available(self): - vmstat_val = vm_stat("inactive") + vm_stat("free") - psutil_val = psutil.virtual_memory().available - self.assertAlmostEqual(psutil_val, vmstat_val, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_active(self): - vmstat_val = vm_stat("active") - psutil_val = psutil.virtual_memory().active - self.assertAlmostEqual(psutil_val, vmstat_val, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_inactive(self): - vmstat_val = vm_stat("inactive") - psutil_val = psutil.virtual_memory().inactive - self.assertAlmostEqual(psutil_val, vmstat_val, delta=MEMORY_TOLERANCE) - - @retry_before_failing() - def test_vmem_wired(self): - vmstat_val = vm_stat("wired") - psutil_val = psutil.virtual_memory().wired - self.assertAlmostEqual(psutil_val, vmstat_val, delta=MEMORY_TOLERANCE) - - # --- swap mem - -
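These macOS tests validate psutil against sysctl and vm_stat output. The same cross-check can be reproduced outside the test harness with the standard library alone; a sketch (macOS only, assumes psutil is installed):

import subprocess
import psutil

# Total physical memory according to the kernel...
sysctl_total = int(subprocess.check_output(
    ["sysctl", "-n", "hw.memsize"]).decode().strip())

# ...should match what psutil reports.
assert psutil.virtual_memory().total == sysctl_total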
@retry_before_failing() - def test_swapmem_sin(self): - vmstat_val = vm_stat("Pageins") - psutil_val = psutil.swap_memory().sin - self.assertEqual(psutil_val, vmstat_val) - - @retry_before_failing() - def test_swapmem_sout(self): - vmstat_val = vm_stat("Pageout") - psutil_val = psutil.swap_memory().sout - self.assertEqual(psutil_val, vmstat_val) - - # Not very reliable. - # def test_swapmem_total(self): - # out = sh('sysctl vm.swapusage') - # out = out.replace('vm.swapusage: ', '') - # total, used, free = re.findall('\d+.\d+\w', out) - # psutil_smem = psutil.swap_memory() - # self.assertEqual(psutil_smem.total, human2bytes(total)) - # self.assertEqual(psutil_smem.used, human2bytes(used)) - # self.assertEqual(psutil_smem.free, human2bytes(free)) - - # --- network - - def test_net_if_stats(self): - for name, stats in psutil.net_if_stats().items(): - try: - out = sh("ifconfig %s" % name) - except RuntimeError: - pass - else: - self.assertEqual(stats.isup, 'RUNNING' in out, msg=out) - self.assertEqual(stats.mtu, - int(re.findall(r'mtu (\d+)', out)[0])) - - # --- sensors_battery - - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_sensors_battery(self): - out = sh("pmset -g batt") - percent = re.search("(\d+)%", out).group(1) - drawing_from = re.search("Now drawing from '([^']+)'", out).group(1) - power_plugged = drawing_from == "AC Power" - psutil_result = psutil.sensors_battery() - self.assertEqual(psutil_result.power_plugged, power_plugged) - self.assertEqual(psutil_result.percent, int(percent)) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_posix.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_posix.py deleted file mode 100644 index b80128c..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_posix.py +++ /dev/null @@ -1,418 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""POSIX specific tests.""" - -import datetime -import errno -import os -import re -import subprocess -import sys -import time - -import psutil -from psutil import AIX -from psutil import BSD -from psutil import LINUX -from psutil import MACOS -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil._compat import PY3 -from psutil.tests import APPVEYOR -from psutil.tests import get_kernel_version -from psutil.tests import get_test_subprocess -from psutil.tests import mock -from psutil.tests import PYTHON_EXE -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import sh -from psutil.tests import skip_on_access_denied -from psutil.tests import TRAVIS -from psutil.tests import unittest -from psutil.tests import wait_for_pid -from psutil.tests import which - - -def ps(cmd): - """Expects a ps command with a -o argument and parse the result - returning only the value of interest. - """ - if not LINUX: - cmd = cmd.replace(" --no-headers ", " ") - if SUNOS: - cmd = cmd.replace("-o start", "-o stime") - if AIX: - cmd = cmd.replace("-o rss", "-o rssize") - output = sh(cmd) - if not LINUX: - output = output.split('\n')[1].strip() - try: - return int(output) - except ValueError: - return output - -# ps "-o" field names differ wildly between platforms. 
-# "comm" means "only executable name" but is not available on BSD platforms. -# "args" means "command with all its arguments", and is also not available -# on BSD platforms. -# "command" is like "args" on most platforms, but like "comm" on AIX, -# and not available on SUNOS. -# so for the executable name we can use "comm" on Solaris and split "command" -# on other platforms. -# to get the cmdline (with args) we have to use "args" on AIX and -# Solaris, and can use "command" on all others. - - -def ps_name(pid): - field = "command" - if SUNOS: - field = "comm" - return ps("ps --no-headers -o %s -p %s" % (field, pid)).split(' ')[0] - - -def ps_args(pid): - field = "command" - if AIX or SUNOS: - field = "args" - return ps("ps --no-headers -o %s -p %s" % (field, pid)) - - -@unittest.skipIf(not POSIX, "POSIX only") -class TestProcess(unittest.TestCase): - """Compare psutil results against 'ps' command line utility (mainly).""" - - @classmethod - def setUpClass(cls): - cls.pid = get_test_subprocess([PYTHON_EXE, "-E", "-O"], - stdin=subprocess.PIPE).pid - wait_for_pid(cls.pid) - - @classmethod - def tearDownClass(cls): - reap_children() - - def test_ppid(self): - ppid_ps = ps("ps --no-headers -o ppid -p %s" % self.pid) - ppid_psutil = psutil.Process(self.pid).ppid() - self.assertEqual(ppid_ps, ppid_psutil) - - def test_uid(self): - uid_ps = ps("ps --no-headers -o uid -p %s" % self.pid) - uid_psutil = psutil.Process(self.pid).uids().real - self.assertEqual(uid_ps, uid_psutil) - - def test_gid(self): - gid_ps = ps("ps --no-headers -o rgid -p %s" % self.pid) - gid_psutil = psutil.Process(self.pid).gids().real - self.assertEqual(gid_ps, gid_psutil) - - def test_username(self): - username_ps = ps("ps --no-headers -o user -p %s" % self.pid) - username_psutil = psutil.Process(self.pid).username() - self.assertEqual(username_ps, username_psutil) - - def test_username_no_resolution(self): - # Emulate a case where the system can't resolve the uid to - # a username in which case psutil is supposed to return - # the stringified uid. - p = psutil.Process() - with mock.patch("psutil.pwd.getpwuid", side_effect=KeyError) as fun: - self.assertEqual(p.username(), str(p.uids().real)) - assert fun.called - - @skip_on_access_denied() - @retry_before_failing() - def test_rss_memory(self): - # give python interpreter some time to properly initialize - # so that the results are the same - time.sleep(0.1) - rss_ps = ps("ps --no-headers -o rss -p %s" % self.pid) - rss_psutil = psutil.Process(self.pid).memory_info()[0] / 1024 - self.assertEqual(rss_ps, rss_psutil) - - @skip_on_access_denied() - @retry_before_failing() - def test_vsz_memory(self): - # give python interpreter some time to properly initialize - # so that the results are the same - time.sleep(0.1) - vsz_ps = ps("ps --no-headers -o vsz -p %s" % self.pid) - vsz_psutil = psutil.Process(self.pid).memory_info()[1] / 1024 - self.assertEqual(vsz_ps, vsz_psutil) - - def test_name(self): - name_ps = ps_name(self.pid) - # remove path if there is any, from the command - name_ps = os.path.basename(name_ps).lower() - name_psutil = psutil.Process(self.pid).name().lower() - # ...because of how we calculate PYTHON_EXE; on MACOS this may - # be "pythonX.Y". - name_ps = re.sub(r"\d.\d", "", name_ps) - name_psutil = re.sub(r"\d.\d", "", name_psutil) - self.assertEqual(name_ps, name_psutil) - - def test_name_long(self): - # On UNIX the kernel truncates the name to the first 15 - # characters. In such a case psutil tries to determine the - # full name from the cmdline. 
- name = "long-program-name" - cmdline = ["long-program-name-extended", "foo", "bar"] - with mock.patch("psutil._psplatform.Process.name", - return_value=name): - with mock.patch("psutil._psplatform.Process.cmdline", - return_value=cmdline): - p = psutil.Process() - self.assertEqual(p.name(), "long-program-name-extended") - - def test_name_long_cmdline_ad_exc(self): - # Same as above but emulates a case where cmdline() raises - # AccessDenied in which case psutil is supposed to return - # the truncated name instead of crashing. - name = "long-program-name" - with mock.patch("psutil._psplatform.Process.name", - return_value=name): - with mock.patch("psutil._psplatform.Process.cmdline", - side_effect=psutil.AccessDenied(0, "")): - p = psutil.Process() - self.assertEqual(p.name(), "long-program-name") - - def test_name_long_cmdline_nsp_exc(self): - # Same as above but emulates a case where cmdline() raises NSP - # which is supposed to propagate. - name = "long-program-name" - with mock.patch("psutil._psplatform.Process.name", - return_value=name): - with mock.patch("psutil._psplatform.Process.cmdline", - side_effect=psutil.NoSuchProcess(0, "")): - p = psutil.Process() - self.assertRaises(psutil.NoSuchProcess, p.name) - - @unittest.skipIf(MACOS or BSD, 'ps -o start not available') - def test_create_time(self): - time_ps = ps("ps --no-headers -o start -p %s" % self.pid).split(' ')[0] - time_psutil = psutil.Process(self.pid).create_time() - time_psutil_tstamp = datetime.datetime.fromtimestamp( - time_psutil).strftime("%H:%M:%S") - # sometimes ps shows the time rounded up instead of down, so we check - # for both possible values - round_time_psutil = round(time_psutil) - round_time_psutil_tstamp = datetime.datetime.fromtimestamp( - round_time_psutil).strftime("%H:%M:%S") - self.assertIn(time_ps, [time_psutil_tstamp, round_time_psutil_tstamp]) - - def test_exe(self): - ps_pathname = ps_name(self.pid) - psutil_pathname = psutil.Process(self.pid).exe() - try: - self.assertEqual(ps_pathname, psutil_pathname) - except AssertionError: - # certain platforms such as BSD are more accurate returning: - # "/usr/local/bin/python2.7" - # ...instead of: - # "/usr/local/bin/python" - # We do not want to consider this difference in accuracy - # an error. 
- adjusted_ps_pathname = ps_pathname[:len(ps_pathname)] - self.assertEqual(ps_pathname, adjusted_ps_pathname) - - def test_cmdline(self): - ps_cmdline = ps_args(self.pid) - psutil_cmdline = " ".join(psutil.Process(self.pid).cmdline()) - self.assertEqual(ps_cmdline, psutil_cmdline) - - # On SUNOS "ps" reads niceness /proc/pid/psinfo which returns an - # incorrect value (20); the real deal is getpriority(2) which - # returns 0; psutil relies on it, see: - # https://github.com/giampaolo/psutil/issues/1082 - # AIX has the same issue - @unittest.skipIf(SUNOS, "not reliable on SUNOS") - @unittest.skipIf(AIX, "not reliable on AIX") - def test_nice(self): - ps_nice = ps("ps --no-headers -o nice -p %s" % self.pid) - psutil_nice = psutil.Process().nice() - self.assertEqual(ps_nice, psutil_nice) - - def test_num_fds(self): - # Note: this fails from time to time; I'm keen on thinking - # it doesn't mean something is broken - def call(p, attr): - args = () - attr = getattr(p, name, None) - if attr is not None and callable(attr): - if name == 'rlimit': - args = (psutil.RLIMIT_NOFILE,) - attr(*args) - else: - attr - - p = psutil.Process(os.getpid()) - failures = [] - ignored_names = ['terminate', 'kill', 'suspend', 'resume', 'nice', - 'send_signal', 'wait', 'children', 'as_dict', - 'memory_info_ex'] - if LINUX and get_kernel_version() < (2, 6, 36): - ignored_names.append('rlimit') - if LINUX and get_kernel_version() < (2, 6, 23): - ignored_names.append('num_ctx_switches') - for name in dir(psutil.Process): - if (name.startswith('_') or name in ignored_names): - continue - else: - try: - num1 = p.num_fds() - for x in range(2): - call(p, name) - num2 = p.num_fds() - except psutil.AccessDenied: - pass - else: - if abs(num2 - num1) > 1: - fail = "failure while processing Process.%s method " \ - "(before=%s, after=%s)" % (name, num1, num2) - failures.append(fail) - if failures: - self.fail('\n' + '\n'.join(failures)) - - -@unittest.skipIf(not POSIX, "POSIX only") -class TestSystemAPIs(unittest.TestCase): - """Test some system APIs.""" - - @retry_before_failing() - def test_pids(self): - # Note: this test might fail if the OS is starting/killing - # other processes in the meantime - if SUNOS or AIX: - cmd = ["ps", "-A", "-o", "pid"] - else: - cmd = ["ps", "ax", "-o", "pid"] - p = get_test_subprocess(cmd, stdout=subprocess.PIPE) - output = p.communicate()[0].strip() - assert p.poll() == 0 - if PY3: - output = str(output, sys.stdout.encoding) - pids_ps = [] - for line in output.split('\n')[1:]: - if line: - pid = int(line.split()[0].strip()) - pids_ps.append(pid) - # remove ps subprocess pid which is supposed to be dead in meantime - pids_ps.remove(p.pid) - pids_psutil = psutil.pids() - pids_ps.sort() - pids_psutil.sort() - - # on MACOS and OPENBSD ps doesn't show pid 0 - if MACOS or OPENBSD and 0 not in pids_ps: - pids_ps.insert(0, 0) - self.assertEqual(pids_ps, pids_psutil) - - # for some reason ifconfig -a does not report all interfaces - # returned by psutil - @unittest.skipIf(SUNOS, "unreliable on SUNOS") - @unittest.skipIf(TRAVIS, "unreliable on TRAVIS") - @unittest.skipIf(not which('ifconfig'), "no ifconfig cmd") - def test_nic_names(self): - output = sh("ifconfig -a") - for nic in psutil.net_io_counters(pernic=True).keys(): - for line in output.split(): - if line.startswith(nic): - break - else: - self.fail( - "couldn't find %s nic in 'ifconfig -a' output\n%s" % ( - nic, output)) - - # can't find users on APPVEYOR or TRAVIS - @unittest.skipIf(APPVEYOR or TRAVIS and not psutil.users(), - "unreliable on 
APPVEYOR or TRAVIS") - @retry_before_failing() - def test_users(self): - out = sh("who") - lines = out.split('\n') - users = [x.split()[0] for x in lines] - terminals = [x.split()[1] for x in lines] - self.assertEqual(len(users), len(psutil.users())) - for u in psutil.users(): - self.assertIn(u.name, users) - self.assertIn(u.terminal, terminals) - - def test_pid_exists_let_raise(self): - # According to "man 2 kill" possible error values for kill - # are (EINVAL, EPERM, ESRCH). Test that any other errno - # results in an exception. - with mock.patch("psutil._psposix.os.kill", - side_effect=OSError(errno.EBADF, "")) as m: - self.assertRaises(OSError, psutil._psposix.pid_exists, os.getpid()) - assert m.called - - def test_os_waitpid_let_raise(self): - # os.waitpid() is supposed to catch EINTR and ECHILD only. - # Test that any other errno results in an exception. - with mock.patch("psutil._psposix.os.waitpid", - side_effect=OSError(errno.EBADF, "")) as m: - self.assertRaises(OSError, psutil._psposix.wait_pid, os.getpid()) - assert m.called - - def test_os_waitpid_eintr(self): - # os.waitpid() is supposed to "retry" on EINTR. - with mock.patch("psutil._psposix.os.waitpid", - side_effect=OSError(errno.EINTR, "")) as m: - self.assertRaises( - psutil._psposix.TimeoutExpired, - psutil._psposix.wait_pid, os.getpid(), timeout=0.01) - assert m.called - - def test_os_waitpid_bad_ret_status(self): - # Simulate os.waitpid() returning a bad status. - with mock.patch("psutil._psposix.os.waitpid", - return_value=(1, -1)) as m: - self.assertRaises(ValueError, - psutil._psposix.wait_pid, os.getpid()) - assert m.called - - # AIX can return '-' in df output instead of numbers, e.g. for /proc - @unittest.skipIf(AIX, "unreliable on AIX") - def test_disk_usage(self): - def df(device): - out = sh("df -k %s" % device).strip() - line = out.split('\n')[1] - fields = line.split() - total = int(fields[1]) * 1024 - used = int(fields[2]) * 1024 - free = int(fields[3]) * 1024 - percent = float(fields[4].replace('%', '')) - return (total, used, free, percent) - - tolerance = 4 * 1024 * 1024 # 4MB - for part in psutil.disk_partitions(all=False): - usage = psutil.disk_usage(part.mountpoint) - try: - total, used, free, percent = df(part.device) - except RuntimeError as err: - # see: - # https://travis-ci.org/giampaolo/psutil/jobs/138338464 - # https://travis-ci.org/giampaolo/psutil/jobs/138343361 - err = str(err).lower() - if "no such file or directory" in err or \ - "raw devices not supported" in err or \ - "permission denied" in err: - continue - else: - raise - else: - self.assertAlmostEqual(usage.total, total, delta=tolerance) - self.assertAlmostEqual(usage.used, used, delta=tolerance) - self.assertAlmostEqual(usage.free, free, delta=tolerance) - self.assertAlmostEqual(usage.percent, percent, delta=1) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_process.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_process.py deleted file mode 100644 index 2308196..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_process.py +++ /dev/null @@ -1,1564 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Tests for psutil.Process class.""" - -import collections -import errno -import getpass -import itertools -import os -import signal -import socket -import subprocess -import sys -import tempfile -import textwrap -import time -import types - -import psutil - -from psutil import AIX -from psutil import BSD -from psutil import LINUX -from psutil import MACOS -from psutil import NETBSD -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._compat import long -from psutil._compat import PY3 -from psutil.tests import APPVEYOR -from psutil.tests import call_until -from psutil.tests import copyload_shared_lib -from psutil.tests import create_exe -from psutil.tests import create_proc_children_pair -from psutil.tests import create_zombie_proc -from psutil.tests import enum -from psutil.tests import get_test_subprocess -from psutil.tests import get_winver -from psutil.tests import HAS_CPU_AFFINITY -from psutil.tests import HAS_ENVIRON -from psutil.tests import HAS_IONICE -from psutil.tests import HAS_MEMORY_MAPS -from psutil.tests import HAS_PROC_CPU_NUM -from psutil.tests import HAS_PROC_IO_COUNTERS -from psutil.tests import HAS_RLIMIT -from psutil.tests import HAS_THREADS -from psutil.tests import mock -from psutil.tests import PYPY -from psutil.tests import PYTHON_EXE -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import sh -from psutil.tests import skip_on_access_denied -from psutil.tests import skip_on_not_implemented -from psutil.tests import TESTFILE_PREFIX -from psutil.tests import TESTFN -from psutil.tests import ThreadTask -from psutil.tests import TRAVIS -from psutil.tests import unittest -from psutil.tests import wait_for_pid -from psutil.tests import WIN_VISTA - - -# =================================================================== -# --- psutil.Process class tests -# =================================================================== - -class TestProcess(unittest.TestCase): - """Tests for psutil.Process class.""" - - def setUp(self): - safe_rmpath(TESTFN) - - def tearDown(self): - reap_children() - - def test_pid(self): - p = psutil.Process() - self.assertEqual(p.pid, os.getpid()) - sproc = get_test_subprocess() - self.assertEqual(psutil.Process(sproc.pid).pid, sproc.pid) - with self.assertRaises(AttributeError): - p.pid = 33 - - def test_kill(self): - sproc = get_test_subprocess() - test_pid = sproc.pid - p = psutil.Process(test_pid) - p.kill() - sig = p.wait() - self.assertFalse(psutil.pid_exists(test_pid)) - if POSIX: - self.assertEqual(sig, -signal.SIGKILL) - - def test_terminate(self): - sproc = get_test_subprocess() - test_pid = sproc.pid - p = psutil.Process(test_pid) - p.terminate() - sig = p.wait() - self.assertFalse(psutil.pid_exists(test_pid)) - if POSIX: - self.assertEqual(sig, -signal.SIGTERM) - - def test_send_signal(self): - sig = signal.SIGKILL if POSIX else signal.SIGTERM - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.send_signal(sig) - exit_sig = p.wait() - self.assertFalse(psutil.pid_exists(p.pid)) - if POSIX: - self.assertEqual(exit_sig, -sig) - # - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.send_signal(sig) - with mock.patch('psutil.os.kill', - side_effect=OSError(errno.ESRCH, "")): - with self.assertRaises(psutil.NoSuchProcess): - p.send_signal(sig) - # - sproc = get_test_subprocess() - p = 
psutil.Process(sproc.pid) - p.send_signal(sig) - with mock.patch('psutil.os.kill', - side_effect=OSError(errno.EPERM, "")): - with self.assertRaises(psutil.AccessDenied): - psutil.Process().send_signal(sig) - # Sending a signal to process with PID 0 is not allowed as - # it would affect every process in the process group of - # the calling process (os.getpid()) instead of PID 0"). - if 0 in psutil.pids(): - p = psutil.Process(0) - self.assertRaises(ValueError, p.send_signal, signal.SIGTERM) - - def test_wait(self): - # check exit code signal - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.kill() - code = p.wait() - if POSIX: - self.assertEqual(code, -signal.SIGKILL) - else: - self.assertEqual(code, signal.SIGTERM) - self.assertFalse(p.is_running()) - - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.terminate() - code = p.wait() - if POSIX: - self.assertEqual(code, -signal.SIGTERM) - else: - self.assertEqual(code, signal.SIGTERM) - self.assertFalse(p.is_running()) - - # check sys.exit() code - code = "import time, sys; time.sleep(0.01); sys.exit(5);" - sproc = get_test_subprocess([PYTHON_EXE, "-c", code]) - p = psutil.Process(sproc.pid) - self.assertEqual(p.wait(), 5) - self.assertFalse(p.is_running()) - - # Test wait() issued twice. - # It is not supposed to raise NSP when the process is gone. - # On UNIX this should return None, on Windows it should keep - # returning the exit code. - sproc = get_test_subprocess([PYTHON_EXE, "-c", code]) - p = psutil.Process(sproc.pid) - self.assertEqual(p.wait(), 5) - self.assertIn(p.wait(), (5, None)) - - # test timeout - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.name() - self.assertRaises(psutil.TimeoutExpired, p.wait, 0.01) - - # timeout < 0 not allowed - self.assertRaises(ValueError, p.wait, -1) - - def test_wait_non_children(self): - # Test wait() against a process which is not our direct - # child. - p1, p2 = create_proc_children_pair() - self.assertRaises(psutil.TimeoutExpired, p1.wait, 0.01) - self.assertRaises(psutil.TimeoutExpired, p2.wait, 0.01) - # We also terminate the direct child otherwise the - # grandchild will hang until the parent is gone. - p1.terminate() - p2.terminate() - ret1 = p1.wait() - ret2 = p2.wait() - if POSIX: - self.assertEqual(ret1, -signal.SIGTERM) - # For processes which are not our children we're supposed - # to get None. 
-            self.assertEqual(ret2, None)
-        else:
-            self.assertEqual(ret1, signal.SIGTERM)
-            self.assertEqual(ret2, signal.SIGTERM)
-
-    def test_wait_timeout_0(self):
-        sproc = get_test_subprocess()
-        p = psutil.Process(sproc.pid)
-        self.assertRaises(psutil.TimeoutExpired, p.wait, 0)
-        p.kill()
-        stop_at = time.time() + 2
-        while True:
-            try:
-                code = p.wait(0)
-            except psutil.TimeoutExpired:
-                if time.time() >= stop_at:
-                    raise
-            else:
-                break
-        if POSIX:
-            self.assertEqual(code, -signal.SIGKILL)
-        else:
-            self.assertEqual(code, signal.SIGTERM)
-        self.assertFalse(p.is_running())
-
-    def test_cpu_percent(self):
-        p = psutil.Process()
-        p.cpu_percent(interval=0.001)
-        p.cpu_percent(interval=0.001)
-        for x in range(100):
-            percent = p.cpu_percent(interval=None)
-            self.assertIsInstance(percent, float)
-            self.assertGreaterEqual(percent, 0.0)
-        with self.assertRaises(ValueError):
-            p.cpu_percent(interval=-1)
-
-    def test_cpu_percent_numcpus_none(self):
-        # See: https://github.com/giampaolo/psutil/issues/1087
-        with mock.patch('psutil.cpu_count', return_value=None) as m:
-            psutil.Process().cpu_percent()
-            assert m.called
-
-    def test_cpu_times(self):
-        times = psutil.Process().cpu_times()
-        assert (times.user > 0.0) or (times.system > 0.0), times
-        assert (times.children_user >= 0.0), times
-        assert (times.children_system >= 0.0), times
-        # make sure returned values can be pretty printed with strftime
-        for name in times._fields:
-            time.strftime("%H:%M:%S", time.localtime(getattr(times, name)))
-
-    def test_cpu_times_2(self):
-        user_time, kernel_time = psutil.Process().cpu_times()[:2]
-        utime, ktime = os.times()[:2]
-
-        # Use os.times()[:2] as base values to compare our results
-        # using a tolerance of +/- 0.1 seconds.
-        # It will fail if the difference between the values is > 0.1s.
-        if (max([user_time, utime]) - min([user_time, utime])) > 0.1:
-            self.fail("expected: %s, found: %s" % (utime, user_time))
-
-        if (max([kernel_time, ktime]) - min([kernel_time, ktime])) > 0.1:
-            self.fail("expected: %s, found: %s" % (ktime, kernel_time))
-
-    @unittest.skipIf(not HAS_PROC_CPU_NUM, "not supported")
-    def test_cpu_num(self):
-        p = psutil.Process()
-        num = p.cpu_num()
-        self.assertGreaterEqual(num, 0)
-        if psutil.cpu_count() == 1:
-            self.assertEqual(num, 0)
-        self.assertIn(p.cpu_num(), range(psutil.cpu_count()))
-
-    def test_create_time(self):
-        sproc = get_test_subprocess()
-        now = time.time()
-        p = psutil.Process(sproc.pid)
-        create_time = p.create_time()
-
-        # Use time.time() as base value to compare our result using a
-        # tolerance of +/- 1 second.
-        # It will fail if the difference between the values is > 2s.
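The wait()-related tests above encode a convention worth calling out: on POSIX, Process.wait() returns the negated signal number when the child was killed by a signal, and the plain exit status otherwise. A minimal sketch, assuming a POSIX system with a `sleep` binary on PATH:

import signal
import subprocess
import psutil

child = subprocess.Popen(["sleep", "60"])
p = psutil.Process(child.pid)
p.terminate()                       # delivers SIGTERM
assert p.wait() == -signal.SIGTERM  # negated signal number on POSIX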
- difference = abs(create_time - now) - if difference > 2: - self.fail("expected: %s, found: %s, difference: %s" - % (now, create_time, difference)) - - # make sure returned value can be pretty printed with strftime - time.strftime("%Y %m %d %H:%M:%S", time.localtime(p.create_time())) - - @unittest.skipIf(not POSIX, 'POSIX only') - @unittest.skipIf(TRAVIS, 'not reliable on TRAVIS') - def test_terminal(self): - terminal = psutil.Process().terminal() - if sys.stdin.isatty() or sys.stdout.isatty(): - tty = os.path.realpath(sh('tty')) - self.assertEqual(terminal, tty) - else: - self.assertIsNone(terminal) - - @unittest.skipIf(not HAS_PROC_IO_COUNTERS, 'not supported') - @skip_on_not_implemented(only_if=LINUX) - def test_io_counters(self): - p = psutil.Process() - - # test reads - io1 = p.io_counters() - with open(PYTHON_EXE, 'rb') as f: - f.read() - io2 = p.io_counters() - if not BSD and not AIX: - self.assertGreater(io2.read_count, io1.read_count) - self.assertEqual(io2.write_count, io1.write_count) - if LINUX: - self.assertGreater(io2.read_chars, io1.read_chars) - self.assertEqual(io2.write_chars, io1.write_chars) - else: - self.assertGreaterEqual(io2.read_bytes, io1.read_bytes) - self.assertGreaterEqual(io2.write_bytes, io1.write_bytes) - - # test writes - io1 = p.io_counters() - with tempfile.TemporaryFile(prefix=TESTFILE_PREFIX) as f: - if PY3: - f.write(bytes("x" * 1000000, 'ascii')) - else: - f.write("x" * 1000000) - io2 = p.io_counters() - self.assertGreaterEqual(io2.write_count, io1.write_count) - self.assertGreaterEqual(io2.write_bytes, io1.write_bytes) - self.assertGreaterEqual(io2.read_count, io1.read_count) - self.assertGreaterEqual(io2.read_bytes, io1.read_bytes) - if LINUX: - self.assertGreater(io2.write_chars, io1.write_chars) - self.assertGreaterEqual(io2.read_chars, io1.read_chars) - - # sanity check - for i in range(len(io2)): - if BSD and i >= 2: - # On BSD read_bytes and write_bytes are always set to -1. 
- continue - self.assertGreaterEqual(io2[i], 0) - self.assertGreaterEqual(io2[i], 0) - - @unittest.skipIf(not HAS_IONICE, "not supported") - @unittest.skipIf(WINDOWS and get_winver() < WIN_VISTA, 'not supported') - def test_ionice(self): - if LINUX: - from psutil import (IOPRIO_CLASS_NONE, IOPRIO_CLASS_RT, - IOPRIO_CLASS_BE, IOPRIO_CLASS_IDLE) - self.assertEqual(IOPRIO_CLASS_NONE, 0) - self.assertEqual(IOPRIO_CLASS_RT, 1) - self.assertEqual(IOPRIO_CLASS_BE, 2) - self.assertEqual(IOPRIO_CLASS_IDLE, 3) - p = psutil.Process() - try: - p.ionice(2) - ioclass, value = p.ionice() - if enum is not None: - self.assertIsInstance(ioclass, enum.IntEnum) - self.assertEqual(ioclass, 2) - self.assertEqual(value, 4) - # - p.ionice(3) - ioclass, value = p.ionice() - self.assertEqual(ioclass, 3) - self.assertEqual(value, 0) - # - p.ionice(2, 0) - ioclass, value = p.ionice() - self.assertEqual(ioclass, 2) - self.assertEqual(value, 0) - p.ionice(2, 7) - ioclass, value = p.ionice() - self.assertEqual(ioclass, 2) - self.assertEqual(value, 7) - finally: - p.ionice(IOPRIO_CLASS_NONE) - else: - p = psutil.Process() - original = p.ionice() - self.assertIsInstance(original, int) - try: - value = 0 # very low - if original == value: - value = 1 # low - p.ionice(value) - self.assertEqual(p.ionice(), value) - finally: - p.ionice(original) - - @unittest.skipIf(not HAS_IONICE, "not supported") - @unittest.skipIf(WINDOWS and get_winver() < WIN_VISTA, 'not supported') - def test_ionice_errs(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - if LINUX: - self.assertRaises(ValueError, p.ionice, 2, 10) - self.assertRaises(ValueError, p.ionice, 2, -1) - self.assertRaises(ValueError, p.ionice, 4) - self.assertRaises(TypeError, p.ionice, 2, "foo") - self.assertRaisesRegex( - ValueError, "can't specify value with IOPRIO_CLASS_NONE", - p.ionice, psutil.IOPRIO_CLASS_NONE, 1) - self.assertRaisesRegex( - ValueError, "can't specify value with IOPRIO_CLASS_IDLE", - p.ionice, psutil.IOPRIO_CLASS_IDLE, 1) - self.assertRaisesRegex( - ValueError, "'ioclass' argument must be specified", - p.ionice, value=1) - else: - self.assertRaises(ValueError, p.ionice, 3) - self.assertRaises(TypeError, p.ionice, 2, 1) - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_get(self): - import resource - p = psutil.Process(os.getpid()) - names = [x for x in dir(psutil) if x.startswith('RLIMIT')] - assert names, names - for name in names: - value = getattr(psutil, name) - self.assertGreaterEqual(value, 0) - if name in dir(resource): - self.assertEqual(value, getattr(resource, name)) - # XXX - On PyPy RLIMIT_INFINITY returned by - # resource.getrlimit() is reported as a very big long - # number instead of -1. It looks like a bug with PyPy. - if PYPY: - continue - self.assertEqual(p.rlimit(value), resource.getrlimit(value)) - else: - ret = p.rlimit(value) - self.assertEqual(len(ret), 2) - self.assertGreaterEqual(ret[0], -1) - self.assertGreaterEqual(ret[1], -1) - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_set(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.rlimit(psutil.RLIMIT_NOFILE, (5, 5)) - self.assertEqual(p.rlimit(psutil.RLIMIT_NOFILE), (5, 5)) - # If pid is 0 prlimit() applies to the calling process and - # we don't want that. 
- with self.assertRaises(ValueError): - psutil._psplatform.Process(0).rlimit(0) - with self.assertRaises(ValueError): - p.rlimit(psutil.RLIMIT_NOFILE, (5, 5, 5)) - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit(self): - p = psutil.Process() - soft, hard = p.rlimit(psutil.RLIMIT_FSIZE) - try: - p.rlimit(psutil.RLIMIT_FSIZE, (1024, hard)) - with open(TESTFN, "wb") as f: - f.write(b"X" * 1024) - # write() or flush() doesn't always cause the exception - # but close() will. - with self.assertRaises(IOError) as exc: - with open(TESTFN, "wb") as f: - f.write(b"X" * 1025) - self.assertEqual(exc.exception.errno if PY3 else exc.exception[0], - errno.EFBIG) - finally: - p.rlimit(psutil.RLIMIT_FSIZE, (soft, hard)) - self.assertEqual(p.rlimit(psutil.RLIMIT_FSIZE), (soft, hard)) - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_infinity(self): - # First set a limit, then re-set it by specifying INFINITY - # and assume we overridden the previous limit. - p = psutil.Process() - soft, hard = p.rlimit(psutil.RLIMIT_FSIZE) - try: - p.rlimit(psutil.RLIMIT_FSIZE, (1024, hard)) - p.rlimit(psutil.RLIMIT_FSIZE, (psutil.RLIM_INFINITY, hard)) - with open(TESTFN, "wb") as f: - f.write(b"X" * 2048) - finally: - p.rlimit(psutil.RLIMIT_FSIZE, (soft, hard)) - self.assertEqual(p.rlimit(psutil.RLIMIT_FSIZE), (soft, hard)) - - @unittest.skipIf(not HAS_RLIMIT, "not supported") - def test_rlimit_infinity_value(self): - # RLIMIT_FSIZE should be RLIM_INFINITY, which will be a really - # big number on a platform with large file support. On these - # platforms we need to test that the get/setrlimit functions - # properly convert the number to a C long long and that the - # conversion doesn't raise an error. - p = psutil.Process() - soft, hard = p.rlimit(psutil.RLIMIT_FSIZE) - self.assertEqual(psutil.RLIM_INFINITY, hard) - p.rlimit(psutil.RLIMIT_FSIZE, (soft, hard)) - - def test_num_threads(self): - # on certain platforms such as Linux we might test for exact - # thread number, since we always have with 1 thread per process, - # but this does not apply across all platforms (MACOS, Windows) - p = psutil.Process() - if OPENBSD: - try: - step1 = p.num_threads() - except psutil.AccessDenied: - raise unittest.SkipTest("on OpenBSD this requires root access") - else: - step1 = p.num_threads() - - with ThreadTask(): - step2 = p.num_threads() - self.assertEqual(step2, step1 + 1) - - @unittest.skipIf(not WINDOWS, 'WINDOWS only') - def test_num_handles(self): - # a better test is done later into test/_windows.py - p = psutil.Process() - self.assertGreater(p.num_handles(), 0) - - @unittest.skipIf(not HAS_THREADS, 'not supported') - def test_threads(self): - p = psutil.Process() - if OPENBSD: - try: - step1 = p.threads() - except psutil.AccessDenied: - raise unittest.SkipTest("on OpenBSD this requires root access") - else: - step1 = p.threads() - - with ThreadTask(): - step2 = p.threads() - self.assertEqual(len(step2), len(step1) + 1) - # on Linux, first thread id is supposed to be this process - if LINUX: - self.assertEqual(step2[0].id, os.getpid()) - athread = step2[0] - # test named tuple - self.assertEqual(athread.id, athread[0]) - self.assertEqual(athread.user_time, athread[1]) - self.assertEqual(athread.system_time, athread[2]) - - @retry_before_failing() - @skip_on_access_denied(only_if=MACOS) - @unittest.skipIf(not HAS_THREADS, 'not supported') - def test_threads_2(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - if OPENBSD: - try: - p.threads() - except 
psutil.AccessDenied: - raise unittest.SkipTest( - "on OpenBSD this requires root access") - self.assertAlmostEqual( - p.cpu_times().user, - sum([x.user_time for x in p.threads()]), delta=0.1) - self.assertAlmostEqual( - p.cpu_times().system, - sum([x.system_time for x in p.threads()]), delta=0.1) - - def test_memory_info(self): - p = psutil.Process() - - # step 1 - get a base value to compare our results - rss1, vms1 = p.memory_info()[:2] - percent1 = p.memory_percent() - self.assertGreater(rss1, 0) - self.assertGreater(vms1, 0) - - # step 2 - allocate some memory - memarr = [None] * 1500000 - - rss2, vms2 = p.memory_info()[:2] - percent2 = p.memory_percent() - - # step 3 - make sure that the memory usage bumped up - self.assertGreater(rss2, rss1) - self.assertGreaterEqual(vms2, vms1) # vms might be equal - self.assertGreater(percent2, percent1) - del memarr - - if WINDOWS: - mem = p.memory_info() - self.assertEqual(mem.rss, mem.wset) - self.assertEqual(mem.vms, mem.pagefile) - - mem = p.memory_info() - for name in mem._fields: - self.assertGreaterEqual(getattr(mem, name), 0) - - def test_memory_full_info(self): - total = psutil.virtual_memory().total - mem = psutil.Process().memory_full_info() - for name in mem._fields: - value = getattr(mem, name) - self.assertGreaterEqual(value, 0, msg=(name, value)) - self.assertLessEqual(value, total, msg=(name, value, total)) - if LINUX or WINDOWS or MACOS: - self.assertGreaterEqual(mem.uss, 0) - if LINUX: - self.assertGreaterEqual(mem.pss, 0) - self.assertGreaterEqual(mem.swap, 0) - - @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported") - def test_memory_maps(self): - p = psutil.Process() - maps = p.memory_maps() - paths = [x for x in maps] - self.assertEqual(len(paths), len(set(paths))) - ext_maps = p.memory_maps(grouped=False) - - for nt in maps: - if not nt.path.startswith('['): - assert os.path.isabs(nt.path), nt.path - if POSIX: - try: - assert os.path.exists(nt.path) or \ - os.path.islink(nt.path), nt.path - except AssertionError: - if not LINUX: - raise - else: - # https://github.com/giampaolo/psutil/issues/759 - with open('/proc/self/smaps') as f: - data = f.read() - if "%s (deleted)" % nt.path not in data: - raise - else: - # XXX - On Windows we have this strange behavior with - # 64 bit dlls: they are visible via explorer but cannot - # be accessed via os.stat() (wtf?). - if '64' not in os.path.basename(nt.path): - assert os.path.exists(nt.path), nt.path - for nt in ext_maps: - for fname in nt._fields: - value = getattr(nt, fname) - if fname == 'path': - continue - elif fname in ('addr', 'perms'): - assert value, value - else: - self.assertIsInstance(value, (int, long)) - assert value >= 0, value - - @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported") - def test_memory_maps_lists_lib(self): - # Make sure a newly loaded shared lib is listed. 
-        with copyload_shared_lib() as path:
-            def normpath(p):
-                return os.path.realpath(os.path.normcase(p))
-            libpaths = [normpath(x.path)
-                        for x in psutil.Process().memory_maps()]
-            self.assertIn(normpath(path), libpaths)
-
-    def test_memory_percent(self):
-        p = psutil.Process()
-        ret = p.memory_percent()
-        assert 0 <= ret <= 100, ret
-        ret = p.memory_percent(memtype='vms')
-        assert 0 <= ret <= 100, ret
-        self.assertRaises(ValueError, p.memory_percent, memtype="?!?")
-        if LINUX or MACOS or WINDOWS:
-            ret = p.memory_percent(memtype='uss')
-            assert 0 <= ret <= 100, ret
-
-    def test_is_running(self):
-        sproc = get_test_subprocess()
-        p = psutil.Process(sproc.pid)
-        assert p.is_running()
-        assert p.is_running()
-        p.kill()
-        p.wait()
-        assert not p.is_running()
-        assert not p.is_running()
-
-    def test_exe(self):
-        sproc = get_test_subprocess()
-        exe = psutil.Process(sproc.pid).exe()
-        try:
-            self.assertEqual(exe, PYTHON_EXE)
-        except AssertionError:
-            if WINDOWS and len(exe) == len(PYTHON_EXE):
-                # on Windows we don't care about case sensitivity
-                normcase = os.path.normcase
-                self.assertEqual(normcase(exe), normcase(PYTHON_EXE))
-            else:
-                # certain platforms such as BSD are more accurate returning:
-                # "/usr/local/bin/python2.7"
-                # ...instead of:
-                # "/usr/local/bin/python"
-                # We do not want to consider this difference in accuracy
-                # an error.
-                ver = "%s.%s" % (sys.version_info[0], sys.version_info[1])
-                try:
-                    self.assertEqual(exe.replace(ver, ''),
-                                     PYTHON_EXE.replace(ver, ''))
-                except AssertionError:
-                    # Typically MACOS. Really not sure what to do here.
-                    pass
-
-        out = sh([exe, "-c", "import os; print('hey')"])
-        self.assertEqual(out, 'hey')
-
-    def test_cmdline(self):
-        cmdline = [PYTHON_EXE, "-c", "import time; time.sleep(60)"]
-        sproc = get_test_subprocess(cmdline)
-        try:
-            self.assertEqual(' '.join(psutil.Process(sproc.pid).cmdline()),
-                             ' '.join(cmdline))
-        except AssertionError:
-            # XXX - most of the times the underlying sysctl() call on Net
-            # and Open BSD returns a truncated string.
-            # Also /proc/pid/cmdline behaves the same so it looks
-            # like this is a kernel bug.
- # XXX - AIX truncates long arguments in /proc/pid/cmdline - if NETBSD or OPENBSD or AIX: - self.assertEqual( - psutil.Process(sproc.pid).cmdline()[0], PYTHON_EXE) - else: - raise - - def test_name(self): - sproc = get_test_subprocess(PYTHON_EXE) - name = psutil.Process(sproc.pid).name().lower() - pyexe = os.path.basename(os.path.realpath(sys.executable)).lower() - assert pyexe.startswith(name), (pyexe, name) - - # XXX - @unittest.skipIf(SUNOS, "broken on SUNOS") - @unittest.skipIf(AIX, "broken on AIX") - def test_prog_w_funky_name(self): - # Test that name(), exe() and cmdline() correctly handle programs - # with funky chars such as spaces and ")", see: - # https://github.com/giampaolo/psutil/issues/628 - - def rm(): - # Try to limit occasional failures on Appveyor: - # https://ci.appveyor.com/project/giampaolo/psutil/build/1350/ - # job/lbo3bkju55le850n - try: - safe_rmpath(funky_path) - except OSError: - pass - - funky_path = TESTFN + 'foo bar )' - create_exe(funky_path) - self.addCleanup(rm) - cmdline = [funky_path, "-c", - "import time; [time.sleep(0.01) for x in range(3000)];" - "arg1", "arg2", "", "arg3", ""] - sproc = get_test_subprocess(cmdline) - p = psutil.Process(sproc.pid) - # ...in order to try to prevent occasional failures on travis - if TRAVIS: - wait_for_pid(p.pid) - self.assertEqual(p.cmdline(), cmdline) - self.assertEqual(p.name(), os.path.basename(funky_path)) - self.assertEqual(os.path.normcase(p.exe()), - os.path.normcase(funky_path)) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_uids(self): - p = psutil.Process() - real, effective, saved = p.uids() - # os.getuid() refers to "real" uid - self.assertEqual(real, os.getuid()) - # os.geteuid() refers to "effective" uid - self.assertEqual(effective, os.geteuid()) - # No such thing as os.getsuid() ("saved" uid), but starting - # from python 2.7 we have os.getresuid() which returns all - # of them. - if hasattr(os, "getresuid"): - self.assertEqual(os.getresuid(), p.uids()) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_gids(self): - p = psutil.Process() - real, effective, saved = p.gids() - # os.getuid() refers to "real" uid - self.assertEqual(real, os.getgid()) - # os.geteuid() refers to "effective" uid - self.assertEqual(effective, os.getegid()) - # No such thing as os.getsgid() ("saved" gid), but starting - # from python 2.7 we have os.getresgid() which returns all - # of them. 
- if hasattr(os, "getresuid"): - self.assertEqual(os.getresgid(), p.gids()) - - def test_nice(self): - p = psutil.Process() - self.assertRaises(TypeError, p.nice, "str") - if WINDOWS: - try: - init = p.nice() - if sys.version_info > (3, 4): - self.assertIsInstance(init, enum.IntEnum) - else: - self.assertIsInstance(init, int) - self.assertEqual(init, psutil.NORMAL_PRIORITY_CLASS) - p.nice(psutil.HIGH_PRIORITY_CLASS) - self.assertEqual(p.nice(), psutil.HIGH_PRIORITY_CLASS) - p.nice(psutil.NORMAL_PRIORITY_CLASS) - self.assertEqual(p.nice(), psutil.NORMAL_PRIORITY_CLASS) - finally: - p.nice(psutil.NORMAL_PRIORITY_CLASS) - else: - first_nice = p.nice() - try: - if hasattr(os, "getpriority"): - self.assertEqual( - os.getpriority(os.PRIO_PROCESS, os.getpid()), p.nice()) - p.nice(1) - self.assertEqual(p.nice(), 1) - if hasattr(os, "getpriority"): - self.assertEqual( - os.getpriority(os.PRIO_PROCESS, os.getpid()), p.nice()) - # XXX - going back to previous nice value raises - # AccessDenied on MACOS - if not MACOS: - p.nice(0) - self.assertEqual(p.nice(), 0) - except psutil.AccessDenied: - pass - finally: - try: - p.nice(first_nice) - except psutil.AccessDenied: - pass - - def test_status(self): - p = psutil.Process() - self.assertEqual(p.status(), psutil.STATUS_RUNNING) - - def test_username(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - username = p.username() - if WINDOWS: - domain, username = username.split('\\') - self.assertEqual(username, getpass.getuser()) - if 'USERDOMAIN' in os.environ: - self.assertEqual(domain, os.environ['USERDOMAIN']) - else: - self.assertEqual(username, getpass.getuser()) - - def test_cwd(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - self.assertEqual(p.cwd(), os.getcwd()) - - def test_cwd_2(self): - cmd = [PYTHON_EXE, "-c", - "import os, time; os.chdir('..'); time.sleep(60)"] - sproc = get_test_subprocess(cmd) - p = psutil.Process(sproc.pid) - call_until(p.cwd, "ret == os.path.dirname(os.getcwd())") - - @unittest.skipIf(not HAS_CPU_AFFINITY, 'not supported') - def test_cpu_affinity(self): - p = psutil.Process() - initial = p.cpu_affinity() - assert initial, initial - self.addCleanup(p.cpu_affinity, initial) - - if hasattr(os, "sched_getaffinity"): - self.assertEqual(initial, list(os.sched_getaffinity(p.pid))) - self.assertEqual(len(initial), len(set(initial))) - - all_cpus = list(range(len(psutil.cpu_percent(percpu=True)))) - # Work around travis failure: - # https://travis-ci.org/giampaolo/psutil/builds/284173194 - for n in all_cpus if not TRAVIS else initial: - p.cpu_affinity([n]) - self.assertEqual(p.cpu_affinity(), [n]) - if hasattr(os, "sched_getaffinity"): - self.assertEqual(p.cpu_affinity(), - list(os.sched_getaffinity(p.pid))) - # also test num_cpu() - if hasattr(p, "num_cpu"): - self.assertEqual(p.cpu_affinity()[0], p.num_cpu()) - - # [] is an alias for "all eligible CPUs"; on Linux this may - # not be equal to all available CPUs, see: - # https://github.com/giampaolo/psutil/issues/956 - p.cpu_affinity([]) - if LINUX: - self.assertEqual(p.cpu_affinity(), p._proc._get_eligible_cpus()) - else: - self.assertEqual(p.cpu_affinity(), all_cpus) - if hasattr(os, "sched_getaffinity"): - self.assertEqual(p.cpu_affinity(), - list(os.sched_getaffinity(p.pid))) - # - self.assertRaises(TypeError, p.cpu_affinity, 1) - p.cpu_affinity(initial) - # it should work with all iterables, not only lists - p.cpu_affinity(set(all_cpus)) - p.cpu_affinity(tuple(all_cpus)) - - @unittest.skipIf(not HAS_CPU_AFFINITY, 'not supported') - 
def test_cpu_affinity_errs(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - invalid_cpu = [len(psutil.cpu_times(percpu=True)) + 10] - self.assertRaises(ValueError, p.cpu_affinity, invalid_cpu) - self.assertRaises(ValueError, p.cpu_affinity, range(10000, 11000)) - self.assertRaises(TypeError, p.cpu_affinity, [0, "1"]) - self.assertRaises(ValueError, p.cpu_affinity, [0, -1]) - - @unittest.skipIf(not HAS_CPU_AFFINITY, 'not supported') - def test_cpu_affinity_all_combinations(self): - p = psutil.Process() - initial = p.cpu_affinity() - assert initial, initial - self.addCleanup(p.cpu_affinity, initial) - - # All possible CPU set combinations. - combos = [] - for l in range(0, len(initial) + 1): - for subset in itertools.combinations(initial, l): - if subset: - combos.append(list(subset)) - - for combo in combos: - p.cpu_affinity(combo) - self.assertEqual(p.cpu_affinity(), combo) - - # TODO: #595 - @unittest.skipIf(BSD, "broken on BSD") - # can't find any process file on Appveyor - @unittest.skipIf(APPVEYOR, "unreliable on APPVEYOR") - def test_open_files(self): - # current process - p = psutil.Process() - files = p.open_files() - self.assertFalse(TESTFN in files) - with open(TESTFN, 'wb') as f: - f.write(b'x' * 1024) - f.flush() - # give the kernel some time to see the new file - files = call_until(p.open_files, "len(ret) != %i" % len(files)) - for file in files: - if file.path == TESTFN: - if LINUX: - self.assertEqual(file.position, 1024) - break - else: - self.fail("no file found; files=%s" % repr(files)) - for file in files: - assert os.path.isfile(file.path), file - - # another process - cmdline = "import time; f = open(r'%s', 'r'); time.sleep(60);" % TESTFN - sproc = get_test_subprocess([PYTHON_EXE, "-c", cmdline]) - p = psutil.Process(sproc.pid) - - for x in range(100): - filenames = [x.path for x in p.open_files()] - if TESTFN in filenames: - break - time.sleep(.01) - else: - self.assertIn(TESTFN, filenames) - for file in filenames: - assert os.path.isfile(file), file - - # TODO: #595 - @unittest.skipIf(BSD, "broken on BSD") - # can't find any process file on Appveyor - @unittest.skipIf(APPVEYOR, "unreliable on APPVEYOR") - def test_open_files_2(self): - # test fd and path fields - with open(TESTFN, 'w') as fileobj: - p = psutil.Process() - for file in p.open_files(): - if file.path == fileobj.name or file.fd == fileobj.fileno(): - break - else: - self.fail("no file found; files=%s" % repr(p.open_files())) - self.assertEqual(file.path, fileobj.name) - if WINDOWS: - self.assertEqual(file.fd, -1) - else: - self.assertEqual(file.fd, fileobj.fileno()) - # test positions - ntuple = p.open_files()[0] - self.assertEqual(ntuple[0], ntuple.path) - self.assertEqual(ntuple[1], ntuple.fd) - # test file is gone - self.assertNotIn(fileobj.name, p.open_files()) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_num_fds(self): - p = psutil.Process() - start = p.num_fds() - file = open(TESTFN, 'w') - self.addCleanup(file.close) - self.assertEqual(p.num_fds(), start + 1) - sock = socket.socket() - self.addCleanup(sock.close) - self.assertEqual(p.num_fds(), start + 2) - file.close() - sock.close() - self.assertEqual(p.num_fds(), start) - - @skip_on_not_implemented(only_if=LINUX) - @unittest.skipIf(OPENBSD or NETBSD, "not reliable on OPENBSD & NETBSD") - def test_num_ctx_switches(self): - p = psutil.Process() - before = sum(p.num_ctx_switches()) - for x in range(500000): - after = sum(p.num_ctx_switches()) - if after > before: - return - self.fail("num ctx switches still the 
same after 50.000 iterations") - - def test_ppid(self): - if hasattr(os, 'getppid'): - self.assertEqual(psutil.Process().ppid(), os.getppid()) - this_parent = os.getpid() - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - self.assertEqual(p.ppid(), this_parent) - # no other process is supposed to have us as parent - reap_children(recursive=True) - if APPVEYOR: - # Occasional failures, see: - # https://ci.appveyor.com/project/giampaolo/psutil/build/ - # job/0hs623nenj7w4m33 - return - for p in psutil.process_iter(): - if p.pid == sproc.pid: - continue - # XXX: sometimes this fails on Windows; not sure why. - self.assertNotEqual(p.ppid(), this_parent, msg=p) - - def test_parent(self): - this_parent = os.getpid() - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - self.assertEqual(p.parent().pid, this_parent) - - def test_parent_disappeared(self): - # Emulate a case where the parent process disappeared. - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - with mock.patch("psutil.Process", - side_effect=psutil.NoSuchProcess(0, 'foo')): - self.assertIsNone(p.parent()) - - def test_children(self): - reap_children(recursive=True) - p = psutil.Process() - self.assertEqual(p.children(), []) - self.assertEqual(p.children(recursive=True), []) - # On Windows we set the flag to 0 in order to cancel out the - # CREATE_NO_WINDOW flag (enabled by default) which creates - # an extra "conhost.exe" child. - sproc = get_test_subprocess(creationflags=0) - children1 = p.children() - children2 = p.children(recursive=True) - for children in (children1, children2): - self.assertEqual(len(children), 1) - self.assertEqual(children[0].pid, sproc.pid) - self.assertEqual(children[0].ppid(), os.getpid()) - - def test_children_recursive(self): - # Test children() against two sub processes, p1 and p2, where - # p1 (our child) spawned p2 (our grandchild). - p1, p2 = create_proc_children_pair() - p = psutil.Process() - self.assertEqual(p.children(), [p1]) - self.assertEqual(p.children(recursive=True), [p1, p2]) - # If the intermediate process is gone there's no way for - # children() to recursively find it. - p1.terminate() - p1.wait() - self.assertEqual(p.children(recursive=True), []) - - def test_children_duplicates(self): - # find the process which has the highest number of children - table = collections.defaultdict(int) - for p in psutil.process_iter(): - try: - table[p.ppid()] += 1 - except psutil.Error: - pass - # this is the one, now let's make sure there are no duplicates - pid = sorted(table.items(), key=lambda x: x[1])[-1][0] - p = psutil.Process(pid) - try: - c = p.children(recursive=True) - except psutil.AccessDenied: # windows - pass - else: - self.assertEqual(len(c), len(set(c))) - - def test_suspend_resume(self): - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.suspend() - for x in range(100): - if p.status() == psutil.STATUS_STOPPED: - break - time.sleep(0.01) - p.resume() - self.assertNotEqual(p.status(), psutil.STATUS_STOPPED) - - def test_invalid_pid(self): - self.assertRaises(TypeError, psutil.Process, "1") - self.assertRaises(ValueError, psutil.Process, -1) - - def test_as_dict(self): - p = psutil.Process() - d = p.as_dict(attrs=['exe', 'name']) - self.assertEqual(sorted(d.keys()), ['exe', 'name']) - - p = psutil.Process(min(psutil.pids())) - d = p.as_dict(attrs=['connections'], ad_value='foo') - if not isinstance(d['connections'], list): - self.assertEqual(d['connections'], 'foo') - - # Test ad_value is set on AccessDenied. 
- with mock.patch('psutil.Process.nice', create=True, - side_effect=psutil.AccessDenied): - self.assertEqual( - p.as_dict(attrs=["nice"], ad_value=1), {"nice": 1}) - - # Test that NoSuchProcess bubbles up. - with mock.patch('psutil.Process.nice', create=True, - side_effect=psutil.NoSuchProcess(p.pid, "name")): - self.assertRaises( - psutil.NoSuchProcess, p.as_dict, attrs=["nice"]) - - # Test that ZombieProcess is swallowed. - with mock.patch('psutil.Process.nice', create=True, - side_effect=psutil.ZombieProcess(p.pid, "name")): - self.assertEqual( - p.as_dict(attrs=["nice"], ad_value="foo"), {"nice": "foo"}) - - # By default APIs raising NotImplementedError are - # supposed to be skipped. - with mock.patch('psutil.Process.nice', create=True, - side_effect=NotImplementedError): - d = p.as_dict() - self.assertNotIn('nice', list(d.keys())) - # ...unless the user explicitly asked for some attr. - with self.assertRaises(NotImplementedError): - p.as_dict(attrs=["nice"]) - - # errors - with self.assertRaises(TypeError): - p.as_dict('name') - with self.assertRaises(ValueError): - p.as_dict(['foo']) - with self.assertRaises(ValueError): - p.as_dict(['foo', 'bar']) - - def test_oneshot(self): - with mock.patch("psutil._psplatform.Process.cpu_times") as m: - p = psutil.Process() - with p.oneshot(): - p.cpu_times() - p.cpu_times() - self.assertEqual(m.call_count, 1) - - with mock.patch("psutil._psplatform.Process.cpu_times") as m: - p.cpu_times() - p.cpu_times() - self.assertEqual(m.call_count, 2) - - def test_oneshot_twice(self): - # Test the case where the ctx manager is __enter__ed twice. - # The second __enter__ is supposed to resut in a NOOP. - with mock.patch("psutil._psplatform.Process.cpu_times") as m1: - with mock.patch("psutil._psplatform.Process.oneshot_enter") as m2: - p = psutil.Process() - with p.oneshot(): - p.cpu_times() - p.cpu_times() - with p.oneshot(): - p.cpu_times() - p.cpu_times() - self.assertEqual(m1.call_count, 1) - self.assertEqual(m2.call_count, 1) - - with mock.patch("psutil._psplatform.Process.cpu_times") as m: - p.cpu_times() - p.cpu_times() - self.assertEqual(m.call_count, 2) - - def test_halfway_terminated_process(self): - # Test that NoSuchProcess exception gets raised in case the - # process dies after we create the Process object. 
- # Example: - # >>> proc = Process(1234) - # >>> time.sleep(2) # time-consuming task, process dies in meantime - # >>> proc.name() - # Refers to Issue #15 - sproc = get_test_subprocess() - p = psutil.Process(sproc.pid) - p.terminate() - p.wait() - if WINDOWS: - call_until(psutil.pids, "%s not in ret" % p.pid) - self.assertFalse(p.is_running()) - # self.assertFalse(p.pid in psutil.pids(), msg="retcode = %s" % - # retcode) - - excluded_names = ['pid', 'is_running', 'wait', 'create_time', - 'oneshot', 'memory_info_ex'] - if LINUX and not HAS_RLIMIT: - excluded_names.append('rlimit') - for name in dir(p): - if (name.startswith('_') or - name in excluded_names): - continue - try: - meth = getattr(p, name) - # get/set methods - if name == 'nice': - if POSIX: - ret = meth(1) - else: - ret = meth(psutil.NORMAL_PRIORITY_CLASS) - elif name == 'ionice': - ret = meth() - ret = meth(2) - elif name == 'rlimit': - ret = meth(psutil.RLIMIT_NOFILE) - ret = meth(psutil.RLIMIT_NOFILE, (5, 5)) - elif name == 'cpu_affinity': - ret = meth() - ret = meth([0]) - elif name == 'send_signal': - ret = meth(signal.SIGTERM) - else: - ret = meth() - except psutil.ZombieProcess: - self.fail("ZombieProcess for %r was not supposed to happen" % - name) - except psutil.NoSuchProcess: - pass - except psutil.AccessDenied: - if OPENBSD and name in ('threads', 'num_threads'): - pass - else: - raise - except NotImplementedError: - pass - else: - self.fail( - "NoSuchProcess exception not raised for %r, retval=%s" % ( - name, ret)) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_zombie_process(self): - def succeed_or_zombie_p_exc(fun, *args, **kwargs): - try: - return fun(*args, **kwargs) - except (psutil.ZombieProcess, psutil.AccessDenied): - pass - - zpid = create_zombie_proc() - self.addCleanup(reap_children, recursive=True) - # A zombie process should always be instantiable - zproc = psutil.Process(zpid) - # ...and at least its status always be querable - self.assertEqual(zproc.status(), psutil.STATUS_ZOMBIE) - # ...and it should be considered 'running' - self.assertTrue(zproc.is_running()) - # ...and as_dict() shouldn't crash - zproc.as_dict() - # if cmdline succeeds it should be an empty list - ret = succeed_or_zombie_p_exc(zproc.suspend) - if ret is not None: - self.assertEqual(ret, []) - - if hasattr(zproc, "rlimit"): - succeed_or_zombie_p_exc(zproc.rlimit, psutil.RLIMIT_NOFILE) - succeed_or_zombie_p_exc(zproc.rlimit, psutil.RLIMIT_NOFILE, - (5, 5)) - # set methods - succeed_or_zombie_p_exc(zproc.parent) - if hasattr(zproc, 'cpu_affinity'): - try: - succeed_or_zombie_p_exc(zproc.cpu_affinity, [0]) - except ValueError as err: - if TRAVIS and LINUX and "not eligible" in str(err): - # https://travis-ci.org/giampaolo/psutil/jobs/279890461 - pass - else: - raise - - succeed_or_zombie_p_exc(zproc.nice, 0) - if hasattr(zproc, 'ionice'): - if LINUX: - succeed_or_zombie_p_exc(zproc.ionice, 2, 0) - else: - succeed_or_zombie_p_exc(zproc.ionice, 0) # Windows - if hasattr(zproc, 'rlimit'): - succeed_or_zombie_p_exc(zproc.rlimit, - psutil.RLIMIT_NOFILE, (5, 5)) - succeed_or_zombie_p_exc(zproc.suspend) - succeed_or_zombie_p_exc(zproc.resume) - succeed_or_zombie_p_exc(zproc.terminate) - succeed_or_zombie_p_exc(zproc.kill) - - # ...its parent should 'see' it - # edit: not true on BSD and MACOS - # descendants = [x.pid for x in psutil.Process().children( - # recursive=True)] - # self.assertIn(zpid, descendants) - # XXX should we also assume ppid be usable? 
Note: this
-        # would be an important use case as the only way to get
-        # rid of a zombie is to kill its parent.
-        # self.assertEqual(zpid.ppid(), os.getpid())
-        # ...and all other APIs should be able to deal with it
-        self.assertTrue(psutil.pid_exists(zpid))
-        if not TRAVIS and MACOS:
-            # For some reason this started failing all of a sudden.
-            # Maybe they upgraded MACOS version?
-            # https://travis-ci.org/giampaolo/psutil/jobs/310896404
-            self.assertIn(zpid, psutil.pids())
-            self.assertIn(zpid, [x.pid for x in psutil.process_iter()])
-            psutil._pmap = {}
-            self.assertIn(zpid, [x.pid for x in psutil.process_iter()])
-
-    @unittest.skipIf(not POSIX, 'POSIX only')
-    def test_zombie_process_is_running_w_exc(self):
-        # Emulate a case where internally is_running() raises
-        # ZombieProcess.
-        p = psutil.Process()
-        with mock.patch("psutil.Process",
-                        side_effect=psutil.ZombieProcess(0)) as m:
-            assert p.is_running()
-            assert m.called
-
-    @unittest.skipIf(not POSIX, 'POSIX only')
-    def test_zombie_process_status_w_exc(self):
-        # Emulate a case where internally status() raises
-        # ZombieProcess.
-        p = psutil.Process()
-        with mock.patch("psutil._psplatform.Process.status",
-                        side_effect=psutil.ZombieProcess(0)) as m:
-            self.assertEqual(p.status(), psutil.STATUS_ZOMBIE)
-            assert m.called
-
-    def test_pid_0(self):
-        # Process(0) is supposed to work on all platforms except Linux
-        if 0 not in psutil.pids():
-            self.assertRaises(psutil.NoSuchProcess, psutil.Process, 0)
-            return
-
-        # test all methods
-        p = psutil.Process(0)
-        for name in psutil._as_dict_attrnames:
-            if name == 'pid':
-                continue
-            meth = getattr(p, name)
-            try:
-                ret = meth()
-            except psutil.AccessDenied:
-                pass
-            else:
-                if name in ("uids", "gids"):
-                    self.assertEqual(ret.real, 0)
-                elif name == "username":
-                    if POSIX:
-                        self.assertEqual(p.username(), 'root')
-                    elif WINDOWS:
-                        self.assertEqual(p.username(), 'NT AUTHORITY\\SYSTEM')
-                elif name == "name":
-                    assert name, name
-
-        if hasattr(p, 'rlimit'):
-            try:
-                p.rlimit(psutil.RLIMIT_FSIZE)
-            except psutil.AccessDenied:
-                pass
-
-        p.as_dict()
-
-        if not OPENBSD:
-            self.assertIn(0, psutil.pids())
-            self.assertTrue(psutil.pid_exists(0))
-
-    @unittest.skipIf(not HAS_ENVIRON, "not supported")
-    def test_environ(self):
-        def clean_dict(d):
-            # Most of these are problematic on Travis.
-            d.pop("PSUTIL_TESTING", None)
-            d.pop("PLAT", None)
-            d.pop("HOME", None)
-            if MACOS:
-                d.pop("__CF_USER_TEXT_ENCODING", None)
-                d.pop("VERSIONER_PYTHON_PREFER_32_BIT", None)
-                d.pop("VERSIONER_PYTHON_VERSION", None)
-            return dict(
-                [(k.replace("\r", "").replace("\n", ""),
-                  v.replace("\r", "").replace("\n", ""))
-                 for k, v in d.items()])
-
-        self.maxDiff = None
-        p = psutil.Process()
-        d1 = clean_dict(p.environ())
-        d2 = clean_dict(os.environ.copy())
-        self.assertEqual(d1, d2)
-
-    @unittest.skipIf(not HAS_ENVIRON, "not supported")
-    @unittest.skipIf(not POSIX, "POSIX only")
-    def test_weird_environ(self):
-        # environment variables can contain values without an equals sign
-        code = textwrap.dedent("""
-            #include <unistd.h>
-            #include <fcntl.h>
-            char * const argv[] = {"cat", 0};
-            char * const envp[] = {"A=1", "X", "C=3", 0};
-            int main(void) {
-                /* Close stderr on exec so parent can wait for the execve to
-                 * finish.
*/ - if (fcntl(2, F_SETFD, FD_CLOEXEC) != 0) - return 0; - return execve("/bin/cat", argv, envp); - } - """) - path = TESTFN - create_exe(path, c_code=code) - self.addCleanup(safe_rmpath, path) - sproc = get_test_subprocess([path], - stdin=subprocess.PIPE, - stderr=subprocess.PIPE) - p = psutil.Process(sproc.pid) - wait_for_pid(p.pid) - self.assertTrue(p.is_running()) - # Wait for process to exec or exit. - self.assertEqual(sproc.stderr.read(), b"") - self.assertEqual(p.environ(), {"A": "1", "C": "3"}) - sproc.communicate() - self.assertEqual(sproc.returncode, 0) - - -# =================================================================== -# --- Limited user tests -# =================================================================== - - -if POSIX and os.getuid() == 0: - class LimitedUserTestCase(TestProcess): - """Repeat the previous tests by using a limited user. - Executed only on UNIX and only if the user who run the test script - is root. - """ - # the uid/gid the test suite runs under - if hasattr(os, 'getuid'): - PROCESS_UID = os.getuid() - PROCESS_GID = os.getgid() - - def __init__(self, *args, **kwargs): - TestProcess.__init__(self, *args, **kwargs) - # re-define all existent test methods in order to - # ignore AccessDenied exceptions - for attr in [x for x in dir(self) if x.startswith('test')]: - meth = getattr(self, attr) - - def test_(self): - try: - meth() - except psutil.AccessDenied: - pass - setattr(self, attr, types.MethodType(test_, self)) - - def setUp(self): - safe_rmpath(TESTFN) - TestProcess.setUp(self) - os.setegid(1000) - os.seteuid(1000) - - def tearDown(self): - os.setegid(self.PROCESS_UID) - os.seteuid(self.PROCESS_GID) - TestProcess.tearDown(self) - - def test_nice(self): - try: - psutil.Process().nice(-1) - except psutil.AccessDenied: - pass - else: - self.fail("exception not raised") - - def test_zombie_process(self): - # causes problems if test test suite is run as root - pass - - -# =================================================================== -# --- psutil.Popen tests -# =================================================================== - - -class TestPopen(unittest.TestCase): - """Tests for psutil.Popen class.""" - - def tearDown(self): - reap_children() - - def test_misc(self): - # XXX this test causes a ResourceWarning on Python 3 because - # psutil.__subproc instance doesn't get propertly freed. - # Not sure what to do though. - cmd = [PYTHON_EXE, "-c", "import time; time.sleep(60);"] - with psutil.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) as proc: - proc.name() - proc.cpu_times() - proc.stdin - self.assertTrue(dir(proc)) - self.assertRaises(AttributeError, getattr, proc, 'foo') - proc.terminate() - - def test_ctx_manager(self): - with psutil.Popen([PYTHON_EXE, "-V"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - stdin=subprocess.PIPE) as proc: - proc.communicate() - assert proc.stdout.closed - assert proc.stderr.closed - assert proc.stdin.closed - self.assertEqual(proc.returncode, 0) - - def test_kill_terminate(self): - # subprocess.Popen()'s terminate(), kill() and send_signal() do - # not raise exception after the process is gone. psutil.Popen - # diverges from that. 
-        cmd = [PYTHON_EXE, "-c", "import time; time.sleep(60);"]
-        with psutil.Popen(cmd, stdout=subprocess.PIPE,
-                          stderr=subprocess.PIPE) as proc:
-            proc.terminate()
-            proc.wait()
-            self.assertRaises(psutil.NoSuchProcess, proc.terminate)
-            self.assertRaises(psutil.NoSuchProcess, proc.kill)
-            self.assertRaises(psutil.NoSuchProcess, proc.send_signal,
-                              signal.SIGTERM)
-            if WINDOWS and sys.version_info >= (2, 7):
-                self.assertRaises(psutil.NoSuchProcess, proc.send_signal,
-                                  signal.CTRL_C_EVENT)
-                self.assertRaises(psutil.NoSuchProcess, proc.send_signal,
-                                  signal.CTRL_BREAK_EVENT)
-
-
-if __name__ == '__main__':
-    run_test_module_by_name(__file__)
diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_sunos.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_sunos.py
deleted file mode 100644
index ea9afcd..0000000
--- a/server/www/packages/packages-darwin/x64/psutil/tests/test_sunos.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Sun OS specific tests."""
-
-import os
-
-import psutil
-from psutil import SUNOS
-from psutil.tests import run_test_module_by_name
-from psutil.tests import sh
-from psutil.tests import unittest
-
-
-@unittest.skipIf(not SUNOS, "SUNOS only")
-class SunOSSpecificTestCase(unittest.TestCase):
-
-    def test_swap_memory(self):
-        out = sh('env PATH=/usr/sbin:/sbin:%s swap -l' % os.environ['PATH'])
-        lines = out.strip().split('\n')[1:]
-        if not lines:
-            raise ValueError('no swap device(s) configured')
-        total = free = 0
-        for line in lines:
-            line = line.split()
-            t, f = line[-2:]
-            total += int(int(t) * 512)
-            free += int(int(f) * 512)
-        used = total - free
-
-        psutil_swap = psutil.swap_memory()
-        self.assertEqual(psutil_swap.total, total)
-        self.assertEqual(psutil_swap.used, used)
-        self.assertEqual(psutil_swap.free, free)
-
-    def test_cpu_count(self):
-        out = sh("/usr/sbin/psrinfo")
-        self.assertEqual(psutil.cpu_count(), len(out.split('\n')))
-
-
-if __name__ == '__main__':
-    run_test_module_by_name(__file__)
diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_system.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_system.py
deleted file mode 100644
index f9006ce..0000000
--- a/server/www/packages/packages-darwin/x64/psutil/tests/test_system.py
+++ /dev/null
@@ -1,869 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
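The TestPopen cases removed above lean on a behavioral difference that is easy to miss: unlike subprocess.Popen, psutil.Popen raises NoSuchProcess when you signal a child that has already exited. A minimal sketch of that contract, assuming a `python3` interpreter is on PATH:

import subprocess
import psutil

with psutil.Popen(["python3", "-V"], stdout=subprocess.PIPE,
                  stderr=subprocess.PIPE) as proc:
    proc.communicate()          # let the child exit and drain the pipes
    try:
        proc.terminate()        # plain subprocess.Popen would ignore this
    except psutil.NoSuchProcess:
        pass
assert proc.returncode == 0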
- -"""Tests for system APIS.""" - -import contextlib -import datetime -import errno -import os -import pprint -import shutil -import signal -import socket -import sys -import tempfile -import time - -import psutil -from psutil import AIX -from psutil import BSD -from psutil import FREEBSD -from psutil import LINUX -from psutil import MACOS -from psutil import NETBSD -from psutil import OPENBSD -from psutil import POSIX -from psutil import SUNOS -from psutil import WINDOWS -from psutil._compat import long -from psutil.tests import APPVEYOR -from psutil.tests import ASCII_FS -from psutil.tests import check_net_address -from psutil.tests import DEVNULL -from psutil.tests import enum -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_BATTERY -from psutil.tests import HAS_CPU_FREQ -from psutil.tests import HAS_SENSORS_BATTERY -from psutil.tests import HAS_SENSORS_FANS -from psutil.tests import HAS_SENSORS_TEMPERATURES -from psutil.tests import mock -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import safe_rmpath -from psutil.tests import TESTFN -from psutil.tests import TESTFN_UNICODE -from psutil.tests import TRAVIS -from psutil.tests import unittest - - -# =================================================================== -# --- System-related API tests -# =================================================================== - - -class TestSystemAPIs(unittest.TestCase): - """Tests for system-related APIs.""" - - def setUp(self): - safe_rmpath(TESTFN) - - def tearDown(self): - reap_children() - - def test_process_iter(self): - self.assertIn(os.getpid(), [x.pid for x in psutil.process_iter()]) - sproc = get_test_subprocess() - self.assertIn(sproc.pid, [x.pid for x in psutil.process_iter()]) - p = psutil.Process(sproc.pid) - p.kill() - p.wait() - self.assertNotIn(sproc.pid, [x.pid for x in psutil.process_iter()]) - - with mock.patch('psutil.Process', - side_effect=psutil.NoSuchProcess(os.getpid())): - self.assertEqual(list(psutil.process_iter()), []) - with mock.patch('psutil.Process', - side_effect=psutil.AccessDenied(os.getpid())): - with self.assertRaises(psutil.AccessDenied): - list(psutil.process_iter()) - - def test_prcess_iter_w_params(self): - for p in psutil.process_iter(attrs=['pid']): - self.assertEqual(list(p.info.keys()), ['pid']) - with self.assertRaises(ValueError): - list(psutil.process_iter(attrs=['foo'])) - with mock.patch("psutil._psplatform.Process.cpu_times", - side_effect=psutil.AccessDenied(0, "")) as m: - for p in psutil.process_iter(attrs=["pid", "cpu_times"]): - self.assertIsNone(p.info['cpu_times']) - self.assertGreaterEqual(p.info['pid'], 0) - assert m.called - with mock.patch("psutil._psplatform.Process.cpu_times", - side_effect=psutil.AccessDenied(0, "")) as m: - flag = object() - for p in psutil.process_iter( - attrs=["pid", "cpu_times"], ad_value=flag): - self.assertIs(p.info['cpu_times'], flag) - self.assertGreaterEqual(p.info['pid'], 0) - assert m.called - - def test_wait_procs(self): - def callback(p): - pids.append(p.pid) - - pids = [] - sproc1 = get_test_subprocess() - sproc2 = get_test_subprocess() - sproc3 = get_test_subprocess() - procs = [psutil.Process(x.pid) for x in (sproc1, sproc2, sproc3)] - self.assertRaises(ValueError, psutil.wait_procs, procs, timeout=-1) - self.assertRaises(TypeError, psutil.wait_procs, procs, callback=1) - t = time.time() - gone, alive = psutil.wait_procs(procs, timeout=0.01, callback=callback) - 
- self.assertLess(time.time() - t, 0.5) - self.assertEqual(gone, []) - self.assertEqual(len(alive), 3) - self.assertEqual(pids, []) - for p in alive: - self.assertFalse(hasattr(p, 'returncode')) - - @retry_before_failing(30) - def test(procs, callback): - gone, alive = psutil.wait_procs(procs, timeout=0.03, - callback=callback) - self.assertEqual(len(gone), 1) - self.assertEqual(len(alive), 2) - return gone, alive - - sproc3.terminate() - gone, alive = test(procs, callback) - self.assertIn(sproc3.pid, [x.pid for x in gone]) - if POSIX: - self.assertEqual(gone.pop().returncode, -signal.SIGTERM) - else: - self.assertEqual(gone.pop().returncode, 1) - self.assertEqual(pids, [sproc3.pid]) - for p in alive: - self.assertFalse(hasattr(p, 'returncode')) - - @retry_before_failing(30) - def test(procs, callback): - gone, alive = psutil.wait_procs(procs, timeout=0.03, - callback=callback) - self.assertEqual(len(gone), 3) - self.assertEqual(len(alive), 0) - return gone, alive - - sproc1.terminate() - sproc2.terminate() - gone, alive = test(procs, callback) - self.assertEqual(set(pids), set([sproc1.pid, sproc2.pid, sproc3.pid])) - for p in gone: - self.assertTrue(hasattr(p, 'returncode')) - - def test_wait_procs_no_timeout(self): - sproc1 = get_test_subprocess() - sproc2 = get_test_subprocess() - sproc3 = get_test_subprocess() - procs = [psutil.Process(x.pid) for x in (sproc1, sproc2, sproc3)] - for p in procs: - p.terminate() - gone, alive = psutil.wait_procs(procs) - - def test_boot_time(self): - bt = psutil.boot_time() - self.assertIsInstance(bt, float) - self.assertGreater(bt, 0) - self.assertLess(bt, time.time()) - - @unittest.skipIf(not POSIX, 'POSIX only') - def test_PAGESIZE(self): - # pagesize is used internally to perform different calculations - # and it's determined by using SC_PAGE_SIZE; make sure - # getpagesize() returns the same value. 
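The test_wait_procs cases above validate the gone/alive split that psutil.wait_procs() returns; the terminate-then-reap pattern they exercise looks like the following sketch, assuming a POSIX `sleep` binary on PATH:

import subprocess
import psutil

procs = [psutil.Process(subprocess.Popen(["sleep", "60"]).pid)
         for _ in range(3)]
for p in procs:
    p.terminate()               # polite request first
gone, alive = psutil.wait_procs(procs, timeout=3,
                                callback=lambda p: print(p.pid, "exited"))
for p in alive:                 # stragglers get the hard kill
    p.kill()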
- import resource - self.assertEqual(os.sysconf("SC_PAGE_SIZE"), resource.getpagesize()) - - def test_virtual_memory(self): - mem = psutil.virtual_memory() - assert mem.total > 0, mem - assert mem.available > 0, mem - assert 0 <= mem.percent <= 100, mem - assert mem.used > 0, mem - assert mem.free >= 0, mem - for name in mem._fields: - value = getattr(mem, name) - if name != 'percent': - self.assertIsInstance(value, (int, long)) - if name != 'total': - if not value >= 0: - self.fail("%r < 0 (%s)" % (name, value)) - if value > mem.total: - self.fail("%r > total (total=%s, %s=%s)" - % (name, mem.total, name, value)) - - def test_swap_memory(self): - mem = psutil.swap_memory() - self.assertEqual( - mem._fields, ('total', 'used', 'free', 'percent', 'sin', 'sout')) - - assert mem.total >= 0, mem - assert mem.used >= 0, mem - if mem.total > 0: - # likely a system with no swap partition - assert mem.free > 0, mem - else: - assert mem.free == 0, mem - assert 0 <= mem.percent <= 100, mem - assert mem.sin >= 0, mem - assert mem.sout >= 0, mem - - def test_pid_exists(self): - sproc = get_test_subprocess() - self.assertTrue(psutil.pid_exists(sproc.pid)) - p = psutil.Process(sproc.pid) - p.kill() - p.wait() - self.assertFalse(psutil.pid_exists(sproc.pid)) - self.assertFalse(psutil.pid_exists(-1)) - self.assertEqual(psutil.pid_exists(0), 0 in psutil.pids()) - - def test_pid_exists_2(self): - reap_children() - pids = psutil.pids() - for pid in pids: - try: - assert psutil.pid_exists(pid) - except AssertionError: - # in case the process disappeared in meantime fail only - # if it is no longer in psutil.pids() - time.sleep(.1) - if pid in psutil.pids(): - self.fail(pid) - pids = range(max(pids) + 5000, max(pids) + 6000) - for pid in pids: - self.assertFalse(psutil.pid_exists(pid), msg=pid) - - def test_pids(self): - plist = [x.pid for x in psutil.process_iter()] - pidlist = psutil.pids() - self.assertEqual(plist.sort(), pidlist.sort()) - # make sure every pid is unique - self.assertEqual(len(pidlist), len(set(pidlist))) - - def test_test(self): - # test for psutil.test() function - stdout = sys.stdout - sys.stdout = DEVNULL - try: - psutil.test() - finally: - sys.stdout = stdout - - def test_cpu_count(self): - logical = psutil.cpu_count() - self.assertEqual(logical, len(psutil.cpu_times(percpu=True))) - self.assertGreaterEqual(logical, 1) - # - if os.path.exists("/proc/cpuinfo"): - with open("/proc/cpuinfo") as fd: - cpuinfo_data = fd.read() - if "physical id" not in cpuinfo_data: - raise unittest.SkipTest("cpuinfo doesn't include physical id") - physical = psutil.cpu_count(logical=False) - if WINDOWS and sys.getwindowsversion()[:2] <= (6, 1): # <= Vista - self.assertIsNone(physical) - else: - self.assertGreaterEqual(physical, 1) - self.assertGreaterEqual(logical, physical) - - def test_cpu_count_none(self): - # https://github.com/giampaolo/psutil/issues/1085 - for val in (-1, 0, None): - with mock.patch('psutil._psplatform.cpu_count_logical', - return_value=val) as m: - self.assertIsNone(psutil.cpu_count()) - assert m.called - with mock.patch('psutil._psplatform.cpu_count_physical', - return_value=val) as m: - self.assertIsNone(psutil.cpu_count(logical=False)) - assert m.called - - def test_cpu_times(self): - # Check type, value >= 0, str(). 
-        total = 0
-        times = psutil.cpu_times()
-        sum(times)
-        for cp_time in times:
-            self.assertIsInstance(cp_time, float)
-            self.assertGreaterEqual(cp_time, 0.0)
-            total += cp_time
-        self.assertEqual(total, sum(times))
-        str(times)
-        # CPU times are always supposed to increase over time
-        # or at least remain the same, because time cannot
-        # go backwards.
-        # Surprisingly sometimes this might not be the case (at
-        # least on Windows and Linux), see:
-        # https://github.com/giampaolo/psutil/issues/392
-        # https://github.com/giampaolo/psutil/issues/645
-        # if not WINDOWS:
-        #     last = psutil.cpu_times()
-        #     for x in range(100):
-        #         new = psutil.cpu_times()
-        #         for field in new._fields:
-        #             new_t = getattr(new, field)
-        #             last_t = getattr(last, field)
-        #             self.assertGreaterEqual(new_t, last_t,
-        #                                     msg="%s %s" % (new_t, last_t))
-        #         last = new
-
-    def test_cpu_times_time_increases(self):
-        # Make sure time increases between calls.
-        t1 = sum(psutil.cpu_times())
-        time.sleep(0.1)
-        t2 = sum(psutil.cpu_times())
-        difference = t2 - t1
-        if not difference >= 0.05:
-            self.fail("difference %s" % difference)
-
-    def test_per_cpu_times(self):
-        # Check type, value >= 0, str().
-        for times in psutil.cpu_times(percpu=True):
-            total = 0
-            sum(times)
-            for cp_time in times:
-                self.assertIsInstance(cp_time, float)
-                self.assertGreaterEqual(cp_time, 0.0)
-                total += cp_time
-            self.assertEqual(total, sum(times))
-            str(times)
-        self.assertEqual(len(psutil.cpu_times(percpu=True)[0]),
-                         len(psutil.cpu_times(percpu=False)))
-
-        # Note: in theory CPU times are always supposed to increase over
-        # time or remain the same but never go backwards. In practice
-        # sometimes this is not the case.
-        # This issue seemed to afflict Windows:
-        # https://github.com/giampaolo/psutil/issues/392
-        # ...but it turns out also Linux (rarely) behaves the same.
-        # last = psutil.cpu_times(percpu=True)
-        # for x in range(100):
-        #     new = psutil.cpu_times(percpu=True)
-        #     for index in range(len(new)):
-        #         newcpu = new[index]
-        #         lastcpu = last[index]
-        #         for field in newcpu._fields:
-        #             new_t = getattr(newcpu, field)
-        #             last_t = getattr(lastcpu, field)
-        #             self.assertGreaterEqual(
-        #                 new_t, last_t, msg="%s %s" % (lastcpu, newcpu))
-        #     last = new
-
-    def test_per_cpu_times_2(self):
-        # Simulate some workload, then make sure times have increased
-        # between calls.
-        tot1 = psutil.cpu_times(percpu=True)
-        stop_at = time.time() + 0.1
-        while True:
-            if time.time() >= stop_at:
-                break
-        tot2 = psutil.cpu_times(percpu=True)
-        for t1, t2 in zip(tot1, tot2):
-            t1, t2 = sum(t1), sum(t2)
-            difference = t2 - t1
-            if difference >= 0.05:
-                return
-        self.fail()
-
-    def test_cpu_times_comparison(self):
-        # Make sure the sum of all per cpu times is almost equal to
-        # base "one cpu" times.
- base = psutil.cpu_times() - per_cpu = psutil.cpu_times(percpu=True) - summed_values = base._make([sum(num) for num in zip(*per_cpu)]) - for field in base._fields: - self.assertAlmostEqual( - getattr(base, field), getattr(summed_values, field), delta=1) - - def _test_cpu_percent(self, percent, last_ret, new_ret): - try: - self.assertIsInstance(percent, float) - self.assertGreaterEqual(percent, 0.0) - self.assertIsNot(percent, -0.0) - self.assertLessEqual(percent, 100.0 * psutil.cpu_count()) - except AssertionError as err: - raise AssertionError("\n%s\nlast=%s\nnew=%s" % ( - err, pprint.pformat(last_ret), pprint.pformat(new_ret))) - - def test_cpu_percent(self): - last = psutil.cpu_percent(interval=0.001) - for x in range(100): - new = psutil.cpu_percent(interval=None) - self._test_cpu_percent(new, last, new) - last = new - with self.assertRaises(ValueError): - psutil.cpu_percent(interval=-1) - - def test_per_cpu_percent(self): - last = psutil.cpu_percent(interval=0.001, percpu=True) - self.assertEqual(len(last), psutil.cpu_count()) - for x in range(100): - new = psutil.cpu_percent(interval=None, percpu=True) - for percent in new: - self._test_cpu_percent(percent, last, new) - last = new - with self.assertRaises(ValueError): - psutil.cpu_percent(interval=-1, percpu=True) - - def test_cpu_times_percent(self): - last = psutil.cpu_times_percent(interval=0.001) - for x in range(100): - new = psutil.cpu_times_percent(interval=None) - for percent in new: - self._test_cpu_percent(percent, last, new) - self._test_cpu_percent(sum(new), last, new) - last = new - - def test_per_cpu_times_percent(self): - last = psutil.cpu_times_percent(interval=0.001, percpu=True) - self.assertEqual(len(last), psutil.cpu_count()) - for x in range(100): - new = psutil.cpu_times_percent(interval=None, percpu=True) - for cpu in new: - for percent in cpu: - self._test_cpu_percent(percent, last, new) - self._test_cpu_percent(sum(cpu), last, new) - last = new - - def test_per_cpu_times_percent_negative(self): - # see: https://github.com/giampaolo/psutil/issues/645 - psutil.cpu_times_percent(percpu=True) - zero_times = [x._make([0 for x in range(len(x._fields))]) - for x in psutil.cpu_times(percpu=True)] - with mock.patch('psutil.cpu_times', return_value=zero_times): - for cpu in psutil.cpu_times_percent(percpu=True): - for percent in cpu: - self._test_cpu_percent(percent, None, None) - - def test_disk_usage(self): - usage = psutil.disk_usage(os.getcwd()) - self.assertEqual(usage._fields, ('total', 'used', 'free', 'percent')) - - assert usage.total > 0, usage - assert usage.used > 0, usage - assert usage.free > 0, usage - assert usage.total > usage.used, usage - assert usage.total > usage.free, usage - assert 0 <= usage.percent <= 100, usage.percent - if hasattr(shutil, 'disk_usage'): - # py >= 3.3, see: http://bugs.python.org/issue12442 - shutil_usage = shutil.disk_usage(os.getcwd()) - tolerance = 5 * 1024 * 1024 # 5MB - self.assertEqual(usage.total, shutil_usage.total) - self.assertAlmostEqual(usage.free, shutil_usage.free, - delta=tolerance) - self.assertAlmostEqual(usage.used, shutil_usage.used, - delta=tolerance) - - # if path does not exist OSError ENOENT is expected across - # all platforms - fname = tempfile.mktemp() - with self.assertRaises(OSError) as exc: - psutil.disk_usage(fname) - self.assertEqual(exc.exception.errno, errno.ENOENT) - - def test_disk_usage_unicode(self): - # See: https://github.com/giampaolo/psutil/issues/416 - if ASCII_FS: - with self.assertRaises(UnicodeEncodeError): - 
psutil.disk_usage(TESTFN_UNICODE) - - def test_disk_usage_bytes(self): - psutil.disk_usage(b'.') - - def test_disk_partitions(self): - # all = False - ls = psutil.disk_partitions(all=False) - # on travis we get: - # self.assertEqual(p.cpu_affinity(), [n]) - # AssertionError: Lists differ: [0, 1, 2, 3, 4, 5, 6, 7,... != [0] - self.assertTrue(ls, msg=ls) - for disk in ls: - self.assertIsInstance(disk.device, str) - self.assertIsInstance(disk.mountpoint, str) - self.assertIsInstance(disk.fstype, str) - self.assertIsInstance(disk.opts, str) - if WINDOWS and 'cdrom' in disk.opts: - continue - if not POSIX: - assert os.path.exists(disk.device), disk - else: - # we cannot make any assumption about this, see: - # http://goo.gl/p9c43 - disk.device - if SUNOS or TRAVIS: - # on solaris apparently mount points can also be files - assert os.path.exists(disk.mountpoint), disk - else: - assert os.path.isdir(disk.mountpoint), disk - assert disk.fstype, disk - - # all = True - ls = psutil.disk_partitions(all=True) - self.assertTrue(ls, msg=ls) - for disk in psutil.disk_partitions(all=True): - if not WINDOWS: - try: - os.stat(disk.mountpoint) - except OSError as err: - if TRAVIS and MACOS and err.errno == errno.EIO: - continue - # http://mail.python.org/pipermail/python-dev/ - # 2012-June/120787.html - if err.errno not in (errno.EPERM, errno.EACCES): - raise - else: - if SUNOS or TRAVIS: - # on solaris apparently mount points can also be files - assert os.path.exists(disk.mountpoint), disk - else: - assert os.path.isdir(disk.mountpoint), disk - self.assertIsInstance(disk.fstype, str) - self.assertIsInstance(disk.opts, str) - - def find_mount_point(path): - path = os.path.abspath(path) - while not os.path.ismount(path): - path = os.path.dirname(path) - return path.lower() - - mount = find_mount_point(__file__) - mounts = [x.mountpoint.lower() for x in - psutil.disk_partitions(all=True)] - self.assertIn(mount, mounts) - psutil.disk_usage(mount) - - def test_net_io_counters(self): - def check_ntuple(nt): - self.assertEqual(nt[0], nt.bytes_sent) - self.assertEqual(nt[1], nt.bytes_recv) - self.assertEqual(nt[2], nt.packets_sent) - self.assertEqual(nt[3], nt.packets_recv) - self.assertEqual(nt[4], nt.errin) - self.assertEqual(nt[5], nt.errout) - self.assertEqual(nt[6], nt.dropin) - self.assertEqual(nt[7], nt.dropout) - assert nt.bytes_sent >= 0, nt - assert nt.bytes_recv >= 0, nt - assert nt.packets_sent >= 0, nt - assert nt.packets_recv >= 0, nt - assert nt.errin >= 0, nt - assert nt.errout >= 0, nt - assert nt.dropin >= 0, nt - assert nt.dropout >= 0, nt - - ret = psutil.net_io_counters(pernic=False) - check_ntuple(ret) - ret = psutil.net_io_counters(pernic=True) - self.assertNotEqual(ret, []) - for key in ret: - self.assertTrue(key) - self.assertIsInstance(key, str) - check_ntuple(ret[key]) - - def test_net_io_counters_no_nics(self): - # Emulate a case where no NICs are installed, see: - # https://github.com/giampaolo/psutil/issues/1062 - with mock.patch('psutil._psplatform.net_io_counters', - return_value={}) as m: - self.assertIsNone(psutil.net_io_counters(pernic=False)) - self.assertEqual(psutil.net_io_counters(pernic=True), {}) - assert m.called - - def test_net_if_addrs(self): - nics = psutil.net_if_addrs() - assert nics, nics - - nic_stats = psutil.net_if_stats() - - # Not reliable on all platforms (net_if_addrs() reports more - # interfaces). 
- # self.assertEqual(sorted(nics.keys()), - # sorted(psutil.net_io_counters(pernic=True).keys())) - - families = set([socket.AF_INET, socket.AF_INET6, psutil.AF_LINK]) - for nic, addrs in nics.items(): - self.assertIsInstance(nic, str) - self.assertEqual(len(set(addrs)), len(addrs)) - for addr in addrs: - self.assertIsInstance(addr.family, int) - self.assertIsInstance(addr.address, str) - self.assertIsInstance(addr.netmask, (str, type(None))) - self.assertIsInstance(addr.broadcast, (str, type(None))) - self.assertIn(addr.family, families) - if sys.version_info >= (3, 4): - self.assertIsInstance(addr.family, enum.IntEnum) - if nic_stats[nic].isup: - # Do not test binding to addresses of interfaces - # that are down - if addr.family == socket.AF_INET: - s = socket.socket(addr.family) - with contextlib.closing(s): - s.bind((addr.address, 0)) - elif addr.family == socket.AF_INET6: - info = socket.getaddrinfo( - addr.address, 0, socket.AF_INET6, - socket.SOCK_STREAM, 0, socket.AI_PASSIVE)[0] - af, socktype, proto, canonname, sa = info - s = socket.socket(af, socktype, proto) - with contextlib.closing(s): - s.bind(sa) - for ip in (addr.address, addr.netmask, addr.broadcast, - addr.ptp): - if ip is not None: - # TODO: skip AF_INET6 for now because I get: - # AddressValueError: Only hex digits permitted in - # u'c6f3%lxcbr0' in u'fe80::c8e0:fff:fe54:c6f3%lxcbr0' - if addr.family != socket.AF_INET6: - check_net_address(ip, addr.family) - # broadcast and ptp addresses are mutually exclusive - if addr.broadcast: - self.assertIsNone(addr.ptp) - elif addr.ptp: - self.assertIsNone(addr.broadcast) - - if BSD or MACOS or SUNOS: - if hasattr(socket, "AF_LINK"): - self.assertEqual(psutil.AF_LINK, socket.AF_LINK) - elif LINUX: - self.assertEqual(psutil.AF_LINK, socket.AF_PACKET) - elif WINDOWS: - self.assertEqual(psutil.AF_LINK, -1) - - def test_net_if_addrs_mac_null_bytes(self): - # Simulate that the underlying C function returns an incomplete - # MAC address. psutil is supposed to fill it with null bytes. 
-        # https://github.com/giampaolo/psutil/issues/786
-        if POSIX:
-            ret = [('em1', psutil.AF_LINK, '06:3d:29', None, None, None)]
-        else:
-            ret = [('em1', -1, '06-3d-29', None, None, None)]
-        with mock.patch('psutil._psplatform.net_if_addrs',
-                        return_value=ret) as m:
-            addr = psutil.net_if_addrs()['em1'][0]
-            assert m.called
-            if POSIX:
-                self.assertEqual(addr.address, '06:3d:29:00:00:00')
-            else:
-                self.assertEqual(addr.address, '06-3d-29-00-00-00')
-
-    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")  # raises EPERM
-    def test_net_if_stats(self):
-        nics = psutil.net_if_stats()
-        assert nics, nics
-        all_duplexes = (psutil.NIC_DUPLEX_FULL,
-                        psutil.NIC_DUPLEX_HALF,
-                        psutil.NIC_DUPLEX_UNKNOWN)
-        for name, stats in nics.items():
-            self.assertIsInstance(name, str)
-            isup, duplex, speed, mtu = stats
-            self.assertIsInstance(isup, bool)
-            self.assertIn(duplex, all_duplexes)
-            self.assertGreaterEqual(speed, 0)
-            self.assertGreaterEqual(mtu, 0)
-
-    @unittest.skipIf(not (LINUX or BSD or MACOS),
-                     "LINUX or BSD or MACOS specific")
-    def test_net_if_stats_enodev(self):
-        # See: https://github.com/giampaolo/psutil/issues/1279
-        with mock.patch('psutil._psutil_posix.net_if_mtu',
-                        side_effect=OSError(errno.ENODEV, "")) as m:
-            ret = psutil.net_if_stats()
-            self.assertEqual(ret, {})
-        assert m.called
-
-    @unittest.skipIf(LINUX and not os.path.exists('/proc/diskstats'),
-                     '/proc/diskstats not available on this linux version')
-    @unittest.skipIf(APPVEYOR and psutil.disk_io_counters() is None,
-                     "unreliable on APPVEYOR")  # no visible disks
-    def test_disk_io_counters(self):
-        def check_ntuple(nt):
-            self.assertEqual(nt[0], nt.read_count)
-            self.assertEqual(nt[1], nt.write_count)
-            self.assertEqual(nt[2], nt.read_bytes)
-            self.assertEqual(nt[3], nt.write_bytes)
-            if not (OPENBSD or NETBSD):
-                self.assertEqual(nt[4], nt.read_time)
-                self.assertEqual(nt[5], nt.write_time)
-                if LINUX:
-                    self.assertEqual(nt[6], nt.read_merged_count)
-                    self.assertEqual(nt[7], nt.write_merged_count)
-                    self.assertEqual(nt[8], nt.busy_time)
-                elif FREEBSD:
-                    self.assertEqual(nt[6], nt.busy_time)
-            for name in nt._fields:
-                assert getattr(nt, name) >= 0, nt
-
-        ret = psutil.disk_io_counters(perdisk=False)
-        assert ret is not None, "no disks on this system?"
-        check_ntuple(ret)
-        ret = psutil.disk_io_counters(perdisk=True)
-        # make sure there are no duplicates
-        self.assertEqual(len(ret), len(set(ret)))
-        for key in ret:
-            assert key, key
-            check_ntuple(ret[key])
-
-    def test_disk_io_counters_no_disks(self):
-        # Emulate a case where no disks are installed, see:
-        # https://github.com/giampaolo/psutil/issues/1062
-        with mock.patch('psutil._psplatform.disk_io_counters',
-                        return_value={}) as m:
-            self.assertIsNone(psutil.disk_io_counters(perdisk=False))
-            self.assertEqual(psutil.disk_io_counters(perdisk=True), {})
-            assert m.called
-
-    # can't find users on APPVEYOR or TRAVIS
-    @unittest.skipIf(APPVEYOR or TRAVIS and not psutil.users(),
-                     "unreliable on APPVEYOR or TRAVIS")
-    def test_users(self):
-        users = psutil.users()
-        self.assertNotEqual(users, [])
-        for user in users:
-            assert user.name, user
-            self.assertIsInstance(user.name, str)
-            self.assertIsInstance(user.terminal, (str, type(None)))
-            if user.host is not None:
-                self.assertIsInstance(user.host, (str, type(None)))
-            user.terminal
-            user.host
-            assert user.started > 0.0, user
-            datetime.datetime.fromtimestamp(user.started)
-            if WINDOWS or OPENBSD:
-                self.assertIsNone(user.pid)
-            else:
-                psutil.Process(user.pid)
-
-    def test_cpu_stats(self):
-        # Tested more extensively in per-platform test modules.
-        infos = psutil.cpu_stats()
-        self.assertEqual(
-            infos._fields,
-            ('ctx_switches', 'interrupts', 'soft_interrupts', 'syscalls'))
-        for name in infos._fields:
-            value = getattr(infos, name)
-            self.assertGreaterEqual(value, 0)
-            # on AIX, ctx_switches is always 0
-            if not AIX and name in ('ctx_switches', 'interrupts'):
-                self.assertGreater(value, 0)
-
-    @unittest.skipIf(not HAS_CPU_FREQ, "not supported")
-    def test_cpu_freq(self):
-        def check_ls(ls):
-            for nt in ls:
-                self.assertEqual(nt._fields, ('current', 'min', 'max'))
-                self.assertLessEqual(nt.current, nt.max)
-                for name in nt._fields:
-                    value = getattr(nt, name)
-                    self.assertIsInstance(value, (int, long, float))
-                    self.assertGreaterEqual(value, 0)
-
-        ls = psutil.cpu_freq(percpu=True)
-        if TRAVIS and not ls:
-            return
-
-        assert ls, ls
-        check_ls([psutil.cpu_freq(percpu=False)])
-
-        if LINUX:
-            self.assertEqual(len(ls), psutil.cpu_count())
-
-    def test_os_constants(self):
-        names = ["POSIX", "WINDOWS", "LINUX", "MACOS", "FREEBSD", "OPENBSD",
-                 "NETBSD", "BSD", "SUNOS"]
-        for name in names:
-            self.assertIsInstance(getattr(psutil, name), bool, msg=name)
-
-        if os.name == 'posix':
-            assert psutil.POSIX
-            assert not psutil.WINDOWS
-            names.remove("POSIX")
-            if "linux" in sys.platform.lower():
-                assert psutil.LINUX
-                names.remove("LINUX")
-            elif "bsd" in sys.platform.lower():
-                assert psutil.BSD
-                self.assertEqual([psutil.FREEBSD, psutil.OPENBSD,
-                                  psutil.NETBSD].count(True), 1)
-                names.remove("BSD")
-                names.remove("FREEBSD")
-                names.remove("OPENBSD")
-                names.remove("NETBSD")
-            elif "sunos" in sys.platform.lower() or \
-                    "solaris" in sys.platform.lower():
-                assert psutil.SUNOS
-                names.remove("SUNOS")
-            elif "darwin" in sys.platform.lower():
-                assert psutil.MACOS
-                names.remove("MACOS")
-        else:
-            assert psutil.WINDOWS
-            assert not psutil.POSIX
-            names.remove("WINDOWS")
-
-        # assert all other constants are set to False
-        for name in names:
-            self.assertIs(getattr(psutil, name), False, msg=name)
-
-    @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported")
-    def test_sensors_temperatures(self):
-        temps = psutil.sensors_temperatures()
-        for name, entries in temps.items():
-            self.assertIsInstance(name, str)
-            for entry in entries:
-                self.assertIsInstance(entry.label, str)
-                if entry.current is not None:
-                    self.assertGreaterEqual(entry.current, 0)
-                if entry.high is not None:
-                    self.assertGreaterEqual(entry.high, 0)
-                if entry.critical is not None:
-                    self.assertGreaterEqual(entry.critical, 0)
-
-    @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported")
-    def test_sensors_temperatures_fahrenheit(self):
-        d = {'coretemp': [('label', 50.0, 60.0, 70.0)]}
-        with mock.patch("psutil._psplatform.sensors_temperatures",
-                        return_value=d) as m:
-            temps = psutil.sensors_temperatures(
-                fahrenheit=True)['coretemp'][0]
-            assert m.called
-            self.assertEqual(temps.current, 122.0)
-            self.assertEqual(temps.high, 140.0)
-            self.assertEqual(temps.critical, 158.0)
-
-    @unittest.skipIf(not HAS_SENSORS_BATTERY, "not supported")
-    @unittest.skipIf(not HAS_BATTERY, "no battery")
-    def test_sensors_battery(self):
-        ret = psutil.sensors_battery()
-        self.assertGreaterEqual(ret.percent, 0)
-        self.assertLessEqual(ret.percent, 100)
-        if ret.secsleft not in (psutil.POWER_TIME_UNKNOWN,
-                                psutil.POWER_TIME_UNLIMITED):
-            self.assertGreaterEqual(ret.secsleft, 0)
-        else:
-            if ret.secsleft == psutil.POWER_TIME_UNLIMITED:
-                self.assertTrue(ret.power_plugged)
-        self.assertIsInstance(ret.power_plugged, bool)
-
-    @unittest.skipIf(not HAS_SENSORS_FANS, "not supported")
-    def test_sensors_fans(self):
-        fans = psutil.sensors_fans()
-        for name, entries in fans.items():
-            self.assertIsInstance(name, str)
-            for entry in entries:
-                self.assertIsInstance(entry.label, str)
-                self.assertIsInstance(entry.current, (int, long))
-                self.assertGreaterEqual(entry.current, 0)
-
-
-if __name__ == '__main__':
-    run_test_module_by_name(__file__)
diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_unicode.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_unicode.py
deleted file mode 100644
index 4144b5c..0000000
--- a/server/www/packages/packages-darwin/x64/psutil/tests/test_unicode.py
+++ /dev/null
@@ -1,366 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Notes about unicode handling in psutil
-======================================
-
-In psutil these are the APIs returning or dealing with a string
-('not tested' means they are not tested to deal with non-ASCII strings):
-
-* Process.cmdline()
-* Process.connections('unix')
-* Process.cwd()
-* Process.environ()
-* Process.exe()
-* Process.memory_maps()
-* Process.name()
-* Process.open_files()
-* Process.username() (not tested)
-
-* disk_io_counters() (not tested)
-* disk_partitions() (not tested)
-* disk_usage(str)
-* net_connections('unix')
-* net_if_addrs() (not tested)
-* net_if_stats() (not tested)
-* net_io_counters() (not tested)
-* sensors_fans() (not tested)
-* sensors_temperatures() (not tested)
-* users() (not tested)
-
-* WindowsService.binpath() (not tested)
-* WindowsService.description() (not tested)
-* WindowsService.display_name() (not tested)
-* WindowsService.name() (not tested)
-* WindowsService.status() (not tested)
-* WindowsService.username() (not tested)
-
-In here we create a unicode path with a funky non-ASCII name and (where
-possible) make psutil return it back (e.g. on name(), exe(), open_files(),
-etc.) and make sure that:
-
-* psutil never crashes with UnicodeDecodeError
-* the returned path matches
-
-For a detailed explanation of how psutil handles unicode see:
-- https://github.com/giampaolo/psutil/issues/1040
-- http://psutil.readthedocs.io/#unicode
-"""
-
-import os
-import traceback
-import warnings
-from contextlib import closing
-
-from psutil import BSD
-from psutil import MACOS
-from psutil import OPENBSD
-from psutil import POSIX
-from psutil import WINDOWS
-from psutil._compat import PY3
-from psutil._compat import u
-from psutil.tests import APPVEYOR
-from psutil.tests import ASCII_FS
-from psutil.tests import bind_unix_socket
-from psutil.tests import chdir
-from psutil.tests import copyload_shared_lib
-from psutil.tests import create_exe
-from psutil.tests import get_test_subprocess
-from psutil.tests import HAS_CONNECTIONS_UNIX
-from psutil.tests import HAS_ENVIRON
-from psutil.tests import HAS_MEMORY_MAPS
-from psutil.tests import mock
-from psutil.tests import reap_children
-from psutil.tests import run_test_module_by_name
-from psutil.tests import safe_mkdir
-from psutil.tests import safe_rmpath as _safe_rmpath
-from psutil.tests import skip_on_access_denied
-from psutil.tests import TESTFILE_PREFIX
-from psutil.tests import TESTFN
-from psutil.tests import TESTFN_UNICODE
-from psutil.tests import TRAVIS
-from psutil.tests import unittest
-from psutil.tests import unix_socket_path
-import psutil
-
-
-def safe_rmpath(path):
-    if APPVEYOR:
-        # TODO - this is quite random and I'm not sure why it happens,
-        # nor can I reproduce it locally:
-        # https://ci.appveyor.com/project/giampaolo/psutil/build/job/
-        #     jiq2cgd6stsbtn60
-        # safe_rmpath() happens after reap_children() so this is weird
-        # Perhaps wait_procs() on Windows is broken? Maybe because
-        # of STILL_ACTIVE?
-        # https://github.com/giampaolo/psutil/blob/
-        #     68c7a70728a31d8b8b58f4be6c4c0baa2f449eda/psutil/arch/
-        #     windows/process_info.c#L146
-        try:
-            return _safe_rmpath(path)
-        except WindowsError:
-            traceback.print_exc()
-    else:
-        return _safe_rmpath(path)
-
-
-def subprocess_supports_unicode(name):
-    """Return True if both the fs and the subprocess module can
-    deal with a unicode file name.
-    """
-    if PY3:
-        return True
-    try:
-        safe_rmpath(name)
-        create_exe(name)
-        get_test_subprocess(cmd=[name])
-    except UnicodeEncodeError:
-        return False
-    else:
-        return True
-    finally:
-        reap_children()
-
-
-# An invalid unicode string.
-if PY3: - INVALID_NAME = (TESTFN.encode('utf8') + b"f\xc0\x80").decode( - 'utf8', 'surrogateescape') -else: - INVALID_NAME = TESTFN + "f\xc0\x80" - - -# =================================================================== -# FS APIs -# =================================================================== - - -class _BaseFSAPIsTests(object): - funky_name = None - - @classmethod - def setUpClass(cls): - safe_rmpath(cls.funky_name) - create_exe(cls.funky_name) - - @classmethod - def tearDownClass(cls): - reap_children() - safe_rmpath(cls.funky_name) - - def tearDown(self): - reap_children() - - def expect_exact_path_match(self): - raise NotImplementedError("must be implemented in subclass") - - def test_proc_exe(self): - subp = get_test_subprocess(cmd=[self.funky_name]) - p = psutil.Process(subp.pid) - exe = p.exe() - self.assertIsInstance(exe, str) - if self.expect_exact_path_match(): - self.assertEqual(exe, self.funky_name) - - def test_proc_name(self): - subp = get_test_subprocess(cmd=[self.funky_name]) - if WINDOWS: - # On Windows name() is determined from exe() first, because - # it's faster; we want to overcome the internal optimization - # and test name() instead of exe(). - with mock.patch("psutil._psplatform.cext.proc_exe", - side_effect=psutil.AccessDenied(os.getpid())) as m: - name = psutil.Process(subp.pid).name() - assert m.called - else: - name = psutil.Process(subp.pid).name() - self.assertIsInstance(name, str) - if self.expect_exact_path_match(): - self.assertEqual(name, os.path.basename(self.funky_name)) - - def test_proc_cmdline(self): - subp = get_test_subprocess(cmd=[self.funky_name]) - p = psutil.Process(subp.pid) - cmdline = p.cmdline() - for part in cmdline: - self.assertIsInstance(part, str) - if self.expect_exact_path_match(): - self.assertEqual(cmdline, [self.funky_name]) - - def test_proc_cwd(self): - dname = self.funky_name + "2" - self.addCleanup(safe_rmpath, dname) - safe_mkdir(dname) - with chdir(dname): - p = psutil.Process() - cwd = p.cwd() - self.assertIsInstance(p.cwd(), str) - if self.expect_exact_path_match(): - self.assertEqual(cwd, dname) - - def test_proc_open_files(self): - p = psutil.Process() - start = set(p.open_files()) - with open(self.funky_name, 'rb'): - new = set(p.open_files()) - path = (new - start).pop().path - self.assertIsInstance(path, str) - if BSD and not path: - # XXX - see https://github.com/giampaolo/psutil/issues/595 - return self.skipTest("open_files on BSD is broken") - if self.expect_exact_path_match(): - self.assertEqual(os.path.normcase(path), - os.path.normcase(self.funky_name)) - - @unittest.skipIf(not POSIX, "POSIX only") - def test_proc_connections(self): - suffix = os.path.basename(self.funky_name) - with unix_socket_path(suffix=suffix) as name: - try: - sock = bind_unix_socket(name) - except UnicodeEncodeError: - if PY3: - raise - else: - raise unittest.SkipTest("not supported") - with closing(sock): - conn = psutil.Process().connections('unix')[0] - self.assertIsInstance(conn.laddr, str) - # AF_UNIX addr not set on OpenBSD - if not OPENBSD: - self.assertEqual(conn.laddr, name) - - @unittest.skipIf(not POSIX, "POSIX only") - @unittest.skipIf(not HAS_CONNECTIONS_UNIX, "can't list UNIX sockets") - @skip_on_access_denied() - def test_net_connections(self): - def find_sock(cons): - for conn in cons: - if os.path.basename(conn.laddr).startswith(TESTFILE_PREFIX): - return conn - raise ValueError("connection not found") - - suffix = os.path.basename(self.funky_name) - with unix_socket_path(suffix=suffix) as name: - try: - sock = 
bind_unix_socket(name) - except UnicodeEncodeError: - if PY3: - raise - else: - raise unittest.SkipTest("not supported") - with closing(sock): - cons = psutil.net_connections(kind='unix') - # AF_UNIX addr not set on OpenBSD - if not OPENBSD: - conn = find_sock(cons) - self.assertIsInstance(conn.laddr, str) - self.assertEqual(conn.laddr, name) - - def test_disk_usage(self): - dname = self.funky_name + "2" - self.addCleanup(safe_rmpath, dname) - safe_mkdir(dname) - psutil.disk_usage(dname) - - @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported") - @unittest.skipIf(not PY3, "ctypes does not support unicode on PY2") - def test_memory_maps(self): - # XXX: on Python 2, using ctypes.CDLL with a unicode path - # opens a message box which blocks the test run. - with copyload_shared_lib(dst_prefix=self.funky_name) as funky_path: - def normpath(p): - return os.path.realpath(os.path.normcase(p)) - libpaths = [normpath(x.path) - for x in psutil.Process().memory_maps()] - # ...just to have a clearer msg in case of failure - libpaths = [x for x in libpaths if TESTFILE_PREFIX in x] - self.assertIn(normpath(funky_path), libpaths) - for path in libpaths: - self.assertIsInstance(path, str) - - -@unittest.skipIf(MACOS and TRAVIS, "unreliable on TRAVIS") # TODO -@unittest.skipIf(ASCII_FS, "ASCII fs") -@unittest.skipIf(not subprocess_supports_unicode(TESTFN_UNICODE), - "subprocess can't deal with unicode") -class TestFSAPIs(_BaseFSAPIsTests, unittest.TestCase): - """Test FS APIs with a funky, valid, UTF8 path name.""" - funky_name = TESTFN_UNICODE - - @classmethod - def expect_exact_path_match(cls): - # Do not expect psutil to correctly handle unicode paths on - # Python 2 if os.listdir() is not able either. - if PY3: - return True - else: - here = '.' if isinstance(cls.funky_name, str) else u('.') - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - return cls.funky_name in os.listdir(here) - - -@unittest.skipIf(MACOS and TRAVIS, "unreliable on TRAVIS") # TODO -@unittest.skipIf(not subprocess_supports_unicode(INVALID_NAME), - "subprocess can't deal with invalid unicode") -class TestFSAPIsWithInvalidPath(_BaseFSAPIsTests, unittest.TestCase): - """Test FS APIs with a funky, invalid path name.""" - funky_name = INVALID_NAME - - @classmethod - def expect_exact_path_match(cls): - # Invalid unicode names are supposed to work on Python 2. - return True - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class TestWinProcessName(unittest.TestCase): - - def test_name_type(self): - # On Windows name() is determined from exe() first, because - # it's faster; we want to overcome the internal optimization - # and test name() instead of exe(). - with mock.patch("psutil._psplatform.cext.proc_exe", - side_effect=psutil.AccessDenied(os.getpid())) as m: - self.assertIsInstance(psutil.Process().name(), str) - assert m.called - - -# =================================================================== -# Non fs APIs -# =================================================================== - - -class TestNonFSAPIS(unittest.TestCase): - """Unicode tests for non fs-related APIs.""" - - def tearDown(self): - reap_children() - - @unittest.skipIf(not HAS_ENVIRON, "not supported") - def test_proc_environ(self): - # Note: differently from others, this test does not deal - # with fs paths. On Python 2 subprocess module is broken as - # it's not able to handle with non-ASCII env vars, so - # we use "è", which is part of the extended ASCII table - # (unicode point <= 255). 
- env = os.environ.copy() - funky_str = TESTFN_UNICODE if PY3 else 'è' - env['FUNNY_ARG'] = funky_str - sproc = get_test_subprocess(env=env) - p = psutil.Process(sproc.pid) - env = p.environ() - for k, v in env.items(): - self.assertIsInstance(k, str) - self.assertIsInstance(v, str) - self.assertEqual(env['FUNNY_ARG'], funky_str) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-darwin/x64/psutil/tests/test_windows.py b/server/www/packages/packages-darwin/x64/psutil/tests/test_windows.py deleted file mode 100644 index ffa763d..0000000 --- a/server/www/packages/packages-darwin/x64/psutil/tests/test_windows.py +++ /dev/null @@ -1,859 +0,0 @@ -#!/usr/bin/env python -# -*- coding: UTF-8 -* - -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Windows specific tests.""" - -import datetime -import errno -import glob -import os -import platform -import re -import signal -import subprocess -import sys -import time -import warnings - -import psutil -from psutil import WINDOWS -from psutil.tests import APPVEYOR -from psutil.tests import get_test_subprocess -from psutil.tests import HAS_BATTERY -from psutil.tests import mock -from psutil.tests import reap_children -from psutil.tests import retry_before_failing -from psutil.tests import run_test_module_by_name -from psutil.tests import sh -from psutil.tests import unittest - -with warnings.catch_warnings(): - warnings.simplefilter("ignore") - try: - import win32api # requires "pip install pypiwin32" - import win32con - import win32process - import wmi # requires "pip install wmi" / "make setup-dev-env" - except ImportError: - if os.name == 'nt': - raise - - -cext = psutil._psplatform.cext - -# are we a 64 bit process -IS_64_BIT = sys.maxsize > 2**32 - - -def wrap_exceptions(fun): - def wrapper(self, *args, **kwargs): - try: - return fun(self, *args, **kwargs) - except OSError as err: - from psutil._pswindows import ACCESS_DENIED_SET - if err.errno in ACCESS_DENIED_SET: - raise psutil.AccessDenied(None, None) - if err.errno == errno.ESRCH: - raise psutil.NoSuchProcess(None, None) - raise - return wrapper - - -# =================================================================== -# System APIs -# =================================================================== - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class TestCpuAPIs(unittest.TestCase): - - @unittest.skipIf('NUMBER_OF_PROCESSORS' not in os.environ, - 'NUMBER_OF_PROCESSORS env var is not available') - def test_cpu_count_vs_NUMBER_OF_PROCESSORS(self): - # Will likely fail on many-cores systems: - # https://stackoverflow.com/questions/31209256 - num_cpus = int(os.environ['NUMBER_OF_PROCESSORS']) - self.assertEqual(num_cpus, psutil.cpu_count()) - - def test_cpu_count_vs_GetSystemInfo(self): - # Will likely fail on many-cores systems: - # https://stackoverflow.com/questions/31209256 - sys_value = win32api.GetSystemInfo()[5] - psutil_value = psutil.cpu_count() - self.assertEqual(sys_value, psutil_value) - - def test_cpu_count_logical_vs_wmi(self): - w = wmi.WMI() - proc = w.Win32_Processor()[0] - self.assertEqual(psutil.cpu_count(), proc.NumberOfLogicalProcessors) - - def test_cpu_count_phys_vs_wmi(self): - w = wmi.WMI() - proc = w.Win32_Processor()[0] - self.assertEqual(psutil.cpu_count(logical=False), proc.NumberOfCores) - - def test_cpu_count_vs_cpu_times(self): - self.assertEqual(psutil.cpu_count(), - 
len(psutil.cpu_times(percpu=True)))
-
-    def test_cpu_freq(self):
-        w = wmi.WMI()
-        proc = w.Win32_Processor()[0]
-        self.assertEqual(proc.CurrentClockSpeed, psutil.cpu_freq().current)
-        self.assertEqual(proc.MaxClockSpeed, psutil.cpu_freq().max)
-
-
-@unittest.skipIf(not WINDOWS, "WINDOWS only")
-class TestSystemAPIs(unittest.TestCase):
-
-    def test_nic_names(self):
-        out = sh('ipconfig /all')
-        nics = psutil.net_io_counters(pernic=True).keys()
-        for nic in nics:
-            if "pseudo-interface" in nic.replace(' ', '-').lower():
-                continue
-            if nic not in out:
-                self.fail(
-                    "%r nic wasn't found in 'ipconfig /all' output" % nic)
-
-    def test_total_phymem(self):
-        w = wmi.WMI().Win32_ComputerSystem()[0]
-        self.assertEqual(int(w.TotalPhysicalMemory),
-                         psutil.virtual_memory().total)
-
-    # @unittest.skipIf(wmi is None, "wmi module is not installed")
-    # def test__UPTIME(self):
-    #     # _UPTIME constant is not public but it is used internally
-    #     # as value to return for pid 0 creation time.
-    #     # WMI behaves the same.
-    #     w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-    #     p = psutil.Process(0)
-    #     wmic_create = str(w.CreationDate.split('.')[0])
-    #     psutil_create = time.strftime("%Y%m%d%H%M%S",
-    #                                   time.localtime(p.create_time()))
-
-    # Note: this test is not very reliable
-    @unittest.skipIf(APPVEYOR, "test not reliable on appveyor")
-    @retry_before_failing()
-    def test_pids(self):
-        # Note: this test might fail if the OS is starting/killing
-        # other processes in the meantime
-        w = wmi.WMI().Win32_Process()
-        wmi_pids = set([x.ProcessId for x in w])
-        psutil_pids = set(psutil.pids())
-        self.assertEqual(wmi_pids, psutil_pids)
-
-    @retry_before_failing()
-    def test_disks(self):
-        ps_parts = psutil.disk_partitions(all=True)
-        wmi_parts = wmi.WMI().Win32_LogicalDisk()
-        for ps_part in ps_parts:
-            for wmi_part in wmi_parts:
-                if ps_part.device.replace('\\', '') == wmi_part.DeviceID:
-                    if not ps_part.mountpoint:
-                        # this is usually a CD-ROM with no disk inserted
-                        break
-                    try:
-                        usage = psutil.disk_usage(ps_part.mountpoint)
-                    except OSError as err:
-                        if err.errno == errno.ENOENT:
-                            # usually this is the floppy
-                            break
-                        else:
-                            raise
-                    self.assertEqual(usage.total, int(wmi_part.Size))
-                    wmi_free = int(wmi_part.FreeSpace)
-                    self.assertEqual(usage.free, wmi_free)
-                    # 10 MB tolerance
-                    if abs(usage.free - wmi_free) > 10 * 1024 * 1024:
-                        self.fail("psutil=%s, wmi=%s" % (
-                            usage.free, wmi_free))
-                    break
-            else:
-                self.fail("can't find partition %s" % repr(ps_part))
-
-    def test_disk_usage(self):
-        for disk in psutil.disk_partitions():
-            sys_value = win32api.GetDiskFreeSpaceEx(disk.mountpoint)
-            psutil_value = psutil.disk_usage(disk.mountpoint)
-            self.assertAlmostEqual(sys_value[0], psutil_value.free,
-                                   delta=1024 * 1024)
-            self.assertAlmostEqual(sys_value[1], psutil_value.total,
                                   delta=1024 * 1024)
-            self.assertEqual(psutil_value.used,
-                             psutil_value.total - psutil_value.free)
-
-    def test_disk_partitions(self):
-        sys_value = [
-            x + '\\' for x in win32api.GetLogicalDriveStrings().split("\\\x00")
-            if x and not x.startswith('A:')]
-        psutil_value = [x.mountpoint for x in psutil.disk_partitions(all=True)]
-        self.assertEqual(sys_value, psutil_value)
-
-    def test_net_if_stats(self):
-        ps_names = set(cext.net_if_stats())
-        wmi_adapters = wmi.WMI().Win32_NetworkAdapter()
-        wmi_names = set()
-        for wmi_adapter in wmi_adapters:
-            wmi_names.add(wmi_adapter.Name)
-            wmi_names.add(wmi_adapter.NetConnectionID)
-        self.assertTrue(ps_names & wmi_names,
-                        "no common entries in %s, %s" % (ps_names, wmi_names))
-
-    def
test_boot_time(self): - wmi_os = wmi.WMI().Win32_OperatingSystem() - wmi_btime_str = wmi_os[0].LastBootUpTime.split('.')[0] - wmi_btime_dt = datetime.datetime.strptime( - wmi_btime_str, "%Y%m%d%H%M%S") - psutil_dt = datetime.datetime.fromtimestamp(psutil.boot_time()) - diff = abs((wmi_btime_dt - psutil_dt).total_seconds()) - # Wmic time is 2-3 secs lower for some reason; that's OK. - self.assertLessEqual(diff, 3) - - def test_boot_time_fluctuation(self): - # https://github.com/giampaolo/psutil/issues/1007 - with mock.patch('psutil._pswindows.cext.boot_time', return_value=5): - self.assertEqual(psutil.boot_time(), 5) - with mock.patch('psutil._pswindows.cext.boot_time', return_value=4): - self.assertEqual(psutil.boot_time(), 5) - with mock.patch('psutil._pswindows.cext.boot_time', return_value=6): - self.assertEqual(psutil.boot_time(), 5) - with mock.patch('psutil._pswindows.cext.boot_time', return_value=333): - self.assertEqual(psutil.boot_time(), 333) - - -# =================================================================== -# sensors_battery() -# =================================================================== - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class TestSensorsBattery(unittest.TestCase): - - def test_has_battery(self): - if win32api.GetPwrCapabilities()['SystemBatteriesPresent']: - self.assertIsNotNone(psutil.sensors_battery()) - else: - self.assertIsNone(psutil.sensors_battery()) - - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_percent(self): - w = wmi.WMI() - battery_wmi = w.query('select * from Win32_Battery')[0] - battery_psutil = psutil.sensors_battery() - self.assertAlmostEqual( - battery_psutil.percent, battery_wmi.EstimatedChargeRemaining, - delta=1) - - @unittest.skipIf(not HAS_BATTERY, "no battery") - def test_power_plugged(self): - w = wmi.WMI() - battery_wmi = w.query('select * from Win32_Battery')[0] - battery_psutil = psutil.sensors_battery() - # Status codes: - # https://msdn.microsoft.com/en-us/library/aa394074(v=vs.85).aspx - self.assertEqual(battery_psutil.power_plugged, - battery_wmi.BatteryStatus == 2) - - def test_emulate_no_battery(self): - with mock.patch("psutil._pswindows.cext.sensors_battery", - return_value=(0, 128, 0, 0)) as m: - self.assertIsNone(psutil.sensors_battery()) - assert m.called - - def test_emulate_power_connected(self): - with mock.patch("psutil._pswindows.cext.sensors_battery", - return_value=(1, 0, 0, 0)) as m: - self.assertEqual(psutil.sensors_battery().secsleft, - psutil.POWER_TIME_UNLIMITED) - assert m.called - - def test_emulate_power_charging(self): - with mock.patch("psutil._pswindows.cext.sensors_battery", - return_value=(0, 8, 0, 0)) as m: - self.assertEqual(psutil.sensors_battery().secsleft, - psutil.POWER_TIME_UNLIMITED) - assert m.called - - def test_emulate_secs_left_unknown(self): - with mock.patch("psutil._pswindows.cext.sensors_battery", - return_value=(0, 0, 0, -1)) as m: - self.assertEqual(psutil.sensors_battery().secsleft, - psutil.POWER_TIME_UNKNOWN) - assert m.called - - -# =================================================================== -# Process APIs -# =================================================================== - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class TestProcess(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.pid = get_test_subprocess().pid - - @classmethod - def tearDownClass(cls): - reap_children() - - def test_issue_24(self): - p = psutil.Process(0) - self.assertRaises(psutil.AccessDenied, p.kill) - - def test_special_pid(self): - p = 
psutil.Process(4) - self.assertEqual(p.name(), 'System') - # use __str__ to access all common Process properties to check - # that nothing strange happens - str(p) - p.username() - self.assertTrue(p.create_time() >= 0.0) - try: - rss, vms = p.memory_info()[:2] - except psutil.AccessDenied: - # expected on Windows Vista and Windows 7 - if not platform.uname()[1] in ('vista', 'win-7', 'win7'): - raise - else: - self.assertTrue(rss > 0) - - def test_send_signal(self): - p = psutil.Process(self.pid) - self.assertRaises(ValueError, p.send_signal, signal.SIGINT) - - def test_exe(self): - for p in psutil.process_iter(): - try: - self.assertEqual(os.path.basename(p.exe()), p.name()) - except psutil.Error: - pass - - def test_num_handles_increment(self): - p = psutil.Process(os.getpid()) - before = p.num_handles() - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, os.getpid()) - after = p.num_handles() - self.assertEqual(after, before + 1) - win32api.CloseHandle(handle) - self.assertEqual(p.num_handles(), before) - - def test_handles_leak(self): - # Call all Process methods and make sure no handles are left - # open. This is here mainly to make sure functions using - # OpenProcess() always call CloseHandle(). - def call(p, attr): - attr = getattr(p, name, None) - if attr is not None and callable(attr): - attr() - else: - attr - - p = psutil.Process(self.pid) - failures = [] - for name in dir(psutil.Process): - if name.startswith('_') \ - or name in ('terminate', 'kill', 'suspend', 'resume', - 'nice', 'send_signal', 'wait', 'children', - 'as_dict', 'memory_info_ex'): - continue - else: - try: - call(p, name) - num1 = p.num_handles() - call(p, name) - num2 = p.num_handles() - except (psutil.NoSuchProcess, psutil.AccessDenied): - pass - else: - if num2 > num1: - fail = \ - "failure while processing Process.%s method " \ - "(before=%s, after=%s)" % (name, num1, num2) - failures.append(fail) - if failures: - self.fail('\n' + '\n'.join(failures)) - - def test_name_always_available(self): - # On Windows name() is never supposed to raise AccessDenied, - # see https://github.com/giampaolo/psutil/issues/627 - for p in psutil.process_iter(): - try: - p.name() - except psutil.NoSuchProcess: - pass - - @unittest.skipIf(not sys.version_info >= (2, 7), - "CTRL_* signals not supported") - def test_ctrl_signals(self): - p = psutil.Process(get_test_subprocess().pid) - p.send_signal(signal.CTRL_C_EVENT) - p.send_signal(signal.CTRL_BREAK_EVENT) - p.kill() - p.wait() - self.assertRaises(psutil.NoSuchProcess, - p.send_signal, signal.CTRL_C_EVENT) - self.assertRaises(psutil.NoSuchProcess, - p.send_signal, signal.CTRL_BREAK_EVENT) - - def test_compare_name_exe(self): - for p in psutil.process_iter(): - try: - a = os.path.basename(p.exe()) - b = p.name() - except (psutil.NoSuchProcess, psutil.AccessDenied): - pass - else: - self.assertEqual(a, b) - - def test_username(self): - self.assertEqual(psutil.Process().username(), - win32api.GetUserNameEx(win32con.NameSamCompatible)) - - def test_cmdline(self): - sys_value = re.sub(' +', ' ', win32api.GetCommandLine()).strip() - psutil_value = ' '.join(psutil.Process().cmdline()) - self.assertEqual(sys_value, psutil_value) - - # XXX - occasional failures - - # def test_cpu_times(self): - # handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - # win32con.FALSE, os.getpid()) - # self.addCleanup(win32api.CloseHandle, handle) - # sys_value = win32process.GetProcessTimes(handle) - # psutil_value = psutil.Process().cpu_times() - # 
self.assertAlmostEqual( - # psutil_value.user, sys_value['UserTime'] / 10000000.0, - # delta=0.2) - # self.assertAlmostEqual( - # psutil_value.user, sys_value['KernelTime'] / 10000000.0, - # delta=0.2) - - def test_nice(self): - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, os.getpid()) - self.addCleanup(win32api.CloseHandle, handle) - sys_value = win32process.GetPriorityClass(handle) - psutil_value = psutil.Process().nice() - self.assertEqual(psutil_value, sys_value) - - def test_memory_info(self): - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, self.pid) - self.addCleanup(win32api.CloseHandle, handle) - sys_value = win32process.GetProcessMemoryInfo(handle) - psutil_value = psutil.Process(self.pid).memory_info() - self.assertEqual( - sys_value['PeakWorkingSetSize'], psutil_value.peak_wset) - self.assertEqual( - sys_value['WorkingSetSize'], psutil_value.wset) - self.assertEqual( - sys_value['QuotaPeakPagedPoolUsage'], psutil_value.peak_paged_pool) - self.assertEqual( - sys_value['QuotaPagedPoolUsage'], psutil_value.paged_pool) - self.assertEqual( - sys_value['QuotaPeakNonPagedPoolUsage'], - psutil_value.peak_nonpaged_pool) - self.assertEqual( - sys_value['QuotaNonPagedPoolUsage'], psutil_value.nonpaged_pool) - self.assertEqual( - sys_value['PagefileUsage'], psutil_value.pagefile) - self.assertEqual( - sys_value['PeakPagefileUsage'], psutil_value.peak_pagefile) - - self.assertEqual(psutil_value.rss, psutil_value.wset) - self.assertEqual(psutil_value.vms, psutil_value.pagefile) - - def test_wait(self): - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, self.pid) - self.addCleanup(win32api.CloseHandle, handle) - p = psutil.Process(self.pid) - p.terminate() - psutil_value = p.wait() - sys_value = win32process.GetExitCodeProcess(handle) - self.assertEqual(psutil_value, sys_value) - - def test_cpu_affinity(self): - def from_bitmask(x): - return [i for i in range(64) if (1 << i) & x] - - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, self.pid) - self.addCleanup(win32api.CloseHandle, handle) - sys_value = from_bitmask( - win32process.GetProcessAffinityMask(handle)[0]) - psutil_value = psutil.Process(self.pid).cpu_affinity() - self.assertEqual(psutil_value, sys_value) - - def test_io_counters(self): - handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, - win32con.FALSE, os.getpid()) - self.addCleanup(win32api.CloseHandle, handle) - sys_value = win32process.GetProcessIoCounters(handle) - psutil_value = psutil.Process().io_counters() - self.assertEqual( - psutil_value.read_count, sys_value['ReadOperationCount']) - self.assertEqual( - psutil_value.write_count, sys_value['WriteOperationCount']) - self.assertEqual( - psutil_value.read_bytes, sys_value['ReadTransferCount']) - self.assertEqual( - psutil_value.write_bytes, sys_value['WriteTransferCount']) - self.assertEqual( - psutil_value.other_count, sys_value['OtherOperationCount']) - self.assertEqual( - psutil_value.other_bytes, sys_value['OtherTransferCount']) - - def test_num_handles(self): - import ctypes - import ctypes.wintypes - PROCESS_QUERY_INFORMATION = 0x400 - handle = ctypes.windll.kernel32.OpenProcess( - PROCESS_QUERY_INFORMATION, 0, os.getpid()) - self.addCleanup(ctypes.windll.kernel32.CloseHandle, handle) - hndcnt = ctypes.wintypes.DWORD() - ctypes.windll.kernel32.GetProcessHandleCount( - handle, ctypes.byref(hndcnt)) - sys_value = hndcnt.value - psutil_value = 
psutil.Process().num_handles()
-        ctypes.windll.kernel32.CloseHandle(handle)
-        self.assertEqual(psutil_value, sys_value + 1)
-
-
-@unittest.skipIf(not WINDOWS, "WINDOWS only")
-class TestProcessWMI(unittest.TestCase):
-    """Compare Process API results with WMI."""
-
-    @classmethod
-    def setUpClass(cls):
-        cls.pid = get_test_subprocess().pid
-
-    @classmethod
-    def tearDownClass(cls):
-        reap_children()
-
-    def test_name(self):
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        self.assertEqual(p.name(), w.Caption)
-
-    def test_exe(self):
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        # Note: wmi reports the exe as a lower case string.
-        # Since Windows paths are case-insensitive, we ignore that.
-        self.assertEqual(p.exe().lower(), w.ExecutablePath.lower())
-
-    def test_cmdline(self):
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        self.assertEqual(' '.join(p.cmdline()),
-                         w.CommandLine.replace('"', ''))
-
-    def test_username(self):
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        domain, _, username = w.GetOwner()
-        username = "%s\\%s" % (domain, username)
-        self.assertEqual(p.username(), username)
-
-    def test_memory_rss(self):
-        time.sleep(0.1)
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        rss = p.memory_info().rss
-        self.assertEqual(rss, int(w.WorkingSetSize))
-
-    def test_memory_vms(self):
-        time.sleep(0.1)
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        vms = p.memory_info().vms
-        # http://msdn.microsoft.com/en-us/library/aa394372(VS.85).aspx
-        # ...claims that PageFileUsage is represented in Kilo
-        # bytes but funnily enough on certain platforms bytes are
-        # returned instead.
-        wmi_usage = int(w.PageFileUsage)
-        if (vms != wmi_usage) and (vms != wmi_usage * 1024):
-            self.fail("wmi=%s, psutil=%s" % (wmi_usage, vms))
-
-    def test_create_time(self):
-        w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
-        p = psutil.Process(self.pid)
-        wmic_create = str(w.CreationDate.split('.')[0])
-        psutil_create = time.strftime("%Y%m%d%H%M%S",
-                                      time.localtime(p.create_time()))
-        self.assertEqual(wmic_create, psutil_create)
-
-
-@unittest.skipIf(not WINDOWS, "WINDOWS only")
-class TestDualProcessImplementation(unittest.TestCase):
-    """
-    Certain APIs on Windows have 2 internal implementations, one
-    based on documented Windows APIs, another one based on
-    NtQuerySystemInformation() which gets called as fallback in
-    case the first fails because of limited permissions.
-    Here we test that the two methods return the exact same value,
-    see:
-    https://github.com/giampaolo/psutil/issues/304
-    """
-
-    @classmethod
-    def setUpClass(cls):
-        cls.pid = get_test_subprocess().pid
-
-    @classmethod
-    def tearDownClass(cls):
-        reap_children()
-    # ---
-    # same tests as above but mimic the AccessDenied failure of
-    # the first (fast) method.
- - def test_name(self): - name = psutil.Process(self.pid).name() - with mock.patch("psutil._psplatform.cext.proc_exe", - side_effect=psutil.AccessDenied(os.getpid())) as fun: - self.assertEqual(psutil.Process(self.pid).name(), name) - assert fun.called - - def test_memory_info(self): - mem_1 = psutil.Process(self.pid).memory_info() - with mock.patch("psutil._psplatform.cext.proc_memory_info", - side_effect=OSError(errno.EPERM, "msg")) as fun: - mem_2 = psutil.Process(self.pid).memory_info() - self.assertEqual(len(mem_1), len(mem_2)) - for i in range(len(mem_1)): - self.assertGreaterEqual(mem_1[i], 0) - self.assertGreaterEqual(mem_2[i], 0) - self.assertAlmostEqual(mem_1[i], mem_2[i], delta=512) - assert fun.called - - def test_create_time(self): - ctime = psutil.Process(self.pid).create_time() - with mock.patch("psutil._psplatform.cext.proc_create_time", - side_effect=OSError(errno.EPERM, "msg")) as fun: - self.assertEqual(psutil.Process(self.pid).create_time(), ctime) - assert fun.called - - def test_cpu_times(self): - cpu_times_1 = psutil.Process(self.pid).cpu_times() - with mock.patch("psutil._psplatform.cext.proc_cpu_times", - side_effect=OSError(errno.EPERM, "msg")) as fun: - cpu_times_2 = psutil.Process(self.pid).cpu_times() - assert fun.called - self.assertAlmostEqual( - cpu_times_1.user, cpu_times_2.user, delta=0.01) - self.assertAlmostEqual( - cpu_times_1.system, cpu_times_2.system, delta=0.01) - - def test_io_counters(self): - io_counters_1 = psutil.Process(self.pid).io_counters() - with mock.patch("psutil._psplatform.cext.proc_io_counters", - side_effect=OSError(errno.EPERM, "msg")) as fun: - io_counters_2 = psutil.Process(self.pid).io_counters() - for i in range(len(io_counters_1)): - self.assertAlmostEqual( - io_counters_1[i], io_counters_2[i], delta=5) - assert fun.called - - def test_num_handles(self): - num_handles = psutil.Process(self.pid).num_handles() - with mock.patch("psutil._psplatform.cext.proc_num_handles", - side_effect=OSError(errno.EPERM, "msg")) as fun: - self.assertEqual(psutil.Process(self.pid).num_handles(), - num_handles) - assert fun.called - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class RemoteProcessTestCase(unittest.TestCase): - """Certain functions require calling ReadProcessMemory. - This trivially works when called on the current process. - Check that this works on other processes, especially when they - have a different bitness. 
- """ - - @staticmethod - def find_other_interpreter(): - # find a python interpreter that is of the opposite bitness from us - code = "import sys; sys.stdout.write(str(sys.maxsize > 2**32))" - - # XXX: a different and probably more stable approach might be to access - # the registry but accessing 64 bit paths from a 32 bit process - for filename in glob.glob(r"C:\Python*\python.exe"): - proc = subprocess.Popen(args=[filename, "-c", code], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output, _ = proc.communicate() - if output == str(not IS_64_BIT): - return filename - - @classmethod - def setUpClass(cls): - other_python = cls.find_other_interpreter() - - if other_python is None: - raise unittest.SkipTest( - "could not find interpreter with opposite bitness") - - if IS_64_BIT: - cls.python64 = sys.executable - cls.python32 = other_python - else: - cls.python64 = other_python - cls.python32 = sys.executable - - test_args = ["-c", "import sys; sys.stdin.read()"] - - def setUp(self): - env = os.environ.copy() - env["THINK_OF_A_NUMBER"] = str(os.getpid()) - self.proc32 = get_test_subprocess([self.python32] + self.test_args, - env=env, - stdin=subprocess.PIPE) - self.proc64 = get_test_subprocess([self.python64] + self.test_args, - env=env, - stdin=subprocess.PIPE) - - def tearDown(self): - self.proc32.communicate() - self.proc64.communicate() - reap_children() - - @classmethod - def tearDownClass(cls): - reap_children() - - def test_cmdline_32(self): - p = psutil.Process(self.proc32.pid) - self.assertEqual(len(p.cmdline()), 3) - self.assertEqual(p.cmdline()[1:], self.test_args) - - def test_cmdline_64(self): - p = psutil.Process(self.proc64.pid) - self.assertEqual(len(p.cmdline()), 3) - self.assertEqual(p.cmdline()[1:], self.test_args) - - def test_cwd_32(self): - p = psutil.Process(self.proc32.pid) - self.assertEqual(p.cwd(), os.getcwd()) - - def test_cwd_64(self): - p = psutil.Process(self.proc64.pid) - self.assertEqual(p.cwd(), os.getcwd()) - - def test_environ_32(self): - p = psutil.Process(self.proc32.pid) - e = p.environ() - self.assertIn("THINK_OF_A_NUMBER", e) - self.assertEquals(e["THINK_OF_A_NUMBER"], str(os.getpid())) - - def test_environ_64(self): - p = psutil.Process(self.proc64.pid) - e = p.environ() - self.assertIn("THINK_OF_A_NUMBER", e) - self.assertEquals(e["THINK_OF_A_NUMBER"], str(os.getpid())) - - -# =================================================================== -# Windows services -# =================================================================== - - -@unittest.skipIf(not WINDOWS, "WINDOWS only") -class TestServices(unittest.TestCase): - - def test_win_service_iter(self): - valid_statuses = set([ - "running", - "paused", - "start", - "pause", - "continue", - "stop", - "stopped", - ]) - valid_start_types = set([ - "automatic", - "manual", - "disabled", - ]) - valid_statuses = set([ - "running", - "paused", - "start_pending", - "pause_pending", - "continue_pending", - "stop_pending", - "stopped" - ]) - for serv in psutil.win_service_iter(): - data = serv.as_dict() - self.assertIsInstance(data['name'], str) - self.assertNotEqual(data['name'].strip(), "") - self.assertIsInstance(data['display_name'], str) - self.assertIsInstance(data['username'], str) - self.assertIn(data['status'], valid_statuses) - if data['pid'] is not None: - psutil.Process(data['pid']) - self.assertIsInstance(data['binpath'], str) - self.assertIsInstance(data['username'], str) - self.assertIsInstance(data['start_type'], str) - self.assertIn(data['start_type'], valid_start_types) - 
self.assertIn(data['status'], valid_statuses) - self.assertIsInstance(data['description'], str) - pid = serv.pid() - if pid is not None: - p = psutil.Process(pid) - self.assertTrue(p.is_running()) - # win_service_get - s = psutil.win_service_get(serv.name()) - # test __eq__ - self.assertEqual(serv, s) - - def test_win_service_get(self): - name = next(psutil.win_service_iter()).name() - - with self.assertRaises(psutil.NoSuchProcess) as cm: - psutil.win_service_get(name + '???') - self.assertEqual(cm.exception.name, name + '???') - - # test NoSuchProcess - service = psutil.win_service_get(name) - exc = WindowsError( - psutil._psplatform.cext.ERROR_SERVICE_DOES_NOT_EXIST, "") - with mock.patch("psutil._psplatform.cext.winservice_query_status", - side_effect=exc): - self.assertRaises(psutil.NoSuchProcess, service.status) - with mock.patch("psutil._psplatform.cext.winservice_query_config", - side_effect=exc): - self.assertRaises(psutil.NoSuchProcess, service.username) - - # test AccessDenied - exc = WindowsError( - psutil._psplatform.cext.ERROR_ACCESS_DENIED, "") - with mock.patch("psutil._psplatform.cext.winservice_query_status", - side_effect=exc): - self.assertRaises(psutil.AccessDenied, service.status) - with mock.patch("psutil._psplatform.cext.winservice_query_config", - side_effect=exc): - self.assertRaises(psutil.AccessDenied, service.username) - - # test __str__ and __repr__ - self.assertIn(service.name(), str(service)) - self.assertIn(service.display_name(), str(service)) - self.assertIn(service.name(), repr(service)) - self.assertIn(service.display_name(), repr(service)) - - -if __name__ == '__main__': - run_test_module_by_name(__file__) diff --git a/server/www/packages/packages-linux/x64/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 b/server/www/packages/packages-linux/x64/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 new file mode 100755 index 0000000..a74aa90 Binary files /dev/null and b/server/www/packages/packages-linux/x64/.libs_cffi_backend/libffi-806b1a9d.so.6.0.4 differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-7ce95de6.so.6.16.1 b/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-7ce95de6.so.6.16.1 deleted file mode 100755 index 179f60c..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-7ce95de6.so.6.16.1 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-3fe7dfc0.so.9.3.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-3fe7dfc0.so.9.3.0 deleted file mode 100755 index 835b57b..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-3fe7dfc0.so.9.3.0 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-e366d6b0.so.2.1.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-e366d6b0.so.2.1.0 deleted file mode 100755 index c31cea4..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-e366d6b0.so.2.1.0 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-898afbbd.so.16.35.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-898afbbd.so.16.35.0 deleted file mode 100755 index 9cf16ab..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-898afbbd.so.16.35.0 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-8a6d997d.so.5.3.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-8a6d997d.so.5.3.0 deleted file mode 100755 index 
ff0a9dd..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-8a6d997d.so.5.3.0 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-8ccd29fd.so.7.0.2 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-8ccd29fd.so.7.0.2 deleted file mode 100755 index b2f0763..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-8ccd29fd.so.7.0.2 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-eba3dc32.so.2.0.4 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-eba3dc32.so.2.0.4 deleted file mode 100755 index 66adccb..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-eba3dc32.so.2.0.4 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1c63fe99.so.3.0.2 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1c63fe99.so.3.0.2 deleted file mode 100755 index 2045a77..0000000 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1c63fe99.so.3.0.2 and /dev/null differ diff --git a/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py b/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py index eac19bd..7a485cf 100644 --- a/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py +++ b/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py @@ -17,10 +17,8 @@ # See the README file for information on usage and redistribution. # -from __future__ import print_function - -from . import Image, FontFile +from . import FontFile, Image # -------------------------------------------------------------------- # parse X Bitmap Distribution Format (BDF) @@ -32,14 +30,10 @@ bdf_slant = { "O": "Oblique", "RI": "Reverse Italic", "RO": "Reverse Oblique", - "OT": "Other" + "OT": "Other", } -bdf_spacing = { - "P": "Proportional", - "M": "Monospaced", - "C": "Cell" -} +bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"} def bdf_char(f): @@ -50,7 +44,7 @@ def bdf_char(f): return None if s[:9] == b"STARTCHAR": break - id = s[9:].strip().decode('ascii') + id = s[9:].strip().decode("ascii") # load symbol properties props = {} @@ -59,7 +53,7 @@ def bdf_char(f): if not s or s[:6] == b"BITMAP": break i = s.find(b" ") - props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") # load bitmap bitmap = [] @@ -73,7 +67,7 @@ def bdf_char(f): [x, y, l, d] = [int(p) for p in props["BBX"].split()] [dx, dy] = [int(p) for p in props["DWIDTH"].split()] - bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) + bbox = (dx, dy), (l, -d - y, x + l, -d), (0, 0, x, y) try: im = Image.frombytes("1", (x, y), bitmap, "hex", "1") @@ -87,11 +81,10 @@ def bdf_char(f): ## # Font file plugin for the X11 BDF format. 
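# A note on the metrics built above: bdf_char packs three boxes from the BDF
# "BBX" (bitmap width, height, x-offset, y-offset) and "DWIDTH" (advance)
# fields: the glyph advance, the destination box and the source box. A
# standalone sketch of that mapping (hypothetical helper, not part of this
# module):
#
#   def bdf_glyph_boxes(bbx, dwidth):
#       x, y, l, d = bbx     # bitmap width/height and x/y offsets
#       dx, dy = dwidth      # horizontal/vertical advance
#       return (dx, dy), (l, -d - y, x + l, -d), (0, 0, x, y)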
+ class BdfFontFile(FontFile.FontFile): - def __init__(self, fp): - - FontFile.FontFile.__init__(self) + super().__init__() s = fp.readline() if s[:13] != b"STARTFONT 2.1": @@ -105,10 +98,10 @@ class BdfFontFile(FontFile.FontFile): if not s or s[:13] == b"ENDPROPERTIES": break i = s.find(b" ") - props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") if s[:i] in [b"COMMENT", b"COPYRIGHT"]: if s.find(b"LogicalFontDescription") < 0: - comments.append(s[i+1:-1].decode('ascii')) + comments.append(s[i + 1 : -1].decode("ascii")) while True: c = bdf_char(fp) diff --git a/server/www/packages/packages-linux/x64/PIL/BlpImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/BlpImagePlugin.py index 398e0fa..5ccba37 100644 --- a/server/www/packages/packages-linux/x64/PIL/BlpImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/BlpImagePlugin.py @@ -34,7 +34,6 @@ from io import BytesIO from . import Image, ImageFile - BLP_FORMAT_JPEG = 0 BLP_ENCODING_UNCOMPRESSED = 1 @@ -47,11 +46,7 @@ BLP_ALPHA_ENCODING_DXT5 = 7 def unpack_565(i): - return ( - ((i >> 11) & 0x1f) << 3, - ((i >> 5) & 0x3f) << 2, - (i & 0x1f) << 3 - ) + return (((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3) def decode_dxt1(data, alpha=False): @@ -119,12 +114,12 @@ def decode_dxt3(data): for block in range(blocks): idx = block * 16 - block = data[idx:idx + 16] + block = data[idx : idx + 16] # Decode next 16-byte block. bits = struct.unpack_from("<8B", block) color0, color1 = struct.unpack_from(">= 4 else: high = True - a &= 0xf + a &= 0xF a *= 17 # We get a value between 0 and 15 color_code = (code >> 2 * (4 * j + i)) & 0x03 @@ -172,19 +167,17 @@ def decode_dxt5(data): for block in range(blocks): idx = block * 16 - block = data[idx:idx + 16] + block = data[idx : idx + 16] # Decode next 16-byte block. 
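# Quick sanity check for the unpack_565 helper above: each packed 5- or 6-bit
# field is shifted back up to the top of an 8-bit channel, so the all-ones
# field values come out as (worked examples, computed from the shifts by hand):
#
#   unpack_565(0xF800) == (248, 0, 0)   # red field saturated
#   unpack_565(0x07E0) == (0, 252, 0)   # green field saturated
#   unpack_565(0x001F) == (0, 0, 248)   # blue field saturated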
a0, a1 = struct.unpack_from("= 40: # v3 and OS/2 - file_info['y_flip'] = i8(header_data[7]) == 0xff - file_info['direction'] = 1 if file_info['y_flip'] else -1 - file_info['width'] = i32(header_data[0:4]) - file_info['height'] = (i32(header_data[4:8]) - if not file_info['y_flip'] - else 2**32 - i32(header_data[4:8])) - file_info['planes'] = i16(header_data[8:10]) - file_info['bits'] = i16(header_data[10:12]) - file_info['compression'] = i32(header_data[12:16]) - # byte size of pixel data - file_info['data_size'] = i32(header_data[16:20]) - file_info['pixels_per_meter'] = (i32(header_data[20:24]), - i32(header_data[24:28])) - file_info['colors'] = i32(header_data[28:32]) - file_info['palette_padding'] = 4 - self.info["dpi"] = tuple( - map(lambda x: int(math.ceil(x / 39.3701)), - file_info['pixels_per_meter'])) - if file_info['compression'] == self.BITFIELDS: - if len(header_data) >= 52: - for idx, mask in enumerate(['r_mask', - 'g_mask', - 'b_mask', - 'a_mask']): - file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) - else: - # 40 byte headers only have the three components in the - # bitfields masks, - # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx - # See also https://github.com/python-pillow/Pillow/issues/1293 - # There is a 4th component in the RGBQuad, in the alpha - # location, but it is listed as a reserved component, - # and it is not generally an alpha channel - file_info['a_mask'] = 0x0 - for mask in ['r_mask', 'g_mask', 'b_mask']: - file_info[mask] = i32(read(4)) - file_info['rgb_mask'] = (file_info['r_mask'], - file_info['g_mask'], - file_info['b_mask']) - file_info['rgba_mask'] = (file_info['r_mask'], - file_info['g_mask'], - file_info['b_mask'], - file_info['a_mask']) + header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4) + + # -------------------------------------------------- IBM OS/2 Bitmap v1 + # ----- This format has different offsets because of width/height types + if file_info["header_size"] == 12: + file_info["width"] = i16(header_data[0:2]) + file_info["height"] = i16(header_data[2:4]) + file_info["planes"] = i16(header_data[4:6]) + file_info["bits"] = i16(header_data[6:8]) + file_info["compression"] = self.RAW + file_info["palette_padding"] = 3 + + # --------------------------------------------- Windows Bitmap v2 to v5 + # v3, OS/2 v2, v4, v5 + elif file_info["header_size"] in (40, 64, 108, 124): + file_info["y_flip"] = i8(header_data[7]) == 0xFF + file_info["direction"] = 1 if file_info["y_flip"] else -1 + file_info["width"] = i32(header_data[0:4]) + file_info["height"] = ( + i32(header_data[4:8]) + if not file_info["y_flip"] + else 2 ** 32 - i32(header_data[4:8]) + ) + file_info["planes"] = i16(header_data[8:10]) + file_info["bits"] = i16(header_data[10:12]) + file_info["compression"] = i32(header_data[12:16]) + # byte size of pixel data + file_info["data_size"] = i32(header_data[16:20]) + file_info["pixels_per_meter"] = ( + i32(header_data[20:24]), + i32(header_data[24:28]), + ) + file_info["colors"] = i32(header_data[28:32]) + file_info["palette_padding"] = 4 + self.info["dpi"] = tuple( + int(x / 39.3701 + 0.5) for x in file_info["pixels_per_meter"] + ) + if file_info["compression"] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate( + ["r_mask", "g_mask", "b_mask", "a_mask"] + ): + file_info[mask] = i32(header_data[36 + idx * 4 : 40 + idx * 4]) + else: + # 40 byte headers only have the three components in the + # bitfields masks, ref: + # 
https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also + # https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha + # location, but it is listed as a reserved component, + # and it is not generally an alpha channel + file_info["a_mask"] = 0x0 + for mask in ["r_mask", "g_mask", "b_mask"]: + file_info[mask] = i32(read(4)) + file_info["rgb_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + ) + file_info["rgba_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + file_info["a_mask"], + ) else: - raise IOError("Unsupported BMP header type (%d)" % - file_info['header_size']) + raise OSError("Unsupported BMP header type (%d)" % file_info["header_size"]) + # ------------------ Special case : header is reported 40, which # ---------------------- is shorter than real size for bpp >= 16 - self._size = file_info['width'], file_info['height'] - # -------- If color count was not found in the header, compute from bits - file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) - # -------------------------------- Check abnormal values for DOS attacks - if file_info['width'] * file_info['height'] > 2**31: - raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) - # ----------------------- Check bit depth for unusual unsupported values - self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + self._size = file_info["width"], file_info["height"] + + # ------- If color count was not found in the header, compute from bits + file_info["colors"] = ( + file_info["colors"] + if file_info.get("colors", 0) + else (1 << file_info["bits"]) + ) + + # ------------------------------- Check abnormal values for DOS attacks + if file_info["width"] * file_info["height"] > 2 ** 31: + raise OSError("Unsupported BMP Size: (%dx%d)" % self.size) + + # ---------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None)) if self.mode is None: - raise IOError("Unsupported BMP pixel depth (%d)" - % file_info['bits']) - # ----------------- Process BMP with Bitfields compression (not palette) - if file_info['compression'] == self.BITFIELDS: + raise OSError("Unsupported BMP pixel depth (%d)" % file_info["bits"]) + + # ---------------- Process BMP with Bitfields compression (not palette) + if file_info["compression"] == self.BITFIELDS: SUPPORTED = { - 32: [(0xff0000, 0xff00, 0xff, 0x0), - (0xff0000, 0xff00, 0xff, 0xff000000), - (0x0, 0x0, 0x0, 0x0), - (0xff000000, 0xff0000, 0xff00, 0x0)], - 24: [(0xff0000, 0xff00, 0xff)], - 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)] + 32: [ + (0xFF0000, 0xFF00, 0xFF, 0x0), + (0xFF0000, 0xFF00, 0xFF, 0xFF000000), + (0xFF, 0xFF00, 0xFF0000, 0xFF000000), + (0x0, 0x0, 0x0, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0x0), + ], + 24: [(0xFF0000, 0xFF00, 0xFF)], + 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)], } MASK_MODES = { - (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", - (32, (0xff000000, 0xff0000, 0xff00, 0x0)): "XBGR", - (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", + (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR", + (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA", + (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA", (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", - (24, (0xff0000, 0xff00, 0xff)): "BGR", - (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", - (16, (0x7c00, 0x3e0, 
0x1f)): "BGR;15" + (24, (0xFF0000, 0xFF00, 0xFF)): "BGR", + (16, (0xF800, 0x7E0, 0x1F)): "BGR;16", + (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15", } - if file_info['bits'] in SUPPORTED: - if file_info['bits'] == 32 and \ - file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: - raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] - self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode - elif (file_info['bits'] in (24, 16) and - file_info['rgb_mask'] in SUPPORTED[file_info['bits']]): - raw_mode = MASK_MODES[ - (file_info['bits'], file_info['rgb_mask']) - ] + if file_info["bits"] in SUPPORTED: + if ( + file_info["bits"] == 32 + and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])] + self.mode = "RGBA" if "A" in raw_mode else self.mode + elif ( + file_info["bits"] in (24, 16) + and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])] else: - raise IOError("Unsupported BMP bitfields layout") + raise OSError("Unsupported BMP bitfields layout") else: - raise IOError("Unsupported BMP bitfields layout") - elif file_info['compression'] == self.RAW: - if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raise OSError("Unsupported BMP bitfields layout") + elif file_info["compression"] == self.RAW: + if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset raw_mode, self.mode = "BGRA", "RGBA" else: - raise IOError("Unsupported BMP compression (%d)" % - file_info['compression']) - # ---------------- Once the header is processed, process the palette/LUT + raise OSError("Unsupported BMP compression (%d)" % file_info["compression"]) + + # --------------- Once the header is processed, process the palette/LUT if self.mode == "P": # Paletted for 1, 4 and 8 bit images - # ----------------------------------------------------- 1-bit images - if not (0 < file_info['colors'] <= 65536): - raise IOError("Unsupported BMP Palette size (%d)" % - file_info['colors']) + + # ---------------------------------------------------- 1-bit images + if not (0 < file_info["colors"] <= 65536): + raise OSError("Unsupported BMP Palette size (%d)" % file_info["colors"]) else: - padding = file_info['palette_padding'] - palette = read(padding * file_info['colors']) + padding = file_info["palette_padding"] + palette = read(padding * file_info["colors"]) greyscale = True - indices = (0, 255) if file_info['colors'] == 2 else \ - list(range(file_info['colors'])) - # ------------------ Check if greyscale and ignore palette if so + indices = ( + (0, 255) + if file_info["colors"] == 2 + else list(range(file_info["colors"])) + ) + + # ----------------- Check if greyscale and ignore palette if so for ind, val in enumerate(indices): - rgb = palette[ind*padding:ind*padding + 3] + rgb = palette[ind * padding : ind * padding + 3] if rgb != o8(val) * 3: greyscale = False - # -------- If all colors are grey, white or black, ditch palette + + # ------- If all colors are grey, white or black, ditch palette if greyscale: - self.mode = "1" if file_info['colors'] == 2 else "L" + self.mode = "1" if file_info["colors"] == 2 else "L" raw_mode = self.mode else: self.mode = "P" self.palette = ImagePalette.raw( - "BGRX" if padding == 4 else "BGR", palette) + "BGRX" if padding == 4 else "BGR", palette + ) - # ----------------------------- Finally set the tile data for the plugin - self.info['compression'] = file_info['compression'] + # ---------------------------- Finally 
set the tile data for the plugin + self.info["compression"] = file_info["compression"] self.tile = [ - ('raw', - (0, 0, file_info['width'], file_info['height']), - offset or self.fp.tell(), - (raw_mode, - ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), - file_info['direction'])) + ( + "raw", + (0, 0, file_info["width"], file_info["height"]), + offset or self.fp.tell(), + ( + raw_mode, + ((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3), + file_info["direction"], + ), + ) ] def _open(self): @@ -243,9 +271,9 @@ class BmpImageFile(ImageFile.ImageFile): self._bitmap(offset=offset) -# ============================================================================== +# ============================================================================= # Image plugin for the DIB format (BMP alias) -# ============================================================================== +# ============================================================================= class DibImageFile(BmpImageFile): format = "DIB" @@ -254,6 +282,7 @@ class DibImageFile(BmpImageFile): def _open(self): self._bitmap() + # # -------------------------------------------------------------------- # Write BMP file @@ -268,43 +297,56 @@ SAVE = { } -def _save(im, fp, filename): +def _dib_save(im, fp, filename): + _save(im, fp, filename, False) + + +def _save(im, fp, filename, bitmap_header=True): try: rawmode, bits, colors = SAVE[im.mode] except KeyError: - raise IOError("cannot write mode %s as BMP" % im.mode) + raise OSError("cannot write mode %s as BMP" % im.mode) info = im.encoderinfo dpi = info.get("dpi", (96, 96)) # 1 meter == 39.3701 inches - ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + ppm = tuple(map(lambda x: int(x * 39.3701 + 0.5), dpi)) - stride = ((im.size[0]*bits+7)//8+3) & (~3) + stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3) header = 40 # or 64 for OS/2 version 2 - offset = 14 + header + colors * 4 image = stride * im.size[1] # bitmap header - fp.write(b"BM" + # file type (magic) - o32(offset+image) + # file size - o32(0) + # reserved - o32(offset)) # image data offset + if bitmap_header: + offset = 14 + header + colors * 4 + file_size = offset + image + if file_size > 2 ** 32 - 1: + raise ValueError("File size is too large for the BMP format") + fp.write( + b"BM" # file type (magic) + + o32(file_size) # file size + + o32(0) # reserved + + o32(offset) # image data offset + ) # bitmap info header - fp.write(o32(header) + # info header size - o32(im.size[0]) + # width - o32(im.size[1]) + # height - o16(1) + # planes - o16(bits) + # depth - o32(0) + # compression (0=uncompressed) - o32(image) + # size of bitmap - o32(ppm[0]) + o32(ppm[1]) + # resolution - o32(colors) + # colors used - o32(colors)) # colors important + fp.write( + o32(header) # info header size + + o32(im.size[0]) # width + + o32(im.size[1]) # height + + o16(1) # planes + + o16(bits) # depth + + o32(0) # compression (0=uncompressed) + + o32(image) # size of bitmap + + o32(ppm[0]) # resolution + + o32(ppm[1]) # resolution + + o32(colors) # colors used + + o32(colors) # colors important + ) - fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) if im.mode == "1": for i in (0, 255): @@ -315,8 +357,8 @@ def _save(im, fp, filename): elif im.mode == "P": fp.write(im.im.getpalette("RGB", "BGRX")) - ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, - (rawmode, stride, -1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]) + # # 
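# Stride arithmetic used by _save above: BMP rows are padded to 4-byte
# boundaries, so ((width * bits + 7) // 8 + 3) & (~3) rounds the row up to
# whole bytes and then up to a multiple of four. Worked example: a 10-pixel
# 24-bit row holds 30 bytes of pixel data and is stored with a 32-byte stride.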
-------------------------------------------------------------------- @@ -329,3 +371,10 @@ Image.register_save(BmpImageFile.format, _save) Image.register_extension(BmpImageFile.format, ".bmp") Image.register_mime(BmpImageFile.format, "image/bmp") + +Image.register_open(DibImageFile.format, DibImageFile, _dib_accept) +Image.register_save(DibImageFile.format, _dib_save) + +Image.register_extension(DibImageFile.format, ".dib") + +Image.register_mime(DibImageFile.format, "image/bmp") diff --git a/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py index a1957b3..48f21e1 100644 --- a/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py @@ -27,6 +27,7 @@ def register_handler(handler): # -------------------------------------------------------------------- # Image adapter + def _accept(prefix): return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" @@ -59,7 +60,7 @@ class BufrStubImageFile(ImageFile.StubImageFile): def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): - raise IOError("BUFR save handler not installed") + raise OSError("BUFR save handler not installed") _handler.save(im, fp, filename) diff --git a/server/www/packages/packages-linux/x64/PIL/ContainerIO.py b/server/www/packages/packages-linux/x64/PIL/ContainerIO.py index 682ad90..5bb0086 100644 --- a/server/www/packages/packages-linux/x64/PIL/ContainerIO.py +++ b/server/www/packages/packages-linux/x64/PIL/ContainerIO.py @@ -18,9 +18,10 @@ # A file object that provides read access to a part of an existing # file (for example a TAR file). +import io -class ContainerIO(object): +class ContainerIO: def __init__(self, file, offset, length): """ Create file object. @@ -39,9 +40,9 @@ class ContainerIO(object): # Always false. def isatty(self): - return 0 + return False - def seek(self, offset, mode=0): + def seek(self, offset, mode=io.SEEK_SET): """ Move file pointer. @@ -81,7 +82,7 @@ class ContainerIO(object): else: n = self.length - self.pos if not n: # EOF - return "" + return b"" if "b" in self.fh.mode else "" self.pos = self.pos + n return self.fh.read(n) @@ -91,13 +92,14 @@ class ContainerIO(object): :returns: An 8-bit string. """ - s = "" + s = b"" if "b" in self.fh.mode else "" + newline_character = b"\n" if "b" in self.fh.mode else "\n" while True: c = self.read(1) if not c: break s = s + c - if c == "\n": + if c == newline_character: break return s diff --git a/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py index 3e8f321..3a1b6d2 100644 --- a/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py @@ -15,14 +15,9 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import print_function - -from . import Image, BmpImagePlugin +from . import BmpImagePlugin, Image from ._binary import i8, i16le as i16, i32le as i32 -__version__ = "0.1" - # # -------------------------------------------------------------------- @@ -34,6 +29,7 @@ def _accept(prefix): ## # Image plugin for Windows Cursor files. 
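# Usage sketch for the ContainerIO change above: after this patch the class
# mirrors the mode of the wrapped handle, so binary files yield bytes from
# read() and readline(). (File name, offset and length here are assumptions
# for illustration.)
#
#   from PIL import ContainerIO
#
#   with open("archive.tar", "rb") as fh:
#       member = ContainerIO.ContainerIO(fh, 512, 1024)  # offset, length
#       line = member.readline()  # bytes, because fh is in binary mode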
+ class CurImageFile(BmpImagePlugin.BmpImageFile): format = "CUR" @@ -63,9 +59,9 @@ class CurImageFile(BmpImagePlugin.BmpImageFile): self._bitmap(i32(m[12:]) + offset) # patch up the bitmap height - self._size = self.size[0], self.size[1]//2 + self._size = self.size[0], self.size[1] // 2 d, e, o, a = self.tile[0] - self.tile[0] = d, (0, 0)+self.size, o, a + self.tile[0] = d, (0, 0) + self.size, o, a return diff --git a/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py index 2045927..7d2aff3 100644 --- a/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py @@ -25,8 +25,6 @@ from . import Image from ._binary import i32le as i32 from .PcxImagePlugin import PcxImageFile -__version__ = "0.2" - MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? @@ -37,6 +35,7 @@ def _accept(prefix): ## # Image plugin for the Intel DCX format. + class DcxImageFile(PcxImageFile): format = "DCX" @@ -81,6 +80,15 @@ class DcxImageFile(PcxImageFile): def tell(self): return self.frame + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + Image.register_open(DcxImageFile.format, DcxImageFile, _accept) diff --git a/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py index 7660827..9ba6e0f 100644 --- a/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py @@ -12,8 +12,8 @@ Full text of the CC0 license: import struct from io import BytesIO -from . import Image, ImageFile +from . import Image, ImageFile # Magic ("DDS ") DDS_MAGIC = 0x20534444 @@ -61,8 +61,7 @@ DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS DDS_ALPHA = DDPF_ALPHA DDS_PAL8 = DDPF_PALETTEINDEXED8 -DDS_HEADER_FLAGS_TEXTURE = (DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | - DDSD_PIXELFORMAT) +DDS_HEADER_FLAGS_TEXTURE = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH DDS_HEADER_FLAGS_PITCH = DDSD_PITCH @@ -107,10 +106,10 @@ class DdsImageFile(ImageFile.ImageFile): def _open(self): magic, header_size = struct.unpack(" 0: - s = fp.read(min(lengthfile, 100*1024)) + s = fp.read(min(lengthfile, 100 * 1024)) if not s: break lengthfile -= len(s) f.write(s) # Build Ghostscript command - command = ["gs", - "-q", # quiet mode - "-g%dx%d" % size, # set output geometry (pixels) - "-r%fx%f" % res, # set input DPI (dots per inch) - "-dBATCH", # exit after processing - "-dNOPAUSE", # don't pause between pages - "-dSAFER", # safe mode - "-sDEVICE=ppmraw", # ppm driver - "-sOutputFile=%s" % outfile, # output file - "-c", "%d %d translate" % (-bbox[0], -bbox[1]), - # adjust for image origin - "-f", infile, # input file - "-c", "showpage", # showpage (see: https://bugs.ghostscript.com/show_bug.cgi?id=698272) - ] + command = [ + "gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dBATCH", # exit after processing + "-dNOPAUSE", # don't pause between pages + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file + # adjust for image origin + "-c", + "%d %d translate" % (-bbox[0], -bbox[1]), + "-f", + infile, # input file + # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272) + "-c", 
+ "showpage", + ] if gs_windows_binary is not None: if not gs_windows_binary: - raise WindowsError('Unable to locate Ghostscript on paths') + raise OSError("Unable to locate Ghostscript on paths") command[0] = gs_windows_binary # push data through Ghostscript try: - with open(os.devnull, 'w+b') as devnull: - startupinfo = None - if sys.platform.startswith('win'): - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - subprocess.check_call(command, stdin=devnull, stdout=devnull, - startupinfo=startupinfo) - im = Image.open(outfile) - im.load() + startupinfo = None + if sys.platform.startswith("win"): + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.check_call(command, startupinfo=startupinfo) + out_im = Image.open(outfile) + out_im.load() finally: try: os.unlink(outfile) @@ -155,18 +151,21 @@ def Ghostscript(tile, size, fp, scale=1): except OSError: pass - return im.im.copy() + im = out_im.im.copy() + out_im.close() + return im -class PSFile(object): +class PSFile: """ Wrapper for bytesio object that treats either CR or LF as end of line. """ + def __init__(self, fp): self.fp = fp self.char = None - def seek(self, offset, whence=0): + def seek(self, offset, whence=io.SEEK_SET): self.char = None self.fp.seek(offset, whence) @@ -184,12 +183,12 @@ class PSFile(object): if self.char in b"\r\n": self.char = None - return s.decode('latin-1') + return s.decode("latin-1") def _accept(prefix): - return prefix[:4] == b"%!PS" or \ - (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + ## # Image plugin for Encapsulated Postscript. This plugin supports only @@ -223,7 +222,7 @@ class EpsImageFile(ImageFile.ImageFile): # Load EPS header s_raw = fp.readline() - s = s_raw.strip('\r\n') + s = s_raw.strip("\r\n") while s_raw: if s: @@ -245,8 +244,9 @@ class EpsImageFile(ImageFile.ImageFile): # put floating point values there anyway. 
box = [int(float(i)) for i in v.split()] self._size = box[2] - box[0], box[3] - box[1] - self.tile = [("eps", (0, 0) + self.size, offset, - (length, box))] + self.tile = [ + ("eps", (0, 0) + self.size, offset, (length, box)) + ] except Exception: pass @@ -261,15 +261,15 @@ class EpsImageFile(ImageFile.ImageFile): self.info[k[:8]] = k[9:] else: self.info[k] = "" - elif s[0] == '%': + elif s[0] == "%": # handle non-DSC Postscript comments that some # tools mistakenly put in the Comments section pass else: - raise IOError("bad EPS header") + raise OSError("bad EPS header") s_raw = fp.readline() - s = s_raw.strip('\r\n') + s = s_raw.strip("\r\n") if s and s[:1] != "%": break @@ -296,12 +296,12 @@ class EpsImageFile(ImageFile.ImageFile): self._size = int(x), int(y) return - s = fp.readline().strip('\r\n') + s = fp.readline().strip("\r\n") if not s: break if not box: - raise IOError("cannot determine EPS bounding box") + raise OSError("cannot determine EPS bounding box") def _find_offset(self, fp): @@ -309,7 +309,7 @@ class EpsImageFile(ImageFile.ImageFile): if s[:4] == b"%!PS": # for HEAD without binary preview - fp.seek(0, 2) + fp.seek(0, io.SEEK_END) length = fp.tell() offset = 0 elif i32(s[0:4]) == 0xC6D3D0C5: @@ -343,6 +343,7 @@ class EpsImageFile(ImageFile.ImageFile): # # -------------------------------------------------------------------- + def _save(im, fp, filename, eps=1): """EPS Writer for the Python Imaging Library.""" @@ -364,9 +365,8 @@ def _save(im, fp, filename, eps=1): base_fp = fp wrapped_fp = False if fp != sys.stdout: - if sys.version_info.major > 2: - fp = io.TextIOWrapper(fp, encoding='latin-1') - wrapped_fp = True + fp = io.TextIOWrapper(fp, encoding="latin-1") + wrapped_fp = True try: if eps: @@ -380,7 +380,7 @@ def _save(im, fp, filename, eps=1): fp.write("%%EndComments\n") fp.write("%%Page: 1 1\n") fp.write("%%ImageData: %d %d " % im.size) - fp.write("%d %d 0 1 1 \"%s\"\n" % operator) + fp.write('%d %d 0 1 1 "%s"\n' % operator) # # image header @@ -395,7 +395,7 @@ def _save(im, fp, filename, eps=1): if hasattr(fp, "flush"): fp.flush() - ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) + ImageFile._save(im, base_fp, [("eps", (0, 0) + im.size, 0, None)]) fp.write("\n%%%%EndBinary\n") fp.write("grestore end\n") @@ -405,6 +405,7 @@ def _save(im, fp, filename, eps=1): if wrapped_fp: fp.detach() + # # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/ExifTags.py b/server/www/packages/packages-linux/x64/PIL/ExifTags.py index a8ad26b..cecc3f2 100644 --- a/server/www/packages/packages-linux/x64/PIL/ExifTags.py +++ b/server/www/packages/packages-linux/x64/PIL/ExifTags.py @@ -18,11 +18,10 @@ # Maps EXIF tags to tag names. 
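# The table maps numeric EXIF tag -> human-readable name, so lookups go by
# tag id. Usage sketch:
#
#   from PIL import ExifTags
#   ExifTags.TAGS[0x010F]  # -> "Make"
#   ExifTags.TAGS[0x0110]  # -> "Model"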
TAGS = { - # possibly incomplete - 0x000b: "ProcessingSoftware", - 0x00fe: "NewSubfileType", - 0x00ff: "SubfileType", + 0x000B: "ProcessingSoftware", + 0x00FE: "NewSubfileType", + 0x00FF: "SubfileType", 0x0100: "ImageWidth", 0x0101: "ImageLength", 0x0102: "BitsPerSample", @@ -31,10 +30,10 @@ TAGS = { 0x0107: "Thresholding", 0x0108: "CellWidth", 0x0109: "CellLength", - 0x010a: "FillOrder", - 0x010d: "DocumentName", - 0x010e: "ImageDescription", - 0x010f: "Make", + 0x010A: "FillOrder", + 0x010D: "DocumentName", + 0x010E: "ImageDescription", + 0x010F: "Make", 0x0110: "Model", 0x0111: "StripOffsets", 0x0112: "Orientation", @@ -43,10 +42,10 @@ TAGS = { 0x0117: "StripByteCounts", 0x0118: "MinSampleValue", 0x0119: "MaxSampleValue", - 0x011a: "XResolution", - 0x011b: "YResolution", - 0x011c: "PlanarConfiguration", - 0x011d: "PageName", + 0x011A: "XResolution", + 0x011B: "YResolution", + 0x011C: "PlanarConfiguration", + 0x011D: "PageName", 0x0120: "FreeOffsets", 0x0121: "FreeByteCounts", 0x0122: "GrayResponseUnit", @@ -55,24 +54,24 @@ TAGS = { 0x0125: "T6Options", 0x0128: "ResolutionUnit", 0x0129: "PageNumber", - 0x012d: "TransferFunction", + 0x012D: "TransferFunction", 0x0131: "Software", 0x0132: "DateTime", - 0x013b: "Artist", - 0x013c: "HostComputer", - 0x013d: "Predictor", - 0x013e: "WhitePoint", - 0x013f: "PrimaryChromaticities", + 0x013B: "Artist", + 0x013C: "HostComputer", + 0x013D: "Predictor", + 0x013E: "WhitePoint", + 0x013F: "PrimaryChromaticities", 0x0140: "ColorMap", 0x0141: "HalftoneHints", 0x0142: "TileWidth", 0x0143: "TileLength", 0x0144: "TileOffsets", 0x0145: "TileByteCounts", - 0x014a: "SubIFDs", - 0x014c: "InkSet", - 0x014d: "InkNames", - 0x014e: "NumberOfInks", + 0x014A: "SubIFDs", + 0x014C: "InkSet", + 0x014D: "InkNames", + 0x014E: "NumberOfInks", 0x0150: "DotRange", 0x0151: "TargetPrinter", 0x0152: "ExtraSamples", @@ -83,9 +82,9 @@ TAGS = { 0x0157: "ClipPath", 0x0158: "XClipPathUnits", 0x0159: "YClipPathUnits", - 0x015a: "Indexed", - 0x015b: "JPEGTables", - 0x015f: "OPIProxy", + 0x015A: "Indexed", + 0x015B: "JPEGTables", + 0x015F: "OPIProxy", 0x0200: "JPEGProc", 0x0201: "JpegIFOffset", 0x0202: "JpegIFByteCount", @@ -99,20 +98,20 @@ TAGS = { 0x0212: "YCbCrSubSampling", 0x0213: "YCbCrPositioning", 0x0214: "ReferenceBlackWhite", - 0x02bc: "XMLPacket", + 0x02BC: "XMLPacket", 0x1000: "RelatedImageFileFormat", 0x1001: "RelatedImageWidth", 0x1002: "RelatedImageLength", 0x4746: "Rating", 0x4749: "RatingPercent", - 0x800d: "ImageID", - 0x828d: "CFARepeatPatternDim", - 0x828e: "CFAPattern", - 0x828f: "BatteryLevel", + 0x800D: "ImageID", + 0x828D: "CFARepeatPatternDim", + 0x828E: "CFAPattern", + 0x828F: "BatteryLevel", 0x8298: "Copyright", - 0x829a: "ExposureTime", - 0x829d: "FNumber", - 0x83bb: "IPTCNAA", + 0x829A: "ExposureTime", + 0x829D: "FNumber", + 0x83BB: "IPTCNAA", 0x8649: "ImageResources", 0x8769: "ExifOffset", 0x8773: "InterColorProfile", @@ -122,8 +121,8 @@ TAGS = { 0x8827: "ISOSpeedRatings", 0x8828: "OECF", 0x8829: "Interlace", - 0x882a: "TimeZoneOffset", - 0x882b: "SelfTimerMode", + 0x882A: "TimeZoneOffset", + 0x882B: "SelfTimerMode", 0x9000: "ExifVersion", 0x9003: "DateTimeOriginal", 0x9004: "DateTimeDigitized", @@ -138,142 +137,148 @@ TAGS = { 0x9207: "MeteringMode", 0x9208: "LightSource", 0x9209: "Flash", - 0x920a: "FocalLength", - 0x920b: "FlashEnergy", - 0x920c: "SpatialFrequencyResponse", - 0x920d: "Noise", + 0x920A: "FocalLength", + 0x920B: "FlashEnergy", + 0x920C: "SpatialFrequencyResponse", + 0x920D: "Noise", 0x9211: "ImageNumber", 0x9212: "SecurityClassification", 
0x9213: "ImageHistory", 0x9214: "SubjectLocation", 0x9215: "ExposureIndex", 0x9216: "TIFF/EPStandardID", - 0x927c: "MakerNote", + 0x927C: "MakerNote", 0x9286: "UserComment", 0x9290: "SubsecTime", 0x9291: "SubsecTimeOriginal", 0x9292: "SubsecTimeDigitized", - 0x9c9b: "XPTitle", - 0x9c9c: "XPComment", - 0x9c9d: "XPAuthor", - 0x9c9e: "XPKeywords", - 0x9c9f: "XPSubject", - 0xa000: "FlashPixVersion", - 0xa001: "ColorSpace", - 0xa002: "ExifImageWidth", - 0xa003: "ExifImageHeight", - 0xa004: "RelatedSoundFile", - 0xa005: "ExifInteroperabilityOffset", - 0xa20b: "FlashEnergy", - 0xa20c: "SpatialFrequencyResponse", - 0xa20e: "FocalPlaneXResolution", - 0xa20f: "FocalPlaneYResolution", - 0xa210: "FocalPlaneResolutionUnit", - 0xa214: "SubjectLocation", - 0xa215: "ExposureIndex", - 0xa217: "SensingMethod", - 0xa300: "FileSource", - 0xa301: "SceneType", - 0xa302: "CFAPattern", - 0xa401: "CustomRendered", - 0xa402: "ExposureMode", - 0xa403: "WhiteBalance", - 0xa404: "DigitalZoomRatio", - 0xa405: "FocalLengthIn35mmFilm", - 0xa406: "SceneCaptureType", - 0xa407: "GainControl", - 0xa408: "Contrast", - 0xa409: "Saturation", - 0xa40a: "Sharpness", - 0xa40b: "DeviceSettingDescription", - 0xa40c: "SubjectDistanceRange", - 0xa420: "ImageUniqueID", - 0xa430: "CameraOwnerName", - 0xa431: "BodySerialNumber", - 0xa432: "LensSpecification", - 0xa433: "LensMake", - 0xa434: "LensModel", - 0xa435: "LensSerialNumber", - 0xa500: "Gamma", - 0xc4a5: "PrintImageMatching", - 0xc612: "DNGVersion", - 0xc613: "DNGBackwardVersion", - 0xc614: "UniqueCameraModel", - 0xc615: "LocalizedCameraModel", - 0xc616: "CFAPlaneColor", - 0xc617: "CFALayout", - 0xc618: "LinearizationTable", - 0xc619: "BlackLevelRepeatDim", - 0xc61a: "BlackLevel", - 0xc61b: "BlackLevelDeltaH", - 0xc61c: "BlackLevelDeltaV", - 0xc61d: "WhiteLevel", - 0xc61e: "DefaultScale", - 0xc61f: "DefaultCropOrigin", - 0xc620: "DefaultCropSize", - 0xc621: "ColorMatrix1", - 0xc622: "ColorMatrix2", - 0xc623: "CameraCalibration1", - 0xc624: "CameraCalibration2", - 0xc625: "ReductionMatrix1", - 0xc626: "ReductionMatrix2", - 0xc627: "AnalogBalance", - 0xc628: "AsShotNeutral", - 0xc629: "AsShotWhiteXY", - 0xc62a: "BaselineExposure", - 0xc62b: "BaselineNoise", - 0xc62c: "BaselineSharpness", - 0xc62d: "BayerGreenSplit", - 0xc62e: "LinearResponseLimit", - 0xc62f: "CameraSerialNumber", - 0xc630: "LensInfo", - 0xc631: "ChromaBlurRadius", - 0xc632: "AntiAliasStrength", - 0xc633: "ShadowScale", - 0xc634: "DNGPrivateData", - 0xc635: "MakerNoteSafety", - 0xc65a: "CalibrationIlluminant1", - 0xc65b: "CalibrationIlluminant2", - 0xc65c: "BestQualityScale", - 0xc65d: "RawDataUniqueID", - 0xc68b: "OriginalRawFileName", - 0xc68c: "OriginalRawFileData", - 0xc68d: "ActiveArea", - 0xc68e: "MaskedAreas", - 0xc68f: "AsShotICCProfile", - 0xc690: "AsShotPreProfileMatrix", - 0xc691: "CurrentICCProfile", - 0xc692: "CurrentPreProfileMatrix", - 0xc6bf: "ColorimetricReference", - 0xc6f3: "CameraCalibrationSignature", - 0xc6f4: "ProfileCalibrationSignature", - 0xc6f6: "AsShotProfileName", - 0xc6f7: "NoiseReductionApplied", - 0xc6f8: "ProfileName", - 0xc6f9: "ProfileHueSatMapDims", - 0xc6fa: "ProfileHueSatMapData1", - 0xc6fb: "ProfileHueSatMapData2", - 0xc6fc: "ProfileToneCurve", - 0xc6fd: "ProfileEmbedPolicy", - 0xc6fe: "ProfileCopyright", - 0xc714: "ForwardMatrix1", - 0xc715: "ForwardMatrix2", - 0xc716: "PreviewApplicationName", - 0xc717: "PreviewApplicationVersion", - 0xc718: "PreviewSettingsName", - 0xc719: "PreviewSettingsDigest", - 0xc71a: "PreviewColorSpace", - 0xc71b: "PreviewDateTime", - 0xc71c: 
"RawImageDigest", - 0xc71d: "OriginalRawFileDigest", - 0xc71e: "SubTileBlockSize", - 0xc71f: "RowInterleaveFactor", - 0xc725: "ProfileLookTableDims", - 0xc726: "ProfileLookTableData", - 0xc740: "OpcodeList1", - 0xc741: "OpcodeList2", - 0xc74e: "OpcodeList3", - 0xc761: "NoiseProfile" + 0x9400: "AmbientTemperature", + 0x9401: "Humidity", + 0x9402: "Pressure", + 0x9403: "WaterDepth", + 0x9404: "Acceleration", + 0x9405: "CameraElevationAngle", + 0x9C9B: "XPTitle", + 0x9C9C: "XPComment", + 0x9C9D: "XPAuthor", + 0x9C9E: "XPKeywords", + 0x9C9F: "XPSubject", + 0xA000: "FlashPixVersion", + 0xA001: "ColorSpace", + 0xA002: "ExifImageWidth", + 0xA003: "ExifImageHeight", + 0xA004: "RelatedSoundFile", + 0xA005: "ExifInteroperabilityOffset", + 0xA20B: "FlashEnergy", + 0xA20C: "SpatialFrequencyResponse", + 0xA20E: "FocalPlaneXResolution", + 0xA20F: "FocalPlaneYResolution", + 0xA210: "FocalPlaneResolutionUnit", + 0xA214: "SubjectLocation", + 0xA215: "ExposureIndex", + 0xA217: "SensingMethod", + 0xA300: "FileSource", + 0xA301: "SceneType", + 0xA302: "CFAPattern", + 0xA401: "CustomRendered", + 0xA402: "ExposureMode", + 0xA403: "WhiteBalance", + 0xA404: "DigitalZoomRatio", + 0xA405: "FocalLengthIn35mmFilm", + 0xA406: "SceneCaptureType", + 0xA407: "GainControl", + 0xA408: "Contrast", + 0xA409: "Saturation", + 0xA40A: "Sharpness", + 0xA40B: "DeviceSettingDescription", + 0xA40C: "SubjectDistanceRange", + 0xA420: "ImageUniqueID", + 0xA430: "CameraOwnerName", + 0xA431: "BodySerialNumber", + 0xA432: "LensSpecification", + 0xA433: "LensMake", + 0xA434: "LensModel", + 0xA435: "LensSerialNumber", + 0xA500: "Gamma", + 0xC4A5: "PrintImageMatching", + 0xC612: "DNGVersion", + 0xC613: "DNGBackwardVersion", + 0xC614: "UniqueCameraModel", + 0xC615: "LocalizedCameraModel", + 0xC616: "CFAPlaneColor", + 0xC617: "CFALayout", + 0xC618: "LinearizationTable", + 0xC619: "BlackLevelRepeatDim", + 0xC61A: "BlackLevel", + 0xC61B: "BlackLevelDeltaH", + 0xC61C: "BlackLevelDeltaV", + 0xC61D: "WhiteLevel", + 0xC61E: "DefaultScale", + 0xC61F: "DefaultCropOrigin", + 0xC620: "DefaultCropSize", + 0xC621: "ColorMatrix1", + 0xC622: "ColorMatrix2", + 0xC623: "CameraCalibration1", + 0xC624: "CameraCalibration2", + 0xC625: "ReductionMatrix1", + 0xC626: "ReductionMatrix2", + 0xC627: "AnalogBalance", + 0xC628: "AsShotNeutral", + 0xC629: "AsShotWhiteXY", + 0xC62A: "BaselineExposure", + 0xC62B: "BaselineNoise", + 0xC62C: "BaselineSharpness", + 0xC62D: "BayerGreenSplit", + 0xC62E: "LinearResponseLimit", + 0xC62F: "CameraSerialNumber", + 0xC630: "LensInfo", + 0xC631: "ChromaBlurRadius", + 0xC632: "AntiAliasStrength", + 0xC633: "ShadowScale", + 0xC634: "DNGPrivateData", + 0xC635: "MakerNoteSafety", + 0xC65A: "CalibrationIlluminant1", + 0xC65B: "CalibrationIlluminant2", + 0xC65C: "BestQualityScale", + 0xC65D: "RawDataUniqueID", + 0xC68B: "OriginalRawFileName", + 0xC68C: "OriginalRawFileData", + 0xC68D: "ActiveArea", + 0xC68E: "MaskedAreas", + 0xC68F: "AsShotICCProfile", + 0xC690: "AsShotPreProfileMatrix", + 0xC691: "CurrentICCProfile", + 0xC692: "CurrentPreProfileMatrix", + 0xC6BF: "ColorimetricReference", + 0xC6F3: "CameraCalibrationSignature", + 0xC6F4: "ProfileCalibrationSignature", + 0xC6F6: "AsShotProfileName", + 0xC6F7: "NoiseReductionApplied", + 0xC6F8: "ProfileName", + 0xC6F9: "ProfileHueSatMapDims", + 0xC6FA: "ProfileHueSatMapData1", + 0xC6FB: "ProfileHueSatMapData2", + 0xC6FC: "ProfileToneCurve", + 0xC6FD: "ProfileEmbedPolicy", + 0xC6FE: "ProfileCopyright", + 0xC714: "ForwardMatrix1", + 0xC715: "ForwardMatrix2", + 0xC716: "PreviewApplicationName", 
+ 0xC717: "PreviewApplicationVersion", + 0xC718: "PreviewSettingsName", + 0xC719: "PreviewSettingsDigest", + 0xC71A: "PreviewColorSpace", + 0xC71B: "PreviewDateTime", + 0xC71C: "RawImageDigest", + 0xC71D: "OriginalRawFileDigest", + 0xC71E: "SubTileBlockSize", + 0xC71F: "RowInterleaveFactor", + 0xC725: "ProfileLookTableDims", + 0xC726: "ProfileLookTableData", + 0xC740: "OpcodeList1", + 0xC741: "OpcodeList2", + 0xC74E: "OpcodeList3", + 0xC761: "NoiseProfile", } ## diff --git a/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py index 63c195c..c2ce865 100644 --- a/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py @@ -23,6 +23,7 @@ def register_handler(handler): global _handler _handler = handler + # -------------------------------------------------------------------- # Image adapter @@ -62,7 +63,7 @@ class FITSStubImageFile(ImageFile.StubImageFile): def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): - raise IOError("FITS save handler not installed") + raise OSError("FITS save handler not installed") _handler.save(im, fp, filename) diff --git a/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py index c78c8c6..9bf7d74 100644 --- a/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py @@ -19,12 +19,10 @@ from . import Image, ImageFile, ImagePalette from ._binary import i8, i16le as i16, i32le as i32, o8 -__version__ = "0.2" - - # # decoder + def _accept(prefix): return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] @@ -33,6 +31,7 @@ def _accept(prefix): # Image plugin for the FLI/FLC animation format. Use the seek # method to load individual frames. 
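# Frame access for FLI/FLC uses the standard Image.seek()/tell() protocol,
# and frames decode sequentially. Usage sketch (file name is an assumption):
#
#   from PIL import Image, ImageSequence
#
#   with Image.open("anim.fli") as im:
#       n_frames = sum(1 for _ in ImageSequence.Iterator(im))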
+ class FliImageFile(ImageFile.ImageFile): format = "FLI" @@ -44,9 +43,11 @@ class FliImageFile(ImageFile.ImageFile): # HEAD s = self.fp.read(128) magic = i16(s[4:6]) - if not (magic in [0xAF11, 0xAF12] and - i16(s[14:16]) in [0, 3] and # flags - s[20:22] == b"\x00\x00"): # reserved + if not ( + magic in [0xAF11, 0xAF12] + and i16(s[14:16]) in [0, 3] # flags + and s[20:22] == b"\x00\x00" # reserved + ): raise SyntaxError("not an FLI/FLC file") # frames @@ -82,7 +83,7 @@ class FliImageFile(ImageFile.ImageFile): elif i16(s[4:6]) == 4: self._palette(palette, 0) - palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] + palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette] self.palette = ImagePalette.raw("RGB", b"".join(palette)) # set things up to decode first frame @@ -104,8 +105,8 @@ class FliImageFile(ImageFile.ImageFile): s = self.fp.read(n * 3) for n in range(0, len(s), 3): r = i8(s[n]) << shift - g = i8(s[n+1]) << shift - b = i8(s[n+2]) << shift + g = i8(s[n + 1]) << shift + b = i8(s[n + 2]) << shift palette[i] = (r, g, b) i += 1 @@ -131,6 +132,9 @@ class FliImageFile(ImageFile.ImageFile): self.__frame = -1 self.__fp.seek(self.__rewind) self.__offset = 128 + else: + # ensure that the previous frame was loaded + self.load() if frame != self.__frame + 1: raise ValueError("cannot seek to frame %d" % frame) @@ -147,13 +151,22 @@ class FliImageFile(ImageFile.ImageFile): framesize = i32(s) self.decodermaxblock = framesize - self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] + self.tile = [("fli", (0, 0) + self.size, self.__offset, None)] self.__offset += framesize def tell(self): return self.__frame + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + # # registry diff --git a/server/www/packages/packages-linux/x64/PIL/FontFile.py b/server/www/packages/packages-linux/x64/PIL/FontFile.py index 305e8af..979a1e3 100644 --- a/server/www/packages/packages-linux/x64/PIL/FontFile.py +++ b/server/www/packages/packages-linux/x64/PIL/FontFile.py @@ -14,9 +14,9 @@ # See the README file for information on usage and redistribution. # -from __future__ import print_function import os + from . import Image, _binary WIDTH = 800 @@ -33,7 +33,8 @@ def puti16(fp, values): ## # Base class for raster font file handlers. -class FontFile(object): + +class FontFile: bitmap = None @@ -46,7 +47,7 @@ class FontFile(object): return self.glyph[ix] def compile(self): - "Create metrics and bitmap" + """Create metrics and bitmap""" if self.bitmap: return @@ -61,7 +62,7 @@ class FontFile(object): w = w + (src[2] - src[0]) if w > WIDTH: lines += 1 - w = (src[2] - src[0]) + w = src[2] - src[0] maxwidth = max(maxwidth, w) xsize = maxwidth @@ -93,7 +94,7 @@ class FontFile(object): self.metrics[i] = d, dst, s def save(self, filename): - "Save font" + """Save font""" self.compile() @@ -103,7 +104,7 @@ class FontFile(object): # font metrics with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: fp.write(b"PILfont\n") - fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write((";;;;;;%d;\n" % self.ysize).encode("ascii")) # HACK!!! 
fp.write(b"DATA\n") for id in range(256): m = self.metrics[id] diff --git a/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py index 9f284fd..8d252c7 100644 --- a/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py @@ -14,35 +14,31 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import print_function - -from . import Image, ImageFile -from ._binary import i32le as i32, i8 - import olefile -__version__ = "0.1" +from . import Image, ImageFile +from ._binary import i8, i32le as i32 # we map from colour field tuples to (mode, rawmode) descriptors MODES = { # opacity - (0x00007ffe): ("A", "L"), + (0x00007FFE): ("A", "L"), # monochrome (0x00010000,): ("L", "L"), - (0x00018000, 0x00017ffe): ("RGBA", "LA"), + (0x00018000, 0x00017FFE): ("RGBA", "LA"), # photo YCC (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), - (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"), # standard RGB (NIFRGB) (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), - (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), + (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"), } # # -------------------------------------------------------------------- + def _accept(prefix): return prefix[:8] == olefile.MAGIC @@ -50,6 +46,7 @@ def _accept(prefix): ## # Image plugin for the FlashPix images. + class FpxImageFile(ImageFile.ImageFile): format = "FPX" @@ -62,7 +59,7 @@ class FpxImageFile(ImageFile.ImageFile): try: self.ole = olefile.OleFileIO(self.fp) - except IOError: + except OSError: raise SyntaxError("not an FPX file; invalid OLE file") if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": @@ -74,10 +71,9 @@ class FpxImageFile(ImageFile.ImageFile): # # get the Image Contents Property Set - prop = self.ole.getproperties([ - "Data Object Store %06d" % index, - "\005Image Contents" - ]) + prop = self.ole.getproperties( + ["Data Object Store %06d" % index, "\005Image Contents"] + ) # size (highest resolution) @@ -101,9 +97,12 @@ class FpxImageFile(ImageFile.ImageFile): s = prop[0x2000002 | id] colors = [] - for i in range(i32(s, 4)): + bands = i32(s, 4) + if bands > 4: + raise IOError("Invalid number of bands") + for i in range(bands): # note: for now, we ignore the "uncalibrated" flag - colors.append(i32(s, 8+i*4) & 0x7fffffff) + colors.append(i32(s, 8 + i * 4) & 0x7FFFFFFF) self.mode, self.rawmode = MODES[tuple(colors)] @@ -123,7 +122,7 @@ class FpxImageFile(ImageFile.ImageFile): stream = [ "Data Object Store %06d" % index, "Resolution %04d" % subimage, - "Subimage 0000 Header" + "Subimage 0000 Header", ] fp = self.ole.openstream(stream) @@ -142,7 +141,7 @@ class FpxImageFile(ImageFile.ImageFile): length = i32(s, 32) if size != self.size: - raise IOError("subimage mismatch") + raise OSError("subimage mismatch") # get tile descriptors fp.seek(28 + offset) @@ -155,17 +154,29 @@ class FpxImageFile(ImageFile.ImageFile): for i in range(0, len(s), length): - compression = i32(s, i+8) + compression = i32(s, i + 8) if compression == 0: - self.tile.append(("raw", (x, y, x+xtile, y+ytile), - i32(s, i) + 28, (self.rawmode))) + self.tile.append( + ( + "raw", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (self.rawmode), + ) + ) elif compression == 1: # FIXME: the fill decoder is not implemented - 
self.tile.append(("fill", (x, y, x+xtile, y+ytile), - i32(s, i) + 28, (self.rawmode, s[12:16]))) + self.tile.append( + ( + "fill", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (self.rawmode, s[12:16]), + ) + ) elif compression == 2: @@ -187,8 +198,14 @@ class FpxImageFile(ImageFile.ImageFile): # The image is stored as defined by rawmode jpegmode = rawmode - self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), - i32(s, i) + 28, (rawmode, jpegmode))) + self.tile.append( + ( + "jpeg", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (rawmode, jpegmode), + ) + ) # FIXME: jpeg tables are tile dependent; the prefix # data must be placed in the tile descriptor itself! @@ -197,7 +214,7 @@ class FpxImageFile(ImageFile.ImageFile): self.tile_prefix = self.jpeg[jpeg_tables] else: - raise IOError("unknown/invalid compression") + raise OSError("unknown/invalid compression") x = x + xtile if x >= xsize: @@ -211,11 +228,11 @@ class FpxImageFile(ImageFile.ImageFile): def load(self): if not self.fp: - self.fp = self.ole.openstream(self.stream[:2] + - ["Subimage 0000 Data"]) + self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) return ImageFile.ImageFile.load(self) + # # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py index 08ce0e0..096ccac 100644 --- a/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py @@ -20,7 +20,14 @@ has the following structure: {format_directory} {data} Where: -{header} = { u32:magic, u32:version, u32:width, u32:height, u32:mipmap_count, u32:format_count } +{header} = { + u32:magic, + u32:version, + u32:width, + u32:height, + u32:mipmap_count, + u32:format_count +} * The "magic" number is "FTEX". * "width" and "height" are the dimensions of the texture. @@ -46,8 +53,8 @@ Note: All data is stored in little-Endian (Intel) byte order. import struct from io import BytesIO -from . import Image, ImageFile +from . import Image, ImageFile MAGIC = b"FTEX" FORMAT_DXT1 = 0 @@ -59,8 +66,8 @@ class FtexImageFile(ImageFile.ImageFile): format_description = "Texture File Format (IW2:EOC)" def _open(self): - magic = struct.unpack("= 8 and \ - i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2) + return len(prefix) >= 8 and i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2) ## # Image plugin for the GIMP brush format. 
+ class GbrImageFile(ImageFile.ImageFile): format = "GBR" @@ -55,24 +55,23 @@ class GbrImageFile(ImageFile.ImageFile): if width <= 0 or height <= 0: raise SyntaxError("not a GIMP brush") if color_depth not in (1, 4): - raise SyntaxError( - "Unsupported GIMP brush color depth: %s" % color_depth) + raise SyntaxError("Unsupported GIMP brush color depth: %s" % color_depth) if version == 1: - comment_length = header_size-20 + comment_length = header_size - 20 else: - comment_length = header_size-28 + comment_length = header_size - 28 magic_number = self.fp.read(4) - if magic_number != b'GIMP': + if magic_number != b"GIMP": raise SyntaxError("not a GIMP brush, bad magic number") - self.info['spacing'] = i32(self.fp.read(4)) + self.info["spacing"] = i32(self.fp.read(4)) comment = self.fp.read(comment_length)[:-1] if color_depth == 1: self.mode = "L" else: - self.mode = 'RGBA' + self.mode = "RGBA" self._size = width, height @@ -88,6 +87,7 @@ class GbrImageFile(ImageFile.ImageFile): self.im = Image.core.new(self.mode, self.size) self.frombytes(self.fp.read(self._data_size)) + # # registry diff --git a/server/www/packages/packages-linux/x64/PIL/GdImageFile.py b/server/www/packages/packages-linux/x64/PIL/GdImageFile.py index 1361542..54c8871 100644 --- a/server/www/packages/packages-linux/x64/PIL/GdImageFile.py +++ b/server/www/packages/packages-linux/x64/PIL/GdImageFile.py @@ -23,18 +23,16 @@ # purposes only. -from . import ImageFile, ImagePalette +from . import ImageFile, ImagePalette, UnidentifiedImageError from ._binary import i8, i16be as i16, i32be as i32 -__version__ = "0.1" - - ## # Image plugin for the GD uncompressed format. Note that this format # is not supported by the standard Image.open function. To use # this plugin, you have to import the GdImageFile module and # use the GdImageFile.open function. + class GdImageFile(ImageFile.ImageFile): format = "GD" @@ -55,14 +53,17 @@ class GdImageFile(ImageFile.ImageFile): trueColorOffset = 2 if trueColor else 0 # transparency index - tindex = i32(s[7+trueColorOffset:7+trueColorOffset+4]) + tindex = i32(s[7 + trueColorOffset : 7 + trueColorOffset + 4]) if tindex < 256: self.info["transparency"] = tindex - self.palette = ImagePalette.raw("XBGR", s[7+trueColorOffset+4:7+trueColorOffset+4+256*4]) + self.palette = ImagePalette.raw( + "XBGR", s[7 + trueColorOffset + 4 : 7 + trueColorOffset + 4 + 256 * 4] + ) - self.tile = [("raw", (0, 0)+self.size, 7+trueColorOffset+4+256*4, - ("L", 0, 1))] + self.tile = [ + ("raw", (0, 0) + self.size, 7 + trueColorOffset + 4 + 256 * 4, ("L", 0, 1)) + ] def open(fp, mode="r"): @@ -81,4 +82,4 @@ def open(fp, mode="r"): try: return GdImageFile(fp) except SyntaxError: - raise IOError("cannot identify this image file") + raise UnidentifiedImageError("cannot identify this image file") diff --git a/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py index 107c015..1d94fc7 100644 --- a/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py @@ -24,17 +24,18 @@ # See the README file for information on usage and redistribution. # -from . import Image, ImageFile, ImagePalette, ImageChops, ImageSequence -from ._binary import i8, i16le as i16, o8, o16le as o16 - import itertools +import math +import os +import subprocess -__version__ = "0.9" - +from . 
import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i8, i16le as i16, o8, o16le as o16 # -------------------------------------------------------------------- # Identify/read GIF files + def _accept(prefix): return prefix[:6] in [b"GIF87a", b"GIF89a"] @@ -43,6 +44,7 @@ def _accept(prefix): # Image plugin for GIF images. This plugin supports both GIF87 and # GIF89 images. + class GifImageFile(ImageFile.ImageFile): format = "GIF" @@ -76,7 +78,7 @@ class GifImageFile(ImageFile.ImageFile): # check if palette contains colour indices p = self.fp.read(3 << bits) for i in range(0, len(p), 3): - if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + if not (i // 3 == i8(p[i]) == i8(p[i + 1]) == i8(p[i + 2])): p = ImagePalette.raw("RGB", p) self.global_palette = self.palette = p break @@ -120,6 +122,8 @@ class GifImageFile(ImageFile.ImageFile): if not self._seek_check(frame): return if frame < self.__frame: + if frame != 0: + self.im = None self._seek(0) last_frame = self.__frame @@ -164,6 +168,7 @@ class GifImageFile(ImageFile.ImageFile): self.im.paste(self.dispose, self.dispose_extent) from copy import copy + self.palette = copy(self.global_palette) info = {} @@ -201,7 +206,13 @@ class GifImageFile(ImageFile.ImageFile): # # comment extension # - info["comment"] = block + while block: + if "comment" in info: + info["comment"] += block + else: + info["comment"] = block + block = self.data() + continue elif i8(s) == 255: # # application extension @@ -223,6 +234,8 @@ class GifImageFile(ImageFile.ImageFile): # extent x0, y0 = i16(s[0:]), i16(s[2:]) x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + if x1 > self.size[0] or y1 > self.size[1]: + self._size = max(x1, self.size[0]), max(y1, self.size[1]) self.dispose_extent = x0, y0, x1, y1 flags = i8(s[8]) @@ -230,16 +243,14 @@ class GifImageFile(ImageFile.ImageFile): if flags & 128: bits = (flags & 7) + 1 - self.palette =\ - ImagePalette.raw("RGB", self.fp.read(3 << bits)) + self.palette = ImagePalette.raw("RGB", self.fp.read(3 << bits)) # image data bits = i8(self.fp.read(1)) self.__offset = self.fp.tell() - self.tile = [("gif", - (x0, y0, x1, y1), - self.__offset, - (bits, interlace))] + self.tile = [ + ("gif", (x0, y0, x1, y1), self.__offset, (bits, interlace)) + ] break else: @@ -252,8 +263,8 @@ class GifImageFile(ImageFile.ImageFile): self.dispose = None elif self.disposal_method == 2: # replace with background colour - self.dispose = Image.core.fill("P", self.size, - self.info["background"]) + Image._decompression_bomb_check(self.size) + self.dispose = Image.core.fill("P", self.size, self.info["background"]) else: # replace with previous contents if self.im: @@ -291,20 +302,25 @@ class GifImageFile(ImageFile.ImageFile): # we do this by pasting the updated area onto the previous # frame which we then use as the current image content updated = self._crop(self.im, self.dispose_extent) - self._prev_im.paste(updated, self.dispose_extent, - updated.convert('RGBA')) + self._prev_im.paste(updated, self.dispose_extent, updated.convert("RGBA")) self.im = self._prev_im self._prev_im = self.im.copy() + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + # -------------------------------------------------------------------- # Write GIF files -RAWMODE = { - "1": "L", - "L": "L", - "P": "P" -} +RAWMODE = {"1": "L", "L": "L", "P": "P"} def _normalize_mode(im, initial_call=False): @@ -355,19 +371,23 @@ def _normalize_palette(im, palette, info): if 
isinstance(palette, (bytes, bytearray, list)): source_palette = bytearray(palette[:768]) if isinstance(palette, ImagePalette.ImagePalette): - source_palette = bytearray(itertools.chain.from_iterable( - zip(palette.palette[:256], - palette.palette[256:512], - palette.palette[512:768]))) + source_palette = bytearray( + itertools.chain.from_iterable( + zip( + palette.palette[:256], + palette.palette[256:512], + palette.palette[512:768], + ) + ) + ) if im.mode == "P": if not source_palette: source_palette = im.im.getpalette("RGB")[:768] else: # L-mode if not source_palette: - source_palette = bytearray(i//3 for i in range(768)) - im.palette = ImagePalette.ImagePalette("RGB", - palette=source_palette) + source_palette = bytearray(i // 3 for i in range(768)) + im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette) used_palette_colors = _get_optimize(im, info) if used_palette_colors is not None: @@ -379,6 +399,8 @@ def _normalize_palette(im, palette, info): def _write_single_frame(im, fp, palette): im_out = _normalize_mode(im, True) + for k, v in im_out.info.items(): + im.encoderinfo.setdefault(k, v) im_out = _normalize_palette(im_out, palette, im.encoderinfo) for s in _get_global_header(im_out, im.encoderinfo): @@ -391,29 +413,31 @@ def _write_single_frame(im, fp, palette): _write_local_header(fp, im, (0, 0), flags) im_out.encoderconfig = (8, get_interlace(im)) - ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, - RAWMODE[im_out.mode])]) + ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]) fp.write(b"\0") # end of image data def _write_multiple_frames(im, fp, palette): - duration = im.encoderinfo.get("duration", None) - disposal = im.encoderinfo.get('disposal', None) + duration = im.encoderinfo.get("duration", im.info.get("duration")) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) im_frames = [] frame_count = 0 - for imSequence in itertools.chain([im], - im.encoderinfo.get("append_images", [])): + background_im = None + for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])): for im_frame in ImageSequence.Iterator(imSequence): # a copy is required here since seek can still mutate the image im_frame = _normalize_mode(im_frame.copy()) + if frame_count == 0: + for k, v in im_frame.info.items(): + im.encoderinfo.setdefault(k, v) im_frame = _normalize_palette(im_frame, palette, im.encoderinfo) encoderinfo = im.encoderinfo.copy() if isinstance(duration, (list, tuple)): - encoderinfo['duration'] = duration[frame_count] + encoderinfo["duration"] = duration[frame_count] if isinstance(disposal, (list, tuple)): encoderinfo["disposal"] = disposal[frame_count] frame_count += 1 @@ -421,45 +445,54 @@ def _write_multiple_frames(im, fp, palette): if im_frames: # delta frame previous = im_frames[-1] - if _get_palette_bytes(im_frame) == \ - _get_palette_bytes(previous['im']): - delta = ImageChops.subtract_modulo(im_frame, - previous['im']) + if encoderinfo.get("disposal") == 2: + if background_im is None: + background = _get_background( + im, + im.encoderinfo.get("background", im.info.get("background")), + ) + background_im = Image.new("P", im_frame.size, background) + background_im.putpalette(im_frames[0]["im"].palette) + base_im = background_im + else: + base_im = previous["im"] + if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im): + delta = ImageChops.subtract_modulo(im_frame, base_im) else: delta = ImageChops.subtract_modulo( - im_frame.convert('RGB'), previous['im'].convert('RGB')) + 
im_frame.convert("RGB"), base_im.convert("RGB") + ) bbox = delta.getbbox() if not bbox: # This frame is identical to the previous frame if duration: - previous['encoderinfo']['duration'] += \ - encoderinfo['duration'] + previous["encoderinfo"]["duration"] += encoderinfo["duration"] continue else: bbox = None - im_frames.append({ - 'im': im_frame, - 'bbox': bbox, - 'encoderinfo': encoderinfo - }) + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) if len(im_frames) > 1: for frame_data in im_frames: - im_frame = frame_data['im'] - if not frame_data['bbox']: + im_frame = frame_data["im"] + if not frame_data["bbox"]: # global header - for s in _get_global_header(im_frame, - frame_data['encoderinfo']): + for s in _get_global_header(im_frame, frame_data["encoderinfo"]): fp.write(s) offset = (0, 0) else: # compress difference - frame_data['encoderinfo']['include_color_table'] = True + frame_data["encoderinfo"]["include_color_table"] = True - im_frame = im_frame.crop(frame_data['bbox']) - offset = frame_data['bbox'][:2] - _write_frame_data(fp, im_frame, offset, frame_data['encoderinfo']) + im_frame = im_frame.crop(frame_data["bbox"]) + offset = frame_data["bbox"][:2] + _write_frame_data(fp, im_frame, offset, frame_data["encoderinfo"]) return True + elif "duration" in im.encoderinfo and isinstance( + im.encoderinfo["duration"], (list, tuple) + ): + # Since multiple frames will not be written, add together the frame durations + im.encoderinfo["duration"] = sum(im.encoderinfo["duration"]) def _save_all(im, fp, filename): @@ -467,12 +500,10 @@ def _save_all(im, fp, filename): def _save(im, fp, filename, save_all=False): - for k, v in im.info.items(): - im.encoderinfo.setdefault(k, v) # header - try: - palette = im.encoderinfo["palette"] - except KeyError: + if "palette" in im.encoderinfo or "palette" in im.info: + palette = im.encoderinfo.get("palette", im.info.get("palette")) + else: palette = None im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) @@ -519,7 +550,7 @@ def _write_local_header(fp, im, offset, flags): else: duration = 0 - disposal = int(im.encoderinfo.get('disposal', 0)) + disposal = int(im.encoderinfo.get("disposal", 0)) if transparent_color_exists or duration != 0 or disposal: packed_flag = 1 if transparent_color_exists else 0 @@ -527,48 +558,56 @@ def _write_local_header(fp, im, offset, flags): if not transparent_color_exists: transparency = 0 - fp.write(b"!" + - o8(249) + # extension intro - o8(4) + # length - o8(packed_flag) + # packed fields - o16(duration) + # duration - o8(transparency) + # transparency index - o8(0)) + fp.write( + b"!" + + o8(249) # extension intro + + o8(4) # length + + o8(packed_flag) # packed fields + + o16(duration) # duration + + o8(transparency) # transparency index + + o8(0) + ) - if "comment" in im.encoderinfo and \ - 1 <= len(im.encoderinfo["comment"]) <= 255: - fp.write(b"!" + - o8(254) + # extension intro - o8(len(im.encoderinfo["comment"])) + - im.encoderinfo["comment"] + - o8(0)) + if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]): + fp.write(b"!" + o8(254)) # extension intro + comment = im.encoderinfo["comment"] + if isinstance(comment, str): + comment = comment.encode() + for i in range(0, len(comment), 255): + subblock = comment[i : i + 255] + fp.write(o8(len(subblock)) + subblock) + fp.write(o8(0)) if "loop" in im.encoderinfo: number_of_loops = im.encoderinfo["loop"] - fp.write(b"!" 
+ - o8(255) + # extension intro - o8(11) + - b"NETSCAPE2.0" + - o8(3) + - o8(1) + - o16(number_of_loops) + # number of loops - o8(0)) - include_color_table = im.encoderinfo.get('include_color_table') + fp.write( + b"!" + + o8(255) # extension intro + + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) # number of loops + + o8(0) + ) + include_color_table = im.encoderinfo.get("include_color_table") if include_color_table: palette_bytes = _get_palette_bytes(im) color_table_size = _get_color_table_size(palette_bytes) if color_table_size: - flags = flags | 128 # local color table flag + flags = flags | 128 # local color table flag flags = flags | color_table_size - fp.write(b"," + - o16(offset[0]) + # offset - o16(offset[1]) + - o16(im.size[0]) + # size - o16(im.size[1]) + - o8(flags)) # flags + fp.write( + b"," + + o16(offset[0]) # offset + + o16(offset[1]) + + o16(im.size[0]) # size + + o16(im.size[1]) + + o8(flags) # flags + ) if include_color_table and color_table_size: fp.write(_get_header_palette(palette_bytes)) - fp.write(o8(8)) # bits + fp.write(o8(8)) # bits def _save_netpbm(im, fp, filename): @@ -579,40 +618,44 @@ def _save_netpbm(im, fp, filename): # If you need real GIF compression and/or RGB quantization, you # can use the external NETPBM/PBMPLUS utilities. See comments # below for information on how to enable this. - - import os - from subprocess import Popen, check_call, PIPE, CalledProcessError - file = im._dump() - - with open(filename, 'wb') as f: - if im.mode != "RGB": - with open(os.devnull, 'wb') as devnull: - check_call(["ppmtogif", file], stdout=f, stderr=devnull) - else: - # Pipe ppmquant output into ppmtogif - # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) - quant_cmd = ["ppmquant", "256", file] - togif_cmd = ["ppmtogif"] - with open(os.devnull, 'wb') as devnull: - quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=devnull) - togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, - stdout=f, stderr=devnull) - - # Allow ppmquant to receive SIGPIPE if ppmtogif exits - quant_proc.stdout.close() - - retcode = quant_proc.wait() - if retcode: - raise CalledProcessError(retcode, quant_cmd) - - retcode = togif_proc.wait() - if retcode: - raise CalledProcessError(retcode, togif_cmd) + tempfile = im._dump() try: - os.unlink(file) - except OSError: - pass + with open(filename, "wb") as f: + if im.mode != "RGB": + subprocess.check_call( + ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL + ) + else: + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename) + quant_cmd = ["ppmquant", "256", tempfile] + togif_cmd = ["ppmtogif"] + quant_proc = subprocess.Popen( + quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + togif_proc = subprocess.Popen( + togif_cmd, + stdin=quant_proc.stdout, + stdout=f, + stderr=subprocess.DEVNULL, + ) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, togif_cmd) + finally: + try: + os.unlink(tempfile) + except OSError: + pass # Force optimization so that we can test performance against @@ -642,7 +685,7 @@ def _get_optimize(im, info): # * If we have a 'large' image, the palette is in the noise. 
# create the new palette if not every color is used
-    optimise = _FORCE_OPTIMIZE or im.mode == 'L'
+    optimise = _FORCE_OPTIMIZE or im.mode == "L"
     if optimise or im.width * im.height < 512 * 512:
         # check which colors are used
         used_palette_colors = []
@@ -650,18 +693,21 @@
             if count:
                 used_palette_colors.append(i)
 
-        if optimise or (len(used_palette_colors) <= 128 and
-                        max(used_palette_colors) > len(used_palette_colors)):
+        if optimise or (
+            len(used_palette_colors) <= 128
+            and max(used_palette_colors) > len(used_palette_colors)
+        ):
             return used_palette_colors
 
 
 def _get_color_table_size(palette_bytes):
     # calculate the palette size for the header
-    import math
-    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
-    if color_table_size < 0:
-        color_table_size = 0
-    return color_table_size
+    if not palette_bytes:
+        return 0
+    elif len(palette_bytes) < 9:
+        return 1
+    else:
+        return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
 
 
 def _get_header_palette(palette_bytes):
@@ -676,7 +722,7 @@
     # add the missing amount of bytes
     # the palette has to be 2<<n bytes long
-    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes)//3
+    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
     if actual_target_size_diff > 0:
         palette_bytes += o8(0) * 3 * actual_target_size_diff
 
     return palette_bytes
@@ -692,6 +738,18 @@
     return im.palette.palette
 
 
+def _get_background(im, infoBackground):
+    background = 0
+    if infoBackground:
+        background = infoBackground
+    if isinstance(background, tuple):
+        # WebPImagePlugin stores an RGBA value in info["background"]
+        # So it must be converted to the same format as GifImagePlugin's
+        # info["background"] - a global color table index
+        background = im.palette.getcolor(background)
+    return background
+
+
 def _get_global_header(im, info):
     """Return a list of strings representing a GIF header"""
 
@@ -701,9 +759,9 @@
     version = b"87a"
     for extensionKey in ["transparency", "duration", "loop", "comment"]:
         if info and extensionKey in info:
-            if ((extensionKey == "duration" and info[extensionKey] == 0) or
-                (extensionKey == "comment" and
-                 not (1 <= len(info[extensionKey]) <= 255))):
+            if (extensionKey == "duration" and info[extensionKey] == 0) or (
+                extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255)
+            ):
                 continue
             version = b"89a"
             break
@@ -711,24 +769,23 @@
     if im.info.get("version") == b"89a":
         version = b"89a"
 
+    background = _get_background(im, info.get("background"))
+
     palette_bytes = _get_palette_bytes(im)
     color_table_size = _get_color_table_size(palette_bytes)
 
-    background = info["background"] if "background" in info else 0
-
     return [
-        b"GIF"+version +               # signature + version
-        o16(im.size[0]) +              # canvas width
-        o16(im.size[1]),               # canvas height
-
+        b"GIF"  # signature
+        + version  # version
+        + o16(im.size[0])  # canvas width
+        + o16(im.size[1]),  # canvas height
         # Logical Screen Descriptor
         # size of global color table + global color table flag
-        o8(color_table_size + 128),    # packed fields
+        o8(color_table_size + 128),  # packed fields
         # background + reserved/aspect
         o8(background) + o8(0),
-
         # Global Color Table
-        _get_header_palette(palette_bytes)
+        _get_header_palette(palette_bytes),
     ]
 
 
@@ -739,13 +796,15 @@
         # local image header
         _write_local_header(fp, im_frame, offset, 0)
 
-        ImageFile._save(im_frame, fp, [("gif", (0, 0)+im_frame.size, 0,
-                                        RAWMODE[im_frame.mode])])
+        ImageFile._save(
+            im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])]
+        )
 
         fp.write(b"\0")  # end of
image data finally: del im_frame.encoderinfo + # -------------------------------------------------------------------- # Legacy GIF utilities @@ -794,7 +853,8 @@ def getdata(im, offset=(0, 0), **params): :returns: List of Bytes containing gif encoded frame data """ - class Collector(object): + + class Collector: data = [] def write(self, data): diff --git a/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py b/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py index 10593da..1cacf57 100644 --- a/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py +++ b/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py @@ -13,7 +13,8 @@ # See the README file for information on usage and redistribution. # -from math import pi, log, sin, sqrt +from math import log, pi, sin, sqrt + from ._binary import o8 # -------------------------------------------------------------------- @@ -59,7 +60,7 @@ def sphere_decreasing(middle, pos): SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] -class GradientFile(object): +class GradientFile: gradient = None @@ -72,7 +73,7 @@ class GradientFile(object): for i in range(entries): - x = i / float(entries-1) + x = i / (entries - 1) while x1 < x: ix += 1 @@ -100,8 +101,8 @@ class GradientFile(object): ## # File handler for GIMP's gradient format. -class GimpGradientFile(GradientFile): +class GimpGradientFile(GradientFile): def __init__(self, fp): if fp.readline()[:13] != b"GIMP Gradient": @@ -131,7 +132,7 @@ class GimpGradientFile(GradientFile): cspace = int(s[12]) if cspace != 0: - raise IOError("cannot handle HSV colour space") + raise OSError("cannot handle HSV colour space") gradient.append((x0, x1, xm, rgb0, rgb1, segment)) diff --git a/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py b/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py index 6eef6a2..e3060ab 100644 --- a/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py +++ b/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py @@ -15,31 +15,30 @@ # import re -from ._binary import o8 +from ._binary import o8 ## # File handler for GIMP's palette format. 
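Note: the "class Collector(object):" -> "class Collector:" edit above (and the
matching edits to GimpPaletteFile just below, and to IcnsFile/IcoFile further
down) drops the explicit object base. On Python 3 every class is new-style, so
the two spellings are equivalent. Illustration only, not part of the diff:

    # Both spellings produce a new-style class rooted at object.
    class WithBase(object):
        pass

    class WithoutBase:
        pass

    assert WithBase.__mro__[-1] is object
    assert WithoutBase.__mro__[-1] is object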
-class GimpPaletteFile(object): + +class GimpPaletteFile: rawmode = "RGB" def __init__(self, fp): - self.palette = [o8(i)*3 for i in range(256)] + self.palette = [o8(i) * 3 for i in range(256)] if fp.readline()[:12] != b"GIMP Palette": raise SyntaxError("not a GIMP palette file") - i = 0 - - while i <= 255: + for i in range(256): s = fp.readline() - if not s: break + # skip fields and comment lines if re.match(br"\w+:|#", s): continue @@ -50,10 +49,7 @@ class GimpPaletteFile(object): if len(v) != 3: raise ValueError("bad palette entry") - if 0 <= i <= 255: - self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) - - i += 1 + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) self.palette = b"".join(self.palette) diff --git a/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py index 243ea2a..515c272 100644 --- a/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py @@ -28,6 +28,7 @@ def register_handler(handler): # -------------------------------------------------------------------- # Image adapter + def _accept(prefix): return prefix[0:4] == b"GRIB" and i8(prefix[7]) == 1 @@ -60,7 +61,7 @@ class GribStubImageFile(ImageFile.StubImageFile): def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): - raise IOError("GRIB save handler not installed") + raise OSError("GRIB save handler not installed") _handler.save(im, fp, filename) diff --git a/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py index 8783f80..362f2d3 100644 --- a/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py @@ -27,6 +27,7 @@ def register_handler(handler): # -------------------------------------------------------------------- # Image adapter + def _accept(prefix): return prefix[:8] == b"\x89HDF\r\n\x1a\n" @@ -59,7 +60,7 @@ class HDF5StubImageFile(ImageFile.StubImageFile): def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): - raise IOError("HDF5 save handler not installed") + raise OSError("HDF5 save handler not installed") _handler.save(im, fp, filename) diff --git a/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py index 21236d4..c003926 100644 --- a/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py @@ -15,16 +15,18 @@ # See the README file for information on usage and redistribution. 
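Note: the reworked GimpPaletteFile above replaces the hand-maintained counter
with a bounded "for i in range(256)" loop: EOF breaks out early, field and
comment lines are skipped, and malformed entries still raise ValueError. A
quick exercise of that parser against an in-memory palette, assuming this
vendored PIL package is importable; illustration only:

    import io

    from PIL.GimpPaletteFile import GimpPaletteFile

    data = b"GIMP Palette\nName: test\n# comment\n  0   0   0\n255 255 255\n"
    palette, rawmode = GimpPaletteFile(io.BytesIO(data)).getpalette()
    assert rawmode == "RGB"
    assert len(palette) == 768  # always 256 packed RGB triplets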
# -from PIL import Image, ImageFile, PngImagePlugin -from PIL._binary import i8 import io import os import shutil import struct +import subprocess import sys import tempfile -enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') +from PIL import Image, ImageFile, PngImagePlugin +from PIL._binary import i8 + +enable_jpeg2k = hasattr(Image.core, "jp2klib_version") if enable_jpeg2k: from PIL import Jpeg2KImagePlugin @@ -32,7 +34,7 @@ HEADERSIZE = 8 def nextheader(fobj): - return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + return struct.unpack(">4sI", fobj.read(HEADERSIZE)) def read_32t(fobj, start_length, size): @@ -40,8 +42,8 @@ def read_32t(fobj, start_length, size): (start, length) = start_length fobj.seek(start) sig = fobj.read(4) - if sig != b'\x00\x00\x00\x00': - raise SyntaxError('Unknown signature, expecting 0x00000000') + if sig != b"\x00\x00\x00\x00": + raise SyntaxError("Unknown signature, expecting 0x00000000") return read_32(fobj, (start + 4, length - 4), size) @@ -81,12 +83,8 @@ def read_32(fobj, start_length, size): if bytesleft <= 0: break if bytesleft != 0: - raise SyntaxError( - "Error reading channel [%r left]" % bytesleft - ) - band = Image.frombuffer( - "L", pixel_size, b"".join(data), "raw", "L", 0, 1 - ) + raise SyntaxError("Error reading channel [%r left]" % bytesleft) + band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1) im.im.putband(band.im, band_ix) return {"RGB": im} @@ -97,9 +95,7 @@ def read_mk(fobj, start_length, size): fobj.seek(start) pixel_size = (size[0] * size[2], size[1] * size[2]) sizesq = pixel_size[0] * pixel_size[1] - band = Image.frombuffer( - "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1 - ) + band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1) return {"A": band} @@ -107,73 +103,58 @@ def read_png_or_jpeg2000(fobj, start_length, size): (start, length) = start_length fobj.seek(start) sig = fobj.read(12) - if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a': + if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a": fobj.seek(start) im = PngImagePlugin.PngImageFile(fobj) return {"RGBA": im} - elif sig[:4] == b'\xff\x4f\xff\x51' \ - or sig[:4] == b'\x0d\x0a\x87\x0a' \ - or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + elif ( + sig[:4] == b"\xff\x4f\xff\x51" + or sig[:4] == b"\x0d\x0a\x87\x0a" + or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ): if not enable_jpeg2k: - raise ValueError('Unsupported icon subimage format (rebuild PIL ' - 'with JPEG 2000 support to fix this)') + raise ValueError( + "Unsupported icon subimage format (rebuild PIL " + "with JPEG 2000 support to fix this)" + ) # j2k, jpc or j2c fobj.seek(start) jp2kstream = fobj.read(length) f = io.BytesIO(jp2kstream) im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) - if im.mode != 'RGBA': - im = im.convert('RGBA') + if im.mode != "RGBA": + im = im.convert("RGBA") return {"RGBA": im} else: - raise ValueError('Unsupported icon subimage format') + raise ValueError("Unsupported icon subimage format") -class IcnsFile(object): +class IcnsFile: SIZES = { - (512, 512, 2): [ - (b'ic10', read_png_or_jpeg2000), - ], - (512, 512, 1): [ - (b'ic09', read_png_or_jpeg2000), - ], - (256, 256, 2): [ - (b'ic14', read_png_or_jpeg2000), - ], - (256, 256, 1): [ - (b'ic08', read_png_or_jpeg2000), - ], - (128, 128, 2): [ - (b'ic13', read_png_or_jpeg2000), - ], + (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)], + (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)], + (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)], + (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)], + (128, 128, 2): 
[(b"ic13", read_png_or_jpeg2000)], (128, 128, 1): [ - (b'ic07', read_png_or_jpeg2000), - (b'it32', read_32t), - (b't8mk', read_mk), - ], - (64, 64, 1): [ - (b'icp6', read_png_or_jpeg2000), - ], - (32, 32, 2): [ - (b'ic12', read_png_or_jpeg2000), - ], - (48, 48, 1): [ - (b'ih32', read_32), - (b'h8mk', read_mk), + (b"ic07", read_png_or_jpeg2000), + (b"it32", read_32t), + (b"t8mk", read_mk), ], + (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)], + (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)], + (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)], (32, 32, 1): [ - (b'icp5', read_png_or_jpeg2000), - (b'il32', read_32), - (b'l8mk', read_mk), - ], - (16, 16, 2): [ - (b'ic11', read_png_or_jpeg2000), + (b"icp5", read_png_or_jpeg2000), + (b"il32", read_32), + (b"l8mk", read_mk), ], + (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)], (16, 16, 1): [ - (b'icp4', read_png_or_jpeg2000), - (b'is32', read_32), - (b's8mk', read_mk), + (b"icp4", read_png_or_jpeg2000), + (b"is32", read_32), + (b"s8mk", read_mk), ], } @@ -185,17 +166,17 @@ class IcnsFile(object): self.dct = dct = {} self.fobj = fobj sig, filesize = nextheader(fobj) - if sig != b'icns': - raise SyntaxError('not an icns file') + if sig != b"icns": + raise SyntaxError("not an icns file") i = HEADERSIZE while i < filesize: sig, blocksize = nextheader(fobj) if blocksize <= 0: - raise SyntaxError('invalid block header') + raise SyntaxError("invalid block header") i += HEADERSIZE blocksize -= HEADERSIZE dct[sig] = (i, blocksize) - fobj.seek(blocksize, 1) + fobj.seek(blocksize, io.SEEK_CUR) i += blocksize def itersizes(self): @@ -233,7 +214,7 @@ class IcnsFile(object): size = (size[0], size[1], 1) channels = self.dataforsize(size) - im = channels.get('RGBA', None) + im = channels.get("RGBA", None) if im: return im @@ -248,6 +229,7 @@ class IcnsFile(object): ## # Image plugin for Mac OS icons. + class IcnsImageFile(ImageFile.ImageFile): """ PIL image support for Mac OS .icns files. @@ -264,13 +246,13 @@ class IcnsImageFile(ImageFile.ImageFile): def _open(self): self.icns = IcnsFile(self.fp) - self.mode = 'RGBA' - self.info['sizes'] = self.icns.itersizes() + self.mode = "RGBA" + self.info["sizes"] = self.icns.itersizes() self.best_size = self.icns.bestsize() - self.size = (self.best_size[0] * self.best_size[2], - self.best_size[1] * self.best_size[2]) - # Just use this to see if it's loaded or not yet. 
- self.tile = ('',) + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) @property def size(self): @@ -279,27 +261,33 @@ class IcnsImageFile(ImageFile.ImageFile): @size.setter def size(self, value): info_size = value - if info_size not in self.info['sizes'] and len(info_size) == 2: + if info_size not in self.info["sizes"] and len(info_size) == 2: info_size = (info_size[0], info_size[1], 1) - if info_size not in self.info['sizes'] and len(info_size) == 3 and \ - info_size[2] == 1: - simple_sizes = [(size[0] * size[2], size[1] * size[2]) - for size in self.info['sizes']] + if ( + info_size not in self.info["sizes"] + and len(info_size) == 3 + and info_size[2] == 1 + ): + simple_sizes = [ + (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"] + ] if value in simple_sizes: - info_size = self.info['sizes'][simple_sizes.index(value)] - if info_size not in self.info['sizes']: - raise ValueError( - "This is not one of the allowed sizes of this image") + info_size = self.info["sizes"][simple_sizes.index(value)] + if info_size not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") self._size = value def load(self): if len(self.size) == 3: self.best_size = self.size - self.size = (self.best_size[0] * self.best_size[2], - self.best_size[1] * self.best_size[2]) + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) Image.Image.load(self) - if not self.tile: + if self.im and self.im.size == self.size: + # Already loaded return self.load_prepare() # This is likely NOT the best way to do it, but whatever. @@ -311,9 +299,6 @@ class IcnsImageFile(ImageFile.ImageFile): self.im = im.im self.mode = im.mode self.size = im.size - self.fp = None - self.icns = None - self.tile = () self.load_end() @@ -329,66 +314,63 @@ def _save(im, fp, filename): fp.flush() # create the temporary set of pngs - iconset = tempfile.mkdtemp('.iconset') - provided_images = {im.width: im - for im in im.encoderinfo.get("append_images", [])} - last_w = None - for w in [16, 32, 128, 256, 512]: - prefix = 'icon_{}x{}'.format(w, w) + with tempfile.TemporaryDirectory(".iconset") as iconset: + provided_images = { + im.width: im for im in im.encoderinfo.get("append_images", []) + } + last_w = None + second_path = None + for w in [16, 32, 128, 256, 512]: + prefix = "icon_{}x{}".format(w, w) - first_path = os.path.join(iconset, prefix+'.png') - if last_w == w: - shutil.copyfile(second_path, first_path) - else: - im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS)) - im_w.save(first_path) + first_path = os.path.join(iconset, prefix + ".png") + if last_w == w: + shutil.copyfile(second_path, first_path) + else: + im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS)) + im_w.save(first_path) - second_path = os.path.join(iconset, prefix+'@2x.png') - im_w2 = provided_images.get(w*2, im.resize((w*2, w*2), Image.LANCZOS)) - im_w2.save(second_path) - last_w = w*2 + second_path = os.path.join(iconset, prefix + "@2x.png") + im_w2 = provided_images.get(w * 2, im.resize((w * 2, w * 2), Image.LANCZOS)) + im_w2.save(second_path) + last_w = w * 2 - # iconutil -c icns -o {} {} - from subprocess import Popen, PIPE, CalledProcessError + # iconutil -c icns -o {} {} - convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] - with open(os.devnull, 'wb') as devnull: - convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=devnull) + convert_cmd = ["iconutil", "-c", "icns", "-o", 
filename, iconset]
+        convert_proc = subprocess.Popen(
+            convert_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
+        )
 
-    convert_proc.stdout.close()
+        convert_proc.stdout.close()
 
-    retcode = convert_proc.wait()
+        retcode = convert_proc.wait()
 
-    # remove the temporary files
-    shutil.rmtree(iconset)
-
-    if retcode:
-        raise CalledProcessError(retcode, convert_cmd)
+        if retcode:
+            raise subprocess.CalledProcessError(retcode, convert_cmd)
 
 
-Image.register_open(IcnsImageFile.format, IcnsImageFile,
-                    lambda x: x[:4] == b'icns')
-Image.register_extension(IcnsImageFile.format, '.icns')
+Image.register_open(IcnsImageFile.format, IcnsImageFile, lambda x: x[:4] == b"icns")
+Image.register_extension(IcnsImageFile.format, ".icns")
 
-if sys.platform == 'darwin':
+if sys.platform == "darwin":
     Image.register_save(IcnsImageFile.format, _save)
     Image.register_mime(IcnsImageFile.format, "image/icns")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     if len(sys.argv) < 2:
         print("Syntax: python IcnsImagePlugin.py [file]")
         sys.exit()
 
-    imf = IcnsImageFile(open(sys.argv[1], 'rb'))
-    for size in imf.info['sizes']:
-        imf.size = size
-        imf.load()
-        im = imf.im
-        im.save('out-%s-%s-%s.png' % size)
-    im = Image.open(sys.argv[1])
-    im.save("out.png")
-    if sys.platform == 'windows':
-        os.startfile("out.png")
+    with open(sys.argv[1], "rb") as fp:
+        imf = IcnsImageFile(fp)
+        for size in imf.info["sizes"]:
+            imf.size = size
+            imf.save("out-%s-%s-%s.png" % size)
+    with Image.open(sys.argv[1]) as im:
+        im.save("out.png")
+    if sys.platform == "windows":
+        os.startfile("out.png")
diff --git a/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py
index 589ef3c..e4a7432 100644
--- a/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py
+++ b/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py
@@ -23,13 +23,12 @@
 
 import struct
+import warnings
 from io import BytesIO
+from math import ceil, log
 
-from . import Image, ImageFile, BmpImagePlugin, PngImagePlugin
+from . import BmpImagePlugin, Image, ImageFile, PngImagePlugin
 from ._binary import i8, i16le as i16, i32le as i32
-from math import log, ceil
-
-__version__ = "0.1"
 
 #
 # --------------------------------------------------------------------
@@ -39,16 +38,20 @@ _MAGIC = b"\0\0\1\0"
 
 def _save(im, fp, filename):
     fp.write(_MAGIC)  # (2+2)
-    sizes = im.encoderinfo.get("sizes",
-                               [(16, 16), (24, 24), (32, 32), (48, 48),
-                                (64, 64), (128, 128), (256, 256)])
+    sizes = im.encoderinfo.get(
+        "sizes",
+        [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)],
+    )
     width, height = im.size
-    sizes = filter(lambda x: False if (x[0] > width or x[1] > height or
-                                       x[0] > 256 or x[1] > 256) else True,
-                   sizes)
+    sizes = filter(
+        lambda x: False
+        if (x[0] > width or x[1] > height or x[0] > 256 or x[1] > 256)
+        else True,
+        sizes,
+    )
     sizes = list(sizes)
     fp.write(struct.pack("<H", len(sizes)))  # idCount(2)
             icon_header = {
-                'width': i8(s[0]),
-                'height': i8(s[1]),
-                'nb_color': i8(s[2]),  # No. of colors in image (0 if >=8bpp)
-                'reserved': i8(s[3]),
-                'planes': i16(s[4:]),
-                'bpp': i16(s[6:]),
-                'size': i32(s[8:]),
-                'offset': i32(s[12:])
+                "width": i8(s[0]),
+                "height": i8(s[1]),
+                "nb_color": i8(s[2]),  # No. of colors in image (0 if >=8bpp)
+                "reserved": i8(s[3]),
+                "planes": i16(s[4:]),
+                "bpp": i16(s[6:]),
+                "size": i32(s[8:]),
+                "offset": i32(s[12:]),
             }
 
             # See Wikipedia
-            for j in ('width', 'height'):
+            for j in ("width", "height"):
                 if not icon_header[j]:
                     icon_header[j] = 256
 
             # See Wikipedia notes about color depth.
# We need this just to differ images with equal sizes - icon_header['color_depth'] = (icon_header['bpp'] or - (icon_header['nb_color'] != 0 and - ceil(log(icon_header['nb_color'], - 2))) or 256) + icon_header["color_depth"] = ( + icon_header["bpp"] + or ( + icon_header["nb_color"] != 0 + and ceil(log(icon_header["nb_color"], 2)) + ) + or 256 + ) - icon_header['dim'] = (icon_header['width'], icon_header['height']) - icon_header['square'] = (icon_header['width'] * - icon_header['height']) + icon_header["dim"] = (icon_header["width"], icon_header["height"]) + icon_header["square"] = icon_header["width"] * icon_header["height"] self.entry.append(icon_header) - self.entry = sorted(self.entry, key=lambda x: x['color_depth']) + self.entry = sorted(self.entry, key=lambda x: x["color_depth"]) # ICO images are usually squares # self.entry = sorted(self.entry, key=lambda x: x['width']) - self.entry = sorted(self.entry, key=lambda x: x['square']) + self.entry = sorted(self.entry, key=lambda x: x["square"]) self.entry.reverse() def sizes(self): """ Get a list of all available icon sizes and color depths. """ - return {(h['width'], h['height']) for h in self.entry} + return {(h["width"], h["height"]) for h in self.entry} + + def getentryindex(self, size, bpp=False): + for (i, h) in enumerate(self.entry): + if size == h["dim"] and (bpp is False or bpp == h["color_depth"]): + return i + return 0 def getimage(self, size, bpp=False): """ Get an image from the icon """ - for (i, h) in enumerate(self.entry): - if size == h['dim'] and (bpp is False or bpp == h['color_depth']): - return self.frame(i) - return self.frame(0) + return self.frame(self.getentryindex(size, bpp)) def frame(self, idx): """ @@ -157,9 +167,9 @@ class IcoFile(object): header = self.entry[idx] - self.buf.seek(header['offset']) + self.buf.seek(header["offset"]) data = self.buf.read(8) - self.buf.seek(header['offset']) + self.buf.seek(header["offset"]) if data[:8] == PngImagePlugin._MAGIC: # png frame @@ -167,6 +177,7 @@ class IcoFile(object): else: # XOR + AND mask bmp frame im = BmpImagePlugin.DibImageFile(self.buf) + Image._decompression_bomb_check(im.size) # change tile dimension to only encompass XOR image im._size = (im.size[0], int(im.size[1] / 2)) @@ -194,11 +205,11 @@ class IcoFile(object): # convert to an 8bpp grayscale image mask = Image.frombuffer( - 'L', # 8bpp - im.size, # (w, h) - alpha_bytes, # source chars - 'raw', # raw decoder - ('L', 0, -1) # 8bpp inverted, unpadded, reversed + "L", # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + "raw", # raw decoder + ("L", 0, -1), # 8bpp inverted, unpadded, reversed ) else: # get AND image from end of bitmap @@ -210,8 +221,7 @@ class IcoFile(object): # the total mask data is # padded row size * height / bits per char - and_mask_offset = o + int(im.size[0] * im.size[1] * - (bpp / 8.0)) + and_mask_offset = o + int(im.size[0] * im.size[1] * (bpp / 8.0)) total_bytes = int((w * im.size[1]) / 8) self.buf.seek(and_mask_offset) @@ -219,17 +229,17 @@ class IcoFile(object): # convert raw data to image mask = Image.frombuffer( - '1', # 1 bpp - im.size, # (w, h) - mask_data, # source chars - 'raw', # raw decoder - ('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed + "1", # 1 bpp + im.size, # (w, h) + mask_data, # source chars + "raw", # raw decoder + ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed ) # now we have two images, im is XOR image and mask is AND image # apply mask image as alpha channel - im = im.convert('RGBA') + im = im.convert("RGBA") im.putalpha(mask) 
return im @@ -238,6 +248,7 @@ class IcoFile(object): ## # Image plugin for Windows Icon files. + class IcoImageFile(ImageFile.ImageFile): """ PIL read-only image support for Microsoft Windows .ico files. @@ -250,17 +261,21 @@ class IcoImageFile(ImageFile.ImageFile): Handles classic, XP and Vista icon formats. + When saving, PNG compression is used. Support for this was only added in + Windows Vista. + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis . https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki """ + format = "ICO" format_description = "Windows Icon" def _open(self): self.ico = IcoFile(self.fp) - self.info['sizes'] = self.ico.sizes() - self.size = self.ico.entry[0]['dim'] + self.info["sizes"] = self.ico.sizes() + self.size = self.ico.entry[0]["dim"] self.load() @property @@ -269,23 +284,35 @@ class IcoImageFile(ImageFile.ImageFile): @size.setter def size(self, value): - if value not in self.info['sizes']: - raise ValueError( - "This is not one of the allowed sizes of this image") + if value not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") self._size = value def load(self): + if self.im and self.im.size == self.size: + # Already loaded + return im = self.ico.getimage(self.size) # if tile is PNG, it won't really be loaded yet im.load() self.im = im.im self.mode = im.mode - self.size = im.size + if im.size != self.size: + warnings.warn("Image was not the expected size") + + index = self.ico.getentryindex(self.size) + sizes = list(self.info["sizes"]) + sizes[index] = im.size + self.info["sizes"] = set(sizes) + + self.size = im.size def load_seek(self): # Flag the ImageFile.Parser so that it # just does all the decode at the end. pass + + # # -------------------------------------------------------------------- @@ -293,3 +320,5 @@ class IcoImageFile(ImageFile.ImageFile): Image.register_open(IcoImageFile.format, IcoImageFile, _accept) Image.register_save(IcoImageFile.format, _save) Image.register_extension(IcoImageFile.format, ".ico") + +Image.register_mime(IcoImageFile.format, "image/x-icon") diff --git a/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py index 2896bb4..8b03f35 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py @@ -26,13 +26,12 @@ # +import os import re + from . import Image, ImageFile, ImagePalette from ._binary import i8 -__version__ = "0.7" - - # -------------------------------------------------------------------- # Standard tags @@ -46,8 +45,17 @@ SCALE = "Scale (x,y)" SIZE = "Image size (x*y)" MODE = "Image type" -TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, - SCALE: 0, SIZE: 0, MODE: 0} +TAGS = { + COMMENT: 0, + DATE: 0, + EQUIPMENT: 0, + FRAMES: 0, + LUT: 0, + NAME: 0, + SCALE: 0, + SIZE: 0, + MODE: 0, +} OPEN = { # ifunc93/p3cfunc formats @@ -69,6 +77,7 @@ OPEN = { "RYB3 image": ("RGB", "RYB;T"), # extensions "LA image": ("LA", "LA;L"), + "PA image": ("LA", "PA;L"), "RGBA image": ("RGBA", "RGBA;L"), "RGBX image": ("RGBX", "RGBX;L"), "CMYK image": ("CMYK", "CMYK;L"), @@ -105,6 +114,7 @@ def number(s): ## # Image plugin for the IFUNC IM file format. 
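Note: with the IcoImagePlugin changes above, a caller selects a frame by
assigning one of info["sizes"] to .size before load(); anything else raises
ValueError, and load() now warns and corrects info["sizes"] when a frame's
real size differs from its directory entry. Usage sketch ("icon.ico" is a
placeholder path, not a file shipped with this repository):

    from PIL import Image

    with Image.open("icon.ico") as im:
        print(sorted(im.info["sizes"]))  # e.g. [(16, 16), (32, 32), (48, 48)]
        im.size = (32, 32)               # must be one of info["sizes"]
        im.load()
        im.save("icon-32.png")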
+ class ImImageFile(ImageFile.ImageFile): format = "IM" @@ -137,7 +147,7 @@ class ImImageFile(ImageFile.ImageFile): if s == b"\r": continue - if not s or s == b'\0' or s == b'\x1A': + if not s or s == b"\0" or s == b"\x1A": break # FIXME: this may read whole file if not a text file @@ -146,14 +156,14 @@ class ImImageFile(ImageFile.ImageFile): if len(s) > 100: raise SyntaxError("not an IM file") - if s[-2:] == b'\r\n': + if s[-2:] == b"\r\n": s = s[:-2] - elif s[-1:] == b'\n': + elif s[-1:] == b"\n": s = s[:-1] try: m = split.match(s) - except re.error as v: + except re.error: raise SyntaxError("not an IM file") if m: @@ -162,8 +172,8 @@ class ImImageFile(ImageFile.ImageFile): # Don't know if this is the correct encoding, # but a decent guess (I guess) - k = k.decode('latin-1', 'replace') - v = v.decode('latin-1', 'replace') + k = k.decode("latin-1", "replace") + v = v.decode("latin-1", "replace") # Convert value as appropriate if k in [FRAMES, SCALE, SIZE]: @@ -189,8 +199,9 @@ class ImImageFile(ImageFile.ImageFile): else: - raise SyntaxError("Syntax error in IM header: " + - s.decode('ascii', 'replace')) + raise SyntaxError( + "Syntax error in IM header: " + s.decode("ascii", "replace") + ) if not n: raise SyntaxError("Not an IM file") @@ -200,7 +211,7 @@ class ImImageFile(ImageFile.ImageFile): self.mode = self.info[MODE] # Skip forward to start of image data - while s and s[0:1] != b'\x1A': + while s and s[0:1] != b"\x1A": s = self.fp.read(1) if not s: raise SyntaxError("File truncated") @@ -211,20 +222,21 @@ class ImImageFile(ImageFile.ImageFile): greyscale = 1 # greyscale palette linear = 1 # linear greyscale palette for i in range(256): - if palette[i] == palette[i+256] == palette[i+512]: + if palette[i] == palette[i + 256] == palette[i + 512]: if i8(palette[i]) != i: linear = 0 else: greyscale = 0 - if self.mode == "L" or self.mode == "LA": + if self.mode in ["L", "LA", "P", "PA"]: if greyscale: if not linear: self.lut = [i8(c) for c in palette[:256]] else: - if self.mode == "L": + if self.mode in ["L", "P"]: self.mode = self.rawmode = "P" - elif self.mode == "LA": - self.mode = self.rawmode = "PA" + elif self.mode in ["LA", "PA"]: + self.mode = "PA" + self.rawmode = "PA;L" self.palette = ImagePalette.raw("RGB;L", palette) elif self.mode == "RGB": if not greyscale or not linear: @@ -243,8 +255,7 @@ class ImImageFile(ImageFile.ImageFile): # use bit decoder (if necessary) bits = int(self.rawmode[2:]) if bits not in [8, 16, 32]: - self.tile = [("bit", (0, 0)+self.size, offs, - (bits, 8, 3, 0, -1))] + self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))] return except ValueError: pass @@ -253,13 +264,14 @@ class ImImageFile(ImageFile.ImageFile): # Old LabEye/3PC files. 
Would be very surprised if anyone # ever stumbled upon such a file ;-) size = self.size[0] * self.size[1] - self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), - ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), - ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] + self.tile = [ + ("raw", (0, 0) + self.size, offs, ("G", 0, -1)), + ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)), + ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)), + ] else: # LabEye/IFUNC files - self.tile = [("raw", (0, 0)+self.size, offs, - (self.rawmode, 0, -1))] + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] @property def n_frames(self): @@ -285,11 +297,21 @@ class ImImageFile(ImageFile.ImageFile): self.fp = self.__fp - self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] def tell(self): return self.frame + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + # # -------------------------------------------------------------------- # Save IM files @@ -311,7 +333,7 @@ SAVE = { "RGBA": ("RGBA", "RGBA;L"), "RGBX": ("RGBX", "RGBX;L"), "CMYK": ("CMYK", "CMYK;L"), - "YCbCr": ("YCC", "YCbCr;L") + "YCbCr": ("YCC", "YCbCr;L"), } @@ -324,17 +346,25 @@ def _save(im, fp, filename): frames = im.encoderinfo.get("frames", 1) - fp.write(("Image type: %s image\r\n" % image_type).encode('ascii')) + fp.write(("Image type: %s image\r\n" % image_type).encode("ascii")) if filename: - fp.write(("Name: %s\r\n" % filename).encode('ascii')) - fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) - fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii')) - if im.mode == "P": + # Each line must be 100 characters or less, + # or: SyntaxError("not an IM file") + # 8 characters are used for "Name: " and "\r\n" + # Keep just the filename, ditch the potentially overlong path + name, ext = os.path.splitext(os.path.basename(filename)) + name = "".join([name[: 92 - len(ext)], ext]) + + fp.write(("Name: %s\r\n" % name).encode("ascii")) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) + fp.write(("File size (no of images): %d\r\n" % frames).encode("ascii")) + if im.mode in ["P", "PA"]: fp.write(b"Lut: 1\r\n") - fp.write(b"\000" * (511-fp.tell()) + b"\032") - if im.mode == "P": + fp.write(b"\000" * (511 - fp.tell()) + b"\032") + if im.mode in ["P", "PA"]: fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes - ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))]) + # # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/Image.py b/server/www/packages/packages-linux/x64/PIL/Image.py index edea312..3ced965 100644 --- a/server/www/packages/packages-linux/x64/PIL/Image.py +++ b/server/www/packages/packages-linux/x64/PIL/Image.py @@ -24,15 +24,50 @@ # See the README file for information on usage and redistribution. # -# VERSION is deprecated and will be removed in Pillow 6.0.0. -# PILLOW_VERSION is deprecated and will be removed after that. -# Use __version__ instead. -from . 
import VERSION, PILLOW_VERSION, __version__, _plugins -from ._util import py3 - +import atexit +import builtins +import io import logging -import warnings import math +import numbers +import os +import struct +import sys +import tempfile +import warnings +from collections.abc import Callable, MutableMapping +from pathlib import Path + +# VERSION was removed in Pillow 6.0.0. +# PILLOW_VERSION is deprecated and will be removed in a future release. +# Use __version__ instead. +from . import ( + ImageMode, + TiffTags, + UnidentifiedImageError, + __version__, + _plugins, + _raise_version_warning, +) +from ._binary import i8, i32le +from ._util import deferred_error, isPath + +if sys.version_info >= (3, 7): + + def __getattr__(name): + if name == "PILLOW_VERSION": + _raise_version_warning() + return __version__ + raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name)) + + +else: + + from . import PILLOW_VERSION + + # Silence warning + assert PILLOW_VERSION + logger = logging.getLogger(__name__) @@ -45,12 +80,6 @@ class DecompressionBombError(Exception): pass -class _imaging_not_installed(object): - # module placeholder - def __getattr__(self, id): - raise ImportError("The _imaging C module is not installed") - - # Limit to around a quarter gigabyte for a 24 bit (3 bpp) image MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3) @@ -62,91 +91,38 @@ try: # Also note that Image.core is not a publicly documented interface, # and should be considered private and subject to change. from . import _imaging as core - if __version__ != getattr(core, 'PILLOW_VERSION', None): - raise ImportError("The _imaging extension was built for another " - "version of Pillow or PIL:\n" - "Core version: %s\n" - "Pillow version: %s" % - (getattr(core, 'PILLOW_VERSION', None), - __version__)) + + if __version__ != getattr(core, "PILLOW_VERSION", None): + raise ImportError( + "The _imaging extension was built for another version of Pillow or PIL:\n" + "Core version: %s\n" + "Pillow version: %s" % (getattr(core, "PILLOW_VERSION", None), __version__) + ) except ImportError as v: - core = _imaging_not_installed() + core = deferred_error(ImportError("The _imaging C module is not installed.")) # Explanations for ways that we know we might have an import error if str(v).startswith("Module use of python"): # The _imaging C module is present, but not compiled for # the right version (windows only). Print a warning, if # possible. warnings.warn( - "The _imaging extension was built for another version " - "of Python.", - RuntimeWarning - ) + "The _imaging extension was built for another version of Python.", + RuntimeWarning, + ) elif str(v).startswith("The _imaging extension"): warnings.warn(str(v), RuntimeWarning) - elif "Symbol not found: _PyUnicodeUCS2_" in str(v): - # should match _PyUnicodeUCS2_FromString and - # _PyUnicodeUCS2_AsLatin1String - warnings.warn( - "The _imaging extension was built for Python with UCS2 support; " - "recompile Pillow or build Python --without-wide-unicode. ", - RuntimeWarning - ) - elif "Symbol not found: _PyUnicodeUCS4_" in str(v): - # should match _PyUnicodeUCS4_FromString and - # _PyUnicodeUCS4_AsLatin1String - warnings.warn( - "The _imaging extension was built for Python with UCS4 support; " - "recompile Pillow or build Python --with-wide-unicode. ", - RuntimeWarning - ) # Fail here anyway. Don't let people run with a mostly broken Pillow. # see docs/porting.rst raise -try: - import builtins -except ImportError: - import __builtin__ - builtins = __builtin__ - -from . 
import ImageMode -from ._binary import i8 -from ._util import isPath, isStringType, deferred_error - -import os -import sys -import io -import struct -import atexit - -# type stuff -import numbers -try: - # Python 3 - from collections.abc import Callable -except ImportError: - # Python 2.7 - from collections import Callable - # works everywhere, win for pypy, not cpython -USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') +USE_CFFI_ACCESS = hasattr(sys, "pypy_version_info") try: import cffi - HAS_CFFI = True except ImportError: - HAS_CFFI = False - -try: - from pathlib import Path - HAS_PATHLIB = True -except ImportError: - try: - from pathlib2 import Path - HAS_PATHLIB = True - except ImportError: - HAS_PATHLIB = False + cffi = None def isImageType(t): @@ -164,7 +140,7 @@ def isImageType(t): # -# Constants (also defined in _imagingmodule.c!) +# Constants NONE = 0 @@ -177,14 +153,14 @@ ROTATE_270 = 4 TRANSPOSE = 5 TRANSVERSE = 6 -# transforms +# transforms (also defined in Imaging.h) AFFINE = 0 EXTENT = 1 PERSPECTIVE = 2 QUAD = 3 MESH = 4 -# resampling filters +# resampling filters (also defined in Imaging.h) NEAREST = NONE = 0 BOX = 4 BILINEAR = LINEAR = 2 @@ -192,6 +168,9 @@ HAMMING = 5 BICUBIC = CUBIC = 3 LANCZOS = ANTIALIAS = 1 +_filters_support = {BOX: 0.5, BILINEAR: 1.0, HAMMING: 1.0, BICUBIC: 2.0, LANCZOS: 3.0} + + # dithers NEAREST = NONE = 0 ORDERED = 1 # Not yet implemented @@ -212,7 +191,7 @@ NORMAL = 0 SEQUENCE = 1 CONTAINER = 2 -if hasattr(core, 'DEFAULT_STRATEGY'): +if hasattr(core, "DEFAULT_STRATEGY"): DEFAULT_STRATEGY = core.DEFAULT_STRATEGY FILTERED = core.FILTERED HUFFMAN_ONLY = core.HUFFMAN_ONLY @@ -238,13 +217,12 @@ ENCODERS = {} _MODEINFO = { # NOTE: this table will be removed in future versions. use # getmode* functions or ImageMode descriptors instead. - # official modes "1": ("L", "L", ("1",)), "L": ("L", "L", ("L",)), "I": ("L", "I", ("I",)), "F": ("L", "F", ("F",)), - "P": ("RGB", "L", ("P",)), + "P": ("P", "L", ("P",)), "RGB": ("RGB", "L", ("R", "G", "B")), "RGBX": ("RGB", "L", ("R", "G", "B", "X")), "RGBA": ("RGB", "L", ("R", "G", "B", "A")), @@ -252,46 +230,44 @@ _MODEINFO = { "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), "LAB": ("RGB", "L", ("L", "A", "B")), "HSV": ("RGB", "L", ("H", "S", "V")), - # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and # BGR;24. Use these modes only if you know exactly what you're # doing... 
-    }
+}
 
-if sys.byteorder == 'little':
-    _ENDIAN = '<'
+if sys.byteorder == "little":
+    _ENDIAN = "<"
 else:
-    _ENDIAN = '>'
+    _ENDIAN = ">"
 
 _MODE_CONV = {
     # official modes
-    "1": ('|b1', None),  # Bits need to be extended to bytes
-    "L": ('|u1', None),
-    "LA": ('|u1', 2),
-    "I": (_ENDIAN + 'i4', None),
-    "F": (_ENDIAN + 'f4', None),
-    "P": ('|u1', None),
-    "RGB": ('|u1', 3),
-    "RGBX": ('|u1', 4),
-    "RGBA": ('|u1', 4),
-    "CMYK": ('|u1', 4),
-    "YCbCr": ('|u1', 3),
-    "LAB": ('|u1', 3),  # UNDONE - unsigned |u1i1i1
-    "HSV": ('|u1', 3),
+    "1": ("|b1", None),  # Bits need to be extended to bytes
+    "L": ("|u1", None),
+    "LA": ("|u1", 2),
+    "I": (_ENDIAN + "i4", None),
+    "F": (_ENDIAN + "f4", None),
+    "P": ("|u1", None),
+    "RGB": ("|u1", 3),
+    "RGBX": ("|u1", 4),
+    "RGBA": ("|u1", 4),
+    "CMYK": ("|u1", 4),
+    "YCbCr": ("|u1", 3),
+    "LAB": ("|u1", 3),  # UNDONE - unsigned |u1i1i1
+    "HSV": ("|u1", 3),
     # I;16 == I;16L, and I;32 == I;32L
-    "I;16": ('<u2', None),
-    "I;16B": ('>u2', None),
-    "I;16L": ('<u2', None),
-    "I;16S": ('<i2', None),
-    "I;16BS": ('>i2', None),
-    "I;16LS": ('<i2', None),
-    "I;32": ('<u4', None),
-    "I;32B": ('>u4', None),
-    "I;32L": ('<u4', None),
-    "I;32S": ('<i4', None),
-    "I;32BS": ('>i4', None),
-    "I;32LS": ('<i4', None),
+    "I;16": ("<u2", None),
+    "I;16B": (">u2", None),
+    "I;16L": ("<u2", None),
+    "I;16S": ("<i2", None),
+    "I;16BS": (">i2", None),
+    "I;16LS": ("<i2", None),
+    "I;32": ("<u4", None),
+    "I;32B": (">u4", None),
+    "I;32L": ("<u4", None),
+    "I;32S": ("<i4", None),
+    "I;32BS": (">i4", None),
+    "I;32LS": ("<i4", None),
 }
 
-    if sys.version_info.major >= 3:
-
-        def __del__(self):
-            if (hasattr(self, 'fp') and hasattr(self, '_exclusive_fp')
-                    and self.fp and self._exclusive_fp):
-                self.fp.close()
-            self.fp = None
-
     def _copy(self):
         self.load()
         self.im = self.im.copy()
@@ -617,11 +619,9 @@ class Image(object):
         self.load()
 
     def _dump(self, file=None, format=None, **options):
-        import tempfile
-
-        suffix = ''
+        suffix = ""
         if format:
-            suffix = '.'+format
+            suffix = "." + format
 
         if not file:
             f, filename = tempfile.mkstemp(suffix)
@@ -641,35 +641,34 @@ class Image(object):
         return filename
 
     def __eq__(self, other):
-        return (isinstance(other, Image) and
-                self.__class__.__name__ == other.__class__.__name__ and
-                self.mode == other.mode and
-                self.size == other.size and
-                self.info == other.info and
-                self.category == other.category and
-                self.readonly == other.readonly and
-                self.getpalette() == other.getpalette() and
-                self.tobytes() == other.tobytes())
-
-    def __ne__(self, other):
-        eq = (self == other)
-        return not eq
+        return (
+            self.__class__ is other.__class__
+            and self.mode == other.mode
+            and self.size == other.size
+            and self.info == other.info
+            and self.category == other.category
+            and self.readonly == other.readonly
+            and self.getpalette() == other.getpalette()
+            and self.tobytes() == other.tobytes()
+        )
 
     def __repr__(self):
         return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % (
-            self.__class__.__module__, self.__class__.__name__,
-            self.mode, self.size[0], self.size[1],
-            id(self)
-        )
+            self.__class__.__module__,
+            self.__class__.__name__,
+            self.mode,
+            self.size[0],
+            self.size[1],
+            id(self),
+        )
 
     def _repr_png_(self):
         """ iPython display hook support
 
         :returns: png version of the image as bytes
         """
-        from io import BytesIO
-        b = BytesIO()
-        self.save(b, 'PNG')
+        b = io.BytesIO()
+        self.save(b, "PNG")
         return b.getvalue()
 
     @property
@@ -677,24 +676,19 @@ def __array_interface__(self):
         # numpy array interface support
         new = {}
         shape, typestr = _conv_type_shape(self)
-        new['shape'] = shape
-        new['typestr'] = typestr
-        new['version'] = 3
-        if self.mode == '1':
+        new["shape"] = shape
+        new["typestr"] = typestr
+        new["version"] = 3
+        if self.mode == "1":
             # Binary images need to be extended from bits to bytes
             # See: https://github.com/python-pillow/Pillow/issues/350
-            new['data'] = self.tobytes('raw', 'L')
+            new["data"] = self.tobytes("raw", "L")
         else:
-            new['data'] = self.tobytes()
+            new["data"] = self.tobytes()
         return new
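Note: deleting Image.__ne__ in the hunk above is safe on Python 3, where a
class that defines only __eq__ gets "!=" as the negation of "==" by default
(unless __eq__ returns NotImplemented). Standalone illustration, not part of
the diff:

    class Cell:
        def __init__(self, value):
            self.value = value

        def __eq__(self, other):
            return self.__class__ is other.__class__ and self.value == other.value

    # "!=" is derived from __eq__ without an explicit __ne__.
    assert Cell(1) != Cell(2)
    assert not (Cell(1) != Cell(1))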
def __getstate__(self): - return [ - self.info, - self.mode, - self.size, - self.getpalette(), - self.tobytes()] + return [self.info, self.mode, self.size, self.getpalette(), self.tobytes()] def __setstate__(self, state): Image.__init__(self) @@ -704,7 +698,7 @@ class Image(object): self.mode = mode self._size = size self.im = core.new(mode, size) - if mode in ("L", "P") and palette: + if mode in ("L", "LA", "P", "PA") and palette: self.putpalette(palette) self.frombytes(data) @@ -752,8 +746,9 @@ class Image(object): return b"".join(data) def tostring(self, *args, **kw): - raise NotImplementedError("tostring() has been removed. " - "Please call tobytes() instead.") + raise NotImplementedError( + "tostring() has been removed. Please call tobytes() instead." + ) def tobitmap(self, name="image"): """ @@ -770,11 +765,15 @@ class Image(object): if self.mode != "1": raise ValueError("not a bitmap") data = self.tobytes("xbm") - return b"".join([ - ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'), - ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'), - ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};" - ]) + return b"".join( + [ + ("#define %s_width %d\n" % (name, self.size[0])).encode("ascii"), + ("#define %s_height %d\n" % (name, self.size[1])).encode("ascii"), + ("static char %s_bits[] = {\n" % name).encode("ascii"), + data, + b"};", + ] + ) def frombytes(self, data, decoder_name="raw", *args): """ @@ -803,8 +802,9 @@ class Image(object): raise ValueError("cannot decode image data") def fromstring(self, *args, **kw): - raise NotImplementedError("fromstring() has been removed. " - "Please call frombytes() instead.") + raise NotImplementedError( + "fromstring() has been removed. Please call frombytes() instead." + ) def load(self): """ @@ -813,8 +813,10 @@ class Image(object): Image class automatically loads an opened image when it is accessed for the first time. - This method will close the file associated with the image. See - :ref:`file-handling` for more information. + If the file associated with the image was opened by Pillow, then this + method will close it. The exception to this is if the image has + multiple frames, in which case the file will be left open for seek + operations. See :ref:`file-handling` for more information. :returns: An image access object. :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` @@ -833,10 +835,11 @@ class Image(object): self.palette.mode = "RGBA" if self.im: - if HAS_CFFI and USE_CFFI_ACCESS: + if cffi and USE_CFFI_ACCESS: if self.pyaccess: return self.pyaccess from . import PyAccess + self.pyaccess = PyAccess.new(self, self.readonly) if self.pyaccess: return self.pyaccess @@ -853,8 +856,7 @@ class Image(object): """ pass - def convert(self, mode=None, matrix=None, dither=None, - palette=WEB, colors=256): + def convert(self, mode=None, matrix=None, dither=None, palette=WEB, colors=256): """ Returns a converted copy of this image. For the "P" mode, this method translates pixels through the palette. If mode is @@ -865,7 +867,7 @@ class Image(object): "L", "RGB" and "CMYK." The **matrix** argument only supports "L" and "RGB". 
- When translating a color image to black and white (mode "L"), + When translating a color image to greyscale (mode "L"), the library uses the ITU-R 601-2 luma transform:: L = R * 299/1000 + G * 587/1000 + B * 114/1000 @@ -873,9 +875,9 @@ class Image(object): The default method of converting a greyscale ("L") or "RGB" image into a bilevel (mode "1") image uses Floyd-Steinberg dither to approximate the original image luminosity levels. If - dither is NONE, all non-zero values are set to 255 (white). To - use other thresholds, use the :py:meth:`~PIL.Image.Image.point` - method. + dither is NONE, all values larger than 128 are set to 255 (white), + all other values to 0 (black). To use other thresholds, use the + :py:meth:`~PIL.Image.Image.point` method. When converting from "RGBA" to "P" without a **matrix** argument, this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, @@ -907,7 +909,7 @@ class Image(object): if not mode or (mode == self.mode and not matrix): return self.copy() - has_transparency = self.info.get('transparency') is not None + has_transparency = self.info.get("transparency") is not None if matrix: # matrix conversion if mode not in ("L", "RGB"): @@ -915,19 +917,24 @@ class Image(object): im = self.im.convert_matrix(mode, matrix) new = self._new(im) if has_transparency and self.im.bands == 3: - transparency = new.info['transparency'] + transparency = new.info["transparency"] def convert_transparency(m, v): - v = m[0]*v[0] + m[1]*v[1] + m[2]*v[2] + m[3]*0.5 + v = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5 return max(0, min(255, int(v))) + if mode == "L": transparency = convert_transparency(matrix, transparency) elif len(mode) == 3: - transparency = tuple([ - convert_transparency(matrix[i*4:i*4+4], transparency) - for i in range(0, len(transparency)) - ]) - new.info['transparency'] = transparency + transparency = tuple( + [ + convert_transparency( + matrix[i * 4 : i * 4 + 4], transparency + ) + for i in range(0, len(transparency)) + ] + ) + new.info["transparency"] = transparency return new if mode == "P" and self.mode == "RGBA": @@ -937,45 +944,48 @@ class Image(object): delete_trns = False # transparency handling if has_transparency: - if self.mode in ('L', 'RGB') and mode == 'RGBA': + if self.mode in ("1", "L", "I", "RGB") and mode == "RGBA": # Use transparent conversion to promote from transparent # color to an alpha channel. - new_im = self._new(self.im.convert_transparent( - mode, self.info['transparency'])) - del(new_im.info['transparency']) + new_im = self._new( + self.im.convert_transparent(mode, self.info["transparency"]) + ) + del new_im.info["transparency"] return new_im - elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'): - t = self.info['transparency'] + elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"): + t = self.info["transparency"] if isinstance(t, bytes): # Dragons. This can't be represented by a single color - warnings.warn('Palette images with Transparency ' + - ' expressed in bytes should be converted ' + - 'to RGBA images') + warnings.warn( + "Palette images with Transparency expressed in bytes should be " + "converted to RGBA images" + ) delete_trns = True else: # get the new transparency color. 
# use existing conversions trns_im = Image()._new(core.new(self.mode, (1, 1))) - if self.mode == 'P': + if self.mode == "P": trns_im.putpalette(self.palette) if isinstance(t, tuple): try: t = trns_im.palette.getcolor(t) - except: - raise ValueError("Couldn't allocate a palette " - "color for transparency") + except Exception: + raise ValueError( + "Couldn't allocate a palette color for transparency" + ) trns_im.putpixel((0, 0), t) - if mode in ('L', 'RGB'): + if mode in ("L", "RGB"): trns_im = trns_im.convert(mode) else: # can't just retrieve the palette number, got to do it # after quantization. - trns_im = trns_im.convert('RGB') + trns_im = trns_im.convert("RGB") trns = trns_im.getpixel((0, 0)) - elif self.mode == 'P' and mode == 'RGBA': - t = self.info['transparency'] + elif self.mode == "P" and mode == "RGBA": + t = self.info["transparency"] delete_trns = True if isinstance(t, bytes): @@ -983,27 +993,26 @@ class Image(object): elif isinstance(t, int): self.im.putpalettealpha(t, 0) else: - raise ValueError("Transparency for P mode should" + - " be bytes or int") + raise ValueError("Transparency for P mode should be bytes or int") if mode == "P" and palette == ADAPTIVE: im = self.im.quantize(colors) new = self._new(im) from . import ImagePalette + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) if delete_trns: # This could possibly happen if we requantize to fewer colors. # The transparency would be totally off in that case. - del(new.info['transparency']) + del new.info["transparency"] if trns is not None: try: - new.info['transparency'] = new.palette.getcolor(trns) - except: + new.info["transparency"] = new.palette.getcolor(trns) + except Exception: # if we can't make a transparent color, don't leave the old # transparency hanging around to mess us up. - del(new.info['transparency']) - warnings.warn("Couldn't allocate palette entry " + - "for transparency") + del new.info["transparency"] + warnings.warn("Couldn't allocate palette entry for transparency") return new # colorspace conversion @@ -1023,20 +1032,19 @@ class Image(object): new_im = self._new(im) if delete_trns: # crash fail if we leave a bytes transparency in an rgb/l mode. - del(new_im.info['transparency']) + del new_im.info["transparency"] if trns is not None: - if new_im.mode == 'P': + if new_im.mode == "P": try: - new_im.info['transparency'] = new_im.palette.getcolor(trns) - except: - del(new_im.info['transparency']) - warnings.warn("Couldn't allocate palette entry " + - "for transparency") + new_im.info["transparency"] = new_im.palette.getcolor(trns) + except Exception: + del new_im.info["transparency"] + warnings.warn("Couldn't allocate palette entry for transparency") else: - new_im.info['transparency'] = trns + new_im.info["transparency"] = trns return new_im - def quantize(self, colors=256, method=None, kmeans=0, palette=None): + def quantize(self, colors=256, method=None, kmeans=0, palette=None, dither=1): """ Convert the image to 'P' mode with the specified number of colors. @@ -1047,7 +1055,12 @@ class Image(object): 2 = fast octree 3 = libimagequant :param kmeans: Integer - :param palette: Quantize to the palette of given :py:class:`PIL.Image.Image`. + :param palette: Quantize to the palette of given + :py:class:`PIL.Image.Image`. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are NONE or FLOYDSTEINBERG (default). 
+ Default: 1 (legacy setting) :returns: A new image """ @@ -1057,14 +1070,15 @@ class Image(object): if method is None: # defaults: method = 0 - if self.mode == 'RGBA': + if self.mode == "RGBA": method = 2 - if self.mode == 'RGBA' and method not in (2, 3): + if self.mode == "RGBA" and method not in (2, 3): # Caller specified an invalid mode. raise ValueError( - 'Fast Octree (method == 2) and libimagequant (method == 3) ' + - 'are the only valid methods for quantizing RGBA images') + "Fast Octree (method == 2) and libimagequant (method == 3) " + "are the only valid methods for quantizing RGBA images" + ) if palette: # use palette from reference image @@ -1074,11 +1088,18 @@ class Image(object): if self.mode != "RGB" and self.mode != "L": raise ValueError( "only RGB or L mode images can be quantized to a palette" - ) - im = self.im.convert("P", 1, palette.im) + ) + im = self.im.convert("P", dither, palette.im) return self._new(im) - return self._new(self.im.quantize(colors, method, kmeans)) + im = self._new(self.im.quantize(colors, method, kmeans)) + + from . import ImagePalette + + mode = im.im.getpalettemode() + im.palette = ImagePalette.ImagePalette(mode, im.im.getpalette(mode, mode)) + + return im def copy(self): """ @@ -1136,16 +1157,18 @@ class Image(object): """ Configures the image file loader so it returns a version of the image that as closely as possible matches the given mode and - size. For example, you can use this method to convert a color - JPEG to greyscale while loading it, or to extract a 128x192 - version from a PCD file. + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it. + + If any changes are made, returns a tuple with the chosen ``mode`` and + ``box`` with coordinates of the original image within the altered one. Note that this method modifies the :py:class:`~PIL.Image.Image` object - in place. If the image has already been loaded, this method has no + in place. If the image has already been loaded, this method has no effect. Note: This method is not implemented for most images. It is - currently implemented only for JPEG and PCD images. + currently implemented only for JPEG and MPO images. :param mode: The requested mode. :param size: The requested size. @@ -1173,8 +1196,9 @@ class Image(object): if isinstance(filter, Callable): filter = filter() if not hasattr(filter, "filter"): - raise TypeError("filter argument should be ImageFilter.Filter " + - "instance or class") + raise TypeError( + "filter argument should be ImageFilter.Filter instance or class" + ) multiband = isinstance(filter, ImageFilter.MultibandFilter) if self.im.bands == 1 or multiband: @@ -1273,6 +1297,12 @@ class Image(object): return tuple(extrema) return self.im.getextrema() + def getexif(self): + if self._exif is None: + self._exif = Exif() + self._exif.load(self.info.get("exif")) + return self._exif + def getim(self): """ Returns a capsule that points to the internal image memory. @@ -1293,10 +1323,7 @@ class Image(object): self.load() try: - if py3: - return list(self.im.getpalette()) - else: - return [i8(c) for c in self.im.getpalette()] + return list(self.im.getpalette()) except ValueError: return None # no palette @@ -1344,6 +1371,7 @@ class Image(object): bi-level image (mode "1") or a greyscale image ("L"). :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. :returns: A list containing pixel counts. 
""" self.load() @@ -1356,9 +1384,36 @@ class Image(object): return self.im.histogram(extrema) return self.im.histogram() + def entropy(self, mask=None, extrema=None): + """ + Calculates and returns the entropy for the image. + + A bilevel image (mode "1") is treated as a greyscale ("L") + image by this method. + + If a mask is provided, the method employs the histogram for + those parts of the image where the mask image is non-zero. + The mask image must have the same size as the image, and be + either a bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. + :returns: A float value representing the image entropy + """ + self.load() + if mask: + mask.load() + return self.im.entropy((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.entropy(extrema) + return self.im.entropy() + def offset(self, xoffset, yoffset=None): - raise NotImplementedError("offset() has been removed. " - "Please call ImageChops.offset() instead.") + raise NotImplementedError( + "offset() has been removed. Please call ImageChops.offset() instead." + ) def paste(self, im, box=None, mask=None): """ @@ -1416,13 +1471,12 @@ class Image(object): size = mask.size else: # FIXME: use self.size here? - raise ValueError( - "cannot determine region size; use 4-item box" - ) - box += (box[0]+size[0], box[1]+size[1]) + raise ValueError("cannot determine region size; use 4-item box") + box += (box[0] + size[0], box[1] + size[1]) - if isStringType(im): + if isinstance(im, str): from . import ImageColor + im = ImageColor.getcolor(im, self.mode) elif isImageType(im): @@ -1541,7 +1595,7 @@ class Image(object): self._ensure_mutable() - if self.mode not in ("LA", "RGBA"): + if self.mode not in ("LA", "PA", "RGBA"): # attempt to promote self to a matching alpha mode try: mode = getmodebase(self.mode) + "A" @@ -1550,7 +1604,7 @@ class Image(object): except (AttributeError, ValueError): # do things the hard way im = self.im.convert(mode) - if im.mode not in ("LA", "RGBA"): + if im.mode not in ("LA", "PA", "RGBA"): raise ValueError # sanity check self.im = im self.pyaccess = None @@ -1558,7 +1612,7 @@ class Image(object): except (KeyError, ValueError): raise ValueError("illegal image mode") - if self.mode == "LA": + if self.mode in ("LA", "PA"): band = 1 else: band = 3 @@ -1601,10 +1655,10 @@ class Image(object): def putpalette(self, data, rawmode="RGB"): """ - Attaches a palette to this image. The image must be a "P" or - "L" image, and the palette sequence must contain 768 integer - values, where each group of three values represent the red, - green, and blue values for the corresponding pixel + Attaches a palette to this image. The image must be a "P", + "PA", "L" or "LA" image, and the palette sequence must contain + 768 integer values, where each group of three values represent + the red, green, and blue values for the corresponding pixel index. Instead of an integer sequence, you can use an 8-bit string. @@ -1613,19 +1667,16 @@ class Image(object): """ from . 
import ImagePalette - if self.mode not in ("L", "P"): + if self.mode not in ("L", "LA", "P", "PA"): raise ValueError("illegal image mode") self.load() if isinstance(data, ImagePalette.ImagePalette): palette = ImagePalette.raw(data.rawmode, data.palette) else: if not isinstance(data, bytes): - if py3: - data = bytes(data) - else: - data = "".join(chr(x) for x in data) + data = bytes(data) palette = ImagePalette.raw(rawmode, data) - self.mode = "P" + self.mode = "PA" if "A" in self.mode else "P" self.palette = palette self.palette.mode = "RGB" self.load() # install new palette @@ -1634,7 +1685,8 @@ class Image(object): """ Modifies the pixel at the given position. The color is given as a single numerical value for single-band images, and a tuple for - multi-band images. + multi-band images. In addition to this, RGB and RGBA tuples are + accepted for P images. Note that this method is relatively slow. For more extensive changes, use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` @@ -1657,6 +1709,14 @@ class Image(object): if self.pyaccess: return self.pyaccess.putpixel(xy, value) + + if ( + self.mode == "P" + and isinstance(value, (list, tuple)) + and len(value) in [3, 4] + ): + # RGB or RGBA value for a P image + value = self.palette.getcolor(value) return self.im.putpixel(xy, value) def remap_palette(self, dest_map, source_palette=None): @@ -1664,7 +1724,7 @@ class Image(object): Rewrites the image to reorder the palette. :param dest_map: A list of indexes into the original palette. - e.g. [1,0] would swap a two item palette, and list(range(255)) + e.g. [1,0] would swap a two item palette, and list(range(256)) is the identity transform. :param source_palette: Bytes or None. :returns: An :py:class:`~PIL.Image.Image` object. @@ -1679,16 +1739,16 @@ class Image(object): if self.mode == "P": real_source_palette = self.im.getpalette("RGB")[:768] else: # L-mode - real_source_palette = bytearray(i//3 for i in range(768)) + real_source_palette = bytearray(i // 3 for i in range(768)) else: real_source_palette = source_palette palette_bytes = b"" - new_positions = [0]*256 + new_positions = [0] * 256 # pick only the used colors from the palette for i, oldPosition in enumerate(dest_map): - palette_bytes += real_source_palette[oldPosition*3:oldPosition*3+3] + palette_bytes += real_source_palette[oldPosition * 3 : oldPosition * 3 + 3] new_positions[oldPosition] = i # replace the palette color id of all pixel with the new id @@ -1712,30 +1772,46 @@ class Image(object): mapping_palette = bytearray(new_positions) m_im = self.copy() - m_im.mode = 'P' + m_im.mode = "P" - m_im.palette = ImagePalette.ImagePalette("RGB", - palette=mapping_palette*3, - size=768) + m_im.palette = ImagePalette.ImagePalette( + "RGB", palette=mapping_palette * 3, size=768 + ) # possibly set palette dirty, then # m_im.putpalette(mapping_palette, 'L') # converts to 'P' # or just force it. # UNDONE -- this is part of the general issue with palettes m_im.im.putpalette(*m_im.palette.getdata()) - m_im = m_im.convert('L') + m_im = m_im.convert("L") # Internally, we require 768 bytes for a palette. 
- new_palette_bytes = (palette_bytes + - (768 - len(palette_bytes)) * b'\x00') + new_palette_bytes = palette_bytes + (768 - len(palette_bytes)) * b"\x00" m_im.putpalette(new_palette_bytes) - m_im.palette = ImagePalette.ImagePalette("RGB", - palette=palette_bytes, - size=len(palette_bytes)) + m_im.palette = ImagePalette.ImagePalette( + "RGB", palette=palette_bytes, size=len(palette_bytes) + ) return m_im - def resize(self, size, resample=NEAREST, box=None): + def _get_safe_box(self, size, resample, box): + """Expands the box so it includes adjacent pixels + that may be used by resampling with the given resampling filter. + """ + filter_support = _filters_support[resample] - 0.5 + scale_x = (box[2] - box[0]) / size[0] + scale_y = (box[3] - box[1]) / size[1] + support_x = filter_support * scale_x + support_y = filter_support * scale_y + + return ( + max(0, int(box[0] - support_x)), + max(0, int(box[1] - support_y)), + min(self.size[0], math.ceil(box[2] + support_x)), + min(self.size[1], math.ceil(box[3] + support_y)), + ) + + def resize(self, size, resample=BICUBIC, box=None, reducing_gap=None): """ Returns a resized copy of this image. @@ -1745,20 +1821,49 @@ class Image(object): one of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BOX`, :py:attr:`PIL.Image.BILINEAR`, :py:attr:`PIL.Image.HAMMING`, :py:attr:`PIL.Image.BICUBIC` or :py:attr:`PIL.Image.LANCZOS`. - If omitted, or if the image has mode "1" or "P", it is - set :py:attr:`PIL.Image.NEAREST`. + Default filter is :py:attr:`PIL.Image.BICUBIC`. + If the image has mode "1" or "P", it is + always set to :py:attr:`PIL.Image.NEAREST`. See: :ref:`concept-filters`. - :param box: An optional 4-tuple of floats giving the region - of the source image which should be scaled. - The values should be within (0, 0, width, height) rectangle. + :param box: An optional 4-tuple of floats providing + the source image region to be scaled. + The values must be within (0, 0, width, height) rectangle. If omitted or None, the entire source is used. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce`. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is None (no optimization). :returns: An :py:class:`~PIL.Image.Image` object. 
""" - if resample not in ( - NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING, - ): - raise ValueError("unknown resampling filter") + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING): + message = "Unknown resampling filter ({}).".format(resample) + + filters = [ + "{} ({})".format(filter[1], filter[0]) + for filter in ( + (NEAREST, "Image.NEAREST"), + (LANCZOS, "Image.LANCZOS"), + (BILINEAR, "Image.BILINEAR"), + (BICUBIC, "Image.BICUBIC"), + (BOX, "Image.BOX"), + (HAMMING, "Image.HAMMING"), + ) + ] + raise ValueError( + message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1] + ) + + if reducing_gap is not None and reducing_gap < 1.0: + raise ValueError("reducing_gap must be 1.0 or greater") size = tuple(size) @@ -1773,18 +1878,74 @@ class Image(object): if self.mode in ("1", "P"): resample = NEAREST - if self.mode == 'LA': - return self.convert('La').resize(size, resample, box).convert('LA') - - if self.mode == 'RGBA': - return self.convert('RGBa').resize(size, resample, box).convert('RGBA') + if self.mode in ["LA", "RGBA"]: + im = self.convert(self.mode[:-1] + "a") + im = im.resize(size, resample, box) + return im.convert(self.mode) self.load() + if reducing_gap is not None and resample != NEAREST: + factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1 + factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1 + if factor_x > 1 or factor_y > 1: + reduce_box = self._get_safe_box(size, resample, box) + factor = (factor_x, factor_y) + if callable(self.reduce): + self = self.reduce(factor, box=reduce_box) + else: + self = Image.reduce(self, factor, box=reduce_box) + box = ( + (box[0] - reduce_box[0]) / factor_x, + (box[1] - reduce_box[1]) / factor_y, + (box[2] - reduce_box[0]) / factor_x, + (box[3] - reduce_box[1]) / factor_y, + ) + return self._new(self.im.resize(size, resample, box)) - def rotate(self, angle, resample=NEAREST, expand=0, center=None, - translate=None, fillcolor=None): + def reduce(self, factor, box=None): + """ + Returns a copy of the image reduced by `factor` times. + If the size of the image is not dividable by the `factor`, + the resulting size will be rounded up. + + :param factor: A greater than 0 integer or tuple of two integers + for width and height separately. + :param box: An optional 4-tuple of ints providing + the source image region to be reduced. + The values must be within (0, 0, width, height) rectangle. + If omitted or None, the entire source is used. + """ + if not isinstance(factor, (list, tuple)): + factor = (factor, factor) + + if box is None: + box = (0, 0) + self.size + else: + box = tuple(box) + + if factor == (1, 1) and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ["LA", "RGBA"]: + im = self.convert(self.mode[:-1] + "a") + im = im.reduce(factor, box) + return im.convert(self.mode) + + self.load() + + return self._new(self.im.reduce(factor, box)) + + def rotate( + self, + angle, + resample=NEAREST, + expand=0, + center=None, + translate=None, + fillcolor=None, + ): """ Returns a rotated copy of this image. This method returns a copy of this image, rotated the given number of degrees counter @@ -1797,7 +1958,7 @@ class Image(object): environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation in a 4x4 environment). If omitted, or if the image has mode "1" or "P", it is - set :py:attr:`PIL.Image.NEAREST`. See :ref:`concept-filters`. + set to :py:attr:`PIL.Image.NEAREST`. See :ref:`concept-filters`. :param expand: Optional expansion flag. 
If true, expands the output image to make it large enough to hold the entire rotated image. If false or omitted, make the output image the same size as the @@ -1849,22 +2010,28 @@ class Image(object): else: post_trans = translate if center is None: - rotn_center = (w / 2.0, h / 2.0) # FIXME These should be rounded to ints? + # FIXME These should be rounded to ints? + rotn_center = (w / 2.0, h / 2.0) else: rotn_center = center - angle = - math.radians(angle) + angle = -math.radians(angle) matrix = [ - round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0, - round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0 + round(math.cos(angle), 15), + round(math.sin(angle), 15), + 0.0, + round(-math.sin(angle), 15), + round(math.cos(angle), 15), + 0.0, ] def transform(x, y, matrix): (a, b, c, d, e, f) = matrix - return a*x + b*y + c, d*x + e*y + f + return a * x + b * y + c, d * x + e * y + f - matrix[2], matrix[5] = transform(-rotn_center[0] - post_trans[0], - -rotn_center[1] - post_trans[1], matrix) + matrix[2], matrix[5] = transform( + -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix + ) matrix[2] += rotn_center[0] matrix[5] += rotn_center[1] @@ -1876,15 +2043,13 @@ class Image(object): x, y = transform(x, y, matrix) xx.append(x) yy.append(y) - nw = int(math.ceil(max(xx)) - math.floor(min(xx))) - nh = int(math.ceil(max(yy)) - math.floor(min(yy))) + nw = math.ceil(max(xx)) - math.floor(min(xx)) + nh = math.ceil(max(yy)) - math.floor(min(yy)) # We multiply a translation matrix from the right. Because of its # special form, this is the same as taking the image of the # translation vector as new translation vector. - matrix[2], matrix[5] = transform(-(nw - w) / 2.0, - -(nh - h) / 2.0, - matrix) + matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix) w, h = nw, nh return self.transform((w, h), AFFINE, matrix, resample, fillcolor=fillcolor) @@ -1924,7 +2089,7 @@ class Image(object): if isPath(fp): filename = fp open_fp = True - elif HAS_PATHLIB and isinstance(fp, Path): + elif isinstance(fp, Path): filename = str(fp) open_fp = True if not filename and hasattr(fp, "name") and isPath(fp.name): @@ -1932,9 +2097,9 @@ class Image(object): filename = fp.name # may mutate self! - self.load() + self._ensure_mutable() - save_all = params.pop('save_all', False) + save_all = params.pop("save_all", False) self.encoderinfo = params self.encoderconfig = () @@ -1948,7 +2113,7 @@ class Image(object): try: format = EXTENSION[ext] except KeyError: - raise ValueError('unknown file extension: {}'.format(ext)) + raise ValueError("unknown file extension: {}".format(ext)) if format.upper() not in SAVE: init() @@ -1958,11 +2123,11 @@ class Image(object): save_handler = SAVE[format.upper()] if open_fp: - if params.get('append', False): - fp = builtins.open(filename, "r+b") - else: + if params.get("append", False): # Open also for reading ("+"), because TIFF save_all # writer needs to go back and edit the written data. + fp = builtins.open(filename, "r+b") + else: fp = builtins.open(filename, "w+b") try: @@ -1979,9 +2144,6 @@ class Image(object): **EOFError** exception. When a sequence file is opened, the library automatically seeks to frame 0. - Note that in the current version of the library, most sequence - formats only allows you to seek to the next frame. - See :py:meth:`~PIL.Image.Image.tell`. :param frame: Frame number, starting at 0. @@ -1998,15 +2160,15 @@ class Image(object): Displays this image. This method is mainly intended for debugging purposes. 
- On Unix platforms, this method saves the image to a temporary - PPM file, and calls either the **xv** utility or the **display** - utility, depending on which one can be found. + The image is first saved to a temporary file. By default, it will be in + PNG format. - On macOS, this method saves the image to a temporary BMP file, and - opens it with the native Preview application. + On Unix, the image is then opened using the **display**, **eog** or + **xv** utility, depending on which one can be found. - On Windows, it saves the image to a temporary BMP file, and uses - the standard BMP display utility to show it (usually Paint). + On macOS, the image is opened with the native Preview application. + + On Windows, the image is opened with the standard PNG display utility. :param title: Optional title to use for the image window, where possible. @@ -2049,12 +2211,11 @@ class Image(object): """ self.load() - if isStringType(channel): + if isinstance(channel, str): try: channel = self.getbands().index(channel) except ValueError: - raise ValueError( - 'The image has no channel "{}"'.format(channel)) + raise ValueError('The image has no channel "{}"'.format(channel)) return self._new(self.im.getband(channel)) @@ -2066,7 +2227,7 @@ class Image(object): """ return 0 - def thumbnail(self, size, resample=BICUBIC): + def thumbnail(self, size, resample=BICUBIC, reducing_gap=2.0): """ Make this image into a thumbnail. This method modifies the image to contain a thumbnail version of itself, no larger than @@ -2085,38 +2246,60 @@ class Image(object): of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. - (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0). + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce` or + :py:meth:`~PIL.Image.Image.draft` for JPEG images. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is 2.0 (very close to fair resampling + while still being faster in many cases). 
:returns: None """ - # preserve aspect ratio - x, y = self.size - if x > size[0]: - y = int(max(y * size[0] / x, 1)) - x = int(size[0]) - if y > size[1]: - x = int(max(x * size[1] / y, 1)) - y = int(size[1]) - size = x, y - - if size == self.size: + x, y = map(math.floor, size) + if x >= self.width and y >= self.height: return - self.draft(None, size) + def round_aspect(number, key): + return max(min(math.floor(number), math.ceil(number), key=key), 1) - im = self.resize(size, resample) + # preserve aspect ratio + aspect = self.width / self.height + if x / y >= aspect: + x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y)) + else: + y = round_aspect(x / aspect, key=lambda n: abs(aspect - x / n)) + size = (x, y) - self.im = im.im - self.mode = im.mode - self._size = size + box = None + if reducing_gap is not None: + res = self.draft(None, (size[0] * reducing_gap, size[1] * reducing_gap)) + if res is not None: + box = res[1] + + if self.size != size: + im = self.resize(size, resample, box=box, reducing_gap=reducing_gap) + + self.im = im.im + self._size = size + self.mode = self.im.mode self.readonly = 0 self.pyaccess = None # FIXME: the different transform methods need further explanation # instead of bloating the method docs, add a separate chapter. - def transform(self, size, method, data=None, resample=NEAREST, - fill=1, fillcolor=None): + def transform( + self, size, method, data=None, resample=NEAREST, fill=1, fillcolor=None + ): """ Transforms this image. This method creates a new image with the given size, and the same mode as the original, and copies data @@ -2133,12 +2316,14 @@ class Image(object): It may also be an :py:class:`~PIL.Image.ImageTransformHandler` object:: + class Example(Image.ImageTransformHandler): def transform(size, method, data, resample, fill=1): # Return result It may also be an object with a :py:meth:`~method.getdata` method that returns a tuple supplying new **method** and **data** values:: + class Example(object): def getdata(self): method = Image.EXTENT @@ -2154,18 +2339,24 @@ class Image(object): :param fill: If **method** is an :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of the arguments passed to it. Otherwise, it is unused. - :param fillcolor: Optional fill color for the area outside the transform - in the output image. + :param fillcolor: Optional fill color for the area outside the + transform in the output image. :returns: An :py:class:`~PIL.Image.Image` object. 
""" - if self.mode == 'LA': - return self.convert('La').transform( - size, method, data, resample, fill, fillcolor).convert('LA') + if self.mode == "LA": + return ( + self.convert("La") + .transform(size, method, data, resample, fill, fillcolor) + .convert("LA") + ) - if self.mode == 'RGBA': - return self.convert('RGBa').transform( - size, method, data, resample, fill, fillcolor).convert('RGBA') + if self.mode == "RGBA": + return ( + self.convert("RGBa") + .transform(size, method, data, resample, fill, fillcolor) + .convert("RGBA") + ) if isinstance(method, ImageTransformHandler): return method.transform(size, self, resample=resample, fill=fill) @@ -2178,19 +2369,19 @@ class Image(object): raise ValueError("missing method data") im = new(self.mode, size, fillcolor) + im.info = self.info.copy() if method == MESH: # list of quads for box, quad in data: - im.__transformer(box, self, QUAD, quad, resample, - fillcolor is None) + im.__transformer(box, self, QUAD, quad, resample, fillcolor is None) else: - im.__transformer((0, 0)+size, self, method, data, - resample, fillcolor is None) + im.__transformer( + (0, 0) + size, self, method, data, resample, fillcolor is None + ) return im - def __transformer(self, box, image, method, data, - resample=NEAREST, fill=1): + def __transformer(self, box, image, method, data, resample=NEAREST, fill=1): w = box[2] - box[0] h = box[3] - box[1] @@ -2200,8 +2391,8 @@ class Image(object): elif method == EXTENT: # convert extent to an affine transform x0, y0, x1, y1 = data - xs = float(x1 - x0) / w - ys = float(y1 - y0) / h + xs = (x1 - x0) / w + ys = (y1 - y0) / h method = AFFINE data = (xs, 0, x0, 0, ys, y0) @@ -2218,16 +2409,41 @@ class Image(object): x0, y0 = nw As = 1.0 / w At = 1.0 / h - data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At, - (se[0]-sw[0]-ne[0]+x0)*As*At, - y0, (ne[1]-y0)*As, (sw[1]-y0)*At, - (se[1]-sw[1]-ne[1]+y0)*As*At) + data = ( + x0, + (ne[0] - x0) * As, + (sw[0] - x0) * At, + (se[0] - sw[0] - ne[0] + x0) * As * At, + y0, + (ne[1] - y0) * As, + (sw[1] - y0) * At, + (se[1] - sw[1] - ne[1] + y0) * As * At, + ) else: raise ValueError("unknown transformation method") if resample not in (NEAREST, BILINEAR, BICUBIC): - raise ValueError("unknown resampling filter") + if resample in (BOX, HAMMING, LANCZOS): + message = { + BOX: "Image.BOX", + HAMMING: "Image.HAMMING", + LANCZOS: "Image.LANCZOS/Image.ANTIALIAS", + }[resample] + " ({}) cannot be used.".format(resample) + else: + message = "Unknown resampling filter ({}).".format(resample) + + filters = [ + "{} ({})".format(filter[1], filter[0]) + for filter in ( + (NEAREST, "Image.NEAREST"), + (BILINEAR, "Image.BILINEAR"), + (BICUBIC, "Image.BICUBIC"), + ) + ] + raise ValueError( + message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1] + ) image.load() @@ -2264,6 +2480,7 @@ class Image(object): def toqimage(self): """Returns a QImage copy of this image""" from . import ImageQt + if not ImageQt.qt_is_installed: raise ImportError("Qt bindings are not installed") return ImageQt.toqimage(self) @@ -2271,6 +2488,7 @@ class Image(object): def toqpixmap(self): """Returns a QPixmap copy of this image""" from . import ImageQt + if not ImageQt.qt_is_installed: raise ImportError("Qt bindings are not installed") return ImageQt.toqpixmap(self) @@ -2279,12 +2497,13 @@ class Image(object): # -------------------------------------------------------------------- # Abstract handlers. 
-class ImagePointHandler(object):
+
+class ImagePointHandler:
     # used as a mixin by point transforms (for use with im.point)
     pass


-class ImageTransformHandler(object):
+class ImageTransformHandler:
     # used as a mixin by geometry transforms (for use with im.transform)
     pass

@@ -2295,6 +2514,7 @@ class ImageTransformHandler(object):
 #
 # Debugging

+
 def _wedge():
     """Create greyscale wedge (for debugging only)"""

@@ -2341,13 +2561,21 @@ def new(mode, size, color=0):
         # don't initialize
         return Image()._new(core.new(mode, size))

-    if isStringType(color):
+    if isinstance(color, str):
         # css3-style specifier
         from . import ImageColor
+
         color = ImageColor.getcolor(color, mode)

-    return Image()._new(core.fill(mode, size, color))
+    im = Image()
+    if mode == "P" and isinstance(color, (list, tuple)) and len(color) in [3, 4]:
+        # RGB or RGBA value for a P image
+        from . import ImagePalette
+
+        im.palette = ImagePalette.ImagePalette()
+        color = im.palette.getcolor(color)
+    return im._new(core.fill(mode, size, color))


 def frombytes(mode, size, data, decoder_name="raw", *args):
@@ -2389,8 +2617,9 @@ def frombytes(mode, size, data, decoder_name="raw", *args):


 def fromstring(*args, **kw):
-    raise NotImplementedError("fromstring() has been removed. " +
-                              "Please call frombytes() instead.")
+    raise NotImplementedError(
+        "fromstring() has been removed. Please call frombytes() instead."
+    )


 def frombuffer(mode, size, data, decoder_name="raw", *args):
@@ -2436,18 +2665,10 @@ def frombuffer(mode, size, data, decoder_name="raw", *args):

     if decoder_name == "raw":
         if args == ():
-            warnings.warn(
-                "the frombuffer defaults may change in a future release; "
-                "for portability, change the call to read:\n"
-                "  frombuffer(mode, size, data, 'raw', mode, 0, 1)",
-                RuntimeWarning, stacklevel=2
-            )
-            args = mode, 0, -1  # may change to (mode, 0, 1) post-1.1.6
+            args = mode, 0, 1
         if args[0] in _MAPMODES:
             im = new(mode, (1, 1))
-            im = im._new(
-                core.map_buffer(data, size, decoder_name, None, 0, args)
-            )
+            im = im._new(core.map_buffer(data, size, decoder_name, None, 0, args))
             im.readonly = 1
             return im

@@ -2481,15 +2702,18 @@ def fromarray(obj, mode=None):

     .. versionadded:: 1.1.6
     """
     arr = obj.__array_interface__
-    shape = arr['shape']
+    shape = arr["shape"]
     ndim = len(shape)
-    strides = arr.get('strides', None)
+    strides = arr.get("strides", None)
     if mode is None:
         try:
-            typekey = (1, 1) + shape[2:], arr['typestr']
-            mode, rawmode = _fromarray_typemap[typekey]
+            typekey = (1, 1) + shape[2:], arr["typestr"]
         except KeyError:
             raise TypeError("Cannot handle this data type")
+        try:
+            mode, rawmode = _fromarray_typemap[typekey]
+        except KeyError:
+            raise TypeError("Cannot handle this data type: %s, %s" % typekey)
     else:
         rawmode = mode
     if mode in ["1", "L", "I", "P", "F"]:

@@ -2503,7 +2727,7 @@ def fromarray(obj, mode=None):
     size = shape[1], shape[0]
     if strides is not None:
-        if hasattr(obj, 'tobytes'):
+        if hasattr(obj, "tobytes"):
             obj = obj.tobytes()
         else:
             obj = obj.tostring()

@@ -2514,6 +2738,7 @@ def fromarray(obj, mode=None):
 def fromqimage(im):
     """Creates an image instance from a QImage image"""
     from . import ImageQt
+
     if not ImageQt.qt_is_installed:
         raise ImportError("Qt bindings are not installed")
     return ImageQt.fromqimage(im)

@@ -2522,6 +2747,7 @@ def fromqimage(im):
 def fromqpixmap(im):
     """Creates an image instance from a QPixmap image"""
     from .
import ImageQt + if not ImageQt.qt_is_installed: raise ImportError("Qt bindings are not installed") return ImageQt.fromqpixmap(im) @@ -2548,7 +2774,7 @@ _fromarray_typemap = { ((1, 1, 2), "|u1"): ("LA", "LA"), ((1, 1, 3), "|u1"): ("RGB", "RGB"), ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), - } +} # shortcuts _fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") @@ -2564,15 +2790,15 @@ def _decompression_bomb_check(size): if pixels > 2 * MAX_IMAGE_PIXELS: raise DecompressionBombError( "Image size (%d pixels) exceeds limit of %d pixels, " - "could be decompression bomb DOS attack." % - (pixels, 2 * MAX_IMAGE_PIXELS)) + "could be decompression bomb DOS attack." % (pixels, 2 * MAX_IMAGE_PIXELS) + ) if pixels > MAX_IMAGE_PIXELS: warnings.warn( "Image size (%d pixels) exceeds limit of %d pixels, " - "could be decompression bomb DOS attack." % - (pixels, MAX_IMAGE_PIXELS), - DecompressionBombWarning) + "could be decompression bomb DOS attack." % (pixels, MAX_IMAGE_PIXELS), + DecompressionBombWarning, + ) def open(fp, mode="r"): @@ -2591,19 +2817,27 @@ def open(fp, mode="r"): and be opened in binary mode. :param mode: The mode. If given, this argument must be "r". :returns: An :py:class:`~PIL.Image.Image` object. - :exception IOError: If the file cannot be found, or the image cannot be - opened and identified. + :exception FileNotFoundError: If the file cannot be found. + :exception PIL.UnidentifiedImageError: If the image cannot be opened and + identified. + :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO`` + instance is used for ``fp``. """ if mode != "r": raise ValueError("bad mode %r" % mode) + elif isinstance(fp, io.StringIO): + raise ValueError( + "StringIO cannot be used to open an image. " + "Binary data must be used instead." + ) exclusive_fp = False filename = "" - if isPath(fp): - filename = fp - elif HAS_PATHLIB and isinstance(fp, Path): + if isinstance(fp, Path): filename = str(fp.resolve()) + elif isPath(fp): + filename = fp if filename: fp = builtins.open(filename, "rb") @@ -2620,6 +2854,7 @@ def open(fp, mode="r"): preinit() accept_warnings = [] + def _open_core(fp, filename, prefix): for i in ID: try: @@ -2637,6 +2872,10 @@ def open(fp, mode="r"): # opening failures that are entirely expected. # logger.debug("", exc_info=True) continue + except BaseException: + if exclusive_fp: + fp.close() + raise return None im = _open_core(fp, filename, prefix) @@ -2653,8 +2892,10 @@ def open(fp, mode="r"): fp.close() for message in accept_warnings: warnings.warn(message) - raise IOError("cannot identify image file %r" - % (filename if filename else fp)) + raise UnidentifiedImageError( + "cannot identify image file %r" % (filename if filename else fp) + ) + # # Image processing. @@ -2758,6 +2999,7 @@ def merge(mode, bands): # -------------------------------------------------------------------- # Plugin registry + def register_open(id, factory, accept=None): """ Register an image file plugin. This function should not be used @@ -2871,6 +3113,7 @@ def register_encoder(name, encoder): # -------------------------------------------------------------------- # Simple display support. User code may override this. + def _show(image, **options): # override me, as necessary _showxv(image, **options) @@ -2878,12 +3121,14 @@ def _show(image, **options): def _showxv(image, title=None, **options): from . 
import ImageShow + ImageShow.show(image, title, **options) # -------------------------------------------------------------------- # Effects + def effect_mandelbrot(size, extent, quality): """ Generate a Mandelbrot set covering the given extent. @@ -2929,14 +3174,15 @@ def radial_gradient(mode): # -------------------------------------------------------------------- # Resources + def _apply_env_variables(env=None): if env is None: env = os.environ for var_name, setter in [ - ('PILLOW_ALIGNMENT', core.set_alignment), - ('PILLOW_BLOCK_SIZE', core.set_block_size), - ('PILLOW_BLOCKS_MAX', core.set_blocks_max), + ("PILLOW_ALIGNMENT", core.set_alignment), + ("PILLOW_BLOCK_SIZE", core.set_block_size), + ("PILLOW_BLOCKS_MAX", core.set_blocks_max), ]: if var_name not in env: continue @@ -2944,22 +3190,228 @@ def _apply_env_variables(env=None): var = env[var_name].lower() units = 1 - for postfix, mul in [('k', 1024), ('m', 1024*1024)]: + for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]: if var.endswith(postfix): units = mul - var = var[:-len(postfix)] + var = var[: -len(postfix)] try: var = int(var) * units except ValueError: - warnings.warn("{0} is not int".format(var_name)) + warnings.warn("{} is not int".format(var_name)) continue try: setter(var) except ValueError as e: - warnings.warn("{0}: {1}".format(var_name, e)) + warnings.warn("{}: {}".format(var_name, e)) _apply_env_variables() atexit.register(core.clear_cache) + + +class Exif(MutableMapping): + endian = "<" + + def __init__(self): + self._data = {} + self._ifds = {} + self._info = None + self._loaded_exif = None + + def _fixup(self, value): + try: + if len(value) == 1 and not isinstance(value, dict): + return value[0] + except Exception: + pass + return value + + def _fixup_dict(self, src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + return {k: self._fixup(v) for k, v in src_dict.items()} + + def _get_ifd_dict(self, tag): + try: + # an offset pointer to the location of the nested embedded IFD. + # It should be a long, but may be corrupted. + self.fp.seek(self[tag]) + except (KeyError, TypeError): + pass + else: + from . import TiffImagePlugin + + info = TiffImagePlugin.ImageFileDirectory_v1(self.head) + info.load(self.fp) + return self._fixup_dict(info) + + def load(self, data): + # Extract EXIF information. This is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + if data == self._loaded_exif: + return + self._loaded_exif = data + self._data.clear() + self._ifds.clear() + self._info = None + if not data: + return + + self.fp = io.BytesIO(data[6:]) + self.head = self.fp.read(8) + # process dictionary + from . import TiffImagePlugin + + self._info = TiffImagePlugin.ImageFileDirectory_v1(self.head) + self.endian = self._info._endian + self.fp.seek(self._info.next) + self._info.load(self.fp) + + # get EXIF extension + ifd = self._get_ifd_dict(0x8769) + if ifd: + self._data.update(ifd) + self._ifds[0x8769] = ifd + + def tobytes(self, offset=0): + from . 
import TiffImagePlugin
+
+        if self.endian == "<":
+            head = b"II\x2A\x00\x08\x00\x00\x00"
+        else:
+            head = b"MM\x00\x2A\x00\x00\x00\x08"
+        ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
+        for tag, value in self.items():
+            ifd[tag] = value
+        return b"Exif\x00\x00" + head + ifd.tobytes(offset)
+
+    def get_ifd(self, tag):
+        if tag not in self._ifds and tag in self:
+            if tag in [0x8825, 0xA005]:
+                # gpsinfo, interop
+                self._ifds[tag] = self._get_ifd_dict(tag)
+            elif tag == 0x927C:  # makernote
+                from .TiffImagePlugin import ImageFileDirectory_v2
+
+                if self[0x927C][:8] == b"FUJIFILM":
+                    exif_data = self[0x927C]
+                    ifd_offset = i32le(exif_data[8:12])
+                    ifd_data = exif_data[ifd_offset:]
+
+                    makernote = {}
+                    for i in range(0, struct.unpack("<H", ifd_data[:2])[0]):
+                        ifd_tag, typ, count, data = struct.unpack(
+                            "<HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2]
+                        )
+                        try:
+                            unit_size, handler = ImageFileDirectory_v2._load_dispatch[
+                                typ
+                            ]
+                        except KeyError:
+                            continue
+                        size = count * unit_size
+                        if size > 4:
+                            (offset,) = struct.unpack("<L", data)
+                            data = ifd_data[offset - 12 : offset + size - 12]
+                        else:
+                            data = data[:size]
+                        if len(data) != size:
+                            continue
+                        makernote[ifd_tag] = handler(
+                            ImageFileDirectory_v2(), data, False
+                        )
+                    self._ifds[0x927C] = dict(self._fixup_dict(makernote))
+                elif self.get(0x010F) == "Nintendo":
+                    ifd_data = self[0x927C]
+                    for i in range(0, struct.unpack(">H", ifd_data[:2])[0]):
+                        ifd_tag, typ, count, data = struct.unpack(
+                            ">HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2]
+                        )
+                        if ifd_tag == 0x1101:
+                            # CameraInfo
+                            (offset,) = struct.unpack(">L", data)
+                            self.fp.seek(offset)
+
+                            camerainfo = {"ModelID": self.fp.read(4)}
+
+                            self.fp.read(4)
+                            # Seconds since 2000
+                            camerainfo["TimeStamp"] = i32le(self.fp.read(12))
+
+                            self.fp.read(4)
+                            camerainfo["InternalSerialNumber"] = self.fp.read(4)
+
+                            self.fp.read(12)
+                            parallax = self.fp.read(4)
+                            handler = ImageFileDirectory_v2._load_dispatch[
+                                TiffTags.FLOAT
+                            ][1]
+                            camerainfo["Parallax"] = handler(
+                                ImageFileDirectory_v2(), parallax, False
+                            )
+
+                            self.fp.read(4)
+                            camerainfo["Category"] = self.fp.read(2)
+
+                            makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
+                    self._ifds[0x927C] = makernote
+        return self._ifds.get(tag, {})
+
+    def __str__(self):
+        if self._info is not None:
+            # Load all keys into self._data
+            for tag in self._info.keys():
+                self[tag]
+
+        return str(self._data)
+
+    def __len__(self):
+        keys = set(self._data)
+        if self._info is not None:
+            keys.update(self._info)
+        return len(keys)
+
+    def __getitem__(self, tag):
+        if self._info is not None and tag not in self._data and tag in self._info:
+            self._data[tag] = self._fixup(self._info[tag])
+            if tag == 0x8825:
+                self._data[tag] = self.get_ifd(tag)
+            del self._info[tag]
+        return self._data[tag]
+
+    def __contains__(self, tag):
+        return tag in self._data or (self._info is not None and tag in self._info)
+
+    def __setitem__(self, tag, value):
+        if self._info is not None and tag in self._info:
+            del self._info[tag]
+        self._data[tag] = value
+
+    def __delitem__(self, tag):
+        if self._info is not None and tag in self._info:
+            del self._info[tag]
+        del self._data[tag]
+
+    def __iter__(self):
+        keys = set(self._data)
+        if self._info is not None:
+            keys.update(self._info)
+        return iter(keys)
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageChops.py b/server/www/packages/packages-linux/x64/PIL/ImageChops.py
index 8901673..2d13b52 100644
--- a/server/www/packages/packages-linux/x64/PIL/ImageChops.py
+++ b/server/www/packages/packages-linux/x64/PIL/ImageChops.py
@@ -54,7 +54,7 @@ def invert(image):
 def lighter(image1, image2):
     """
     Compares the two images, pixel by pixel, and returns a new image containing
-    the lighter values.
+    the lighter values. At least one of the images must have mode "1".

     .. code-block:: python

@@ -70,8 +70,8 @@ def lighter(image1, image2):

 def darker(image1, image2):
     """
-    Compares the two images, pixel by pixel, and returns a new image
-    containing the darker values.
+    Compares the two images, pixel by pixel, and returns a new image containing
+    the darker values. At least one of the images must have mode "1".

     ..
code-block:: python @@ -88,7 +88,7 @@ def darker(image1, image2): def difference(image1, image2): """ Returns the absolute value of the pixel-by-pixel difference between the two - images. + images. At least one of the images must have mode "1". .. code-block:: python @@ -107,7 +107,8 @@ def multiply(image1, image2): Superimposes two images on top of each other. If you multiply an image with a solid black image, the result is black. If - you multiply with a solid white image, the image is unaffected. + you multiply with a solid white image, the image is unaffected. At least + one of the images must have mode "1". .. code-block:: python @@ -123,7 +124,8 @@ def multiply(image1, image2): def screen(image1, image2): """ - Superimposes two inverted images on top of each other. + Superimposes two inverted images on top of each other. At least one of the + images must have mode "1". .. code-block:: python @@ -137,10 +139,47 @@ def screen(image1, image2): return image1._new(image1.im.chop_screen(image2.im)) +def soft_light(image1, image2): + """ + Superimposes two images on top of each other using the Soft Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_soft_light(image2.im)) + + +def hard_light(image1, image2): + """ + Superimposes two images on top of each other using the Hard Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_hard_light(image2.im)) + + +def overlay(image1, image2): + """ + Superimposes two images on top of each other using the Overlay algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_overlay(image2.im)) + + def add(image1, image2, scale=1.0, offset=0): """ Adds two images, dividing the result by scale and adding the offset. If omitted, scale defaults to 1.0, and offset to 0.0. + At least one of the images must have mode "1". .. code-block:: python @@ -156,8 +195,9 @@ def add(image1, image2, scale=1.0, offset=0): def subtract(image1, image2, scale=1.0, offset=0): """ - Subtracts two images, dividing the result by scale and adding the - offset. If omitted, scale defaults to 1.0, and offset to 0.0. + Subtracts two images, dividing the result by scale and adding the offset. + If omitted, scale defaults to 1.0, and offset to 0.0. At least one of the + images must have mode "1". .. code-block:: python @@ -172,7 +212,8 @@ def subtract(image1, image2, scale=1.0, offset=0): def add_modulo(image1, image2): - """Add two images, without clipping the result. + """Add two images, without clipping the result. At least one of the images + must have mode "1". .. code-block:: python @@ -187,7 +228,8 @@ def add_modulo(image1, image2): def subtract_modulo(image1, image2): - """Subtract two images, without clipping the result. + """Subtract two images, without clipping the result. At least one of the + images must have mode "1". .. code-block:: python @@ -202,7 +244,8 @@ def subtract_modulo(image1, image2): def logical_and(image1, image2): - """Logical AND between two images. + """Logical AND between two images. At least one of the images must have + mode "1". .. code-block:: python @@ -217,7 +260,8 @@ def logical_and(image1, image2): def logical_or(image1, image2): - """Logical OR between two images. + """Logical OR between two images. At least one of the images must have + mode "1". .. 
code-block:: python @@ -232,7 +276,8 @@ def logical_or(image1, image2): def logical_xor(image1, image2): - """Logical XOR between two images. + """Logical XOR between two images. At least one of the images must have + mode "1". .. code-block:: python diff --git a/server/www/packages/packages-linux/x64/PIL/ImageCms.py b/server/www/packages/packages-linux/x64/PIL/ImageCms.py index 4b6281f..661c3f3 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageCms.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageCms.py @@ -15,18 +15,18 @@ # See the README file for information on usage and redistribution. See # below for the original description. -from __future__ import print_function import sys from PIL import Image + try: from PIL import _imagingcms except ImportError as ex: # Allow error import for doc purposes, but error out when accessing # anything in core. - from _util import deferred_error + from ._util import deferred_error + _imagingcms = deferred_error(ex) -from PIL._util import isStringType DESCRIPTION = """ pyCMS @@ -132,7 +132,7 @@ FLAGS = { "SOFTPROOFING": 16384, # Do softproofing "PRESERVEBLACK": 32768, # Black preservation "NODEFAULTRESOURCEDEF": 16777216, # CRD special - "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16 # Gridpoints + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16, # Gridpoints } _MAX_FLAG = 0 @@ -148,8 +148,8 @@ for flag in FLAGS.values(): ## # Profile. -class ImageCmsProfile(object): +class ImageCmsProfile: def __init__(self, profile): """ :param profile: Either a string representing a filename, @@ -158,7 +158,7 @@ class ImageCmsProfile(object): """ - if isStringType(profile): + if isinstance(profile, str): self._set(core.profile_open(profile), profile) elif hasattr(profile, "read"): self._set(core.profile_frombytes(profile.read())) @@ -197,22 +197,31 @@ class ImageCmsTransform(Image.ImagePointHandler): Will return the output profile in the output.info['icc_profile']. 
""" - def __init__(self, input, output, input_mode, output_mode, - intent=INTENT_PERCEPTUAL, proof=None, - proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0): + def __init__( + self, + input, + output, + input_mode, + output_mode, + intent=INTENT_PERCEPTUAL, + proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=0, + ): if proof is None: self.transform = core.buildTransform( - input.profile, output.profile, - input_mode, output_mode, - intent, - flags + input.profile, output.profile, input_mode, output_mode, intent, flags ) else: self.transform = core.buildProofTransform( - input.profile, output.profile, proof.profile, - input_mode, output_mode, - intent, proof_intent, - flags + input.profile, + output.profile, + proof.profile, + input_mode, + output_mode, + intent, + proof_intent, + flags, ) # Note: inputMode and outputMode are for pyCMS compatibility only self.input_mode = self.inputMode = input_mode @@ -228,7 +237,7 @@ class ImageCmsTransform(Image.ImagePointHandler): if imOut is None: imOut = Image.new(self.output_mode, im.size, None) self.transform.apply(im.im.id, imOut.im.id) - imOut.info['icc_profile'] = self.output_profile.tobytes() + imOut.info["icc_profile"] = self.output_profile.tobytes() return imOut def apply_in_place(self, im): @@ -236,7 +245,7 @@ class ImageCmsTransform(Image.ImagePointHandler): if im.mode != self.output_mode: raise ValueError("mode mismatch") # wrong output mode self.transform.apply(im.im.id, im.im.id) - im.info['icc_profile'] = self.output_profile.tobytes() + im.info["icc_profile"] = self.output_profile.tobytes() return im @@ -245,19 +254,17 @@ def get_display_profile(handle=None): :returns: None if the profile is not known. """ - if sys.platform == "win32": - from PIL import ImageWin - if isinstance(handle, ImageWin.HDC): - profile = core.get_display_profile_win32(handle, 1) - else: - profile = core.get_display_profile_win32(handle or 0) + if sys.platform != "win32": + return None + + from PIL import ImageWin + + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) else: - try: - get = _imagingcms.get_display_profile - except AttributeError: - return None - else: - profile = get() + profile = core.get_display_profile_win32(handle or 0) + if profile is None: + return None return ImageCmsProfile(profile) @@ -265,22 +272,30 @@ def get_display_profile(handle=None): # pyCMS compatible layer # --------------------------------------------------------------------. + class PyCMSError(Exception): """ (pyCMS) Exception class. This is used for all errors in the pyCMS API. """ + pass def profileToProfile( - im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL, - outputMode=None, inPlace=0, flags=0): + im, + inputProfile, + outputProfile, + renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, + inPlace=False, + flags=0, +): """ (pyCMS) Applies an ICC transformation to a given image, mapping from inputProfile to outputProfile. If the input or output profiles specified are not valid filenames, a - PyCMSError will be raised. If inPlace == TRUE and outputMode != im.mode, + PyCMSError will be raised. If inPlace is True and outputMode != im.mode, a PyCMSError will be raised. If an error occurs during application of the profiles, a PyCMSError will be raised. If outputMode is not a mode supported by the outputProfile (or by pyCMS), a PyCMSError will be @@ -317,9 +332,9 @@ def profileToProfile( MUST be the same mode as the input, or omitted completely. 
If omitted, the outputMode will be the same as the mode of the input image (im.mode) - :param inPlace: Boolean (1 = True, None or 0 = False). If True, the - original image is modified in-place, and None is returned. If False - (default), a new Image object is returned with the transform applied. + :param inPlace: Boolean. If True, the original image is modified in-place, + and None is returned. If False (default), a new Image object is + returned with the transform applied. :param flags: Integer (0-...) specifying additional flags :returns: Either None or a new PIL image object, depending on value of inPlace @@ -333,8 +348,7 @@ def profileToProfile( raise PyCMSError("renderingIntent must be an integer between 0 and 3") if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): - raise PyCMSError( - "flags must be an integer between 0 and %s" + _MAX_FLAG) + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) try: if not isinstance(inputProfile, ImageCmsProfile): @@ -342,15 +356,19 @@ def profileToProfile( if not isinstance(outputProfile, ImageCmsProfile): outputProfile = ImageCmsProfile(outputProfile) transform = ImageCmsTransform( - inputProfile, outputProfile, im.mode, outputMode, - renderingIntent, flags=flags + inputProfile, + outputProfile, + im.mode, + outputMode, + renderingIntent, + flags=flags, ) if inPlace: transform.apply_in_place(im) imOut = None else: imOut = transform.apply(im) - except (IOError, TypeError, ValueError) as v: + except (OSError, TypeError, ValueError) as v: raise PyCMSError(v) return imOut @@ -374,13 +392,18 @@ def getOpenProfile(profileFilename): try: return ImageCmsProfile(profileFilename) - except (IOError, TypeError, ValueError) as v: + except (OSError, TypeError, ValueError) as v: raise PyCMSError(v) def buildTransform( - inputProfile, outputProfile, inMode, outMode, - renderingIntent=INTENT_PERCEPTUAL, flags=0): + inputProfile, + outputProfile, + inMode, + outMode, + renderingIntent=INTENT_PERCEPTUAL, + flags=0, +): """ (pyCMS) Builds an ICC transform mapping from the inputProfile to the outputProfile. 
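The point of `buildTransform` is amortization: build the transform once, then reuse it across many images instead of calling `profileToProfile` per image. A sketch under the same hypothetical paths::

    from PIL import Image, ImageCms

    xform = ImageCms.buildTransform("input.icc", "sRGB.icc", "RGB", "RGB")
    for name in ("a.jpg", "b.jpg"):
        converted = ImageCms.applyTransform(Image.open(name), xform)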
Use applyTransform to apply the transform to a given @@ -440,8 +463,7 @@ def buildTransform( raise PyCMSError("renderingIntent must be an integer between 0 and 3") if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): - raise PyCMSError( - "flags must be an integer between 0 and %s" + _MAX_FLAG) + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) try: if not isinstance(inputProfile, ImageCmsProfile): @@ -449,17 +471,22 @@ def buildTransform( if not isinstance(outputProfile, ImageCmsProfile): outputProfile = ImageCmsProfile(outputProfile) return ImageCmsTransform( - inputProfile, outputProfile, inMode, outMode, - renderingIntent, flags=flags) - except (IOError, TypeError, ValueError) as v: + inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags + ) + except (OSError, TypeError, ValueError) as v: raise PyCMSError(v) def buildProofTransform( - inputProfile, outputProfile, proofProfile, inMode, outMode, - renderingIntent=INTENT_PERCEPTUAL, - proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, - flags=FLAGS["SOFTPROOFING"]): + inputProfile, + outputProfile, + proofProfile, + inMode, + outMode, + renderingIntent=INTENT_PERCEPTUAL, + proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=FLAGS["SOFTPROOFING"], +): """ (pyCMS) Builds an ICC transform mapping from the inputProfile to the outputProfile, but tries to simulate the result that would be @@ -538,8 +565,7 @@ def buildProofTransform( raise PyCMSError("renderingIntent must be an integer between 0 and 3") if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): - raise PyCMSError( - "flags must be an integer between 0 and %s" + _MAX_FLAG) + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) try: if not isinstance(inputProfile, ImageCmsProfile): @@ -549,9 +575,16 @@ def buildProofTransform( if not isinstance(proofProfile, ImageCmsProfile): proofProfile = ImageCmsProfile(proofProfile) return ImageCmsTransform( - inputProfile, outputProfile, inMode, outMode, renderingIntent, - proofProfile, proofRenderingIntent, flags) - except (IOError, TypeError, ValueError) as v: + inputProfile, + outputProfile, + inMode, + outMode, + renderingIntent, + proofProfile, + proofRenderingIntent, + flags, + ) + except (OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -559,16 +592,16 @@ buildTransformFromOpenProfiles = buildTransform buildProofTransformFromOpenProfiles = buildProofTransform -def applyTransform(im, transform, inPlace=0): +def applyTransform(im, transform, inPlace=False): """ (pyCMS) Applies a transform to a given image. If im.mode != transform.inMode, a PyCMSError is raised. - If inPlace == TRUE and transform.inMode != transform.outMode, a + If inPlace is True and transform.inMode != transform.outMode, a PyCMSError is raised. - If im.mode, transfer.inMode, or transfer.outMode is not supported by + If im.mode, transform.inMode, or transform.outMode is not supported by pyCMSdll or the profiles you used for the transform, a PyCMSError is raised. @@ -581,7 +614,7 @@ def applyTransform(im, transform, inPlace=0): considerable calculation time if doing the same conversion multiple times. If you want to modify im in-place instead of receiving a new image as - the return value, set inPlace to TRUE. This can only be done if + the return value, set inPlace to True. This can only be done if transform.inMode and transform.outMode are the same, because we can't change the mode in-place (the buffer sizes for some modes are different). 
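One catch in the flag checks reformatted above: `"flags must be an integer between 0 and %s" + _MAX_FLAG` concatenates a str with an int, so the branch would raise TypeError rather than PyCMSError if it ever fired. The `%` operator is presumably what was intended::

    # presumed intent; str + int raises TypeError
    raise PyCMSError("flags must be an integer between 0 and %s" % _MAX_FLAG)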
The default behavior is to return a new Image object of @@ -590,10 +623,9 @@ def applyTransform(im, transform, inPlace=0): :param im: A PIL Image object, and im.mode must be the same as the inMode supported by the transform. :param transform: A valid CmsTransform class object - :param inPlace: Bool (1 == True, 0 or None == False). If True, im is - modified in place and None is returned, if False, a new Image object - with the transform applied is returned (and im is not changed). The - default is False. + :param inPlace: Bool. If True, im is modified in place and None is + returned, if False, a new Image object with the transform applied is + returned (and im is not changed). The default is False. :returns: Either None, or a new PIL Image object, depending on the value of inPlace. The profile will be returned in the image's info['icc_profile']. @@ -642,15 +674,16 @@ def createProfile(colorSpace, colorTemp=-1): if colorSpace not in ["LAB", "XYZ", "sRGB"]: raise PyCMSError( "Color space not supported for on-the-fly profile creation (%s)" - % colorSpace) + % colorSpace + ) if colorSpace == "LAB": try: colorTemp = float(colorTemp) - except: + except (TypeError, ValueError): raise PyCMSError( - "Color temperature must be numeric, \"%s\" not valid" - % colorTemp) + 'Color temperature must be numeric, "%s" not valid' % colorTemp + ) try: return core.createProfile(colorSpace, colorTemp) @@ -687,16 +720,16 @@ def getProfileName(profile): # // name was "%s - %s" (model, manufacturer) || Description , # // but if the Model and Manufacturer were the same or the model # // was long, Just the model, in 1.x - model = profile.profile.product_model - manufacturer = profile.profile.product_manufacturer + model = profile.profile.model + manufacturer = profile.profile.manufacturer if not (model or manufacturer): - return profile.profile.product_description + "\n" + return (profile.profile.profile_description or "") + "\n" if not manufacturer or len(model) > 30: return model + "\n" - return "%s - %s\n" % (model, manufacturer) + return "{} - {}\n".format(model, manufacturer) - except (AttributeError, IOError, TypeError, ValueError) as v: + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -727,16 +760,16 @@ def getProfileInfo(profile): # add an extra newline to preserve pyCMS compatibility # Python, not C. the white point bits weren't working well, # so skipping. 
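The metadata hunks above and below replace the old `product_*` attributes with `model`, `manufacturer`, `copyright` and `profile_description`, each guarded with `or ""` because the attributes may be None. Read them through the public helpers (profile path hypothetical)::

    from PIL import ImageCms

    p = ImageCms.getOpenProfile("sRGB.icc")
    print(ImageCms.getProfileName(p))         # "model - manufacturer\n" or a variant
    print(ImageCms.getProfileDescription(p))  # description plus trailing newline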
- # // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint - description = profile.profile.product_description - cpright = profile.profile.product_copyright + # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.profile_description + cpright = profile.profile.copyright arr = [] for elt in (description, cpright): if elt: arr.append(elt) return "\r\n\r\n".join(arr) + "\r\n\r\n" - except (AttributeError, IOError, TypeError, ValueError) as v: + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -763,8 +796,8 @@ def getProfileCopyright(profile): # add an extra newline to preserve pyCMS compatibility if not isinstance(profile, ImageCmsProfile): profile = ImageCmsProfile(profile) - return profile.profile.product_copyright + "\n" - except (AttributeError, IOError, TypeError, ValueError) as v: + return (profile.profile.copyright or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -791,8 +824,8 @@ def getProfileManufacturer(profile): # add an extra newline to preserve pyCMS compatibility if not isinstance(profile, ImageCmsProfile): profile = ImageCmsProfile(profile) - return profile.profile.product_manufacturer + "\n" - except (AttributeError, IOError, TypeError, ValueError) as v: + return (profile.profile.manufacturer or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -820,8 +853,8 @@ def getProfileModel(profile): # add an extra newline to preserve pyCMS compatibility if not isinstance(profile, ImageCmsProfile): profile = ImageCmsProfile(profile) - return profile.profile.product_model + "\n" - except (AttributeError, IOError, TypeError, ValueError) as v: + return (profile.profile.model or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -849,8 +882,8 @@ def getProfileDescription(profile): # add an extra newline to preserve pyCMS compatibility if not isinstance(profile, ImageCmsProfile): profile = ImageCmsProfile(profile) - return profile.profile.product_description + "\n" - except (AttributeError, IOError, TypeError, ValueError) as v: + return (profile.profile.profile_description or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -889,7 +922,7 @@ def getDefaultIntent(profile): if not isinstance(profile, ImageCmsProfile): profile = ImageCmsProfile(profile) return profile.profile.rendering_intent - except (AttributeError, IOError, TypeError, ValueError) as v: + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -940,7 +973,7 @@ def isIntentSupported(profile, intent, direction): return 1 else: return -1 - except (AttributeError, IOError, TypeError, ValueError) as v: + except (AttributeError, OSError, TypeError, ValueError) as v: raise PyCMSError(v) @@ -949,7 +982,4 @@ def versions(): (pyCMS) Fetches versions. """ - return ( - VERSION, core.littlecms_version, - sys.version.split()[0], Image.VERSION - ) + return (VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageColor.py b/server/www/packages/packages-linux/x64/PIL/ImageColor.py index 08c00fd..9cf7a99 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageColor.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageColor.py @@ -17,9 +17,10 @@ # See the README file for information on usage and redistribution. 
# -from . import Image import re +from . import Image + def getrgb(color): """ @@ -41,89 +42,77 @@ def getrgb(color): return rgb # check for known string formats - if re.match('#[a-f0-9]{3}$', color): - return ( - int(color[1]*2, 16), - int(color[2]*2, 16), - int(color[3]*2, 16), - ) + if re.match("#[a-f0-9]{3}$", color): + return (int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16)) - if re.match('#[a-f0-9]{4}$', color): + if re.match("#[a-f0-9]{4}$", color): return ( - int(color[1]*2, 16), - int(color[2]*2, 16), - int(color[3]*2, 16), - int(color[4]*2, 16), - ) + int(color[1] * 2, 16), + int(color[2] * 2, 16), + int(color[3] * 2, 16), + int(color[4] * 2, 16), + ) - if re.match('#[a-f0-9]{6}$', color): - return ( - int(color[1:3], 16), - int(color[3:5], 16), - int(color[5:7], 16), - ) + if re.match("#[a-f0-9]{6}$", color): + return (int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)) - if re.match('#[a-f0-9]{8}$', color): + if re.match("#[a-f0-9]{8}$", color): return ( int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16), int(color[7:9], 16), - ) + ) m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) if m: - return ( - int(m.group(1)), - int(m.group(2)), - int(m.group(3)) - ) + return (int(m.group(1)), int(m.group(2)), int(m.group(3))) m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) if m: return ( int((int(m.group(1)) * 255) / 100.0 + 0.5), int((int(m.group(2)) * 255) / 100.0 + 0.5), - int((int(m.group(3)) * 255) / 100.0 + 0.5) - ) + int((int(m.group(3)) * 255) / 100.0 + 0.5), + ) - m = re.match(r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color) + m = re.match( + r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) if m: from colorsys import hls_to_rgb + rgb = hls_to_rgb( float(m.group(1)) / 360.0, float(m.group(3)) / 100.0, float(m.group(2)) / 100.0, - ) + ) return ( int(rgb[0] * 255 + 0.5), int(rgb[1] * 255 + 0.5), - int(rgb[2] * 255 + 0.5) - ) + int(rgb[2] * 255 + 0.5), + ) - m = re.match(r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color) + m = re.match( + r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) if m: from colorsys import hsv_to_rgb + rgb = hsv_to_rgb( float(m.group(1)) / 360.0, float(m.group(2)) / 100.0, float(m.group(3)) / 100.0, - ) + ) return ( int(rgb[0] * 255 + 0.5), int(rgb[1] * 255 + 0.5), - int(rgb[2] * 255 + 0.5) - ) + int(rgb[2] * 255 + 0.5), + ) - m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", - color) + m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) if m: - return ( - int(m.group(1)), - int(m.group(2)), - int(m.group(3)), - int(m.group(4)) - ) + return (int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))) raise ValueError("unknown color specifier: %r" % color) @@ -145,11 +134,13 @@ def getcolor(color, mode): if Image.getmodebase(mode) == "L": r, g, b = color - color = (r*299 + g*587 + b*114)//1000 - if mode[-1] == 'A': + # ITU-R Recommendation 601-2 for nonlinear RGB + # scaled to 24 bits to match the convert's implementation. 
+ color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16 + if mode[-1] == "A": return (color, alpha) else: - if mode[-1] == 'A': + if mode[-1] == "A": return color + (alpha,) return color diff --git a/server/www/packages/packages-linux/x64/PIL/ImageDraw.py b/server/www/packages/packages-linux/x64/PIL/ImageDraw.py index 6a70def..7abd459 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageDraw.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageDraw.py @@ -34,7 +34,7 @@ import math import numbers from . import Image, ImageColor -from ._util import isStringType + """ A simple 2D drawing interface for PIL images. @@ -44,8 +44,7 @@ directly. """ -class ImageDraw(object): - +class ImageDraw: def __init__(self, im, mode=None): """ Create a drawing instance. @@ -76,9 +75,9 @@ class ImageDraw(object): self.draw = Image.core.draw(self.im, blend) self.mode = mode if mode in ("I", "F"): - self.ink = self.draw.draw_ink(1, mode) + self.ink = self.draw.draw_ink(1) else: - self.ink = self.draw.draw_ink(-1, mode) + self.ink = self.draw.draw_ink(-1) if mode in ("1", "P", "I", "F"): # FIXME: fix Fill2 to properly support matte for I+F images self.fontmode = "1" @@ -95,6 +94,7 @@ class ImageDraw(object): if not self.font: # FIXME: should add a font repository from . import ImageFont + self.font = ImageFont.load_default() return self.font @@ -106,17 +106,17 @@ class ImageDraw(object): ink = self.ink else: if ink is not None: - if isStringType(ink): + if isinstance(ink, str): ink = ImageColor.getcolor(ink, self.mode) if self.palette and not isinstance(ink, numbers.Number): ink = self.palette.getcolor(ink) - ink = self.draw.draw_ink(ink, self.mode) + ink = self.draw.draw_ink(ink) if fill is not None: - if isStringType(fill): + if isinstance(fill, str): fill = ImageColor.getcolor(fill, self.mode) if self.palette and not isinstance(fill, numbers.Number): fill = self.palette.getcolor(fill) - fill = self.draw.draw_ink(fill, self.mode) + fill = self.draw.draw_ink(fill) return ink, fill def arc(self, xy, start, end, fill=None, width=0): @@ -134,20 +134,20 @@ class ImageDraw(object): if ink is not None: self.draw.draw_bitmap(xy, bitmap.im, ink) - def chord(self, xy, start, end, fill=None, outline=None, width=0): + def chord(self, xy, start, end, fill=None, outline=None, width=1): """Draw a chord.""" ink, fill = self._getink(outline, fill) if fill is not None: self.draw.draw_chord(xy, start, end, fill, 1) - if ink is not None and ink != fill: + if ink is not None and ink != fill and width != 0: self.draw.draw_chord(xy, start, end, ink, 0, width) - def ellipse(self, xy, fill=None, outline=None, width=0): + def ellipse(self, xy, fill=None, outline=None, width=1): """Draw an ellipse.""" ink, fill = self._getink(outline, fill) if fill is not None: self.draw.draw_ellipse(xy, fill, 1) - if ink is not None and ink != fill: + if ink is not None and ink != fill and width != 0: self.draw.draw_ellipse(xy, ink, 0, width) def line(self, xy, fill=None, width=0, joint=None): @@ -156,13 +156,12 @@ class ImageDraw(object): if ink is not None: self.draw.draw_lines(xy, ink, width) if joint == "curve" and width > 4: - for i in range(1, len(xy)-1): + for i in range(1, len(xy) - 1): point = xy[i] angles = [ - math.degrees(math.atan2( - end[0] - start[0], start[1] - end[1] - )) % 360 - for start, end in ((xy[i-1], point), (point, xy[i+1])) + math.degrees(math.atan2(end[0] - start[0], start[1] - end[1])) + % 360 + for start, end in ((xy[i - 1], point), (point, xy[i + 1])) ] if angles[0] == angles[1]: # This is a straight line, 
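Two quick checks on the `ImageColor` hunks above: `getrgb` still accepts the same string forms, and the new integer luma in `getcolor` agrees with the old `(r*299 + g*587 + b*114) // 1000` to within one level::

    from PIL import ImageColor

    assert ImageColor.getrgb("#f00") == (255, 0, 0)
    assert ImageColor.getrgb("rgb(100%, 0%, 0%)") == (255, 0, 0)
    assert ImageColor.getrgb("hsl(0, 100%, 50%)") == (255, 0, 0)

    r, g, b = 255, 128, 0
    new = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16
    old = (r * 299 + g * 587 + b * 114) // 1000
    assert abs(new - old) <= 1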
so no joint is required @@ -171,21 +170,23 @@ class ImageDraw(object): def coord_at_angle(coord, angle): x, y = coord angle -= 90 - distance = width/2 - 1 - return tuple([ - p + - (math.floor(p_d) if p_d > 0 else math.ceil(p_d)) - for p, p_d in - ((x, distance * math.cos(math.radians(angle))), - (y, distance * math.sin(math.radians(angle)))) - ]) - flipped = ((angles[1] > angles[0] and - angles[1] - 180 > angles[0]) or - (angles[1] < angles[0] and - angles[1] + 180 > angles[0])) + distance = width / 2 - 1 + return tuple( + [ + p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d)) + for p, p_d in ( + (x, distance * math.cos(math.radians(angle))), + (y, distance * math.sin(math.radians(angle))), + ) + ] + ) + + flipped = ( + angles[1] > angles[0] and angles[1] - 180 > angles[0] + ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0]) coords = [ - (point[0] - width/2 + 1, point[1] - width/2 + 1), - (point[0] + width/2 - 1, point[1] + width/2 - 1) + (point[0] - width / 2 + 1, point[1] - width / 2 + 1), + (point[0] + width / 2 - 1, point[1] + width / 2 - 1), ] if flipped: start, end = (angles[1] + 90, angles[0] + 90) @@ -197,15 +198,15 @@ class ImageDraw(object): # Cover potential gaps between the line and the joint if flipped: gapCoords = [ - coord_at_angle(point, angles[0]+90), + coord_at_angle(point, angles[0] + 90), point, - coord_at_angle(point, angles[1]+90) + coord_at_angle(point, angles[1] + 90), ] else: gapCoords = [ - coord_at_angle(point, angles[0]-90), + coord_at_angle(point, angles[0] - 90), point, - coord_at_angle(point, angles[1]-90) + coord_at_angle(point, angles[1] - 90), ] self.line(gapCoords, fill, width=3) @@ -218,12 +219,12 @@ class ImageDraw(object): if ink is not None and ink != fill: self.draw.draw_outline(shape, ink, 0) - def pieslice(self, xy, start, end, fill=None, outline=None, width=0): + def pieslice(self, xy, start, end, fill=None, outline=None, width=1): """Draw a pieslice.""" ink, fill = self._getink(outline, fill) if fill is not None: self.draw.draw_pieslice(xy, start, end, fill, 1) - if ink is not None and ink != fill: + if ink is not None and ink != fill and width != 0: self.draw.draw_pieslice(xy, start, end, ink, 0, width) def point(self, xy, fill=None): @@ -240,12 +241,12 @@ class ImageDraw(object): if ink is not None and ink != fill: self.draw.draw_polygon(xy, ink, 0) - def rectangle(self, xy, fill=None, outline=None, width=0): + def rectangle(self, xy, fill=None, outline=None, width=1): """Draw a rectangle.""" ink, fill = self._getink(outline, fill) if fill is not None: self.draw.draw_rectangle(xy, fill, 1) - if ink is not None and ink != fill: + if ink is not None and ink != fill and width != 0: self.draw.draw_rectangle(xy, ink, 0, width) def _multiline_check(self, text): @@ -259,36 +260,126 @@ class ImageDraw(object): return text.split(split_character) - def text(self, xy, text, fill=None, font=None, anchor=None, - *args, **kwargs): + def text( + self, + xy, + text, + fill=None, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + *args, + **kwargs + ): if self._multiline_check(text): - return self.multiline_text(xy, text, fill, font, anchor, - *args, **kwargs) - ink, fill = self._getink(fill) + return self.multiline_text( + xy, + text, + fill, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + stroke_fill, + ) + if font is None: font = self.getfont() - if ink is None: - ink = fill - if ink is not None: + + def 
getink(fill): + ink, fill = self._getink(fill) + if ink is None: + return fill + return ink + + def draw_text(ink, stroke_width=0, stroke_offset=None): + coord = xy try: - mask, offset = font.getmask2(text, self.fontmode, - *args, **kwargs) - xy = xy[0] + offset[0], xy[1] + offset[1] + mask, offset = font.getmask2( + text, + self.fontmode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + *args, + **kwargs, + ) + coord = coord[0] + offset[0], coord[1] + offset[1] except AttributeError: try: - mask = font.getmask(text, self.fontmode, *args, **kwargs) + mask = font.getmask( + text, + self.fontmode, + direction, + features, + language, + stroke_width, + *args, + **kwargs, + ) except TypeError: mask = font.getmask(text) - self.draw.draw_bitmap(xy, mask, ink) + if stroke_offset: + coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1] + self.draw.draw_bitmap(coord, mask, ink) - def multiline_text(self, xy, text, fill=None, font=None, anchor=None, - spacing=4, align="left", direction=None, features=None): + ink = getink(fill) + if ink is not None: + stroke_ink = None + if stroke_width: + stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink + + if stroke_ink is not None: + # Draw stroked text + draw_text(stroke_ink, stroke_width) + + # Draw normal text + draw_text(ink, 0, (stroke_width, stroke_width)) + else: + # Only draw normal text + draw_text(ink) + + def multiline_text( + self, + xy, + text, + fill=None, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + ): widths = [] max_width = 0 lines = self._multiline_split(text) - line_spacing = self.textsize('A', font=font)[1] + spacing + line_spacing = ( + self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing + ) for line in lines: - line_width, line_height = self.textsize(line, font) + line_width, line_height = self.textsize( + line, + font, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + ) widths.append(line_width) max_width = max(max_width, line_width) left, top = xy @@ -298,35 +389,65 @@ class ImageDraw(object): elif align == "center": left += (max_width - widths[idx]) / 2.0 elif align == "right": - left += (max_width - widths[idx]) + left += max_width - widths[idx] else: raise ValueError('align must be "left", "center" or "right"') - self.text((left, top), line, fill, font, anchor, - direction=direction, features=features) + self.text( + (left, top), + line, + fill, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + stroke_fill=stroke_fill, + ) top += line_spacing left = xy[0] - def textsize(self, text, font=None, spacing=4, direction=None, - features=None): + def textsize( + self, + text, + font=None, + spacing=4, + direction=None, + features=None, + language=None, + stroke_width=0, + ): """Get the size of a given string, in pixels.""" if self._multiline_check(text): - return self.multiline_textsize(text, font, spacing, - direction, features) + return self.multiline_textsize( + text, font, spacing, direction, features, language, stroke_width + ) if font is None: font = self.getfont() - return font.getsize(text, direction, features) + return font.getsize(text, direction, features, language, stroke_width) - def multiline_textsize(self, text, font=None, spacing=4, direction=None, - features=None): + def multiline_textsize( + self, + text, + font=None, + 
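The `text`/`multiline_text` changes above thread the new stroke parameters through to the font. A sketch (the font path is hypothetical; any TrueType font works)::

    from PIL import Image, ImageDraw, ImageFont

    im = Image.new("RGB", (240, 80), "white")
    draw = ImageDraw.Draw(im)
    font = ImageFont.truetype("DejaVuSans.ttf", 32)
    draw.text((10, 10), "Hello", font=font, fill="black",
              stroke_width=2, stroke_fill="red")  # 2 px red outline around black text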
spacing=4, + direction=None, + features=None, + language=None, + stroke_width=0, + ): max_width = 0 lines = self._multiline_split(text) - line_spacing = self.textsize('A', font=font)[1] + spacing + line_spacing = ( + self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing + ) for line in lines: - line_width, line_height = self.textsize(line, font, spacing, - direction, features) + line_width, line_height = self.textsize( + line, font, spacing, direction, features, language, stroke_width + ) max_width = max(max_width, line_width) - return max_width, len(lines)*line_spacing - spacing + return max_width, len(lines) * line_spacing - spacing def Draw(im, mode=None): @@ -391,8 +512,8 @@ def floodfill(image, xy, value, border=None, thresh=0): pixel. :param thresh: Optional threshold value which specifies a maximum tolerable difference of a pixel value from the 'background' in - order for it to be replaced. Useful for filling regions of non- - homogeneous, but similar, colors. + order for it to be replaced. Useful for filling regions of + non-homogeneous, but similar, colors. """ # based on an implementation by Eric S. Raymond # amended by yo1995 @20180806 @@ -406,13 +527,16 @@ def floodfill(image, xy, value, border=None, thresh=0): except (ValueError, IndexError): return # seed point outside image edge = {(x, y)} - full_edge = set() # use a set to keep record of current and previous edge pixels to reduce memory consumption + # use a set to keep record of current and previous edge pixels + # to reduce memory consumption + full_edge = set() while edge: new_edge = set() for (x, y) in edge: # 4 adjacent method - for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): - if (s, t) in full_edge: - continue # if already processed, skip + for (s, t) in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)): + # If already processed, or if a coordinate is negative, skip + if (s, t) in full_edge or s < 0 or t < 0: + continue try: p = pixel[s, t] except (ValueError, IndexError): @@ -435,6 +559,6 @@ def _color_diff(color1, color2): Uses 1-norm distance to calculate difference between two values. """ if isinstance(color2, tuple): - return sum([abs(color1[i]-color2[i]) for i in range(0, len(color2))]) + return sum([abs(color1[i] - color2[i]) for i in range(0, len(color2))]) else: - return abs(color1-color2) + return abs(color1 - color2) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py b/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py index f7902b0..20b5fe4 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py @@ -19,26 +19,25 @@ from . 
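On the `floodfill` hunk above: the added `s < 0 or t < 0` guard stops the 4-neighbour scan from wrapping to the opposite edge through Python's negative indexing when the fill reaches the border. Typical use::

    from PIL import Image, ImageDraw

    im = Image.new("RGB", (64, 64), "white")
    ImageDraw.Draw(im).rectangle([8, 8, 56, 56], outline="black")
    ImageDraw.floodfill(im, (32, 32), (255, 0, 0))  # fill the boxed region red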
import Image, ImageColor, ImageDraw, ImageFont, ImagePath -class Pen(object): +class Pen: def __init__(self, color, width=1, opacity=255): self.color = ImageColor.getrgb(color) self.width = width -class Brush(object): +class Brush: def __init__(self, color, opacity=255): self.color = ImageColor.getrgb(color) -class Font(object): +class Font: def __init__(self, color, file, size=12): # FIXME: add support for bitmap fonts self.color = ImageColor.getrgb(color) self.font = ImageFont.truetype(file, size) -class Draw(object): - +class Draw: def __init__(self, image, size=None, color=None): if not hasattr(image, "im"): image = Image.new(image, size, color) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py b/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py index 1b78bfd..3b79d5c 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py @@ -21,8 +21,7 @@ from . import Image, ImageFilter, ImageStat -class _Enhance(object): - +class _Enhance: def enhance(self, factor): """ Returns an enhanced image. @@ -45,14 +44,14 @@ class Color(_Enhance): factor of 0.0 gives a black and white image. A factor of 1.0 gives the original image. """ + def __init__(self, image): self.image = image - self.intermediate_mode = 'L' - if 'A' in image.getbands(): - self.intermediate_mode = 'LA' + self.intermediate_mode = "L" + if "A" in image.getbands(): + self.intermediate_mode = "LA" - self.degenerate = image.convert( - self.intermediate_mode).convert(image.mode) + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) class Contrast(_Enhance): @@ -62,13 +61,14 @@ class Contrast(_Enhance): to the contrast control on a TV set. An enhancement factor of 0.0 gives a solid grey image. A factor of 1.0 gives the original image. """ + def __init__(self, image): self.image = image mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) self.degenerate = Image.new("L", image.size, mean).convert(image.mode) - if 'A' in image.getbands(): - self.degenerate.putalpha(image.getchannel('A')) + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) class Brightness(_Enhance): @@ -78,12 +78,13 @@ class Brightness(_Enhance): enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the original image. """ + def __init__(self, image): self.image = image self.degenerate = Image.new(image.mode, image.size, 0) - if 'A' in image.getbands(): - self.degenerate.putalpha(image.getchannel('A')) + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) class Sharpness(_Enhance): @@ -93,9 +94,10 @@ class Sharpness(_Enhance): enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the original image, and a factor of 2.0 gives a sharpened image. """ + def __init__(self, image): self.image = image self.degenerate = image.filter(ImageFilter.SMOOTH) - if 'A' in image.getbands(): - self.degenerate.putalpha(image.getchannel('A')) + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFile.py b/server/www/packages/packages-linux/x64/PIL/ImageFile.py index 915557a..6287968 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageFile.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageFile.py @@ -27,15 +27,16 @@ # See the README file for information on usage and redistribution. # +import io +import struct +import sys + from . 
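The `ImageEnhance` hunk above is mostly a reflow, but note the alpha handling: each enhancer now copies the `A` band onto its degenerate image so transparency is preserved. Every enhancer interpolates between that degenerate image and the original, with factor 1.0 returning the original::

    from PIL import Image, ImageEnhance

    im = Image.open("photo.jpg")  # hypothetical file
    punchier = ImageEnhance.Contrast(im).enhance(1.3)
    grayscale_look = ImageEnhance.Color(im).enhance(0.0)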
import Image from ._util import isPath -import io -import sys -import struct MAXBLOCK = 65536 -SAFEBLOCK = 1024*1024 +SAFEBLOCK = 1024 * 1024 LOAD_TRUNCATED_IMAGES = False @@ -44,7 +45,7 @@ ERRORS = { -2: "decoding error", -3: "unknown error", -8: "bad configuration", - -9: "out of memory error" + -9: "out of memory error", } @@ -55,13 +56,14 @@ def raise_ioerror(error): message = ERRORS.get(error) if not message: message = "decoder error %d" % error - raise IOError(message + " when reading image file") + raise OSError(message + " when reading image file") # # -------------------------------------------------------------------- # Helpers + def _tilesort(t): # sort on offset return t[2] @@ -71,14 +73,17 @@ def _tilesort(t): # -------------------------------------------------------------------- # ImageFile base class + class ImageFile(Image.Image): "Base class for image file format handlers." def __init__(self, fp=None, filename=None): - Image.Image.__init__(self) + super().__init__() self._min_frame = 0 + self.custom_mimetype = None + self.tile = None self.readonly = 1 # until we know better @@ -98,32 +103,33 @@ class ImageFile(Image.Image): self._exclusive_fp = None try: - self._open() - except (IndexError, # end of data + try: + self._open() + except ( + IndexError, # end of data TypeError, # end of data (ord) KeyError, # unsupported mode EOFError, # got header but not the first frame - struct.error) as v: + struct.error, + ) as v: + raise SyntaxError(v) + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + except BaseException: # close the file only if we have opened it this constructor if self._exclusive_fp: self.fp.close() - raise SyntaxError(v) - - if not self.mode or self.size[0] <= 0: - raise SyntaxError("not identified by this driver") - - def draft(self, mode, size): - "Set draft mode" - - pass + raise def get_format_mimetype(self): - if self.format is None: - return - return Image.MIME.get(self.format.upper()) + if self.custom_mimetype: + return self.custom_mimetype + if self.format is not None: + return Image.MIME.get(self.format.upper()) def verify(self): - "Check file integrity" + """Check file integrity""" # raise exception if something's wrong. must be called # directly after open, and closes file when finished. @@ -132,19 +138,19 @@ class ImageFile(Image.Image): self.fp = None def load(self): - "Load image data based on tile list" + """Load image data based on tile list""" pixel = Image.Image.load(self) if self.tile is None: - raise IOError("cannot load this image") + raise OSError("cannot load this image") if not self.tile: return pixel self.map = None use_mmap = self.filename and len(self.tile) == 1 # As of pypy 2.1.0, memory mapping was failing here. 
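`IOError` has been an alias of `OSError` since Python 3.3, so the blanket `IOError` -> `OSError` swap in this file changes spelling, not behaviour. Combined with `LOAD_TRUNCATED_IMAGES`, error handling looks like::

    from PIL import Image, ImageFile

    ImageFile.LOAD_TRUNCATED_IMAGES = True  # tolerate short reads instead of raising
    try:
        im = Image.open("maybe_truncated.jpg")  # hypothetical file
        im.load()
    except OSError as e:  # catches what used to be spelled IOError
        print("cannot load:", e)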
- use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') + use_mmap = use_mmap and not hasattr(sys, "pypy_version_info") readonly = 0 @@ -165,9 +171,12 @@ class ImageFile(Image.Image): if use_mmap: # try memory mapping decoder_name, extents, offset, args = self.tile[0] - if decoder_name == "raw" and len(args) >= 3 and \ - args[0] == self.mode and \ - args[0] in Image._MAPMODES: + if ( + decoder_name == "raw" + and len(args) >= 3 + and args[0] == self.mode + and args[0] in Image._MAPMODES + ): try: if hasattr(Image.core, "map"): # use built-in mapper WIN32 only @@ -175,22 +184,24 @@ class ImageFile(Image.Image): self.map.seek(offset) self.im = self.map.readimage( self.mode, self.size, args[1], args[2] - ) + ) else: # use mmap, if possible import mmap + with open(self.filename, "r") as fp: - self.map = mmap.mmap(fp.fileno(), 0, - access=mmap.ACCESS_READ) + self.map = mmap.mmap( + fp.fileno(), 0, access=mmap.ACCESS_READ + ) self.im = Image.core.map_buffer( - self.map, self.size, decoder_name, extents, - offset, args) + self.map, self.size, decoder_name, offset, args + ) readonly = 1 # After trashing self.im, # we might need to reload the palette data. if self.palette: self.palette.dirty = 1 - except (AttributeError, EnvironmentError, ImportError): + except (AttributeError, OSError, ImportError): self.map = None self.load_prepare() @@ -206,8 +217,9 @@ class ImageFile(Image.Image): prefix = b"" for decoder_name, extents, offset, args in self.tile: - decoder = Image._getdecoder(self.mode, decoder_name, - args, self.decoderconfig) + decoder = Image._getdecoder( + self.mode, decoder_name, args, self.decoderconfig + ) try: seek(offset) decoder.setimage(self.im, extents) @@ -224,16 +236,16 @@ class ImageFile(Image.Image): if LOAD_TRUNCATED_IMAGES: break else: - raise IOError("image file is truncated") + raise OSError("image file is truncated") if not s: # truncated jpeg if LOAD_TRUNCATED_IMAGES: break else: - self.tile = [] - raise IOError("image file is truncated " - "(%d bytes not processed)" % - len(b)) + raise OSError( + "image file is truncated " + "(%d bytes not processed)" % len(b) + ) b = b + s n, err_code = decoder.decode(b) @@ -261,8 +273,7 @@ class ImageFile(Image.Image): def load_prepare(self): # create image memory if necessary - if not self.im or\ - self.im.mode != self.mode or self.im.size != self.size: + if not self.im or self.im.mode != self.mode or self.im.size != self.size: self.im = Image.core.new(self.mode, self.size) # create palette (optional) if self.mode == "P": @@ -281,11 +292,15 @@ class ImageFile(Image.Image): # pass def _seek_check(self, frame): - if (frame < self._min_frame or + if ( + frame < self._min_frame # Only check upper limit on frames if additional seek operations # are not required to do so - (not (hasattr(self, "_n_frames") and self._n_frames is None) and - frame >= self.n_frames+self._min_frame)): + or ( + not (hasattr(self, "_n_frames") and self._n_frames is None) + and frame >= self.n_frames + self._min_frame + ) + ): raise EOFError("attempt to seek outside sequence") return self.tell() != frame @@ -300,14 +315,12 @@ class StubImageFile(ImageFile): """ def _open(self): - raise NotImplementedError( - "StubImageFile subclass must implement _open" - ) + raise NotImplementedError("StubImageFile subclass must implement _open") def load(self): loader = self._load() if loader is None: - raise IOError("cannot find loader for this %s file" % self.format) + raise OSError("cannot find loader for this %s file" % self.format) image = loader.load(self) assert 
image is not None # become the other object (!) @@ -315,17 +328,16 @@ class StubImageFile(ImageFile): self.__dict__ = image.__dict__ def _load(self): - "(Hook) Find actual image loader." - raise NotImplementedError( - "StubImageFile subclass must implement _load" - ) + """(Hook) Find actual image loader.""" + raise NotImplementedError("StubImageFile subclass must implement _load") -class Parser(object): +class Parser: """ Incremental image parser. This class implements the standard feed/close consumer interface. """ + incremental = None image = None data = None @@ -397,7 +409,7 @@ class Parser(object): try: with io.BytesIO(self.data) as fp: im = Image.open(fp) - except IOError: + except OSError: # traceback.print_exc() pass # not enough data else: @@ -410,15 +422,13 @@ class Parser(object): im.load_prepare() d, e, o, a = im.tile[0] im.tile = [] - self.decoder = Image._getdecoder( - im.mode, d, a, im.decoderconfig - ) + self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig) self.decoder.setimage(im.im, e) # calculate decoder offset self.offset = o if self.offset <= len(self.data): - self.data = self.data[self.offset:] + self.data = self.data[self.offset :] self.offset = 0 self.image = im @@ -444,9 +454,9 @@ class Parser(object): self.feed(b"") self.data = self.decoder = None if not self.finished: - raise IOError("image was incomplete") + raise OSError("image was incomplete") if not self.image: - raise IOError("cannot parse this image") + raise OSError("cannot parse this image") if self.data: # incremental parsing not possible; reopen the file # not that we have all data @@ -460,6 +470,7 @@ class Parser(object): # -------------------------------------------------------------------- + def _save(im, fp, tile, bufsize=0): """Helper to save image based on tile list @@ -489,7 +500,7 @@ def _save(im, fp, tile, bufsize=0): for e, b, o, a in tile: e = Image._getencoder(im.mode, e, a, im.encoderconfig) if o > 0: - fp.seek(o, 0) + fp.seek(o) e.setimage(im.im, b) if e.pushes_fd: e.setfd(fp) @@ -501,14 +512,14 @@ def _save(im, fp, tile, bufsize=0): if s: break if s < 0: - raise IOError("encoder error %d when writing image file" % s) + raise OSError("encoder error %d when writing image file" % s) e.cleanup() else: # slight speedup: compress to real file object for e, b, o, a in tile: e = Image._getencoder(im.mode, e, a, im.encoderconfig) if o > 0: - fp.seek(o, 0) + fp.seek(o) e.setimage(im.im, b) if e.pushes_fd: e.setfd(fp) @@ -516,7 +527,7 @@ def _save(im, fp, tile, bufsize=0): else: s = e.encode_to_file(fh, bufsize) if s < 0: - raise IOError("encoder error %d when writing image file" % s) + raise OSError("encoder error %d when writing image file" % s) e.cleanup() if hasattr(fp, "flush"): fp.flush() @@ -546,7 +557,7 @@ def _safe_read(fp, size): return b"".join(data) -class PyCodecState(object): +class PyCodecState: def __init__(self): self.xsize = 0 self.ysize = 0 @@ -554,11 +565,10 @@ class PyCodecState(object): self.yoff = 0 def extents(self): - return (self.xoff, self.yoff, - self.xoff+self.xsize, self.yoff+self.ysize) + return (self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize) -class PyDecoder(object): +class PyDecoder: """ Python implementation of a format decoder. Override this class and add the decoding logic in the `decode` method. @@ -593,8 +603,6 @@ class PyDecoder(object): Override to perform the decoding process. :param buffer: A bytes object with the data to be decoded. - If `handles_eof` is set, then `buffer` will be empty and `self.fd` - will be set. 
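`ImageFile.Parser` above keeps its feed/close consumer interface; only the failure mode is now spelled `OSError`. Incremental decoding in a nutshell::

    from PIL import ImageFile

    parser = ImageFile.Parser()
    with open("photo.jpg", "rb") as f:  # hypothetical file
        while True:
            chunk = f.read(8192)
            if not chunk:
                break
            parser.feed(chunk)
    im = parser.close()  # raises OSError if the stream was incomplete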
:returns: A tuple of (bytes consumed, errcode). If finished with decoding return <0 for the bytes consumed. Err codes are from `ERRORS` @@ -647,8 +655,10 @@ class PyDecoder(object): if self.state.xsize <= 0 or self.state.ysize <= 0: raise ValueError("Size cannot be negative") - if (self.state.xsize + self.state.xoff > self.im.size[0] or - self.state.ysize + self.state.yoff > self.im.size[1]): + if ( + self.state.xsize + self.state.xoff > self.im.size[0] + or self.state.ysize + self.state.yoff > self.im.size[1] + ): raise ValueError("Tile cannot extend outside image") def set_as_raw(self, data, rawmode=None): @@ -663,7 +673,7 @@ class PyDecoder(object): if not rawmode: rawmode = self.mode - d = Image._getdecoder(self.mode, 'raw', (rawmode)) + d = Image._getdecoder(self.mode, "raw", (rawmode)) d.setimage(self.im, self.state.extents()) s = d.decode(data) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFilter.py b/server/www/packages/packages-linux/x64/PIL/ImageFilter.py index de99e64..6b0f5eb 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageFilter.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageFilter.py @@ -14,9 +14,6 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import division - import functools try: @@ -25,7 +22,7 @@ except ImportError: # pragma: no cover numpy = None -class Filter(object): +class Filter: pass @@ -57,12 +54,13 @@ class Kernel(BuiltinFilter): :param offset: Offset. If given, this value is added to the result, after it has been divided by the scale factor. """ + name = "Kernel" def __init__(self, size, kernel, scale=None, offset=0): if scale is None: # default scale is sum of kernel - scale = functools.reduce(lambda a, b: a+b, kernel) + scale = functools.reduce(lambda a, b: a + b, kernel) if size[0] * size[1] != len(kernel): raise ValueError("not enough coefficients in kernel") self.filterargs = size, scale, offset, kernel @@ -78,6 +76,7 @@ class RankFilter(Filter): ``size * size / 2`` for a median filter, ``size * size - 1`` for a max filter, etc. """ + name = "Rank" def __init__(self, size, rank): @@ -87,7 +86,7 @@ class RankFilter(Filter): def filter(self, image): if image.mode == "P": raise ValueError("cannot filter palette images") - image = image.expand(self.size//2, self.size//2) + image = image.expand(self.size // 2, self.size // 2) return image.rankfilter(self.size, self.rank) @@ -98,11 +97,12 @@ class MedianFilter(RankFilter): :param size: The kernel size, in pixels. """ + name = "Median" def __init__(self, size=3): self.size = size - self.rank = size*size//2 + self.rank = size * size // 2 class MinFilter(RankFilter): @@ -112,6 +112,7 @@ class MinFilter(RankFilter): :param size: The kernel size, in pixels. """ + name = "Min" def __init__(self, size=3): @@ -126,11 +127,12 @@ class MaxFilter(RankFilter): :param size: The kernel size, in pixels. """ + name = "Max" def __init__(self, size=3): self.size = size - self.rank = size*size-1 + self.rank = size * size - 1 class ModeFilter(Filter): @@ -141,6 +143,7 @@ class ModeFilter(Filter): :param size: The kernel size, in pixels. """ + name = "Mode" def __init__(self, size=3): @@ -155,6 +158,7 @@ class GaussianBlur(MultibandFilter): :param radius: Blur radius. """ + name = "GaussianBlur" def __init__(self, radius=2): @@ -175,6 +179,7 @@ class BoxBlur(MultibandFilter): returns an identical image. Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total. 
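For the rank and blur filters above, the rank index maps directly onto the sorted kernel window: `size * size // 2` is the median and `size * size - 1` the maximum. A sketch::

    from PIL import Image, ImageFilter

    im = Image.open("photo.jpg")            # hypothetical file
    im.filter(ImageFilter.MedianFilter(3))  # rank 9 // 2 == 4
    im.filter(ImageFilter.MaxFilter(3))     # rank 9 - 1 == 8
    im.filter(ImageFilter.BoxBlur(1))       # radius 1 -> 3x3 box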
""" + name = "BoxBlur" def __init__(self, radius): @@ -197,7 +202,8 @@ class UnsharpMask(MultibandFilter): .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking - """ + """ # noqa: E501 + name = "UnsharpMask" def __init__(self, radius=2, percent=150, threshold=3): @@ -211,96 +217,116 @@ class UnsharpMask(MultibandFilter): class BLUR(BuiltinFilter): name = "Blur" + # fmt: off filterargs = (5, 5), 16, 0, ( - 1, 1, 1, 1, 1, - 1, 0, 0, 0, 1, - 1, 0, 0, 0, 1, - 1, 0, 0, 0, 1, - 1, 1, 1, 1, 1 - ) + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on class CONTOUR(BuiltinFilter): name = "Contour" + # fmt: off filterargs = (3, 3), 1, 255, ( -1, -1, -1, -1, 8, -1, - -1, -1, -1 - ) + -1, -1, -1, + ) + # fmt: on class DETAIL(BuiltinFilter): name = "Detail" + # fmt: off filterargs = (3, 3), 6, 0, ( - 0, -1, 0, + 0, -1, 0, -1, 10, -1, - 0, -1, 0 - ) + 0, -1, 0, + ) + # fmt: on class EDGE_ENHANCE(BuiltinFilter): name = "Edge-enhance" + # fmt: off filterargs = (3, 3), 2, 0, ( -1, -1, -1, -1, 10, -1, - -1, -1, -1 - ) + -1, -1, -1, + ) + # fmt: on class EDGE_ENHANCE_MORE(BuiltinFilter): name = "Edge-enhance More" + # fmt: off filterargs = (3, 3), 1, 0, ( -1, -1, -1, -1, 9, -1, - -1, -1, -1 - ) + -1, -1, -1, + ) + # fmt: on class EMBOSS(BuiltinFilter): name = "Emboss" + # fmt: off filterargs = (3, 3), 1, 128, ( - -1, 0, 0, - 0, 1, 0, - 0, 0, 0 - ) + -1, 0, 0, + 0, 1, 0, + 0, 0, 0, + ) + # fmt: on class FIND_EDGES(BuiltinFilter): name = "Find Edges" + # fmt: off filterargs = (3, 3), 1, 0, ( -1, -1, -1, -1, 8, -1, - -1, -1, -1 - ) + -1, -1, -1, + ) + # fmt: on class SHARPEN(BuiltinFilter): name = "Sharpen" + # fmt: off filterargs = (3, 3), 16, 0, ( -2, -2, -2, -2, 32, -2, - -2, -2, -2 - ) + -2, -2, -2, + ) + # fmt: on class SMOOTH(BuiltinFilter): name = "Smooth" + # fmt: off filterargs = (3, 3), 13, 0, ( - 1, 1, 1, - 1, 5, 1, - 1, 1, 1 - ) + 1, 1, 1, + 1, 5, 1, + 1, 1, 1, + ) + # fmt: on class SMOOTH_MORE(BuiltinFilter): name = "Smooth More" + # fmt: off filterargs = (5, 5), 100, 0, ( - 1, 1, 1, 1, 1, - 1, 5, 5, 5, 1, - 1, 5, 44, 5, 1, - 1, 5, 5, 5, 1, - 1, 1, 1, 1, 1 - ) + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on class Color3DLUT(MultibandFilter): @@ -327,6 +353,7 @@ class Color3DLUT(MultibandFilter): than ``channels`` channels. Default is ``None``, which means that mode wouldn't be changed. """ + name = "Color 3D LUT" def __init__(self, size, table, channels=3, target_mode=None, **kwargs): @@ -338,7 +365,7 @@ class Color3DLUT(MultibandFilter): # Hidden flag `_copy_table=False` could be used to avoid extra copying # of the table if the table is specially made for the constructor. 
- copy_table = kwargs.get('_copy_table', True) + copy_table = kwargs.get("_copy_table", True) items = size[0] * size[1] * size[2] wrong_size = False @@ -346,8 +373,11 @@ class Color3DLUT(MultibandFilter): if copy_table: table = table.copy() - if table.shape in [(items * channels,), (items, channels), - (size[2], size[1], size[0], channels)]: + if table.shape in [ + (items * channels,), + (items, channels), + (size[2], size[1], size[0], channels), + ]: table = table.reshape(items * channels) else: wrong_size = True @@ -363,7 +393,8 @@ class Color3DLUT(MultibandFilter): if len(pixel) != channels: raise ValueError( "The elements of the table should " - "have a length of {}.".format(channels)) + "have a length of {}.".format(channels) + ) table.extend(pixel) if wrong_size or len(table) != items * channels: @@ -371,7 +402,9 @@ class Color3DLUT(MultibandFilter): "The table should have either channels * size**3 float items " "or size**3 items of channels-sized tuples with floats. " "Table should be: {}x{}x{}x{}. Actual length: {}".format( - channels, size[0], size[1], size[2], len(table))) + channels, size[0], size[1], size[2], len(table) + ) + ) self.table = table @staticmethod @@ -379,8 +412,9 @@ class Color3DLUT(MultibandFilter): try: _, _, _ = size except ValueError: - raise ValueError("Size should be either an integer or " - "a tuple of three integers.") + raise ValueError( + "Size should be either an integer or a tuple of three integers." + ) except TypeError: size = (size, size, size) size = [int(x) for x in size] @@ -411,15 +445,20 @@ class Color3DLUT(MultibandFilter): for b in range(size3D): for g in range(size2D): for r in range(size1D): - table[idx_out:idx_out + channels] = callback( - r / (size1D-1), g / (size2D-1), b / (size3D-1)) + table[idx_out : idx_out + channels] = callback( + r / (size1D - 1), g / (size2D - 1), b / (size3D - 1) + ) idx_out += channels - return cls((size1D, size2D, size3D), table, channels=channels, - target_mode=target_mode, _copy_table=False) + return cls( + (size1D, size2D, size3D), + table, + channels=channels, + target_mode=target_mode, + _copy_table=False, + ) - def transform(self, callback, with_normals=False, channels=None, - target_mode=None): + def transform(self, callback, with_normals=False, channels=None, target_mode=None): """Transforms the table values using provided callback and returns a new LUT with altered values. @@ -450,24 +489,31 @@ class Color3DLUT(MultibandFilter): for b in range(size3D): for g in range(size2D): for r in range(size1D): - values = self.table[idx_in:idx_in + ch_in] + values = self.table[idx_in : idx_in + ch_in] if with_normals: - values = callback(r / (size1D-1), g / (size2D-1), - b / (size3D-1), *values) + values = callback( + r / (size1D - 1), + g / (size2D - 1), + b / (size3D - 1), + *values, + ) else: values = callback(*values) - table[idx_out:idx_out + ch_out] = values + table[idx_out : idx_out + ch_out] = values idx_in += ch_in idx_out += ch_out - return type(self)(self.size, table, channels=ch_out, - target_mode=target_mode or self.mode, - _copy_table=False) + return type(self)( + self.size, + table, + channels=ch_out, + target_mode=target_mode or self.mode, + _copy_table=False, + ) def __repr__(self): r = [ - "{} from {}".format(self.__class__.__name__, - self.table.__class__.__name__), + "{} from {}".format(self.__class__.__name__, self.table.__class__.__name__), "size={:d}x{:d}x{:d}".format(*self.size), "channels={:d}".format(self.channels), ] @@ -479,5 +525,11 @@ class Color3DLUT(MultibandFilter): from . 
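`Color3DLUT.generate` above fills the table by sweeping b, then g, then r, each normalized to [0, 1], so an identity LUT is one line::

    from PIL import Image, ImageFilter

    identity = ImageFilter.Color3DLUT.generate(17, lambda r, g, b: (r, g, b))
    out = Image.open("photo.jpg").filter(identity)  # hypothetical file; output matches input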
import Image return image.color_lut_3d( - self.mode or image.mode, Image.LINEAR, self.channels, - self.size[0], self.size[1], self.size[2], self.table) + self.mode or image.mode, + Image.LINEAR, + self.channels, + self.size[0], + self.size[1], + self.size[2], + self.table, + ) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFont.py b/server/www/packages/packages-linux/x64/PIL/ImageFont.py index 5384a72..027e4c4 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageFont.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageFont.py @@ -25,16 +25,19 @@ # See the README file for information on usage and redistribution. # -from . import Image -from ._util import isDirectory, isPath, py3 +import base64 import os import sys +from io import BytesIO + +from . import Image +from ._util import isDirectory, isPath LAYOUT_BASIC = 0 LAYOUT_RAQM = 1 -class _imagingft_not_installed(object): +class _imagingft_not_installed: # module placeholder def __getattr__(self, id): raise ImportError("The _imagingft C module is not installed") @@ -62,27 +65,33 @@ except ImportError: # -------------------------------------------------------------------- -class ImageFont(object): +class ImageFont: "PIL font wrapper" def _load_pilfont(self, filename): with open(filename, "rb") as fp: + image = None for ext in (".png", ".gif", ".pbm"): + if image: + image.close() try: fullname = os.path.splitext(filename)[0] + ext image = Image.open(fullname) - except: + except Exception: pass else: if image and image.mode in ("1", "L"): break else: - raise IOError("cannot find glyph data file") + if image: + image.close() + raise OSError("cannot find glyph data file") self.file = fullname - return self._load_pilfont_data(fp, image) + self._load_pilfont_data(fp, image) + image.close() def _load_pilfont_data(self, file, image): @@ -98,7 +107,7 @@ class ImageFont(object): self.info.append(s) # read PILfont metrics - data = file.read(256*20) + data = file.read(256 * 20) # check image if image.mode not in ("1", "L"): @@ -109,9 +118,33 @@ class ImageFont(object): self.font = Image.core.font(image.im, data) def getsize(self, text, *args, **kwargs): + """ + Returns width and height (in pixels) of given text. + + :param text: Text to measure. + + :return: (width, height) + """ return self.font.getsize(text) def getmask(self, text, mode="", *args, **kwargs): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ return self.font.getmask(text, mode) @@ -119,11 +152,11 @@ class ImageFont(object): # Wrapper for FreeType fonts. Application code should use the # truetype factory function to create font objects. 
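Per the comment above, application code should go through the factory functions rather than instantiate the wrapper classes directly (the font path is hypothetical)::

    from PIL import ImageFont

    font = ImageFont.truetype("DejaVuSans.ttf", 24)  # FreeTypeFont
    bitmap = ImageFont.load_default()                # built-in bitmap font, no file needed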
-class FreeTypeFont(object): + +class FreeTypeFont: "FreeType font wrapper (requires _imagingft service)" - def __init__(self, font=None, size=10, index=0, encoding="", - layout_engine=None): + def __init__(self, font=None, size=10, index=0, encoding="", layout_engine=None): # FIXME: use service provider instead self.path = font @@ -135,60 +168,317 @@ class FreeTypeFont(object): layout_engine = LAYOUT_BASIC if core.HAVE_RAQM: layout_engine = LAYOUT_RAQM - if layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM: + elif layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM: layout_engine = LAYOUT_BASIC self.layout_engine = layout_engine - if isPath(font): - self.font = core.getfont(font, size, index, encoding, - layout_engine=layout_engine) - else: - self.font_bytes = font.read() + def load_from_bytes(f): + self.font_bytes = f.read() self.font = core.getfont( - "", size, index, encoding, self.font_bytes, layout_engine) + "", size, index, encoding, self.font_bytes, layout_engine + ) + + if isPath(font): + if sys.platform == "win32": + font_bytes_path = font if isinstance(font, bytes) else font.encode() + try: + font_bytes_path.decode("ascii") + except UnicodeDecodeError: + # FreeType cannot load fonts with non-ASCII characters on Windows + # So load it into memory first + with open(font, "rb") as f: + load_from_bytes(f) + return + self.font = core.getfont( + font, size, index, encoding, layout_engine=layout_engine + ) + else: + load_from_bytes(font) def _multiline_split(self, text): split_character = "\n" if isinstance(text, str) else b"\n" return text.split(split_character) def getname(self): + """ + :return: A tuple of the font family (e.g. Helvetica) and the font style + (e.g. Bold) + """ return self.font.family, self.font.style def getmetrics(self): + """ + :return: A tuple of the font ascent (the distance from the baseline to + the highest outline point) and descent (the distance from the + baseline to the lowest outline point, a negative value) + """ return self.font.ascent, self.font.descent - def getsize(self, text, direction=None, features=None): - size, offset = self.font.getsize(text, direction, features) - return (size[0] + offset[0], size[1] + offset[1]) + def getsize( + self, text, direction=None, features=None, language=None, stroke_width=0 + ): + """ + Returns width and height (in pixels) of given text if rendered in font with + provided direction, features, and language. - def getsize_multiline(self, text, direction=None, - spacing=4, features=None): + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + ` + Requires libraqm. + + .. 
versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: (width, height) + """ + size, offset = self.font.getsize(text, direction, features, language) + return ( + size[0] + stroke_width * 2 + offset[0], + size[1] + stroke_width * 2 + offset[1], + ) + + def getsize_multiline( + self, + text, + direction=None, + spacing=4, + features=None, + language=None, + stroke_width=0, + ): + """ + Returns width and height (in pixels) of given text if rendered in font + with provided direction, features, and language, while respecting + newline characters. + + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param spacing: The vertical gap between lines, defaulting to 4 pixels. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + ` + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: (width, height) + """ max_width = 0 lines = self._multiline_split(text) - line_spacing = self.getsize('A')[1] + spacing + line_spacing = self.getsize("A", stroke_width=stroke_width)[1] + spacing for line in lines: - line_width, line_height = self.getsize(line, direction, features) + line_width, line_height = self.getsize( + line, direction, features, language, stroke_width + ) max_width = max(max_width, line_width) - return max_width, len(lines)*line_spacing - spacing + return max_width, len(lines) * line_spacing - spacing def getoffset(self, text): + """ + Returns the offset of given text. This is the gap between the + starting coordinate and the first marking. Note that this gap is + included in the result of :py:func:`~PIL.ImageFont.FreeTypeFont.getsize`. + + :param text: Text to measure. + + :return: A tuple of the x and y offset + """ return self.font.getsize(text)[1] - def getmask(self, text, mode="", direction=None, features=None): - return self.getmask2(text, mode, direction=direction, - features=features)[0] + def getmask( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + ): + """ + Create a bitmap for the text. - def getmask2(self, text, mode="", fill=Image.core.fill, direction=None, - features=None, *args, **kwargs): - size, offset = self.font.getsize(text, direction, features) + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. 
versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + ` + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + return self.getmask2( + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + )[0] + + def getmask2( + self, + text, + mode="", + fill=Image.core.fill, + direction=None, + features=None, + language=None, + stroke_width=0, + *args, + **kwargs + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + ` + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. 
versionadded:: 6.2.0 + + :return: A tuple of an internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module, and the text offset, the + gap between the starting coordinate and the first marking + """ + size, offset = self.font.getsize(text, direction, features, language) + size = size[0] + stroke_width * 2, size[1] + stroke_width * 2 im = fill("L", size, 0) - self.font.render(text, im.id, mode == "1", direction, features) + self.font.render( + text, im.id, mode == "1", direction, features, language, stroke_width + ) return im, offset - def font_variant(self, font=None, size=None, index=None, encoding=None, - layout_engine=None): + def font_variant( + self, font=None, size=None, index=None, encoding=None, layout_engine=None + ): """ Create a copy of this FreeTypeFont object, using any specified arguments to override the settings. @@ -203,11 +493,64 @@ class FreeTypeFont(object): size=self.size if size is None else size, index=self.index if index is None else index, encoding=self.encoding if encoding is None else encoding, - layout_engine=self.layout_engine if layout_engine is None else layout_engine + layout_engine=layout_engine or self.layout_engine, ) + def get_variation_names(self): + """ + :returns: A list of the named styles in a variation font. + :exception IOError: If the font is not a variation font. + """ + try: + names = self.font.getvarnames() + except AttributeError: + raise NotImplementedError("FreeType 2.9.1 or greater is required") + return [name.replace(b"\x00", b"") for name in names] -class TransposedFont(object): + def set_variation_by_name(self, name): + """ + :param name: The name of the style. + :exception IOError: If the font is not a variation font. + """ + names = self.get_variation_names() + if not isinstance(name, bytes): + name = name.encode() + index = names.index(name) + + if index == getattr(self, "_last_variation_index", None): + # When the same name is set twice in a row, + # there is an 'unknown freetype error' + # https://savannah.nongnu.org/bugs/?56186 + return + self._last_variation_index = index + + self.font.setvarname(index) + + def get_variation_axes(self): + """ + :returns: A list of the axes in a variation font. + :exception IOError: If the font is not a variation font. + """ + try: + axes = self.font.getvaraxes() + except AttributeError: + raise NotImplementedError("FreeType 2.9.1 or greater is required") + for axis in axes: + axis["name"] = axis["name"].replace(b"\x00", b"") + return axes + + def set_variation_by_axes(self, axes): + """ + :param axes: A list of values for each axis. + :exception IOError: If the font is not a variation font. + """ + try: + self.font.setvaraxes(axes) + except AttributeError: + raise NotImplementedError("FreeType 2.9.1 or greater is required") + + +class TransposedFont: "Wrapper for writing rotated or mirrored text" def __init__(self, font, orientation=None): @@ -250,35 +593,62 @@ def load(filename): return f -def truetype(font=None, size=10, index=0, encoding="", - layout_engine=None): +def truetype(font=None, size=10, index=0, encoding="", layout_engine=None): """ Load a TrueType or OpenType font from a file or file-like object, and create a font object. This function loads a font object from the given file or file-like object, and creates a font object for a font of the given size. + Pillow uses FreeType to open font files. If you are opening many fonts + simultaneously on Windows, be aware that Windows limits the number of files + that can be open in C at once to 512. 
If you approach that limit, an + ``OSError`` may be thrown, reporting that FreeType "cannot open resource". + This function requires the _imagingft service. :param font: A filename or file-like object containing a TrueType font. - Under Windows, if the file is not found in this filename, - the loader also looks in Windows :file:`fonts/` directory. + If the file is not found in this filename, the loader may also + search in other directories, such as the :file:`fonts/` + directory on Windows or :file:`/Library/Fonts/`, + :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on + macOS. + :param size: The requested size, in points. :param index: Which font face to load (default is first available face). - :param encoding: Which font encoding to use (default is Unicode). Common - encodings are "unic" (Unicode), "symb" (Microsoft - Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), - and "armn" (Apple Roman). See the FreeType documentation - for more information. + :param encoding: Which font encoding to use (default is Unicode). Possible + encodings include (see the FreeType documentation for more + information): + + * "unic" (Unicode) + * "symb" (Microsoft Symbol) + * "ADOB" (Adobe Standard) + * "ADBE" (Adobe Expert) + * "ADBC" (Adobe Custom) + * "armn" (Apple Roman) + * "sjis" (Shift JIS) + * "gb " (PRC) + * "big5" + * "wans" (Extended Wansung) + * "joha" (Johab) + * "lat1" (Latin-1) + + This specifies the character set to use. It does not alter the + encoding of any text provided in subsequent operations. :param layout_engine: Which layout engine to use, if available: `ImageFont.LAYOUT_BASIC` or `ImageFont.LAYOUT_RAQM`. :return: A font object. :exception IOError: If the file could not be read. """ - try: + def freetype(font): return FreeTypeFont(font, size, index, encoding, layout_engine) - except IOError: + + try: + return freetype(font) + except OSError: + if not isPath(font): + raise ttf_filename = os.path.basename(font) dirs = [] @@ -289,17 +659,19 @@ def truetype(font=None, size=10, index=0, encoding="", windir = os.environ.get("WINDIR") if windir: dirs.append(os.path.join(windir, "fonts")) - elif sys.platform in ('linux', 'linux2'): + elif sys.platform in ("linux", "linux2"): lindirs = os.environ.get("XDG_DATA_DIRS", "") if not lindirs: # According to the freedesktop spec, XDG_DATA_DIRS should # default to /usr/share - lindirs = '/usr/share' - dirs += [os.path.join(lindir, "fonts") - for lindir in lindirs.split(":")] - elif sys.platform == 'darwin': - dirs += ['/Library/Fonts', '/System/Library/Fonts', - os.path.expanduser('~/Library/Fonts')] + lindirs = "/usr/share" + dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")] + elif sys.platform == "darwin": + dirs += [ + "/Library/Fonts", + "/System/Library/Fonts", + os.path.expanduser("~/Library/Fonts"), + ] ext = os.path.splitext(ttf_filename)[1] first_font_with_a_different_extension = None @@ -307,21 +679,15 @@ def truetype(font=None, size=10, index=0, encoding="", for walkroot, walkdir, walkfilenames in os.walk(directory): for walkfilename in walkfilenames: if ext and walkfilename == ttf_filename: + return freetype(os.path.join(walkroot, walkfilename)) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: fontpath = os.path.join(walkroot, walkfilename) - return FreeTypeFont(fontpath, size, index, - encoding, layout_engine) - elif (not ext and - os.path.splitext(walkfilename)[0] == ttf_filename): - fontpath = os.path.join(walkroot, walkfilename) - if os.path.splitext(fontpath)[1] == 
'.ttf': - return FreeTypeFont(fontpath, size, index, - encoding, layout_engine) - if not ext \ - and first_font_with_a_different_extension is None: + if os.path.splitext(fontpath)[1] == ".ttf": + return freetype(fontpath) + if not ext and first_font_with_a_different_extension is None: first_font_with_a_different_extension = fontpath if first_font_with_a_different_extension: - return FreeTypeFont(first_font_with_a_different_extension, size, - index, encoding, layout_engine) + return freetype(first_font_with_a_different_extension) raise @@ -337,15 +703,12 @@ def load_path(filename): for directory in sys.path: if isDirectory(directory): if not isinstance(filename, str): - if py3: - filename = filename.decode("utf-8") - else: - filename = filename.encode("utf-8") + filename = filename.decode("utf-8") try: return load(os.path.join(directory, filename)) - except IOError: + except OSError: pass - raise IOError("cannot find font file") + raise OSError("cannot find font file") def load_default(): @@ -355,12 +718,12 @@ def load_default(): :return: A font object. """ - from io import BytesIO - import base64 f = ImageFont() f._load_pilfont_data( # courB08 - BytesIO(base64.b64decode(b''' + BytesIO( + base64.b64decode( + b""" UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA @@ -452,7 +815,13 @@ AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// +QAGAAIAzgAKANUAEw== -''')), Image.open(BytesIO(base64.b64decode(b''' +""" + ) + ), + Image.open( + BytesIO( + base64.b64decode( + b""" iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g @@ -476,5 +845,9 @@ evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR w7IkEbzhVQAAAABJRU5ErkJggg== -''')))) +""" + ) + ) + ), + ) return f diff --git a/server/www/packages/packages-linux/x64/PIL/ImageGrab.py b/server/www/packages/packages-linux/x64/PIL/ImageGrab.py index 712b02c..66e2e85 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageGrab.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageGrab.py @@ -15,35 +15,54 @@ # See the README file for information on usage and redistribution. # +import sys + from . 
import Image -import sys -if sys.platform not in ["win32", "darwin"]: - raise ImportError("ImageGrab is macOS and Windows only") - -if sys.platform == "win32": - grabber = Image.core.grabscreen -elif sys.platform == "darwin": +if sys.platform == "darwin": import os import tempfile import subprocess -def grab(bbox=None): - if sys.platform == "darwin": - fh, filepath = tempfile.mkstemp('.png') - os.close(fh) - subprocess.call(['screencapture', '-x', filepath]) - im = Image.open(filepath) - im.load() - os.unlink(filepath) - else: - size, data = grabber() - im = Image.frombytes( - "RGB", size, data, - # RGB, 32-bit line padding, origin lower left corner - "raw", "BGR", (size[0]*3 + 3) & -4, -1 +def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None): + if xdisplay is None: + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + subprocess.call(["screencapture", "-x", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_cropped = im.crop(bbox) + im.close() + return im_cropped + return im + elif sys.platform == "win32": + offset, size, data = Image.core.grabscreen_win32( + include_layered_windows, all_screens ) + im = Image.frombytes( + "RGB", + size, + data, + # RGB, 32-bit line padding, origin lower left corner + "raw", + "BGR", + (size[0] * 3 + 3) & -4, + -1, + ) + if bbox: + x0, y0 = offset + left, top, right, bottom = bbox + im = im.crop((left - x0, top - y0, right - x0, bottom - y0)) + return im + # use xdisplay=None for default display on non-win32/macOS systems + if not Image.core.HAVE_XCB: + raise IOError("Pillow was built without XCB support") + size, data = Image.core.grabscreen_x11(xdisplay) + im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1) if bbox: im = im.crop(bbox) return im @@ -51,14 +70,16 @@ def grab(bbox=None): def grabclipboard(): if sys.platform == "darwin": - fh, filepath = tempfile.mkstemp('.jpg') + fh, filepath = tempfile.mkstemp(".jpg") os.close(fh) commands = [ - "set theFile to (open for access POSIX file \""+filepath+"\" with write permission)", + 'set theFile to (open for access POSIX file "' + + filepath + + '" with write permission)', "try", - "write (the clipboard as JPEG picture) to theFile", + " write (the clipboard as JPEG picture) to theFile", "end try", - "close access theFile" + "close access theFile", ] script = ["osascript"] for command in commands: @@ -71,10 +92,13 @@ def grabclipboard(): im.load() os.unlink(filepath) return im - else: - data = Image.core.grabclipboard() + elif sys.platform == "win32": + data = Image.core.grabclipboard_win32() if isinstance(data, bytes): from . import BmpImagePlugin import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) return data + else: + raise NotImplementedError("ImageGrab.grabclipboard() is macOS and Windows only") diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMath.py b/server/www/packages/packages-linux/x64/PIL/ImageMath.py index d985877..adbb940 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageMath.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageMath.py @@ -15,23 +15,18 @@ # See the README file for information on usage and redistribution. # -from . import Image, _imagingmath -from ._util import py3 +import builtins -try: - import builtins -except ImportError: - import __builtin__ - builtins = __builtin__ +from . 
import Image, _imagingmath VERBOSE = 0 def _isconstant(v): - return isinstance(v, int) or isinstance(v, float) + return isinstance(v, (int, float)) -class _Operand(object): +class _Operand: """Wraps an image operand, providing standard operators""" def __init__(self, im): @@ -61,7 +56,7 @@ class _Operand(object): out = Image.new(mode or im1.mode, im1.size, None) im1.load() try: - op = getattr(_imagingmath, op+"_"+im1.mode) + op = getattr(_imagingmath, op + "_" + im1.mode) except AttributeError: raise TypeError("bad operand type for '%s'" % op) _imagingmath.unop(op, out.im.id, im1.im.id) @@ -78,8 +73,7 @@ class _Operand(object): raise ValueError("mode mismatch") if im1.size != im2.size: # crop both arguments to a common size - size = (min(im1.size[0], im2.size[0]), - min(im1.size[1], im2.size[1])) + size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1])) if im1.size != size: im1 = im1.crop((0, 0) + size) if im2.size != size: @@ -90,7 +84,7 @@ class _Operand(object): im1.load() im2.load() try: - op = getattr(_imagingmath, op+"_"+im1.mode) + op = getattr(_imagingmath, op + "_" + im1.mode) except AttributeError: raise TypeError("bad operand type for '%s'" % op) _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) @@ -101,11 +95,6 @@ class _Operand(object): # an image is "true" if it contains at least one non-zero pixel return self.im.getbbox() is not None - if not py3: - # Provide __nonzero__ for pre-Py3k - __nonzero__ = __bool__ - del __bool__ - def __abs__(self): return self.apply("abs", self) @@ -152,13 +141,6 @@ class _Operand(object): def __rpow__(self, other): return self.apply("pow", other, self) - if not py3: - # Provide __div__ and __rdiv__ for pre-Py3k - __div__ = __truediv__ - __rdiv__ = __rtruediv__ - del __truediv__ - del __rtruediv__ - # bitwise def __invert__(self): return self.apply("invert", self) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMode.py b/server/www/packages/packages-linux/x64/PIL/ImageMode.py index 2b3377a..9882883 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageMode.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageMode.py @@ -17,7 +17,7 @@ _modes = None -class ModeDescriptor(object): +class ModeDescriptor: """Wrapper for mode strings.""" def __init__(self, mode, bands, basemode, basetype): @@ -37,20 +37,28 @@ def getmode(mode): # initialize mode cache from . 
import Image + modes = {} # core modes for m, (basemode, basetype, bands) in Image._MODEINFO.items(): modes[m] = ModeDescriptor(m, bands, basemode, basetype) # extra experimental modes - modes["RGBa"] = ModeDescriptor("RGBa", - ("R", "G", "B", "a"), "RGB", "L") + modes["RGBa"] = ModeDescriptor("RGBa", ("R", "G", "B", "a"), "RGB", "L") modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L") modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") # mapping modes - modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") - modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") - modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + for i16mode in ( + "I;16", + "I;16S", + "I;16L", + "I;16LS", + "I;16B", + "I;16BS", + "I;16N", + "I;16NS", + ): + modes[i16mode] = ModeDescriptor(i16mode, ("I",), "L", "L") # set global mode cache atomically _modes = modes return _modes[mode] diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMorph.py b/server/www/packages/packages-linux/x64/PIL/ImageMorph.py index 54ceb79..d1ec09e 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageMorph.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageMorph.py @@ -5,15 +5,27 @@ # # Copyright (c) 2014 Dov Grobgeld -from __future__ import print_function +import re from . import Image, _imagingmorph -import re LUT_SIZE = 1 << 9 +# fmt: off +ROTATION_MATRIX = [ + 6, 3, 0, + 7, 4, 1, + 8, 5, 2, +] +MIRROR_MATRIX = [ + 2, 1, 0, + 5, 4, 3, + 8, 7, 6, +] +# fmt: on -class LutBuilder(object): + +class LutBuilder: """A class for building a MorphLut from a descriptive language The input patterns is a list of a strings sequences like these:: @@ -48,6 +60,7 @@ class LutBuilder(object): lut = lb.build_lut() """ + def __init__(self, patterns=None, op_name=None): if patterns is not None: self.patterns = patterns @@ -56,20 +69,19 @@ class LutBuilder(object): self.lut = None if op_name is not None: known_patterns = { - 'corner': ['1:(... ... ...)->0', - '4:(00. 01. ...)->1'], - 'dilation4': ['4:(... .0. .1.)->1'], - 'dilation8': ['4:(... .0. .1.)->1', - '4:(... .0. ..1)->1'], - 'erosion4': ['4:(... .1. .0.)->0'], - 'erosion8': ['4:(... .1. .0.)->0', - '4:(... .1. ..0)->0'], - 'edge': ['1:(... ... ...)->0', - '4:(.0. .1. ...)->1', - '4:(01. .1. ...)->1'] + "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"], + "dilation4": ["4:(... .0. .1.)->1"], + "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"], + "erosion4": ["4:(... .1. .0.)->0"], + "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"], + "edge": [ + "1:(... ... ...)->0", + "4:(.0. .1. ...)->1", + "4:(01. .1. ...)->1", + ], } if op_name not in known_patterns: - raise Exception('Unknown pattern '+op_name+'!') + raise Exception("Unknown pattern " + op_name + "!") self.patterns = known_patterns[op_name] @@ -88,8 +100,8 @@ class LutBuilder(object): """string_permute takes a pattern and a permutation and returns the string permuted according to the permutation list. 
""" - assert(len(permutation) == 9) - return ''.join(pattern[p] for p in permutation) + assert len(permutation) == 9 + return "".join(pattern[p] for p in permutation) def _pattern_permute(self, basic_pattern, options, basic_result): """pattern_permute takes a basic pattern and its result and clones @@ -98,32 +110,25 @@ class LutBuilder(object): patterns = [(basic_pattern, basic_result)] # rotations - if '4' in options: + if "4" in options: res = patterns[-1][1] for i in range(4): patterns.append( - (self._string_permute(patterns[-1][0], [6, 3, 0, - 7, 4, 1, - 8, 5, 2]), res)) + (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res) + ) # mirror - if 'M' in options: + if "M" in options: n = len(patterns) for pattern, res in patterns[0:n]: - patterns.append( - (self._string_permute(pattern, [2, 1, 0, - 5, 4, 3, - 8, 7, 6]), res)) + patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res)) # negate - if 'N' in options: + if "N" in options: n = len(patterns) for pattern, res in patterns[0:n]: # Swap 0 and 1 - pattern = (pattern - .replace('0', 'Z') - .replace('1', '0') - .replace('Z', '1')) - res = 1-int(res) + pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1") + res = 1 - int(res) patterns.append((pattern, res)) return patterns @@ -138,22 +143,21 @@ class LutBuilder(object): # Parse and create symmetries of the patterns strings for p in self.patterns: - m = re.search( - r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", "")) if not m: - raise Exception('Syntax error in pattern "'+p+'"') + raise Exception('Syntax error in pattern "' + p + '"') options = m.group(1) pattern = m.group(2) result = int(m.group(3)) # Get rid of spaces - pattern = pattern.replace(' ', '').replace('\n', '') + pattern = pattern.replace(" ", "").replace("\n", "") patterns += self._pattern_permute(pattern, options, result) # compile the patterns into regular expressions for speed for i, pattern in enumerate(patterns): - p = pattern[0].replace('.', 'X').replace('X', '[01]') + p = pattern[0].replace(".", "X").replace("X", "[01]") p = re.compile(p) patterns[i] = (p, pattern[1]) @@ -163,7 +167,7 @@ class LutBuilder(object): for i in range(LUT_SIZE): # Build the bit pattern bitpattern = bin(i)[2:] - bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1] for p, r in patterns: if p.match(bitpattern): @@ -172,13 +176,10 @@ class LutBuilder(object): return self.lut -class MorphOp(object): +class MorphOp: """A class for binary morphological operators""" - def __init__(self, - lut=None, - op_name=None, - patterns=None): + def __init__(self, lut=None, op_name=None, patterns=None): """Create a binary morphological operator""" self.lut = lut if op_name is not None: @@ -192,13 +193,12 @@ class MorphOp(object): Returns a tuple of the number of changed pixels and the morphed image""" if self.lut is None: - raise Exception('No operator loaded') + raise Exception("No operator loaded") - if image.mode != 'L': - raise Exception('Image must be binary, meaning it must use mode L') + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") outimage = Image.new(image.mode, image.size, None) - count = _imagingmorph.apply( - bytes(self.lut), image.im.id, outimage.im.id) + count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id) return count, outimage def match(self, image): @@ -208,10 +208,10 @@ class MorphOp(object): 
Returns a list of tuples of (x,y) coordinates of all matching pixels. See :ref:`coordinate-system`.""" if self.lut is None: - raise Exception('No operator loaded') + raise Exception("No operator loaded") - if image.mode != 'L': - raise Exception('Image must be binary, meaning it must use mode L') + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") return _imagingmorph.match(bytes(self.lut), image.im.id) def get_on_pixels(self, image): @@ -220,24 +220,24 @@ class MorphOp(object): Returns a list of tuples of (x,y) coordinates of all matching pixels. See :ref:`coordinate-system`.""" - if image.mode != 'L': - raise Exception('Image must be binary, meaning it must use mode L') + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") return _imagingmorph.get_on_pixels(image.im.id) def load_lut(self, filename): """Load an operator from an mrl file""" - with open(filename, 'rb') as f: + with open(filename, "rb") as f: self.lut = bytearray(f.read()) if len(self.lut) != LUT_SIZE: self.lut = None - raise Exception('Wrong size operator file!') + raise Exception("Wrong size operator file!") def save_lut(self, filename): """Save an operator to an mrl file""" if self.lut is None: - raise Exception('No operator loaded') - with open(filename, 'wb') as f: + raise Exception("No operator loaded") + with open(filename, "wb") as f: f.write(self.lut) def set_lut(self, lut): diff --git a/server/www/packages/packages-linux/x64/PIL/ImageOps.py b/server/www/packages/packages-linux/x64/PIL/ImageOps.py index 9f516ba..e4e0840 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageOps.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageOps.py @@ -17,16 +17,15 @@ # See the README file for information on usage and redistribution. # -from . import Image -from ._util import isStringType -import operator import functools -import warnings +import operator +from . import Image # # helpers + def _border(border): if isinstance(border, tuple): if len(border) == 2: @@ -39,8 +38,9 @@ def _border(border): def _color(color, mode): - if isStringType(color): + if isinstance(color, str): from . import ImageColor + color = ImageColor.getcolor(color, mode) return color @@ -54,7 +54,8 @@ def _lut(image, lut): lut = lut + lut + lut return image.point(lut) else: - raise IOError("not supported for this image mode") + raise OSError("not supported for this image mode") + # # actions @@ -76,7 +77,7 @@ def autocontrast(image, cutoff=0, ignore=None): histogram = image.histogram() lut = [] for layer in range(0, len(histogram), 256): - h = histogram[layer:layer+256] + h = histogram[layer : layer + 256] if ignore is not None: # get rid of outliers try: @@ -136,8 +137,7 @@ def autocontrast(image, cutoff=0, ignore=None): return _lut(image, lut) -def colorize(image, black, white, mid=None, blackpoint=0, - whitepoint=255, midpoint=127): +def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127): """ Colorize grayscale image. This function calculates a color wedge which maps all black pixels in @@ -221,7 +221,7 @@ def colorize(image, black, white, mid=None, blackpoint=0, return _lut(image, red + green + blue) -def pad(image, size, method=Image.NEAREST, color=None, centering=(0.5, 0.5)): +def pad(image, size, method=Image.BICUBIC, color=None, centering=(0.5, 0.5)): """ Returns a sized and padded version of the image, expanded to fill the requested aspect ratio and size. 
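A short, self-contained sketch of the two histogram helpers touched above, using a synthetic gradient so nothing external is needed (the output filename is illustrative):

from PIL import Image, ImageOps

gray = Image.linear_gradient("L")                # 256x256 grayscale ramp
boosted = ImageOps.autocontrast(gray, cutoff=2)  # clip 2% outliers, stretch to full range
tinted = ImageOps.colorize(boosted, black="navy", white="ivory", mid="orange")
tinted.save("tinted.png")

As the hunk above shows, colorize() builds a 768-entry wedge (one 256-entry ramp per RGB band) between the given black/mid/white colors and applies it through _lut(), i.e. image.point().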
@@ -230,10 +230,11 @@ def pad(image, size, method=Image.NEAREST, color=None, centering=(0.5, 0.5)): :param size: The requested output size in pixels, given as a (width, height) tuple. :param method: What resampling method to use. Default is - :py:attr:`PIL.Image.NEAREST`. + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. :param color: The background color of the padded image. :param centering: Control the position of the original image within the padded version. + (0.5, 0.5) will keep the image centered (0, 0) will keep the image aligned to the top left (1, 1) will keep the image aligned to the bottom @@ -242,7 +243,7 @@ def pad(image, size, method=Image.NEAREST, color=None, centering=(0.5, 0.5)): """ im_ratio = image.width / image.height - dest_ratio = float(size[0]) / size[1] + dest_ratio = size[0] / size[1] if im_ratio == dest_ratio: out = image.resize(size, resample=method) @@ -277,12 +278,10 @@ def crop(image, border=0): :return: An image. """ left, top, right, bottom = _border(border) - return image.crop( - (left, top, image.size[0]-right, image.size[1]-bottom) - ) + return image.crop((left, top, image.size[0] - right, image.size[1] - bottom)) -def scale(image, factor, resample=Image.NEAREST): +def scale(image, factor, resample=Image.BICUBIC): """ Returns a rescaled image by a specific factor given in parameter. A factor greater than 1 expands the image, between 0 and 1 contracts the @@ -290,8 +289,8 @@ def scale(image, factor, resample=Image.NEAREST): :param image: The image to rescale. :param factor: The expansion factor, as a float. - :param resample: An optional resampling filter. Same values possible as - in the PIL.Image.resize function. + :param resample: What resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. :returns: An :py:class:`~PIL.Image.Image` object. """ if factor == 1: @@ -299,8 +298,7 @@ def scale(image, factor, resample=Image.NEAREST): elif factor <= 0: raise ValueError("the factor must be greater than 0") else: - size = (int(round(factor * image.width)), - int(round(factor * image.height))) + size = (round(factor * image.width), round(factor * image.height)) return image.resize(size, resample) @@ -315,9 +313,7 @@ def deform(image, deformer, resample=Image.BILINEAR): in the PIL.Image.transform function. :return: An image. """ - return image.transform( - image.size, Image.MESH, deformer.getmesh(image), resample - ) + return image.transform(image.size, Image.MESH, deformer.getmesh(image), resample) def equalize(image, mask=None): @@ -336,7 +332,7 @@ def equalize(image, mask=None): h = image.histogram(mask) lut = [] for b in range(0, len(h), 256): - histo = [_f for _f in h[b:b+256] if _f] + histo = [_f for _f in h[b : b + 256] if _f] if len(histo) <= 1: lut.extend(list(range(256))) else: @@ -347,7 +343,7 @@ def equalize(image, mask=None): n = step // 2 for i in range(256): lut.append(n // step) - n = n + h[i+b] + n = n + h[i + b] return _lut(image, lut) @@ -368,7 +364,7 @@ def expand(image, border=0, fill=0): return out -def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): +def fit(image, size, method=Image.BICUBIC, bleed=0.0, centering=(0.5, 0.5)): """ Returns a sized and cropped version of the image, cropped to the requested aspect ratio and size. @@ -379,10 +375,11 @@ def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): :param size: The requested output size in pixels, given as a (width, height) tuple. :param method: What resampling method to use. 
Default is - :py:attr:`PIL.Image.NEAREST`. - :param bleed: Remove a border around the outside of the image (from all + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :param bleed: Remove a border around the outside of the image from all four edges. The value is a decimal percentage (use 0.01 for one percent). The default value is 0 (no border). + Cannot be greater than or equal to 0.5. :param centering: Control the cropping position. Use (0.5, 0.5) for center cropping (e.g. if cropping the width, take 50% off of the left side, and therefore 50% off the right side). @@ -400,66 +397,56 @@ def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): # kevin@cazabon.com # http://www.cazabon.com - # ensure inputs are valid - if not isinstance(centering, list): - centering = [centering[0], centering[1]] + # ensure centering is mutable + centering = list(centering) - if centering[0] > 1.0 or centering[0] < 0.0: - centering[0] = 0.50 - if centering[1] > 1.0 or centering[1] < 0.0: - centering[1] = 0.50 + if not 0.0 <= centering[0] <= 1.0: + centering[0] = 0.5 + if not 0.0 <= centering[1] <= 1.0: + centering[1] = 0.5 - if bleed > 0.49999 or bleed < 0.0: + if not 0.0 <= bleed < 0.5: bleed = 0.0 # calculate the area to use for resizing and cropping, subtracting # the 'bleed' around the edges # number of pixels to trim off on Top and Bottom, Left and Right - bleedPixels = ( - int((float(bleed) * float(image.size[0])) + 0.5), - int((float(bleed) * float(image.size[1])) + 0.5) - ) + bleed_pixels = (bleed * image.size[0], bleed * image.size[1]) - liveArea = (0, 0, image.size[0], image.size[1]) - if bleed > 0.0: - liveArea = ( - bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, - image.size[1] - bleedPixels[1] - 1 - ) + live_size = ( + image.size[0] - bleed_pixels[0] * 2, + image.size[1] - bleed_pixels[1] * 2, + ) - liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) - - # calculate the aspect ratio of the liveArea - liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + # calculate the aspect ratio of the live_size + live_size_ratio = live_size[0] / live_size[1] # calculate the aspect ratio of the output image - aspectRatio = float(size[0]) / float(size[1]) + output_ratio = size[0] / size[1] # figure out if the sides or top/bottom will be cropped off - if liveAreaAspectRatio >= aspectRatio: - # liveArea is wider than what's needed, crop the sides - cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) - cropHeight = liveSize[1] + if live_size_ratio == output_ratio: + # live_size is already the needed ratio + crop_width = live_size[0] + crop_height = live_size[1] + elif live_size_ratio >= output_ratio: + # live_size is wider than what's needed, crop the sides + crop_width = output_ratio * live_size[1] + crop_height = live_size[1] else: - # liveArea is taller than what's needed, crop the top and bottom - cropWidth = liveSize[0] - cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + # live_size is taller than what's needed, crop the top and bottom + crop_width = live_size[0] + crop_height = live_size[0] / output_ratio # make the crop - leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) - if leftSide < 0: - leftSide = 0 - topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) - if topSide < 0: - topSide = 0 + crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0] + crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1] - out = image.crop( - (leftSide, 
topSide, leftSide + cropWidth, topSide + cropHeight) - ) + crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height) # resize the image and return it - return out.resize(size, method) + return image.resize(size, method, box=crop) def flip(image): @@ -491,7 +478,7 @@ def invert(image): """ lut = [] for i in range(256): - lut.append(255-i) + lut.append(255 - i) return _lut(image, lut) @@ -514,7 +501,7 @@ def posterize(image, bits): :return: An image. """ lut = [] - mask = ~(2**(8-bits)-1) + mask = ~(2 ** (8 - bits) - 1) for i in range(256): lut.append(i & mask) return _lut(image, lut) @@ -533,100 +520,32 @@ def solarize(image, threshold=128): if i < threshold: lut.append(i) else: - lut.append(255-i) + lut.append(255 - i) return _lut(image, lut) -# -------------------------------------------------------------------- -# PIL USM components, from Kevin Cazabon. - -def gaussian_blur(im, radius=None): - """ PIL_usm.gblur(im, [radius])""" - - warnings.warn( - 'PIL.ImageOps.gaussian_blur is deprecated. ' - 'Use PIL.ImageFilter.GaussianBlur instead. ' - 'This function will be removed in a future version.', - DeprecationWarning - ) - - if radius is None: - radius = 5.0 - - im.load() - - return im.im.gaussian_blur(radius) - - -def gblur(im, radius=None): - """ PIL_usm.gblur(im, [radius])""" - - warnings.warn( - 'PIL.ImageOps.gblur is deprecated. ' - 'Use PIL.ImageFilter.GaussianBlur instead. ' - 'This function will be removed in a future version.', - DeprecationWarning - ) - - return gaussian_blur(im, radius) - - -def unsharp_mask(im, radius=None, percent=None, threshold=None): - """ PIL_usm.usm(im, [radius, percent, threshold])""" - - warnings.warn( - 'PIL.ImageOps.unsharp_mask is deprecated. ' - 'Use PIL.ImageFilter.UnsharpMask instead. ' - 'This function will be removed in a future version.', - DeprecationWarning - ) - - if radius is None: - radius = 5.0 - if percent is None: - percent = 150 - if threshold is None: - threshold = 3 - - im.load() - - return im.im.unsharp_mask(radius, percent, threshold) - - -def usm(im, radius=None, percent=None, threshold=None): - """ PIL_usm.usm(im, [radius, percent, threshold])""" - - warnings.warn( - 'PIL.ImageOps.usm is deprecated. ' - 'Use PIL.ImageFilter.UnsharpMask instead. ' - 'This function will be removed in a future version.', - DeprecationWarning - ) - - return unsharp_mask(im, radius, percent, threshold) - - -def box_blur(image, radius): +def exif_transpose(image): """ - Blur the image by setting each pixel to the average value of the pixels - in a square box extending radius pixels in each direction. - Supports float radius of arbitrary size. Uses an optimized implementation - which runs in linear time relative to the size of the image - for any radius value. + If an image has an EXIF Orientation tag, return a new image that is + transposed accordingly. Otherwise, return a copy of the image. - :param image: The image to blur. - :param radius: Size of the box in one direction. Radius 0 does not blur, - returns an identical image. Radius 1 takes 1 pixel - in each direction, i.e. 9 pixels in total. + :param image: The image to transpose. :return: An image. """ - warnings.warn( - 'PIL.ImageOps.box_blur is deprecated. ' - 'Use PIL.ImageFilter.BoxBlur instead. 
' - 'This function will be removed in a future version.', - DeprecationWarning - ) - - image.load() - - return image._new(image.im.box_blur(radius)) + exif = image.getexif() + orientation = exif.get(0x0112) + method = { + 2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(orientation) + if method is not None: + transposed_image = image.transpose(method) + del exif[0x0112] + transposed_image.info["exif"] = exif.tobytes() + return transposed_image + return image.copy() diff --git a/server/www/packages/packages-linux/x64/PIL/ImagePalette.py b/server/www/packages/packages-linux/x64/PIL/ImagePalette.py index 81e99ab..e0d439c 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImagePalette.py +++ b/server/www/packages/packages-linux/x64/PIL/ImagePalette.py @@ -17,10 +17,11 @@ # import array -from . import ImageColor, GimpPaletteFile, GimpGradientFile, PaletteFile + +from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile -class ImagePalette(object): +class ImagePalette: """ Color palette for palette mapped images @@ -38,11 +39,12 @@ class ImagePalette(object): def __init__(self, mode="RGB", palette=None, size=0): self.mode = mode self.rawmode = None # if set, palette contains raw data - self.palette = palette or bytearray(range(256))*len(self.mode) + self.palette = palette or bytearray(range(256)) * len(self.mode) self.colors = {} self.dirty = None - if ((size == 0 and len(self.mode)*256 != len(self.palette)) or - (size != 0 and size != len(self.palette))): + if (size == 0 and len(self.mode) * 256 != len(self.palette)) or ( + size != 0 and size != len(self.palette) + ): raise ValueError("wrong palette size") def copy(self): @@ -78,7 +80,7 @@ class ImagePalette(object): if isinstance(self.palette, bytes): return self.palette arr = array.array("B", self.palette) - if hasattr(arr, 'tobytes'): + if hasattr(arr, "tobytes"): return arr.tobytes() return arr.tostring() @@ -104,8 +106,8 @@ class ImagePalette(object): raise ValueError("cannot allocate more than 256 colors") self.colors[color] = index self.palette[index] = color[0] - self.palette[index+256] = color[1] - self.palette[index+512] = color[2] + self.palette[index + 256] = color[1] + self.palette[index + 512] = color[2] self.dirty = 1 return index else: @@ -124,7 +126,7 @@ class ImagePalette(object): fp.write("# Mode: %s\n" % self.mode) for i in range(256): fp.write("%d" % i) - for j in range(i*len(self.mode), (i+1)*len(self.mode)): + for j in range(i * len(self.mode), (i + 1) * len(self.mode)): try: fp.write(" %d" % self.palette[j]) except IndexError: @@ -136,6 +138,7 @@ class ImagePalette(object): # -------------------------------------------------------------------- # Internal + def raw(rawmode, data): palette = ImagePalette() palette.rawmode = rawmode @@ -147,11 +150,12 @@ def raw(rawmode, data): # -------------------------------------------------------------------- # Factories + def make_linear_lut(black, white): lut = [] if black == 0: for i in range(256): - lut.append(white*i//255) + lut.append(white * i // 255) else: raise NotImplementedError # FIXME return lut @@ -172,8 +176,9 @@ def negative(mode="RGB"): def random(mode="RGB"): from random import randint + palette = [] - for i in range(256*len(mode)): + for i in range(256 * len(mode)): palette.append(randint(0, 255)) return ImagePalette(mode, palette) @@ -199,7 +204,7 @@ def load(filename): for paletteHandler in [ 
GimpPaletteFile.GimpPaletteFile, GimpGradientFile.GimpGradientFile, - PaletteFile.PaletteFile + PaletteFile.PaletteFile, ]: try: fp.seek(0) @@ -211,6 +216,6 @@ def load(filename): # traceback.print_exc() pass else: - raise IOError("cannot load palette") + raise OSError("cannot load palette") return lut # data, rawmode diff --git a/server/www/packages/packages-linux/x64/PIL/ImagePath.py b/server/www/packages/packages-linux/x64/PIL/ImagePath.py index 8cbfec0..3d3538c 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImagePath.py +++ b/server/www/packages/packages-linux/x64/PIL/ImagePath.py @@ -16,5 +16,4 @@ from . import Image - Path = Image.core.path diff --git a/server/www/packages/packages-linux/x64/PIL/ImageQt.py b/server/www/packages/packages-linux/x64/PIL/ImageQt.py index e602613..dfe2f80 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageQt.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageQt.py @@ -16,34 +16,24 @@ # See the README file for information on usage and redistribution. # -from . import Image -from ._util import isPath, py3 -from io import BytesIO import sys +from io import BytesIO + +from . import Image +from ._util import isPath + +qt_versions = [["5", "PyQt5"], ["side2", "PySide2"]] -qt_versions = [ - ['5', 'PyQt5'], - ['side2', 'PySide2'], - ['4', 'PyQt4'], - ['side', 'PySide'] -] # If a version has already been imported, attempt it first -qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, - reverse=True) +qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True) for qt_version, qt_module in qt_versions: try: - if qt_module == 'PyQt5': + if qt_module == "PyQt5": from PyQt5.QtGui import QImage, qRgba, QPixmap from PyQt5.QtCore import QBuffer, QIODevice - elif qt_module == 'PySide2': + elif qt_module == "PySide2": from PySide2.QtGui import QImage, qRgba, QPixmap from PySide2.QtCore import QBuffer, QIODevice - elif qt_module == 'PyQt4': - from PyQt4.QtGui import QImage, qRgba, QPixmap - from PyQt4.QtCore import QBuffer, QIODevice - elif qt_module == 'PySide': - from PySide.QtGui import QImage, qRgba, QPixmap - from PySide.QtCore import QBuffer, QIODevice except (ImportError, RuntimeError): continue qt_is_installed = True @@ -57,7 +47,7 @@ def rgb(r, g, b, a=255): """(Internal) Turns an RGB color into a Qt compatible color integer.""" # use qRgb to pack the colors, and then turn the resulting long # into a negative integer with the same bitpattern. 
- return (qRgba(r, g, b, a) & 0xffffffff) + return qRgba(r, g, b, a) & 0xFFFFFFFF def fromqimage(im): @@ -67,19 +57,15 @@ def fromqimage(im): """ buffer = QBuffer() buffer.open(QIODevice.ReadWrite) - # preserve alha channel with png + # preserve alpha channel with png # otherwise ppm is more friendly with Image.open if im.hasAlphaChannel(): - im.save(buffer, 'png') + im.save(buffer, "png") else: - im.save(buffer, 'ppm') + im.save(buffer, "ppm") b = BytesIO() - try: - b.write(buffer.data()) - except TypeError: - # workaround for Python 2 - b.write(str(buffer.data())) + b.write(buffer.data()) buffer.close() b.seek(0) @@ -105,11 +91,7 @@ def align8to32(bytes, width, mode): converts each scanline of data from 8 bit to 32 bit aligned """ - bits_per_pixel = { - '1': 1, - 'L': 8, - 'P': 8, - }[mode] + bits_per_pixel = {"1": 1, "L": 8, "P": 8}[mode] # calculate bytes per line and the extra padding if needed bits_per_line = bits_per_pixel * width @@ -124,10 +106,12 @@ def align8to32(bytes, width, mode): new_data = [] for i in range(len(bytes) // bytes_per_line): - new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] - + b'\x00' * extra_padding) + new_data.append( + bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + + b"\x00" * extra_padding + ) - return b''.join(new_data) + return b"".join(new_data) def _toqclass_helper(im): @@ -137,10 +121,7 @@ def _toqclass_helper(im): # handle filename, if given instead of image name if hasattr(im, "toUtf8"): # FIXME - is this really the best way to do this? - if py3: - im = str(im.toUtf8(), "utf-8") - else: - im = unicode(im.toUtf8(), "utf-8") + im = str(im.toUtf8(), "utf-8") if isPath(im): im = Image.open(im) @@ -156,7 +137,7 @@ def _toqclass_helper(im): colortable = [] palette = im.getpalette() for i in range(0, len(palette), 3): - colortable.append(rgb(*palette[i:i+3])) + colortable.append(rgb(*palette[i : i + 3])) elif im.mode == "RGB": data = im.tobytes("raw", "BGRX") format = QImage.Format_RGB32 @@ -172,33 +153,34 @@ def _toqclass_helper(im): raise ValueError("unsupported image mode %r" % im.mode) __data = data or align8to32(im.tobytes(), im.size[0], im.mode) - return { - 'data': __data, 'im': im, 'format': format, 'colortable': colortable - } + return {"data": __data, "im": im, "format": format, "colortable": colortable} if qt_is_installed: - class ImageQt(QImage): + class ImageQt(QImage): def __init__(self, im): """ An PIL image wrapper for Qt. This is a subclass of PyQt's QImage class. - :param im: A PIL Image object, or a file name (given either as Python - string or a PyQt string object). + :param im: A PIL Image object, or a file name (given either as + Python string or a PyQt string object). """ im_data = _toqclass_helper(im) # must keep a reference, or Qt will crash! # All QImage constructors that take data operate on an existing # buffer, so this buffer has to hang on for the life of the image. 
# Fixes https://github.com/python-pillow/Pillow/issues/1370 - self.__data = im_data['data'] - QImage.__init__(self, - self.__data, im_data['im'].size[0], - im_data['im'].size[1], im_data['format']) - if im_data['colortable']: - self.setColorTable(im_data['colortable']) + self.__data = im_data["data"] + super().__init__( + self.__data, + im_data["im"].size[0], + im_data["im"].size[1], + im_data["format"], + ) + if im_data["colortable"]: + self.setColorTable(im_data["colortable"]) def toqimage(im): @@ -211,8 +193,8 @@ def toqpixmap(im): # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) # result.loadFromData(im_data['data']) # Fix some strange bug that causes - if im.mode == 'RGB': - im = im.convert('RGBA') + if im.mode == "RGB": + im = im.convert("RGBA") qimage = toqimage(im) return QPixmap.fromImage(qimage) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageSequence.py b/server/www/packages/packages-linux/x64/PIL/ImageSequence.py index 1fc6e5d..4e9f5c2 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageSequence.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageSequence.py @@ -16,7 +16,7 @@ ## -class Iterator(object): +class Iterator: """ This class implements an iterator object that can be used to loop over an image sequence. @@ -32,7 +32,7 @@ class Iterator(object): if not hasattr(im, "seek"): raise AttributeError("im must have seek method") self.im = im - self.position = 0 + self.position = getattr(self.im, "_min_frame", 0) def __getitem__(self, ix): try: @@ -52,5 +52,24 @@ class Iterator(object): except EOFError: raise StopIteration - def next(self): - return self.__next__() + +def all_frames(im, func=None): + """ + Applies a given function to all frames in an image or a list of images. + The frames are returned as a list of separate images. + + :param im: An image, or a list of images. + :param func: The function to apply to all of the image frames. + :returns: A list of images. + """ + if not isinstance(im, list): + im = [im] + + ims = [] + for imSequence in im: + current = imSequence.tell() + + ims += [im_frame.copy() for im_frame in Iterator(imSequence)] + + imSequence.seek(current) + return [func(im) for im in ims] if func else ims diff --git a/server/www/packages/packages-linux/x64/PIL/ImageShow.py b/server/www/packages/packages-linux/x64/PIL/ImageShow.py index b50d613..fc50894 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageShow.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageShow.py @@ -11,17 +11,14 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import print_function +import os +import shutil +import subprocess +import sys +import tempfile +from shlex import quote from PIL import Image -import os -import sys - -if sys.version_info.major >= 3: - from shlex import quote -else: - from pipes import quote _viewers = [] @@ -53,7 +50,7 @@ def show(image, title=None, **options): return 0 -class Viewer(object): +class Viewer: """Base class for viewers.""" # main api @@ -61,16 +58,12 @@ class Viewer(object): def show(self, image, **options): # save temporary image to disk - if image.mode[:4] == "I;16": - # @PIL88 @PIL101 - # "I;16" isn't an 'official' mode, but we still want to - # provide a simple way to show 16-bit images. - base = "L" - # FIXME: auto-contrast if max() > 255? 
- else: + if not ( + image.mode in ("1", "RGBA") or (self.format == "PNG" and image.mode == "LA") + ): base = Image.getmodebase(image.mode) - if base != image.mode and image.mode != "1" and image.mode != "RGBA": - image = image.convert(base) + if image.mode != base: + image = image.convert(base) return self.show_image(image, **options) @@ -99,18 +92,22 @@ class Viewer(object): os.system(self.get_command(file, **options)) return 1 + # -------------------------------------------------------------------- if sys.platform == "win32": class WindowsViewer(Viewer): - format = "BMP" + format = "PNG" + options = {"compress_level": 1} def get_command(self, file, **options): - return ('start "Pillow" /WAIT "%s" ' - '&& ping -n 2 127.0.0.1 >NUL ' - '&& del /f "%s"' % (file, file)) + return ( + 'start "Pillow" /WAIT "%s" ' + "&& ping -n 2 127.0.0.1 >NUL " + '&& del /f "%s"' % (file, file) + ) register(WindowsViewer) @@ -118,41 +115,56 @@ elif sys.platform == "darwin": class MacViewer(Viewer): format = "PNG" - options = {'compress_level': 1} + options = {"compress_level": 1} def get_command(self, file, **options): # on darwin open returns immediately resulting in the temp # file removal while app is opening - command = "open -a /Applications/Preview.app" - command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), - quote(file)) + command = "open -a Preview.app" + command = "({} {}; sleep 20; rm -f {})&".format( + command, quote(file), quote(file) + ) return command + def show_file(self, file, **options): + """Display given file""" + fd, path = tempfile.mkstemp() + with os.fdopen(fd, "w") as f: + f.write(file) + with open(path, "r") as f: + subprocess.Popen( + ["im=$(cat); open -a Preview.app $im; sleep 20; rm -f $im"], + shell=True, + stdin=f, + ) + os.remove(path) + return 1 + register(MacViewer) else: # unixoids - def which(executable): - path = os.environ.get("PATH") - if not path: - return None - for dirname in path.split(os.pathsep): - filename = os.path.join(dirname, executable) - if os.path.isfile(filename) and os.access(filename, os.X_OK): - return filename - return None - class UnixViewer(Viewer): format = "PNG" - options = {'compress_level': 1} + options = {"compress_level": 1} + + def get_command(self, file, **options): + command = self.get_command_ex(file, **options)[0] + return "({} {}; rm -f {})&".format(command, quote(file), quote(file)) def show_file(self, file, **options): - command, executable = self.get_command_ex(file, **options) - command = "(%s %s; rm -f %s)&" % (command, quote(file), - quote(file)) - os.system(command) + """Display given file""" + fd, path = tempfile.mkstemp() + with os.fdopen(fd, "w") as f: + f.write(file) + with open(path, "r") as f: + command = self.get_command_ex(file, **options)[0] + subprocess.Popen( + ["im=$(cat);" + command + " $im; rm -f $im"], shell=True, stdin=f + ) + os.remove(path) return 1 # implementations @@ -162,7 +174,7 @@ else: command = executable = "display" return command, executable - if which("display"): + if shutil.which("display"): register(DisplayViewer) class EogViewer(UnixViewer): @@ -170,7 +182,7 @@ else: command = executable = "eog" return command, executable - if which("eog"): + if shutil.which("eog"): register(EogViewer) class XVViewer(UnixViewer): @@ -182,7 +194,7 @@ else: command += " -name %s" % quote(title) return command, executable - if which("xv"): + if shutil.which("xv"): register(XVViewer) if __name__ == "__main__": @@ -191,4 +203,5 @@ if __name__ == "__main__": print("Syntax: python ImageShow.py imagefile [title]") 
sys.exit() - print(show(Image.open(sys.argv[1]), *sys.argv[2:])) + with Image.open(sys.argv[1]) as im: + print(show(im, *sys.argv[2:])) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageStat.py b/server/www/packages/packages-linux/x64/PIL/ImageStat.py index d4b38d8..50bafc9 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageStat.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageStat.py @@ -21,13 +21,12 @@ # See the README file for information on usage and redistribution. # +import functools import math import operator -import functools -class Stat(object): - +class Stat: def __init__(self, image_or_list, mask=None): try: if mask: @@ -41,7 +40,7 @@ class Stat(object): self.bands = list(range(len(self.h) // 256)) def __getattr__(self, id): - "Calculate missing attribute" + """Calculate missing attribute""" if id[:4] == "_get": raise AttributeError(id) # calculate missing attribute @@ -50,7 +49,7 @@ class Stat(object): return v def _getextrema(self): - "Get min/max values for each band in the image" + """Get min/max values for each band in the image""" def minmax(histogram): n = 255 @@ -67,15 +66,15 @@ class Stat(object): return v def _getcount(self): - "Get total number of pixels in each layer" + """Get total number of pixels in each layer""" v = [] for i in range(0, len(self.h), 256): - v.append(functools.reduce(operator.add, self.h[i:i+256])) + v.append(functools.reduce(operator.add, self.h[i : i + 256])) return v def _getsum(self): - "Get sum of all pixels in each layer" + """Get sum of all pixels in each layer""" v = [] for i in range(0, len(self.h), 256): @@ -86,7 +85,7 @@ class Stat(object): return v def _getsum2(self): - "Get squared sum of all pixels in each layer" + """Get squared sum of all pixels in each layer""" v = [] for i in range(0, len(self.h), 256): @@ -97,7 +96,7 @@ class Stat(object): return v def _getmean(self): - "Get average pixel level for each layer" + """Get average pixel level for each layer""" v = [] for i in self.bands: @@ -105,22 +104,22 @@ class Stat(object): return v def _getmedian(self): - "Get median pixel level for each layer" + """Get median pixel level for each layer""" v = [] for i in self.bands: s = 0 - half = self.count[i]//2 + half = self.count[i] // 2 b = i * 256 for j in range(256): - s = s + self.h[b+j] + s = s + self.h[b + j] if s > half: break v.append(j) return v def _getrms(self): - "Get RMS for each layer" + """Get RMS for each layer""" v = [] for i in self.bands: @@ -128,16 +127,16 @@ class Stat(object): return v def _getvar(self): - "Get variance for each layer" + """Get variance for each layer""" v = [] for i in self.bands: n = self.count[i] - v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + v.append((self.sum2[i] - (self.sum[i] ** 2.0) / n) / n) return v def _getstddev(self): - "Get standard deviation for each layer" + """Get standard deviation for each layer""" v = [] for i in self.bands: diff --git a/server/www/packages/packages-linux/x64/PIL/ImageTk.py b/server/www/packages/packages-linux/x64/PIL/ImageTk.py index c56f556..ee707cf 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageTk.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageTk.py @@ -25,16 +25,10 @@ # See the README file for information on usage and redistribution. # -import sys - -if sys.version_info.major > 2: - import tkinter -else: - import Tkinter as tkinter - -from . import Image +import tkinter from io import BytesIO +from . 
import Image # -------------------------------------------------------------------- # Check for Tkinter interface hooks @@ -67,7 +61,8 @@ def _get_image_from_kw(kw): # -------------------------------------------------------------------- # PhotoImage -class PhotoImage(object): + +class PhotoImage: """ A Tkinter-compatible photo image. This can be used everywhere Tkinter expects an image object. If the image is an RGBA @@ -124,7 +119,7 @@ class PhotoImage(object): self.__photo.name = None try: self.__photo.tk.call("image", "delete", name) - except: + except Exception: pass # ignore internal errors def __str__(self): @@ -183,17 +178,18 @@ class PhotoImage(object): # activate Tkinter hook try: from . import _imagingtk + try: - if hasattr(tk, 'interp'): + if hasattr(tk, "interp"): # Required for PyPy, which always has CFFI installed from cffi import FFI + ffi = FFI() # PyPy is using an FFI CDATA element # (Pdb) self.tk.interp # - _imagingtk.tkinit( - int(ffi.cast("uintptr_t", tk.interp)), 1) + _imagingtk.tkinit(int(ffi.cast("uintptr_t", tk.interp)), 1) else: _imagingtk.tkinit(tk.interpaddr(), 1) except AttributeError: @@ -202,11 +198,12 @@ class PhotoImage(object): except (ImportError, AttributeError, tkinter.TclError): raise # configuration problem; cannot attach to Tkinter + # -------------------------------------------------------------------- # BitmapImage -class BitmapImage(object): +class BitmapImage: """ A Tkinter-compatible bitmap image. This can be used everywhere Tkinter expects an image object. @@ -244,7 +241,7 @@ class BitmapImage(object): self.__photo.name = None try: self.__photo.tk.call("image", "delete", name) - except: + except Exception: pass # ignore internal errors def width(self): @@ -275,10 +272,13 @@ class BitmapImage(object): def getimage(photo): - """ This function is unimplemented """ - """Copies the contents of a PhotoImage to a PIL image memory.""" - photo.tk.call("PyImagingPhotoGet", photo) + im = Image.new("RGBA", (photo.width(), photo.height())) + block = im.im + + photo.tk.call("PyImagingPhotoGet", photo, block.id) + + return im def _show(image, title): @@ -290,11 +290,10 @@ def _show(image, title): self.image = BitmapImage(im, foreground="white", master=master) else: self.image = PhotoImage(im, master=master) - tkinter.Label.__init__(self, master, image=self.image, - bg="black", bd=0) + super().__init__(master, image=self.image, bg="black", bd=0) if not tkinter._default_root: - raise IOError("tkinter not initialized") + raise OSError("tkinter not initialized") top = tkinter.Toplevel() if title: top.title(title) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageTransform.py b/server/www/packages/packages-linux/x64/PIL/ImageTransform.py index c3f6af8..77791ab 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageTransform.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageTransform.py @@ -46,6 +46,7 @@ class AffineTransform(Transform): :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows from an affine transform matrix. """ + method = Image.AFFINE @@ -67,6 +68,7 @@ class ExtentTransform(Transform): :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the input image's coordinate system. See :ref:`coordinate-system`. """ + method = Image.EXTENT @@ -83,6 +85,7 @@ class QuadTransform(Transform): upper left, lower left, lower right, and upper right corner of the source quadrilateral. """ + method = Image.QUAD @@ -95,4 +98,5 @@ class MeshTransform(Transform): :param data: A list of (bbox, quad) tuples. 
""" + method = Image.MESH diff --git a/server/www/packages/packages-linux/x64/PIL/ImageWin.py b/server/www/packages/packages-linux/x64/PIL/ImageWin.py index 9b86270..927b169 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImageWin.py +++ b/server/www/packages/packages-linux/x64/PIL/ImageWin.py @@ -20,12 +20,13 @@ from . import Image -class HDC(object): +class HDC: """ Wraps an HDC integer. The resulting object can be passed to the :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` methods. """ + def __init__(self, dc): self.dc = dc @@ -33,12 +34,13 @@ class HDC(object): return self.dc -class HWND(object): +class HWND: """ Wraps an HWND integer. The resulting object can be passed to the :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` methods, instead of a DC. """ + def __init__(self, wnd): self.wnd = wnd @@ -46,7 +48,7 @@ class HWND(object): return self.wnd -class Dib(object): +class Dib: """ A Windows bitmap with the given mode and size. The mode can be one of "1", "L", "P", or "RGB". @@ -184,13 +186,13 @@ class Dib(object): return self.image.tobytes() -class Window(object): +class Window: """Create a Window with the given title size.""" def __init__(self, title="PIL", width=None, height=None): self.hwnd = Image.core.createwindow( title, self.__dispatcher, width or 0, height or 0 - ) + ) def __dispatcher(self, action, *args): return getattr(self, "ui_handle_" + action)(*args) @@ -222,7 +224,7 @@ class ImageWindow(Window): image = Dib(image) self.image = image width, height = image.size - Window.__init__(self, title, width=width, height=height) + super().__init__(title, width=width, height=height) def ui_handle_repair(self, dc, x0, y0, x1, y1): self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py index 5a6623c..21ffd74 100644 --- a/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py @@ -19,9 +19,6 @@ import re from . import Image, ImageFile -__version__ = "0.2" - - # # -------------------------------------------------------------------- @@ -31,6 +28,7 @@ field = re.compile(br"([a-z]*) ([^ \r\n]*)") ## # Image plugin for IM Tools images. + class ImtImageFile(ImageFile.ImageFile): format = "IMT" @@ -53,12 +51,12 @@ class ImtImageFile(ImageFile.ImageFile): if not s: break - if s == b'\x0C': + if s == b"\x0C": # image data begins - self.tile = [("raw", (0, 0)+self.size, - self.fp.tell(), - (self.mode, 0, 1))] + self.tile = [ + ("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1)) + ] break diff --git a/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py index b63e1ab..b2f976d 100644 --- a/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py @@ -14,20 +14,13 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import print_function - -from . import Image, ImageFile -from ._binary import i8, i16be as i16, i32be as i32, o8 import os import tempfile -__version__ = "0.3" +from . 
import Image, ImageFile +from ._binary import i8, i16be as i16, i32be as i32, o8 -COMPRESSION = { - 1: "raw", - 5: "jpeg" -} +COMPRESSION = {1: "raw", 5: "jpeg"} PAD = o8(0) * 4 @@ -35,13 +28,14 @@ PAD = o8(0) * 4 # # Helpers + def i(c): return i32((PAD + c)[-4:]) def dump(c): for i in c: - print("%02x" % i8(i), end=' ') + print("%02x" % i8(i), end=" ") print() @@ -49,6 +43,7 @@ def dump(c): # Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields # from TIFF and JPEG files, use the getiptcinfo function. + class IptcImageFile(ImageFile.ImageFile): format = "IPTC" @@ -73,11 +68,11 @@ class IptcImageFile(ImageFile.ImageFile): # field size size = i8(s[3]) if size > 132: - raise IOError("illegal field length in IPTC/NAA file") + raise OSError("illegal field length in IPTC/NAA file") elif size == 128: size = 0 elif size > 128: - size = i(self.fp.read(size-128)) + size = i(self.fp.read(size - 128)) else: size = i16(s[3:]) @@ -107,7 +102,7 @@ class IptcImageFile(ImageFile.ImageFile): layers = i8(self.info[(3, 60)][0]) component = i8(self.info[(3, 60)][1]) if (3, 65) in self.info: - id = i8(self.info[(3, 65)][0])-1 + id = i8(self.info[(3, 65)][0]) - 1 else: id = 0 if layers == 1 and not component: @@ -124,12 +119,13 @@ class IptcImageFile(ImageFile.ImageFile): try: compression = COMPRESSION[self.getint((3, 120))] except KeyError: - raise IOError("Unknown IPTC image compression") + raise OSError("Unknown IPTC image compression") # tile if tag == (8, 10): - self.tile = [("iptc", (compression, offset), - (0, 0, self.size[0], self.size[1]))] + self.tile = [ + ("iptc", (compression, offset), (0, 0, self.size[0], self.size[1])) + ] def load(self): @@ -162,9 +158,9 @@ class IptcImageFile(ImageFile.ImageFile): o.close() try: - _im = Image.open(outfile) - _im.load() - self.im = _im.im + with Image.open(outfile) as _im: + _im.load() + self.im = _im.im finally: try: os.unlink(outfile) @@ -196,35 +192,9 @@ def getiptcinfo(im): elif isinstance(im, JpegImagePlugin.JpegImageFile): # extract the IPTC/NAA resource - try: - app = im.app["APP13"] - if app[:14] == b"Photoshop 3.0\x00": - app = app[14:] - # parse the image resource block - offset = 0 - while app[offset:offset+4] == b"8BIM": - offset += 4 - # resource code - code = i16(app, offset) - offset += 2 - # resource name (usually empty) - name_len = i8(app[offset]) - # name = app[offset+1:offset+1+name_len] - offset = 1 + offset + name_len - if offset & 1: - offset += 1 - # resource data block - size = i32(app, offset) - offset += 4 - if code == 0x0404: - # 0x0404 contains IPTC/NAA data - data = app[offset:offset+size] - break - offset = offset + size - if offset & 1: - offset += 1 - except (AttributeError, KeyError): - pass + photoshop = im.info.get("photoshop") + if photoshop: + data = photoshop.get(0x0404) elif isinstance(im, TiffImagePlugin.TiffImageFile): # get raw data from the IPTC/NAA tag (PhotoShop tags the data @@ -238,8 +208,9 @@ def getiptcinfo(im): return None # no properties # create an IptcImagePlugin object without initializing it - class FakeImage(object): + class FakeImage: pass + im = FakeImage() im.__class__ = IptcImageFile diff --git a/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py index 7659b6b..0b0d433 100644 --- a/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py @@ -12,12 +12,11 @@ # # See the README file for information on usage and redistribution. # -from . 
import Image, ImageFile -import struct -import os import io +import os +import struct -__version__ = "0.1" +from . import Image, ImageFile def _parse_codestream(fp): @@ -25,30 +24,29 @@ def _parse_codestream(fp): count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" hdr = fp.read(2) - lsiz = struct.unpack('>H', hdr)[0] + lsiz = struct.unpack(">H", hdr)[0] siz = hdr + fp.read(lsiz - 2) - lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ - xtosiz, ytosiz, csiz \ - = struct.unpack_from('>HHIIIIIIIIH', siz) - ssiz = [None]*csiz - xrsiz = [None]*csiz - yrsiz = [None]*csiz + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( + ">HHIIIIIIIIH", siz + ) + ssiz = [None] * csiz + xrsiz = [None] * csiz + yrsiz = [None] * csiz for i in range(csiz): - ssiz[i], xrsiz[i], yrsiz[i] \ - = struct.unpack_from('>BBB', siz, 36 + 3 * i) + ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i) size = (xsiz - xosiz, ysiz - yosiz) if csiz == 1: - if (yrsiz[0] & 0x7f) > 8: - mode = 'I;16' + if (yrsiz[0] & 0x7F) > 8: + mode = "I;16" else: - mode = 'L' + mode = "L" elif csiz == 2: - mode = 'LA' + mode = "LA" elif csiz == 3: - mode = 'RGB' + mode = "RGB" elif csiz == 4: - mode = 'RGBA' + mode = "RGBA" else: mode = None @@ -57,29 +55,34 @@ def _parse_codestream(fp): def _parse_jp2_header(fp): """Parse the JP2 header box to extract size, component count and - color space information, returning a PIL (size, mode) tuple.""" + color space information, returning a (size, mode, mimetype) tuple.""" # Find the JP2 header box header = None + mimetype = None while True: - lbox, tbox = struct.unpack('>I4s', fp.read(8)) + lbox, tbox = struct.unpack(">I4s", fp.read(8)) if lbox == 1: - lbox = struct.unpack('>Q', fp.read(8))[0] + lbox = struct.unpack(">Q", fp.read(8))[0] hlen = 16 else: hlen = 8 if lbox < hlen: - raise SyntaxError('Invalid JP2 header length') + raise SyntaxError("Invalid JP2 header length") - if tbox == b'jp2h': + if tbox == b"jp2h": header = fp.read(lbox - hlen) break + elif tbox == b"ftyp": + if fp.read(4) == b"jpx ": + mimetype = "image/jpx" + fp.seek(lbox - hlen - 4, os.SEEK_CUR) else: fp.seek(lbox - hlen, os.SEEK_CUR) if header is None: - raise SyntaxError('could not find JP2 header') + raise SyntaxError("could not find JP2 header") size = None mode = None @@ -88,64 +91,64 @@ def _parse_jp2_header(fp): hio = io.BytesIO(header) while True: - lbox, tbox = struct.unpack('>I4s', hio.read(8)) + lbox, tbox = struct.unpack(">I4s", hio.read(8)) if lbox == 1: - lbox = struct.unpack('>Q', hio.read(8))[0] + lbox = struct.unpack(">Q", hio.read(8))[0] hlen = 16 else: hlen = 8 content = hio.read(lbox - hlen) - if tbox == b'ihdr': - height, width, nc, bpc, c, unkc, ipr \ - = struct.unpack('>IIHBBBB', content) + if tbox == b"ihdr": + height, width, nc, bpc, c, unkc, ipr = struct.unpack(">IIHBBBB", content) size = (width, height) if unkc: - if nc == 1 and (bpc & 0x7f) > 8: - mode = 'I;16' + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" elif nc == 1: - mode = 'L' + mode = "L" elif nc == 2: - mode = 'LA' + mode = "LA" elif nc == 3: - mode = 'RGB' + mode = "RGB" elif nc == 4: - mode = 'RGBA' + mode = "RGBA" break - elif tbox == b'colr': - meth, prec, approx = struct.unpack_from('>BBB', content) + elif tbox == b"colr": + meth, prec, approx = struct.unpack_from(">BBB", content) if meth == 1: - cs = struct.unpack_from('>I', content, 3)[0] - if cs == 16: # sRGB - if nc == 1 and (bpc & 0x7f) > 8: - mode = 'I;16' + cs = struct.unpack_from(">I", content, 3)[0] + if cs 
== 16: # sRGB + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" elif nc == 1: - mode = 'L' + mode = "L" elif nc == 3: - mode = 'RGB' + mode = "RGB" elif nc == 4: - mode = 'RGBA' + mode = "RGBA" break elif cs == 17: # grayscale - if nc == 1 and (bpc & 0x7f) > 8: - mode = 'I;16' + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" elif nc == 1: - mode = 'L' + mode = "L" elif nc == 2: - mode = 'LA' + mode = "LA" break elif cs == 18: # sYCC if nc == 3: - mode = 'RGB' + mode = "RGB" elif nc == 4: - mode = 'RGBA' + mode = "RGBA" break if size is None or mode is None: raise SyntaxError("Malformed jp2 header") - return (size, mode) + return (size, mode, mimetype) + ## # Image plugin for JPEG2000 images. @@ -157,22 +160,23 @@ class Jpeg2KImageFile(ImageFile.ImageFile): def _open(self): sig = self.fp.read(4) - if sig == b'\xff\x4f\xff\x51': + if sig == b"\xff\x4f\xff\x51": self.codec = "j2k" self._size, self.mode = _parse_codestream(self.fp) else: sig = sig + self.fp.read(8) - if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a": self.codec = "jp2" - self._size, self.mode = _parse_jp2_header(self.fp) + header = _parse_jp2_header(self.fp) + self._size, self.mode, self.custom_mimetype = header else: - raise SyntaxError('not a JPEG 2000 file') + raise SyntaxError("not a JPEG 2000 file") if self.size is None or self.mode is None: - raise SyntaxError('unable to determine size/mode') + raise SyntaxError("unable to determine size/mode") - self.reduce = 0 + self._reduce = 0 self.layers = 0 fd = -1 @@ -181,69 +185,101 @@ class Jpeg2KImageFile(ImageFile.ImageFile): try: fd = self.fp.fileno() length = os.fstat(fd).st_size - except: + except Exception: fd = -1 try: pos = self.fp.tell() - self.fp.seek(0, 2) + self.fp.seek(0, io.SEEK_END) length = self.fp.tell() - self.fp.seek(pos, 0) - except: + self.fp.seek(pos) + except Exception: length = -1 - self.tile = [('jpeg2k', (0, 0) + self.size, 0, - (self.codec, self.reduce, self.layers, fd, length))] + self.tile = [ + ( + "jpeg2k", + (0, 0) + self.size, + 0, + (self.codec, self._reduce, self.layers, fd, length), + ) + ] + + @property + def reduce(self): + # https://github.com/python-pillow/Pillow/issues/4343 found that the + # new Image 'reduce' method was shadowed by this plugin's 'reduce' + # property. 
This attempts to allow for both scenarios + return self._reduce or super().reduce + + @reduce.setter + def reduce(self, value): + self._reduce = value def load(self): - if self.reduce: - power = 1 << self.reduce + if self.tile and self._reduce: + power = 1 << self._reduce adjust = power >> 1 - self._size = (int((self.size[0] + adjust) / power), - int((self.size[1] + adjust) / power)) + self._size = ( + int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power), + ) - if self.tile: # Update the reduce and layers settings t = self.tile[0] - t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4]) self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] return ImageFile.ImageFile.load(self) def _accept(prefix): - return (prefix[:4] == b'\xff\x4f\xff\x51' or - prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + return ( + prefix[:4] == b"\xff\x4f\xff\x51" + or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ) # ------------------------------------------------------------ # Save support + def _save(im, fp, filename): - if filename.endswith('.j2k'): - kind = 'j2k' + if filename.endswith(".j2k"): + kind = "j2k" else: - kind = 'jp2' + kind = "jp2" # Get the keyword arguments info = im.encoderinfo - offset = info.get('offset', None) - tile_offset = info.get('tile_offset', None) - tile_size = info.get('tile_size', None) - quality_mode = info.get('quality_mode', 'rates') - quality_layers = info.get('quality_layers', None) - num_resolutions = info.get('num_resolutions', 0) - cblk_size = info.get('codeblock_size', None) - precinct_size = info.get('precinct_size', None) - irreversible = info.get('irreversible', False) - progression = info.get('progression', 'LRCP') - cinema_mode = info.get('cinema_mode', 'no') + offset = info.get("offset", None) + tile_offset = info.get("tile_offset", None) + tile_size = info.get("tile_size", None) + quality_mode = info.get("quality_mode", "rates") + quality_layers = info.get("quality_layers", None) + if quality_layers is not None and not ( + isinstance(quality_layers, (list, tuple)) + and all( + [ + isinstance(quality_layer, (int, float)) + for quality_layer in quality_layers + ] + ) + ): + raise ValueError("quality_layers must be a sequence of numbers") + + num_resolutions = info.get("num_resolutions", 0) + cblk_size = info.get("codeblock_size", None) + precinct_size = info.get("precinct_size", None) + irreversible = info.get("irreversible", False) + progression = info.get("progression", "LRCP") + cinema_mode = info.get("cinema_mode", "no") fd = -1 if hasattr(fp, "fileno"): try: fd = fp.fileno() - except: + except Exception: fd = -1 im.encoderconfig = ( @@ -258,10 +294,11 @@ def _save(im, fp, filename): irreversible, progression, cinema_mode, - fd + fd, ) - ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)]) + # ------------------------------------------------------------ # Registry stuff @@ -270,8 +307,8 @@ def _save(im, fp, filename): Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) Image.register_save(Jpeg2KImageFile.format, _save) -Image.register_extensions(Jpeg2KImageFile.format, - [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"]) +Image.register_extensions( + Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"] +) -Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') -Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') 
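The `reduce` property added in the Jpeg2K hunk above works around this plugin's attribute shadowing the newer `Image.reduce()` method (python-pillow/Pillow#4343): a falsy `_reduce` falls through to the parent's bound method. A standalone sketch of the pattern, with hypothetical `Base`/`Plugin` names standing in for `Image`/`Jpeg2KImageFile`:

```python
class Base:
    def reduce(self, factor):
        # stands in for Image.reduce(), which returns a shrunken copy
        return "reduced by %d" % factor


class Plugin(Base):
    def __init__(self):
        self._reduce = 0  # plugin setting: resolution levels to discard

    @property
    def reduce(self):
        # 0 is falsy, so an unset value falls back to the parent's method
        return self._reduce or super().reduce

    @reduce.setter
    def reduce(self, value):
        self._reduce = value


im = Plugin()
print(im.reduce(2))  # bound Base.reduce -> "reduced by 2"
im.reduce = 3
print(im.reduce)     # plugin setting -> 3
```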
+Image.register_mime(Jpeg2KImageFile.format, "image/jp2") diff --git a/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py index f206818..2aa029e 100644 --- a/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py @@ -31,26 +31,24 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import print_function - import array -import struct import io +import os +import struct +import subprocess +import tempfile import warnings + from . import Image, ImageFile, TiffImagePlugin -from ._binary import i8, o8, i16be as i16 +from ._binary import i8, i16be as i16, i32be as i32, o8 from .JpegPresets import presets -from ._util import isStringType - -__version__ = "0.6" - # # Parser + def Skip(self, marker): - n = i16(self.fp.read(2))-2 + n = i16(self.fp.read(2)) - 2 ImageFile._safe_read(self.fp, n) @@ -59,7 +57,7 @@ def APP(self, marker): # Application marker. Store these in the APP dictionary. # Also look for well-known application markers. - n = i16(self.fp.read(2))-2 + n = i16(self.fp.read(2)) - 2 s = ImageFile._safe_read(self.fp, n) app = "APP%d" % (marker & 15) @@ -75,7 +73,7 @@ def APP(self, marker): try: jfif_unit = i8(s[7]) jfif_density = i16(s, 8), i16(s, 10) - except: + except Exception: pass else: if jfif_unit == 1: @@ -84,7 +82,7 @@ def APP(self, marker): self.info["jfif_density"] = jfif_density elif marker == 0xFFE1 and s[:5] == b"Exif\0": if "exif" not in self.info: - # extract Exif information (incomplete) + # extract EXIF information (incomplete) self.info["exif"] = s # FIXME: value will change elif marker == 0xFFE2 and s[:5] == b"FPXR\0": # extract FlashPix information (incomplete) @@ -102,12 +100,44 @@ def APP(self, marker): # reassemble the profile, rather than assuming that the APP2 # markers appear in the correct sequence. 
self.icclist.append(s) + elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00": + # parse the image resource block + offset = 14 + photoshop = self.info.setdefault("photoshop", {}) + while s[offset : offset + 4] == b"8BIM": + try: + offset += 4 + # resource code + code = i16(s, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(s[offset]) + # name = s[offset+1:offset+1+name_len] + offset += 1 + name_len + offset += offset & 1 # align + # resource data block + size = i32(s, offset) + offset += 4 + data = s[offset : offset + size] + if code == 0x03ED: # ResolutionInfo + data = { + "XResolution": i32(data[:4]) / 65536, + "DisplayedUnitsX": i16(data[4:8]), + "YResolution": i32(data[8:12]) / 65536, + "DisplayedUnitsY": i16(data[12:]), + } + photoshop[code] = data + offset += size + offset += offset & 1 # align + except struct.error: + break # insufficient data + elif marker == 0xFFEE and s[:5] == b"Adobe": self.info["adobe"] = i16(s, 5) # extract Adobe custom properties try: adobe_transform = i8(s[1]) - except: + except Exception: pass else: self.info["adobe_transform"] = adobe_transform @@ -121,30 +151,32 @@ def APP(self, marker): # If DPI isn't in JPEG header, fetch from EXIF if "dpi" not in self.info and "exif" in self.info: try: - exif = self._getexif() + exif = self.getexif() resolution_unit = exif[0x0128] x_resolution = exif[0x011A] try: - dpi = x_resolution[0] / x_resolution[1] + dpi = float(x_resolution[0]) / x_resolution[1] except TypeError: dpi = x_resolution if resolution_unit == 3: # cm # 1 dpcm = 2.54 dpi dpi *= 2.54 - self.info["dpi"] = dpi, dpi - except (KeyError, SyntaxError, ZeroDivisionError): - # SyntaxError for invalid/unreadable exif + self.info["dpi"] = int(dpi + 0.5), int(dpi + 0.5) + except (KeyError, SyntaxError, ValueError, ZeroDivisionError): + # SyntaxError for invalid/unreadable EXIF # KeyError for dpi not included # ZeroDivisionError for invalid dpi rational value + # ValueError for x_resolution[0] being an invalid float self.info["dpi"] = 72, 72 def COM(self, marker): # # Comment marker. Store these in the APP dictionary. - n = i16(self.fp.read(2))-2 + n = i16(self.fp.read(2)) - 2 s = ImageFile._safe_read(self.fp, n) + self.info["comment"] = s self.app["COM"] = s # compatibility self.applist.append(("COM", s)) @@ -157,7 +189,7 @@ def SOF(self, marker): # mode. Note that this could be made a bit brighter, by # looking for JFIF and Adobe APP markers. - n = i16(self.fp.read(2))-2 + n = i16(self.fp.read(2)) - 2 s = ImageFile._safe_read(self.fp, n) self._size = i16(s[3:]), i16(s[1:]) @@ -192,9 +224,9 @@ def SOF(self, marker): self.icclist = None for i in range(6, len(s), 3): - t = s[i:i+3] + t = s[i : i + 3] # 4-tuples: id, vsamp, hsamp, qtable - self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + self.layer.append((t[0], i8(t[1]) // 16, i8(t[1]) & 15, i8(t[2]))) def DQT(self, marker): @@ -206,13 +238,13 @@ def DQT(self, marker): # FIXME: The quantization tables can be used to estimate the # compression quality. 
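The APP13 handler added above walks Photoshop `8BIM` image resource blocks: a 4-byte signature, a 16-bit resource code, a Pascal-style name padded to even length, then a 32-bit size and an even-padded payload. A self-contained sketch that builds one synthetic block and re-parses it with the same offset arithmetic; resource 0x0404 is the IPTC record that `getiptcinfo` now pulls from `info["photoshop"]`:

```python
import struct

def parse_8bim(s):
    """Minimal 8BIM walker using the same offset arithmetic as above."""
    resources, offset = {}, 0
    while s[offset:offset + 4] == b"8BIM":
        offset += 4
        code, = struct.unpack(">H", s[offset:offset + 2])
        offset += 2
        name_len = s[offset]
        offset += 1 + name_len
        offset += offset & 1                  # name is padded to even length
        size, = struct.unpack(">I", s[offset:offset + 4])
        offset += 4
        resources[code] = s[offset:offset + size]
        offset += size
        offset += offset & 1                  # payload is padded too
    return resources

blob = (b"8BIM" + struct.pack(">H", 0x0404)
        + b"\x00\x00" + struct.pack(">I", 4) + b"iptc")
print(parse_8bim(blob))  # {1028: b'iptc'}
```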
- n = i16(self.fp.read(2))-2 + n = i16(self.fp.read(2)) - 2 s = ImageFile._safe_read(self.fp, n) while len(s): if len(s) < 65: raise SyntaxError("bad quantization table marker") v = i8(s[0]) - if v//16 == 0: + if v // 16 == 0: self.quantization[v & 15] = array.array("B", s[1:65]) s = s[65:] else: @@ -286,7 +318,7 @@ MARKER = { 0xFFFB: ("JPG11", "Extension 11", None), 0xFFFC: ("JPG12", "Extension 12", None), 0xFFFD: ("JPG13", "Extension 13", None), - 0xFFFE: ("COM", "Comment", COM) + 0xFFFE: ("COM", "Comment", COM), } @@ -297,6 +329,7 @@ def _accept(prefix): ## # Image plugin for JPEG and JFIF images. + class JpegImageFile(ImageFile.ImageFile): format = "JPEG" @@ -340,8 +373,7 @@ class JpegImageFile(ImageFile.ImageFile): rawmode = self.mode if self.mode == "CMYK": rawmode = "CMYK;I" # assume adobe conventions - self.tile = [("jpeg", (0, 0) + self.size, 0, - (rawmode, ""))] + self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] # self.__offset = self.fp.tell() break s = self.fp.read(1) @@ -378,7 +410,8 @@ class JpegImageFile(ImageFile.ImageFile): return d, e, o, a = self.tile[0] - scale = 0 + scale = 1 + original_size = self.size if a[0] == "RGB" and mode in ["L", "YCbCr"]: self.mode = mode @@ -389,22 +422,25 @@ class JpegImageFile(ImageFile.ImageFile): for s in [8, 4, 2, 1]: if scale >= s: break - e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] - self._size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + e = ( + e[0], + e[1], + (e[2] - e[0] + s - 1) // s + e[0], + (e[3] - e[1] + s - 1) // s + e[1], + ) + self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s) scale = s self.tile = [(d, e, o, a)] self.decoderconfig = (scale, 0) - return self + box = (0, 0, original_size[0] / scale, original_size[1] / scale) + return (self.mode, box) def load_djpeg(self): # ALTERNATIVE: handle JPEGs via the IJG command line utilities - import subprocess - import tempfile - import os f, path = tempfile.mkstemp() os.close(f) if os.path.exists(self.filename): @@ -413,9 +449,9 @@ class JpegImageFile(ImageFile.ImageFile): raise ValueError("Invalid Filename") try: - _im = Image.open(path) - _im.load() - self.im = _im.im + with Image.open(path) as _im: + _im.load() + self.im = _im.im finally: try: os.unlink(path) @@ -437,60 +473,14 @@ class JpegImageFile(ImageFile.ImageFile): def _fixup_dict(src_dict): # Helper function for _getexif() # returns a dict with any single item tuples/lists as individual values - def _fixup(value): - try: - if len(value) == 1 and not isinstance(value, dict): - return value[0] - except: - pass - return value - - return {k: _fixup(v) for k, v in src_dict.items()} + exif = Image.Exif() + return exif._fixup_dict(src_dict) def _getexif(self): - # Extract EXIF information. This method is highly experimental, - # and is likely to be replaced with something better in a future - # version. - - # The EXIF record consists of a TIFF file embedded in a JPEG - # application marker (!). - try: - data = self.info["exif"] - except KeyError: + if "exif" not in self.info: return None - file = io.BytesIO(data[6:]) - head = file.read(8) - # process dictionary - info = TiffImagePlugin.ImageFileDirectory_v1(head) - info.load(file) - exif = dict(_fixup_dict(info)) - # get exif extension - try: - # exif field 0x8769 is an offset pointer to the location - # of the nested embedded exif ifd. - # It should be a long, but may be corrupted. 
- file.seek(exif[0x8769]) - except (KeyError, TypeError): - pass - else: - info = TiffImagePlugin.ImageFileDirectory_v1(head) - info.load(file) - exif.update(_fixup_dict(info)) - # get gpsinfo extension - try: - # exif field 0x8825 is an offset pointer to the location - # of the nested embedded gps exif ifd. - # It should be a long, but may be corrupted. - file.seek(exif[0x8825]) - except (KeyError, TypeError): - pass - else: - info = TiffImagePlugin.ImageFileDirectory_v1(head) - info.load(file) - exif[0x8825] = _fixup_dict(info) - - return exif + return dict(self.getexif()) def _getmp(self): @@ -506,13 +496,14 @@ def _getmp(self): return None file_contents = io.BytesIO(data) head = file_contents.read(8) - endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<" # process dictionary try: info = TiffImagePlugin.ImageFileDirectory_v2(head) + file_contents.seek(info.next) info.load(file_contents) mp = dict(info) - except: + except Exception: raise SyntaxError("malformed MP Index (unreadable directory)") # it's an error not to have a number of images try: @@ -525,37 +516,33 @@ def _getmp(self): rawmpentries = mp[0xB002] for entrynum in range(0, quant): unpackedentry = struct.unpack_from( - '{}LLLHH'.format(endianness), rawmpentries, entrynum * 16) - labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', - 'EntryNo2') + "{}LLLHH".format(endianness), rawmpentries, entrynum * 16 + ) + labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2") mpentry = dict(zip(labels, unpackedentry)) mpentryattr = { - 'DependentParentImageFlag': bool(mpentry['Attribute'] & - (1 << 31)), - 'DependentChildImageFlag': bool(mpentry['Attribute'] & - (1 << 30)), - 'RepresentativeImageFlag': bool(mpentry['Attribute'] & - (1 << 29)), - 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, - 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, - 'MPType': mpentry['Attribute'] & 0x00FFFFFF + "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)), + "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)), + "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)), + "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27, + "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24, + "MPType": mpentry["Attribute"] & 0x00FFFFFF, } - if mpentryattr['ImageDataFormat'] == 0: - mpentryattr['ImageDataFormat'] = 'JPEG' + if mpentryattr["ImageDataFormat"] == 0: + mpentryattr["ImageDataFormat"] = "JPEG" else: raise SyntaxError("unsupported picture format in MPO") mptypemap = { - 0x000000: 'Undefined', - 0x010001: 'Large Thumbnail (VGA Equivalent)', - 0x010002: 'Large Thumbnail (Full HD Equivalent)', - 0x020001: 'Multi-Frame Image (Panorama)', - 0x020002: 'Multi-Frame Image: (Disparity)', - 0x020003: 'Multi-Frame Image: (Multi-Angle)', - 0x030000: 'Baseline MP Primary Image' + 0x000000: "Undefined", + 0x010001: "Large Thumbnail (VGA Equivalent)", + 0x010002: "Large Thumbnail (Full HD Equivalent)", + 0x020001: "Multi-Frame Image (Panorama)", + 0x020002: "Multi-Frame Image: (Disparity)", + 0x020003: "Multi-Frame Image: (Multi-Angle)", + 0x030000: "Baseline MP Primary Image", } - mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], - 'Unknown') - mpentry['Attribute'] = mpentryattr + mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown") + mpentry["Attribute"] = mpentryattr mpentries.append(mpentry) mp[0xB002] = mpentries except KeyError: @@ -578,19 +565,24 @@ RAWMODE = { "YCbCr": "YCbCr", } 
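`_getexif()` above shrinks to `dict(self.getexif())`, delegating the TIFF walk, the 0x8769 EXIF-IFD hop, and the 0x8825 GPS hop to the shared `Image.Exif` class; the DPI fallback in `APP()` uses the same accessor. A short usage sketch against this Pillow tree, with `photo.jpg` as a placeholder input file:

```python
from PIL import Image

with Image.open("photo.jpg") as im:   # placeholder file name
    exif = im.getexif()               # shared Image.Exif mapping
    # 0x011A / 0x0128 are XResolution / ResolutionUnit, the tags the
    # DPI fallback in APP() reads
    print(exif.get(0x011A), exif.get(0x0128))
    print(dict(exif))                 # the flat view _getexif() now returns
```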
-zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, - 2, 4, 7, 13, 16, 26, 29, 42, - 3, 8, 12, 17, 25, 30, 41, 43, - 9, 11, 18, 24, 31, 40, 44, 53, - 10, 19, 23, 32, 39, 45, 52, 54, - 20, 22, 33, 38, 46, 51, 55, 60, - 21, 34, 37, 47, 50, 56, 59, 61, - 35, 36, 48, 49, 57, 58, 62, 63) +# fmt: off +zigzag_index = ( + 0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63, +) -samplings = {(1, 1, 1, 1, 1, 1): 0, - (2, 1, 1, 1, 1, 1): 1, - (2, 2, 1, 1, 1, 1): 2, - } +samplings = { + (1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, +} +# fmt: on def convert_dict_qtables(qtables): @@ -608,7 +600,7 @@ def get_sampling(im): # NOTE: currently Pillow can't encode JPEG to YCCK format. # If YCCK support is added in the future, subsampling code will have # to be updated (here and in JpegEncode.c) to deal with 4 layers. - if not hasattr(im, 'layers') or im.layers in (1, 4): + if not hasattr(im, "layers") or im.layers in (1, 4): return -1 sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] return samplings.get(sampling, -1) @@ -619,32 +611,32 @@ def _save(im, fp, filename): try: rawmode = RAWMODE[im.mode] except KeyError: - raise IOError("cannot write mode %s as JPEG" % im.mode) + raise OSError("cannot write mode %s as JPEG" % im.mode) info = im.encoderinfo - dpi = [int(round(x)) for x in info.get("dpi", (0, 0))] + dpi = [round(x) for x in info.get("dpi", (0, 0))] - quality = info.get("quality", 0) + quality = info.get("quality", -1) subsampling = info.get("subsampling", -1) qtables = info.get("qtables") if quality == "keep": - quality = 0 + quality = -1 subsampling = "keep" qtables = "keep" elif quality in presets: preset = presets[quality] - quality = 0 - subsampling = preset.get('subsampling', -1) - qtables = preset.get('quantization') + quality = -1 + subsampling = preset.get("subsampling", -1) + qtables = preset.get("quantization") elif not isinstance(quality, int): raise ValueError("Invalid quality setting") else: if subsampling in presets: - subsampling = presets[subsampling].get('subsampling', -1) - if isStringType(qtables) and qtables in presets: - qtables = presets[qtables].get('quantization') + subsampling = presets[subsampling].get("subsampling", -1) + if isinstance(qtables, str) and qtables in presets: + qtables = presets[qtables].get("quantization") if subsampling == "4:4:4": subsampling = 0 @@ -658,21 +650,23 @@ def _save(im, fp, filename): subsampling = 2 elif subsampling == "keep": if im.format != "JPEG": - raise ValueError( - "Cannot use 'keep' when original image is not a JPEG") + raise ValueError("Cannot use 'keep' when original image is not a JPEG") subsampling = get_sampling(im) def validate_qtables(qtables): if qtables is None: return qtables - if isStringType(qtables): + if isinstance(qtables, str): try: - lines = [int(num) for line in qtables.splitlines() - for num in line.split('#', 1)[0].split()] + lines = [ + int(num) + for line in qtables.splitlines() + for num in line.split("#", 1)[0].split() + ] except ValueError: raise ValueError("Invalid quantization table") else: - qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)] if isinstance(qtables, (tuple, list, dict)): if isinstance(qtables, dict): qtables = convert_dict_qtables(qtables) @@ -684,7 +678,7 @@ def _save(im, fp, 
filename): try: if len(table) != 64: raise TypeError - table = array.array('B', table) + table = array.array("B", table) except TypeError: raise ValueError("Invalid quantization table") else: @@ -693,8 +687,7 @@ def _save(im, fp, filename): if qtables == "keep": if im.format != "JPEG": - raise ValueError( - "Cannot use 'keep' when original image is not a JPEG") + raise ValueError("Cannot use 'keep' when original image is not a JPEG") qtables = getattr(im, "quantization", None) qtables = validate_qtables(qtables) @@ -712,18 +705,27 @@ def _save(im, fp, filename): i = 1 for marker in markers: size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) - extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + - o8(len(markers)) + marker) + extra += ( + b"\xFF\xE2" + + size + + b"ICC_PROFILE\0" + + o8(i) + + o8(len(markers)) + + marker + ) i += 1 # "progressive" is the official name, but older documentation # says "progression" # FIXME: issue a warning if the wrong form is used (post-1.1.7) - progressive = (info.get("progressive", False) or - info.get("progression", False)) + progressive = info.get("progressive", False) or info.get("progression", False) optimize = info.get("optimize", False) + exif = info.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + # get keyword arguments im.encoderconfig = ( quality, @@ -731,12 +733,13 @@ def _save(im, fp, filename): info.get("smooth", 0), optimize, info.get("streamtype", 0), - dpi[0], dpi[1], + dpi[0], + dpi[1], subsampling, qtables, extra, - info.get("exif", b"") - ) + exif, + ) # if we optimize, libjpeg needs a buffer big enough to hold the whole image # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is @@ -745,26 +748,23 @@ def _save(im, fp, filename): bufsize = 0 if optimize or progressive: # CMYK can be bigger - if im.mode == 'CMYK': + if im.mode == "CMYK": bufsize = 4 * im.size[0] * im.size[1] - # keep sets quality to 0, but the actual value may be high. - elif quality >= 95 or quality == 0: + # keep sets quality to -1, but the actual value may be high. + elif quality >= 95 or quality == -1: bufsize = 2 * im.size[0] * im.size[1] else: bufsize = im.size[0] * im.size[1] - # The exif info needs to be written as one block, + APP1, + one spare byte. + # The EXIF info needs to be written as one block, + APP1, + one spare byte. # Ensure that our buffer is big enough. Same with the icc_profile block. - bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5, - len(extra) + 1) + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(exif) + 5, len(extra) + 1) - ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize) def _save_cjpeg(im, fp, filename): # ALTERNATIVE: handle JPEGs via the IJG command line utilities. - import os - import subprocess tempfile = im._dump() subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) try: @@ -782,13 +782,17 @@ def jpeg_factory(fp=None, filename=None): if mpheader[45057] > 1: # It's actually an MPO from .MpoImagePlugin import MpoImageFile - im = MpoImageFile(fp, filename) + + # Don't reload everything, just convert it. 
+ im = MpoImageFile.adopt(im, mpheader) except (TypeError, IndexError): # It is really a JPEG pass except SyntaxError: - warnings.warn("Image appears to be a malformed MPO file, it will be " - "interpreted as a base JPEG file") + warnings.warn( + "Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file" + ) return im @@ -798,7 +802,6 @@ def jpeg_factory(fp=None, filename=None): Image.register_open(JpegImageFile.format, jpeg_factory, _accept) Image.register_save(JpegImageFile.format, _save) -Image.register_extensions(JpegImageFile.format, - [".jfif", ".jpe", ".jpg", ".jpeg"]) +Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"]) Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/server/www/packages/packages-linux/x64/PIL/JpegPresets.py b/server/www/packages/packages-linux/x64/PIL/JpegPresets.py index 5f01f0d..012bf81 100644 --- a/server/www/packages/packages-linux/x64/PIL/JpegPresets.py +++ b/server/www/packages/packages-linux/x64/PIL/JpegPresets.py @@ -33,7 +33,10 @@ Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and 4:2:0. You can get the subsampling of a JPEG with the -`JpegImagePlugin.get_subsampling(im)` function. +`JpegImagePlugin.get_sampling(im)` function. + +In JPEG compressed data a JPEG marker is used instead of an EXIF tag. +(ref.: https://www.exiv2.org/tags.html) Quantization tables @@ -62,11 +65,13 @@ The tables format between im.quantization and quantization in presets differ in You can convert the dict format to the preset format with the `JpegImagePlugin.convert_dict_qtables(dict_qtables)` function. -Libjpeg ref.: https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html +Libjpeg ref.: +https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html """ -presets = { +# fmt: off +presets = { # noqa: E128 'web_low': {'subsampling': 2, # "4:2:0" 'quantization': [ [20, 16, 25, 39, 50, 46, 62, 68, @@ -239,3 +244,4 @@ presets = { 15, 12, 12, 12, 12, 12, 12, 12] ]}, } +# fmt: on diff --git a/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py index 161fb5e..cd047fe 100644 --- a/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py @@ -17,9 +17,8 @@ # import struct -from . import Image, ImageFile -__version__ = "0.2" +from . import Image, ImageFile def _accept(s): @@ -29,6 +28,7 @@ def _accept(s): ## # Image plugin for McIdas area images. + class McIdasImageFile(ImageFile.ImageFile): format = "MCIDAS" @@ -62,7 +62,7 @@ class McIdasImageFile(ImageFile.ImageFile): self._size = w[10], w[9] offset = w[34] + w[15] - stride = w[15] + w[10]*w[11]*w[14] + stride = w[15] + w[10] * w[11] * w[14] self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] diff --git a/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py index 1dbb6a5..8610988 100644 --- a/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py @@ -17,12 +17,9 @@ # -from . import Image, TiffImagePlugin - import olefile -__version__ = "0.1" - +from . 
import Image, TiffImagePlugin # # -------------------------------------------------------------------- @@ -35,6 +32,7 @@ def _accept(prefix): ## # Image plugin for Microsoft's Image Composer file format. + class MicImageFile(TiffImagePlugin.TiffImageFile): format = "MIC" @@ -48,7 +46,7 @@ class MicImageFile(TiffImagePlugin.TiffImageFile): try: self.ole = olefile.OleFileIO(self.fp) - except IOError: + except OSError: raise SyntaxError("not an MIC file; invalid OLE file") # find ACI subfiles with Image members (maybe not the @@ -95,9 +93,17 @@ class MicImageFile(TiffImagePlugin.TiffImageFile): self.frame = frame def tell(self): - return self.frame + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + # # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py index 15c7afc..a358dfd 100644 --- a/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py @@ -17,14 +17,11 @@ from . import Image, ImageFile from ._binary import i8 -__version__ = "0.1" - - # # Bitstream parser -class BitStream(object): +class BitStream: def __init__(self, fp): self.fp = fp self.bits = 0 @@ -59,6 +56,7 @@ class BitStream(object): # Image plugin for MPEG streams. This plugin can identify a stream, # but it cannot read it. + class MpegImageFile(ImageFile.ImageFile): format = "MPEG" diff --git a/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py index a1a8d65..e97176d 100644 --- a/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py @@ -18,9 +18,8 @@ # See the README file for information on usage and redistribution. # -from . import Image, JpegImagePlugin - -__version__ = "0.1" +from . import Image, ImageFile, JpegImagePlugin +from ._binary import i16be as i16 def _accept(prefix): @@ -35,6 +34,7 @@ def _save(im, fp, filename): ## # Image plugin for MPO images. + class MpoImageFile(JpegImagePlugin.JpegImageFile): format = "MPO" @@ -44,15 +44,19 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile): def _open(self): self.fp.seek(0) # prep the fp in order to pass the JPEG test JpegImagePlugin.JpegImageFile._open(self) - self.mpinfo = self._getmp() + self._after_jpeg_open() + + def _after_jpeg_open(self, mpheader=None): + self.mpinfo = mpheader if mpheader is not None else self._getmp() self.__framecount = self.mpinfo[0xB001] - self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] - for mpent in self.mpinfo[0xB002]] + self.__mpoffsets = [ + mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002] + ] self.__mpoffsets[0] = 0 # Note that the following assertion will only be invalid if something # gets broken within JpegImagePlugin. 
assert self.__framecount == len(self.__mpoffsets) - del self.info['mpoffset'] # no longer needed + del self.info["mpoffset"] # no longer needed self.__fp = self.fp # FIXME: hack self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame self.__frame = 0 @@ -76,14 +80,52 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile): return self.fp = self.__fp self.offset = self.__mpoffsets[frame] - self.tile = [ - ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) - ] + + self.fp.seek(self.offset + 2) # skip SOI marker + segment = self.fp.read(2) + if not segment: + raise ValueError("No data found for frame") + if i16(segment) == 0xFFE1: # APP1 + n = i16(self.fp.read(2)) - 2 + self.info["exif"] = ImageFile._safe_read(self.fp, n) + + exif = self.getexif() + if 40962 in exif and 40963 in exif: + self._size = (exif[40962], exif[40963]) + elif "exif" in self.info: + del self.info["exif"] + + self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))] self.__frame = frame def tell(self): return self.__frame + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + @staticmethod + def adopt(jpeg_instance, mpheader=None): + """ + Transform the instance of JpegImageFile into + an instance of MpoImageFile. + After the call, the JpegImageFile is extended + to be an MpoImageFile. + + This is essentially useful when opening a JPEG + file that reveals itself as an MPO, to avoid + double call to _open. + """ + jpeg_instance.__class__ = MpoImageFile + jpeg_instance._after_jpeg_open(mpheader) + return jpeg_instance + # --------------------------------------------------------------------- # Registry stuff diff --git a/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py index 74c6817..2b2937e 100644 --- a/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py @@ -23,13 +23,11 @@ # # See also: http://www.fileformat.info/format/mspaint/egff.htm -from . import Image, ImageFile -from ._binary import i16le as i16, o16le as o16, i8 -import struct import io +import struct -__version__ = "0.1" - +from . import Image, ImageFile +from ._binary import i8, i16le as i16, o16le as o16 # # read MSP files @@ -43,6 +41,7 @@ def _accept(prefix): # Image plugin for Windows MSP images. This plugin supports both # uncompressed (Windows 1.0). 
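The `adopt()` staticmethod introduced in the MpoImagePlugin hunk above lets `jpeg_factory()` upgrade an already-parsed `JpegImageFile` in place instead of re-reading the stream: rebind `__class__`, then run only the MPO-specific initialisation. A toy sketch of that adoption pattern, with hypothetical `Jpeg`/`Mpo` classes:

```python
class Jpeg:
    def __init__(self, data):
        self.data = data            # the expensive parse happens once, here


class Mpo(Jpeg):
    def _after_open(self, header):
        self.header = header        # MPO-specific state only

    @staticmethod
    def adopt(jpeg, header):
        # upgrade in place instead of re-running __init__ on the stream
        jpeg.__class__ = Mpo
        jpeg._after_open(header)
        return jpeg


im = Jpeg(b"...")
im = Mpo.adopt(im, {"frames": 2})
print(type(im).__name__, im.header)  # Mpo {'frames': 2}
```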
+ class MspImageFile(ImageFile.ImageFile): format = "MSP" @@ -58,7 +57,7 @@ class MspImageFile(ImageFile.ImageFile): # Header checksum checksum = 0 for i in range(0, 32, 2): - checksum = checksum ^ i16(s[i:i+2]) + checksum = checksum ^ i16(s[i : i + 2]) if checksum != 0: raise SyntaxError("bad MSP checksum") @@ -66,9 +65,9 @@ class MspImageFile(ImageFile.ImageFile): self._size = i16(s[4:]), i16(s[6:]) if s[:4] == b"DanM": - self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))] else: - self.tile = [("MSP", (0, 0)+self.size, 32, None)] + self.tile = [("MSP", (0, 0) + self.size, 32, None)] class MspDecoder(ImageFile.PyDecoder): @@ -111,13 +110,14 @@ class MspDecoder(ImageFile.PyDecoder): def decode(self, buffer): img = io.BytesIO() - blank_line = bytearray((0xff,)*((self.state.xsize+7)//8)) + blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8)) try: self.fd.seek(32) - rowmap = struct.unpack_from("<%dH" % (self.state.ysize), - self.fd.read(self.state.ysize*2)) + rowmap = struct.unpack_from( + "<%dH" % (self.state.ysize), self.fd.read(self.state.ysize * 2) + ) except struct.error: - raise IOError("Truncated MSP file in row map") + raise OSError("Truncated MSP file in row map") for x, rowlen in enumerate(rowmap): try: @@ -126,9 +126,9 @@ class MspDecoder(ImageFile.PyDecoder): continue row = self.fd.read(rowlen) if len(row) != rowlen: - raise IOError( - "Truncated MSP file, expected %d bytes on row %s", - (rowlen, x)) + raise OSError( + "Truncated MSP file, expected %d bytes on row %s", (rowlen, x) + ) idx = 0 while idx < rowlen: runtype = i8(row[idx]) @@ -139,18 +139,18 @@ class MspDecoder(ImageFile.PyDecoder): idx += 2 else: runcount = runtype - img.write(row[idx:idx+runcount]) + img.write(row[idx : idx + runcount]) idx += runcount except struct.error: - raise IOError("Corrupted MSP file in row %d" % x) + raise OSError("Corrupted MSP file in row %d" % x) self.set_as_raw(img.getvalue(), ("1", 0, 1)) return 0, 0 -Image.register_decoder('MSP', MspDecoder) +Image.register_decoder("MSP", MspDecoder) # @@ -160,7 +160,7 @@ Image.register_decoder('MSP', MspDecoder) def _save(im, fp, filename): if im.mode != "1": - raise IOError("cannot write mode %s as MSP" % im.mode) + raise OSError("cannot write mode %s as MSP" % im.mode) # create MSP header header = [0] * 16 @@ -181,7 +181,7 @@ def _save(im, fp, filename): fp.write(o16(h)) # image body - ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))]) # diff --git a/server/www/packages/packages-linux/x64/PIL/OleFileIO.py b/server/www/packages/packages-linux/x64/PIL/OleFileIO.py deleted file mode 100644 index b3caa10..0000000 --- a/server/www/packages/packages-linux/x64/PIL/OleFileIO.py +++ /dev/null @@ -1,4 +0,0 @@ -raise ImportError( - 'PIL.OleFileIO is deprecated. Use the olefile Python package ' - 'instead. This module will be removed in a future version.' -) diff --git a/server/www/packages/packages-linux/x64/PIL/PSDraw.py b/server/www/packages/packages-linux/x64/PIL/PSDraw.py index d2ded6f..762d31e 100644 --- a/server/www/packages/packages-linux/x64/PIL/PSDraw.py +++ b/server/www/packages/packages-linux/x64/PIL/PSDraw.py @@ -15,15 +15,15 @@ # See the README file for information on usage and redistribution. # -from . import EpsImagePlugin -from ._util import py3 import sys +from . import EpsImagePlugin + ## # Simple Postscript graphics interface. 
-class PSDraw(object): +class PSDraw: """ Sets up printing to the given file. If **fp** is omitted, :py:attr:`sys.stdout` is assumed. @@ -35,19 +35,21 @@ class PSDraw(object): self.fp = fp def _fp_write(self, to_write): - if not py3 or self.fp == sys.stdout: + if self.fp == sys.stdout: self.fp.write(to_write) else: - self.fp.write(bytes(to_write, 'UTF-8')) + self.fp.write(bytes(to_write, "UTF-8")) def begin_document(self, id=None): """Set up printing of a document. (Write Postscript DSC header.)""" # FIXME: incomplete - self._fp_write("%!PS-Adobe-3.0\n" - "save\n" - "/showpage { } def\n" - "%%EndComments\n" - "%%BeginDocument\n") + self._fp_write( + "%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n" + ) # self._fp_write(ERROR_PS) # debugging! self._fp_write(EDROFF_PS) self._fp_write(VDI_PS) @@ -56,9 +58,7 @@ class PSDraw(object): def end_document(self): """Ends printing. (Write Postscript DSC footer.)""" - self._fp_write("%%EndDocument\n" - "restore showpage\n" - "%%End\n") + self._fp_write("%%EndDocument\nrestore showpage\n%%End\n") if hasattr(self.fp, "flush"): self.fp.flush() @@ -71,8 +71,7 @@ class PSDraw(object): """ if font not in self.isofont: # reencode font - self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % - (font, font)) + self._fp_write("/PSDraw-{} ISOLatin1Encoding /{} E\n".format(font, font)) self.isofont[font] = 1 # rough self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) @@ -120,8 +119,8 @@ class PSDraw(object): else: dpi = 100 # greyscale # image size (on paper) - x = float(im.size[0] * 72) / dpi - y = float(im.size[1] * 72) / dpi + x = im.size[0] * 72 / dpi + y = im.size[1] * 72 / dpi # max allowed size xmax = float(box[2] - box[0]) ymax = float(box[3] - box[1]) @@ -133,15 +132,16 @@ class PSDraw(object): y = ymax dx = (xmax - x) / 2 + box[0] dy = (ymax - y) / 2 + box[1] - self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + self._fp_write("gsave\n{:f} {:f} translate\n".format(dx, dy)) if (x, y) != im.size: # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) sx = x / im.size[0] sy = y / im.size[1] - self._fp_write("%f %f scale\n" % (sx, sy)) + self._fp_write("{:f} {:f} scale\n".format(sx, sy)) EpsImagePlugin._save(im, self.fp, None, 0) self._fp_write("\ngrestore\n") + # -------------------------------------------------------------------- # Postscript driver diff --git a/server/www/packages/packages-linux/x64/PIL/PaletteFile.py b/server/www/packages/packages-linux/x64/PIL/PaletteFile.py index 9ed69d6..73f1b4b 100644 --- a/server/www/packages/packages-linux/x64/PIL/PaletteFile.py +++ b/server/www/packages/packages-linux/x64/PIL/PaletteFile.py @@ -15,11 +15,11 @@ from ._binary import o8 - ## # File handler for Teragon-style palette files. -class PaletteFile(object): + +class PaletteFile: rawmode = "RGB" diff --git a/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py index 7d7b165..804ece3 100644 --- a/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py @@ -10,9 +10,8 @@ from . 
import Image, ImageFile from ._binary import o8, o16be as o16b -__version__ = "1.0" - -_Palm8BitColormapValues = ( +# fmt: off +_Palm8BitColormapValues = ( # noqa: E131 (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), @@ -77,6 +76,7 @@ _Palm8BitColormapValues = ( (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) +# fmt: on # so build a prototype image to be used for palette resampling @@ -86,7 +86,7 @@ def build_prototype_image(): palettedata = () for colormapValue in _Palm8BitColormapValues: palettedata += colormapValue - palettedata += (0, 0, 0)*(256 - len(_Palm8BitColormapValues)) + palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues)) image.putpalette(palettedata) return image @@ -98,17 +98,9 @@ Palm8BitColormapImage = build_prototype_image() # # -------------------------------------------------------------------- -_FLAGS = { - "custom-colormap": 0x4000, - "is-compressed": 0x8000, - "has-transparent": 0x2000, - } +_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000} -_COMPRESSION_TYPES = { - "none": 0xFF, - "rle": 0x01, - "scanline": 0x00, - } +_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00} # @@ -117,6 +109,7 @@ _COMPRESSION_TYPES = { ## # (Internal) Image save plugin for the Palm format. + def _save(im, fp, filename): if im.mode == "P": @@ -128,28 +121,24 @@ def _save(im, fp, filename): bpp = 8 version = 1 - elif (im.mode == "L" and - "bpp" in im.encoderinfo and - im.encoderinfo["bpp"] in (1, 2, 4)): + elif im.mode == "L": + if im.encoderinfo.get("bpp") in (1, 2, 4): + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift) + ) + elif im.info.get("bpp") in (1, 2, 4): + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. + bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval)) + else: + raise OSError("cannot write mode %s as Palm" % im.mode) - # this is 8-bit grayscale, so we shift it to get the high-order bits, - # and invert it because - # Palm does greyscale from white (0) to black (1) - bpp = im.encoderinfo["bpp"] - im = im.point( - lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) - # we ignore the palette here - im.mode = "P" - rawmode = "P;" + str(bpp) - version = 1 - - elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): - - # here we assume that even though the inherent mode is 8-bit grayscale, - # only the lower bpp bits are significant. - # We invert them to match the Palm. 
- bpp = im.info["bpp"] - im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval)) # we ignore the palette here im.mode = "P" rawmode = "P;" + str(bpp) @@ -164,7 +153,7 @@ def _save(im, fp, filename): else: - raise IOError("cannot write mode %s as Palm" % im.mode) + raise OSError("cannot write mode %s as Palm" % im.mode) # # make sure image data is available @@ -175,7 +164,7 @@ def _save(im, fp, filename): cols = im.size[0] rows = im.size[1] - rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2 + rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2 transparent_index = 0 compression_type = _COMPRESSION_TYPES["none"] @@ -199,7 +188,7 @@ def _save(im, fp, filename): fp.write(o16b(offset)) fp.write(o8(transparent_index)) fp.write(o8(compression_type)) - fp.write(o16b(0)) # reserved by Palm + fp.write(o16b(0)) # reserved by Palm # now write colormap if necessary @@ -207,20 +196,21 @@ def _save(im, fp, filename): fp.write(o16b(256)) for i in range(256): fp.write(o8(i)) - if colormapmode == 'RGB': + if colormapmode == "RGB": fp.write( - o8(colormap[3 * i]) + - o8(colormap[3 * i + 1]) + - o8(colormap[3 * i + 2])) - elif colormapmode == 'RGBA': + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2]) + ) + elif colormapmode == "RGBA": fp.write( - o8(colormap[4 * i]) + - o8(colormap[4 * i + 1]) + - o8(colormap[4 * i + 2])) + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2]) + ) # now convert data to raw form - ImageFile._save( - im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))]) if hasattr(fp, "flush"): fp.flush() diff --git a/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py index 87e5792..625f556 100644 --- a/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py @@ -18,14 +18,12 @@ from . import Image, ImageFile from ._binary import i8 -__version__ = "0.1" - - ## # Image plugin for PhotoCD images. This plugin only reads the 768x512 # image from the file; higher resolutions are encoded in a proprietary # encoding. + class PcdImageFile(ImageFile.ImageFile): format = "PCD" @@ -49,7 +47,7 @@ class PcdImageFile(ImageFile.ImageFile): self.mode = "RGB" self._size = 768, 512 # FIXME: not correct for rotated images! - self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] + self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] def load_end(self): if self.tile_post_rotate: diff --git a/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py b/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py index eba85fe..c463533 100644 --- a/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py +++ b/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py @@ -16,50 +16,55 @@ # See the README file for information on usage and redistribution. # -from . import Image, FontFile -from ._binary import i8, i16le as l16, i32le as l32, i16be as b16, i32be as b32 +import io + +from . 
import FontFile, Image +from ._binary import i8, i16be as b16, i16le as l16, i32be as b32, i32le as l32 # -------------------------------------------------------------------- # declarations PCF_MAGIC = 0x70636601 # "\x01fcp" -PCF_PROPERTIES = (1 << 0) -PCF_ACCELERATORS = (1 << 1) -PCF_METRICS = (1 << 2) -PCF_BITMAPS = (1 << 3) -PCF_INK_METRICS = (1 << 4) -PCF_BDF_ENCODINGS = (1 << 5) -PCF_SWIDTHS = (1 << 6) -PCF_GLYPH_NAMES = (1 << 7) -PCF_BDF_ACCELERATORS = (1 << 8) +PCF_PROPERTIES = 1 << 0 +PCF_ACCELERATORS = 1 << 1 +PCF_METRICS = 1 << 2 +PCF_BITMAPS = 1 << 3 +PCF_INK_METRICS = 1 << 4 +PCF_BDF_ENCODINGS = 1 << 5 +PCF_SWIDTHS = 1 << 6 +PCF_GLYPH_NAMES = 1 << 7 +PCF_BDF_ACCELERATORS = 1 << 8 BYTES_PER_ROW = [ - lambda bits: ((bits+7) >> 3), - lambda bits: ((bits+15) >> 3) & ~1, - lambda bits: ((bits+31) >> 3) & ~3, - lambda bits: ((bits+63) >> 3) & ~7, + lambda bits: ((bits + 7) >> 3), + lambda bits: ((bits + 15) >> 3) & ~1, + lambda bits: ((bits + 31) >> 3) & ~3, + lambda bits: ((bits + 63) >> 3) & ~7, ] def sz(s, o): - return s[o:s.index(b"\0", o)] + return s[o : s.index(b"\0", o)] ## # Font file plugin for the X11 PCF format. + class PcfFontFile(FontFile.FontFile): name = "name" - def __init__(self, fp): + def __init__(self, fp, charset_encoding="iso8859-1"): + + self.charset_encoding = charset_encoding magic = l32(fp.read(4)) if magic != PCF_MAGIC: raise SyntaxError("not a PCF file") - FontFile.FontFile.__init__(self) + super().__init__() count = l32(fp.read(4)) self.toc = {} @@ -82,7 +87,7 @@ class PcfFontFile(FontFile.FontFile): ix = encoding[ch] if ix is not None: x, y, l, r, w, a, d, f = metrics[ix] - glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix] + glyph = (w, 0), (l, d - y, x + l, d), (0, 0, x, y), bitmaps[ix] self.glyph[ch] = glyph def _getformat(self, tag): @@ -117,7 +122,7 @@ class PcfFontFile(FontFile.FontFile): for i in range(nprops): p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) if nprops & 3: - fp.seek(4 - (nprops & 3), 1) # pad + fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad data = fp.read(i32(fp.read(4))) @@ -140,7 +145,7 @@ class PcfFontFile(FontFile.FontFile): append = metrics.append - if (format & 0xff00) == 0x100: + if (format & 0xFF00) == 0x100: # "compressed" metrics for i in range(i16(fp.read(2))): @@ -151,10 +156,7 @@ class PcfFontFile(FontFile.FontFile): descent = i8(fp.read(1)) - 128 xsize = right - left ysize = ascent + descent - append( - (xsize, ysize, left, right, width, - ascent, descent, 0) - ) + append((xsize, ysize, left, right, width, ascent, descent, 0)) else: @@ -168,10 +170,7 @@ class PcfFontFile(FontFile.FontFile): attributes = i16(fp.read(2)) xsize = right - left ysize = ascent + descent - append( - (xsize, ysize, left, right, width, - ascent, descent, attributes) - ) + append((xsize, ysize, left, right, width, ascent, descent, attributes)) return metrics @@ -187,7 +186,7 @@ class PcfFontFile(FontFile.FontFile): nbitmaps = i32(fp.read(4)) if nbitmaps != len(metrics): - raise IOError("Wrong number of bitmaps") + raise OSError("Wrong number of bitmaps") offsets = [] for i in range(nbitmaps): @@ -198,7 +197,7 @@ class PcfFontFile(FontFile.FontFile): bitmapSizes.append(i32(fp.read(4))) # byteorder = format & 4 # non-zero => MSB - bitorder = format & 8 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB padindex = format & 3 bitmapsize = bitmapSizes[padindex] @@ -213,10 +212,8 @@ class PcfFontFile(FontFile.FontFile): for i in range(nbitmaps): x, y, l, r, w, a, d, f = metrics[i] - b, e = offsets[i], offsets[i+1] - 
bitmaps.append( - Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)) - ) + b, e = offsets[i], offsets[i + 1] + bitmaps.append(Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))) return bitmaps @@ -230,16 +227,21 @@ class PcfFontFile(FontFile.FontFile): firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) - default = i16(fp.read(2)) + i16(fp.read(2)) # default nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) - for i in range(nencoding): - encodingOffset = i16(fp.read(2)) - if encodingOffset != 0xFFFF: - try: - encoding[i+firstCol] = encodingOffset - except IndexError: - break # only load ISO-8859-1 glyphs + encodingOffsets = [i16(fp.read(2)) for _ in range(nencoding)] + + for i in range(firstCol, len(encoding)): + try: + encodingOffset = encodingOffsets[ + ord(bytearray([i]).decode(self.charset_encoding)) + ] + if encodingOffset != 0xFFFF: + encoding[i] = encodingOffset + except UnicodeDecodeError: + # character is not supported in selected encoding + pass return encoding diff --git a/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py index daa58b3..6cf10de 100644 --- a/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py @@ -25,14 +25,14 @@ # See the README file for information on usage and redistribution. # +import io import logging + from . import Image, ImageFile, ImagePalette from ._binary import i8, i16le as i16, o8, o16le as o16 logger = logging.getLogger(__name__) -__version__ = "0.6" - def _accept(prefix): return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] @@ -41,6 +41,7 @@ def _accept(prefix): ## # Image plugin for Paintbrush images. + class PcxImageFile(ImageFile.ImageFile): format = "PCX" @@ -54,7 +55,7 @@ class PcxImageFile(ImageFile.ImageFile): raise SyntaxError("not a PCX file") # image - bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 + bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1 if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: raise SyntaxError("bad PCX image size") logger.debug("BBox: %s %s %s %s", *bbox) @@ -64,8 +65,13 @@ class PcxImageFile(ImageFile.ImageFile): bits = i8(s[3]) planes = i8(s[65]) stride = i16(s, 66) - logger.debug("PCX version %s, bits %s, planes %s, stride %s", - version, bits, planes, stride) + logger.debug( + "PCX version %s, bits %s, planes %s, stride %s", + version, + bits, + planes, + stride, + ) self.info["dpi"] = i16(s, 12), i16(s, 14) @@ -80,12 +86,12 @@ class PcxImageFile(ImageFile.ImageFile): elif version == 5 and bits == 8 and planes == 1: mode = rawmode = "L" # FIXME: hey, this doesn't work with the incremental loader !!! 
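The hunk just below replaces the magic whence value 2 with io.SEEK_END when seeking back to the 769-byte VGA palette block at the end of a version-5 PCX file. The whence constants are ordinary integers, so behaviour is unchanged; a minimal sketch under that assumption (the file name is hypothetical):

    import io

    assert io.SEEK_END == 2  # the whence constants are plain ints
    with open("image.pcx", "rb") as f:   # hypothetical input file
        f.seek(-769, io.SEEK_END)        # position 769 bytes before EOF
        block = f.read(769)              # 0x0C marker + 256 RGB triples
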
- self.fp.seek(-769, 2) + self.fp.seek(-769, io.SEEK_END) s = self.fp.read(769) if len(s) == 769 and i8(s[0]) == 12: # check if the palette is linear greyscale for i in range(256): - if s[i*3+1:i*3+4] != o8(i)*3: + if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3: mode = rawmode = "P" break if mode == "P": @@ -97,16 +103,17 @@ class PcxImageFile(ImageFile.ImageFile): rawmode = "RGB;L" else: - raise IOError("unknown PCX mode") + raise OSError("unknown PCX mode") self.mode = mode - self._size = bbox[2]-bbox[0], bbox[3]-bbox[1] + self._size = bbox[2] - bbox[0], bbox[3] - bbox[1] bbox = (0, 0) + self.size logger.debug("size: %sx%s", *self.size) self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + # -------------------------------------------------------------------- # save PCX files @@ -135,8 +142,12 @@ def _save(im, fp, filename): # Ideally it should be passed in in the state, but the bytes value # gets overwritten. - logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", - im.size[0], bits, stride) + logger.debug( + "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], + bits, + stride, + ) # under windows, we could determine the current screen size with # "Image.core.display_mode()[1]", but I think that's overkill... @@ -147,17 +158,30 @@ def _save(im, fp, filename): # PCX header fp.write( - o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) + - o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) + - o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) + - o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) + - b"\0"*54 - ) + o8(10) + + o8(version) + + o8(1) + + o8(bits) + + o16(0) + + o16(0) + + o16(im.size[0] - 1) + + o16(im.size[1] - 1) + + o16(dpi[0]) + + o16(dpi[1]) + + b"\0" * 24 + + b"\xFF" * 24 + + b"\0" + + o8(planes) + + o16(stride) + + o16(1) + + o16(screen[0]) + + o16(screen[1]) + + b"\0" * 54 + ) assert fp.tell() == 128 - ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, - (rawmode, bits*planes))]) + ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))]) if im.mode == "P": # colour palette @@ -167,7 +191,8 @@ def _save(im, fp, filename): # greyscale palette fp.write(o8(12)) for i in range(256): - fp.write(o8(i)*3) + fp.write(o8(i) * 3) + # -------------------------------------------------------------------- # registry @@ -177,3 +202,5 @@ Image.register_open(PcxImageFile.format, PcxImageFile, _accept) Image.register_save(PcxImageFile.format, _save) Image.register_extension(PcxImageFile.format, ".pcx") + +Image.register_mime(PcxImageFile.format, "image/x-pcx") diff --git a/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py index b425027..47500ba 100644 --- a/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py @@ -20,13 +20,11 @@ # Image plugin for PDF images (output only). ## -from . import Image, ImageFile, ImageSequence, PdfParser import io import os import time -__version__ = "0.5" - +from . import Image, ImageFile, ImageSequence, PdfParser, __version__ # # -------------------------------------------------------------------- @@ -46,6 +44,7 @@ def _save_all(im, fp, filename): ## # (Internal) Image save plugin for the PDF format. 
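A note on the self.tile descriptors reformatted in the PCX plugin above and in most of the other plugins in this diff: each entry is a four-tuple of (codec name, bounding box, data offset, codec arguments), and the recurring "(0, 0) + im.size" expression simply builds the full-image bounding box. A minimal sketch of inspecting the descriptors before the pixel data is loaded (file name hypothetical; the exact args vary per codec):

    from PIL import Image

    # Pillow queues pending decode work as 4-tuples:
    #   (decoder_name, (x0, y0, x1, y1), byte_offset, decoder_args)
    with Image.open("image.pcx") as im:  # hypothetical input file
        for decoder, bbox, offset, args in im.tile:
            print(decoder, bbox, offset, args)
        # e.g. pcx (0, 0, 640, 480) 128 ('P', 640)
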
+ def _save(im, fp, filename, save_all=False): is_appending = im.encoderinfo.get("append", False) if is_appending: @@ -56,16 +55,16 @@ def _save(im, fp, filename, save_all=False): resolution = im.encoderinfo.get("resolution", 72.0) info = { - "title": None if is_appending else os.path.splitext( - os.path.basename(filename) - )[0], + "title": None + if is_appending + else os.path.splitext(os.path.basename(filename))[0], "author": None, "subject": None, "keywords": None, "creator": None, "producer": None, "creationDate": None if is_appending else time.gmtime(), - "modDate": None if is_appending else time.gmtime() + "modDate": None if is_appending else time.gmtime(), } for k, default in info.items(): v = im.encoderinfo.get(k) if k in im.encoderinfo else default @@ -78,7 +77,7 @@ def _save(im, fp, filename, save_all=False): existing_pdf.start_writing() existing_pdf.write_header() - existing_pdf.write_comment("created by PIL PDF driver " + __version__) + existing_pdf.write_comment("created by Pillow {} PDF driver".format(__version__)) # # pages @@ -140,7 +139,7 @@ def _save(im, fp, filename, save_all=False): PdfParser.PdfName("Indexed"), PdfParser.PdfName("DeviceRGB"), 255, - PdfParser.PdfBinary(palette) + PdfParser.PdfBinary(palette), ] procset = "ImageI" # indexed color elif im.mode == "RGB": @@ -164,16 +163,15 @@ def _save(im, fp, filename, save_all=False): # FIXME: the hex encoder doesn't support packed 1-bit # images; do things the hard way... data = im.tobytes("raw", "1") - im = Image.new("L", (len(data), 1), None) + im = Image.new("L", im.size) im.putdata(data) - ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)]) elif filter == "DCTDecode": Image.SAVE["JPEG"](im, op, filename) elif filter == "FlateDecode": - ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + ImageFile._save(im, op, [("zip", (0, 0) + im.size, 0, im.mode)]) elif filter == "RunLengthDecode": - ImageFile._save(im, op, - [("packbits", (0, 0)+im.size, 0, im.mode)]) + ImageFile._save(im, op, [("packbits", (0, 0) + im.size, 0, im.mode)]) else: raise ValueError("unsupported PDF filter (%s)" % filter) @@ -182,48 +180,46 @@ def _save(im, fp, filename, save_all=False): width, height = im.size - existing_pdf.write_obj(image_refs[pageNumber], - stream=op.getvalue(), - Type=PdfParser.PdfName("XObject"), - Subtype=PdfParser.PdfName("Image"), - Width=width, # * 72.0 / resolution, - Height=height, # * 72.0 / resolution, - Filter=PdfParser.PdfName(filter), - BitsPerComponent=bits, - DecodeParams=params, - ColorSpace=colorspace) + existing_pdf.write_obj( + image_refs[pageNumber], + stream=op.getvalue(), + Type=PdfParser.PdfName("XObject"), + Subtype=PdfParser.PdfName("Image"), + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Filter=PdfParser.PdfName(filter), + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace, + ) # # page - existing_pdf.write_page(page_refs[pageNumber], - Resources=PdfParser.PdfDict( - ProcSet=[ - PdfParser.PdfName("PDF"), - PdfParser.PdfName(procset) - ], - XObject=PdfParser.PdfDict( - image=image_refs[pageNumber] - ) - ), - MediaBox=[ - 0, - 0, - int(width * 72.0 / resolution), - int(height * 72.0 / resolution) - ], - Contents=contents_refs[pageNumber]) + existing_pdf.write_page( + page_refs[pageNumber], + Resources=PdfParser.PdfDict( + ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], + XObject=PdfParser.PdfDict(image=image_refs[pageNumber]), + ), + MediaBox=[ + 0, 
+ 0, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + ], + Contents=contents_refs[pageNumber], + ) # # page contents - page_contents = PdfParser.make_bytes( - "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( - int(width * 72.0 / resolution), - int(height * 72.0 / resolution))) + page_contents = b"q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + ) - existing_pdf.write_obj(contents_refs[pageNumber], - stream=page_contents) + existing_pdf.write_obj(contents_refs[pageNumber], stream=page_contents) pageNumber += 1 @@ -234,6 +230,7 @@ def _save(im, fp, filename, save_all=False): fp.flush() existing_pdf.close() + # # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/PdfParser.py b/server/www/packages/packages-linux/x64/PIL/PdfParser.py index bcd29db..fdb35ed 100644 --- a/server/www/packages/packages-linux/x64/PIL/PdfParser.py +++ b/server/www/packages/packages-linux/x64/PIL/PdfParser.py @@ -6,20 +6,6 @@ import os import re import time import zlib -from ._util import py3 - -try: - from UserDict import UserDict # Python 2.x -except ImportError: - UserDict = collections.UserDict # Python 3.x - - -if py3: # Python 3.x - def make_bytes(s): - return s.encode("us-ascii") -else: # Python 2.x - def make_bytes(s): # pragma: no cover - return s # pragma: no cover # see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set @@ -29,62 +15,61 @@ def encode_text(s): PDFDocEncoding = { - 0x16: u"\u0017", - 0x18: u"\u02D8", - 0x19: u"\u02C7", - 0x1A: u"\u02C6", - 0x1B: u"\u02D9", - 0x1C: u"\u02DD", - 0x1D: u"\u02DB", - 0x1E: u"\u02DA", - 0x1F: u"\u02DC", - 0x80: u"\u2022", - 0x81: u"\u2020", - 0x82: u"\u2021", - 0x83: u"\u2026", - 0x84: u"\u2014", - 0x85: u"\u2013", - 0x86: u"\u0192", - 0x87: u"\u2044", - 0x88: u"\u2039", - 0x89: u"\u203A", - 0x8A: u"\u2212", - 0x8B: u"\u2030", - 0x8C: u"\u201E", - 0x8D: u"\u201C", - 0x8E: u"\u201D", - 0x8F: u"\u2018", - 0x90: u"\u2019", - 0x91: u"\u201A", - 0x92: u"\u2122", - 0x93: u"\uFB01", - 0x94: u"\uFB02", - 0x95: u"\u0141", - 0x96: u"\u0152", - 0x97: u"\u0160", - 0x98: u"\u0178", - 0x99: u"\u017D", - 0x9A: u"\u0131", - 0x9B: u"\u0142", - 0x9C: u"\u0153", - 0x9D: u"\u0161", - 0x9E: u"\u017E", - 0xA0: u"\u20AC", + 0x16: "\u0017", + 0x18: "\u02D8", + 0x19: "\u02C7", + 0x1A: "\u02C6", + 0x1B: "\u02D9", + 0x1C: "\u02DD", + 0x1D: "\u02DB", + 0x1E: "\u02DA", + 0x1F: "\u02DC", + 0x80: "\u2022", + 0x81: "\u2020", + 0x82: "\u2021", + 0x83: "\u2026", + 0x84: "\u2014", + 0x85: "\u2013", + 0x86: "\u0192", + 0x87: "\u2044", + 0x88: "\u2039", + 0x89: "\u203A", + 0x8A: "\u2212", + 0x8B: "\u2030", + 0x8C: "\u201E", + 0x8D: "\u201C", + 0x8E: "\u201D", + 0x8F: "\u2018", + 0x90: "\u2019", + 0x91: "\u201A", + 0x92: "\u2122", + 0x93: "\uFB01", + 0x94: "\uFB02", + 0x95: "\u0141", + 0x96: "\u0152", + 0x97: "\u0160", + 0x98: "\u0178", + 0x99: "\u017D", + 0x9A: "\u0131", + 0x9B: "\u0142", + 0x9C: "\u0153", + 0x9D: "\u0161", + 0x9E: "\u017E", + 0xA0: "\u20AC", } def decode_text(b): - if b[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: - return b[len(codecs.BOM_UTF16_BE):].decode("utf_16_be") - elif py3: # Python 3.x + if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be") + else: return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) - else: # Python 2.x - return u"".join(PDFDocEncoding.get(ord(byte), byte) for byte in b) class PdfFormatError(RuntimeError): """An 
error that probably indicates a syntactic or semantic error in the PDF file structure""" + pass @@ -93,8 +78,9 @@ def check_format_condition(condition, error_message): raise PdfFormatError(error_message) -class IndirectReference(collections.namedtuple("IndirectReferenceTuple", - ["object_id", "generation"])): +class IndirectReference( + collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"]) +): def __str__(self): return "%s %s R" % self @@ -102,9 +88,11 @@ class IndirectReference(collections.namedtuple("IndirectReferenceTuple", return self.__str__().encode("us-ascii") def __eq__(self, other): - return other.__class__ is self.__class__ and \ - other.object_id == self.object_id and \ - other.generation == self.generation + return ( + other.__class__ is self.__class__ + and other.object_id == self.object_id + and other.generation == self.generation + ) def __ne__(self, other): return not (self == other) @@ -120,9 +108,9 @@ class IndirectObjectDef(IndirectReference): class XrefTable: def __init__(self): - self.existing_entries = {} # object ID => (offset, generation) - self.new_entries = {} # object ID => (offset, generation) - self.deleted_entries = {0: 65536} # object ID => generation + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation self.reading_finished = False def __setitem__(self, key, value): @@ -150,26 +138,27 @@ class XrefTable: elif key in self.deleted_entries: generation = self.deleted_entries[key] else: - raise IndexError("object ID " + str(key) + - " cannot be deleted because it doesn't exist") + raise IndexError( + "object ID " + str(key) + " cannot be deleted because it doesn't exist" + ) def __contains__(self, key): return key in self.existing_entries or key in self.new_entries def __len__(self): - return len(set(self.existing_entries.keys()) | - set(self.new_entries.keys()) | - set(self.deleted_entries.keys())) + return len( + set(self.existing_entries.keys()) + | set(self.new_entries.keys()) + | set(self.deleted_entries.keys()) + ) def keys(self): return ( - set(self.existing_entries.keys()) - - set(self.deleted_entries.keys()) + set(self.existing_entries.keys()) - set(self.deleted_entries.keys()) ) | set(self.new_entries.keys()) def write(self, f): - keys = sorted(set(self.new_entries.keys()) | - set(self.deleted_entries.keys())) + keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) deleted_keys = sorted(set(self.deleted_entries.keys())) startxref = f.tell() f.write(b"xref\n") @@ -177,7 +166,7 @@ class XrefTable: # find a contiguous sequence of object IDs prev = None for index, key in enumerate(keys): - if prev is None or prev+1 == key: + if prev is None or prev + 1 == key: prev = key else: contiguous_keys = keys[:index] @@ -186,25 +175,25 @@ class XrefTable: else: contiguous_keys = keys keys = None - f.write(make_bytes("%d %d\n" % - (contiguous_keys[0], len(contiguous_keys)))) + f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys))) for object_id in contiguous_keys: if object_id in self.new_entries: - f.write(make_bytes("%010d %05d n \n" % - self.new_entries[object_id])) + f.write(b"%010d %05d n \n" % self.new_entries[object_id]) else: this_deleted_object_id = deleted_keys.pop(0) - check_format_condition(object_id == this_deleted_object_id, - "expected the next deleted object " - "ID to be %s, instead found %s" % - (object_id, this_deleted_object_id)) + check_format_condition( + 
object_id == this_deleted_object_id, + "expected the next deleted object ID to be %s, instead found %s" + % (object_id, this_deleted_object_id), + ) try: next_in_linked_list = deleted_keys[0] except IndexError: next_in_linked_list = 0 - f.write(make_bytes("%010d %05d f \n" % - (next_in_linked_list, - self.deleted_entries[object_id]))) + f.write( + b"%010d %05d f \n" + % (next_in_linked_list, self.deleted_entries[object_id]) + ) return startxref @@ -221,8 +210,9 @@ class PdfName: return self.name.decode("us-ascii") def __eq__(self, other): - return (isinstance(other, PdfName) and other.name == self.name) or \ - other == self.name + return ( + isinstance(other, PdfName) and other.name == self.name + ) or other == self.name def __hash__(self): return hash(self.name) @@ -234,71 +224,53 @@ class PdfName: def from_pdf_stream(cls, data): return cls(PdfParser.interpret_name(data)) - allowed_chars = set(range(33, 127)) - set(ord(c) for c in "#%/()<>[]{}") + allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"} def __bytes__(self): result = bytearray(b"/") for b in self.name: - if py3: # Python 3.x - if b in self.allowed_chars: - result.append(b) - else: - result.extend(make_bytes("#%02X" % b)) - else: # Python 2.x - if ord(b) in self.allowed_chars: - result.append(b) - else: - result.extend(b"#%02X" % ord(b)) + if b in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % b) return bytes(result) - __str__ = __bytes__ - class PdfArray(list): def __bytes__(self): return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" - __str__ = __bytes__ - -class PdfDict(UserDict): +class PdfDict(collections.UserDict): def __setattr__(self, key, value): if key == "data": - if hasattr(UserDict, "__setattr__"): - UserDict.__setattr__(self, key, value) - else: - self.__dict__[key] = value + collections.UserDict.__setattr__(self, key, value) else: - if isinstance(key, str): - key = key.encode("us-ascii") - self[key] = value + self[key.encode("us-ascii")] = value def __getattr__(self, key): try: - value = self[key] + value = self[key.encode("us-ascii")] except KeyError: - try: - value = self[key.encode("us-ascii")] - except KeyError: - raise AttributeError(key) + raise AttributeError(key) if isinstance(value, bytes): value = decode_text(value) if key.endswith("Date"): if value.startswith("D:"): value = value[2:] - relationship = 'Z' + relationship = "Z" if len(value) > 17: relationship = value[14] offset = int(value[15:17]) * 60 if len(value) > 20: offset += int(value[18:20]) - format = '%Y%m%d%H%M%S'[:len(value) - 2] - value = time.strptime(value[:len(format)+2], format) - if relationship in ['+', '-']: + format = "%Y%m%d%H%M%S"[: len(value) - 2] + value = time.strptime(value[: len(format) + 2], format) + if relationship in ["+", "-"]: offset *= 60 - if relationship == '+': + if relationship == "+": offset *= -1 value = time.gmtime(calendar.timegm(value) + offset) return value @@ -316,20 +288,13 @@ class PdfDict(UserDict): out.extend(b"\n>>") return bytes(out) - if not py3: - __str__ = __bytes__ - class PdfBinary: def __init__(self, data): self.data = data - if py3: # Python 3.x - def __bytes__(self): - return make_bytes("<%s>" % "".join("%02X" % b for b in self.data)) - else: # Python 2.x - def __str__(self): - return "<%s>" % "".join("%02X" % ord(b) for b in self.data) + def __bytes__(self): + return b"<%s>" % b"".join(b"%02X" % b for b in self.data) class PdfStream: @@ -350,8 +315,8 @@ class PdfStream: return zlib.decompress(self.buf, bufsize=int(expected_length)) else: raise 
NotImplementedError( - "stream filter %s unknown/unsupported" % - repr(self.dictionary.Filter)) + "stream filter %s unknown/unsupported" % repr(self.dictionary.Filter) + ) def pdf_repr(x): @@ -361,19 +326,17 @@ def pdf_repr(x): return b"false" elif x is None: return b"null" - elif (isinstance(x, PdfName) or isinstance(x, PdfDict) or - isinstance(x, PdfArray) or isinstance(x, PdfBinary)): + elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)): return bytes(x) elif isinstance(x, int): return str(x).encode("us-ascii") elif isinstance(x, time.struct_time): - return b'(D:'+time.strftime('%Y%m%d%H%M%SZ', x).encode("us-ascii")+b')' + return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")" elif isinstance(x, dict): return bytes(PdfDict(x)) elif isinstance(x, list): return bytes(PdfArray(x)) - elif ((py3 and isinstance(x, str)) or - (not py3 and isinstance(x, unicode))): + elif isinstance(x, str): return pdf_repr(encode_text(x)) elif isinstance(x, bytes): # XXX escape more chars? handle binary garbage @@ -386,17 +349,14 @@ def pdf_repr(x): class PdfParser: - """Based on https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + """Based on + https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf Supports PDF up to 1.4 """ - def __init__(self, filename=None, f=None, - buf=None, start_offset=0, mode="rb"): - # type: (PdfParser, str, file, Union[bytes, bytearray], int, str) - # -> None + def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): if buf and f: - raise RuntimeError( - "specify buf or f or filename, but not both buf and f") + raise RuntimeError("specify buf or f or filename, but not both buf and f") self.filename = filename self.buf = buf self.f = f @@ -463,20 +423,20 @@ class PdfParser: self.f.write(b"%PDF-1.4\n") def write_comment(self, s): - self.f.write(("%% %s\n" % (s,)).encode("utf-8")) + self.f.write(("% {}\n".format(s)).encode("utf-8")) def write_catalog(self): self.del_root() self.root_ref = self.next_object_id(self.f.tell()) self.pages_ref = self.next_object_id(0) self.rewrite_pages() - self.write_obj(self.root_ref, - Type=PdfName(b"Catalog"), - Pages=self.pages_ref) - self.write_obj(self.pages_ref, - Type=PdfName(b"Pages"), - Count=len(self.pages), - Kids=self.pages) + self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref) + self.write_obj( + self.pages_ref, + Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages, + ) return self.root_ref def rewrite_pages(self): @@ -522,8 +482,11 @@ class PdfParser: if self.info: trailer_dict[b"Info"] = self.info_ref self.last_xref_section_offset = start_xref - self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) + - make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref)) + self.f.write( + b"trailer\n" + + bytes(PdfDict(trailer_dict)) + + b"\nstartxref\n%d\n%%%%EOF" % start_xref + ) def write_page(self, ref, *objs, **dict_obj): if isinstance(ref, int): @@ -585,12 +548,14 @@ class PdfParser: else: self.info = PdfDict(self.read_indirect(self.info_ref)) check_format_condition(b"Type" in self.root, "/Type missing in Root") - check_format_condition(self.root[b"Type"] == b"Catalog", - "/Type in Root is not /Catalog") + check_format_condition( + self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog" + ) check_format_condition(b"Pages" in self.root, "/Pages missing in Root") - check_format_condition(isinstance(self.root[b"Pages"], - IndirectReference), - "/Pages in Root is not an indirect reference") + 
check_format_condition( + isinstance(self.root[b"Pages"], IndirectReference), + "/Pages in Root is not an indirect reference", + ) self.pages_ref = self.root[b"Pages"] self.page_tree_root = self.read_indirect(self.pages_ref) self.pages = self.linearize_page_tree(self.page_tree_root) @@ -618,13 +583,34 @@ class PdfParser: newline_only = br"[\r\n]+" newline = whitespace_optional + newline_only + whitespace_optional re_trailer_end = re.compile( - whitespace_mandatory + br"trailer" + whitespace_optional + - br"\<\<(.*\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" + - newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL) + whitespace_mandatory + + br"trailer" + + whitespace_optional + + br"\<\<(.*\>\>)" + + newline + + br"startxref" + + newline + + br"([0-9]+)" + + newline + + br"%%EOF" + + whitespace_optional + + br"$", + re.DOTALL, + ) re_trailer_prev = re.compile( - whitespace_optional + br"trailer" + whitespace_optional + - br"\<\<(.*?\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" + - newline + br"%%EOF" + whitespace_optional, re.DOTALL) + whitespace_optional + + br"trailer" + + whitespace_optional + + br"\<\<(.*?\>\>)" + + newline + + br"startxref" + + newline + + br"([0-9]+)" + + newline + + br"%%EOF" + + whitespace_optional, + re.DOTALL, + ) def read_trailer(self): search_start_offset = len(self.buf) - 16384 @@ -636,7 +622,7 @@ class PdfParser: last_match = m while m: last_match = m - m = self.re_trailer_end.search(self.buf, m.start()+16) + m = self.re_trailer_end.search(self.buf, m.start() + 16) if not m: m = last_match trailer_data = m.group(1) @@ -648,26 +634,29 @@ class PdfParser: self.read_prev_trailer(self.trailer_dict[b"Prev"]) def read_prev_trailer(self, xref_section_offset): - trailer_offset = self.read_xref_table( - xref_section_offset=xref_section_offset) + trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) m = self.re_trailer_prev.search( - self.buf[trailer_offset:trailer_offset+16384]) + self.buf[trailer_offset : trailer_offset + 16384] + ) check_format_condition(m, "previous trailer not found") trailer_data = m.group(1) - check_format_condition(int(m.group(2)) == xref_section_offset, - "xref section offset in previous trailer " - "doesn't match what was expected") + check_format_condition( + int(m.group(2)) == xref_section_offset, + "xref section offset in previous trailer doesn't match what was expected", + ) trailer_dict = self.interpret_trailer(trailer_data) if b"Prev" in trailer_dict: self.read_prev_trailer(trailer_dict[b"Prev"]) re_whitespace_optional = re.compile(whitespace_optional) re_name = re.compile( - whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + - delimiter_or_ws + br")") + whitespace_optional + + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + + delimiter_or_ws + + br")" + ) re_dict_start = re.compile(whitespace_optional + br"\<\<") - re_dict_end = re.compile( - whitespace_optional + br"\>\>" + whitespace_optional) + re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional) @classmethod def interpret_trailer(cls, trailer_data): @@ -679,19 +668,21 @@ class PdfParser: m = cls.re_dict_end.match(trailer_data, offset) check_format_condition( m and m.end() == len(trailer_data), - "name not found in trailer, remaining data: " + - repr(trailer_data[offset:])) + "name not found in trailer, remaining data: " + + repr(trailer_data[offset:]), + ) break key = cls.interpret_name(m.group(1)) value, offset = cls.get_value(trailer_data, m.end()) trailer[key] = value check_format_condition( 
b"Size" in trailer and isinstance(trailer[b"Size"], int), - "/Size not in trailer or not an integer") + "/Size not in trailer or not an integer", + ) check_format_condition( - b"Root" in trailer and - isinstance(trailer[b"Root"], IndirectReference), - "/Root not in trailer or not an indirect reference") + b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), + "/Root not in trailer or not an indirect reference", + ) return trailer re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?") @@ -701,8 +692,7 @@ class PdfParser: name = b"" for m in cls.re_hashes_in_name.finditer(raw): if m.group(3): - name += m.group(1) + \ - bytearray.fromhex(m.group(3).decode("us-ascii")) + name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii")) else: name += m.group(1) if as_text: @@ -710,37 +700,54 @@ class PdfParser: else: return bytes(name) - re_null = re.compile( - whitespace_optional + br"null(?=" + delimiter_or_ws + br")") - re_true = re.compile( - whitespace_optional + br"true(?=" + delimiter_or_ws + br")") - re_false = re.compile( - whitespace_optional + br"false(?=" + delimiter_or_ws + br")") + re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")") + re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")") + re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")") re_int = re.compile( - whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")") + whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")" + ) re_real = re.compile( - whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + - delimiter_or_ws + br")") + whitespace_optional + + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + + delimiter_or_ws + + br")" + ) re_array_start = re.compile(whitespace_optional + br"\[") re_array_end = re.compile(whitespace_optional + br"]") re_string_hex = re.compile( - whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>") + whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>" + ) re_string_lit = re.compile(whitespace_optional + br"\(") re_indirect_reference = re.compile( - whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + - br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws + - br")") + whitespace_optional + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"R(?=" + + delimiter_or_ws + + br")" + ) re_indirect_def_start = re.compile( - whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + - br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" + - delimiter_or_ws + br")") + whitespace_optional + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"obj(?=" + + delimiter_or_ws + + br")" + ) re_indirect_def_end = re.compile( - whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")") + whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")" + ) re_comment = re.compile( - br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*") + br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*" + ) re_stream_start = re.compile(whitespace_optional + br"stream\r?\n") re_stream_end = re.compile( - whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")") + whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")" + ) @classmethod def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): @@ -753,32 +760,37 @@ class PdfParser: if m: check_format_condition( int(m.group(1)) > 0, 
- "indirect object definition: object ID must be greater than 0") + "indirect object definition: object ID must be greater than 0", + ) check_format_condition( int(m.group(2)) >= 0, - "indirect object definition: generation must be non-negative") + "indirect object definition: generation must be non-negative", + ) check_format_condition( - expect_indirect is None or expect_indirect == - IndirectReference(int(m.group(1)), int(m.group(2))), - "indirect object definition different than expected") - object, offset = cls.get_value( - data, m.end(), max_nesting=max_nesting-1) + expect_indirect is None + or expect_indirect + == IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected", + ) + object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1) if offset is None: return object, None m = cls.re_indirect_def_end.match(data, offset) - check_format_condition( - m, "indirect object definition end not found") + check_format_condition(m, "indirect object definition end not found") return object, m.end() check_format_condition( - not expect_indirect, "indirect object definition not found") + not expect_indirect, "indirect object definition not found" + ) m = cls.re_indirect_reference.match(data, offset) if m: check_format_condition( int(m.group(1)) > 0, - "indirect object reference: object ID must be greater than 0") + "indirect object reference: object ID must be greater than 0", + ) check_format_condition( int(m.group(2)) >= 0, - "indirect object reference: generation must be non-negative") + "indirect object reference: generation must be non-negative", + ) return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() m = cls.re_dict_start.match(data, offset) if m: @@ -786,12 +798,10 @@ class PdfParser: result = {} m = cls.re_dict_end.match(data, offset) while not m: - key, offset = cls.get_value( - data, offset, max_nesting=max_nesting-1) + key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) if offset is None: return result, None - value, offset = cls.get_value( - data, offset, max_nesting=max_nesting-1) + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) result[key] = value if offset is None: return result, None @@ -803,9 +813,10 @@ class PdfParser: stream_len = int(result[b"Length"]) except (TypeError, KeyError, ValueError): raise PdfFormatError( - "bad or missing Length in stream dict (%r)" % - result.get(b"Length", None)) - stream_data = data[m.end():m.end() + stream_len] + "bad or missing Length in stream dict (%r)" + % result.get(b"Length", None) + ) + stream_data = data[m.end() : m.end() + stream_len] m = cls.re_stream_end.match(data, m.end() + stream_len) check_format_condition(m, "stream end not found") offset = m.end() @@ -819,8 +830,7 @@ class PdfParser: result = [] m = cls.re_array_end.match(data, offset) while not m: - value, offset = cls.get_value( - data, offset, max_nesting=max_nesting-1) + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) result.append(value) if offset is None: return result, None @@ -848,10 +858,9 @@ class PdfParser: m = cls.re_string_hex.match(data, offset) if m: # filter out whitespace - hex_string = bytearray([ - b for b in m.group(1) - if b in b"0123456789abcdefABCDEF" - ]) + hex_string = bytearray( + [b for b in m.group(1) if b in b"0123456789abcdefABCDEF"] + ) if len(hex_string) % 2 == 1: # append a 0 if the length is not even - yes, at the end hex_string.append(ord(b"0")) @@ -860,10 +869,11 @@ class PdfParser: if m: 
return cls.get_literal_string(data, m.end()) # return None, offset # fallback (only for debugging) - raise PdfFormatError( - "unrecognized object: " + repr(data[offset:offset+32])) + raise PdfFormatError("unrecognized object: " + repr(data[offset : offset + 32])) - re_lit_str_token = re.compile(br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))") + re_lit_str_token = re.compile( + br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))" + ) escaped_chars = { b"n": b"\n", b"r": b"\r", @@ -881,14 +891,14 @@ class PdfParser: ord(b"("): b"(", ord(b")"): b")", ord(b"\\"): b"\\", - } + } @classmethod def get_literal_string(cls, data, offset): nesting_depth = 0 result = bytearray() for m in cls.re_lit_str_token.finditer(data, offset): - result.extend(data[offset:m.start()]) + result.extend(data[offset : m.start()]) if m.group(1): result.extend(cls.escaped_chars[m.group(1)[1]]) elif m.group(2): @@ -908,30 +918,36 @@ class PdfParser: offset = m.end() raise PdfFormatError("unfinished literal string") - re_xref_section_start = re.compile( - whitespace_optional + br"xref" + newline) + re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline) re_xref_subsection_start = re.compile( - whitespace_optional + br"([0-9]+)" + whitespace_mandatory + - br"([0-9]+)" + whitespace_optional + newline_only) + whitespace_optional + + br"([0-9]+)" + + whitespace_mandatory + + br"([0-9]+)" + + whitespace_optional + + newline_only + ) re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") def read_xref_table(self, xref_section_offset): subsection_found = False m = self.re_xref_section_start.match( - self.buf, xref_section_offset + self.start_offset) + self.buf, xref_section_offset + self.start_offset + ) check_format_condition(m, "xref section start not found") offset = m.end() while True: m = self.re_xref_subsection_start.match(self.buf, offset) if not m: check_format_condition( - subsection_found, "xref subsection start not found") + subsection_found, "xref subsection start not found" + ) break subsection_found = True offset = m.end() first_object = int(m.group(1)) num_objects = int(m.group(2)) - for i in range(first_object, first_object+num_objects): + for i in range(first_object, first_object + num_objects): m = self.re_xref_entry.match(self.buf, offset) check_format_condition(m, "xref entry not found") offset = m.end() @@ -940,9 +956,9 @@ class PdfParser: if not is_free: new_entry = (int(m.group(1)), generation) check_format_condition( - i not in self.xref_table or - self.xref_table[i] == new_entry, - "xref entry duplicated (and not identical)") + i not in self.xref_table or self.xref_table[i] == new_entry, + "xref entry duplicated (and not identical)", + ) self.xref_table[i] = new_entry return offset @@ -952,10 +968,14 @@ class PdfParser: generation == ref[1], "expected to find generation %s for object ID %s in xref table, " "instead found generation %s at offset %s" - % (ref[1], ref[0], generation, offset)) - value = self.get_value(self.buf, offset + self.start_offset, - expect_indirect=IndirectReference(*ref), - max_nesting=max_nesting)[0] + % (ref[1], ref[0], generation, offset), + ) + value = self.get_value( + self.buf, + offset + self.start_offset, + expect_indirect=IndirectReference(*ref), + max_nesting=max_nesting, + )[0] self.cached_objects[ref] = value return value @@ -963,7 +983,8 @@ class PdfParser: if node is None: node = self.page_tree_root check_format_condition( - node[b"Type"] == b"Pages", "/Type of page tree node is 
not /Pages") + node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages" + ) pages = [] for kid in node[b"Kids"]: kid_object = self.read_indirect(kid) diff --git a/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py index d07b28d..5ea32ba 100644 --- a/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py @@ -22,12 +22,10 @@ from . import Image, ImageFile from ._binary import i16le as i16 -__version__ = "0.1" - - # # helpers + def _accept(prefix): return prefix[:4] == b"\200\350\000\000" @@ -35,6 +33,7 @@ def _accept(prefix): ## # Image plugin for PIXAR raster images. + class PixarImageFile(ImageFile.ImageFile): format = "PIXAR" @@ -60,7 +59,7 @@ class PixarImageFile(ImageFile.ImageFile): # FIXME: to be continued... # create tile descriptor (assuming "dumped") - self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] # diff --git a/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py index 15077fc..ee9d52b 100644 --- a/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py @@ -31,16 +31,15 @@ # See the README file for information on usage and redistribution. # +import itertools import logging import re -import zlib import struct +import warnings +import zlib -from . import Image, ImageFile, ImagePalette -from ._binary import i8, i16be as i16, i32be as i32, o16be as o16, o32be as o32 -from ._util import py3 - -__version__ = "0.9" +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i8, i16be as i16, i32be as i32, o8, o16be as o16, o32be as o32 logger = logging.getLogger(__name__) @@ -52,25 +51,30 @@ _MAGIC = b"\211PNG\r\n\032\n" _MODES = { # supported bits/color combinations, and corresponding modes/rawmodes - (1, 0): ("1", "1"), - (2, 0): ("L", "L;2"), - (4, 0): ("L", "L;4"), - (8, 0): ("L", "L"), + # Greyscale + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), (16, 0): ("I", "I;16B"), - (8, 2): ("RGB", "RGB"), + # Truecolour + (8, 2): ("RGB", "RGB"), (16, 2): ("RGB", "RGB;16B"), - (1, 3): ("P", "P;1"), - (2, 3): ("P", "P;2"), - (4, 3): ("P", "P;4"), - (8, 3): ("P", "P"), - (8, 4): ("LA", "LA"), + # Indexed-colour + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + # Greyscale with alpha + (8, 4): ("LA", "LA"), (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available - (8, 6): ("RGBA", "RGBA"), + # Truecolour with alpha + (8, 6): ("RGBA", "RGBA"), (16, 6): ("RGBA", "RGBA;16B"), } -_simple_palette = re.compile(b'^\xff*\x00\xff*$') +_simple_palette = re.compile(b"^\xff*\x00\xff*$") # Maximum decompressed size for a iTXt or zTXt chunk. 
# Eliminates decompression bombs where compressed chunks can expand 1000x @@ -79,6 +83,16 @@ MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK +# APNG frame disposal modes +APNG_DISPOSE_OP_NONE = 0 +APNG_DISPOSE_OP_BACKGROUND = 1 +APNG_DISPOSE_OP_PREVIOUS = 2 + +# APNG frame blend modes +APNG_BLEND_OP_SOURCE = 0 +APNG_BLEND_OP_OVER = 1 + + def _safe_zlib_decompress(s): dobj = zlib.decompressobj() plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) @@ -88,21 +102,21 @@ def _safe_zlib_decompress(s): def _crc32(data, seed=0): - return zlib.crc32(data, seed) & 0xffffffff + return zlib.crc32(data, seed) & 0xFFFFFFFF # -------------------------------------------------------------------- # Support classes. Suitable for PNG and related formats like MNG etc. -class ChunkStream(object): +class ChunkStream: def __init__(self, fp): self.fp = fp self.queue = [] def read(self): - "Fetch a new chunk. Returns header information." + """Fetch a new chunk. Returns header information.""" cid = None if self.queue: @@ -134,13 +148,13 @@ class ChunkStream(object): self.queue.append((cid, pos, length)) def call(self, cid, pos, length): - "Call the appropriate chunk handler" + """Call the appropriate chunk handler""" logger.debug("STREAM %r %s %s", cid, pos, length) - return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length) def crc(self, cid, data): - "Read and verify checksum" + """Read and verify checksum""" # Skip CRC checks for ancillary chunks if allowed to load truncated # images @@ -153,14 +167,12 @@ class ChunkStream(object): crc1 = _crc32(data, _crc32(cid)) crc2 = i32(self.fp.read(4)) if crc1 != crc2: - raise SyntaxError("broken PNG file (bad header checksum in %r)" - % cid) + raise SyntaxError("broken PNG file (bad header checksum in %r)" % cid) except struct.error: - raise SyntaxError("broken PNG file (incomplete checksum in %r)" - % cid) + raise SyntaxError("broken PNG file (incomplete checksum in %r)" % cid) def crc_skip(self, cid, data): - "Read checksum. Used if the C module is not present" + """Read checksum. Used if the C module is not present""" self.fp.read(4) @@ -175,7 +187,7 @@ class ChunkStream(object): try: cid, pos, length = self.read() except struct.error: - raise IOError("truncated PNG file") + raise OSError("truncated PNG file") if cid == endchunk: break @@ -191,8 +203,9 @@ class iTXt(str): keeping their extra information """ + @staticmethod - def __new__(cls, text, lang, tkey): + def __new__(cls, text, lang=None, tkey=None): """ :param cls: the class to use when creating the instance :param text: value for this key @@ -206,7 +219,7 @@ class iTXt(str): return self -class PngInfo(object): +class PngInfo: """ PNG chunk container (for use with save(pnginfo=)) @@ -246,11 +259,12 @@ class PngInfo(object): tkey = tkey.encode("utf-8", "strict") if zip: - self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + - zlib.compress(value)) + self.add( + b"iTXt", + key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value), + ) else: - self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + - value) + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value) def add_text(self, key, value, zip=False): """Appends a text chunk. 
@@ -267,12 +281,12 @@ class PngInfo(object): # The tEXt chunk stores latin-1 text if not isinstance(value, bytes): try: - value = value.encode('latin-1', 'strict') + value = value.encode("latin-1", "strict") except UnicodeError: return self.add_itxt(key, value, zip=zip) if not isinstance(key, bytes): - key = key.encode('latin-1', 'strict') + key = key.encode("latin-1", "strict") if zip: self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) @@ -283,11 +297,10 @@ class PngInfo(object): # -------------------------------------------------------------------- # PNG image stream (IHDR/IEND) + class PngStream(ChunkStream): - def __init__(self, fp): - - ChunkStream.__init__(self, fp) + super().__init__(fp) # local copies of Image attributes self.im_info = {} @@ -296,14 +309,32 @@ class PngStream(ChunkStream): self.im_mode = None self.im_tile = None self.im_palette = None + self.im_custom_mimetype = None + self.im_n_frames = None + self._seq_num = None + self.rewind_state = None self.text_memory = 0 def check_text_memory(self, chunklen): self.text_memory += chunklen if self.text_memory > MAX_TEXT_MEMORY: - raise ValueError("Too much memory used in text chunks: " - "%s>MAX_TEXT_MEMORY" % self.text_memory) + raise ValueError( + "Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" + % self.text_memory + ) + + def save_rewind(self): + self.rewind_state = { + "info": self.im_info.copy(), + "tile": self.im_tile, + "seq_num": self._seq_num, + } + + def rewind(self): + self.im_info = self.rewind_state["info"] + self.im_tile = self.rewind_state["tile"] + self._seq_num = self.rewind_state["seq_num"] def chunk_iCCP(self, pos, length): @@ -319,10 +350,11 @@ class PngStream(ChunkStream): logger.debug("Compression method %s", i8(s[i])) comp_method = i8(s[i]) if comp_method != 0: - raise SyntaxError("Unknown compression method %s in iCCP chunk" % - comp_method) + raise SyntaxError( + "Unknown compression method %s in iCCP chunk" % comp_method + ) try: - icc_profile = _safe_zlib_decompress(s[i+2:]) + icc_profile = _safe_zlib_decompress(s[i + 2 :]) except ValueError: if ImageFile.LOAD_TRUNCATED_IMAGES: icc_profile = None @@ -340,7 +372,7 @@ class PngStream(ChunkStream): self.im_size = i32(s), i32(s[4:]) try: self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] - except: + except Exception: pass if i8(s[12]): self.im_info["interlace"] = 1 @@ -351,7 +383,13 @@ class PngStream(ChunkStream): def chunk_IDAT(self, pos, length): # image data - self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + if "bbox" in self.im_info: + tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)] + else: + if self.im_n_frames is not None: + self.im_info["default_image"] = True + tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)] + self.im_tile = tile self.im_idat = length raise EOFError @@ -383,7 +421,7 @@ class PngStream(ChunkStream): # otherwise, we have a byte string with one alpha value # for each palette entry self.im_info["transparency"] = s - elif self.im_mode == "L": + elif self.im_mode in ("1", "L", "I"): self.im_info["transparency"] = i16(s) elif self.im_mode == "RGB": self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) @@ -400,8 +438,8 @@ class PngStream(ChunkStream): # WP x,y, Red x,y, Green x,y Blue x,y s = ImageFile._safe_read(self.fp, length) - raw_vals = struct.unpack('>%dI' % (len(s) // 4), s) - self.im_info['chromaticity'] = tuple(elt/100000.0 for elt in raw_vals) + raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) + self.im_info["chromaticity"] = tuple(elt / 
100000.0 for elt in raw_vals) return s def chunk_sRGB(self, pos, length): @@ -412,7 +450,7 @@ class PngStream(ChunkStream): # 3 absolute colorimetric s = ImageFile._safe_read(self.fp, length) - self.im_info['srgb'] = i8(s) + self.im_info["srgb"] = i8(s) return s def chunk_pHYs(self, pos, length): @@ -439,9 +477,8 @@ class PngStream(ChunkStream): k = s v = b"" if k: - if py3: - k = k.decode('latin-1', 'strict') - v = v.decode('latin-1', 'replace') + k = k.decode("latin-1", "strict") + v = v.decode("latin-1", "replace") self.im_info[k] = self.im_text[k] = v self.check_text_memory(len(v)) @@ -462,8 +499,9 @@ class PngStream(ChunkStream): else: comp_method = 0 if comp_method != 0: - raise SyntaxError("Unknown compression method %s in zTXt chunk" % - comp_method) + raise SyntaxError( + "Unknown compression method %s in zTXt chunk" % comp_method + ) try: v = _safe_zlib_decompress(v[1:]) except ValueError: @@ -475,9 +513,8 @@ class PngStream(ChunkStream): v = b"" if k: - if py3: - k = k.decode('latin-1', 'strict') - v = v.decode('latin-1', 'replace') + k = k.decode("latin-1", "strict") + v = v.decode("latin-1", "replace") self.im_info[k] = self.im_text[k] = v self.check_text_memory(len(v)) @@ -512,24 +549,75 @@ class PngStream(ChunkStream): return s else: return s - if py3: - try: - k = k.decode("latin-1", "strict") - lang = lang.decode("utf-8", "strict") - tk = tk.decode("utf-8", "strict") - v = v.decode("utf-8", "strict") - except UnicodeError: - return s + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) self.check_text_memory(len(v)) return s + def chunk_eXIf(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + self.im_info["exif"] = b"Exif\x00\x00" + s + return s + + # APNG chunks + def chunk_acTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + if self.im_n_frames is not None: + self.im_n_frames = None + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + n_frames = i32(s) + if n_frames == 0 or n_frames > 0x80000000: + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + self.im_n_frames = n_frames + self.im_info["loop"] = i32(s[4:]) + self.im_custom_mimetype = "image/apng" + return s + + def chunk_fcTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + seq = i32(s) + if (self._seq_num is None and seq != 0) or ( + self._seq_num is not None and self._seq_num != seq - 1 + ): + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + width, height = i32(s[4:]), i32(s[8:]) + px, py = i32(s[12:]), i32(s[16:]) + im_w, im_h = self.im_size + if px + width > im_w or py + height > im_h: + raise SyntaxError("APNG contains invalid frames") + self.im_info["bbox"] = (px, py, px + width, py + height) + delay_num, delay_den = i16(s[20:]), i16(s[22:]) + if delay_den == 0: + delay_den = 100 + self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000 + self.im_info["disposal"] = i8(s[24]) + self.im_info["blend"] = i8(s[25]) + return s + + def chunk_fdAT(self, pos, length): + s = ImageFile._safe_read(self.fp, 4) + seq = i32(s) + if self._seq_num != seq - 1: + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + return self.chunk_IDAT(pos + 4, length - 4) + # -------------------------------------------------------------------- # PNG reader + def 
_accept(prefix): return prefix[:8] == _MAGIC @@ -537,6 +625,7 @@ def _accept(prefix): ## # Image plugin for PNG images. + class PngImageFile(ImageFile.ImageFile): format = "PNG" @@ -546,9 +635,11 @@ class PngImageFile(ImageFile.ImageFile): if self.fp.read(8) != _MAGIC: raise SyntaxError("not a PNG file") + self.__fp = self.fp + self.__frame = 0 # - # Parse headers up to the first IDAT chunk + # Parse headers up to the first IDAT or fDAT chunk self.png = PngStream(self.fp) @@ -579,17 +670,49 @@ class PngImageFile(ImageFile.ImageFile): self.mode = self.png.im_mode self._size = self.png.im_size self.info = self.png.im_info - self.text = self.png.im_text # experimental + self._text = None self.tile = self.png.im_tile + self.custom_mimetype = self.png.im_custom_mimetype + self.n_frames = self.png.im_n_frames or 1 + self.default_image = self.info.get("default_image", False) if self.png.im_palette: rawmode, data = self.png.im_palette self.palette = ImagePalette.raw(rawmode, data) - self.__idat = length # used by load_read() + if cid == b"fdAT": + self.__prepare_idat = length - 4 + else: + self.__prepare_idat = length # used by load_prepare() + + if self.png.im_n_frames is not None: + self._close_exclusive_fp_after_loading = False + self.png.save_rewind() + self.__rewind_idat = self.__prepare_idat + self.__rewind = self.__fp.tell() + if self.default_image: + # IDAT chunk contains default image and not first animation frame + self.n_frames += 1 + self._seek(0) + self.is_animated = self.n_frames > 1 + + @property + def text(self): + # experimental + if self._text is None: + # iTxt, tEXt and zTXt chunks may appear at the end of the file + # So load the file to ensure that they are read + if self.is_animated: + frame = self.__frame + # for APNG, seek to the final frame before loading + self.seek(self.n_frames - 1) + self.load() + if self.is_animated: + self.seek(frame) + return self._text def verify(self): - "Verify PNG file" + """Verify PNG file""" if self.fp is None: raise RuntimeError("verify must be called directly after open") @@ -600,18 +723,112 @@ class PngImageFile(ImageFile.ImageFile): self.png.verify() self.png.close() + if self._exclusive_fp: + self.fp.close() self.fp = None + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0, True) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in APNG file") + + def _seek(self, frame, rewind=False): + if frame == 0: + if rewind: + self.__fp.seek(self.__rewind) + self.png.rewind() + self.__prepare_idat = self.__rewind_idat + self.im = None + if self.pyaccess: + self.pyaccess = None + self.info = self.png.im_info + self.tile = self.png.im_tile + self.fp = self.__fp + self._prev_im = None + self.dispose = None + self.default_image = self.info.get("default_image", False) + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + self.__frame = 0 + return + else: + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + + # ensure previous frame was loaded + self.load() + + self.fp = self.__fp + + # advance to the next frame + if self.__prepare_idat: + ImageFile._safe_read(self.fp, self.__prepare_idat) + self.__prepare_idat = 0 + frame_start = False + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + 
break + + if cid == b"IEND": + raise EOFError("No more images in APNG file") + if cid == b"fcTL": + if frame_start: + # there must be at least one fdAT chunk between fcTL chunks + raise SyntaxError("APNG missing frame data") + frame_start = True + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + if frame_start: + self.__prepare_idat = length + break + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + ImageFile._safe_read(self.fp, length) + + self.__frame = frame + self.tile = self.png.im_tile + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + + if not self.tile: + raise EOFError + + def tell(self): + return self.__frame + def load_prepare(self): - "internal: prepare to read PNG file" + """internal: prepare to read PNG file""" if self.info.get("interlace"): self.decoderconfig = self.decoderconfig + (1,) + self.__idat = self.__prepare_idat # used by load_read() ImageFile.ImageFile.load_prepare(self) def load_read(self, read_bytes): - "internal: read more image data" + """internal: read more image data""" while self.__idat == 0: # end of chunk, skip forward to next one @@ -620,11 +837,18 @@ class PngImageFile(ImageFile.ImageFile): cid, pos, length = self.png.read() - if cid not in [b"IDAT", b"DDAT"]: + if cid not in [b"IDAT", b"DDAT", b"fdAT"]: self.png.push(cid, pos, length) return b"" - self.__idat = length # empty chunks are allowed + if cid == b"fdAT": + try: + self.png.call(cid, pos, length) + except EOFError: + pass + self.__idat = length - 4 # sequence_num has already been read + else: + self.__idat = length # empty chunks are allowed # read more data from this chunk if read_bytes <= 0: @@ -637,10 +861,95 @@ class PngImageFile(ImageFile.ImageFile): return self.fp.read(read_bytes) def load_end(self): - "internal: finished reading image data" + """internal: finished reading image data""" + while True: + self.fp.read(4) # CRC - self.png.close() - self.png = None + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + break + elif cid == b"fcTL" and self.is_animated: + # start of the next frame, stop reading + self.__prepare_idat = 0 + self.png.push(cid, pos, length) + break + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + ImageFile._safe_read(self.fp, length) + self._text = self.png.im_text + if not self.is_animated: + self.png.close() + self.png = None + else: + # setup frame disposal (actual disposal done when needed in _seek()) + if self._prev_im is None and self.dispose_op == APNG_DISPOSE_OP_PREVIOUS: + self.dispose_op = APNG_DISPOSE_OP_BACKGROUND + + if self.dispose_op == APNG_DISPOSE_OP_PREVIOUS: + dispose = self._prev_im.copy() + dispose = self._crop(dispose, self.dispose_extent) + elif self.dispose_op == APNG_DISPOSE_OP_BACKGROUND: + dispose = Image.core.fill("RGBA", self.size, (0, 0, 0, 0)) + dispose = self._crop(dispose, self.dispose_extent) + else: + dispose = None + + if self._prev_im and self.blend_op == APNG_BLEND_OP_OVER: + updated = self._crop(self.im, self.dispose_extent) + self._prev_im.paste( + updated, self.dispose_extent, updated.convert("RGBA") + ) + self.im = 
self._prev_im + if self.pyaccess: + self.pyaccess = None + self._prev_im = self.im.copy() + + if dispose: + self._prev_im.paste(dispose, self.dispose_extent) + + def _getexif(self): + if "exif" not in self.info: + self.load() + if "exif" not in self.info and "Raw profile type exif" not in self.info: + return None + return dict(self.getexif()) + + def getexif(self): + if "exif" not in self.info: + self.load() + + if self._exif is None: + self._exif = Image.Exif() + + exif_info = self.info.get("exif") + if exif_info is None and "Raw profile type exif" in self.info: + exif_info = bytes.fromhex( + "".join(self.info["Raw profile type exif"].split("\n")[3:]) + ) + self._exif.load(exif_info) + return self._exif + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None # -------------------------------------------------------------------- @@ -648,19 +957,20 @@ class PngImageFile(ImageFile.ImageFile): _OUTMODES = { # supported PIL modes, and corresponding rawmodes/bits/color combinations - "1": ("1", b'\x01\x00'), - "L;1": ("L;1", b'\x01\x00'), - "L;2": ("L;2", b'\x02\x00'), - "L;4": ("L;4", b'\x04\x00'), - "L": ("L", b'\x08\x00'), - "LA": ("LA", b'\x08\x04'), - "I": ("I;16B", b'\x10\x00'), - "P;1": ("P;1", b'\x01\x03'), - "P;2": ("P;2", b'\x02\x03'), - "P;4": ("P;4", b'\x04\x03'), - "P": ("P", b'\x08\x03'), - "RGB": ("RGB", b'\x08\x02'), - "RGBA": ("RGBA", b'\x08\x06'), + "1": ("1", b"\x01\x00"), + "L;1": ("L;1", b"\x01\x00"), + "L;2": ("L;2", b"\x02\x00"), + "L;4": ("L;4", b"\x04\x00"), + "L": ("L", b"\x08\x00"), + "LA": ("LA", b"\x08\x04"), + "I": ("I;16B", b"\x10\x00"), + "I;16": ("I;16B", b"\x10\x00"), + "P;1": ("P;1", b"\x01\x03"), + "P;2": ("P;2", b"\x02\x03"), + "P;4": ("P;4", b"\x04\x03"), + "P": ("P", b"\x08\x03"), + "RGB": ("RGB", b"\x08\x02"), + "RGBA": ("RGBA", b"\x08\x06"), } @@ -675,7 +985,7 @@ def putchunk(fp, cid, *data): fp.write(o32(crc)) -class _idat(object): +class _idat: # wrap output from the encoder in IDAT chunks def __init__(self, fp, chunk): @@ -686,7 +996,147 @@ class _idat(object): self.chunk(self.fp, b"IDAT", data) -def _save(im, fp, filename, chunk=putchunk): +class _fdat: + # wrap encoder output in fdAT chunks + + def __init__(self, fp, chunk, seq_num): + self.fp = fp + self.chunk = chunk + self.seq_num = seq_num + + def write(self, data): + self.chunk(self.fp, b"fdAT", o32(self.seq_num), data) + self.seq_num += 1 + + +def _write_multiple_frames(im, fp, chunk, rawmode): + default_image = im.encoderinfo.get("default_image", im.info.get("default_image")) + duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) + loop = im.encoderinfo.get("loop", im.info.get("loop", 0)) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) + blend = im.encoderinfo.get("blend", im.info.get("blend")) + + if default_image: + chain = itertools.chain(im.encoderinfo.get("append_images", [])) + else: + chain = itertools.chain([im], im.encoderinfo.get("append_images", [])) + + im_frames = [] + frame_count = 0 + for im_seq in chain: + for im_frame in ImageSequence.Iterator(im_seq): + im_frame = im_frame.copy() + if im_frame.mode != im.mode: + if im.mode == "P": + im_frame = im_frame.convert(im.mode, palette=im.palette) + else: + im_frame = im_frame.convert(im.mode) + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] 
+                if isinstance(blend, (list, tuple)):
+                    encoderinfo["blend"] = blend[frame_count]
+                frame_count += 1
+
+                if im_frames:
+                    previous = im_frames[-1]
+                    prev_disposal = previous["encoderinfo"].get("disposal")
+                    prev_blend = previous["encoderinfo"].get("blend")
+                    if prev_disposal == APNG_DISPOSE_OP_PREVIOUS and len(im_frames) < 2:
+                        prev_disposal = APNG_DISPOSE_OP_BACKGROUND
+
+                    if prev_disposal == APNG_DISPOSE_OP_BACKGROUND:
+                        base_im = previous["im"]
+                        dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0))
+                        bbox = previous["bbox"]
+                        if bbox:
+                            dispose = dispose.crop(bbox)
+                        else:
+                            bbox = (0, 0) + im.size
+                        base_im.paste(dispose, bbox)
+                    elif prev_disposal == APNG_DISPOSE_OP_PREVIOUS:
+                        base_im = im_frames[-2]["im"]
+                    else:
+                        base_im = previous["im"]
+                    delta = ImageChops.subtract_modulo(
+                        im_frame.convert("RGB"), base_im.convert("RGB")
+                    )
+                    bbox = delta.getbbox()
+                    if (
+                        not bbox
+                        and prev_disposal == encoderinfo.get("disposal")
+                        and prev_blend == encoderinfo.get("blend")
+                    ):
+                        duration = encoderinfo.get("duration", 0)
+                        if duration:
+                            if "duration" in previous["encoderinfo"]:
+                                previous["encoderinfo"]["duration"] += duration
+                            else:
+                                previous["encoderinfo"]["duration"] = duration
+                        continue
+                else:
+                    bbox = None
+            im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
+
+    # animation control
+    chunk(
+        fp, b"acTL", o32(len(im_frames)), o32(loop),  # 0: num_frames  # 4: num_plays
+    )
+
+    # default image IDAT (if it exists)
+    if default_image:
+        ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
+
+    seq_num = 0
+    for frame, frame_data in enumerate(im_frames):
+        im_frame = frame_data["im"]
+        if not frame_data["bbox"]:
+            bbox = (0, 0) + im_frame.size
+        else:
+            bbox = frame_data["bbox"]
+            im_frame = im_frame.crop(bbox)
+        size = im_frame.size
+        duration = int(round(frame_data["encoderinfo"].get("duration", 0)))
+        disposal = frame_data["encoderinfo"].get("disposal", APNG_DISPOSE_OP_NONE)
+        blend = frame_data["encoderinfo"].get("blend", APNG_BLEND_OP_SOURCE)
+        # frame control
+        chunk(
+            fp,
+            b"fcTL",
+            o32(seq_num),  # sequence_number
+            o32(size[0]),  # width
+            o32(size[1]),  # height
+            o32(bbox[0]),  # x_offset
+            o32(bbox[1]),  # y_offset
+            o16(duration),  # delay_numerator
+            o16(1000),  # delay_denominator
+            o8(disposal),  # dispose_op
+            o8(blend),  # blend_op
+        )
+        seq_num += 1
+        # frame data
+        if frame == 0 and not default_image:
+            # first frame must be in IDAT chunks for backwards compatibility
+            ImageFile._save(
+                im_frame,
+                _idat(fp, chunk),
+                [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+            )
+        else:
+            fdat_chunks = _fdat(fp, chunk, seq_num)
+            ImageFile._save(
+                im_frame, fdat_chunks, [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+            )
+            seq_num = fdat_chunks.seq_num
+
+
+def _save_all(im, fp, filename):
+    _save(im, fp, filename, save_all=True)
+
+
+def _save(im, fp, filename, chunk=putchunk, save_all=False):
     # save an image to disk (called by the save method)
 
     mode = im.mode
 
@@ -701,7 +1151,7 @@ def _save(im, fp, filename, chunk=putchunk):
     else:
         # check palette contents
         if im.palette:
-            colors = max(min(len(im.palette.getdata()[1])//3, 256), 2)
+            colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 2)
         else:
             colors = 256
 
@@ -717,28 +1167,34 @@ def _save(im, fp, filename, chunk=putchunk):
         mode = "%s;%d" % (mode, bits)
 
     # encoder options
-    im.encoderconfig = (im.encoderinfo.get("optimize", False),
-                        im.encoderinfo.get("compress_level", -1),
-                        im.encoderinfo.get("compress_type", -1),
-                        im.encoderinfo.get("dictionary", b""))
+    im.encoderconfig = (
+        im.encoderinfo.get("optimize", False),
+        im.encoderinfo.get("compress_level", -1),
+        im.encoderinfo.get("compress_type", -1),
+        im.encoderinfo.get("dictionary", b""),
+    )
 
     # get the corresponding PNG mode
     try:
         rawmode, mode = _OUTMODES[mode]
     except KeyError:
-        raise IOError("cannot write mode %s as PNG" % mode)
+        raise OSError("cannot write mode %s as PNG" % mode)
 
     #
     # write minimal PNG file
 
     fp.write(_MAGIC)
-    chunk(fp, b"IHDR",
-          o32(im.size[0]), o32(im.size[1]),  # 0: size
-          mode,  # 8: depth/type
-          b'\0',  # 10: compression
-          b'\0',  # 11: filter category
-          b'\0')  # 12: interlace flag
+    chunk(
+        fp,
+        b"IHDR",
+        o32(im.size[0]),  # 0: size
+        o32(im.size[1]),
+        mode,  # 8: depth/type
+        b"\0",  # 10: compression
+        b"\0",  # 11: filter category
+        b"\0",  # 12: interlace flag
+    )
 
     chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
 
@@ -772,23 +1228,22 @@ def _save(im, fp, filename, chunk=putchunk):
         palette_byte_number = (2 ** bits) * 3
         palette_bytes = im.im.getpalette("RGB")[:palette_byte_number]
         while len(palette_bytes) < palette_byte_number:
-            palette_bytes += b'\0'
+            palette_bytes += b"\0"
         chunk(fp, b"PLTE", palette_bytes)
 
-    transparency = im.encoderinfo.get('transparency',
-                                      im.info.get('transparency', None))
+    transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None))
 
     if transparency or transparency == 0:
         if im.mode == "P":
             # limit to actual palette size
-            alpha_bytes = 2**bits
+            alpha_bytes = 2 ** bits
             if isinstance(transparency, bytes):
                 chunk(fp, b"tRNS", transparency[:alpha_bytes])
             else:
                 transparency = max(0, min(255, transparency))
-                alpha = b'\xFF' * transparency + b'\0'
+                alpha = b"\xFF" * transparency + b"\0"
                 chunk(fp, b"tRNS", alpha[:alpha_bytes])
-        elif im.mode == "L":
+        elif im.mode in ("1", "L", "I"):
             transparency = max(0, min(65535, transparency))
             chunk(fp, b"tRNS", o16(transparency))
         elif im.mode == "RGB":
@@ -798,21 +1253,23 @@ def _save(im, fp, filename, chunk=putchunk):
         if "transparency" in im.encoderinfo:
             # don't bother with transparency if it's an RGBA
             # and it's in the info dict. It's probably just stale.
- raise IOError("cannot use transparency for this mode") + raise OSError("cannot use transparency for this mode") else: if im.mode == "P" and im.im.getpalettemode() == "RGBA": alpha = im.im.getpalette("RGBA", "A") - alpha_bytes = 2**bits + alpha_bytes = 2 ** bits chunk(fp, b"tRNS", alpha[:alpha_bytes]) dpi = im.encoderinfo.get("dpi") if dpi: - chunk(fp, b"pHYs", - o32(int(dpi[0] / 0.0254 + 0.5)), - o32(int(dpi[1] / 0.0254 + 0.5)), - b'\x01') + chunk( + fp, + b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b"\x01", + ) - info = im.encoderinfo.get("pnginfo") if info: chunks = [b"bKGD", b"hIST"] for cid, data in info.chunks: @@ -820,8 +1277,18 @@ def _save(im, fp, filename, chunk=putchunk): chunks.remove(cid) chunk(fp, cid, data) - ImageFile._save(im, _idat(fp, chunk), - [("zip", (0, 0)+im.size, 0, rawmode)]) + exif = im.encoderinfo.get("exif", im.info.get("exif")) + if exif: + if isinstance(exif, Image.Exif): + exif = exif.tobytes(8) + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + chunk(fp, b"eXIf", exif) + + if save_all: + _write_multiple_frames(im, fp, chunk, rawmode) + else: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) chunk(fp, b"IEND", b"") @@ -832,10 +1299,11 @@ def _save(im, fp, filename, chunk=putchunk): # -------------------------------------------------------------------- # PNG chunk converter + def getchunks(im, **params): """Return a list of PNG chunks representing this image.""" - class collector(object): + class collector: data = [] def write(self, data): @@ -865,7 +1333,8 @@ def getchunks(im, **params): Image.register_open(PngImageFile.format, PngImageFile, _accept) Image.register_save(PngImageFile.format, _save) +Image.register_save_all(PngImageFile.format, _save_all) -Image.register_extension(PngImageFile.format, ".png") +Image.register_extensions(PngImageFile.format, [".png", ".apng"]) Image.register_mime(PngImageFile.format, "image/png") diff --git a/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py index 8002678..35a77ba 100644 --- a/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py @@ -17,12 +17,10 @@ from . import Image, ImageFile -__version__ = "0.2" - # # -------------------------------------------------------------------- -b_whitespace = b'\x20\x09\x0a\x0b\x0c\x0d' +b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d" MODES = { # standard @@ -34,7 +32,7 @@ MODES = { # PIL extensions (for test purposes only) b"PyP": "P", b"PyRGBA": "RGBA", - b"PyCMYK": "CMYK" + b"PyCMYK": "CMYK", } @@ -45,6 +43,7 @@ def _accept(prefix): ## # Image plugin for PBM, PGM, and PPM images. 
+ class PpmImageFile(ImageFile.ImageFile): format = "PPM" @@ -55,10 +54,10 @@ class PpmImageFile(ImageFile.ImageFile): c = self.fp.read(1) if not c or c in b_whitespace: break - if c > b'\x79': + if c > b"\x79": raise ValueError("Expected ASCII value, found binary") s = s + c - if (len(s) > 9): + if len(s) > 9: raise ValueError("Expected int, got > 9 digits") return s @@ -68,7 +67,14 @@ class PpmImageFile(ImageFile.ImageFile): s = self.fp.read(1) if s != b"P": raise SyntaxError("not a PPM file") - mode = MODES[self._token(s)] + magic_number = self._token(s) + mode = MODES[magic_number] + + self.custom_mimetype = { + b"P4": "image/x-portable-bitmap", + b"P5": "image/x-portable-graymap", + b"P6": "image/x-portable-pixmap", + }.get(magic_number) if mode == "1": self.mode = "1" @@ -83,8 +89,7 @@ class PpmImageFile(ImageFile.ImageFile): if s not in b_whitespace: break if s == b"": - raise ValueError( - "File does not extend beyond magic number") + raise ValueError("File does not extend beyond magic number") if s != b"#": break s = self.fp.readline() @@ -98,32 +103,30 @@ class PpmImageFile(ImageFile.ImageFile): elif ix == 2: # maxgrey if s > 255: - if not mode == 'L': + if not mode == "L": raise ValueError("Too many colors for band: %s" % s) - if s < 2**16: - self.mode = 'I' - rawmode = 'I;16B' + if s < 2 ** 16: + self.mode = "I" + rawmode = "I;16B" else: - self.mode = 'I' - rawmode = 'I;32B' + self.mode = "I" + rawmode = "I;32B" self._size = xsize, ysize - self.tile = [("raw", - (0, 0, xsize, ysize), - self.fp.tell(), - (rawmode, 0, 1))] + self.tile = [("raw", (0, 0, xsize, ysize), self.fp.tell(), (rawmode, 0, 1))] # # -------------------------------------------------------------------- + def _save(im, fp, filename): if im.mode == "1": rawmode, head = "1;I", b"P4" elif im.mode == "L": rawmode, head = "L", b"P5" elif im.mode == "I": - if im.getextrema()[1] < 2**16: + if im.getextrema()[1] < 2 ** 16: rawmode, head = "I;16B", b"P5" else: rawmode, head = "I;32B", b"P5" @@ -132,8 +135,8 @@ def _save(im, fp, filename): elif im.mode == "RGBA": rawmode, head = "RGB", b"P6" else: - raise IOError("cannot write mode %s as PPM" % im.mode) - fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + raise OSError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode("ascii")) if head == b"P6": fp.write(b"255\n") if head == b"P5": @@ -143,11 +146,12 @@ def _save(im, fp, filename): fp.write(b"65535\n") elif rawmode == "I;32B": fp.write(b"2147483648\n") - ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) # ALTERNATIVE: save via builtin debug function # im._dump(filename) + # # -------------------------------------------------------------------- @@ -155,4 +159,6 @@ def _save(im, fp, filename): Image.register_open(PpmImageFile.format, PpmImageFile, _accept) Image.register_save(PpmImageFile.format, _save) -Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm"]) +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"]) + +Image.register_mime(PpmImageFile.format, "image/x-portable-anymap") diff --git a/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py index 2d64ecd..cceb85c 100644 --- a/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py @@ -16,7 +16,7 @@ # See the README file for information on 
usage and redistribution. # -__version__ = "0.4" +import io from . import Image, ImageFile, ImagePalette from ._binary import i8, i16be as i16, i32be as i32 @@ -31,13 +31,14 @@ MODES = { (4, 8): ("CMYK", 4), (7, 8): ("L", 1), # FIXME: multilayer (8, 8): ("L", 1), # duotone - (9, 8): ("LAB", 3) + (9, 8): ("LAB", 3), } # --------------------------------------------------------------------. # read PSD images + def _accept(prefix): return prefix[:4] == b"8BPS" @@ -45,10 +46,12 @@ def _accept(prefix): ## # Image plugin for Photoshop images. + class PsdImageFile(ImageFile.ImageFile): format = "PSD" format_description = "Adobe Photoshop" + _close_exclusive_fp_after_loading = False def _open(self): @@ -68,7 +71,7 @@ class PsdImageFile(ImageFile.ImageFile): mode, channels = MODES[(psd_mode, psd_bits)] if channels > psd_channels: - raise IOError("not enough channels") + raise OSError("not enough channels") self.mode = mode self._size = i32(s[18:]), i32(s[14:]) @@ -92,13 +95,13 @@ class PsdImageFile(ImageFile.ImageFile): # load resources end = self.fp.tell() + size while self.fp.tell() < end: - signature = read(4) + read(4) # signature id = i16(read(2)) name = read(i8(read(1))) if not (len(name) & 1): read(1) # padding data = read(i32(read(4))) - if (len(data) & 1): + if len(data) & 1: read(1) # padding self.resources.append((id, name, data)) if id == 1039: # ICC profile @@ -123,7 +126,7 @@ class PsdImageFile(ImageFile.ImageFile): self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) # keep the file open - self._fp = self.fp + self.__fp = self.fp self.frame = 1 self._min_frame = 1 @@ -141,11 +144,11 @@ class PsdImageFile(ImageFile.ImageFile): # seek to given layer (1..max) try: - name, mode, bbox, tile = self.layers[layer-1] + name, mode, bbox, tile = self.layers[layer - 1] self.mode = mode self.tile = tile self.frame = layer - self.fp = self._fp + self.fp = self.__fp return name, bbox except IndexError: raise EOFError("no such layer") @@ -156,13 +159,21 @@ class PsdImageFile(ImageFile.ImageFile): def load_prepare(self): # create image memory if necessary - if not self.im or\ - self.im.mode != self.mode or self.im.size != self.size: + if not self.im or self.im.mode != self.mode or self.im.size != self.size: self.im = Image.core.fill(self.mode, self.size, 0) # create palette (optional) if self.mode == "P": Image.Image.load(self) + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + def _layerinfo(file): # read layerinfo block @@ -207,33 +218,31 @@ def _layerinfo(file): mode = None # unknown # skip over blend flags and extra information - filler = read(12) + read(12) # filler name = "" - size = i32(read(4)) + size = i32(read(4)) # length of the extra data field combined = 0 if size: + data_end = file.tell() + size + length = i32(read(4)) if length: - mask_y = i32(read(4)) - mask_x = i32(read(4)) - mask_h = i32(read(4)) - mask_y - mask_w = i32(read(4)) - mask_x - file.seek(length - 16, 1) + file.seek(length - 16, io.SEEK_CUR) combined += length + 4 length = i32(read(4)) if length: - file.seek(length, 1) + file.seek(length, io.SEEK_CUR) combined += length + 4 length = i8(read(1)) if length: # Don't know the proper encoding, # Latin-1 should be a good guess - name = read(length).decode('latin-1', 'replace') + name = read(length).decode("latin-1", "replace") combined += length + 1 - file.seek(size - combined, 1) + file.seek(data_end) layers.append((name, mode, (x0, y0, x1, y1))) # get tiles @@ -271,7 +280,7 
@@ def _maketile(file, mode, bbox, channels): if mode == "CMYK": layer += ";I" tile.append(("raw", bbox, offset, layer)) - offset = offset + xsize*ysize + offset = offset + xsize * ysize elif compression == 1: # @@ -284,11 +293,9 @@ def _maketile(file, mode, bbox, channels): layer = mode[channel] if mode == "CMYK": layer += ";I" - tile.append( - ("packbits", bbox, offset, layer) - ) + tile.append(("packbits", bbox, offset, layer)) for y in range(ysize): - offset = offset + i16(bytecount[i:i+2]) + offset = offset + i16(bytecount[i : i + 2]) i += 2 file.seek(offset) @@ -298,6 +305,7 @@ def _maketile(file, mode, bbox, channels): return tile + # -------------------------------------------------------------------- # registry diff --git a/server/www/packages/packages-linux/x64/PIL/PyAccess.py b/server/www/packages/packages-linux/x64/PIL/PyAccess.py index cce2de2..359a949 100644 --- a/server/www/packages/packages-linux/x64/PIL/PyAccess.py +++ b/server/www/packages/packages-linux/x64/PIL/PyAccess.py @@ -25,7 +25,6 @@ import sys from cffi import FFI - logger = logging.getLogger(__name__) @@ -41,18 +40,19 @@ ffi = FFI() ffi.cdef(defs) -class PyAccess(object): - +class PyAccess: def __init__(self, img, readonly=False): vals = dict(img.im.unsafe_ptrs) self.readonly = readonly - self.image8 = ffi.cast('unsigned char **', vals['image8']) - self.image32 = ffi.cast('int **', vals['image32']) - self.image = ffi.cast('unsigned char **', vals['image']) + self.image8 = ffi.cast("unsigned char **", vals["image8"]) + self.image32 = ffi.cast("int **", vals["image32"]) + self.image = ffi.cast("unsigned char **", vals["image"]) self.xsize, self.ysize = img.im.size # Keep pointer to im object to prevent dereferencing. self._im = img.im + if self._im.mode == "P": + self._palette = img.palette # Debugging is polluting test traces, only useful here # when hacking on PyAccess @@ -73,8 +73,22 @@ class PyAccess(object): :param color: The pixel value. """ if self.readonly: - raise ValueError('Attempt to putpixel a read only image') - (x, y) = self.check_xy(xy) + raise ValueError("Attempt to putpixel a read only image") + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + + if ( + self._im.mode == "P" + and isinstance(color, (list, tuple)) + and len(color) in [3, 4] + ): + # RGB or RGBA value for a P image + color = self._palette.getcolor(color) + return self.set_pixel(x, y, color) def __getitem__(self, xy): @@ -88,8 +102,12 @@ class PyAccess(object): :returns: a pixel value for single band images, a tuple of pixel values for multiband images. 
""" - - (x, y) = self.check_xy(xy) + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) return self.get_pixel(x, y) putpixel = __setitem__ @@ -98,12 +116,13 @@ class PyAccess(object): def check_xy(self, xy): (x, y) = xy if not (0 <= x < self.xsize and 0 <= y < self.ysize): - raise ValueError('pixel location out of range') + raise ValueError("pixel location out of range") return xy class _PyAccess32_2(PyAccess): """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) @@ -139,6 +158,7 @@ class _PyAccess32_3(PyAccess): class _PyAccess32_4(PyAccess): """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) @@ -157,6 +177,7 @@ class _PyAccess32_4(PyAccess): class _PyAccess8(PyAccess): """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): self.pixels = self.image8 @@ -174,8 +195,9 @@ class _PyAccess8(PyAccess): class _PyAccessI16_N(PyAccess): """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast('unsigned short **', self.image) + self.pixels = ffi.cast("unsigned short **", self.image) def get_pixel(self, x, y): return self.pixels[y][x] @@ -191,8 +213,9 @@ class _PyAccessI16_N(PyAccess): class _PyAccessI16_L(PyAccess): """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) def get_pixel(self, x, y): pixel = self.pixels[y][x] @@ -205,14 +228,15 @@ class _PyAccessI16_L(PyAccess): except TypeError: color = min(color[0], 65535) - pixel.l = color & 0xFF + pixel.l = color & 0xFF # noqa: E741 pixel.r = color >> 8 class _PyAccessI16_B(PyAccess): """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) def get_pixel(self, x, y): pixel = self.pixels[y][x] @@ -222,15 +246,16 @@ class _PyAccessI16_B(PyAccess): pixel = self.pixels[y][x] try: color = min(color, 65535) - except: + except Exception: color = min(color[0], 65535) - pixel.l = color >> 8 + pixel.l = color >> 8 # noqa: E741 pixel.r = color & 0xFF class _PyAccessI32_N(PyAccess): """ Signed Int32 access, native endian """ + def _post_init(self, *args, **kwargs): self.pixels = self.image32 @@ -243,15 +268,15 @@ class _PyAccessI32_N(PyAccess): class _PyAccessI32_Swap(PyAccess): """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): self.pixels = self.image32 def reverse(self, i): - orig = ffi.new('int *', i) - chars = ffi.cast('unsigned char *', orig) - chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ - chars[1], chars[0] - return ffi.cast('int *', chars)[0] + orig = ffi.new("int *", i) + chars = ffi.cast("unsigned char *", orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0] + return ffi.cast("int *", chars)[0] def get_pixel(self, x, y): return self.reverse(self.pixels[y][x]) @@ -262,8 +287,9 @@ class _PyAccessI32_Swap(PyAccess): class _PyAccessF(PyAccess): """ 32 bit float access """ + def _post_init(self, *args, **kwargs): - self.pixels = ffi.cast('float **', self.image32) + self.pixels = ffi.cast("float **", self.image32) def 
get_pixel(self, x, y): return self.pixels[y][x] @@ -277,38 +303,39 @@ class _PyAccessF(PyAccess): self.pixels[y][x] = color[0] -mode_map = {'1': _PyAccess8, - 'L': _PyAccess8, - 'P': _PyAccess8, - 'LA': _PyAccess32_2, - 'La': _PyAccess32_2, - 'PA': _PyAccess32_2, - 'RGB': _PyAccess32_3, - 'LAB': _PyAccess32_3, - 'HSV': _PyAccess32_3, - 'YCbCr': _PyAccess32_3, - 'RGBA': _PyAccess32_4, - 'RGBa': _PyAccess32_4, - 'RGBX': _PyAccess32_4, - 'CMYK': _PyAccess32_4, - 'F': _PyAccessF, - 'I': _PyAccessI32_N, - } +mode_map = { + "1": _PyAccess8, + "L": _PyAccess8, + "P": _PyAccess8, + "LA": _PyAccess32_2, + "La": _PyAccess32_2, + "PA": _PyAccess32_2, + "RGB": _PyAccess32_3, + "LAB": _PyAccess32_3, + "HSV": _PyAccess32_3, + "YCbCr": _PyAccess32_3, + "RGBA": _PyAccess32_4, + "RGBa": _PyAccess32_4, + "RGBX": _PyAccess32_4, + "CMYK": _PyAccess32_4, + "F": _PyAccessF, + "I": _PyAccessI32_N, +} -if sys.byteorder == 'little': - mode_map['I;16'] = _PyAccessI16_N - mode_map['I;16L'] = _PyAccessI16_N - mode_map['I;16B'] = _PyAccessI16_B +if sys.byteorder == "little": + mode_map["I;16"] = _PyAccessI16_N + mode_map["I;16L"] = _PyAccessI16_N + mode_map["I;16B"] = _PyAccessI16_B - mode_map['I;32L'] = _PyAccessI32_N - mode_map['I;32B'] = _PyAccessI32_Swap + mode_map["I;32L"] = _PyAccessI32_N + mode_map["I;32B"] = _PyAccessI32_Swap else: - mode_map['I;16'] = _PyAccessI16_L - mode_map['I;16L'] = _PyAccessI16_L - mode_map['I;16B'] = _PyAccessI16_N + mode_map["I;16"] = _PyAccessI16_L + mode_map["I;16L"] = _PyAccessI16_L + mode_map["I;16B"] = _PyAccessI16_N - mode_map['I;32L'] = _PyAccessI32_Swap - mode_map['I;32B'] = _PyAccessI32_N + mode_map["I;32L"] = _PyAccessI32_Swap + mode_map["I;32B"] = _PyAccessI32_N def new(img, readonly=False): diff --git a/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py index 88df351..ddd3de3 100644 --- a/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py @@ -22,14 +22,11 @@ # -from . import Image, ImageFile -from ._binary import i8, o8, i16be as i16 -from ._util import py3 -import struct import os +import struct - -__version__ = "0.3" +from . 
import Image, ImageFile +from ._binary import i8, i16be as i16, o8 def _accept(prefix): @@ -44,7 +41,7 @@ MODES = { (1, 3, 3): "RGB", (2, 3, 3): "RGB;16B", (1, 3, 4): "RGBA", - (2, 3, 4): "RGBA;16B" + (2, 3, 4): "RGBA;16B", } @@ -98,6 +95,8 @@ class SgiImageFile(ImageFile.ImageFile): self._size = xsize, ysize self.mode = rawmode.split(";")[0] + if self.mode == "RGB": + self.custom_mimetype = "image/rgb" # orientation -1 : scanlines begins at the bottom-left corner orientation = -1 @@ -106,19 +105,21 @@ class SgiImageFile(ImageFile.ImageFile): if compression == 0: pagesize = xsize * ysize * bpc if bpc == 2: - self.tile = [("SGI16", (0, 0) + self.size, - headlen, (self.mode, 0, orientation))] + self.tile = [ + ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation)) + ] else: self.tile = [] offset = headlen for layer in self.mode: self.tile.append( - ("raw", (0, 0) + self.size, - offset, (layer, 0, orientation))) + ("raw", (0, 0) + self.size, offset, (layer, 0, orientation)) + ) offset += pagesize elif compression == 1: - self.tile = [("sgi_rle", (0, 0) + self.size, - headlen, (rawmode, orientation, bpc))] + self.tile = [ + ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc)) + ] def _save(im, fp, filename): @@ -157,8 +158,11 @@ def _save(im, fp, filename): # assert we've got the right number of bands. if len(im.getbands()) != z: - raise ValueError("incorrect number of bands in SGI write: %s vs %s" % - (z, len(im.getbands()))) + raise ValueError( + "incorrect number of bands in SGI write: {} vs {}".format( + z, len(im.getbands()) + ) + ) # Minimum Byte value pinmin = 0 @@ -166,31 +170,30 @@ def _save(im, fp, filename): pinmax = 255 # Image name (79 characters max, truncated below in write) imgName = os.path.splitext(os.path.basename(filename))[0] - if py3: - imgName = imgName.encode('ascii', 'ignore') + imgName = imgName.encode("ascii", "ignore") # Standard representation of pixel in the file colormap = 0 - fp.write(struct.pack('>h', magicNumber)) + fp.write(struct.pack(">h", magicNumber)) fp.write(o8(rle)) fp.write(o8(bpc)) - fp.write(struct.pack('>H', dim)) - fp.write(struct.pack('>H', x)) - fp.write(struct.pack('>H', y)) - fp.write(struct.pack('>H', z)) - fp.write(struct.pack('>l', pinmin)) - fp.write(struct.pack('>l', pinmax)) - fp.write(struct.pack('4s', b'')) # dummy - fp.write(struct.pack('79s', imgName)) # truncates to 79 chars - fp.write(struct.pack('s', b'')) # force null byte after imgname - fp.write(struct.pack('>l', colormap)) - fp.write(struct.pack('404s', b'')) # dummy + fp.write(struct.pack(">H", dim)) + fp.write(struct.pack(">H", x)) + fp.write(struct.pack(">H", y)) + fp.write(struct.pack(">H", z)) + fp.write(struct.pack(">l", pinmin)) + fp.write(struct.pack(">l", pinmax)) + fp.write(struct.pack("4s", b"")) # dummy + fp.write(struct.pack("79s", imgName)) # truncates to 79 chars + fp.write(struct.pack("s", b"")) # force null byte after imgname + fp.write(struct.pack(">l", colormap)) + fp.write(struct.pack("404s", b"")) # dummy - rawmode = 'L' + rawmode = "L" if bpc == 2: - rawmode = 'L;16B' + rawmode = "L;16B" for channel in im.split(): - fp.write(channel.tobytes('raw', rawmode, 0, orientation)) + fp.write(channel.tobytes("raw", rawmode, 0, orientation)) fp.close() @@ -205,13 +208,15 @@ class SGI16Decoder(ImageFile.PyDecoder): self.fd.seek(512) for band in range(zsize): - channel = Image.new('L', (self.state.xsize, self.state.ysize)) - channel.frombytes(self.fd.read(2 * pagesize), 'raw', - 'L;16B', stride, orientation) + channel = Image.new("L", 
(self.state.xsize, self.state.ysize)) + channel.frombytes( + self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation + ) self.im.putband(channel.im, band) return -1, 0 + # # registry @@ -220,9 +225,7 @@ Image.register_decoder("SGI16", SGI16Decoder) Image.register_open(SgiImageFile.format, SgiImageFile, _accept) Image.register_save(SgiImageFile.format, _save) Image.register_mime(SgiImageFile.format, "image/sgi") -Image.register_mime(SgiImageFile.format, "image/rgb") -Image.register_extensions(SgiImageFile.format, - [".bw", ".rgb", ".rgba", ".sgi"]) +Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"]) # End of file diff --git a/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py index 3f57952..cbd31cf 100644 --- a/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py @@ -32,19 +32,17 @@ # Details about the Spider image format: # https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html # - -from __future__ import print_function - -from PIL import Image, ImageFile import os import struct import sys +from PIL import Image, ImageFile + def isInt(f): try: i = int(f) - if f-i == 0: + if f - i == 0: return 1 else: return 0 @@ -60,8 +58,9 @@ iforms = [1, 3, -11, -12, -21, -22] # Returns no. of bytes in the header, if it is a valid Spider header, # otherwise returns 0 + def isSpiderHeader(t): - h = (99,) + t # add 1 value so can use spider header index start=1 + h = (99,) + t # add 1 value so can use spider header index start=1 # header values 1,2,5,12,13,22,23 should be integers for i in [1, 2, 5, 12, 13, 22, 23]: if not isInt(h[i]): @@ -71,9 +70,9 @@ def isSpiderHeader(t): if iform not in iforms: return 0 # check other header values - labrec = int(h[13]) # no. records in file header - labbyt = int(h[22]) # total no. of bytes in header - lenbyt = int(h[23]) # record length in bytes + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes if labbyt != (labrec * lenbyt): return 0 # looks like a valid header @@ -81,12 +80,12 @@ def isSpiderHeader(t): def isSpiderImage(filename): - with open(filename, 'rb') as fp: - f = fp.read(92) # read 23 * 4 bytes - t = struct.unpack('>23f', f) # try big-endian first + with open(filename, "rb") as fp: + f = fp.read(92) # read 23 * 4 bytes + t = struct.unpack(">23f", f) # try big-endian first hdrlen = isSpiderHeader(t) if hdrlen == 0: - t = struct.unpack('<23f', f) # little-endian + t = struct.unpack("<23f", f) # little-endian hdrlen = isSpiderHeader(t) return hdrlen @@ -104,18 +103,18 @@ class SpiderImageFile(ImageFile.ImageFile): try: self.bigendian = 1 - t = struct.unpack('>27f', f) # try big-endian first + t = struct.unpack(">27f", f) # try big-endian first hdrlen = isSpiderHeader(t) if hdrlen == 0: self.bigendian = 0 - t = struct.unpack('<27f', f) # little-endian + t = struct.unpack("<27f", f) # little-endian hdrlen = isSpiderHeader(t) if hdrlen == 0: raise SyntaxError("not a valid Spider file") except struct.error: raise SyntaxError("not a valid Spider file") - h = (99,) + t # add 1 value : spider header index starts at 1 + h = (99,) + t # add 1 value : spider header index starts at 1 iform = int(h[5]) if iform != 1: raise SyntaxError("not a Spider 2D image") @@ -149,9 +148,7 @@ class SpiderImageFile(ImageFile.ImageFile): self.rawmode = "F;32F" self.mode = "F" - self.tile = [ - ("raw", (0, 0) + self.size, offset, - (self.rawmode, 0, 1))] + self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))] self.__fp = self.fp # FIXME: hack @property @@ -184,22 +181,32 @@ class SpiderImageFile(ImageFile.ImageFile): (minimum, maximum) = self.getextrema() m = 1 if maximum != minimum: - m = depth / (maximum-minimum) + m = depth / (maximum - minimum) b = -m * minimum return self.point(lambda i, m=m, b=b: i * m + b).convert("L") # returns a ImageTk.PhotoImage object, after rescaling to 0..255 def tkPhotoImage(self): from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + # -------------------------------------------------------------------- # Image series # given a list of filenames, return a list of images def loadImageSeries(filelist=None): - """create a list of Image.images for use in montage""" + """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage""" if filelist is None or len(filelist) < 1: return @@ -209,12 +216,13 @@ def loadImageSeries(filelist=None): print("unable to find %s" % img) continue try: - im = Image.open(img).convert2byte() - except: + with Image.open(img) as im: + im = im.convert2byte() + except Exception: if not isSpiderImage(img): print(img + " is not a Spider image file") continue - im.info['filename'] = img + im.info["filename"] = img imglist.append(im) return imglist @@ -222,10 +230,11 @@ def loadImageSeries(filelist=None): # -------------------------------------------------------------------- # For saving images in Spider format + def makeSpiderHeader(im): nsam, nrow = im.size lenbyt = nsam * 4 # There are labrec records in the header - labrec = 1024 / lenbyt + labrec = int(1024 / lenbyt) if 1024 % lenbyt != 0: labrec += 1 labbyt = labrec * lenbyt @@ -238,10 +247,10 @@ def makeSpiderHeader(im): return [] # NB these are Fortran indices - hdr[1] = 1.0 # nslice (=1 for an image) - hdr[2] = float(nrow) # 
number of rows per slice - hdr[5] = 1.0 # iform for 2D image - hdr[12] = float(nsam) # number of pixels per line + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line hdr[13] = float(labrec) # number of records in file header hdr[22] = float(labbyt) # total number of bytes in header hdr[23] = float(lenbyt) # record length in bytes @@ -252,23 +261,23 @@ def makeSpiderHeader(im): # pack binary data into a string hdrstr = [] for v in hdr: - hdrstr.append(struct.pack('f', v)) + hdrstr.append(struct.pack("f", v)) return hdrstr def _save(im, fp, filename): if im.mode[0] != "F": - im = im.convert('F') + im = im.convert("F") hdr = makeSpiderHeader(im) if len(hdr) < 256: - raise IOError("Error creating Spider header") + raise OSError("Error creating Spider header") # write the SPIDER header fp.writelines(hdr) rawmode = "F;32NF" # 32-bit native floating point - ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) def _save_spider(im, fp, filename): @@ -277,6 +286,7 @@ def _save_spider(im, fp, filename): Image.register_extension(SpiderImageFile.format, ext) _save(im, fp, filename) + # -------------------------------------------------------------------- @@ -294,20 +304,21 @@ if __name__ == "__main__": print("input image must be in Spider format") sys.exit() - im = Image.open(filename) - print("image: " + str(im)) - print("format: " + str(im.format)) - print("size: " + str(im.size)) - print("mode: " + str(im.mode)) - print("max, min: ", end=' ') - print(im.getextrema()) + with Image.open(filename) as im: + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=" ") + print(im.getextrema()) - if len(sys.argv) > 2: - outfile = sys.argv[2] + if len(sys.argv) > 2: + outfile = sys.argv[2] - # perform some image operation - im = im.transpose(Image.FLIP_LEFT_RIGHT) - print( - "saving a flipped version of %s as %s " % - (os.path.basename(filename), outfile)) - im.save(outfile, SpiderImageFile.format) + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + "saving a flipped version of %s as %s " + % (os.path.basename(filename), outfile) + ) + im.save(outfile, SpiderImageFile.format) diff --git a/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py index 898350e..fd7ca8a 100644 --- a/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py @@ -20,16 +20,15 @@ from . import Image, ImageFile, ImagePalette from ._binary import i32be as i32 -__version__ = "0.3" - def _accept(prefix): - return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + return len(prefix) >= 4 and i32(prefix) == 0x59A66A95 ## # Image plugin for Sun raster files. 
+ class SunImageFile(ImageFile.ImageFile): format = "SUN" @@ -54,7 +53,7 @@ class SunImageFile(ImageFile.ImageFile): # HEAD s = self.fp.read(32) - if i32(s) != 0x59a66a95: + if i32(s) != 0x59A66A95: raise SyntaxError("not an SUN raster file") offset = 32 @@ -80,9 +79,9 @@ class SunImageFile(ImageFile.ImageFile): self.mode, rawmode = "RGB", "BGR" elif depth == 32: if file_type == 3: - self.mode, rawmode = 'RGB', 'RGBX' + self.mode, rawmode = "RGB", "RGBX" else: - self.mode, rawmode = 'RGB', 'BGRX' + self.mode, rawmode = "RGB", "BGRX" else: raise SyntaxError("Unsupported Mode/Bit Depth") @@ -94,11 +93,10 @@ class SunImageFile(ImageFile.ImageFile): raise SyntaxError("Unsupported Palette Type") offset = offset + palette_length - self.palette = ImagePalette.raw("RGB;L", - self.fp.read(palette_length)) + self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length)) if self.mode == "L": self.mode = "P" - rawmode = rawmode.replace('L', 'P') + rawmode = rawmode.replace("L", "P") # 16 bit boundaries on stride stride = ((self.size[0] * depth + 15) // 16) * 2 @@ -122,11 +120,12 @@ class SunImageFile(ImageFile.ImageFile): # (https://www.fileformat.info/format/sunraster/egff.htm) if file_type in (0, 1, 3, 4, 5): - self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))] elif file_type == 2: - self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] + self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)] else: - raise SyntaxError('Unsupported Sun Raster file type') + raise SyntaxError("Unsupported Sun Raster file type") + # # registry diff --git a/server/www/packages/packages-linux/x64/PIL/TarIO.py b/server/www/packages/packages-linux/x64/PIL/TarIO.py index 0e949ff..ede6464 100644 --- a/server/www/packages/packages-linux/x64/PIL/TarIO.py +++ b/server/www/packages/packages-linux/x64/PIL/TarIO.py @@ -14,15 +14,16 @@ # See the README file for information on usage and redistribution. # -from . import ContainerIO +import io +from . import ContainerIO ## # A file object that provides read access to a given member of a TAR # file. -class TarIO(ContainerIO.ContainerIO): +class TarIO(ContainerIO.ContainerIO): def __init__(self, tarfile, file): """ Create file object. @@ -30,18 +31,18 @@ class TarIO(ContainerIO.ContainerIO): :param tarfile: Name of TAR file. :param file: Name of member file. 
""" - fh = open(tarfile, "rb") + self.fh = open(tarfile, "rb") while True: - s = fh.read(512) + s = self.fh.read(512) if len(s) != 512: - raise IOError("unexpected end of tar file") + raise OSError("unexpected end of tar file") - name = s[:100].decode('utf-8') - i = name.find('\0') + name = s[:100].decode("utf-8") + i = name.find("\0") if i == 0: - raise IOError("cannot find subfile") + raise OSError("cannot find subfile") if i > 0: name = name[:i] @@ -50,7 +51,17 @@ class TarIO(ContainerIO.ContainerIO): if file == name: break - fh.seek((size + 511) & (~511), 1) + self.fh.seek((size + 511) & (~511), io.SEEK_CUR) # Open region - ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size) + super().__init__(self.fh, self.fh.tell(), size) + + # Context manager support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.fh.close() diff --git a/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py index c266d50..fd71e54 100644 --- a/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py @@ -17,13 +17,10 @@ # -from . import Image, ImageFile, ImagePalette -from ._binary import i8, i16le as i16, o8, o16le as o16 - import warnings -__version__ = "0.3" - +from . import Image, ImageFile, ImagePalette +from ._binary import i8, i16le as i16, o8, o16le as o16 # # -------------------------------------------------------------------- @@ -32,9 +29,9 @@ __version__ = "0.3" MODES = { # map imagetype/depth to rawmode - (1, 8): "P", - (3, 1): "1", - (3, 8): "L", + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", (3, 16): "LA", (2, 16): "BGR;5", (2, 24): "BGR", @@ -45,6 +42,7 @@ MODES = { ## # Image plugin for Targa files. 
+ class TgaImageFile(ImageFile.ImageFile): format = "TGA" @@ -67,9 +65,12 @@ class TgaImageFile(ImageFile.ImageFile): self._size = i16(s[12:]), i16(s[14:]) # validate header fields - if colormaptype not in (0, 1) or\ - self.size[0] <= 0 or self.size[1] <= 0 or\ - depth not in (1, 8, 16, 24, 32): + if ( + colormaptype not in (0, 1) + or self.size[0] <= 0 + or self.size[1] <= 0 + or depth not in (1, 8, 16, 24, 32) + ): raise SyntaxError("not a TGA file") # image mode @@ -110,27 +111,43 @@ class TgaImageFile(ImageFile.ImageFile): start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) if mapdepth == 16: self.palette = ImagePalette.raw( - "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + "BGR;16", b"\0" * 2 * start + self.fp.read(2 * size) + ) elif mapdepth == 24: self.palette = ImagePalette.raw( - "BGR", b"\0"*3*start + self.fp.read(3*size)) + "BGR", b"\0" * 3 * start + self.fp.read(3 * size) + ) elif mapdepth == 32: self.palette = ImagePalette.raw( - "BGRA", b"\0"*4*start + self.fp.read(4*size)) + "BGRA", b"\0" * 4 * start + self.fp.read(4 * size) + ) # setup tile descriptor try: rawmode = MODES[(imagetype & 7, depth)] if imagetype & 8: # compressed - self.tile = [("tga_rle", (0, 0)+self.size, - self.fp.tell(), (rawmode, orientation, depth))] + self.tile = [ + ( + "tga_rle", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, orientation, depth), + ) + ] else: - self.tile = [("raw", (0, 0)+self.size, - self.fp.tell(), (rawmode, 0, orientation))] + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, 0, orientation), + ) + ] except KeyError: pass # cannot decode + # # -------------------------------------------------------------------- # Write TGA file @@ -151,19 +168,17 @@ def _save(im, fp, filename): try: rawmode, bits, colormaptype, imagetype = SAVE[im.mode] except KeyError: - raise IOError("cannot write mode %s as TGA" % im.mode) + raise OSError("cannot write mode %s as TGA" % im.mode) if "rle" in im.encoderinfo: rle = im.encoderinfo["rle"] else: - compression = im.encoderinfo.get("compression", - im.info.get("compression")) + compression = im.encoderinfo.get("compression", im.info.get("compression")) rle = compression == "tga_rle" if rle: imagetype += 8 - id_section = im.encoderinfo.get("id_section", - im.info.get("id_section", "")) + id_section = im.encoderinfo.get("id_section", im.info.get("id_section", "")) id_len = len(id_section) if id_len > 255: id_len = 255 @@ -180,23 +195,24 @@ def _save(im, fp, filename): else: flags = 0 - orientation = im.encoderinfo.get("orientation", - im.info.get("orientation", -1)) + orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1)) if orientation > 0: flags = flags | 0x20 - fp.write(o8(id_len) + - o8(colormaptype) + - o8(imagetype) + - o16(colormapfirst) + - o16(colormaplength) + - o8(colormapentry) + - o16(0) + - o16(0) + - o16(im.size[0]) + - o16(im.size[1]) + - o8(bits) + - o8(flags)) + fp.write( + o8(id_len) + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags) + ) if id_section: fp.write(id_section) @@ -206,16 +222,17 @@ def _save(im, fp, filename): if rle: ImageFile._save( - im, - fp, - [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))]) + im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))] + ) else: ImageFile._save( - im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + im, fp, [("raw", (0, 0) + im.size, 0, 
(rawmode, 0, orientation))] + ) # write targa version 2 footer fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000") + # # -------------------------------------------------------------------- # Registry @@ -224,4 +241,6 @@ def _save(im, fp, filename): Image.register_open(TgaImageFile.format, TgaImageFile) Image.register_save(TgaImageFile.format, _save) -Image.register_extension(TgaImageFile.format, ".tga") +Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"]) + +Image.register_mime(TgaImageFile.format, "image/x-tga") diff --git a/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py index 5059a13..74fb695 100644 --- a/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py @@ -38,34 +38,19 @@ # # See the README file for information on usage and redistribution. # - -from __future__ import division, print_function - -from . import Image, ImageFile, ImagePalette, TiffTags -from ._binary import i8, o8 -from ._util import py3 - -from fractions import Fraction -from numbers import Number, Rational - import io import itertools import os import struct -import sys import warnings +from collections.abc import MutableMapping +from fractions import Fraction +from numbers import Number, Rational +from . import Image, ImageFile, ImagePalette, TiffTags +from ._binary import i8, o8 from .TiffTags import TYPES -try: - # Python 3 - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import MutableMapping - - -__version__ = "1.3.5" DEBUG = False # Needs to be merged with the new logging approach. # Set these to true to force use of libtiff for reading or writing. 
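# The register_extensions/register_mime change above means every Targa
# suffix now routes saves to the TGA writer, and the format advertises a
# MIME type. A hedged usage sketch (file names are hypothetical):
from PIL import Image

im = Image.new("RGB", (4, 4))
im.save("sprite.vda")            # ".vda" now resolves to the TGA plugin
im.save("sprite.tga", rle=True)  # the "rle" keyword selects tga_rle packing
print(Image.MIME["TGA"])         # image/x-tga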
@@ -96,6 +81,7 @@ X_RESOLUTION = 282 Y_RESOLUTION = 283 PLANAR_CONFIGURATION = 284 RESOLUTION_UNIT = 296 +TRANSFERFUNCTION = 301 SOFTWARE = 305 DATE_TIME = 306 ARTIST = 315 @@ -105,12 +91,14 @@ TILEOFFSETS = 324 EXTRASAMPLES = 338 SAMPLEFORMAT = 339 JPEGTABLES = 347 +REFERENCEBLACKWHITE = 532 COPYRIGHT = 33432 IPTC_NAA_CHUNK = 33723 # newsphoto properties PHOTOSHOP_CHUNK = 34377 # photoshop properties ICCPROFILE = 34675 EXIFIFD = 34665 XMP = 700 +JPEGQUALITY = 65537 # pseudo-tag by libtiff # https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java IMAGEJ_META_DATA_BYTE_COUNTS = 50838 @@ -132,6 +120,9 @@ COMPRESSION_INFO = { 32946: "tiff_deflate", 34676: "tiff_sgilog", 34677: "tiff_sgilog24", + 34925: "lzma", + 50000: "zstd", + 50001: "webp", } COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} @@ -147,7 +138,6 @@ OPEN_INFO = { (MM, 1, (1,), 1, (1,), ()): ("1", "1"), (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), - (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), @@ -156,7 +146,6 @@ OPEN_INFO = { (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), - (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), @@ -165,7 +154,6 @@ OPEN_INFO = { (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), - (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), @@ -174,14 +162,11 @@ OPEN_INFO = { (MM, 1, (1,), 1, (8,), ()): ("L", "L"), (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), - (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), - (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), - (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), @@ -189,10 +174,8 @@ OPEN_INFO = { (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), - (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), - (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), @@ -219,7 +202,6 @@ OPEN_INFO = { (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 - (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), @@ -230,7 +212,6 @@ OPEN_INFO = { (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), - (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), @@ -249,19 +230,17 @@ OPEN_INFO = { (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), (II, 3, (1,), 
2, (8,), ()): ("P", "P;R"), (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), - (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), - - # JPEG compressed images handled by LibTiff and auto-converted to RGB + (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"), + # JPEG compressed images handled by LibTiff and auto-converted to RGBX # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel - (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), - (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), - + (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), } @@ -284,6 +263,20 @@ def _limit_rational(val, max_val): return n_d[::-1] if inv else n_d +def _limit_signed_rational(val, max_val, min_val): + frac = Fraction(val) + n_d = frac.numerator, frac.denominator + + if min(n_d) < min_val: + n_d = _limit_rational(val, abs(min_val)) + + if max(n_d) > max_val: + val = Fraction(*n_d) + n_d = _limit_rational(val, max_val) + + return n_d + + ## # Wrapper for TIFF IFDs. @@ -303,7 +296,7 @@ class IFDRational(Rational): """ - __slots__ = ('_numerator', '_denominator', '_val') + __slots__ = ("_numerator", "_denominator", "_val") def __init__(self, value, denominator=1): """ @@ -311,25 +304,21 @@ class IFDRational(Rational): float/rational/other number, or an IFDRational :param denominator: Optional integer denominator """ - self._denominator = denominator - self._numerator = value - self._val = float(1) + if isinstance(value, IFDRational): + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value._val + return if isinstance(value, Fraction): self._numerator = value.numerator self._denominator = value.denominator - self._val = value - - if isinstance(value, IFDRational): - self._denominator = value.denominator - self._numerator = value.numerator - self._val = value._val - return + else: + self._numerator = value + self._denominator = denominator if denominator == 0: - self._val = float('nan') - return - + self._val = float("nan") elif denominator == 1: self._val = Fraction(value) else: @@ -368,44 +357,43 @@ class IFDRational(Rational): def _delegate(op): def delegate(self, *args): return getattr(self._val, op)(*args) + return delegate - """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', - 'truediv', 'rtruediv', 'floordiv', - 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', - 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', 'rfloordiv', + 'mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool', 'ceil', 'floor', 'round'] print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) """ - __add__ = _delegate('__add__') - __radd__ = _delegate('__radd__') - __sub__ = _delegate('__sub__') - __rsub__ = _delegate('__rsub__') - __div__ = _delegate('__div__') - __rdiv__ = _delegate('__rdiv__') - __mul__ = _delegate('__mul__') - __rmul__ = _delegate('__rmul__') - __truediv__ = _delegate('__truediv__') - __rtruediv__ = _delegate('__rtruediv__') - __floordiv__ = 
_delegate('__floordiv__') - __rfloordiv__ = _delegate('__rfloordiv__') - __mod__ = _delegate('__mod__') - __rmod__ = _delegate('__rmod__') - __pow__ = _delegate('__pow__') - __rpow__ = _delegate('__rpow__') - __pos__ = _delegate('__pos__') - __neg__ = _delegate('__neg__') - __abs__ = _delegate('__abs__') - __trunc__ = _delegate('__trunc__') - __lt__ = _delegate('__lt__') - __gt__ = _delegate('__gt__') - __le__ = _delegate('__le__') - __ge__ = _delegate('__ge__') - __nonzero__ = _delegate('__nonzero__') - __ceil__ = _delegate('__ceil__') - __floor__ = _delegate('__floor__') - __round__ = _delegate('__round__') + __add__ = _delegate("__add__") + __radd__ = _delegate("__radd__") + __sub__ = _delegate("__sub__") + __rsub__ = _delegate("__rsub__") + __mul__ = _delegate("__mul__") + __rmul__ = _delegate("__rmul__") + __truediv__ = _delegate("__truediv__") + __rtruediv__ = _delegate("__rtruediv__") + __floordiv__ = _delegate("__floordiv__") + __rfloordiv__ = _delegate("__rfloordiv__") + __mod__ = _delegate("__mod__") + __rmod__ = _delegate("__rmod__") + __pow__ = _delegate("__pow__") + __rpow__ = _delegate("__rpow__") + __pos__ = _delegate("__pos__") + __neg__ = _delegate("__neg__") + __abs__ = _delegate("__abs__") + __trunc__ = _delegate("__trunc__") + __lt__ = _delegate("__lt__") + __gt__ = _delegate("__gt__") + __le__ = _delegate("__le__") + __ge__ = _delegate("__ge__") + __bool__ = _delegate("__bool__") + __ceil__ = _delegate("__ceil__") + __floor__ = _delegate("__floor__") + __round__ = _delegate("__round__") class ImageFileDirectory_v2(MutableMapping): @@ -416,7 +404,7 @@ class ImageFileDirectory_v2(MutableMapping): ifd = ImageFileDirectory_v2() ifd[key] = 'Some Data' - ifd.tagtype[key] = 2 + ifd.tagtype[key] = TiffTags.ASCII print(ifd[key]) 'Some Data' @@ -439,6 +427,7 @@ class ImageFileDirectory_v2(MutableMapping): .. versionadded:: 3.0.0 """ + """ Documentation: @@ -483,7 +472,7 @@ class ImageFileDirectory_v2(MutableMapping): else: raise SyntaxError("not a TIFF IFD") self.reset() - self.next, = self._unpack("L", ifh[4:]) + (self.next,) = self._unpack("L", ifh[4:]) self._legacy_api = False prefix = property(lambda self: self._prefix) @@ -498,7 +487,7 @@ class ImageFileDirectory_v2(MutableMapping): self._tags_v1 = {} # will remain empty if legacy_api is false self._tags_v2 = {} # main tag storage self._tagdata = {} - self.tagtype = {} # added 2008-06-05 by Florian Hoech + self.tagtype = {} # added 2008-06-05 by Florian Hoech self._next = None self._offset = None @@ -511,8 +500,7 @@ class ImageFileDirectory_v2(MutableMapping): Returns the complete tag dictionary, with named tags where possible. 
""" - return dict((TiffTags.lookup(code).name, value) - for code, value in self.items()) + return {TiffTags.lookup(code).name: value for code, value in self.items()} def __len__(self): return len(set(self._tagdata) | set(self._tags_v2)) @@ -525,23 +513,17 @@ class ImageFileDirectory_v2(MutableMapping): self[tag] = handler(self, data, self.legacy_api) # check type val = self._tags_v2[tag] if self.legacy_api and not isinstance(val, (tuple, bytes)): - val = val, + val = (val,) return val def __contains__(self, tag): return tag in self._tags_v2 or tag in self._tagdata - if not py3: - def has_key(self, tag): - return tag in self - def __setitem__(self, tag, value): self._setitem(tag, value, self.legacy_api) def _setitem(self, tag, value, legacy_api): basetypes = (Number, bytes, str) - if not py3: - basetypes += unicode, info = TiffTags.lookup(tag) values = [value] if isinstance(value, basetypes) else value @@ -550,30 +532,36 @@ class ImageFileDirectory_v2(MutableMapping): if info.type: self.tagtype[tag] = info.type else: - self.tagtype[tag] = 7 + self.tagtype[tag] = TiffTags.UNDEFINED if all(isinstance(v, IFDRational) for v in values): - self.tagtype[tag] = 5 + self.tagtype[tag] = ( + TiffTags.RATIONAL + if all(v >= 0 for v in values) + else TiffTags.SIGNED_RATIONAL + ) elif all(isinstance(v, int) for v in values): - if all(v < 2 ** 16 for v in values): - self.tagtype[tag] = 3 + if all(0 <= v < 2 ** 16 for v in values): + self.tagtype[tag] = TiffTags.SHORT + elif all(-(2 ** 15) < v < 2 ** 15 for v in values): + self.tagtype[tag] = TiffTags.SIGNED_SHORT else: - self.tagtype[tag] = 4 + self.tagtype[tag] = ( + TiffTags.LONG + if all(v >= 0 for v in values) + else TiffTags.SIGNED_LONG + ) elif all(isinstance(v, float) for v in values): - self.tagtype[tag] = 12 + self.tagtype[tag] = TiffTags.DOUBLE else: - if py3: - if all(isinstance(v, str) for v in values): - self.tagtype[tag] = 2 - else: - # Never treat data as binary by default on Python 2. - self.tagtype[tag] = 2 + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = TiffTags.ASCII - if self.tagtype[tag] == 7 and py3: - values = [value.encode("ascii", 'replace') if isinstance( - value, str) else value] - elif self.tagtype[tag] == 5: - values = [float(v) if isinstance(v, int) else v - for v in values] + if self.tagtype[tag] == TiffTags.UNDEFINED: + values = [ + value.encode("ascii", "replace") if isinstance(value, str) else value + ] + elif self.tagtype[tag] == TiffTags.RATIONAL: + values = [float(v) if isinstance(v, int) else v for v in values] values = tuple(info.cvt_enum(value) for value in values) @@ -584,18 +572,23 @@ class ImageFileDirectory_v2(MutableMapping): # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. # Don't mess with the legacy api, since it's frozen. - if (info.length == 1) or \ - (info.length is None and len(values) == 1 and not legacy_api): + if (info.length == 1) or ( + info.length is None and len(values) == 1 and not legacy_api + ): # Don't mess with the legacy api, since it's frozen. 
- if legacy_api and self.tagtype[tag] in [5, 10]: # rationals - values = values, + if legacy_api and self.tagtype[tag] in [ + TiffTags.RATIONAL, + TiffTags.SIGNED_RATIONAL, + ]: # rationals + values = (values,) try: - dest[tag], = values + (dest[tag],) = values except ValueError: # We've got a builtin tag with 1 expected entry warnings.warn( - "Metadata Warning, tag %s had too many entries: %s, expected 1" % ( - tag, len(values))) + "Metadata Warning, tag %s had too many entries: %s, expected 1" + % (tag, len(values)) + ) dest[tag] = values[0] else: @@ -620,36 +613,51 @@ class ImageFileDirectory_v2(MutableMapping): def _register_loader(idx, size): def decorator(func): from .TiffTags import TYPES + if func.__name__.startswith("load_"): TYPES[idx] = func.__name__[5:].replace("_", " ") - _load_dispatch[idx] = size, func + _load_dispatch[idx] = size, func # noqa: F821 return func + return decorator def _register_writer(idx): def decorator(func): - _write_dispatch[idx] = func + _write_dispatch[idx] = func # noqa: F821 return func + return decorator def _register_basic(idx_fmt_name): from .TiffTags import TYPES + idx, fmt, name = idx_fmt_name TYPES[idx] = name size = struct.calcsize("=" + fmt) - _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( - self._unpack("{}{}".format(len(data) // size, fmt), data)) - _write_dispatch[idx] = lambda self, *values: ( - b"".join(self._pack(fmt, value) for value in values)) + _load_dispatch[idx] = ( # noqa: F821 + size, + lambda self, data, legacy_api=True: ( + self._unpack("{}{}".format(len(data) // size, fmt), data) + ), + ) + _write_dispatch[idx] = lambda self, *values: ( # noqa: F821 + b"".join(self._pack(fmt, value) for value in values) + ) - list(map(_register_basic, - [(3, "H", "short"), - (4, "L", "long"), - (6, "b", "signed byte"), - (8, "h", "signed short"), - (9, "l", "signed long"), - (11, "f", "float"), - (12, "d", "double")])) + list( + map( + _register_basic, + [ + (TiffTags.SHORT, "H", "short"), + (TiffTags.LONG, "L", "long"), + (TiffTags.SIGNED_BYTE, "b", "signed byte"), + (TiffTags.SIGNED_SHORT, "h", "signed short"), + (TiffTags.SIGNED_LONG, "l", "signed long"), + (TiffTags.FLOAT, "f", "float"), + (TiffTags.DOUBLE, "d", "double"), + ], + ) + ) @_register_loader(1, 1) # Basic type, except for the legacy API. 
def load_byte(self, data, legacy_api=True): @@ -668,22 +676,22 @@ class ImageFileDirectory_v2(MutableMapping): @_register_writer(2) def write_string(self, value): # remerge of https://github.com/python-pillow/Pillow/pull/1416 - if sys.version_info.major == 2: - value = value.decode('ascii', 'replace') - return b"" + value.encode('ascii', 'replace') + b"\0" + return b"" + value.encode("ascii", "replace") + b"\0" @_register_loader(5, 8) def load_rational(self, data, legacy_api=True): vals = self._unpack("{}L".format(len(data) // 4), data) - def combine(a, b): return (a, b) if legacy_api else IFDRational(a, b) - return tuple(combine(num, denom) - for num, denom in zip(vals[::2], vals[1::2])) + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) @_register_writer(5) def write_rational(self, *values): - return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31)) - for frac in values) + return b"".join( + self._pack("2L", *_limit_rational(frac, 2 ** 32 - 1)) for frac in values + ) @_register_loader(7, 1) def load_undefined(self, data, legacy_api=True): @@ -697,21 +705,25 @@ class ImageFileDirectory_v2(MutableMapping): def load_signed_rational(self, data, legacy_api=True): vals = self._unpack("{}l".format(len(data) // 4), data) - def combine(a, b): return (a, b) if legacy_api else IFDRational(a, b) - return tuple(combine(num, denom) - for num, denom in zip(vals[::2], vals[1::2])) + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) @_register_writer(10) def write_signed_rational(self, *values): - return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30)) - for frac in values) + return b"".join( + self._pack("2l", *_limit_signed_rational(frac, 2 ** 31 - 1, -(2 ** 31))) + for frac in values + ) def _ensure_read(self, fp, size): ret = fp.read(size) if len(ret) != size: - raise IOError("Corrupt EXIF data. " + - "Expecting to read %d bytes but only got %d. " % - (size, len(ret))) + raise OSError( + "Corrupt EXIF data. " + + "Expecting to read %d bytes but only got %d. " % (size, len(ret)) + ) return ret def load(self, fp): @@ -721,13 +733,14 @@ class ImageFileDirectory_v2(MutableMapping): try: for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): - tag, typ, count, data = self._unpack("HHL4s", - self._ensure_read(fp, 12)) + tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12)) if DEBUG: tagname = TiffTags.lookup(tag).name typname = TYPES.get(typ, "unknown") - print("tag: %s (%d) - type: %s (%d)" % - (tagname, tag, typname, typ), end=" ") + print( + "tag: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ), + end=" ", + ) try: unit_size, handler = self._load_dispatch[typ] @@ -738,10 +751,12 @@ class ImageFileDirectory_v2(MutableMapping): size = count * unit_size if size > 4: here = fp.tell() - offset, = self._unpack("L", data) + (offset,) = self._unpack("L", data) if DEBUG: - print("Tag Location: %s - Data Location: %s" % - (here, offset), end=" ") + print( + "Tag Location: {} - Data Location: {}".format(here, offset), + end=" ", + ) fp.seek(offset) data = ImageFile._safe_read(fp, size) fp.seek(here) @@ -749,9 +764,11 @@ class ImageFileDirectory_v2(MutableMapping): data = data[:size] if len(data) != size: - warnings.warn("Possibly corrupt EXIF data. " - "Expecting to read %d bytes but only got %d." 
- " Skipping tag %s" % (size, len(data), tag)) + warnings.warn( + "Possibly corrupt EXIF data. " + "Expecting to read %d bytes but only got %d." + " Skipping tag %s" % (size, len(data), tag) + ) continue if not data: @@ -766,22 +783,17 @@ class ImageFileDirectory_v2(MutableMapping): else: print("- value:", self[tag]) - self.next, = self._unpack("L", self._ensure_read(fp, 4)) - except IOError as msg: + (self.next,) = self._unpack("L", self._ensure_read(fp, 4)) + except OSError as msg: warnings.warn(str(msg)) return - def save(self, fp): - - if fp.tell() == 0: # skip TIFF header on subsequent pages - # tiff header -- PIL always starts the first IFD at offset 8 - fp.write(self._prefix + self._pack("HL", 42, 8)) - + def tobytes(self, offset=0): # FIXME What about tagdata? - fp.write(self._pack("H", len(self._tags_v2))) + result = self._pack("H", len(self._tags_v2)) entries = [] - offset = fp.tell() + len(self._tags_v2) * 12 + 4 + offset = offset + len(result) + len(self._tags_v2) * 12 + 4 stripoffsets = None # pass 1: convert tags to binary format @@ -791,54 +803,68 @@ class ImageFileDirectory_v2(MutableMapping): stripoffsets = len(entries) typ = self.tagtype.get(tag) if DEBUG: - print("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) + print("Tag {}, Type: {}, Value: {}".format(tag, typ, value)) values = value if isinstance(value, tuple) else (value,) data = self._write_dispatch[typ](self, *values) if DEBUG: tagname = TiffTags.lookup(tag).name typname = TYPES.get(typ, "unknown") - print("save: %s (%d) - type: %s (%d)" % - (tagname, tag, typname, typ), end=" ") + print( + "save: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ), + end=" ", + ) if len(data) >= 16: print("- value: " % len(data)) else: print("- value:", values) # count is sum of lengths for string and arbitrary data - count = len(data) if typ in [2, 7] else len(values) + if typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]: + count = len(data) + else: + count = len(values) # figure out if data fits into the entry if len(data) <= 4: entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) else: - entries.append((tag, typ, count, self._pack("L", offset), - data)) + entries.append((tag, typ, count, self._pack("L", offset), data)) offset += (len(data) + 1) // 2 * 2 # pad to word # update strip offset data to point beyond auxiliary data if stripoffsets is not None: tag, typ, count, value, data = entries[stripoffsets] if data: - raise NotImplementedError( - "multistrip support not yet implemented") + raise NotImplementedError("multistrip support not yet implemented") value = self._pack("L", self._unpack("L", value)[0] + offset) entries[stripoffsets] = tag, typ, count, value, data # pass 2: write entries to file for tag, typ, count, value, data in entries: - if DEBUG > 1: + if DEBUG: print(tag, typ, count, repr(value), repr(data)) - fp.write(self._pack("HHL4s", tag, typ, count, value)) + result += self._pack("HHL4s", tag, typ, count, value) # -- overwrite here for multi-page -- - fp.write(b"\0\0\0\0") # end of entries + result += b"\0\0\0\0" # end of entries # pass 3: write auxiliary data to file for tag, typ, count, value, data in entries: - fp.write(data) + result += data if len(data) & 1: - fp.write(b"\0") + result += b"\0" - return offset + return result + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + offset = fp.tell() + result = self.tobytes(offset) + 
fp.write(result) + return offset + len(result) ImageFileDirectory_v2._load_dispatch = _load_dispatch @@ -858,7 +884,7 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2): ifd = ImageFileDirectory_v1() ifd[key] = 'Some Data' - ifd.tagtype[key] = 2 + ifd.tagtype[key] = TiffTags.ASCII print(ifd[key]) ('Some Data',) @@ -869,8 +895,9 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2): .. deprecated:: 3.0.0 """ + def __init__(self, *args, **kwargs): - ImageFileDirectory_v2.__init__(self, *args, **kwargs) + super().__init__(*args, **kwargs) self._legacy_api = True tags = property(lambda self: self._tags_v1) @@ -933,7 +960,7 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2): self._setitem(tag, handler(self, data, legacy), legacy) val = self._tags_v1[tag] if not isinstance(val, (tuple, bytes)): - val = val, + val = (val,) return val @@ -944,6 +971,7 @@ ImageFileDirectory = ImageFileDirectory_v1 ## # Image plugin for TIFF files. + class TiffImageFile(ImageFile.ImageFile): format = "TIFF" @@ -951,7 +979,7 @@ class TiffImageFile(ImageFile.ImageFile): _close_exclusive_fp_after_loading = False def _open(self): - "Open the first image in a TIFF file" + """Open the first image in a TIFF file""" # Header ifh = self.fp.read(8) @@ -968,7 +996,6 @@ class TiffImageFile(ImageFile.ImageFile): self.__fp = self.fp self._frame_pos = [] self._n_frames = None - self._is_animated = None if DEBUG: print("*** TiffImageFile._open ***") @@ -982,33 +1009,18 @@ class TiffImageFile(ImageFile.ImageFile): def n_frames(self): if self._n_frames is None: current = self.tell() - try: - while True: - self._seek(self.tell() + 1) - except EOFError: - self._n_frames = self.tell() + 1 + self._seek(len(self._frame_pos)) + while self._n_frames is None: + self._seek(self.tell() + 1) self.seek(current) return self._n_frames @property def is_animated(self): - if self._is_animated is None: - if self._n_frames is not None: - self._is_animated = self._n_frames != 1 - else: - current = self.tell() - - try: - self.seek(1) - self._is_animated = True - except EOFError: - self._is_animated = False - - self.seek(current) return self._is_animated def seek(self, frame): - "Select a given frame as current image" + """Select a given frame as current image""" if not self._seek_check(frame): return self._seek(frame) @@ -1024,10 +1036,11 @@ class TiffImageFile(ImageFile.ImageFile): if not self.__next: raise EOFError("no more images in TIFF file") if DEBUG: - print("Seeking to frame %s, on frame %s, " - "__next %s, location: %s" % - (frame, self.__frame, self.__next, self.fp.tell())) - # reset python3 buffered io handle in case fp + print( + "Seeking to frame %s, on frame %s, __next %s, location: %s" + % (frame, self.__frame, self.__next, self.fp.tell()) + ) + # reset buffered io handle in case fp # was passed to libtiff, invalidating the buffer self.fp.tell() self.fp.seek(self.__next) @@ -1036,41 +1049,45 @@ class TiffImageFile(ImageFile.ImageFile): print("Loading tags, location: %s" % self.fp.tell()) self.tag_v2.load(self.fp) self.__next = self.tag_v2.next + if self.__next == 0: + self._n_frames = frame + 1 + if len(self._frame_pos) == 1: + self._is_animated = self.__next != 0 self.__frame += 1 self.fp.seek(self._frame_pos[frame]) self.tag_v2.load(self.fp) - self.__next = self.tag_v2.next # fill the legacy tag/ifd entries self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) self.__frame = frame self._setup() def tell(self): - "Return the current frame number" + """Return the current frame number""" return self.__frame - @property - 
def size(self): - return self._size - - @size.setter - def size(self, value): - warnings.warn( - 'Setting the size of a TIFF image directly is deprecated, and will' - ' be removed in a future version. Use the resize method instead.', - DeprecationWarning - ) - self._size = value - def load(self): if self.use_load_libtiff: return self._load_libtiff() - return super(TiffImageFile, self).load() + return super().load() def load_end(self): + if self._tile_orientation: + method = { + 2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(self._tile_orientation) + if method is not None: + self.im = self.im.transpose(method) + self._size = self.im.size + # allow closing if we're on the first frame, there's no next # This is the ImageFile.load path only, libtiff specific below. - if self.__frame == 0 and not self.__next: + if not self._is_animated: self._close_exclusive_fp_after_loading = True def _load_libtiff(self): @@ -1080,19 +1097,19 @@ class TiffImageFile(ImageFile.ImageFile): pixel = Image.Image.load(self) if self.tile is None: - raise IOError("cannot load this image") + raise OSError("cannot load this image") if not self.tile: return pixel self.load_prepare() if not len(self.tile) == 1: - raise IOError("Not exactly one tile") + raise OSError("Not exactly one tile") # (self._compression, (extents tuple), # 0, (rawmode, self._compression, fp)) extents = self.tile[0][1] - args = list(self.tile[0][3]) + [self.tag_v2.offset] + args = list(self.tile[0][3]) # To be nice on memory footprint, if there's a # file descriptor, use that instead of reading @@ -1101,11 +1118,11 @@ class TiffImageFile(ImageFile.ImageFile): try: fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno()) # flush the file descriptor, prevents error on pypy 2.4+ - # should also eliminate the need for fp.tell for py3 + # should also eliminate the need for fp.tell # in _seek if hasattr(self.fp, "flush"): self.fp.flush() - except IOError: + except OSError: # io.BytesIO have a fileno, but returns an IOError if # it doesn't use a file descriptor. fp = False @@ -1113,13 +1130,15 @@ class TiffImageFile(ImageFile.ImageFile): if fp: args[2] = fp - decoder = Image._getdecoder(self.mode, 'libtiff', tuple(args), - self.decoderconfig) + decoder = Image._getdecoder( + self.mode, "libtiff", tuple(args), self.decoderconfig + ) try: decoder.setimage(self.im, extents) except ValueError: - raise IOError("Couldn't set the image") + raise OSError("Couldn't set the image") + close_self_fp = self._exclusive_fp and not self._is_animated if hasattr(self.fp, "getvalue"): # We've got a stringio like thing passed in. Yay for all in memory. # The decoder needs the entire file in one shot, so there's not @@ -1133,38 +1152,42 @@ class TiffImageFile(ImageFile.ImageFile): if DEBUG: print("have getvalue. just sending in a string from getvalue") n, err = decoder.decode(self.fp.getvalue()) - elif hasattr(self.fp, "fileno"): + elif fp: # we've got a actual file on disk, pass in the fp. if DEBUG: print("have fileno, calling fileno version of the decoder.") - self.fp.seek(0) + if not close_self_fp: + self.fp.seek(0) # 4 bytes, otherwise the trace might error out n, err = decoder.decode(b"fpfp") else: # we have something else. if DEBUG: print("don't have fileno or getvalue. just reading") + self.fp.seek(0) # UNDONE -- so much for that buffer size thing. 
n, err = decoder.decode(self.fp.read()) self.tile = [] self.readonly = 0 + + self.load_end() + # libtiff closed the fp in a, we need to close self.fp, if possible - if self._exclusive_fp: - if self.__frame == 0 and not self.__next: - self.fp.close() - self.fp = None # might be shared + if close_self_fp: + self.fp.close() + self.fp = None # might be shared if err < 0: - raise IOError(err) + raise OSError(err) return Image.Image.load(self) def _setup(self): - "Setup this image object based on current tags" + """Setup this image object based on current tags""" if 0xBC01 in self.tag_v2: - raise IOError("Windows Media Photo files not yet supported") + raise OSError("Windows Media Photo files not yet supported") # extract relevant tags self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] @@ -1174,6 +1197,10 @@ class TiffImageFile(ImageFile.ImageFile): # the specification photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + # old style jpeg compression images most certainly are YCbCr + if self._compression == "tiff_jpeg": + photo = 6 + fillorder = self.tag_v2.get(FILLORDER, 1) if DEBUG: @@ -1185,16 +1212,15 @@ class TiffImageFile(ImageFile.ImageFile): print("- YCbCr subsampling:", self.tag.get(530)) # size - xsize = self.tag_v2.get(IMAGEWIDTH) - ysize = self.tag_v2.get(IMAGELENGTH) + xsize = int(self.tag_v2.get(IMAGEWIDTH)) + ysize = int(self.tag_v2.get(IMAGELENGTH)) self._size = xsize, ysize if DEBUG: print("- size:", self.size) sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,)) - if (len(sampleFormat) > 1 - and max(sampleFormat) == min(sampleFormat) == 1): + if len(sampleFormat) > 1 and max(sampleFormat) == min(sampleFormat) == 1: # SAMPLEFORMAT is properly per band, so an RGB image will # be (1,1,1). But, we don't support per band pixel types, # and anything more than one band is a uint8. So, just @@ -1217,8 +1243,14 @@ class TiffImageFile(ImageFile.ImageFile): bps_tuple = bps_tuple * bps_count # mode: check photometric interpretation and bits per pixel - key = (self.tag_v2.prefix, photo, sampleFormat, fillorder, - bps_tuple, extra_tuple) + key = ( + self.tag_v2.prefix, + photo, + sampleFormat, + fillorder, + bps_tuple, + extra_tuple, + ) if DEBUG: print("format key:", key) try: @@ -1240,11 +1272,11 @@ class TiffImageFile(ImageFile.ImageFile): if xres and yres: resunit = self.tag_v2.get(RESOLUTION_UNIT) if resunit == 2: # dots per inch - self.info["dpi"] = xres, yres + self.info["dpi"] = int(xres + 0.5), int(yres + 0.5) elif resunit == 3: # dots per centimeter. convert to dpi - self.info["dpi"] = xres * 2.54, yres * 2.54 + self.info["dpi"] = int(xres * 2.54 + 0.5), int(yres * 2.54 + 0.5) elif resunit is None: # used to default to 1, but now 2) - self.info["dpi"] = xres, yres + self.info["dpi"] = int(xres + 0.5), int(yres + 0.5) # For backward compatibility, # we also preserve the old behavior self.info["resolution"] = xres, yres @@ -1254,7 +1286,7 @@ class TiffImageFile(ImageFile.ImageFile): # build tile descriptors x = y = layer = 0 self.tile = [] - self.use_load_libtiff = READ_LIBTIFF or self._compression != 'raw' + self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw" if self.use_load_libtiff: # Decoder expects entire file as one tile. # There's a buffer size limit in load (64k) @@ -1281,20 +1313,17 @@ class TiffImageFile(ImageFile.ImageFile): # we're expecting image byte order. So, if the rawmode # contains I;16, we need to convert from native to image # byte order. 
- if rawmode == 'I;16': - rawmode = 'I;16N' - if ';16B' in rawmode: - rawmode = rawmode.replace(';16B', ';16N') - if ';16L' in rawmode: - rawmode = rawmode.replace(';16L', ';16N') + if rawmode == "I;16": + rawmode = "I;16N" + if ";16B" in rawmode: + rawmode = rawmode.replace(";16B", ";16N") + if ";16L" in rawmode: + rawmode = rawmode.replace(";16L", ";16N") # Offset in the tile tuple is 0, we go from 0,0 to # w,h, and we only do this once -- eds - a = (rawmode, self._compression, False) - self.tile.append( - (self._compression, - (0, 0, xsize, ysize), - 0, a)) + a = (rawmode, self._compression, False, self.tag_v2.offset) + self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a)) elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2: # striped image @@ -1323,9 +1352,13 @@ class TiffImageFile(ImageFile.ImageFile): a = (tile_rawmode, int(stride), 1) self.tile.append( - (self._compression, - (x, y, min(x+w, xsize), min(y+h, ysize)), - offset, a)) + ( + self._compression, + (x, y, min(x + w, xsize), min(y + h, ysize)), + offset, + a, + ) + ) x = x + w if x >= self.size[0]: x, y = 0, y + h @@ -1339,14 +1372,25 @@ class TiffImageFile(ImageFile.ImageFile): # Fix up info. if ICCPROFILE in self.tag_v2: - self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + self.info["icc_profile"] = self.tag_v2[ICCPROFILE] # fixup palette descriptor - if self.mode == "P": + if self.mode in ["P", "PA"]: palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) + self._tile_orientation = self.tag_v2.get(0x0112) + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + # # -------------------------------------------------------------------- @@ -1373,7 +1417,6 @@ SAVE_INFO = { "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), - "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), "I;16B": ("I;16B", MM, 1, 1, (16,), None), "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), @@ -1386,18 +1429,18 @@ def _save(im, fp, filename): try: rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] except KeyError: - raise IOError("cannot write mode %s as TIFF" % im.mode) + raise OSError("cannot write mode %s as TIFF" % im.mode) ifd = ImageFileDirectory_v2(prefix=prefix) - compression = im.encoderinfo.get('compression', im.info.get('compression')) + compression = im.encoderinfo.get("compression", im.info.get("compression")) if compression is None: - compression = 'raw' + compression = "raw" - libtiff = WRITE_LIBTIFF or compression != 'raw' + libtiff = WRITE_LIBTIFF or compression != "raw" # required for color libtiff images - ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + ifd[PLANAR_CONFIGURATION] = getattr(im, "_planar_configuration", 1) ifd[IMAGEWIDTH] = im.size[0] ifd[IMAGELENGTH] = im.size[1] @@ -1412,15 +1455,21 @@ def _save(im, fp, filename): ifd[key] = info.get(key) try: ifd.tagtype[key] = info.tagtype[key] - except: - pass # might not be an IFD, Might not have populated type + except Exception: + pass # might not be an IFD. 
Might not have populated type # additions written by Greg Couch, gregc@cgl.ucsf.edu # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com - if hasattr(im, 'tag_v2'): + if hasattr(im, "tag_v2"): # preserve tags from original TIFF image file - for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, - IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + for key in ( + RESOLUTION_UNIT, + X_RESOLUTION, + Y_RESOLUTION, + IPTC_NAA_CHUNK, + PHOTOSHOP_CHUNK, + XMP, + ): if key in im.tag_v2: ifd[key] = im.tag_v2[key] ifd.tagtype[key] = im.tag_v2.tagtype[key] @@ -1430,24 +1479,26 @@ def _save(im, fp, filename): if "icc_profile" in im.info: ifd[ICCPROFILE] = im.info["icc_profile"] - for key, name in [(IMAGEDESCRIPTION, "description"), - (X_RESOLUTION, "resolution"), - (Y_RESOLUTION, "resolution"), - (X_RESOLUTION, "x_resolution"), - (Y_RESOLUTION, "y_resolution"), - (RESOLUTION_UNIT, "resolution_unit"), - (SOFTWARE, "software"), - (DATE_TIME, "date_time"), - (ARTIST, "artist"), - (COPYRIGHT, "copyright")]: + for key, name in [ + (IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright"), + ]: if name in im.encoderinfo: ifd[key] = im.encoderinfo[name] dpi = im.encoderinfo.get("dpi") if dpi: ifd[RESOLUTION_UNIT] = 2 - ifd[X_RESOLUTION] = dpi[0] - ifd[Y_RESOLUTION] = dpi[1] + ifd[X_RESOLUTION] = int(dpi[0] + 0.5) + ifd[Y_RESOLUTION] = int(dpi[1] + 0.5) if bits != (1,): ifd[BITSPERSAMPLE] = bits @@ -1460,11 +1511,11 @@ def _save(im, fp, filename): ifd[PHOTOMETRIC_INTERPRETATION] = photo - if im.mode == "P": + if im.mode in ["P", "PA"]: lut = im.im.getpalette("RGB", "RGB;L") ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) # data orientation - stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8) ifd[ROWSPERSTRIP] = im.size[1] ifd[STRIPBYTECOUNTS] = stride * im.size[1] ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer @@ -1472,6 +1523,16 @@ def _save(im, fp, filename): ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) if libtiff: + if "quality" in im.encoderinfo: + quality = im.encoderinfo["quality"] + if not isinstance(quality, int) or quality < 0 or quality > 100: + raise ValueError("Invalid quality setting") + if compression != "jpeg": + raise ValueError( + "quality setting only supported for 'jpeg' compression" + ) + ifd[JPEGQUALITY] = quality + if DEBUG: print("Saving using libtiff encoder") print("Items: %s" % sorted(ifd.items())) @@ -1483,9 +1544,24 @@ def _save(im, fp, filename): except io.UnsupportedOperation: pass + # optional types for non core tags + types = {} + # SAMPLEFORMAT is determined by the image format and should not be copied + # from legacy_ifd. # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library # based on the data in the strip. - blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + # The other tags expect arrays with a certain length (fixed or depending on + # BITSPERSAMPLE, etc), passing arrays with a different length will result in + # segfaults. Block these tags until we add extra validation. + blocklist = [ + COLORMAP, + REFERENCEBLACKWHITE, + SAMPLEFORMAT, + STRIPBYTECOUNTS, + STRIPOFFSETS, + TRANSFERFUNCTION, + ] + atts = {} # bits per sample is a single short in the tiff directory, not a list. 
atts[BITSPERSAMPLE] = bits[0] @@ -1493,21 +1569,29 @@ def _save(im, fp, filename): # the original file, e.g x,y resolution so that we can # save(load('')) == original file. legacy_ifd = {} - if hasattr(im, 'tag'): + if hasattr(im, "tag"): legacy_ifd = im.tag.to_v2() - for tag, value in itertools.chain(ifd.items(), - getattr(im, 'tag_v2', {}).items(), - legacy_ifd.items()): + for tag, value in itertools.chain( + ifd.items(), getattr(im, "tag_v2", {}).items(), legacy_ifd.items() + ): # Libtiff can only process certain core items without adding - # them to the custom dictionary. It will segfault if it attempts - # to add a custom tag without the dictionary entry - # - # UNDONE -- add code for the custom dictionary + # them to the custom dictionary. + # Custom items are supported for int, float, unicode, string and byte + # values. Other types and tuples require a tagtype. if tag not in TiffTags.LIBTIFF_CORE: - continue + if ( + TiffTags.lookup(tag).type == TiffTags.UNDEFINED + or not Image.core.libtiff_support_custom_tags + ): + continue + + if tag in ifd.tagtype: + types[tag] = ifd.tagtype[tag] + elif not (isinstance(value, (int, float, str, bytes))): + continue if tag not in atts and tag not in blocklist: - if isinstance(value, str if py3 else unicode): - atts[tag] = value.encode('ascii', 'replace') + b"\0" + if isinstance(value, str): + atts[tag] = value.encode("ascii", "replace") + b"\0" elif isinstance(value, IFDRational): atts[tag] = float(value) else: @@ -1520,28 +1604,33 @@ def _save(im, fp, filename): # we're storing image byte order. So, if the rawmode # contains I;16, we need to convert from native to image # byte order. - if im.mode in ('I;16B', 'I;16'): - rawmode = 'I;16N' + if im.mode in ("I;16B", "I;16"): + rawmode = "I;16N" - a = (rawmode, compression, _fp, filename, atts) - e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) - e.setimage(im.im, (0, 0)+im.size) + # Pass tags as sorted list so that the tags are set in a fixed order. + # This is required by libtiff for some tags. For example, the JPEGQUALITY + # pseudo tag requires that the COMPRESS tag was already set. 
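# The JPEGQUALITY pseudo-tag handling above, plus the tag-ordering comment
# just before this point, surface as the "quality" keyword when saving
# through libtiff. A hedged sketch, assuming Pillow was built with libtiff
# support (file name hypothetical):
from PIL import Image

im = Image.new("RGB", (16, 16))
im.save("photo.tif", compression="jpeg", quality=85)  # ok: int in 0..100
# im.save("photo.tif", quality=85)          # ValueError: needs "jpeg"
# im.save("photo.tif", compression="jpeg",
#         quality="high")                   # ValueError: not an int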
+ tags = list(atts.items()) + tags.sort() + a = (rawmode, compression, _fp, filename, tags, types) + e = Image._getencoder(im.mode, "libtiff", a, im.encoderconfig) + e.setimage(im.im, (0, 0) + im.size) while True: # undone, change to self.decodermaxblock: - l, s, d = e.encode(16*1024) + l, s, d = e.encode(16 * 1024) if not _fp: fp.write(d) if s: break if s < 0: - raise IOError("encoder error %d when writing image file" % s) + raise OSError("encoder error %d when writing image file" % s) else: offset = ifd.save(fp) - ImageFile._save(im, fp, [ - ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) - ]) + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))] + ) # -- helper for multi-page save -- if "_debug_multipage" in im.encoderinfo: @@ -1575,16 +1664,16 @@ class AppendingTiffWriter: Tags = {273, 288, 324, 519, 520, 521} def __init__(self, fn, new=False): - if hasattr(fn, 'read'): + if hasattr(fn, "read"): self.f = fn self.close_fp = False else: self.name = fn self.close_fp = True try: - self.f = io.open(fn, "w+b" if new else "r+b") - except IOError: - self.f = io.open(fn, "w+b") + self.f = open(fn, "w+b" if new else "r+b") + except OSError: + self.f = open(fn, "w+b") self.beginning = self.f.tell() self.setup() @@ -1626,8 +1715,7 @@ class AppendingTiffWriter: return if IIMM != self.IIMM: - raise RuntimeError("IIMM of new page doesn't match IIMM of " - "first page") + raise RuntimeError("IIMM of new page doesn't match IIMM of first page") IFDoffset = self.readLong() IFDoffset += self.offsetOfNewPage @@ -1652,7 +1740,7 @@ class AppendingTiffWriter: def tell(self): return self.f.tell() - self.offsetOfNewPage - def seek(self, offset, whence): + def seek(self, offset, whence=io.SEEK_SET): if whence == os.SEEK_SET: offset += self.offsetOfNewPage @@ -1666,7 +1754,7 @@ class AppendingTiffWriter: # pad to 16 byte boundary padBytes = 16 - pos % 16 if 0 < padBytes < 16: - self.f.write(bytes(bytearray(padBytes))) + self.f.write(bytes(padBytes)) self.offsetOfNewPage = self.f.tell() def setEndian(self, endian): @@ -1690,45 +1778,40 @@ class AppendingTiffWriter: return self.f.write(data) def readShort(self): - value, = struct.unpack(self.shortFmt, self.f.read(2)) + (value,) = struct.unpack(self.shortFmt, self.f.read(2)) return value def readLong(self): - value, = struct.unpack(self.longFmt, self.f.read(4)) + (value,) = struct.unpack(self.longFmt, self.f.read(4)) return value def rewriteLastShortToLong(self, value): self.f.seek(-2, os.SEEK_CUR) bytesWritten = self.f.write(struct.pack(self.longFmt, value)) if bytesWritten is not None and bytesWritten != 4: - raise RuntimeError("wrote only %u bytes but wanted 4" % - bytesWritten) + raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten) def rewriteLastShort(self, value): self.f.seek(-2, os.SEEK_CUR) bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) if bytesWritten is not None and bytesWritten != 2: - raise RuntimeError("wrote only %u bytes but wanted 2" % - bytesWritten) + raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten) def rewriteLastLong(self, value): self.f.seek(-4, os.SEEK_CUR) bytesWritten = self.f.write(struct.pack(self.longFmt, value)) if bytesWritten is not None and bytesWritten != 4: - raise RuntimeError("wrote only %u bytes but wanted 4" % - bytesWritten) + raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten) def writeShort(self, value): bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) if bytesWritten is not None and bytesWritten != 2: - 
raise RuntimeError("wrote only %u bytes but wanted 2" % - bytesWritten) + raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten) def writeLong(self, value): bytesWritten = self.f.write(struct.pack(self.longFmt, value)) if bytesWritten is not None and bytesWritten != 4: - raise RuntimeError("wrote only %u bytes but wanted 4" % - bytesWritten) + raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten) def close(self): self.finalize() @@ -1738,12 +1821,11 @@ class AppendingTiffWriter: numTags = self.readShort() for i in range(numTags): - tag, fieldType, count = struct.unpack(self.tagFormat, - self.f.read(8)) + tag, fieldType, count = struct.unpack(self.tagFormat, self.f.read(8)) fieldSize = self.fieldSizes[fieldType] totalSize = fieldSize * count - isLocal = (totalSize <= 4) + isLocal = totalSize <= 4 if not isLocal: offset = self.readLong() offset += self.offsetOfNewPage @@ -1753,13 +1835,15 @@ class AppendingTiffWriter: curPos = self.f.tell() if isLocal: - self.fixOffsets(count, isShort=(fieldSize == 2), - isLong=(fieldSize == 4)) + self.fixOffsets( + count, isShort=(fieldSize == 2), isLong=(fieldSize == 4) + ) self.f.seek(curPos + 4) else: self.f.seek(offset) - self.fixOffsets(count, isShort=(fieldSize == 2), - isLong=(fieldSize == 4)) + self.fixOffsets( + count, isShort=(fieldSize == 2), isLong=(fieldSize == 4) + ) self.f.seek(curPos) offset = curPos = None @@ -1784,7 +1868,7 @@ class AppendingTiffWriter: # local (not referenced with another offset) self.rewriteLastShortToLong(offset) self.f.seek(-10, os.SEEK_CUR) - self.writeShort(4) # rewrite the type to LONG + self.writeShort(TiffTags.LONG) # rewrite the type to LONG self.f.seek(8, os.SEEK_CUR) elif isShort: self.rewriteLastShort(offset) @@ -1802,7 +1886,7 @@ def _save_all(im, fp, filename): cur_idx = im.tell() try: with AppendingTiffWriter(fp) as tf: - for ims in [im]+append_images: + for ims in [im] + append_images: ims.encoderinfo = encoderinfo ims.encoderconfig = encoderconfig if not hasattr(ims, "n_frames"): diff --git a/server/www/packages/packages-linux/x64/PIL/TiffTags.py b/server/www/packages/packages-linux/x64/PIL/TiffTags.py index c1e14af..6cc9ff7 100644 --- a/server/www/packages/packages-linux/x64/PIL/TiffTags.py +++ b/server/www/packages/packages-linux/x64/PIL/TiffTags.py @@ -23,13 +23,14 @@ from collections import namedtuple class TagInfo(namedtuple("_TagInfo", "value name type length enum")): __slots__ = [] - def __new__(cls, value=None, name="unknown", - type=None, length=None, enum=None): - return super(TagInfo, cls).__new__( - cls, value, name, type, length, enum or {}) + def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None): + return super().__new__(cls, value, name, type, length, enum or {}) def cvt_enum(self, value): - return self.enum.get(value, value) + # Using get will call hash(value), which can be expensive + # for some types (e.g. Fraction). Since self.enum is rarely + # used, it's usually better to test it first. 
+ return self.enum.get(value, value) if self.enum else value def lookup(tag): @@ -41,7 +42,7 @@ def lookup(tag): """ - return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, "unknown"))) ## @@ -61,32 +62,56 @@ ASCII = 2 SHORT = 3 LONG = 4 RATIONAL = 5 +SIGNED_BYTE = 6 UNDEFINED = 7 +SIGNED_SHORT = 8 +SIGNED_LONG = 9 SIGNED_RATIONAL = 10 +FLOAT = 11 DOUBLE = 12 TAGS_V2 = { - 254: ("NewSubfileType", LONG, 1), 255: ("SubfileType", SHORT, 1), 256: ("ImageWidth", LONG, 1), 257: ("ImageLength", LONG, 1), 258: ("BitsPerSample", SHORT, 0), - 259: ("Compression", SHORT, 1, - {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, - "Group 4 Fax": 4, "LZW": 5, "JPEG": 6, "PackBits": 32773}), - - 262: ("PhotometricInterpretation", SHORT, 1, - {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3, - "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, - "CFA": 32803, # TIFF/EP, Adobe DNG - "LinearRaw": 32892}), # Adobe DNG + 259: ( + "Compression", + SHORT, + 1, + { + "Uncompressed": 1, + "CCITT 1d": 2, + "Group 3 Fax": 3, + "Group 4 Fax": 4, + "LZW": 5, + "JPEG": 6, + "PackBits": 32773, + }, + ), + 262: ( + "PhotometricInterpretation", + SHORT, + 1, + { + "WhiteIsZero": 0, + "BlackIsZero": 1, + "RGB": 2, + "RGB Palette": 3, + "Transparency Mask": 4, + "CMYK": 5, + "YCbCr": 6, + "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892, # Adobe DNG + }, + ), 263: ("Threshholding", SHORT, 1), 264: ("CellWidth", SHORT, 1), 265: ("CellLength", SHORT, 1), 266: ("FillOrder", SHORT, 1), 269: ("DocumentName", ASCII, 1), - 270: ("ImageDescription", ASCII, 1), 271: ("Make", ASCII, 1), 272: ("Model", ASCII, 1), @@ -95,8 +120,7 @@ TAGS_V2 = { 277: ("SamplesPerPixel", SHORT, 1), 278: ("RowsPerStrip", LONG, 1), 279: ("StripByteCounts", LONG, 0), - - 280: ("MinSampleValue", LONG, 0), + 280: ("MinSampleValue", SHORT, 0), 281: ("MaxSampleValue", SHORT, 0), 282: ("XResolution", RATIONAL, 1), 283: ("YResolution", RATIONAL, 1), @@ -106,31 +130,26 @@ TAGS_V2 = { 287: ("YPosition", RATIONAL, 1), 288: ("FreeOffsets", LONG, 1), 289: ("FreeByteCounts", LONG, 1), - 290: ("GrayResponseUnit", SHORT, 1), 291: ("GrayResponseCurve", SHORT, 0), 292: ("T4Options", LONG, 1), 293: ("T6Options", LONG, 1), 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), 297: ("PageNumber", SHORT, 2), - 301: ("TransferFunction", SHORT, 0), 305: ("Software", ASCII, 1), 306: ("DateTime", ASCII, 1), - 315: ("Artist", ASCII, 1), 316: ("HostComputer", ASCII, 1), 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), 318: ("WhitePoint", RATIONAL, 2), 319: ("PrimaryChromaticities", RATIONAL, 6), - 320: ("ColorMap", SHORT, 0), 321: ("HalftoneHints", SHORT, 2), 322: ("TileWidth", LONG, 1), 323: ("TileLength", LONG, 1), 324: ("TileOffsets", LONG, 0), 325: ("TileByteCounts", LONG, 0), - 332: ("InkSet", SHORT, 1), 333: ("InkNames", ASCII, 1), 334: ("NumberOfInks", SHORT, 1), @@ -138,13 +157,10 @@ TAGS_V2 = { 337: ("TargetPrinter", ASCII, 1), 338: ("ExtraSamples", SHORT, 0), 339: ("SampleFormat", SHORT, 0), - 340: ("SMinSampleValue", DOUBLE, 0), 341: ("SMaxSampleValue", DOUBLE, 0), 342: ("TransferRange", SHORT, 6), - 347: ("JPEGTables", UNDEFINED, 1), - # obsolete JPEG tags 512: ("JPEGProc", SHORT, 1), 513: ("JPEGInterchangeFormat", LONG, 1), @@ -155,22 +171,18 @@ TAGS_V2 = { 519: ("JPEGQTables", LONG, 0), 520: ("JPEGDCTables", LONG, 0), 521: ("JPEGACTables", LONG, 0), - 529: ("YCbCrCoefficients", RATIONAL, 3), 530: ("YCbCrSubSampling", 
SHORT, 2), 531: ("YCbCrPositioning", SHORT, 1), 532: ("ReferenceBlackWhite", RATIONAL, 6), - - 700: ('XMP', BYTE, 1), - + 700: ("XMP", BYTE, 0), 33432: ("Copyright", ASCII, 1), - 34377: ('PhotoshopInfo', BYTE, 1), - + 33723: ("IptcNaaInfo", UNDEFINED, 0), + 34377: ("PhotoshopInfo", BYTE, 0), # FIXME add more tags here - 34665: ("ExifIFD", SHORT, 1), - 34675: ('ICCProfile', UNDEFINED, 1), - 34853: ('GPSInfoIFD', BYTE, 1), - + 34665: ("ExifIFD", LONG, 1), + 34675: ("ICCProfile", UNDEFINED, 1), + 34853: ("GPSInfoIFD", LONG, 1), # MPInfo 45056: ("MPFVersion", UNDEFINED, 1), 45057: ("NumberOfImages", LONG, 1), @@ -191,159 +203,157 @@ TAGS_V2 = { 45579: ("YawAngle", SIGNED_RATIONAL, 1), 45580: ("PitchAngle", SIGNED_RATIONAL, 1), 45581: ("RollAngle", SIGNED_RATIONAL, 1), - 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), 50780: ("BestQualityScale", RATIONAL, 1), 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one - 50839: ("ImageJMetaData", UNDEFINED, 1) # see Issue #2006 + 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006 } # Legacy Tags structure # these tags aren't included above, but were in the previous versions -TAGS = {347: 'JPEGTables', - 700: 'XMP', - - # Additional Exif Info - 32932: 'Wang Annotation', - 33434: 'ExposureTime', - 33437: 'FNumber', - 33445: 'MD FileTag', - 33446: 'MD ScalePixel', - 33447: 'MD ColorTable', - 33448: 'MD LabName', - 33449: 'MD SampleInfo', - 33450: 'MD PrepDate', - 33451: 'MD PrepTime', - 33452: 'MD FileUnits', - 33550: 'ModelPixelScaleTag', - 33723: 'IptcNaaInfo', - 33918: 'INGR Packet Data Tag', - 33919: 'INGR Flag Registers', - 33920: 'IrasB Transformation Matrix', - 33922: 'ModelTiepointTag', - 34264: 'ModelTransformationTag', - 34377: 'PhotoshopInfo', - 34735: 'GeoKeyDirectoryTag', - 34736: 'GeoDoubleParamsTag', - 34737: 'GeoAsciiParamsTag', - 34850: 'ExposureProgram', - 34852: 'SpectralSensitivity', - 34855: 'ISOSpeedRatings', - 34856: 'OECF', - 34864: 'SensitivityType', - 34865: 'StandardOutputSensitivity', - 34866: 'RecommendedExposureIndex', - 34867: 'ISOSpeed', - 34868: 'ISOSpeedLatitudeyyy', - 34869: 'ISOSpeedLatitudezzz', - 34908: 'HylaFAX FaxRecvParams', - 34909: 'HylaFAX FaxSubAddress', - 34910: 'HylaFAX FaxRecvTime', - 36864: 'ExifVersion', - 36867: 'DateTimeOriginal', - 36868: 'DateTImeDigitized', - 37121: 'ComponentsConfiguration', - 37122: 'CompressedBitsPerPixel', - 37724: 'ImageSourceData', - 37377: 'ShutterSpeedValue', - 37378: 'ApertureValue', - 37379: 'BrightnessValue', - 37380: 'ExposureBiasValue', - 37381: 'MaxApertureValue', - 37382: 'SubjectDistance', - 37383: 'MeteringMode', - 37384: 'LightSource', - 37385: 'Flash', - 37386: 'FocalLength', - 37396: 'SubjectArea', - 37500: 'MakerNote', - 37510: 'UserComment', - 37520: 'SubSec', - 37521: 'SubSecTimeOriginal', - 37522: 'SubsecTimeDigitized', - 40960: 'FlashPixVersion', - 40961: 'ColorSpace', - 40962: 'PixelXDimension', - 40963: 'PixelYDimension', - 40964: 'RelatedSoundFile', - 40965: 'InteroperabilityIFD', - 41483: 'FlashEnergy', - 41484: 'SpatialFrequencyResponse', - 41486: 'FocalPlaneXResolution', - 41487: 'FocalPlaneYResolution', - 41488: 'FocalPlaneResolutionUnit', - 41492: 'SubjectLocation', - 41493: 'ExposureIndex', - 41495: 'SensingMethod', - 41728: 'FileSource', - 41729: 'SceneType', - 41730: 'CFAPattern', - 41985: 'CustomRendered', - 41986: 'ExposureMode', - 41987: 'WhiteBalance', - 41988: 'DigitalZoomRatio', - 41989: 'FocalLengthIn35mmFilm', - 41990: 'SceneCaptureType', - 41991: 'GainControl', - 41992: 'Contrast', - 41993: 
'Saturation', - 41994: 'Sharpness', - 41995: 'DeviceSettingDescription', - 41996: 'SubjectDistanceRange', - 42016: 'ImageUniqueID', - 42032: 'CameraOwnerName', - 42033: 'BodySerialNumber', - 42034: 'LensSpecification', - 42035: 'LensMake', - 42036: 'LensModel', - 42037: 'LensSerialNumber', - 42112: 'GDAL_METADATA', - 42113: 'GDAL_NODATA', - 42240: 'Gamma', - 50215: 'Oce Scanjob Description', - 50216: 'Oce Application Selector', - 50217: 'Oce Identification Number', - 50218: 'Oce ImageLogic Characteristics', - - # Adobe DNG - 50706: 'DNGVersion', - 50707: 'DNGBackwardVersion', - 50708: 'UniqueCameraModel', - 50709: 'LocalizedCameraModel', - 50710: 'CFAPlaneColor', - 50711: 'CFALayout', - 50712: 'LinearizationTable', - 50713: 'BlackLevelRepeatDim', - 50714: 'BlackLevel', - 50715: 'BlackLevelDeltaH', - 50716: 'BlackLevelDeltaV', - 50717: 'WhiteLevel', - 50718: 'DefaultScale', - 50719: 'DefaultCropOrigin', - 50720: 'DefaultCropSize', - 50721: 'ColorMatrix1', - 50722: 'ColorMatrix2', - 50723: 'CameraCalibration1', - 50724: 'CameraCalibration2', - 50725: 'ReductionMatrix1', - 50726: 'ReductionMatrix2', - 50727: 'AnalogBalance', - 50728: 'AsShotNeutral', - 50729: 'AsShotWhiteXY', - 50730: 'BaselineExposure', - 50731: 'BaselineNoise', - 50732: 'BaselineSharpness', - 50733: 'BayerGreenSplit', - 50734: 'LinearResponseLimit', - 50735: 'CameraSerialNumber', - 50736: 'LensInfo', - 50737: 'ChromaBlurRadius', - 50738: 'AntiAliasStrength', - 50740: 'DNGPrivateData', - 50778: 'CalibrationIlluminant1', - 50779: 'CalibrationIlluminant2', - 50784: 'Alias Layer Metadata' - } +TAGS = { + 347: "JPEGTables", + 700: "XMP", + # Additional Exif Info + 32932: "Wang Annotation", + 33434: "ExposureTime", + 33437: "FNumber", + 33445: "MD FileTag", + 33446: "MD ScalePixel", + 33447: "MD ColorTable", + 33448: "MD LabName", + 33449: "MD SampleInfo", + 33450: "MD PrepDate", + 33451: "MD PrepTime", + 33452: "MD FileUnits", + 33550: "ModelPixelScaleTag", + 33723: "IptcNaaInfo", + 33918: "INGR Packet Data Tag", + 33919: "INGR Flag Registers", + 33920: "IrasB Transformation Matrix", + 33922: "ModelTiepointTag", + 34264: "ModelTransformationTag", + 34377: "PhotoshopInfo", + 34735: "GeoKeyDirectoryTag", + 34736: "GeoDoubleParamsTag", + 34737: "GeoAsciiParamsTag", + 34850: "ExposureProgram", + 34852: "SpectralSensitivity", + 34855: "ISOSpeedRatings", + 34856: "OECF", + 34864: "SensitivityType", + 34865: "StandardOutputSensitivity", + 34866: "RecommendedExposureIndex", + 34867: "ISOSpeed", + 34868: "ISOSpeedLatitudeyyy", + 34869: "ISOSpeedLatitudezzz", + 34908: "HylaFAX FaxRecvParams", + 34909: "HylaFAX FaxSubAddress", + 34910: "HylaFAX FaxRecvTime", + 36864: "ExifVersion", + 36867: "DateTimeOriginal", + 36868: "DateTImeDigitized", + 37121: "ComponentsConfiguration", + 37122: "CompressedBitsPerPixel", + 37724: "ImageSourceData", + 37377: "ShutterSpeedValue", + 37378: "ApertureValue", + 37379: "BrightnessValue", + 37380: "ExposureBiasValue", + 37381: "MaxApertureValue", + 37382: "SubjectDistance", + 37383: "MeteringMode", + 37384: "LightSource", + 37385: "Flash", + 37386: "FocalLength", + 37396: "SubjectArea", + 37500: "MakerNote", + 37510: "UserComment", + 37520: "SubSec", + 37521: "SubSecTimeOriginal", + 37522: "SubsecTimeDigitized", + 40960: "FlashPixVersion", + 40961: "ColorSpace", + 40962: "PixelXDimension", + 40963: "PixelYDimension", + 40964: "RelatedSoundFile", + 40965: "InteroperabilityIFD", + 41483: "FlashEnergy", + 41484: "SpatialFrequencyResponse", + 41486: "FocalPlaneXResolution", + 41487: "FocalPlaneYResolution", + 
41488: "FocalPlaneResolutionUnit", + 41492: "SubjectLocation", + 41493: "ExposureIndex", + 41495: "SensingMethod", + 41728: "FileSource", + 41729: "SceneType", + 41730: "CFAPattern", + 41985: "CustomRendered", + 41986: "ExposureMode", + 41987: "WhiteBalance", + 41988: "DigitalZoomRatio", + 41989: "FocalLengthIn35mmFilm", + 41990: "SceneCaptureType", + 41991: "GainControl", + 41992: "Contrast", + 41993: "Saturation", + 41994: "Sharpness", + 41995: "DeviceSettingDescription", + 41996: "SubjectDistanceRange", + 42016: "ImageUniqueID", + 42032: "CameraOwnerName", + 42033: "BodySerialNumber", + 42034: "LensSpecification", + 42035: "LensMake", + 42036: "LensModel", + 42037: "LensSerialNumber", + 42112: "GDAL_METADATA", + 42113: "GDAL_NODATA", + 42240: "Gamma", + 50215: "Oce Scanjob Description", + 50216: "Oce Application Selector", + 50217: "Oce Identification Number", + 50218: "Oce ImageLogic Characteristics", + # Adobe DNG + 50706: "DNGVersion", + 50707: "DNGBackwardVersion", + 50708: "UniqueCameraModel", + 50709: "LocalizedCameraModel", + 50710: "CFAPlaneColor", + 50711: "CFALayout", + 50712: "LinearizationTable", + 50713: "BlackLevelRepeatDim", + 50714: "BlackLevel", + 50715: "BlackLevelDeltaH", + 50716: "BlackLevelDeltaV", + 50717: "WhiteLevel", + 50718: "DefaultScale", + 50719: "DefaultCropOrigin", + 50720: "DefaultCropSize", + 50721: "ColorMatrix1", + 50722: "ColorMatrix2", + 50723: "CameraCalibration1", + 50724: "CameraCalibration2", + 50725: "ReductionMatrix1", + 50726: "ReductionMatrix2", + 50727: "AnalogBalance", + 50728: "AsShotNeutral", + 50729: "AsShotWhiteXY", + 50730: "BaselineExposure", + 50731: "BaselineNoise", + 50732: "BaselineSharpness", + 50733: "BayerGreenSplit", + 50734: "LinearResponseLimit", + 50735: "CameraSerialNumber", + 50736: "LensInfo", + 50737: "ChromaBlurRadius", + 50738: "AntiAliasStrength", + 50740: "DNGPrivateData", + 50778: "CalibrationIlluminant1", + 50779: "CalibrationIlluminant2", + 50784: "Alias Layer Metadata", +} def _populate(): @@ -423,15 +433,55 @@ TYPES = {} # 389: case TIFFTAG_REFERENCEBLACKWHITE: # 393: case TIFFTAG_INKNAMES: +# Following pseudo-tags are also handled by default in libtiff: +# TIFFTAG_JPEGQUALITY 65537 + # some of these are not in our TAGS_V2 dict and were included from tiff.h -LIBTIFF_CORE = {255, 256, 257, 258, 259, 262, 263, 266, 274, 277, - 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, - 296, 297, 321, 320, 338, 32995, 322, 323, 32998, - 32996, 339, 32997, 330, 531, 530, 301, 532, 333, - # as above - 269 # this has been in our tests forever, and works - } +# This list also exists in encode.c +LIBTIFF_CORE = { + 255, + 256, + 257, + 258, + 259, + 262, + 263, + 266, + 274, + 277, + 278, + 280, + 281, + 340, + 341, + 282, + 283, + 284, + 286, + 287, + 296, + 297, + 321, + 320, + 338, + 32995, + 322, + 323, + 32998, + 32996, + 339, + 32997, + 330, + 531, + 530, + 301, + 532, + 333, + # as above + 269, # this has been in our tests forever, and works + 65537, +} LIBTIFF_CORE.remove(320) # Array of short, crashes LIBTIFF_CORE.remove(301) # Array of short, crashes diff --git a/server/www/packages/packages-linux/x64/PIL/WalImageFile.py b/server/www/packages/packages-linux/x64/PIL/WalImageFile.py index 6602cc8..d5a5c8e 100644 --- a/server/www/packages/packages-linux/x64/PIL/WalImageFile.py +++ b/server/www/packages/packages-linux/x64/PIL/WalImageFile.py @@ -1,4 +1,3 @@ -# encoding: utf-8 # # The Python Imaging Library. 
# $Id$ @@ -21,15 +20,11 @@ # https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml # and has been tested with a few sample files found using google. +import builtins + from . import Image from ._binary import i32le as i32 -try: - import builtins -except ImportError: - import __builtin__ - builtins = __builtin__ - def open(filename): """ @@ -46,7 +41,7 @@ def open(filename): def imopen(fp): # read header fields - header = fp.read(32+24+32+12) + header = fp.read(32 + 24 + 32 + 12) size = i32(header, 32), i32(header, 36) offset = i32(header, 40) @@ -62,7 +57,7 @@ def open(filename): # strings are null-terminated im.info["name"] = header[:32].split(b"\0", 1)[0] - next_name = header[56:56+32].split(b"\0", 1)[0] + next_name = header[56 : 56 + 32].split(b"\0", 1)[0] if next_name: im.info["next_name"] = next_name diff --git a/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py index e6485c2..eda6855 100644 --- a/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py @@ -1,28 +1,24 @@ -from . import Image, ImageFile -try: - from . import _webp - SUPPORTED = True -except ImportError as e: - SUPPORTED = False from io import BytesIO +from . import Image, ImageFile -_VALID_WEBP_MODES = { - "RGBX": True, - "RGBA": True, - "RGB": True, - } +try: + from . import _webp -_VALID_WEBP_LEGACY_MODES = { - "RGB": True, - "RGBA": True, - } + SUPPORTED = True +except ImportError: + SUPPORTED = False + + +_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True} + +_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True} _VP8_MODES_BY_IDENTIFIER = { b"VP8 ": "RGB", b"VP8X": "RGBA", b"VP8L": "RGBA", # lossless - } +} def _accept(prefix): @@ -32,7 +28,9 @@ def _accept(prefix): if is_riff_file_format and is_webp_file and is_valid_vp8_mode: if not SUPPORTED: - return "image file could not be identified because WEBP support not installed" + return ( + "image file could not be identified because WEBP support not installed" + ) return True @@ -44,8 +42,9 @@ class WebPImageFile(ImageFile.ImageFile): def _open(self): if not _webp.HAVE_WEBPANIM: # Legacy mode - data, width, height, self.mode, icc_profile, exif = \ - _webp.WebPDecode(self.fp.read()) + data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode( + self.fp.read() + ) if icc_profile: self.info["icc_profile"] = icc_profile if exif: @@ -61,18 +60,18 @@ class WebPImageFile(ImageFile.ImageFile): self._decoder = _webp.WebPAnimDecoder(self.fp.read()) # Get info from decoder - width, height, loop_count, bgcolor, frame_count, mode = \ - self._decoder.get_info() + width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info() self._size = width, height self.info["loop"] = loop_count - bg_a, bg_r, bg_g, bg_b = \ - (bgcolor >> 24) & 0xFF, \ - (bgcolor >> 16) & 0xFF, \ - (bgcolor >> 8) & 0xFF, \ - bgcolor & 0xFF + bg_a, bg_r, bg_g, bg_b = ( + (bgcolor >> 24) & 0xFF, + (bgcolor >> 16) & 0xFF, + (bgcolor >> 8) & 0xFF, + bgcolor & 0xFF, + ) self.info["background"] = (bg_r, bg_g, bg_b, bg_a) self._n_frames = frame_count - self.mode = 'RGB' if mode == 'RGBX' else mode + self.mode = "RGB" if mode == "RGBX" else mode self.rawmode = mode self.tile = [] @@ -92,8 +91,9 @@ class WebPImageFile(ImageFile.ImageFile): self.seek(0) def _getexif(self): - from .JpegImagePlugin import _getexif - return _getexif(self) + if "exif" not in self.info: + return None + return dict(self.getexif()) @property def 
n_frames(self): @@ -105,7 +105,7 @@ class WebPImageFile(ImageFile.ImageFile): def seek(self, frame): if not _webp.HAVE_WEBPANIM: - return super(WebPImageFile, self).seek(frame) + return super().seek(frame) # Perform some simple checks first if frame >= self._n_frames: @@ -130,7 +130,7 @@ class WebPImageFile(ImageFile.ImageFile): # Check if an error occurred if ret is None: - self._reset() # Reset just to be safe + self._reset() # Reset just to be safe self.seek(0) raise EOFError("failed to decode next frame in WebP file") @@ -145,11 +145,11 @@ class WebPImageFile(ImageFile.ImageFile): def _seek(self, frame): if self.__physical_frame == frame: - return # Nothing to do + return # Nothing to do if frame < self.__physical_frame: - self._reset() # Rewind to beginning + self._reset() # Rewind to beginning while self.__physical_frame < frame: - self._get_next() # Advance to the requested frame + self._get_next() # Advance to the requested frame def load(self): if _webp.HAVE_WEBPANIM: @@ -163,16 +163,16 @@ class WebPImageFile(ImageFile.ImageFile): self.__loaded = self.__logical_frame # Set tile - if self.fp: + if self.fp and self._exclusive_fp: self.fp.close() self.fp = BytesIO(data) self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] - return super(WebPImageFile, self).load() + return super().load() def tell(self): if not _webp.HAVE_WEBPANIM: - return super(WebPImageFile, self).tell() + return super().tell() return self.__logical_frame @@ -184,13 +184,25 @@ def _save_all(im, fp, filename): # If total frame count is 1, then save using the legacy API, which # will preserve non-alpha modes total = 0 - for ims in [im]+append_images: - total += 1 if not hasattr(ims, "n_frames") else ims.n_frames + for ims in [im] + append_images: + total += getattr(ims, "n_frames", 1) if total == 1: _save(im, fp, filename) return - background = encoderinfo.get("background", (0, 0, 0, 0)) + background = (0, 0, 0, 0) + if "background" in encoderinfo: + background = encoderinfo["background"] + elif "background" in im.info: + background = im.info["background"] + if isinstance(background, int): + # GifImagePlugin stores a global color table index in + # info["background"]. 
So it must be converted to an RGBA value + palette = im.getpalette() + if palette: + r, g, b = palette[background * 3 : (background + 1) * 3] + background = (r, g, b, 0) + duration = im.encoderinfo.get("duration", 0) loop = im.encoderinfo.get("loop", 0) minimize_size = im.encoderinfo.get("minimize_size", False) @@ -203,6 +215,8 @@ def _save_all(im, fp, filename): method = im.encoderinfo.get("method", 0) icc_profile = im.encoderinfo.get("icc_profile", "") exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() xmp = im.encoderinfo.get("xmp", "") if allow_mixed: lossless = False @@ -214,10 +228,15 @@ def _save_all(im, fp, filename): kmax = 17 if lossless else 5 # Validate background color - if (not isinstance(background, (list, tuple)) or len(background) != 4 or - not all(v >= 0 and v < 256 for v in background)): - raise IOError("Background color is not an RGBA tuple clamped " - "to (0-255): %s" % str(background)) + if ( + not isinstance(background, (list, tuple)) + or len(background) != 4 + or not all(v >= 0 and v < 256 for v in background) + ): + raise OSError( + "Background color is not an RGBA tuple clamped to (0-255): %s" + % str(background) + ) # Convert to packed uint bg_r, bg_g, bg_b, bg_a = background @@ -225,13 +244,15 @@ def _save_all(im, fp, filename): # Setup the WebP animation encoder enc = _webp.WebPAnimEncoder( - im.size[0], im.size[1], + im.size[0], + im.size[1], background, loop, minimize_size, - kmin, kmax, + kmin, + kmax, allow_mixed, - verbose + verbose, ) # Add each frame @@ -239,12 +260,9 @@ def _save_all(im, fp, filename): timestamp = 0 cur_idx = im.tell() try: - for ims in [im]+append_images: + for ims in [im] + append_images: # Get # of frames in this image - if not hasattr(ims, "n_frames"): - nfr = 1 - else: - nfr = ims.n_frames + nfr = getattr(ims, "n_frames", 1) for idx in range(nfr): ims.seek(idx) @@ -254,24 +272,28 @@ def _save_all(im, fp, filename): frame = ims rawmode = ims.mode if ims.mode not in _VALID_WEBP_MODES: - alpha = 'A' in ims.mode or 'a' in ims.mode \ - or (ims.mode == 'P' and 'A' in ims.im.getpalettemode()) - rawmode = 'RGBA' if alpha else 'RGB' + alpha = ( + "A" in ims.mode + or "a" in ims.mode + or (ims.mode == "P" and "A" in ims.im.getpalettemode()) + ) + rawmode = "RGBA" if alpha else "RGB" frame = ims.convert(rawmode) - if rawmode == 'RGB': + if rawmode == "RGB": # For faster conversion, use RGBX - rawmode = 'RGBX' + rawmode = "RGBX" # Append the frame to the animation encoder enc.add( - frame.tobytes('raw', rawmode), + frame.tobytes("raw", rawmode), timestamp, - frame.size[0], frame.size[1], + frame.size[0], + frame.size[1], rawmode, lossless, quality, - method + method, ) # Update timestamp and frame index @@ -285,16 +307,12 @@ def _save_all(im, fp, filename): im.seek(cur_idx) # Force encoder to flush frames - enc.add( - None, - timestamp, - 0, 0, "", lossless, quality, 0 - ) + enc.add(None, timestamp, 0, 0, "", lossless, quality, 0) # Get the final output from the encoder data = enc.assemble(icc_profile, exif, xmp) if data is None: - raise IOError("cannot write file as WebP (encoder returned None)") + raise OSError("cannot write file as WebP (encoder returned None)") fp.write(data) @@ -304,12 +322,17 @@ def _save(im, fp, filename): quality = im.encoderinfo.get("quality", 80) icc_profile = im.encoderinfo.get("icc_profile", "") exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() xmp = im.encoderinfo.get("xmp", "") if im.mode not in _VALID_WEBP_LEGACY_MODES: 
- alpha = 'A' in im.mode or 'a' in im.mode \ - or (im.mode == 'P' and 'A' in im.im.getpalettemode()) - im = im.convert('RGBA' if alpha else 'RGB') + alpha = ( + "A" in im.mode + or "a" in im.mode + or (im.mode == "P" and "A" in im.im.getpalettemode()) + ) + im = im.convert("RGBA" if alpha else "RGB") data = _webp.WebPEncode( im.tobytes(), @@ -320,10 +343,10 @@ def _save(im, fp, filename): im.mode, icc_profile, exif, - xmp + xmp, ) if data is None: - raise IOError("cannot write file as WebP (encoder returned None)") + raise OSError("cannot write file as WebP (encoder returned None)") fp.write(data) diff --git a/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py index 81699bd..024222c 100644 --- a/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py @@ -19,21 +19,11 @@ # http://wvware.sourceforge.net/caolan/index.html # http://wvware.sourceforge.net/caolan/ora-wmf.html -from __future__ import print_function - from . import Image, ImageFile -from ._binary import i16le as word, si16le as short, \ - i32le as dword, si32le as _long -from ._util import py3 - - -__version__ = "0.2" +from ._binary import i16le as word, i32le as dword, si16le as short, si32le as _long _handler = None -if py3: - long = int - def register_handler(handler): """ @@ -48,8 +38,7 @@ def register_handler(handler): if hasattr(Image.core, "drawwmf"): # install default handler (windows only) - class WmfHandler(object): - + class WmfHandler: def open(self, im): im.mode = "RGB" self.bbox = im.info["wmf_bbox"] @@ -57,10 +46,14 @@ if hasattr(Image.core, "drawwmf"): def load(self, im): im.fp.seek(0) # rewind return Image.frombytes( - "RGB", im.size, + "RGB", + im.size, Image.core.drawwmf(im.fp.read(), im.size, self.bbox), - "raw", "BGR", (im.size[0]*3 + 3) & -4, -1 - ) + "raw", + "BGR", + (im.size[0] * 3 + 3) & -4, + -1, + ) register_handler(WmfHandler()) @@ -71,20 +64,21 @@ if hasattr(Image.core, "drawwmf"): def _accept(prefix): return ( - prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or - prefix[:4] == b"\x01\x00\x00\x00" - ) + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00" + ) ## # Image plugin for Windows metafiles. 
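Aside on the WebP changes above: the animated save path now resolves the background color from encoderinfo or im.info, converts a GIF-style palette index into an RGBA tuple, serializes an Image.Exif object via tobytes(), and raises OSError rather than IOError. A usage sketch under those assumptions ("animation.gif" is a hypothetical input file):

    from PIL import Image, ImageSequence

    im = Image.open("animation.gif")  # hypothetical animated GIF
    frames = [frame.copy() for frame in ImageSequence.Iterator(im)]
    # A GIF's info["background"] is a palette index; the code above now maps
    # it through the palette to an RGBA tuple before validating it.
    frames[0].save(
        "animation.webp",
        save_all=True,
        append_images=frames[1:],
        duration=im.info.get("duration", 100),
        exif=im.getexif(),  # an Image.Exif instance, serialized by tobytes()
    )

The validation itself is unchanged: a background that is not an RGBA tuple in range still fails, only the exception type moved to OSError.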
+ class WmfStubImageFile(ImageFile.StubImageFile): format = "WMF" format_description = "Windows Metafile" def _open(self): + self._inch = None # check placable header s = self.fp.read(80) @@ -94,7 +88,7 @@ class WmfStubImageFile(ImageFile.StubImageFile): # placeable windows metafile # get units per inch - inch = word(s, 14) + self._inch = word(s, 14) # get bounding box x0 = short(s, 6) @@ -103,12 +97,14 @@ class WmfStubImageFile(ImageFile.StubImageFile): y1 = short(s, 12) # normalize size to 72 dots per inch - size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch + self.info["dpi"] = 72 + size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) self.info["wmf_bbox"] = x0, y0, x1, y1 - self.info["dpi"] = 72 - # sanity check (standard metafile header) if s[22:26] != b"\x01\x00\t\x00": raise SyntaxError("Unsupported WMF file format") @@ -125,12 +121,11 @@ class WmfStubImageFile(ImageFile.StubImageFile): # get frame (in 0.01 millimeter units) frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) - # normalize size to 72 dots per inch size = x1 - x0, y1 - y0 # calculate dots per inch from bbox and frame - xdpi = 2540 * (x1 - y0) // (frame[2] - frame[0]) - ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1]) + xdpi = int(2540.0 * (x1 - y0) / (frame[2] - frame[0]) + 0.5) + ydpi = int(2540.0 * (y1 - y0) / (frame[3] - frame[1]) + 0.5) self.info["wmf_bbox"] = x0, y0, x1, y1 @@ -152,12 +147,23 @@ class WmfStubImageFile(ImageFile.StubImageFile): def _load(self): return _handler + def load(self, dpi=None): + if dpi is not None and self._inch is not None: + self.info["dpi"] = int(dpi + 0.5) + x0, y0, x1, y1 = self.info["wmf_bbox"] + self._size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + super().load() + def _save(im, fp, filename): if _handler is None or not hasattr(_handler, "save"): - raise IOError("WMF save handler not installed") + raise OSError("WMF save handler not installed") _handler.save(im, fp, filename) + # # -------------------------------------------------------------------- # Registry stuff diff --git a/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py index 8cdd848..c0d8db0 100644 --- a/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py @@ -20,8 +20,6 @@ from . import Image, ImageFile, ImagePalette from ._binary import i8, o8 -__version__ = "0.1" - _MAGIC = b"P7 332" # standard color palette for thumbnails (RGB332) @@ -29,7 +27,9 @@ PALETTE = b"" for r in range(8): for g in range(8): for b in range(4): - PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3)) + PALETTE = PALETTE + ( + o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) + ) def _accept(prefix): @@ -39,6 +39,7 @@ def _accept(prefix): ## # Image plugin for XV thumbnail images. 
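Aside on the WMF plugin above: storing the placeable header's units-per-inch on the instance (self._inch) is what enables the new load(dpi=...) override to re-derive the pixel size before rasterizing. A sketch, assuming a placeable "drawing.wmf" and an installed WMF handler (Windows builds register one backed by Image.core.drawwmf; elsewhere register_handler() must be called first):

    from PIL import Image

    im = Image.open("drawing.wmf")    # hypothetical placeable metafile
    print(im.size, im.info["dpi"])    # size is normalized to 72 dpi on open
    im.load(dpi=144)                  # recompute size from self._inch, then rasterize
    print(im.size)                    # roughly doubled relative to the 72 dpi size

For non-placeable (standard-header) files self._inch stays None, so load(dpi=...) deliberately degrades to a plain load().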
+ class XVThumbImageFile(ImageFile.ImageFile): format = "XVThumb" @@ -69,10 +70,7 @@ class XVThumbImageFile(ImageFile.ImageFile): self.palette = ImagePalette.raw("RGB", PALETTE) - self.tile = [ - ("raw", (0, 0)+self.size, - self.fp.tell(), (self.mode, 0, 1) - )] + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] # -------------------------------------------------------------------- diff --git a/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py index 0cccda1..ead9722 100644 --- a/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py @@ -20,9 +20,8 @@ # import re -from . import Image, ImageFile -__version__ = "0.6" +from . import Image, ImageFile # XBM header xbm_head = re.compile( @@ -43,6 +42,7 @@ def _accept(prefix): ## # Image plugin for X11 bitmaps. + class XbmImageFile(ImageFile.ImageFile): format = "XBM" @@ -58,32 +58,30 @@ class XbmImageFile(ImageFile.ImageFile): ysize = int(m.group("height")) if m.group("hotspot"): - self.info["hotspot"] = ( - int(m.group("xhot")), int(m.group("yhot")) - ) + self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot"))) self.mode = "1" self._size = xsize, ysize - self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + self.tile = [("xbm", (0, 0) + self.size, m.end(), None)] def _save(im, fp, filename): if im.mode != "1": - raise IOError("cannot write mode %s as XBM" % im.mode) + raise OSError("cannot write mode %s as XBM" % im.mode) - fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) - fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + fp.write(("#define im_width %d\n" % im.size[0]).encode("ascii")) + fp.write(("#define im_height %d\n" % im.size[1]).encode("ascii")) hotspot = im.encoderinfo.get("hotspot") if hotspot: - fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) - fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode("ascii")) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode("ascii")) fp.write(b"static char im_bits[] = {\n") - ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)]) fp.write(b"};\n") diff --git a/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py index 02bc28a..d8bd00a 100644 --- a/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py +++ b/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py @@ -16,13 +16,12 @@ import re + from . import Image, ImageFile, ImagePalette from ._binary import i8, o8 -__version__ = "0.2" - # XPM header -xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") +xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)') def _accept(prefix): @@ -32,6 +31,7 @@ def _accept(prefix): ## # Image plugin for X11 pixel maps. 
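Aside on the XBM/XPM cleanups above: apart from reformatting, the save paths now raise OSError; since IOError has been an alias of OSError throughout Python 3, existing callers that catch IOError keep working. A small sketch of the XBM mode check:

    from PIL import Image

    im = Image.new("RGB", (16, 16))
    try:
        im.save("icon.xbm")              # XBM stores 1-bit data only
    except OSError as err:               # catching IOError would match too
        print("expected:", err)          # cannot write mode RGB as XBM
    im.convert("1").save("icon.xbm")     # bilevel images save fine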
+ class XpmImageFile(ImageFile.ImageFile): format = "XPM" @@ -67,9 +67,9 @@ class XpmImageFile(ImageFile.ImageFile): for i in range(pal): s = self.fp.readline() - if s[-2:] == b'\r\n': + if s[-2:] == b"\r\n": s = s[:-2] - elif s[-1:] in b'\r\n': + elif s[-1:] in b"\r\n": s = s[:-1] c = i8(s[1]) @@ -80,15 +80,15 @@ class XpmImageFile(ImageFile.ImageFile): if s[i] == b"c": # process colour key - rgb = s[i+1] + rgb = s[i + 1] if rgb == b"None": self.info["transparency"] = c elif rgb[0:1] == b"#": # FIXME: handle colour names (see ImagePalette.py) rgb = int(rgb[1:], 16) - palette[c] = (o8((rgb >> 16) & 255) + - o8((rgb >> 8) & 255) + - o8(rgb & 255)) + palette[c] = ( + o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255) + ) else: # unknown colour raise ValueError("cannot read this XPM file") @@ -102,7 +102,7 @@ class XpmImageFile(ImageFile.ImageFile): self.mode = "P" self.palette = ImagePalette.raw("RGB", b"".join(palette)) - self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))] def load_read(self, bytes): @@ -114,10 +114,11 @@ class XpmImageFile(ImageFile.ImageFile): s = [None] * ysize for i in range(ysize): - s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize) return b"".join(s) + # # Registry diff --git a/server/www/packages/packages-linux/x64/PIL/__init__.py b/server/www/packages/packages-linux/x64/PIL/__init__.py index bc8cfed..f9cb157 100644 --- a/server/www/packages/packages-linux/x64/PIL/__init__.py +++ b/server/www/packages/packages-linux/x64/PIL/__init__.py @@ -9,64 +9,127 @@ PIL is the Python Imaging Library by Fredrik Lundh and Contributors. Copyright (c) 1999 by Secret Labs AB. Use PIL.__version__ for this Pillow version. -PIL.VERSION is the old PIL version and will be removed in the future. ;-) """ +import sys +import warnings + from . import _version -# VERSION is deprecated and will be removed in Pillow 6.0.0. -# PILLOW_VERSION is deprecated and will be removed after that. +# VERSION was removed in Pillow 6.0.0. +__version__ = _version.__version__ + + +# PILLOW_VERSION is deprecated and will be removed in a future release. # Use __version__ instead. -VERSION = '1.1.7' # PIL Version -PILLOW_VERSION = __version__ = _version.__version__ +def _raise_version_warning(): + warnings.warn( + "PILLOW_VERSION is deprecated and will be removed in a future release. 
" + "Use __version__ instead.", + DeprecationWarning, + stacklevel=3, + ) + + +if sys.version_info >= (3, 7): + + def __getattr__(name): + if name == "PILLOW_VERSION": + _raise_version_warning() + return __version__ + raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name)) + + +else: + + class _Deprecated_Version(str): + def __str__(self): + _raise_version_warning() + return super().__str__() + + def __getitem__(self, key): + _raise_version_warning() + return super().__getitem__(key) + + def __eq__(self, other): + _raise_version_warning() + return super().__eq__(other) + + def __ne__(self, other): + _raise_version_warning() + return super().__ne__(other) + + def __gt__(self, other): + _raise_version_warning() + return super().__gt__(other) + + def __lt__(self, other): + _raise_version_warning() + return super().__lt__(other) + + def __ge__(self, other): + _raise_version_warning() + return super().__gt__(other) + + def __le__(self, other): + _raise_version_warning() + return super().__lt__(other) + + PILLOW_VERSION = _Deprecated_Version(__version__) del _version -_plugins = ['BlpImagePlugin', - 'BmpImagePlugin', - 'BufrStubImagePlugin', - 'CurImagePlugin', - 'DcxImagePlugin', - 'DdsImagePlugin', - 'EpsImagePlugin', - 'FitsStubImagePlugin', - 'FliImagePlugin', - 'FpxImagePlugin', - 'FtexImagePlugin', - 'GbrImagePlugin', - 'GifImagePlugin', - 'GribStubImagePlugin', - 'Hdf5StubImagePlugin', - 'IcnsImagePlugin', - 'IcoImagePlugin', - 'ImImagePlugin', - 'ImtImagePlugin', - 'IptcImagePlugin', - 'JpegImagePlugin', - 'Jpeg2KImagePlugin', - 'McIdasImagePlugin', - 'MicImagePlugin', - 'MpegImagePlugin', - 'MpoImagePlugin', - 'MspImagePlugin', - 'PalmImagePlugin', - 'PcdImagePlugin', - 'PcxImagePlugin', - 'PdfImagePlugin', - 'PixarImagePlugin', - 'PngImagePlugin', - 'PpmImagePlugin', - 'PsdImagePlugin', - 'SgiImagePlugin', - 'SpiderImagePlugin', - 'SunImagePlugin', - 'TgaImagePlugin', - 'TiffImagePlugin', - 'WebPImagePlugin', - 'WmfImagePlugin', - 'XbmImagePlugin', - 'XpmImagePlugin', - 'XVThumbImagePlugin'] +_plugins = [ + "BlpImagePlugin", + "BmpImagePlugin", + "BufrStubImagePlugin", + "CurImagePlugin", + "DcxImagePlugin", + "DdsImagePlugin", + "EpsImagePlugin", + "FitsStubImagePlugin", + "FliImagePlugin", + "FpxImagePlugin", + "FtexImagePlugin", + "GbrImagePlugin", + "GifImagePlugin", + "GribStubImagePlugin", + "Hdf5StubImagePlugin", + "IcnsImagePlugin", + "IcoImagePlugin", + "ImImagePlugin", + "ImtImagePlugin", + "IptcImagePlugin", + "JpegImagePlugin", + "Jpeg2KImagePlugin", + "McIdasImagePlugin", + "MicImagePlugin", + "MpegImagePlugin", + "MpoImagePlugin", + "MspImagePlugin", + "PalmImagePlugin", + "PcdImagePlugin", + "PcxImagePlugin", + "PdfImagePlugin", + "PixarImagePlugin", + "PngImagePlugin", + "PpmImagePlugin", + "PsdImagePlugin", + "SgiImagePlugin", + "SpiderImagePlugin", + "SunImagePlugin", + "TgaImagePlugin", + "TiffImagePlugin", + "WebPImagePlugin", + "WmfImagePlugin", + "XbmImagePlugin", + "XpmImagePlugin", + "XVThumbImagePlugin", +] + + +class UnidentifiedImageError(IOError): + pass diff --git a/server/www/packages/packages-linux/x64/PIL/__main__.py b/server/www/packages/packages-linux/x64/PIL/__main__.py new file mode 100644 index 0000000..a05323f --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/__main__.py @@ -0,0 +1,3 @@ +from .features import pilinfo + +pilinfo() diff --git a/server/www/packages/packages-linux/x64/PIL/_binary.py b/server/www/packages/packages-linux/x64/PIL/_binary.py index 767c13b..529b8c9 100644 --- 
a/server/www/packages/packages-linux/x64/PIL/_binary.py +++ b/server/www/packages/packages-linux/x64/PIL/_binary.py @@ -11,21 +11,15 @@ # See the README file for information on usage and redistribution. # -from struct import unpack_from, pack -from ._util import py3 +from struct import pack, unpack_from -if py3: - def i8(c): - return c if c.__class__ is int else c[0] - def o8(i): - return bytes((i & 255,)) -else: - def i8(c): - return ord(c) +def i8(c): + return c if c.__class__ is int else c[0] - def o8(i): - return chr(i & 255) + +def o8(i): + return bytes((i & 255,)) # Input, le = little endian, be = big endian @@ -33,8 +27,8 @@ def i16le(c, o=0): """ Converts a 2-bytes (16 bits) string to an unsigned integer. - c: string containing bytes to convert - o: offset of bytes to convert in string + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string """ return unpack_from(" 2: - from tkinter import _tkinter as tk -else: - from Tkinter import tkinter as tk - -if hasattr(sys, 'pypy_find_executable'): +if hasattr(sys, "pypy_find_executable"): # Tested with packages at https://bitbucket.org/pypy/pypy/downloads. # PyPies 1.6, 2.0 do not have tkinter built in. PyPy3-2.3.1 gives an # OSError trying to import tkinter. Otherwise: diff --git a/server/www/packages/packages-linux/x64/PIL/_util.py b/server/www/packages/packages-linux/x64/PIL/_util.py index e6989d6..755b4b2 100644 --- a/server/www/packages/packages-linux/x64/PIL/_util.py +++ b/server/www/packages/packages-linux/x64/PIL/_util.py @@ -1,20 +1,20 @@ import os import sys -py3 = sys.version_info.major >= 3 +py36 = sys.version_info[0:2] >= (3, 6) -if py3: - def isStringType(t): - return isinstance(t, str) + +if py36: + from pathlib import Path + + def isPath(f): + return isinstance(f, (bytes, str, Path)) + + +else: def isPath(f): return isinstance(f, (bytes, str)) -else: - def isStringType(t): - return isinstance(t, basestring) - - def isPath(f): - return isinstance(f, basestring) # Checks if an object is a string, and that it points to a directory. @@ -22,7 +22,7 @@ def isDirectory(f): return isPath(f) and os.path.isdir(f) -class deferred_error(object): +class deferred_error: def __init__(self, ex): self.ex = ex diff --git a/server/www/packages/packages-linux/x64/PIL/_version.py b/server/www/packages/packages-linux/x64/PIL/_version.py index b5e4f0d..1af2909 100644 --- a/server/www/packages/packages-linux/x64/PIL/_version.py +++ b/server/www/packages/packages-linux/x64/PIL/_version.py @@ -1,2 +1,2 @@ # Master version for Pillow -__version__ = '5.3.0' +__version__ = "7.1.2" diff --git a/server/www/packages/packages-linux/x64/PIL/_webp.cpython-37m-x86_64-linux-gnu.so b/server/www/packages/packages-linux/x64/PIL/_webp.cpython-37m-x86_64-linux-gnu.so index 176915a..3f4ad33 100755 Binary files a/server/www/packages/packages-linux/x64/PIL/_webp.cpython-37m-x86_64-linux-gnu.so and b/server/www/packages/packages-linux/x64/PIL/_webp.cpython-37m-x86_64-linux-gnu.so differ diff --git a/server/www/packages/packages-linux/x64/PIL/features.py b/server/www/packages/packages-linux/x64/PIL/features.py index 9926445..ac06c0f 100644 --- a/server/www/packages/packages-linux/x64/PIL/features.py +++ b/server/www/packages/packages-linux/x64/PIL/features.py @@ -1,3 +1,10 @@ +import collections +import os +import sys +import warnings + +import PIL + from . 
import Image modules = { @@ -26,12 +33,7 @@ def get_supported_modules(): return [f for f in modules if check_module(f)] -codecs = { - "jpg": "jpeg", - "jpg_2000": "jpeg2k", - "zlib": "zip", - "libtiff": "libtiff" -} +codecs = {"jpg": "jpeg", "jpg_2000": "jpeg2k", "zlib": "zip", "libtiff": "libtiff"} def check_codec(feature): @@ -48,10 +50,13 @@ def get_supported_codecs(): features = { - "webp_anim": ("PIL._webp", 'HAVE_WEBPANIM'), - "webp_mux": ("PIL._webp", 'HAVE_WEBPMUX'), + "webp_anim": ("PIL._webp", "HAVE_WEBPANIM"), + "webp_mux": ("PIL._webp", "HAVE_WEBPMUX"), "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY"), - "raqm": ("PIL._imagingft", "HAVE_RAQM") + "raqm": ("PIL._imagingft", "HAVE_RAQM"), + "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO"), + "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT"), + "xcb": ("PIL._imaging", "HAVE_XCB"), } @@ -62,7 +67,7 @@ def check_feature(feature): module, flag = features[feature] try: - imported_module = __import__(module, fromlist=['PIL']) + imported_module = __import__(module, fromlist=["PIL"]) return getattr(imported_module, flag) except ImportError: return None @@ -73,9 +78,14 @@ def get_supported_features(): def check(feature): - return (feature in modules and check_module(feature) or - feature in codecs and check_codec(feature) or - feature in features and check_feature(feature)) + if feature in modules: + return check_module(feature) + if feature in codecs: + return check_codec(feature) + if feature in features: + return check_feature(feature) + warnings.warn("Unknown feature '%s'." % feature, stacklevel=2) + return False def get_supported(): @@ -83,3 +93,81 @@ def get_supported(): ret.extend(get_supported_features()) ret.extend(get_supported_codecs()) return ret + + +def pilinfo(out=None, supported_formats=True): + if out is None: + out = sys.stdout + + Image.init() + + print("-" * 68, file=out) + print("Pillow {}".format(PIL.__version__), file=out) + py_version = sys.version.splitlines() + print("Python {}".format(py_version[0].strip()), file=out) + for py_version in py_version[1:]: + print(" {}".format(py_version.strip()), file=out) + print("-" * 68, file=out) + print( + "Python modules loaded from {}".format(os.path.dirname(Image.__file__)), + file=out, + ) + print( + "Binary modules loaded from {}".format(os.path.dirname(Image.core.__file__)), + file=out, + ) + print("-" * 68, file=out) + + for name, feature in [ + ("pil", "PIL CORE"), + ("tkinter", "TKINTER"), + ("freetype2", "FREETYPE2"), + ("littlecms2", "LITTLECMS2"), + ("webp", "WEBP"), + ("transp_webp", "WEBP Transparency"), + ("webp_mux", "WEBPMUX"), + ("webp_anim", "WEBP Animation"), + ("jpg", "JPEG"), + ("jpg_2000", "OPENJPEG (JPEG2000)"), + ("zlib", "ZLIB (PNG/ZIP)"), + ("libtiff", "LIBTIFF"), + ("raqm", "RAQM (Bidirectional Text)"), + ("libimagequant", "LIBIMAGEQUANT (Quantization method)"), + ("xcb", "XCB (X protocol)"), + ]: + if check(name): + print("---", feature, "support ok", file=out) + else: + print("***", feature, "support not installed", file=out) + print("-" * 68, file=out) + + if supported_formats: + extensions = collections.defaultdict(list) + for ext, i in Image.EXTENSION.items(): + extensions[i].append(ext) + + for i in sorted(Image.ID): + line = "{}".format(i) + if i in Image.MIME: + line = "{} {}".format(line, Image.MIME[i]) + print(line, file=out) + + if i in extensions: + print( + "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out + ) + + features = [] + if i in Image.OPEN: + features.append("open") + if i in Image.SAVE: + 
features.append("save") + if i in Image.SAVE_ALL: + features.append("save_all") + if i in Image.DECODERS: + features.append("decode") + if i in Image.ENCODERS: + features.append("encode") + + print("Features: {}".format(", ".join(features)), file=out) + print("-" * 68, file=out) diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libfreetype-69f25d5e.so.6.17.1 b/server/www/packages/packages-linux/x64/Pillow.libs/libfreetype-69f25d5e.so.6.17.1 new file mode 100755 index 0000000..70ffc5c Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libfreetype-69f25d5e.so.6.17.1 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 b/server/www/packages/packages-linux/x64/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 new file mode 100755 index 0000000..4994789 Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-a6801db4.so.2.0.8 b/server/www/packages/packages-linux/x64/Pillow.libs/liblcms2-a6801db4.so.2.0.8 similarity index 99% rename from server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-a6801db4.so.2.0.8 rename to server/www/packages/packages-linux/x64/Pillow.libs/liblcms2-a6801db4.so.2.0.8 index 6f10af2..4b40543 100755 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-a6801db4.so.2.0.8 and b/server/www/packages/packages-linux/x64/Pillow.libs/liblcms2-a6801db4.so.2.0.8 differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/liblzma-6cd627ed.so.5.2.4 b/server/www/packages/packages-linux/x64/Pillow.libs/liblzma-99449165.so.5.2.5 similarity index 57% rename from server/www/packages/packages-linux/x64/PIL/.libs/liblzma-6cd627ed.so.5.2.4 rename to server/www/packages/packages-linux/x64/Pillow.libs/liblzma-99449165.so.5.2.5 index b1dd0d3..3bc2e71 100755 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/liblzma-6cd627ed.so.5.2.4 and b/server/www/packages/packages-linux/x64/Pillow.libs/liblzma-99449165.so.5.2.5 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libopenjp2-b3d7668a.so.2.3.1 b/server/www/packages/packages-linux/x64/Pillow.libs/libopenjp2-b3d7668a.so.2.3.1 new file mode 100755 index 0000000..307cffb Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libopenjp2-b3d7668a.so.2.3.1 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 b/server/www/packages/packages-linux/x64/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 new file mode 100755 index 0000000..1563360 Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libtiff-41910f6d.so.5.5.0 b/server/www/packages/packages-linux/x64/Pillow.libs/libtiff-41910f6d.so.5.5.0 new file mode 100755 index 0000000..f2330c2 Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libtiff-41910f6d.so.5.5.0 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libwebp-122bd20b.so.7.1.0 b/server/www/packages/packages-linux/x64/Pillow.libs/libwebp-122bd20b.so.7.1.0 new file mode 100755 index 0000000..9695fd9 Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libwebp-122bd20b.so.7.1.0 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 
b/server/www/packages/packages-linux/x64/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 new file mode 100755 index 0000000..26de7af Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 differ diff --git a/server/www/packages/packages-linux/x64/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 b/server/www/packages/packages-linux/x64/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 new file mode 100755 index 0000000..2ee2558 Binary files /dev/null and b/server/www/packages/packages-linux/x64/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 differ diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3 b/server/www/packages/packages-linux/x64/Pillow.libs/libz-a147dcb0.so.1.2.3 similarity index 99% rename from server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3 rename to server/www/packages/packages-linux/x64/Pillow.libs/libz-a147dcb0.so.1.2.3 index c123f89..0d6b630 100755 Binary files a/server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3 and b/server/www/packages/packages-linux/x64/Pillow.libs/libz-a147dcb0.so.1.2.3 differ diff --git a/server/www/packages/packages-linux/x64/_cffi_backend.cpython-37m-x86_64-linux-gnu.so b/server/www/packages/packages-linux/x64/_cffi_backend.cpython-37m-x86_64-linux-gnu.so new file mode 100755 index 0000000..c66494c Binary files /dev/null and b/server/www/packages/packages-linux/x64/_cffi_backend.cpython-37m-x86_64-linux-gnu.so differ diff --git a/server/www/packages/packages-linux/x64/cffi/__init__.py b/server/www/packages/packages-linux/x64/cffi/__init__.py new file mode 100644 index 0000000..7969abf --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.14.0" +__version_info__ = (1, 14, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/server/www/packages/packages-linux/x64/cffi/_cffi_errors.h b/server/www/packages/packages-linux/x64/cffi/_cffi_errors.h new file mode 100644 index 0000000..83cdad0 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/_cffi_errors.h @@ -0,0 +1,147 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. 
+*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/server/www/packages/packages-linux/x64/cffi/_cffi_include.h b/server/www/packages/packages-linux/x64/cffi/_cffi_include.h new file mode 100644 index 0000000..3129150 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/_cffi_include.h @@ -0,0 +1,356 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. 
+ + Issue #350 is still open: on Windows, the code here causes it to link + with PYTHON36.DLL (for example) instead of PYTHON3.DLL. A fix was + attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv + does not make PYTHON3.DLL available, and so the "correctly" compiled + version would not run inside a virtualenv. We will re-apply the fix + after virtualenv has been fixed for some time. For explanation, see + issue #355. For a workaround if you want PYTHON3.DLL and don't worry + about virtualenv, see issue #350. See also 'py_limited_api' in + setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) +# define Py_LIMITED_API +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? 
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = 
PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \
+     _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected)      \
+    ((got_nonpos) == (expected <= 0) &&                 \
+     (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall  __stdcall
+#else
+# define _cffi_stdcall  /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/server/www/packages/packages-linux/x64/cffi/_embedding.h b/server/www/packages/packages-linux/x64/cffi/_embedding.h
new file mode 100644
index 0000000..34a4a66
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/cffi/_embedding.h
@@ -0,0 +1,520 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+# define CFFI_DLLEXPORT  __declspec(dllexport)
+#elif defined(__GNUC__)
+# define CFFI_DLLEXPORT  __attribute__((visibility("default")))
+#else
+# define CFFI_DLLEXPORT  /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+   * _cffi_call_python, which we declare just below, is the one called
+     by ``extern "Python"`` implementations.
+
+   * _cffi_call_python_org, which on CPython is actually part of the
+     _cffi_exports[] array, is the function pointer copied from
+     _cffi_backend.
+
+   After initialization is complete, both are equal.  However, the
+   first one remains equal to &_cffi_start_and_call_python until the
+   very end of initialization, when we are (or should be) sure that
+   concurrent threads also see a completely initialized world, and
+   only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+   /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier()          __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) &&   \
+     !defined(__i386__) && !defined(__i386)
+#   define cffi_read_barrier()         __sync_synchronize()
+# else
+#   define cffi_read_barrier()         (void)0
+# endif
+#else
+   /* --- Windows threads version --- */
+# include <windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+                          (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier()   InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier()           (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+#  include <pthread.h>
+   static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+   static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+  static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+    static void *volatile lock = NULL;
+
+    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+        /* should ideally do a spin loop instruction here, but
+           hard to do it portably and doesn't really matter I
+           think: pthread_mutex_init() should be very fast, and
+           this is only run at start-up anyway.
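+
+           Editorial gloss: this static 'lock' is only a bootstrap
+           spin lock guarding the lazy, one-time creation of the real
+           recursive mutex below.  Afterwards, callers simply bracket
+           their one-time work, as _cffi_start_python() does later in
+           this file:
+
+               _cffi_acquire_reentrant_mutex();
+               ...one-time initialization...
+               _cffi_release_reentrant_mutex();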
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
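+       (Editorial gloss: the block below best-effort writes the
+       pending Python exception, the embedding module's name, the
+       cffi version it was compiled with, the location of the
+       _cffi_backend module, and sys.path to sys.stderr.)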
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.14.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
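+
+           Editorial gloss on the lock encoding: the lock word is
+           _PyParser_TokenNames[0], normally a pointer to the string
+           "ENDMARKER".  "Locked" bumps that pointer by one so it
+           points at "NDMARKER"; the 'E'/'N' asserts above test
+           exactly this, and the release at the bottom of the
+           function CAS-restores the original pointer.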
*/ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); + PyEval_InitThreads(); + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. 
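+
+      Editorial summary of the resulting state machine: before
+      initialization, _cffi_call_python points to
+      _cffi_start_and_call_python(); once _cffi_initialize_python()
+      succeeds it is switched, behind a write barrier, to the real
+      cffi_call_python() from _cffi_backend, so this slow path runs
+      at most once per extension module.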
+ */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/server/www/packages/packages-linux/x64/cffi/api.py b/server/www/packages/packages-linux/x64/cffi/api.py new file mode 100644 index 0000000..999a8ae --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
+ If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
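+
+        Editorial example (not part of upstream cffi's docstring):
+
+            ffi.typeof("int[10]")         # a <ctype 'int[10]'> object
+            ffi.typeof(ffi.new("int *"))  # the <ctype 'int *'> of that cdata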
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+ 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. 
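+
+        Editorial example (requires a C compiler at run time):
+
+            ffi.cdef("double sin(double x);")
+            lib = ffi.verify('#include <math.h>', libraries=['m'])
+            lib.sin(1.23)   # the real C function from libm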
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. 
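+        #
+        # Editorial usage sketch ('connect' is a hypothetical,
+        # expensive callable); safe to run from many threads, the
+        # function is invoked only once and its result is cached:
+        #
+        #     db = ffi.init_once(lambda: connect("app.db"), "db")
+        #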
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/server/www/packages/packages-linux/x64/cffi/backend_ctypes.py b/server/www/packages/packages-linux/x64/cffi/backend_ctypes.py new file mode 100644 index 0000000..e7956a7 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + 
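+        # Editorial note: builds one rich-comparison method per name
+        # ('__eq__', '__ne__', '__lt__', ...; see the assignments just
+        # below).  Two pointer-like cdatas compare by address, two
+        # primitives by value, and a mixed pointer/primitive pair
+        # returns NotImplemented.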
cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
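+
+    # Editorial note: casting an integer (or None, or another cdata)
+    # to this pointer type funnels through _cast_from() above into
+    # _new_pointer_at().  E.g. casting 0 yields a cdata whose
+    # _address == 0, which __bool__ below reports as falsy -- the
+    # usual NULL semantics.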
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
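+ # (editor's note, not part of upstream cffi:) ctypes silently truncates a
+ # value stored into a too-narrow bitfield, so the setter below round-trips
+ # it: write the value, read it back, and if it no longer compares equal it
+ # did not fit -- restore the old contents and raise OverflowError instead.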
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a <cdata 'struct-or-union'>") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/server/www/packages/packages-linux/x64/cffi/cffi_opcode.py b/server/www/packages/packages-linux/x64/cffi/cffi_opcode.py new file mode 100644 index 0000000..a0df98d --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/server/www/packages/packages-linux/x64/cffi/commontypes.py b/server/www/packages/packages-linux/x64/cffi/commontypes.py new file mode 100644 index 0000000..8ec97c7 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/server/www/packages/packages-linux/x64/cffi/cparser.py b/server/www/packages/packages-linux/x64/cffi/cparser.py new file mode 100644 index 0000000..ea27c48 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/cparser.py @@ -0,0 +1,963 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
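+ # (editor's note, not part of upstream cffi:) freezers like cx_Freeze
+ # discover dependencies by scanning modules for static 'import' statements;
+ # pycparser loads its generated parser tables dynamically, so that scan
+ # misses them. Naming the two table modules in an ordinary import -- even
+ # inside a function that is never executed -- is enough to get them bundled.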
+ import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "<cdef source string>" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis in between, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. 
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... 
the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 "<cdef automatic initialization code>"') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called <cdef source string> from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. + line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + 
isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... 
(literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 
'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we abuse them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
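+ # (editor's note, not part of upstream cffi:) "volatile volatile const" is
+ # used as the marker precisely because no real declaration carries that
+ # qualifier triple, so checking the last three qualifiers below cannot
+ # misfire on ordinary const or volatile return types.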
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
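+ # (editor's note, not part of upstream cffi:) this caching is what lets a
+ # cdef() contain both
+ #     struct foo;              /* opaque here: type.decls is None */
+ #     struct foo { int x; };   /* a later mention fills in the fields */
+ # with the two mentions resolving to the same StructType object through
+ # the '%s %s' % (kind, name) key in self._declarations.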
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
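+ # Editor's note: '[...]' is only meaningful where partial_length_ok + # is passed, e.g. for struct fields via _get_struct_union_enum_type(); + # in any other context the error below is raised.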
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/server/www/packages/packages-linux/x64/cffi/error.py b/server/www/packages/packages-linux/x64/cffi/error.py new file mode 100644 index 0000000..0a27247 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/server/www/packages/packages-linux/x64/cffi/ffiplatform.py b/server/www/packages/packages-linux/x64/cffi/ffiplatform.py new file mode 100644 index 0000000..8531346 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/ffiplatform.py @@ -0,0 +1,127 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + _hack_at_distutils() + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + _hack_at_distutils() + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bug where some env vars can + # become longer and longer every time distutils is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = 
path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass diff --git a/server/www/packages/packages-linux/x64/cffi/lock.py b/server/www/packages/packages-linux/x64/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/server/www/packages/packages-linux/x64/cffi/model.py b/server/www/packages/packages-linux/x64/cffi/model.py new file mode 100644 index 0000000..5f1b0d2 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/model.py @@ -0,0 +1,614 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) 
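+ # Editor's note: this models cdef lines such as 'typedef int... foo_t;' + # (see _get_unknown_type() in cparser.py); the real size and signedness + # are only known after compilation, hence the NotImplementedError below.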
+ + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == 
'...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
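+ # Editor's note (illustrative): given + # struct { struct { int a; }; int b; } + # the flattened field list becomes ('a', 'b'), since enumfields() + # expands the nested anonymous struct.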
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
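+ # Editor's note: the warnings machinery records each emitted warning + # in the module-level __warningregistry__ dict and normally shows it + # only once; clearing the registry re-arms the warning.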
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/server/www/packages/packages-linux/x64/cffi/parse_c_type.h b/server/www/packages/packages-linux/x64/cffi/parse_c_type.h new file mode 100644 index 0000000..84e4ef8 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/server/www/packages/packages-linux/x64/cffi/pkgconfig.py b/server/www/packages/packages-linux/x64/cffi/pkgconfig.py new file mode 100644 index 0000000..5c93f15 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cfg1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise 
PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/server/www/packages/packages-linux/x64/cffi/recompiler.py b/server/www/packages/packages-linux/x64/cffi/recompiler.py new file mode 100644 index 0000000..d66ff7f --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/recompiler.py @@ -0,0 +1,1552 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, 
module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in tp.enumfields(): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. 
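+ # Editor's note: each table collected into self._lsts here is later + # emitted by write_c_source_to_f() as a + # 'static const struct _cffi_<step>_s _cffi_<step>s[]' array.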
+ self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
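+ # Editor's note: for each non-variadic function this emits a plain + # '_cffi_d_NAME' wrapper (used by addressof(lib, ...)) and a CPython + # '_cffi_f_NAME' wrapper; see _generate_cpy_function_decl() below.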
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for 
item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would lose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
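+        # (illustrative, hypothetical example: for "struct pt f(struct pt a)"
+        #  the branch below emits
+        #      static void _cffi_f_f(struct pt *result, struct pt *x0)
+        #  whose body does "*result = f(*x0);", so both the struct argument
+        #  and the struct result travel through pointers on PyPy.)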
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
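+                # (illustrative example: for a field "double v;" the emitted
+                #  check is "{ double *tmp = &p->v; (void)tmp; }", which
+                #  produces a compile-time warning or error if the declared
+                #  type does not match the real one.)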
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + expand_anonymous_struct_union = not self.target_is_python + enumfields = list(tp.enumfields(expand_anonymous_struct_union)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % 
location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
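+    # (usage sketch, for orientation; the module name "_example" is
+    #  hypothetical:
+    #      ffi.cdef("static const int MAGIC;")
+    #      ffi.set_source("_example", "#define MAGIC 42")
+    #  after compiling, lib.MAGIC == 42, backed by the _cffi_const_MAGIC
+    #  accessor generated below.)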
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + for line in s.splitlines(True): + prnt(('// ' + line).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (ord(c),) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = 
_emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/server/www/packages/packages-linux/x64/cffi/setuptools_ext.py b/server/www/packages/packages-linux/x64/cffi/setuptools_ext.py new file mode 100644 index 0000000..df5a518 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/setuptools_ext.py @@ -0,0 +1,217 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. 
The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + For now we'll skip py_limited_api on all Windows versions to avoid an + inconsistent mess. + """ + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and sys.platform != 'win32'): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. 
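+    # (usage sketch, for orientation: this path is reached from a setup.py
+    #  such as
+    #      setup(..., setup_requires=["cffi"],
+    #            cffi_modules=["path/build.py:ffi"])
+    #  where each "file:variable" string is split by add_cffi_module()
+    #  above; the file name here is hypothetical.)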
+    from setuptools.command.build_ext import build_ext
+    from distutils.dir_util import mkpath
+    from distutils import log
+    from cffi import recompiler
+
+    allsources = ['$PLACEHOLDER']
+    allsources.extend(kwds.pop('sources', []))
+    kwds = _set_py_limited_api(Extension, kwds)
+    ext = Extension(name=module_name, sources=allsources, **kwds)
+
+    def make_mod(tmpdir, pre_run=None):
+        c_file = os.path.join(tmpdir, module_name + source_extension)
+        log.info("generating cffi module %r" % c_file)
+        mkpath(tmpdir)
+        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
+        # arguments just before we turn the ffi into C code. To use it,
+        # subclass the 'distutils.command.build_ext.build_ext' class and
+        # add a method 'def pre_run(self, ext, ffi)'.
+        if pre_run is not None:
+            pre_run(ext, ffi)
+        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
+        if not updated:
+            log.info("already up-to-date")
+        return c_file
+
+    if dist.ext_modules is None:
+        dist.ext_modules = []
+    dist.ext_modules.append(ext)
+
+    base_class = dist.cmdclass.get('build_ext', build_ext)
+    class build_ext_make_mod(base_class):
+        def run(self):
+            if ext.sources[0] == '$PLACEHOLDER':
+                pre_run = getattr(self, 'pre_run', None)
+                ext.sources[0] = make_mod(self.build_temp, pre_run)
+            base_class.run(self)
+    dist.cmdclass['build_ext'] = build_ext_make_mod
+    # NB. multiple runs here will create multiple 'build_ext_make_mod'
+    # classes. Even in this case the 'build_ext' command should be
+    # run once; but just in case, the logic above does nothing if
+    # called again.
+
+
+def _add_py_module(dist, ffi, module_name):
+    from distutils.dir_util import mkpath
+    from setuptools.command.build_py import build_py
+    from setuptools.command.build_ext import build_ext
+    from distutils import log
+    from cffi import recompiler
+
+    def generate_mod(py_file):
+        log.info("generating cffi module %r" % py_file)
+        mkpath(os.path.dirname(py_file))
+        updated = recompiler.make_py_source(ffi, module_name, py_file)
+        if not updated:
+            log.info("already up-to-date")
+
+    base_class = dist.cmdclass.get('build_py', build_py)
+    class build_py_make_mod(base_class):
+        def run(self):
+            base_class.run(self)
+            module_path = module_name.split('.')
+            module_path[-1] += '.py'
+            generate_mod(os.path.join(self.build_lib, *module_path))
+        def get_source_files(self):
+            # This is called from 'setup.py sdist' only. Exclude
+            # the generated .py module in this case.
+            saved_py_modules = self.py_modules
+            try:
+                if saved_py_modules:
+                    self.py_modules = [m for m in saved_py_modules
+                                       if m != module_name]
+                return base_class.get_source_files(self)
+            finally:
+                self.py_modules = saved_py_modules
+    dist.cmdclass['build_py'] = build_py_make_mod
+
+    # distutils and setuptools have no notion I could find of a
+    # generated python module. If we don't add module_name to
+    # dist.py_modules, then things mostly work but there are some
+    # combination of options (--root and --record) that will miss
+    # the module. So we add it here, which gives a few apparently
+    # harmless warnings about not finding the file outside the
+    # build directory.
+    # Then we need to hack more in get_source_files(); see above.
+    if dist.py_modules is None:
+        dist.py_modules = []
+    dist.py_modules.append(module_name)
+
+    # the following is only for "build_ext -i"
+    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
+    class build_ext_make_mod(base_class_2):
+        def run(self):
+            base_class_2.run(self)
+            if self.inplace:
+                # from get_ext_fullpath() in distutils/command/build_ext.py
+                module_path = module_name.split('.')
+                package = '.'.join(module_path[:-1])
+                build_py = self.get_finalized_command('build_py')
+                package_dir = build_py.get_package_dir(package)
+                file_name = module_path[-1] + '.py'
+                generate_mod(os.path.join(package_dir, file_name))
+    dist.cmdclass['build_ext'] = build_ext_make_mod
+
+def cffi_modules(dist, attr, value):
+    assert attr == 'cffi_modules'
+    if isinstance(value, basestring):
+        value = [value]
+
+    for cffi_module in value:
+        add_cffi_module(dist, cffi_module)
diff --git a/server/www/packages/packages-linux/x64/cffi/vengine_cpy.py b/server/www/packages/packages-linux/x64/cffi/vengine_cpy.py
new file mode 100644
index 0000000..cb344ce
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/cffi/vengine_cpy.py
@@ -0,0 +1,1076 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, imp
+from . import model
+from .error import VerificationError
+
+
+class VCPythonEngine(object):
+    _class_key = 'x'
+    _gen_python_module = True
+
+    def __init__(self, verifier):
+        self.verifier = verifier
+        self.ffi = verifier.ffi
+        self._struct_pending_verification = {}
+        self._types_of_builtin_functions = {}
+
+    def patch_extension_kwds(self, kwds):
+        pass
+
+    def find_module(self, module_name, path, so_suffixes):
+        try:
+            f, filename, descr = imp.find_module(module_name, path)
+        except ImportError:
+            return None
+        if f is not None:
+            f.close()
+        # Note that after a setuptools installation, there are both .py
+        # and .so files with the same basename. The code here relies on
+        # imp.find_module() locating the .so in priority.
+        if descr[0] not in so_suffixes:
+            return None
+        return filename
+
+    def collect_types(self):
+        self._typesdict = {}
+        self._generate("collecttype")
+
+    def _prnt(self, what=''):
+        self._f.write(what + '\n')
+
+    def _gettypenum(self, type):
+        # a KeyError here is a bug. please report it! :-)
+        return self._typesdict[type]
+
+    def _do_collect_type(self, tp):
+        if ((not isinstance(tp, model.PrimitiveType)
+             or tp.name == 'long double')
+                and tp not in self._typesdict):
+            num = len(self._typesdict)
+            self._typesdict[tp] = num
+
+    def write_source_to_f(self):
+        self.collect_types()
+        #
+        # The new module will have a _cffi_setup() function that receives
+        # objects from the ffi world, and that calls some setup code in
+        # the module. This setup code is split in several independent
+        # functions, e.g. one per constant. The functions are "chained"
+        # by ending in a tail call to each other.
+        #
+        # This is further split in two chained lists, depending on if we
+        # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects. This is needed because we
+        # need the values of the enum constants in order to build the
+        # <ctype 'enum ...'> that we may have to pass to _cffi_setup().
+        #
+        # The following two 'chained_list_constants' items contain
+        # the head of these two chained lists, as a string that gives the
+        # call to do, if any.
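+        # (illustration: if _generate_cpy_const() emits _cffi_const_A and
+        #  then _cffi_const_B, the head becomes "_cffi_const_B(lib)" and
+        #  _cffi_const_B ends with "return _cffi_const_A(lib);", so one call
+        #  to the head runs the whole chain of setup functions.)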
+        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+        #
+        prnt = self._prnt
+        # first paste some standard set of lines that are mostly '#define'
+        prnt(cffimod_header)
+        prnt()
+        # then paste the C source given by the user, verbatim.
+        prnt(self.verifier.preamble)
+        prnt()
+        #
+        # call generate_cpy_xxx_decl(), for every xxx found from
+        # ffi._parser._declarations. This generates all the functions.
+        self._generate("decl")
+        #
+        # implement the function _cffi_setup_custom() as calling the
+        # head of the chained list.
+        self._generate_setup_custom()
+        prnt()
+        #
+        # produce the method table, including the entries for the
+        # generated Python->C function wrappers, which are done
+        # by generate_cpy_function_method().
+        prnt('static PyMethodDef _cffi_methods[] = {')
+        self._generate("method")
+        prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+        prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
+        prnt('};')
+        prnt()
+        #
+        # standard init.
+        modname = self.verifier.get_module_name()
+        constants = self._chained_list_constants[False]
+        prnt('#if PY_MAJOR_VERSION >= 3')
+        prnt()
+        prnt('static struct PyModuleDef _cffi_module_def = {')
+        prnt(' PyModuleDef_HEAD_INIT,')
+        prnt(' "%s",' % modname)
+        prnt(' NULL,')
+        prnt(' -1,')
+        prnt(' _cffi_methods,')
+        prnt(' NULL, NULL, NULL, NULL')
+        prnt('};')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('PyInit_%s(void)' % modname)
+        prnt('{')
+        prnt(' PyObject *lib;')
+        prnt(' lib = PyModule_Create(&_cffi_module_def);')
+        prnt(' if (lib == NULL)')
+        prnt(' return NULL;')
+        prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+        prnt(' Py_DECREF(lib);')
+        prnt(' return NULL;')
+        prnt(' }')
+        prnt(' return lib;')
+        prnt('}')
+        prnt()
+        prnt('#else')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('init%s(void)' % modname)
+        prnt('{')
+        prnt(' PyObject *lib;')
+        prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
+        prnt(' if (lib == NULL)')
+        prnt(' return;')
+        prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
+        prnt(' return;')
+        prnt(' return;')
+        prnt('}')
+        prnt()
+        prnt('#endif')
+
+    def load_library(self, flags=None):
+        # XXX review all usages of 'self' here!
+        # import it as a new extension module
+        imp.acquire_lock()
+        try:
+            if hasattr(sys, "getdlopenflags"):
+                previous_flags = sys.getdlopenflags()
+            try:
+                if hasattr(sys, "setdlopenflags") and flags is not None:
+                    sys.setdlopenflags(flags)
+                module = imp.load_dynamic(self.verifier.get_module_name(),
+                                          self.verifier.modulefilename)
+            except ImportError as e:
+                error = "importing %r: %s" % (self.verifier.modulefilename, e)
+                raise VerificationError(error)
+            finally:
+                if hasattr(sys, "setdlopenflags"):
+                    sys.setdlopenflags(previous_flags)
+        finally:
+            imp.release_lock()
+        #
+        # call loading_cpy_struct() to get the struct layout inferred by
+        # the C compiler
+        self._load(module, 'loading')
+        #
+        # the C code will need the <ctype> objects. Collect them in
+        # order in a list.
+        revmapping = dict([(value, key)
+                           for (key, value) in self._typesdict.items()])
+        lst = [revmapping[i] for i in range(len(revmapping))]
+        lst = list(map(self.ffi._get_cached_btype, lst))
+        #
+        # build the FFILibrary class and instance and call _cffi_setup().
+        # this will set up some fields like '_cffi_types', and only then
+        # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as <cdata> objects if they are
+        # pointers, and store them as attributes on the 'library' object.
+ class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. + self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if 
(datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i 
for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
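+    # (illustration: a cdef like
+    #      ffi.cdef("typedef struct { int x, y; } point_t;")
+    #  produces such an anonymous struct; the generators below then receive
+    #  the typedef name "point_t" as 'name'.)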
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = (tp.length == '...')) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length == '...': + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
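Note on _loaded_cpy_variable() above: for an 'int a[...];' declaration the generated code also reports sizeof(a), and the length is recovered by dividing that by the item size; a nonzero remainder means the symbol cannot really be such an array. A standalone sketch with illustrative sizes:

    def infer_length(total_size, item_size, name='a'):
        length, rest = divmod(total_size, item_size)
        if rest != 0:
            raise ValueError("bad size: %r does not seem to be an array"
                             % (name,))
        return length

    assert infer_length(20, 4) == 5    # int a[5] with a 4-byte int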
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? 
(((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + 
p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/server/www/packages/packages-linux/x64/cffi/vengine_gen.py b/server/www/packages/packages-linux/x64/cffi/vengine_gen.py new file mode 100644 index 0000000..a64ff64 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
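Note on find_module() above: it probes every directory on the search path for each candidate extension suffix and returns the first existing file. A self-contained sketch (module name and suffix list are illustrative; a miss returns None):

    import os, sys

    def find_module(module_name, path, so_suffixes):
        for so_suffix in so_suffixes:
            basename = module_name + so_suffix
            if path is None:
                path = sys.path
            for dirname in path:
                filename = os.path.join(dirname, basename)
                if os.path.isfile(filename):
                    return filename

    print(find_module('_cffi_demo', ['.'], ['.so', '.pyd']))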
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
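Note on _loading_struct_or_union() above: the generic engine cannot hand a Python list back from C, so the layout function returns nums[i] one entry at a time and the loader keeps calling it until the -1 sentinel. A sketch against a fake table:

    def read_layout(layout_fn):
        layout = []
        num = 0
        while True:
            x = layout_fn(num)
            if x < 0:
                break
            layout.append(x)
            num += 1
        return layout

    # illustrative nums[] contents: size, alignment, then (offset, size) pairs
    table = [8, 4, 0, 4, 4, 4, -1]
    assert read_layout(lambda i: table[i]) == [8, 4, 0, 4, 4, 4]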
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length == '...': + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif +''' diff --git a/server/www/packages/packages-linux/x64/cffi/verifier.py b/server/www/packages/packages-linux/x64/cffi/verifier.py new file mode 100644 index 0000000..59b78c2 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + ffiplatform._hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/server/www/packages/packages-linux/x64/cryptography/__about__.py b/server/www/packages/packages-linux/x64/cryptography/__about__.py index ac18bb4..218b238 100644 --- a/server/www/packages/packages-linux/x64/cryptography/__about__.py +++ b/server/www/packages/packages-linux/x64/cryptography/__about__.py @@ -14,10 +14,10 @@ __summary__ = ("cryptography is a package which provides cryptographic recipes" " and primitives to Python developers.") __uri__ = "https://github.com/pyca/cryptography" -__version__ = "2.4.2" +__version__ = "2.9.2" __author__ = "The cryptography developers" __email__ = "cryptography-dev@python.org" __license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2013-2017 {0}".format(__author__) +__copyright__ = "Copyright 2013-2019 {}".format(__author__) diff --git a/server/www/packages/packages-linux/x64/cryptography/exceptions.py b/server/www/packages/packages-linux/x64/cryptography/exceptions.py index 648cf9d..1d52d7d 100644 --- a/server/www/packages/packages-linux/x64/cryptography/exceptions.py +++ b/server/www/packages/packages-linux/x64/cryptography/exceptions.py @@ -19,6 +19,7 @@ class _Reasons(Enum): UNSUPPORTED_X509 = 8 UNSUPPORTED_EXCHANGE_ALGORITHM = 9 UNSUPPORTED_DIFFIE_HELLMAN = 10 + UNSUPPORTED_MAC = 11 class UnsupportedAlgorithm(Exception): diff --git a/server/www/packages/packages-linux/x64/cryptography/fernet.py b/server/www/packages/packages-linux/x64/cryptography/fernet.py index ac2dd0b..b990def 100644 --- a/server/www/packages/packages-linux/x64/cryptography/fernet.py +++ b/server/www/packages/packages-linux/x64/cryptography/fernet.py @@ -12,6 +12,7 @@ import time import six +from cryptography import utils from cryptography.exceptions import InvalidSignature from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, padding @@ -51,8 +52,7 @@ class Fernet(object): return self._encrypt_from_parts(data, current_time, iv) def _encrypt_from_parts(self, data, current_time, iv): - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + utils._check_bytes("data", data) padder = padding.PKCS7(algorithms.AES.block_size).padder() padded_data = padder.update(data) + padder.finalize() @@ -82,9 +82,7 @@ class Fernet(object): @staticmethod def _get_unverified_token_data(token): - if not isinstance(token, bytes): - raise TypeError("token must be bytes.") - + utils._check_bytes("token", token) try: data = base64.urlsafe_b64decode(token) except (TypeError, binascii.Error): diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/_der.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/_der.py new file mode 100644 index 0000000..51518d6 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/_der.py @@ -0,0 +1,156 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography.utils import int_from_bytes, int_to_bytes + + +# This module contains a lightweight DER encoder and decoder. See X.690 for the +# specification. 
This module intentionally does not implement the more complex +# BER encoding, only DER. +# +# Note this implementation treats an element's constructed bit as part of the +# tag. This is fine for DER, where the bit is always computable from the type. + + +CONSTRUCTED = 0x20 +CONTEXT_SPECIFIC = 0x80 + +INTEGER = 0x02 +BIT_STRING = 0x03 +OCTET_STRING = 0x04 +NULL = 0x05 +OBJECT_IDENTIFIER = 0x06 +SEQUENCE = 0x10 | CONSTRUCTED +SET = 0x11 | CONSTRUCTED +PRINTABLE_STRING = 0x13 +UTC_TIME = 0x17 +GENERALIZED_TIME = 0x18 + + +class DERReader(object): + def __init__(self, data): + self.data = memoryview(data) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + if exc_value is None: + self.check_empty() + + def is_empty(self): + return len(self.data) == 0 + + def check_empty(self): + if not self.is_empty(): + raise ValueError("Invalid DER input: trailing data") + + def read_byte(self): + if len(self.data) < 1: + raise ValueError("Invalid DER input: insufficient data") + ret = six.indexbytes(self.data, 0) + self.data = self.data[1:] + return ret + + def read_bytes(self, n): + if len(self.data) < n: + raise ValueError("Invalid DER input: insufficient data") + ret = self.data[:n] + self.data = self.data[n:] + return ret + + def read_any_element(self): + tag = self.read_byte() + # Tag numbers 31 or higher are stored in multiple bytes. No supported + # ASN.1 types use such tags, so reject these. + if tag & 0x1f == 0x1f: + raise ValueError("Invalid DER input: unexpected high tag number") + length_byte = self.read_byte() + if length_byte & 0x80 == 0: + # If the high bit is clear, the first length byte is the length. + length = length_byte + else: + # If the high bit is set, the first length byte encodes the length + # of the length. + length_byte &= 0x7f + if length_byte == 0: + raise ValueError( + "Invalid DER input: indefinite length form is not allowed " + "in DER" + ) + length = 0 + for i in range(length_byte): + length <<= 8 + length |= self.read_byte() + if length == 0: + raise ValueError( + "Invalid DER input: length was not minimally-encoded" + ) + if length < 0x80: + # If the length could have been encoded in short form, it must + # not use long form. + raise ValueError( + "Invalid DER input: length was not minimally-encoded" + ) + body = self.read_bytes(length) + return tag, DERReader(body) + + def read_element(self, expected_tag): + tag, body = self.read_any_element() + if tag != expected_tag: + raise ValueError("Invalid DER input: unexpected tag") + return body + + def read_single_element(self, expected_tag): + with self: + return self.read_element(expected_tag) + + def read_optional_element(self, expected_tag): + if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag: + return self.read_element(expected_tag) + return None + + def as_integer(self): + if len(self.data) == 0: + raise ValueError("Invalid DER input: empty integer contents") + first = six.indexbytes(self.data, 0) + if first & 0x80 == 0x80: + raise ValueError("Negative DER integers are not supported") + # The first 9 bits must not all be zero or all be ones. Otherwise, the + # encoding should have been one byte shorter. 
+ if len(self.data) > 1: + second = six.indexbytes(self.data, 1) + if first == 0 and second & 0x80 == 0: + raise ValueError( + "Invalid DER input: integer not minimally-encoded" + ) + return int_from_bytes(self.data, "big") + + +def encode_der_integer(x): + if not isinstance(x, six.integer_types): + raise ValueError("Value must be an integer") + if x < 0: + raise ValueError("Negative integers are not supported") + n = x.bit_length() // 8 + 1 + return int_to_bytes(x, n) + + +def encode_der(tag, *children): + length = 0 + for child in children: + length += len(child) + chunks = [six.int2byte(tag)] + if length < 0x80: + chunks.append(six.int2byte(length)) + else: + length_bytes = int_to_bytes(length) + chunks.append(six.int2byte(0x80 | len(length_bytes))) + chunks.append(length_bytes) + chunks.extend(children) + return b"".join(chunks) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/_oid.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/_oid.py index cfe906c..f98912f 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/_oid.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/_oid.py @@ -19,11 +19,16 @@ class ObjectIdentifier(object): # range 0..39. All nodes must be integers. for node in nodes: try: - intnodes.append(int(node, 0)) + node_value = int(node, 10) except ValueError: raise ValueError( "Malformed OID: %s (non-integer nodes)" % ( self._dotted_string)) + if node_value < 0: + raise ValueError( + "Malformed OID: %s (negative-integer nodes)" % ( + self._dotted_string)) + intnodes.append(node_value) if len(nodes) < 2: raise ValueError( @@ -50,7 +55,7 @@ class ObjectIdentifier(object): return not self == other def __repr__(self): - return "".format( + return "".format( self.dotted_string, self._name ) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/interfaces.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/interfaces.py index 0a476b9..20f4164 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/interfaces.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/interfaces.py @@ -57,7 +57,7 @@ class HMACBackend(object): @abc.abstractmethod def create_hmac_ctx(self, key, algorithm): """ - Create a MACContext for calculating a message authentication code. + Create a context for calculating a message authentication code. """ @@ -72,7 +72,7 @@ class CMACBackend(object): @abc.abstractmethod def create_cmac_ctx(self, algorithm): """ - Create a MACContext for calculating a message authentication code. + Create a context for calculating a message authentication code. 
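Note on the _der helpers added earlier in this patch: a small usage sketch wrapping two INTEGERs in a SEQUENCE and reading them back, relying only on the new cryptography.hazmat._der module shown above (the values are illustrative):

    from cryptography.hazmat._der import (
        DERReader, INTEGER, SEQUENCE, encode_der, encode_der_integer
    )

    encoded = encode_der(
        SEQUENCE,
        encode_der(INTEGER, encode_der_integer(1)),
        encode_der(INTEGER, encode_der_integer(65537)),
    )
    # read_single_element() checks the outer reader is fully consumed;
    # the with-block checks the same for the SEQUENCE body.
    with DERReader(encoded).read_single_element(SEQUENCE) as seq:
        assert seq.read_element(INTEGER).as_integer() == 1
        assert seq.read_element(INTEGER).as_integer() == 65537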
""" diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/aead.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/aead.py index 9cec3e2..0cad15c 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/aead.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/aead.py @@ -18,10 +18,10 @@ def _aead_cipher_name(cipher): if isinstance(cipher, ChaCha20Poly1305): return b"chacha20-poly1305" elif isinstance(cipher, AESCCM): - return "aes-{0}-ccm".format(len(cipher._key) * 8).encode("ascii") + return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii") else: assert isinstance(cipher, AESGCM) - return "aes-{0}-gcm".format(len(cipher._key) * 8).encode("ascii") + return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii") def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation): @@ -49,17 +49,20 @@ def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation): ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag ) backend.openssl_assert(res != 0) - else: + elif cipher_name.endswith(b"-ccm"): res = backend._lib.EVP_CIPHER_CTX_ctrl( ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL ) + backend.openssl_assert(res != 0) + nonce_ptr = backend._ffi.from_buffer(nonce) + key_ptr = backend._ffi.from_buffer(key) res = backend._lib.EVP_CipherInit_ex( ctx, backend._ffi.NULL, backend._ffi.NULL, - key, - nonce, + key_ptr, + nonce_ptr, int(operation == _ENCRYPT) ) backend.openssl_assert(res != 0) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/backend.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/backend.py index 19734a5..96fa9ff 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/backend.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/backend.py @@ -5,18 +5,19 @@ from __future__ import absolute_import, division, print_function import base64 -import calendar import collections import contextlib import itertools from contextlib import contextmanager -import asn1crypto.core - import six +from six.moves import range from cryptography import utils, x509 from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._der import ( + INTEGER, NULL, SEQUENCE, encode_der, encode_der_integer +) from cryptography.hazmat.backends.interfaces import ( CMACBackend, CipherBackend, DERSerializationBackend, DHBackend, DSABackend, EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, @@ -26,7 +27,7 @@ from cryptography.hazmat.backends.openssl import aead from cryptography.hazmat.backends.openssl.ciphers import _CipherContext from cryptography.hazmat.backends.openssl.cmac import _CMACContext from cryptography.hazmat.backends.openssl.decode_asn1 import ( - _CRL_ENTRY_REASON_ENUM_TO_CODE, _Integers + _CRL_ENTRY_REASON_ENUM_TO_CODE ) from cryptography.hazmat.backends.openssl.dh import ( _DHParameters, _DHPrivateKey, _DHPublicKey, _dh_params_dup @@ -37,6 +38,12 @@ from cryptography.hazmat.backends.openssl.dsa import ( from cryptography.hazmat.backends.openssl.ec import ( _EllipticCurvePrivateKey, _EllipticCurvePublicKey ) +from cryptography.hazmat.backends.openssl.ed25519 import ( + _Ed25519PrivateKey, _Ed25519PublicKey +) +from cryptography.hazmat.backends.openssl.ed448 import ( + _ED448_KEY_SIZE, _Ed448PrivateKey, _Ed448PublicKey +) from 
cryptography.hazmat.backends.openssl.encode_asn1 import ( _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS, @@ -49,19 +56,27 @@ from cryptography.hazmat.backends.openssl.hmac import _HMACContext from cryptography.hazmat.backends.openssl.ocsp import ( _OCSPRequest, _OCSPResponse ) +from cryptography.hazmat.backends.openssl.poly1305 import ( + _POLY1305_KEY_SIZE, _Poly1305Context +) from cryptography.hazmat.backends.openssl.rsa import ( _RSAPrivateKey, _RSAPublicKey ) from cryptography.hazmat.backends.openssl.x25519 import ( _X25519PrivateKey, _X25519PublicKey ) +from cryptography.hazmat.backends.openssl.x448 import ( + _X448PrivateKey, _X448PublicKey +) from cryptography.hazmat.backends.openssl.x509 import ( _Certificate, _CertificateRevocationList, _CertificateSigningRequest, _RevokedCertificate ) from cryptography.hazmat.bindings.openssl import binding from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric import ( + dsa, ec, ed25519, ed448, rsa +) from cryptography.hazmat.primitives.asymmetric.padding import ( MGF1, OAEP, PKCS1v15, PSS ) @@ -72,12 +87,18 @@ from cryptography.hazmat.primitives.ciphers.modes import ( CBC, CFB, CFB8, CTR, ECB, GCM, OFB, XTS ) from cryptography.hazmat.primitives.kdf import scrypt +from cryptography.hazmat.primitives.serialization import ssh from cryptography.x509 import ocsp _MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) +# Not actually supported, just used as a marker for some serialization tests. +class _RC2(object): + pass + + @utils.register_interface(CipherBackend) @utils.register_interface(CMACBackend) @utils.register_interface(DERSerializationBackend) @@ -115,21 +136,23 @@ class Backend(object): return binding._openssl_assert(self._lib, ok) def activate_builtin_random(self): - # Obtain a new structural reference. - e = self._lib.ENGINE_get_default_RAND() - if e != self._ffi.NULL: - self._lib.ENGINE_unregister_RAND(e) - # Reset the RNG to use the new engine. - self._lib.RAND_cleanup() - # decrement the structural reference from get_default_RAND - res = self._lib.ENGINE_finish(e) - self.openssl_assert(res == 1) + if self._lib.Cryptography_HAS_ENGINE: + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the built-in. + res = self._lib.RAND_set_rand_method(self._ffi.NULL) + self.openssl_assert(res == 1) + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) @contextlib.contextmanager def _get_osurandom_engine(self): # Fetches an engine by id and returns it. This creates a structural # reference. - e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id) + e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id) self.openssl_assert(e != self._ffi.NULL) # Initialize the engine for use. This adds a functional reference. res = self._lib.ENGINE_init(e) @@ -146,14 +169,16 @@ class Backend(object): self.openssl_assert(res == 1) def activate_osrandom_engine(self): - # Unregister and free the current engine. - self.activate_builtin_random() - with self._get_osurandom_engine() as e: - # Set the engine as the default RAND provider. - res = self._lib.ENGINE_set_default_RAND(e) + if self._lib.Cryptography_HAS_ENGINE: + # Unregister and free the current engine. 
+ self.activate_builtin_random() + with self._get_osurandom_engine() as e: + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the engine + res = self._lib.RAND_set_rand_method(self._ffi.NULL) self.openssl_assert(res == 1) - # Reset the RNG to use the new engine. - self._lib.RAND_cleanup() def osrandom_engine_implementation(self): buf = self._ffi.new("char[]", 64) @@ -183,7 +208,7 @@ class Backend(object): def _evp_md_from_algorithm(self, algorithm): if algorithm.name == "blake2b" or algorithm.name == "blake2s": - alg = "{0}{1}".format( + alg = "{}{}".format( algorithm.name, algorithm.digest_size * 8 ).encode("ascii") else: @@ -217,7 +242,7 @@ class Backend(object): def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): if (cipher_cls, mode_cls) in self._cipher_registry: - raise ValueError("Duplicate registration for: {0} {1}.".format( + raise ValueError("Duplicate registration for: {} {}.".format( cipher_cls, mode_cls) ) self._cipher_registry[cipher_cls, mode_cls] = adapter @@ -272,6 +297,10 @@ class Backend(object): type(None), GetCipherByName("rc4") ) + # We don't actually support RC2, this is just used by some tests. + self.register_cipher_adapter( + _RC2, type(None), GetCipherByName("rc2") + ) self.register_cipher_adapter( ChaCha20, type(None), @@ -292,8 +321,9 @@ class Backend(object): key_material): buf = self._ffi.new("unsigned char[]", length) evp_md = self._evp_md_non_null_from_algorithm(algorithm) + key_material_ptr = self._ffi.from_buffer(key_material) res = self._lib.PKCS5_PBKDF2_HMAC( - key_material, + key_material_ptr, len(key_material), salt, len(salt), @@ -318,7 +348,10 @@ class Backend(object): bin_len = self._lib.BN_bn2bin(bn, bin_ptr) # A zero length means the BN has value 0 self.openssl_assert(bin_len >= 0) - return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + if self._lib.BN_is_negative(bn): + val = -val + return val else: # Under Python 2 the best we can do is hex() hex_cdata = self._lib.BN_bn2hex(bn) @@ -446,13 +479,13 @@ class Backend(object): The char* is the storage for the BIO and it must stay alive until the BIO is finished with. 
""" - data_char_p = self._ffi.new("char[]", data) + data_ptr = self._ffi.from_buffer(data) bio = self._lib.BIO_new_mem_buf( - data_char_p, len(data) + data_ptr, len(data) ) self.openssl_assert(bio != self._ffi.NULL) - return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr) def _create_mem_bio_gc(self): """ @@ -504,6 +537,18 @@ class Backend(object): self.openssl_assert(dh_cdata != self._ffi.NULL) dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) return _DHPrivateKey(self, dh_cdata, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None): + # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1 + return _Ed25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X448", None): + # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1 + return _X448PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0 + return _X25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None): + # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1 + return _Ed448PrivateKey(self, evp_pkey) else: raise UnsupportedAlgorithm("Unsupported key type.") @@ -535,6 +580,18 @@ class Backend(object): self.openssl_assert(dh_cdata != self._ffi.NULL) dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) return _DHPublicKey(self, dh_cdata, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None): + # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1 + return _Ed25519PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X448", None): + # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1 + return _X448PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0 + return _X25519PublicKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.1 + return _Ed448PublicKey(self, evp_pkey) else: raise UnsupportedAlgorithm("Unsupported key type.") @@ -676,10 +733,18 @@ class Backend(object): return _CMACContext(self, algorithm) def create_x509_csr(self, builder, private_key, algorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError('Algorithm must be a registered hash algorithm.') + if not isinstance(builder, x509.CertificateSigningRequestBuilder): + raise TypeError('Builder type mismatch.') - if ( + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + elif ( isinstance(algorithm, hashes.MD5) and not isinstance(private_key, rsa.RSAPrivateKey) ): @@ -688,7 +753,7 @@ class Backend(object): ) # Resolve the signature algorithm. - evp_md = self._evp_md_non_null_from_algorithm(algorithm) + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) # Create an empty request. 
x509_req = self._lib.X509_REQ_new() @@ -755,7 +820,13 @@ class Backend(object): def create_x509_certificate(self, builder, private_key, algorithm): if not isinstance(builder, x509.CertificateBuilder): raise TypeError('Builder type mismatch.') - if not isinstance(algorithm, hashes.HashAlgorithm): + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): raise TypeError('Algorithm must be a registered hash algorithm.') if ( @@ -763,11 +834,11 @@ class Backend(object): isinstance(private_key, rsa.RSAPrivateKey) ): raise ValueError( - "MD5 is not a supported hash algorithm for EC/DSA certificates" + "MD5 is only (reluctantly) supported for RSA certificates" ) # Resolve the signature algorithm. - evp_md = self._evp_md_non_null_from_algorithm(algorithm) + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) # Create an empty certificate. x509_cert = self._lib.X509_new() @@ -796,12 +867,12 @@ class Backend(object): # Set the "not before" time. self._set_asn1_time( - self._lib.X509_get_notBefore(x509_cert), builder._not_valid_before + self._lib.X509_getm_notBefore(x509_cert), builder._not_valid_before ) # Set the "not after" time. self._set_asn1_time( - self._lib.X509_get_notAfter(x509_cert), builder._not_valid_after + self._lib.X509_getm_notAfter(x509_cert), builder._not_valid_after ) # Add extensions. @@ -835,21 +906,21 @@ class Backend(object): return _Certificate(self, x509_cert) + def _evp_md_x509_null_if_eddsa(self, private_key, algorithm): + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + # OpenSSL requires us to pass NULL for EVP_MD for ed25519/ed448 + return self._ffi.NULL + else: + return self._evp_md_non_null_from_algorithm(algorithm) + def _set_asn1_time(self, asn1_time, time): - timestamp = calendar.timegm(time.timetuple()) - res = self._lib.ASN1_TIME_set(asn1_time, timestamp) - if res == self._ffi.NULL: - errors = self._consume_errors() - self.openssl_assert( - errors[0]._lib_reason_match( - self._lib.ERR_LIB_ASN1, - self._lib.ASN1_R_ERROR_GETTING_TIME - ) - ) - raise ValueError( - "Invalid time. This error can occur if you set a time too far " - "in the future on Windows." - ) + if time.year >= 2050: + asn1_str = time.strftime('%Y%m%d%H%M%SZ').encode('ascii') + else: + asn1_str = time.strftime('%y%m%d%H%M%SZ').encode('ascii') + res = self._lib.ASN1_TIME_set_string(asn1_time, asn1_str) + self.openssl_assert(res == 1) def _create_asn1_time(self, time): asn1_time = self._lib.ASN1_TIME_new() @@ -861,7 +932,13 @@ class Backend(object): def create_x509_crl(self, builder, private_key, algorithm): if not isinstance(builder, x509.CertificateRevocationListBuilder): raise TypeError('Builder type mismatch.') - if not isinstance(algorithm, hashes.HashAlgorithm): + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): raise TypeError('Algorithm must be a registered hash algorithm.') if ( @@ -872,7 +949,7 @@ class Backend(object): "MD5 is not a supported hash algorithm for EC/DSA CRLs" ) - evp_md = self._evp_md_non_null_from_algorithm(algorithm) + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) # Create an empty CRL. 
x509_crl = self._lib.X509_CRL_new() @@ -959,19 +1036,24 @@ class Backend(object): value = _encode_asn1_str_gc(self, extension.value.value) return self._create_raw_x509_extension(extension, value) elif isinstance(extension.value, x509.TLSFeature): - asn1 = _Integers([x.value for x in extension.value]).dump() + asn1 = encode_der( + SEQUENCE, + *[ + encode_der(INTEGER, encode_der_integer(x.value)) + for x in extension.value + ] + ) value = _encode_asn1_str_gc(self, asn1) return self._create_raw_x509_extension(extension, value) elif isinstance(extension.value, x509.PrecertPoison): - asn1 = asn1crypto.core.Null().dump() - value = _encode_asn1_str_gc(self, asn1) + value = _encode_asn1_str_gc(self, encode_der(NULL)) return self._create_raw_x509_extension(extension, value) else: try: encode = handlers[extension.oid] except KeyError: raise NotImplementedError( - 'Extension not supported: {0}'.format(extension.oid) + 'Extension not supported: {}'.format(extension.oid) ) ext_struct = encode(self, extension.value) @@ -1137,7 +1219,10 @@ class Backend(object): ) if x509 == self._ffi.NULL: self._consume_errors() - raise ValueError("Unable to load certificate") + raise ValueError( + "Unable to load certificate. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details." + ) x509 = self._ffi.gc(x509, self._lib.X509_free) return _Certificate(self, x509) @@ -1159,7 +1244,10 @@ class Backend(object): ) if x509_crl == self._ffi.NULL: self._consume_errors() - raise ValueError("Unable to load CRL") + raise ValueError( + "Unable to load CRL. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details." + ) x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) return _CertificateRevocationList(self, x509_crl) @@ -1181,7 +1269,10 @@ class Backend(object): ) if x509_req == self._ffi.NULL: self._consume_errors() - raise ValueError("Unable to load request") + raise ValueError( + "Unable to load request. See https://cryptography.io/en/la" + "test/faq/#why-can-t-i-import-my-pem-file for more details." 
+ ) x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) return _CertificateSigningRequest(self, x509_req) @@ -1199,13 +1290,11 @@ class Backend(object): def _load_key(self, openssl_read_func, convert_func, data, password): mem_bio = self._bytes_to_bio(data) - if password is not None and not isinstance(password, bytes): - raise TypeError("Password must be bytes") - userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *") if password is not None: - password_buf = self._ffi.new("char []", password) - userdata.password = password_buf + utils._check_byteslike("password", password) + password_ptr = self._ffi.from_buffer(password) + userdata.password = password_ptr userdata.length = len(password) evp_pkey = openssl_read_func( @@ -1228,7 +1317,7 @@ class Backend(object): else: assert userdata.error == -2 raise ValueError( - "Passwords longer than {0} bytes are not supported " + "Passwords longer than {} bytes are not supported " "by this backend.".format(userdata.maxsize - 1) ) else: @@ -1330,11 +1419,7 @@ class Backend(object): """ if self.elliptic_curve_supported(curve): - curve_nid = self._elliptic_curve_to_nid(curve) - - ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) - self.openssl_assert(ec_cdata != self._ffi.NULL) - ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + ec_cdata = self._ec_key_new_by_curve(curve) res = self._lib.EC_KEY_generate_key(ec_cdata) self.openssl_assert(res == 1) @@ -1344,18 +1429,14 @@ class Backend(object): return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) else: raise UnsupportedAlgorithm( - "Backend object does not support {0}.".format(curve.name), + "Backend object does not support {}.".format(curve.name), _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) def load_elliptic_curve_private_numbers(self, numbers): public = numbers.public_numbers - curve_nid = self._elliptic_curve_to_nid(public.curve) - - ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) - self.openssl_assert(ec_cdata != self._ffi.NULL) - ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + ec_cdata = self._ec_key_new_by_curve(public.curve) private_value = self._ffi.gc( self._int_to_bn(numbers.private_value), self._lib.BN_clear_free @@ -1371,24 +1452,35 @@ class Backend(object): return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) def load_elliptic_curve_public_numbers(self, numbers): - curve_nid = self._elliptic_curve_to_nid(numbers.curve) - - ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) - self.openssl_assert(ec_cdata != self._ffi.NULL) - ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) - + ec_cdata = self._ec_key_new_by_curve(numbers.curve) ec_cdata = self._ec_key_set_public_key_affine_coordinates( ec_cdata, numbers.x, numbers.y) evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) - def derive_elliptic_curve_private_key(self, private_value, curve): - curve_nid = self._elliptic_curve_to_nid(curve) + def load_elliptic_curve_public_bytes(self, curve, point_bytes): + ec_cdata = self._ec_key_new_by_curve(curve) + group = self._lib.EC_KEY_get0_group(ec_cdata) + self.openssl_assert(group != self._ffi.NULL) + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + with self._tmp_bn_ctx() as bn_ctx: + res = self._lib.EC_POINT_oct2point( + group, point, point_bytes, len(point_bytes), bn_ctx + ) + if res != 1: + self._consume_errors() + raise ValueError("Invalid public bytes for the given curve") - ec_cdata = 
self._lib.EC_KEY_new_by_curve_name(curve_nid) - self.openssl_assert(ec_cdata != self._ffi.NULL) - ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + res = self._lib.EC_KEY_set_public_key(ec_cdata, point) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def derive_elliptic_curve_private_key(self, private_value, curve): + ec_cdata = self._ec_key_new_by_curve(curve) get_func, group = self._ec_key_determine_group_get_func(ec_cdata) @@ -1421,6 +1513,12 @@ class Backend(object): return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + def _ec_key_new_by_curve(self, curve): + curve_nid = self._elliptic_curve_to_nid(curve) + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + def load_der_ocsp_request(self, data): mem_bio = self._bytes_to_bio(data) request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL) @@ -1507,7 +1605,7 @@ class Backend(object): ) self.openssl_assert(res != self._ffi.NULL) # okay, now sign the basic structure - evp_md = self._evp_md_non_null_from_algorithm(algorithm) + evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm) responder_cert, responder_encoding = builder._responder_id flags = self._lib.OCSP_NOCERTS if responder_encoding is ocsp.OCSPResponderEncoding.HASH: @@ -1585,7 +1683,7 @@ class Backend(object): curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) if curve_nid == self._lib.NID_undef: raise UnsupportedAlgorithm( - "{0} is not a supported elliptic curve".format(curve.name), + "{} is not a supported elliptic curve".format(curve.name), _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) return curve_nid @@ -1656,6 +1754,20 @@ class Backend(object): "format must be an item from the PrivateFormat enum" ) + # X9.62 encoding is only valid for EC public keys + if encoding is serialization.Encoding.X962: + raise ValueError("X9.62 format is only valid for EC public keys") + + # Raw format and encoding are only valid for X25519, Ed25519, X448, and + # Ed448 keys. We capture those cases before this method is called so if + # we see those enum values here it means the caller has passed them to + # a key that doesn't support raw type + if format is serialization.PrivateFormat.Raw: + raise ValueError("raw format is invalid with this key or encoding") + + if encoding is serialization.Encoding.Raw: + raise ValueError("raw encoding is invalid with this key or format") + if not isinstance(encryption_algorithm, serialization.KeySerializationEncryption): raise TypeError( @@ -1715,7 +1827,7 @@ class Backend(object): write_bio = self._lib.i2d_PKCS8PrivateKey_bio key = evp_pkey else: - raise TypeError("encoding must be an item from the Encoding enum") + raise TypeError("encoding must be Encoding.PEM or Encoding.DER") bio = self._create_mem_bio_gc() res = write_bio( @@ -1748,6 +1860,23 @@ class Backend(object): if not isinstance(encoding, serialization.Encoding): raise TypeError("encoding must be an item from the Encoding enum") + # Compressed/UncompressedPoint are only valid for EC keys and those + # cases are handled by the ECPublicKey public_bytes method before this + # method is called + if format in (serialization.PublicFormat.UncompressedPoint, + serialization.PublicFormat.CompressedPoint): + raise ValueError("Point formats are not valid for this key type") + + # Raw format and encoding are only valid for X25519, Ed25519, X448, and + # Ed448 keys. 
We capture those cases before this method is called so if + # we see those enum values here it means the caller has passed them to + # a key that doesn't support raw type + if format is serialization.PublicFormat.Raw: + raise ValueError("raw format is invalid with this key or encoding") + + if encoding is serialization.Encoding.Raw: + raise ValueError("raw encoding is invalid with this key or format") + if ( format is serialization.PublicFormat.OpenSSH or encoding is serialization.Encoding.OpenSSH @@ -1792,22 +1921,28 @@ class Backend(object): if isinstance(key, rsa.RSAPublicKey): public_numbers = key.public_numbers() return b"ssh-rsa " + base64.b64encode( - serialization._ssh_write_string(b"ssh-rsa") + - serialization._ssh_write_mpint(public_numbers.e) + - serialization._ssh_write_mpint(public_numbers.n) + ssh._ssh_write_string(b"ssh-rsa") + + ssh._ssh_write_mpint(public_numbers.e) + + ssh._ssh_write_mpint(public_numbers.n) ) elif isinstance(key, dsa.DSAPublicKey): public_numbers = key.public_numbers() parameter_numbers = public_numbers.parameter_numbers return b"ssh-dss " + base64.b64encode( - serialization._ssh_write_string(b"ssh-dss") + - serialization._ssh_write_mpint(parameter_numbers.p) + - serialization._ssh_write_mpint(parameter_numbers.q) + - serialization._ssh_write_mpint(parameter_numbers.g) + - serialization._ssh_write_mpint(public_numbers.y) + ssh._ssh_write_string(b"ssh-dss") + + ssh._ssh_write_mpint(parameter_numbers.p) + + ssh._ssh_write_mpint(parameter_numbers.q) + + ssh._ssh_write_mpint(parameter_numbers.g) + + ssh._ssh_write_mpint(public_numbers.y) ) - else: - assert isinstance(key, ec.EllipticCurvePublicKey) + elif isinstance(key, ed25519.Ed25519PublicKey): + raw_bytes = key.public_bytes(serialization.Encoding.Raw, + serialization.PublicFormat.Raw) + return b"ssh-ed25519 " + base64.b64encode( + ssh._ssh_write_string(b"ssh-ed25519") + + ssh._ssh_write_string(raw_bytes) + ) + elif isinstance(key, ec.EllipticCurvePublicKey): public_numbers = key.public_numbers() try: curve_name = { @@ -1820,10 +1955,19 @@ class Backend(object): "Only SECP256R1, SECP384R1, and SECP521R1 curves are " "supported by the SSH public key format" ) + + point = key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint + ) return b"ecdsa-sha2-" + curve_name + b" " + base64.b64encode( - serialization._ssh_write_string(b"ecdsa-sha2-" + curve_name) + - serialization._ssh_write_string(curve_name) + - serialization._ssh_write_string(public_numbers.encode_point()) + ssh._ssh_write_string(b"ecdsa-sha2-" + curve_name) + + ssh._ssh_write_string(curve_name) + + ssh._ssh_write_string(point) + ) + else: + raise ValueError( + "OpenSSH encoding is not supported for this key type" ) def _parameter_bytes(self, encoding, format, cdata): @@ -2027,6 +2171,11 @@ class Backend(object): return self._ffi.buffer(pp[0], res)[:] def x25519_load_public_bytes(self, data): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_public_key + if len(data) != 32: + raise ValueError("An X25519 public key is 32 bytes long") + evp_pkey = self._create_evp_pkey_gc() res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519) backend.openssl_assert(res == 1) @@ -2037,6 +2186,9 @@ class Backend(object): return _X25519PublicKey(self, evp_pkey) def x25519_load_private_bytes(self, data): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key and drop the + # zeroed_bytearray garbage. 
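A sketch of the OpenSSH wire format produced above (ssh_string below is a local stand-in for the private ssh._ssh_write_string helper; the key bytes are placeholders):

import base64
import struct

def ssh_string(data):
    # RFC 4251 "string": 4-byte big-endian length prefix, then the raw bytes.
    return struct.pack(">I", len(data)) + data

raw = bytes(32)  # stand-in for the 32-byte raw Ed25519 public key
blob = ssh_string(b"ssh-ed25519") + ssh_string(raw)
line = b"ssh-ed25519 " + base64.b64encode(blob)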
# OpenSSL only has facilities for loading PKCS8 formatted private # keys using the algorithm identifiers specified in # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09. @@ -2050,9 +2202,16 @@ class Backend(object): # Of course there's a bit more complexity. In reality OCTET STRING # contains an OCTET STRING of length 32! So the last two bytes here # are \x04\x20, which is an OCTET STRING of length 32. + if len(data) != 32: + raise ValueError("An X25519 private key is 32 bytes long") + pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 ' - bio = self._bytes_to_bio(pkcs8_prefix + data) - evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL) + with self._zeroed_bytearray(48) as ba: + ba[0:16] = pkcs8_prefix + ba[16:] = data + bio = self._bytes_to_bio(ba) + evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL) + self.openssl_assert(evp_pkey != self._ffi.NULL) evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) self.openssl_assert( @@ -2060,14 +2219,10 @@ class Backend(object): ) return _X25519PrivateKey(self, evp_pkey) - def x25519_generate_key(self): - evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id( - self._lib.NID_X25519, self._ffi.NULL - ) + def _evp_pkey_keygen_gc(self, nid): + evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL) self.openssl_assert(evp_pkey_ctx != self._ffi.NULL) - evp_pkey_ctx = self._ffi.gc( - evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free - ) + evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free) res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx) self.openssl_assert(res == 1) evp_ppkey = self._ffi.new("EVP_PKEY **") @@ -2075,18 +2230,143 @@ class Backend(object): self.openssl_assert(res == 1) self.openssl_assert(evp_ppkey[0] != self._ffi.NULL) evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free) + return evp_pkey + + def x25519_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519) return _X25519PrivateKey(self, evp_pkey) def x25519_supported(self): return self._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER + def x448_load_public_bytes(self, data): + if len(data) != 56: + raise ValueError("An X448 public key is 56 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_X448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PublicKey(self, evp_pkey) + + def x448_load_private_bytes(self, data): + if len(data) != 56: + raise ValueError("An X448 private key is 56 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PrivateKey(self, evp_pkey) + + def x448_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448) + return _X448PrivateKey(self, evp_pkey) + + def x448_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 + + def ed25519_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + + def ed25519_load_public_bytes(self, data): + utils._check_bytes("data", data) + + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 public key is 32 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED25519, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + 
evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PublicKey(self, evp_pkey) + + def ed25519_load_private_bytes(self, data): + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 private key is 32 bytes long") + + utils._check_byteslike("data", data) + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PrivateKey(self, evp_pkey) + + def ed25519_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519) + return _Ed25519PrivateKey(self, evp_pkey) + + def ed448_supported(self): + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + + def ed448_load_public_bytes(self, data): + utils._check_bytes("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 public key is 57 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PublicKey(self, evp_pkey) + + def ed448_load_private_bytes(self, data): + utils._check_byteslike("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 private key is 57 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PrivateKey(self, evp_pkey) + + def ed448_generate_key(self): + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448) + return _Ed448PrivateKey(self, evp_pkey) + def derive_scrypt(self, key_material, salt, length, n, r, p): buf = self._ffi.new("unsigned char[]", length) + key_material_ptr = self._ffi.from_buffer(key_material) res = self._lib.EVP_PBE_scrypt( - key_material, len(key_material), salt, len(salt), n, r, p, + key_material_ptr, len(key_material), salt, len(salt), n, r, p, scrypt._MEM_LIMIT, buf, length ) - self.openssl_assert(res == 1) + if res != 1: + errors = self._consume_errors() + if not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111: + # This error is only added to the stack in 1.1.1+ + self.openssl_assert( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.ERR_R_MALLOC_FAILURE + ) or + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.EVP_R_MEMORY_LIMIT_EXCEEDED + ) + ) + + # memory required formula explained here: + # https://blog.filippo.io/the-scrypt-parameters/ + min_memory = 128 * n * r // (1024**2) + raise MemoryError( + "Not enough memory to derive key. These parameters require" + " {} MB of memory.".format(min_memory) + ) return self._ffi.buffer(buf)[:] def aead_cipher_supported(self, cipher): @@ -2095,6 +2375,105 @@ class Backend(object): self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL ) + @contextlib.contextmanager + def _zeroed_bytearray(self, length): + """ + This method creates a bytearray, which we copy data into (hopefully + also from a mutable buffer that can be dynamically erased!), and then + zero when we're done. 
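A worked instance of the scrypt memory estimate raised in the MemoryError above (arithmetic only; the parameter values are illustrative):

# memory required ~= 128 * n * r bytes, per the linked scrypt-parameters post
n, r = 2 ** 14, 8                        # common interactive-login parameters
min_memory = 128 * n * r // (1024 ** 2)  # -> 16, i.e. "require 16 MB of memory"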
+ """ + ba = bytearray(length) + try: + yield ba + finally: + self._zero_data(ba, length) + + def _zero_data(self, data, length): + # We clear things this way because at the moment we're not + # sure of a better way that can guarantee it overwrites the + # memory of a bytearray and doesn't just replace the underlying char *. + for i in range(length): + data[i] = 0 + + @contextlib.contextmanager + def _zeroed_null_terminated_buf(self, data): + """ + This method takes bytes, which can be a bytestring or a mutable + buffer like a bytearray, and yields a null-terminated version of that + data. This is required because PKCS12_parse doesn't take a length with + its password char * and ffi.from_buffer doesn't provide null + termination. So, to support zeroing the data via bytearray we + need to build this ridiculous construct that copies the memory, but + zeroes it after use. + """ + if data is None: + yield self._ffi.NULL + else: + data_len = len(data) + buf = self._ffi.new("char[]", data_len + 1) + self._ffi.memmove(buf, data, data_len) + try: + yield buf + finally: + # Cast to a uint8_t * so we can assign by integer + self._zero_data(self._ffi.cast("uint8_t *", buf), data_len) + + def load_key_and_certificates_from_pkcs12(self, data, password): + if password is not None: + utils._check_byteslike("password", password) + + bio = self._bytes_to_bio(data) + p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL) + if p12 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Could not deserialize PKCS12 data") + + p12 = self._ffi.gc(p12, self._lib.PKCS12_free) + evp_pkey_ptr = self._ffi.new("EVP_PKEY **") + x509_ptr = self._ffi.new("X509 **") + sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **") + with self._zeroed_null_terminated_buf(password) as password_buf: + res = self._lib.PKCS12_parse( + p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr + ) + + if res == 0: + self._consume_errors() + raise ValueError("Invalid password or PKCS12 data") + + cert = None + key = None + additional_certificates = [] + + if evp_pkey_ptr[0] != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free) + key = self._evp_pkey_to_private_key(evp_pkey) + + if x509_ptr[0] != self._ffi.NULL: + x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free) + cert = _Certificate(self, x509) + + if sk_x509_ptr[0] != self._ffi.NULL: + sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free) + num = self._lib.sk_X509_num(sk_x509_ptr[0]) + for i in range(num): + x509 = self._lib.sk_X509_value(sk_x509, i) + x509 = self._ffi.gc(x509, self._lib.X509_free) + self.openssl_assert(x509 != self._ffi.NULL) + additional_certificates.append(_Certificate(self, x509)) + + return (key, cert, additional_certificates) + + def poly1305_supported(self): + return self._lib.Cryptography_HAS_POLY1305 == 1 + + def create_poly1305_ctx(self, key): + utils._check_byteslike("key", key) + if len(key) != _POLY1305_KEY_SIZE: + raise ValueError("A poly1305 key is 32 bytes long") + + return _Poly1305Context(self, key) + class GetCipherByName(object): def __init__(self, fmt): @@ -2106,7 +2485,7 @@ class GetCipherByName(object): def _get_xts_cipher(backend, cipher, mode): - cipher_name = "aes-{0}-xts".format(cipher.key_size // 2) + cipher_name = "aes-{}-xts".format(cipher.key_size // 2) return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ciphers.py 
b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ciphers.py index e0ee06e..94b48f5 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ciphers.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ciphers.py @@ -40,7 +40,7 @@ class _CipherContext(object): adapter = registry[type(cipher), type(mode)] except KeyError: raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " + "cipher {} in {} mode is not supported " "by this backend.".format( cipher.name, mode.name if mode else mode), _Reasons.UNSUPPORTED_CIPHER @@ -48,21 +48,25 @@ class _CipherContext(object): evp_cipher = adapter(self._backend, cipher, mode) if evp_cipher == self._backend._ffi.NULL: - raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " - "by this backend.".format( - cipher.name, mode.name if mode else mode), - _Reasons.UNSUPPORTED_CIPHER - ) + msg = "cipher {0.name} ".format(cipher) + if mode is not None: + msg += "in {0.name} mode ".format(mode) + msg += ( + "is not supported by this backend (Your version of OpenSSL " + "may be too old. Current version: {}.)" + ).format(self._backend.openssl_version_text()) + raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER) if isinstance(mode, modes.ModeWithInitializationVector): - iv_nonce = mode.initialization_vector + iv_nonce = self._backend._ffi.from_buffer( + mode.initialization_vector + ) elif isinstance(mode, modes.ModeWithTweak): - iv_nonce = mode.tweak + iv_nonce = self._backend._ffi.from_buffer(mode.tweak) elif isinstance(mode, modes.ModeWithNonce): - iv_nonce = mode.nonce + iv_nonce = self._backend._ffi.from_buffer(mode.nonce) elif isinstance(cipher, modes.ModeWithNonce): - iv_nonce = cipher.nonce + iv_nonce = self._backend._ffi.from_buffer(cipher.nonce) else: iv_nonce = self._backend._ffi.NULL # begin init with cipher and operation type @@ -105,7 +109,7 @@ class _CipherContext(object): ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, - cipher.key, + self._backend._ffi.from_buffer(cipher.key), iv_nonce, operation ) @@ -123,7 +127,7 @@ class _CipherContext(object): def update_into(self, data, buf): if len(buf) < (len(data) + self._block_size_bytes - 1): raise ValueError( - "buffer must be at least {0} bytes for this " + "buffer must be at least {} bytes for this " "payload".format(len(data) + self._block_size_bytes - 1) ) @@ -131,8 +135,10 @@ class _CipherContext(object): "unsigned char *", self._backend._ffi.from_buffer(buf) ) outlen = self._backend._ffi.new("int *") - res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, - data, len(data)) + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, buf, outlen, + self._backend._ffi.from_buffer(data), len(data) + ) self._backend.openssl_assert(res != 0) return outlen[0] @@ -201,7 +207,7 @@ class _CipherContext(object): ) if len(tag) < self._mode._min_tag_length: raise ValueError( - "Authentication tag must be {0} bytes or longer.".format( + "Authentication tag must be {} bytes or longer.".format( self._mode._min_tag_length) ) res = self._backend._lib.EVP_CIPHER_CTX_ctrl( @@ -215,7 +221,8 @@ class _CipherContext(object): def authenticate_additional_data(self, data): outlen = self._backend._ffi.new("int *") res = self._backend._lib.EVP_CipherUpdate( - self._ctx, self._backend._ffi.NULL, outlen, data, len(data) + self._ctx, self._backend._ffi.NULL, outlen, + self._backend._ffi.from_buffer(data), len(data) ) self._backend.openssl_assert(res != 0) diff --git 
a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/cmac.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/cmac.py index e20f66d..d4d46f5 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/cmac.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/cmac.py @@ -9,11 +9,10 @@ from cryptography import utils from cryptography.exceptions import ( InvalidSignature, UnsupportedAlgorithm, _Reasons ) -from cryptography.hazmat.primitives import constant_time, mac +from cryptography.hazmat.primitives import constant_time from cryptography.hazmat.primitives.ciphers.modes import CBC -@utils.register_interface(mac.MACContext) class _CMACContext(object): def __init__(self, backend, algorithm, ctx=None): if not backend.cmac_algorithm_supported(algorithm): @@ -36,8 +35,9 @@ class _CMACContext(object): self._backend.openssl_assert(ctx != self._backend._ffi.NULL) ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + key_ptr = self._backend._ffi.from_buffer(self._key) res = self._backend._lib.CMAC_Init( - ctx, self._key, len(self._key), + ctx, key_ptr, len(self._key), evp_cipher, self._backend._ffi.NULL ) self._backend.openssl_assert(res == 1) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/decode_asn1.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/decode_asn1.py index e06e8cd..7639e68 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/decode_asn1.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/decode_asn1.py @@ -7,11 +7,10 @@ from __future__ import absolute_import, division, print_function import datetime import ipaddress -import asn1crypto.core - import six from cryptography import x509 +from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM from cryptography.x509.name import _ASN1_TYPE_TO_ENUM from cryptography.x509.oid import ( @@ -20,10 +19,6 @@ from cryptography.x509.oid import ( ) -class _Integers(asn1crypto.core.SequenceOf): - _child_spec = asn1crypto.core.Integer - - def _obj2txt(backend, obj): # Set to 80 on the recommendation of # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values @@ -69,7 +64,7 @@ def _decode_x509_name(backend, x509_name): attribute = _decode_x509_name_entry(backend, entry) set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry) if set_id != prev_set_id: - attributes.append(set([attribute])) + attributes.append({attribute}) else: # is in the same RDN a previous entry attributes[-1].add(attribute) @@ -135,7 +130,7 @@ def _decode_general_name(backend, gn): if "1" in bits[prefix:]: raise ValueError("Invalid netmask") - ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix)) + ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix)) else: ip = ipaddress.ip_address(data) @@ -160,7 +155,7 @@ def _decode_general_name(backend, gn): else: # x400Address or ediPartyName raise x509.UnsupportedGeneralNameType( - "{0} is not a supported type".format( + "{} is not a supported type".format( x509._GENERAL_NAMES.get(gn.type, gn.type) ), gn.type @@ -202,27 +197,32 @@ class _X509ExtensionParser(object): ) if oid in seen_oids: raise x509.DuplicateExtension( - "Duplicate {0} extension found".format(oid), oid + "Duplicate {} extension found".format(oid), oid ) # These OIDs are only supported in 
OpenSSL 1.1.0+ but we want # to support them in all versions of OpenSSL so we decode them # ourselves. if oid == ExtensionOID.TLS_FEATURE: + # The extension contents are a SEQUENCE OF INTEGERs. data = backend._lib.X509_EXTENSION_get_data(ext) - parsed = _Integers.load(_asn1_string_to_bytes(backend, data)) + data_bytes = _asn1_string_to_bytes(backend, data) + features = DERReader(data_bytes).read_single_element(SEQUENCE) + parsed = [] + while not features.is_empty(): + parsed.append(features.read_element(INTEGER).as_integer()) + # Map the features to their enum value. value = x509.TLSFeature( - [_TLS_FEATURE_TYPE_TO_ENUM[x.native] for x in parsed] + [_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed] ) extensions.append(x509.Extension(oid, critical, value)) seen_oids.add(oid) continue elif oid == ExtensionOID.PRECERT_POISON: data = backend._lib.X509_EXTENSION_get_data(ext) - parsed = asn1crypto.core.Null.load( - _asn1_string_to_bytes(backend, data) - ) - assert parsed == asn1crypto.core.Null() + # The contents of the extension must be an ASN.1 NULL. + reader = DERReader(_asn1_string_to_bytes(backend, data)) + reader.read_single_element(NULL).check_empty() extensions.append(x509.Extension( oid, critical, x509.PrecertPoison() )) @@ -245,7 +245,7 @@ class _X509ExtensionParser(object): if ext_data == backend._ffi.NULL: backend._consume_errors() raise ValueError( - "The {0} extension is invalid and can't be " + "The {} extension is invalid and can't be " "parsed".format(oid) ) @@ -379,7 +379,14 @@ def _decode_authority_key_identifier(backend, akid): def _decode_authority_information_access(backend, aia): aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia) - aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free) + aia = backend._ffi.gc( + aia, + lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free( + x, backend._ffi.addressof( + backend._lib._original_lib, "ACCESS_DESCRIPTION_free" + ) + ) + ) num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia) access_descriptions = [] for i in range(num): @@ -464,6 +471,30 @@ def _decode_general_subtrees(backend, stack_subtrees): return subtrees +def _decode_issuing_dist_point(backend, idp): + idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp) + idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free) + if idp.distpoint != backend._ffi.NULL: + full_name, relative_name = _decode_distpoint(backend, idp.distpoint) + else: + full_name = None + relative_name = None + + only_user = idp.onlyuser == 255 + only_ca = idp.onlyCA == 255 + indirect_crl = idp.indirectCRL == 255 + only_attr = idp.onlyattr == 255 + if idp.onlysomereasons != backend._ffi.NULL: + only_some_reasons = _decode_reasons(backend, idp.onlysomereasons) + else: + only_some_reasons = None + + return x509.IssuingDistributionPoint( + full_name, relative_name, only_user, only_ca, only_some_reasons, + indirect_crl, only_attr + ) + + def _decode_policy_constraints(backend, pc): pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc) pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free) @@ -674,7 +705,7 @@ def _decode_crl_reason(backend, enum): try: return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code]) except KeyError: - raise ValueError("Unsupported reason code: {0}".format(code)) + raise ValueError("Unsupported reason code: {}".format(code)) def _decode_invalidity_date(backend, inv_date): @@ -734,7 +765,7 @@ def _asn1_string_to_utf8(backend, asn1_string): res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string) if res == -1: raise ValueError( - "Unsupported 
ASN1 string type. Type: {0}".format(asn1_string.type) + "Unsupported ASN1 string type. Type: {}".format(asn1_string.type) ) backend.openssl_assert(buf[0] != backend._ffi.NULL) @@ -814,6 +845,8 @@ _CRL_EXTENSION_HANDLERS = { ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( _decode_authority_information_access ), + ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point, + ExtensionOID.FRESHEST_CRL: _decode_freshest_crl, } _OCSP_REQ_EXTENSION_HANDLERS = { @@ -824,6 +857,10 @@ _OCSP_BASICRESP_EXTENSION_HANDLERS = { OCSPExtensionOID.NONCE: _decode_nonce, } +# All revoked extensions are valid single response extensions, see: +# https://tools.ietf.org/html/rfc6960#section-4.4.5 +_OCSP_SINGLERESP_EXTENSION_HANDLERS = _REVOKED_EXTENSION_HANDLERS.copy() + _CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser( ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), @@ -865,3 +902,9 @@ _OCSP_BASICRESP_EXT_PARSER = _X509ExtensionParser( get_ext=lambda backend, x, i: backend._lib.OCSP_BASICRESP_get_ext(x, i), handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS, ) + +_OCSP_SINGLERESP_EXT_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.OCSP_SINGLERESP_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.OCSP_SINGLERESP_get_ext(x, i), + handlers=_OCSP_SINGLERESP_EXTENSION_HANDLERS, +) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/dsa.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/dsa.py index 48886e4..de61f08 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/dsa.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/dsa.py @@ -211,8 +211,7 @@ class _DSAPublicKey(object): def verifier(self, signature, signature_algorithm): _warn_sign_verify_deprecated() - if not isinstance(signature, bytes): - raise TypeError("signature must be bytes.") + utils._check_bytes("signature", signature) _check_not_prehashed(signature_algorithm) return _DSAVerificationContext( diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ec.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ec.py index 69da234..3d8681b 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ec.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ec.py @@ -34,7 +34,19 @@ def _ec_key_curve_sn(backend, ec_key): # an error for now. if nid == backend._lib.NID_undef: raise NotImplementedError( - "ECDSA certificates with unnamed curves are unsupported " + "ECDSA keys with unnamed curves are unsupported " + "at this time" + ) + + # This is like the above check, but it also catches the case where you + # explicitly encoded a curve with the same parameters as a named curve. + # Don't do that. 
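A public-API sketch of the named-curve guarantee relied on above (cryptography 2.x call signatures assumed): keys generated through the library always carry a named curve, so only externally supplied keys encoded with explicit curve parameters reach the NotImplementedError paths:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec

key = ec.generate_private_key(ec.SECP256R1(), default_backend())
assert key.curve.name == "secp256r1"  # named curve, passes _ec_key_curve_sn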
+ if ( + backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER and + backend._lib.EC_GROUP_get_asn1_flag(group) == 0 + ): + raise NotImplementedError( + "ECDSA keys with unnamed curves are unsupported " "at this time" ) @@ -62,7 +74,7 @@ def _sn_to_elliptic_curve(backend, sn): return ec._CURVE_TYPES[sn]() except KeyError: raise UnsupportedAlgorithm( - "{0} is not a supported elliptic curve".format(sn), + "{} is not a supported elliptic curve".format(sn), _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) @@ -127,12 +139,12 @@ class _ECDSAVerificationContext(object): class _EllipticCurvePrivateKey(object): def __init__(self, backend, ec_key_cdata, evp_pkey): self._backend = backend - _mark_asn1_named_ec_curve(backend, ec_key_cdata) self._ec_key = ec_key_cdata self._evp_pkey = evp_pkey sn = _ec_key_curve_sn(backend, ec_key_cdata) self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) curve = utils.read_only_property("_curve") @@ -229,12 +241,12 @@ class _EllipticCurvePrivateKey(object): class _EllipticCurvePublicKey(object): def __init__(self, backend, ec_key_cdata, evp_pkey): self._backend = backend - _mark_asn1_named_ec_curve(backend, ec_key_cdata) self._ec_key = ec_key_cdata self._evp_pkey = evp_pkey sn = _ec_key_curve_sn(backend, ec_key_cdata) self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) curve = utils.read_only_property("_curve") @@ -244,8 +256,7 @@ class _EllipticCurvePublicKey(object): def verifier(self, signature, signature_algorithm): _warn_sign_verify_deprecated() - if not isinstance(signature, bytes): - raise TypeError("signature must be bytes.") + utils._check_bytes("signature", signature) _check_signature_algorithm(signature_algorithm) _check_not_prehashed(signature_algorithm.algorithm) @@ -276,19 +287,62 @@ class _EllipticCurvePublicKey(object): curve=self._curve ) + def _encode_point(self, format): + if format is serialization.PublicFormat.CompressedPoint: + conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED + else: + assert format is serialization.PublicFormat.UncompressedPoint + conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + with self._backend._tmp_bn_ctx() as bn_ctx: + buflen = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx + ) + self._backend.openssl_assert(buflen > 0) + buf = self._backend._ffi.new("char[]", buflen) + res = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, buf, buflen, bn_ctx + ) + self._backend.openssl_assert(buflen == res) + + return self._backend._ffi.buffer(buf)[:] + def public_bytes(self, encoding, format): if format is serialization.PublicFormat.PKCS1: raise ValueError( "EC public keys do not support PKCS1 serialization" ) - return self._backend._public_key_bytes( - encoding, - format, - self, - self._evp_pkey, - None - ) + if ( + encoding is serialization.Encoding.X962 or + format is serialization.PublicFormat.CompressedPoint or + format is serialization.PublicFormat.UncompressedPoint + ): + if ( + encoding is not serialization.Encoding.X962 or + format not in ( + serialization.PublicFormat.CompressedPoint, + serialization.PublicFormat.UncompressedPoint + ) + ): + raise ValueError( + "X962 encoding must be used with 
CompressedPoint or " + "UncompressedPoint format" + ) + + return self._encode_point(format) + else: + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) def verify(self, signature, data, signature_algorithm): _check_signature_algorithm(signature_algorithm) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed25519.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed25519.py new file mode 100644 index 0000000..f38f11d --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed25519.py @@ -0,0 +1,151 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import exceptions, utils +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PrivateKey, Ed25519PublicKey, _ED25519_KEY_SIZE, _ED25519_SIG_SIZE +) + + +@utils.register_interface(Ed25519PublicKey) +class _Ed25519PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] + + def verify(self, signature, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +@utils.register_interface(Ed25519PrivateKey) +class _Ed25519PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + 
self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed25519_load_public_bytes(public_bytes) + + def sign(self, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + evp_md_ctx, buf, buflen, data, len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE) + return self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed448.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed448.py new file mode 100644 index 0000000..f541f05 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ed448.py @@ -0,0 +1,154 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
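A usage sketch for the Ed25519 backend implemented above, through the public API (OpenSSL 1.1.1b or newer required, per ed25519_supported):

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

private_key = Ed25519PrivateKey.generate()
signature = private_key.sign(b"message")       # 64-byte raw signature
public_key = private_key.public_key()
public_key.verify(signature, b"message")       # raises InvalidSignature on mismatch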
+ +from __future__ import absolute_import, division, print_function + +from cryptography import exceptions, utils +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed448 import ( + Ed448PrivateKey, Ed448PublicKey +) + +_ED448_KEY_SIZE = 57 +_ED448_SIG_SIZE = 114 + + +@utils.register_interface(Ed448PublicKey) +class _Ed448PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] + + def verify(self, signature, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +@utils.register_interface(Ed448PrivateKey) +class _Ed448PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed448_load_public_bytes(public_bytes) + + def sign(self, data): + evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + evp_md_ctx, buf, buflen, data, 
len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE) + return self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/encode_asn1.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/encode_asn1.py index 6ff1a9a..ca35f0e 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/encode_asn1.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/encode_asn1.py @@ -114,13 +114,15 @@ def _encode_sk_name_entry(backend, attributes): for attribute in attributes: name_entry = _encode_name_entry(backend, attribute) res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry) - backend.openssl_assert(res == 1) + backend.openssl_assert(res >= 1) return stack def _encode_name_entry(backend, attribute): if attribute._type is _ASN1Type.BMPString: value = attribute.value.encode('utf_16_be') + elif attribute._type is _ASN1Type.UniversalString: + value = attribute.value.encode('utf_32_be') else: value = attribute.value.encode('utf8') @@ -136,6 +138,28 @@ def _encode_crl_number_delta_crl_indicator(backend, ext): return _encode_asn1_int_gc(backend, ext.crl_number) +def _encode_issuing_dist_point(backend, ext): + idp = backend._lib.ISSUING_DIST_POINT_new() + backend.openssl_assert(idp != backend._ffi.NULL) + idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free) + idp.onlyuser = 255 if ext.only_contains_user_certs else 0 + idp.onlyCA = 255 if ext.only_contains_ca_certs else 0 + idp.indirectCRL = 255 if ext.indirect_crl else 0 + idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0 + if ext.only_some_reasons: + idp.onlysomereasons = _encode_reasonflags( + backend, ext.only_some_reasons + ) + + if ext.full_name: + idp.distpoint = _encode_full_name(backend, ext.full_name) + + if ext.relative_name: + idp.distpoint = _encode_relative_name(backend, ext.relative_name) + + return idp + + def _encode_crl_reason(backend, crl_reason): asn1enum = backend._lib.ASN1_ENUMERATED_new() backend.openssl_assert(asn1enum != backend._ffi.NULL) @@ -323,16 +347,22 @@ def _encode_authority_information_access(backend, authority_info_access): aia = 
backend._lib.sk_ACCESS_DESCRIPTION_new_null() backend.openssl_assert(aia != backend._ffi.NULL) aia = backend._ffi.gc( - aia, backend._lib.sk_ACCESS_DESCRIPTION_free + aia, + lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free( + x, backend._ffi.addressof( + backend._lib._original_lib, "ACCESS_DESCRIPTION_free" + ) + ) ) for access_description in authority_info_access: ad = backend._lib.ACCESS_DESCRIPTION_new() method = _txt2obj( backend, access_description.access_method.dotted_string ) - gn = _encode_general_name(backend, access_description.access_location) + _encode_general_name_preallocated( + backend, access_description.access_location, ad.location + ) ad.method = method - ad.location = gn res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad) backend.openssl_assert(res >= 1) @@ -363,8 +393,13 @@ def _encode_subject_key_identifier(backend, ski): def _encode_general_name(backend, name): + gn = backend._lib.GENERAL_NAME_new() + _encode_general_name_preallocated(backend, name, gn) + return gn + + +def _encode_general_name_preallocated(backend, name, gn): if isinstance(name, x509.DNSName): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) gn.type = backend._lib.GEN_DNS @@ -378,7 +413,6 @@ def _encode_general_name(backend, name): backend.openssl_assert(res == 1) gn.d.dNSName = ia5 elif isinstance(name, x509.RegisteredID): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) gn.type = backend._lib.GEN_RID obj = backend._lib.OBJ_txt2obj( @@ -387,13 +421,11 @@ def _encode_general_name(backend, name): backend.openssl_assert(obj != backend._ffi.NULL) gn.d.registeredID = obj elif isinstance(name, x509.DirectoryName): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) dir_name = _encode_name(backend, name.value) gn.type = backend._lib.GEN_DIRNAME gn.d.directoryName = dir_name elif isinstance(name, x509.IPAddress): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) if isinstance(name.value, ipaddress.IPv4Network): packed = ( @@ -411,7 +443,6 @@ def _encode_general_name(backend, name): gn.type = backend._lib.GEN_IPADD gn.d.iPAddress = ipaddr elif isinstance(name, x509.OtherName): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) other_name = backend._lib.OTHERNAME_new() backend.openssl_assert(other_name != backend._ffi.NULL) @@ -434,7 +465,6 @@ def _encode_general_name(backend, name): gn.type = backend._lib.GEN_OTHERNAME gn.d.otherName = other_name elif isinstance(name, x509.RFC822Name): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) # ia5strings are supposed to be ITU T.50 but to allow round-tripping # of broken certs that encode utf8 we'll encode utf8 here too. @@ -443,7 +473,6 @@ def _encode_general_name(backend, name): gn.type = backend._lib.GEN_EMAIL gn.d.rfc822Name = asn1_str elif isinstance(name, x509.UniformResourceIdentifier): - gn = backend._lib.GENERAL_NAME_new() backend.openssl_assert(gn != backend._ffi.NULL) # ia5strings are supposed to be ITU T.50 but to allow round-tripping # of broken certs that encode utf8 we'll encode utf8 here too. 
@@ -453,11 +482,9 @@ def _encode_general_name(backend, name): gn.d.uniformResourceIdentifier = asn1_str else: raise ValueError( - "{0} is an unknown GeneralName type".format(name) + "{} is an unknown GeneralName type".format(name) ) - return gn - def _encode_extended_key_usage(backend, extended_key_usage): eku = backend._lib.sk_ASN1_OBJECT_new_null() @@ -482,6 +509,34 @@ _CRLREASONFLAGS = { } +def _encode_reasonflags(backend, reasons): + bitmask = backend._lib.ASN1_BIT_STRING_new() + backend.openssl_assert(bitmask != backend._ffi.NULL) + for reason in reasons: + res = backend._lib.ASN1_BIT_STRING_set_bit( + bitmask, _CRLREASONFLAGS[reason], 1 + ) + backend.openssl_assert(res == 1) + + return bitmask + + +def _encode_full_name(backend, full_name): + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_FULLNAME + dpn.name.fullname = _encode_general_names(backend, full_name) + return dpn + + +def _encode_relative_name(backend, relative_name): + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_RELATIVENAME + dpn.name.relativename = _encode_sk_name_entry(backend, relative_name) + return dpn + + def _encode_cdps_freshest_crl(backend, cdps): cdp = backend._lib.sk_DIST_POINT_new_null() cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free) @@ -490,30 +545,13 @@ def _encode_cdps_freshest_crl(backend, cdps): backend.openssl_assert(dp != backend._ffi.NULL) if point.reasons: - bitmask = backend._lib.ASN1_BIT_STRING_new() - backend.openssl_assert(bitmask != backend._ffi.NULL) - dp.reasons = bitmask - for reason in point.reasons: - res = backend._lib.ASN1_BIT_STRING_set_bit( - bitmask, _CRLREASONFLAGS[reason], 1 - ) - backend.openssl_assert(res == 1) + dp.reasons = _encode_reasonflags(backend, point.reasons) if point.full_name: - dpn = backend._lib.DIST_POINT_NAME_new() - backend.openssl_assert(dpn != backend._ffi.NULL) - dpn.type = _DISTPOINT_TYPE_FULLNAME - dpn.name.fullname = _encode_general_names(backend, point.full_name) - dp.distpoint = dpn + dp.distpoint = _encode_full_name(backend, point.full_name) if point.relative_name: - dpn = backend._lib.DIST_POINT_NAME_new() - backend.openssl_assert(dpn != backend._ffi.NULL) - dpn.type = _DISTPOINT_TYPE_RELATIVENAME - relativename = _encode_sk_name_entry(backend, point.relative_name) - backend.openssl_assert(relativename != backend._ffi.NULL) - dpn.name.relativename = relativename - dp.distpoint = dpn + dp.distpoint = _encode_relative_name(backend, point.relative_name) if point.crl_issuer: dp.CRLissuer = _encode_general_names(backend, point.crl_issuer) @@ -603,6 +641,8 @@ _CRL_EXTENSION_ENCODE_HANDLERS = { ), ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator, ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator, + ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point, + ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl, } _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = { diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hashes.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hashes.py index c39f57d..7f9d840 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hashes.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hashes.py @@ -25,7 +25,7 @@ class _HashContext(object): evp_md = self._backend._evp_md_from_algorithm(algorithm) if 
evp_md == self._backend._ffi.NULL: raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend.".format( + "{} is not a supported hash on this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) @@ -47,14 +47,32 @@ class _HashContext(object): return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) def update(self, data): - res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestUpdate( + self._ctx, data_ptr, len(data) + ) self._backend.openssl_assert(res != 0) def finalize(self): + if isinstance(self.algorithm, hashes.ExtendableOutputFunction): + # extendable output functions use a different finalize + return self._finalize_xof() + else: + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert( + outlen[0] == self.algorithm.digest_size + ) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def _finalize_xof(self): buf = self._backend._ffi.new("unsigned char[]", - self._backend._lib.EVP_MAX_MD_SIZE) - outlen = self._backend._ffi.new("unsigned int *") - res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self.algorithm.digest_size) + res = self._backend._lib.EVP_DigestFinalXOF( + self._ctx, buf, self.algorithm.digest_size + ) self._backend.openssl_assert(res != 0) - self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) - return self._backend._ffi.buffer(buf)[:outlen[0]] + return self._backend._ffi.buffer(buf)[:self.algorithm.digest_size] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hmac.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hmac.py index 99c43f2..2e09cbc 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hmac.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/hmac.py @@ -9,10 +9,9 @@ from cryptography import utils from cryptography.exceptions import ( InvalidSignature, UnsupportedAlgorithm, _Reasons ) -from cryptography.hazmat.primitives import constant_time, hashes, mac +from cryptography.hazmat.primitives import constant_time, hashes -@utils.register_interface(mac.MACContext) @utils.register_interface(hashes.HashContext) class _HMACContext(object): def __init__(self, backend, key, algorithm, ctx=None): @@ -28,12 +27,13 @@ class _HMACContext(object): evp_md = self._backend._evp_md_from_algorithm(algorithm) if evp_md == self._backend._ffi.NULL: raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( + "{} is not a supported hash on this backend".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) + key_ptr = self._backend._ffi.from_buffer(key) res = self._backend._lib.HMAC_Init_ex( - ctx, key, len(key), evp_md, self._backend._ffi.NULL + ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL ) self._backend.openssl_assert(res != 0) @@ -55,7 +55,8 @@ class _HMACContext(object): ) def update(self, data): - res = self._backend._lib.HMAC_Update(self._ctx, data, len(data)) + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data)) self._backend.openssl_assert(res != 0) def finalize(self): diff --git 
a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ocsp.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ocsp.py index 32e26a0..e42565e 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ocsp.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/ocsp.py @@ -10,7 +10,8 @@ from cryptography import utils, x509 from cryptography.exceptions import UnsupportedAlgorithm from cryptography.hazmat.backends.openssl.decode_asn1 import ( _CRL_ENTRY_REASON_CODE_TO_ENUM, _OCSP_BASICRESP_EXT_PARSER, - _OCSP_REQ_EXT_PARSER, _asn1_integer_to_int, + _OCSP_REQ_EXT_PARSER, _OCSP_SINGLERESP_EXT_PARSER, + _asn1_integer_to_int, _asn1_string_to_bytes, _decode_x509_name, _obj2txt, _parse_asn1_generalized_time, ) @@ -82,7 +83,7 @@ def _hash_algorithm(backend, cert_id): return _OIDS_TO_HASH[oid] except KeyError: raise UnsupportedAlgorithm( - "Signature algorithm OID: {0} not recognized".format(oid) + "Signature algorithm OID: {} not recognized".format(oid) ) @@ -126,6 +127,17 @@ class _OCSPResponse(object): oid = _obj2txt(self._backend, alg.algorithm) return x509.ObjectIdentifier(oid) + @property + @_requires_successful_response + def signature_hash_algorithm(self): + oid = self.signature_algorithm_oid + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{} not recognized".format(oid) + ) + @property @_requires_successful_response def signature(self): @@ -308,6 +320,13 @@ class _OCSPResponse(object): def extensions(self): return _OCSP_BASICRESP_EXT_PARSER.parse(self._backend, self._basic) + @utils.cached_property + @_requires_successful_response + def single_extensions(self): + return _OCSP_SINGLERESP_EXT_PARSER.parse( + self._backend, self._single + ) + def public_bytes(self, encoding): if encoding is not serialization.Encoding.DER: raise ValueError( diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/poly1305.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/poly1305.py new file mode 100644 index 0000000..25448dd --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/poly1305.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
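# --- Editor's note (not part of the patch): hypothetical usage of the two
# _OCSPResponse additions in the ocsp.py hunk above, signature_hash_algorithm
# and single_extensions. `der_bytes` is a placeholder for a DER-encoded OCSP
# response obtained elsewhere; both properties are guarded by
# _requires_successful_response, hence the status check.
from cryptography.x509 import ocsp

def describe_ocsp_response(der_bytes):
    resp = ocsp.load_der_ocsp_response(der_bytes)
    if resp.response_status is not ocsp.OCSPResponseStatus.SUCCESSFUL:
        return None
    # signature_hash_algorithm maps the signature OID to a hash instance;
    # single_extensions parses the extensions of the single response.
    return resp.signature_hash_algorithm, list(resp.single_extensions)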
+ +from __future__ import absolute_import, division, print_function + + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import constant_time + + +_POLY1305_TAG_SIZE = 16 +_POLY1305_KEY_SIZE = 32 + + +class _Poly1305Context(object): + def __init__(self, backend, key): + self._backend = backend + + key_ptr = self._backend._ffi.from_buffer(key) + # This function copies the key into OpenSSL-owned memory so we don't + # need to retain it ourselves + evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key( + self._backend._lib.NID_poly1305, + self._backend._ffi.NULL, key_ptr, len(key) + ) + self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL) + self._evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + self._ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + self._ctx, self._backend._ffi.NULL, self._backend._ffi.NULL, + self._backend._ffi.NULL, self._evp_pkey + ) + self._backend.openssl_assert(res == 1) + + def update(self, data): + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestSignUpdate( + self._ctx, data_ptr, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, tag): + mac = self.finalize() + if not constant_time.bytes_eq(mac, tag): + raise InvalidSignature("Value did not match computed tag.") diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/rsa.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/rsa.py index 00f5e37..3e4c2fd 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/rsa.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/rsa.py @@ -59,7 +59,7 @@ def _enc_dec_rsa(backend, key, data, padding): else: raise UnsupportedAlgorithm( - "{0} is not supported by this backend.".format( + "{} is not supported by this backend.".format( padding.name ), _Reasons.UNSUPPORTED_PADDING @@ -127,10 +127,11 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding): def _handle_rsa_enc_dec_error(backend, key): errors = backend._consume_errors() backend.openssl_assert(errors) - assert errors[0].lib == backend._lib.ERR_LIB_RSA + backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA) if isinstance(key, _RSAPublicKey): - assert (errors[0].reason == - backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + backend.openssl_assert( + errors[0].reason == backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE + ) raise ValueError( "Data too long for key size. Encrypt less data or use a " "larger key size." 
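# --- Editor's note (not part of the patch): the _Poly1305Context added above
# is driven by the Poly1305 class in cryptography.hazmat.primitives.poly1305,
# which ships in the same cryptography release. A sketch, assuming the linked
# OpenSSL exposes EVP_PKEY_POLY1305; note that Poly1305 keys are one-time
# keys and must never authenticate two different messages.
import os
from cryptography.hazmat.primitives import poly1305

key = os.urandom(32)                    # _POLY1305_KEY_SIZE
mac = poly1305.Poly1305(key)
mac.update(b"message to authenticate")
tag = mac.finalize()                    # 16 bytes, _POLY1305_TAG_SIZE

check = poly1305.Poly1305(key)
check.update(b"message to authenticate")
check.verify(tag)                       # raises InvalidSignature on mismatch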
@@ -148,7 +149,7 @@ def _handle_rsa_enc_dec_error(backend, key): if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR: decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR) - assert errors[0].reason in decoding_errors + backend.openssl_assert(errors[0].reason in decoding_errors) raise ValueError("Decryption failed.") @@ -177,7 +178,7 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm): padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING else: raise UnsupportedAlgorithm( - "{0} is not supported by this backend.".format(padding.name), + "{} is not supported by this backend.".format(padding.name), _Reasons.UNSUPPORTED_PADDING ) @@ -196,7 +197,7 @@ def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func): if res == 0: backend._consume_errors() raise UnsupportedAlgorithm( - "{0} is not supported by this backend for RSA signing.".format( + "{} is not supported by this backend for RSA signing.".format( algorithm.name ), _Reasons.UNSUPPORTED_HASH @@ -236,17 +237,19 @@ def _rsa_sig_sign(backend, padding, algorithm, private_key, data): pkey_ctx, buf, buflen, data, len(data)) if res != 1: errors = backend._consume_errors() - assert errors[0].lib == backend._lib.ERR_LIB_RSA - reason = None - if (errors[0].reason == - backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): + backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA) + if ( + errors[0].reason == + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE + ): reason = ("Salt length too long for key size. Try using " "MAX_LENGTH instead.") else: - assert (errors[0].reason == - backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + backend.openssl_assert( + errors[0].reason == + backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) reason = "Digest too large for key size. Use a larger key." 
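# --- Editor's note (not part of the patch): illustrative context for the
# rewritten RSA error handling above. When a PSS salt does not fit the key,
# OpenSSL reports RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE and the code now raises
# the ValueError suggesting MAX_LENGTH; requesting MAX_LENGTH up front lets
# the backend choose the largest salt that fits the key size.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
signature = key.sign(
    b"data",
    padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()),
        salt_length=padding.PSS.MAX_LENGTH,  # avoids "Salt length too long"
    ),
    hashes.SHA256(),
)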
- assert reason is not None raise ValueError(reason) return backend._ffi.buffer(buf)[:] @@ -434,8 +437,7 @@ class _RSAPublicKey(object): def verifier(self, signature, padding, algorithm): _warn_sign_verify_deprecated() - if not isinstance(signature, bytes): - raise TypeError("signature must be bytes.") + utils._check_bytes("signature", signature) _check_not_prehashed(algorithm) return _RSAVerificationContext( diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/utils.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/utils.py index 05d0fe5..ee472c0 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/utils.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/utils.py @@ -11,6 +11,30 @@ from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric.utils import Prehashed +def _evp_pkey_derive(backend, evp_pkey, peer_public_key): + ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL) + backend.openssl_assert(ctx != backend._ffi.NULL) + ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free) + res = backend._lib.EVP_PKEY_derive_init(ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_derive_set_peer( + ctx, peer_public_key._evp_pkey + ) + backend.openssl_assert(res == 1) + keylen = backend._ffi.new("size_t *") + res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen) + backend.openssl_assert(res == 1) + backend.openssl_assert(keylen[0] > 0) + buf = backend._ffi.new("unsigned char[]", keylen[0]) + res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen) + if res != 1: + raise ValueError( + "Null shared key derived from public/private pair." + ) + + return backend._ffi.buffer(buf, keylen[0])[:] + + def _calculate_digest_and_algorithm(backend, data, algorithm): if not isinstance(algorithm, Prehashed): hash_ctx = hashes.Hash(algorithm, backend) @@ -40,6 +64,6 @@ def _warn_sign_verify_deprecated(): warnings.warn( "signer and verifier have been deprecated. 
Please use sign " "and verify instead.", - utils.PersistentlyDeprecated, + utils.PersistentlyDeprecated2017, stacklevel=3 ) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x25519.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x25519.py index 983ece6..8708834 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x25519.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x25519.py @@ -5,18 +5,51 @@ from __future__ import absolute_import, division, print_function from cryptography import utils +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric.x25519 import ( X25519PrivateKey, X25519PublicKey ) +_X25519_KEY_SIZE = 32 + + @utils.register_interface(X25519PublicKey) class _X25519PublicKey(object): def __init__(self, backend, evp_pkey): self._backend = backend self._evp_pkey = evp_pkey - def public_bytes(self): + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): ucharpp = self._backend._ffi.new("unsigned char **") res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint( self._evp_pkey, ucharpp @@ -52,28 +85,51 @@ class _X25519PrivateKey(object): if not isinstance(peer_public_key, X25519PublicKey): raise TypeError("peer_public_key must be X25519PublicKey.") - ctx = self._backend._lib.EVP_PKEY_CTX_new( - self._evp_pkey, self._backend._ffi.NULL + return _evp_pkey_derive( + self._backend, self._evp_pkey, peer_public_key ) - self._backend.openssl_assert(ctx != self._backend._ffi.NULL) - ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free) - res = self._backend._lib.EVP_PKEY_derive_init(ctx) - self._backend.openssl_assert(res == 1) - res = self._backend._lib.EVP_PKEY_derive_set_peer( - ctx, peer_public_key._evp_pkey - ) - self._backend.openssl_assert(res == 1) - keylen = self._backend._ffi.new("size_t *") - res = self._backend._lib.EVP_PKEY_derive( - ctx, self._backend._ffi.NULL, keylen - ) - self._backend.openssl_assert(res == 1) - self._backend.openssl_assert(keylen[0] > 0) - buf = self._backend._ffi.new("unsigned char[]", keylen[0]) - res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen) - if res != 1: + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format 
is not serialization.PrivateFormat.PKCS8 + ): raise ValueError( - "Null shared key derived from public/private pair." + "format must be PKCS8 when encoding is PEM or DER" ) - return self._backend._ffi.buffer(buf, keylen[0])[:] + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key + # The trick we use here is serializing to a PKCS8 key and just + # using the last 32 bytes, which is the key itself. + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_PKCS8PrivateKey_bio( + bio, self._evp_pkey, + self._backend._ffi.NULL, self._backend._ffi.NULL, + 0, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + pkcs8 = self._backend._read_mem_bio(bio) + self._backend.openssl_assert(len(pkcs8) == 48) + return pkcs8[-_X25519_KEY_SIZE:] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x448.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x448.py new file mode 100644 index 0000000..fe0dcd9 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x448.py @@ -0,0 +1,123 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PrivateKey, X448PublicKey +) + +_X448_KEY_SIZE = 56 + + +@utils.register_interface(X448PublicKey) +class _X448PublicKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes(self, encoding, format): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw or + format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PublicFormat.SubjectPublicKeyInfo + ): + raise ValueError( + "format must be SubjectPublicKeyInfo when encoding is PEM or " + "DER" + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] + + +@utils.register_interface(X448PrivateKey) +class _X448PrivateKey(object): + def __init__(self, backend, evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + 
self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend.x448_load_public_bytes(buf) + + def exchange(self, peer_public_key): + if not isinstance(peer_public_key, X448PublicKey): + raise TypeError("peer_public_key must be X448PublicKey.") + + return _evp_pkey_derive( + self._backend, self._evp_pkey, peer_public_key + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + if ( + encoding is serialization.Encoding.Raw or + format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw or + encoding is not serialization.Encoding.Raw or not + isinstance(encryption_algorithm, serialization.NoEncryption) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + if ( + encoding in serialization._PEM_DER and + format is not serialization.PrivateFormat.PKCS8 + ): + raise ValueError( + "format must be PKCS8 when encoding is PEM or DER" + ) + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self._evp_pkey, None + ) + + def _raw_private_bytes(self): + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x509.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x509.py index ac1838c..efbb179 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x509.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/backends/openssl/x509.py @@ -6,7 +6,6 @@ from __future__ import absolute_import, division, print_function import datetime import operator -import warnings from cryptography import utils, x509 from cryptography.exceptions import UnsupportedAlgorithm @@ -30,7 +29,7 @@ class _Certificate(object): self._x509 = x509 def __repr__(self): - return "".format(self.subject) + return "".format(self.subject) def __eq__(self, other): if not isinstance(other, x509.Certificate): @@ -59,18 +58,9 @@ class _Certificate(object): return x509.Version.v3 else: raise x509.InvalidVersion( - "{0} is not a valid X509 version".format(version), version + "{} is not a valid X509 version".format(version), version ) - @property - def serial(self): - warnings.warn( - "Certificate serial is deprecated, use serial_number instead.", - utils.PersistentlyDeprecated, - stacklevel=2 - ) - return self.serial_number - @property def serial_number(self): asn1_int = self._backend._lib.X509_get_serialNumber(self._x509) @@ -90,12 +80,12 @@ class _Certificate(object): @property def not_valid_before(self): - asn1_time = self._backend._lib.X509_get_notBefore(self._x509) + asn1_time = self._backend._lib.X509_getm_notBefore(self._x509) return _parse_asn1_time(self._backend, asn1_time) @property def not_valid_after(self): - asn1_time = self._backend._lib.X509_get_notAfter(self._x509) + asn1_time = self._backend._lib.X509_getm_notAfter(self._x509) return _parse_asn1_time(self._backend, asn1_time) @property @@ -117,7 +107,7 @@ class _Certificate(object): return 
x509._SIG_OIDS_TO_HASH[oid] except KeyError: raise UnsupportedAlgorithm( - "Signature algorithm OID:{0} not recognized".format(oid) + "Signature algorithm OID:{} not recognized".format(oid) ) @property @@ -271,7 +261,7 @@ class _CertificateRevocationList(object): return x509._SIG_OIDS_TO_HASH[oid] except KeyError: raise UnsupportedAlgorithm( - "Signature algorithm OID:{0} not recognized".format(oid) + "Signature algorithm OID:{} not recognized".format(oid) ) @property @@ -423,7 +413,7 @@ class _CertificateSigningRequest(object): return x509._SIG_OIDS_TO_HASH[oid] except KeyError: raise UnsupportedAlgorithm( - "Signature algorithm OID:{0} not recognized".format(oid) + "Signature algorithm OID:{} not recognized".format(oid) ) @property diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_constant_time.abi3.so b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_constant_time.abi3.so index 464be7e..5c2fee4 100755 Binary files a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_constant_time.abi3.so and b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_constant_time.abi3.so differ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_openssl.abi3.so b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_openssl.abi3.so index 99b5aaf..f8af81d 100755 Binary files a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_openssl.abi3.so and b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_openssl.abi3.so differ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_padding.abi3.so b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_padding.abi3.so index e5304d2..e809fd0 100755 Binary files a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_padding.abi3.so and b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/_padding.abi3.so differ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/_conditional.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/_conditional.py index 219f163..a293fb0 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/_conditional.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/_conditional.py @@ -99,6 +99,7 @@ def cryptography_has_102_verification_params(): "X509_VERIFY_PARAM_set1_ip_asc", "X509_VERIFY_PARAM_set_hostflags", "SSL_get0_param", + "SSL_CTX_get0_param", "X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT", "X509_CHECK_FLAG_NO_WILDCARDS", "X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS", @@ -150,11 +151,6 @@ def cryptography_has_tls_st(): def cryptography_has_locking_callbacks(): return [ - "CRYPTO_LOCK", - "CRYPTO_UNLOCK", - "CRYPTO_READ", - "CRYPTO_LOCK_SSL", - "CRYPTO_lock", "Cryptography_setup_ssl_threads", ] @@ -224,9 +220,31 @@ def cryptography_has_x25519(): ] +def cryptography_has_x448(): + return [ + "EVP_PKEY_X448", + "NID_X448", + ] + + +def cryptography_has_ed448(): + return [ + "EVP_PKEY_ED448", + "NID_ED448", + ] + + def cryptography_has_ed25519(): return [ "NID_ED25519", + "EVP_PKEY_ED25519", + ] + + +def cryptography_has_poly1305(): + return [ + "NID_poly1305", + "EVP_PKEY_POLY1305", ] @@ -237,6 +255,12 @@ def cryptography_has_oneshot_evp_digest_sign_verify(): ] +def cryptography_has_evp_digestfinal_xof(): + return [ + "EVP_DigestFinalXOF", + ] + + def 
cryptography_has_evp_pkey_get_set_tls_encodedpoint(): return [ "EVP_PKEY_get1_tls_encodedpoint", @@ -246,7 +270,7 @@ def cryptography_has_evp_pkey_get_set_tls_encodedpoint(): def cryptography_has_fips(): return [ - "FIPS_set_mode", + "FIPS_mode_set", "FIPS_mode", ] @@ -298,6 +322,46 @@ def cryptography_has_tlsv13(): "SSL_verify_client_post_handshake", "SSL_CTX_set_post_handshake_auth", "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_raw_key(): + return [ + "EVP_PKEY_new_raw_private_key", + "EVP_PKEY_new_raw_public_key", + "EVP_PKEY_get_raw_private_key", + "EVP_PKEY_get_raw_public_key", + ] + + +def cryptography_has_evp_r_memory_limit_exceeded(): + return [ + "EVP_R_MEMORY_LIMIT_EXCEEDED", + ] + + +def cryptography_has_engine(): + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "Cryptography_add_osrandom_engine", + ] + + +def cryptography_has_verified_chain(): + return [ + "SSL_get0_verified_chain", ] @@ -349,7 +413,10 @@ CONDITIONAL_NAMES = { cryptography_has_x509_store_ctx_get_issuer ), "Cryptography_HAS_X25519": cryptography_has_x25519, + "Cryptography_HAS_X448": cryptography_has_x448, + "Cryptography_HAS_ED448": cryptography_has_ed448, "Cryptography_HAS_ED25519": cryptography_has_ed25519, + "Cryptography_HAS_POLY1305": cryptography_has_poly1305, "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": ( cryptography_has_oneshot_evp_digest_sign_verify ), @@ -363,4 +430,13 @@ CONDITIONAL_NAMES = { "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup, "Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details, "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13, + "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key, + "Cryptography_HAS_EVP_DIGESTFINAL_XOF": ( + cryptography_has_evp_digestfinal_xof + ), + "Cryptography_HAS_EVP_R_MEMORY_LIMIT_EXCEEDED": ( + cryptography_has_evp_r_memory_limit_exceeded + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, } diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/binding.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/binding.py index 0824ea8..1e0f34c 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/binding.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/bindings/openssl/binding.py @@ -5,10 +5,12 @@ from __future__ import absolute_import, division, print_function import collections +import os import threading import types import warnings +import cryptography from cryptography import utils from cryptography.exceptions import InternalError from cryptography.hazmat.bindings._openssl import ffi, lib @@ -114,10 +116,9 @@ class Binding(object): # reliably clear the error queue. Once we clear it here we will # error on any subsequent unexpected item in the stack. 
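# --- Editor's note (not part of the patch): the CONDITIONAL_NAMES entries
# added above (X448, ED448, POLY1305, RAW_KEY, ENGINE, VERIFIED_CHAIN, ...)
# surface as integer flags on the bound library, so callers can probe a
# feature before touching the symbols it guards. A sketch:
from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()
if binding.lib.Cryptography_HAS_POLY1305:
    # NID_poly1305 / EVP_PKEY_POLY1305 are only bound when the flag is set.
    print("Poly1305 available, NID:", binding.lib.NID_poly1305)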
cls.lib.ERR_clear_error() - cls._osrandom_engine_id = cls.lib.Cryptography_osrandom_engine_id - cls._osrandom_engine_name = cls.lib.Cryptography_osrandom_engine_name - result = cls.lib.Cryptography_add_osrandom_engine() - _openssl_assert(cls.lib, result in (1, 2)) + if cls.lib.Cryptography_HAS_ENGINE: + result = cls.lib.Cryptography_add_osrandom_engine() + _openssl_assert(cls.lib, result in (1, 2)) @classmethod def _ensure_ffi_initialized(cls): @@ -156,14 +157,44 @@ def _verify_openssl_version(lib): lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and not lib.CRYPTOGRAPHY_IS_LIBRESSL ): - warnings.warn( - "OpenSSL version 1.0.1 is no longer supported by the OpenSSL " - "project, please upgrade. A future version of cryptography will " - "drop support for it.", - utils.CryptographyDeprecationWarning + if os.environ.get("CRYPTOGRAPHY_ALLOW_OPENSSL_101"): + warnings.warn( + "OpenSSL version 1.0.1 is no longer supported by the OpenSSL " + "project, please upgrade. The next version of cryptography " + "will completely remove support for it.", + utils.CryptographyDeprecationWarning + ) + else: + raise RuntimeError( + "You are linking against OpenSSL 1.0.1, which is no longer " + "supported by the OpenSSL project. You need to upgrade to a " + "newer version of OpenSSL." + ) + + +def _verify_package_version(version): + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. " + "Loaded python version: {}, shared object version: {}".format( + version, so_package_version + ) ) +_verify_package_version(cryptography.__version__) + # OpenSSL is not thread safe until the locks are initialized. We call this # method in module scope so that it executes with the import lock. 
On # Pythons < 3.4 this import lock is a global lock, which can prevent a race diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ec.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ec.py index 431ecb7..eef922d 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ec.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ec.py @@ -5,6 +5,7 @@ from __future__ import absolute_import, division, print_function import abc +import warnings import six @@ -19,6 +20,19 @@ class EllipticCurveOID(object): SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") SECP384R1 = ObjectIdentifier("1.3.132.0.34") SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") @six.add_metaclass(abc.ABCMeta) @@ -138,6 +152,22 @@ class EllipticCurvePublicKey(object): Verifies the signature of the data. """ + @classmethod + def from_encoded_point(cls, curve, data): + utils._check_bytes("data", data) + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + from cryptography.hazmat.backends.openssl.backend import backend + return backend.load_elliptic_curve_public_bytes(curve, data) + EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey @@ -329,6 +359,14 @@ class EllipticCurvePublicNumbers(object): return backend.load_elliptic_curve_public_numbers(self) def encode_point(self): + warnings.warn( + "encode_point has been deprecated on EllipticCurvePublicNumbers" + " and will be removed in a future version. Please use " + "EllipticCurvePublicKey.public_bytes to obtain both " + "compressed and uncompressed point encoding.", + utils.PersistentlyDeprecated2019, + stacklevel=2, + ) # key_size is in bits. Convert to bytes and round up byte_length = (self.curve.key_size + 7) // 8 return ( @@ -341,6 +379,14 @@ class EllipticCurvePublicNumbers(object): if not isinstance(curve, EllipticCurve): raise TypeError("curve must be an EllipticCurve instance") + warnings.warn( + "Support for unsafe construction of public numbers from " + "encoded data will be removed in a future version. " + "Please use EllipticCurvePublicKey.from_encoded_point", + utils.PersistentlyDeprecated2019, + stacklevel=2, + ) + if data.startswith(b'\x04'): # key_size is in bits. 
Convert to bytes and round up byte_length = (curve.key_size + 7) // 8 @@ -419,3 +465,36 @@ class EllipticCurvePrivateNumbers(object): class ECDH(object): pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid): + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed25519.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..d89445f --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,84 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +_ED25519_KEY_SIZE = 32 +_ED25519_SIG_SIZE = 64 + + +@six.add_metaclass(abc.ABCMeta) +class Ed25519PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature, data): + """ + Verify the signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class Ed25519PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed25519_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The Ed25519PublicKey derived from the private key. 
+ """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def sign(self, data): + """ + Signs the data. + """ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed448.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..939157a --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,79 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +@six.add_metaclass(abc.ABCMeta) +class Ed448PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature, data): + """ + Verify the signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class Ed448PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + return backend.ed448_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + return backend.ed448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data): + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/rsa.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/rsa.py index 27db671..f20cdf9 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/rsa.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -184,12 +184,12 @@ def _modinv(e, m): """ Modular Multiplicative Inverse. 
Returns x such that: (x*e) mod m == 1 """ - x1, y1, x2, y2 = 1, 0, 0, 1 + x1, x2 = 1, 0 a, b = e, m while b > 0: q, r = divmod(a, b) - xn, yn = x1 - q * x2, y1 - q * y2 - a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn return x1 % m diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/utils.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/utils.py index ef1e7eb..14d2abe 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/utils.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/utils.py @@ -4,49 +4,26 @@ from __future__ import absolute_import, division, print_function -import warnings - -from asn1crypto.algos import DSASignature - -import six - from cryptography import utils +from cryptography.hazmat._der import ( + DERReader, INTEGER, SEQUENCE, encode_der, encode_der_integer +) from cryptography.hazmat.primitives import hashes -def decode_rfc6979_signature(signature): - warnings.warn( - "decode_rfc6979_signature is deprecated and will " - "be removed in a future version, use decode_dss_signature instead.", - utils.PersistentlyDeprecated, - stacklevel=2 - ) - return decode_dss_signature(signature) - - def decode_dss_signature(signature): - data = DSASignature.load(signature, strict=True).native - return data['r'], data['s'] - - -def encode_rfc6979_signature(r, s): - warnings.warn( - "encode_rfc6979_signature is deprecated and will " - "be removed in a future version, use encode_dss_signature instead.", - utils.PersistentlyDeprecated, - stacklevel=2 - ) - return encode_dss_signature(r, s) + with DERReader(signature).read_single_element(SEQUENCE) as seq: + r = seq.read_element(INTEGER).as_integer() + s = seq.read_element(INTEGER).as_integer() + return r, s def encode_dss_signature(r, s): - if ( - not isinstance(r, six.integer_types) or - not isinstance(s, six.integer_types) - ): - raise ValueError("Both r and s must be integers") - - return DSASignature({'r': r, 's': s}).dump() + return encode_der( + SEQUENCE, + encode_der(INTEGER, encode_der_integer(r)), + encode_der(INTEGER, encode_der_integer(s)), + ) class Prehashed(object): diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x25519.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x25519.py index 5c4652a..61a95ff 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x25519.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -21,11 +21,14 @@ class X25519PublicKey(object): "X25519 is not supported by this version of OpenSSL.", _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM ) + return backend.x25519_load_public_bytes(data) @abc.abstractmethod - def public_bytes(self): - pass + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. 
+ """ @six.add_metaclass(abc.ABCMeta) @@ -41,14 +44,30 @@ class X25519PrivateKey(object): return backend.x25519_generate_key() @classmethod - def _from_private_bytes(cls, data): + def from_private_bytes(cls, data): from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + return backend.x25519_load_private_bytes(data) @abc.abstractmethod def public_key(self): - pass + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ @abc.abstractmethod def exchange(self, peer_public_key): - pass + """ + Performs a key exchange operation using the provided peer's public key. + """ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x448.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..475e678 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,73 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons + + +@six.add_metaclass(abc.ABCMeta) +class X448PublicKey(object): + @classmethod + def from_public_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + The serialized bytes of the public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X448PrivateKey(object): + @classmethod + def generate(cls): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + return backend.x448_generate_key() + + @classmethod + def from_private_bytes(cls, data): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + return backend.x448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self): + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key): + """ + Performs a key exchange operation using the provided peer's public key. 
+ """ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/aead.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/aead.py index 16899d0..a20a80f 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/aead.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/aead.py @@ -20,7 +20,7 @@ class ChaCha20Poly1305(object): "ChaCha20Poly1305 is not supported by this version of OpenSSL", exceptions._Reasons.UNSUPPORTED_CIPHER ) - utils._check_bytes("key", key) + utils._check_byteslike("key", key) if len(key) != 32: raise ValueError("ChaCha20Poly1305 key must be 32 bytes.") @@ -56,7 +56,7 @@ class ChaCha20Poly1305(object): ) def _check_params(self, nonce, data, associated_data): - utils._check_bytes("nonce", nonce) + utils._check_byteslike("nonce", nonce) utils._check_bytes("data", data) utils._check_bytes("associated_data", associated_data) if len(nonce) != 12: @@ -67,7 +67,7 @@ class AESCCM(object): _MAX_SIZE = 2 ** 32 def __init__(self, key, tag_length=16): - utils._check_bytes("key", key) + utils._check_byteslike("key", key) if len(key) not in (16, 24, 32): raise ValueError("AESCCM key must be 128, 192, or 256 bits.") @@ -126,10 +126,10 @@ class AESCCM(object): # https://tools.ietf.org/html/rfc3610#section-2.1 l_val = 15 - len(nonce) if 2 ** (8 * l_val) < data_len: - raise ValueError("Nonce too long for data") + raise ValueError("Data too long for nonce") def _check_params(self, nonce, data, associated_data): - utils._check_bytes("nonce", nonce) + utils._check_byteslike("nonce", nonce) utils._check_bytes("data", data) utils._check_bytes("associated_data", associated_data) if not 7 <= len(nonce) <= 13: @@ -140,7 +140,7 @@ class AESGCM(object): _MAX_SIZE = 2 ** 32 def __init__(self, key): - utils._check_bytes("key", key) + utils._check_byteslike("key", key) if len(key) not in (16, 24, 32): raise ValueError("AESGCM key must be 128, 192, or 256 bits.") @@ -181,7 +181,7 @@ class AESGCM(object): ) def _check_params(self, nonce, data, associated_data): - utils._check_bytes("nonce", nonce) + utils._check_byteslike("nonce", nonce) utils._check_bytes("data", data) utils._check_bytes("associated_data", associated_data) if len(nonce) == 0: diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/algorithms.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/algorithms.py index 68a9e33..f4d5160 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/algorithms.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -13,11 +13,11 @@ from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce def _verify_key_size(algorithm, key): # Verify that the key is instance of bytes - utils._check_bytes("key", key) + utils._check_byteslike("key", key) # Verify that the key size matches the expected key size if len(key) * 8 not in algorithm.key_sizes: - raise ValueError("Invalid key size ({0}) for {1}.".format( + raise ValueError("Invalid key size ({}) for {}.".format( len(key) * 8, algorithm.name )) return key @@ -153,8 +153,7 @@ class ChaCha20(object): def __init__(self, key, nonce): self.key = _verify_key_size(self, key) - if not isinstance(nonce, bytes): - raise TypeError("nonce must be bytes") + utils._check_byteslike("nonce", nonce) if len(nonce) != 16: raise ValueError("nonce must be 128-bits (16 bytes)") diff --git 
a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/base.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/base.py index f857041..4d5f8d6 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/base.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/base.py @@ -179,7 +179,7 @@ class _AEADCipherContext(object): self._bytes_processed += data_size if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: raise ValueError( - "{0} has a maximum encrypted byte limit of {1}".format( + "{} has a maximum encrypted byte limit of {}".format( self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES ) ) @@ -217,7 +217,7 @@ class _AEADCipherContext(object): self._aad_bytes_processed += len(data) if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: raise ValueError( - "{0} has a maximum AAD byte limit of {1}".format( + "{} has a maximum AAD byte limit of {}".format( self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES ) ) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/modes.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/modes.py index e82c1a8..78fa1c4 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/modes.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/ciphers/modes.py @@ -72,7 +72,7 @@ def _check_aes_key_length(self, algorithm): def _check_iv_length(self, algorithm): if len(self.initialization_vector) * 8 != algorithm.block_size: - raise ValueError("Invalid IV size ({0}) for {1}.".format( + raise ValueError("Invalid IV size ({}) for {}.".format( len(self.initialization_vector), self.name )) @@ -88,9 +88,7 @@ class CBC(object): name = "CBC" def __init__(self, initialization_vector): - if not isinstance(initialization_vector, bytes): - raise TypeError("initialization_vector must be bytes") - + utils._check_byteslike("initialization_vector", initialization_vector) self._initialization_vector = initialization_vector initialization_vector = utils.read_only_property("_initialization_vector") @@ -103,8 +101,7 @@ class XTS(object): name = "XTS" def __init__(self, tweak): - if not isinstance(tweak, bytes): - raise TypeError("tweak must be bytes") + utils._check_byteslike("tweak", tweak) if len(tweak) != 16: raise ValueError("tweak must be 128-bits (16 bytes)") @@ -134,9 +131,7 @@ class OFB(object): name = "OFB" def __init__(self, initialization_vector): - if not isinstance(initialization_vector, bytes): - raise TypeError("initialization_vector must be bytes") - + utils._check_byteslike("initialization_vector", initialization_vector) self._initialization_vector = initialization_vector initialization_vector = utils.read_only_property("_initialization_vector") @@ -149,9 +144,7 @@ class CFB(object): name = "CFB" def __init__(self, initialization_vector): - if not isinstance(initialization_vector, bytes): - raise TypeError("initialization_vector must be bytes") - + utils._check_byteslike("initialization_vector", initialization_vector) self._initialization_vector = initialization_vector initialization_vector = utils.read_only_property("_initialization_vector") @@ -164,9 +157,7 @@ class CFB8(object): name = "CFB8" def __init__(self, initialization_vector): - if not isinstance(initialization_vector, bytes): - raise TypeError("initialization_vector must be bytes") - + utils._check_byteslike("initialization_vector", 
initialization_vector) self._initialization_vector = initialization_vector initialization_vector = utils.read_only_property("_initialization_vector") @@ -179,9 +170,7 @@ class CTR(object): name = "CTR" def __init__(self, nonce): - if not isinstance(nonce, bytes): - raise TypeError("nonce must be bytes") - + utils._check_byteslike("nonce", nonce) self._nonce = nonce nonce = utils.read_only_property("_nonce") @@ -189,7 +178,7 @@ class CTR(object): def validate_for_algorithm(self, algorithm): _check_aes_key_length(self, algorithm) if len(self.nonce) * 8 != algorithm.block_size: - raise ValueError("Invalid nonce size ({0}) for {1}.".format( + raise ValueError("Invalid nonce size ({}) for {}.".format( len(self.nonce), self.name )) @@ -206,19 +195,17 @@ class GCM(object): # len(initialization_vector) must in [1, 2 ** 64), but it's impossible # to actually construct a bytes object that large, so we don't check # for it - if not isinstance(initialization_vector, bytes): - raise TypeError("initialization_vector must be bytes") + utils._check_byteslike("initialization_vector", initialization_vector) if len(initialization_vector) == 0: raise ValueError("initialization_vector must be at least 1 byte") self._initialization_vector = initialization_vector if tag is not None: - if not isinstance(tag, bytes): - raise TypeError("tag must be bytes or None") + utils._check_bytes("tag", tag) if min_tag_length < 4: raise ValueError("min_tag_length must be >= 4") if len(tag) < min_tag_length: raise ValueError( - "Authentication tag must be {0} bytes or longer.".format( + "Authentication tag must be {} bytes or longer.".format( min_tag_length) ) self._tag = tag diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/cmac.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/cmac.py index 77537f0..95a8d97 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/cmac.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/cmac.py @@ -9,10 +9,9 @@ from cryptography.exceptions import ( AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import CMACBackend -from cryptography.hazmat.primitives import ciphers, mac +from cryptography.hazmat.primitives import ciphers -@utils.register_interface(mac.MACContext) class CMAC(object): def __init__(self, algorithm, backend, ctx=None): if not isinstance(backend, CMACBackend): @@ -36,8 +35,8 @@ class CMAC(object): def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + + utils._check_bytes("data", data) self._ctx.update(data) def finalize(self): @@ -48,8 +47,7 @@ class CMAC(object): return digest def verify(self, signature): - if not isinstance(signature, bytes): - raise TypeError("signature must be bytes.") + utils._check_bytes("signature", signature) if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/constant_time.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/constant_time.py index 0e987ea..35ceafe 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/constant_time.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/constant_time.py @@ -21,9 +21,9 @@ if hasattr(hmac, "compare_digest"): else: warnings.warn( "Support for your 
Python version is deprecated. The next version of " - "cryptography will remove support. Please upgrade to a 2.7.x " - "release that supports hmac.compare_digest as soon as possible.", - utils.DeprecatedIn23, + "cryptography will remove support. Please upgrade to a release " + "(2.7.7+) that supports hmac.compare_digest as soon as possible.", + utils.PersistentlyDeprecated2018, ) def bytes_eq(a, b): diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hashes.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hashes.py index 3f3aadd..9be2b60 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hashes.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hashes.py @@ -57,6 +57,13 @@ class HashContext(object): """ +@six.add_metaclass(abc.ABCMeta) +class ExtendableOutputFunction(object): + """ + An interface for extendable output functions. + """ + + @utils.register_interface(HashContext) class Hash(object): def __init__(self, algorithm, backend, ctx=None): @@ -82,8 +89,7 @@ class Hash(object): def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + utils._check_byteslike("data", data) self._ctx.update(data) def copy(self): @@ -108,6 +114,20 @@ class SHA1(object): block_size = 64 +@utils.register_interface(HashAlgorithm) +class SHA512_224(object): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA512_256(object): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + @utils.register_interface(HashAlgorithm) class SHA224(object): name = "sha224" @@ -136,6 +156,64 @@ class SHA512(object): block_size = 128 +@utils.register_interface(HashAlgorithm) +class SHA3_224(object): # noqa: N801 + name = "sha3-224" + digest_size = 28 + + +@utils.register_interface(HashAlgorithm) +class SHA3_256(object): # noqa: N801 + name = "sha3-256" + digest_size = 32 + + +@utils.register_interface(HashAlgorithm) +class SHA3_384(object): # noqa: N801 + name = "sha3-384" + digest_size = 48 + + +@utils.register_interface(HashAlgorithm) +class SHA3_512(object): # noqa: N801 + name = "sha3-512" + digest_size = 64 + + +@utils.register_interface(HashAlgorithm) +@utils.register_interface(ExtendableOutputFunction) +class SHAKE128(object): + name = "shake128" + + def __init__(self, digest_size): + if not isinstance(digest_size, six.integer_types): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") + + +@utils.register_interface(HashAlgorithm) +@utils.register_interface(ExtendableOutputFunction) +class SHAKE256(object): + name = "shake256" + + def __init__(self, digest_size): + if not isinstance(digest_size, six.integer_types): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + digest_size = utils.read_only_property("_digest_size") + + @utils.register_interface(HashAlgorithm) class MD5(object): name = "md5" @@ -151,13 +229,9 @@ class BLAKE2b(object): block_size = 128 def __init__(self, digest_size): - if ( - digest_size > self._max_digest_size or - digest_size < self._min_digest_size - ): - raise 
ValueError("Digest size must be {0}-{1}".format( - self._min_digest_size, self._max_digest_size) - ) + + if digest_size != 64: + raise ValueError("Digest size must be 64") self._digest_size = digest_size @@ -172,13 +246,9 @@ class BLAKE2s(object): _min_digest_size = 1 def __init__(self, digest_size): - if ( - digest_size > self._max_digest_size or - digest_size < self._min_digest_size - ): - raise ValueError("Digest size must be {0}-{1}".format( - self._min_digest_size, self._max_digest_size) - ) + + if digest_size != 32: + raise ValueError("Digest size must be 32") self._digest_size = digest_size diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hmac.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hmac.py index 2e9a4e2..9eceeac 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hmac.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/hmac.py @@ -9,10 +9,9 @@ from cryptography.exceptions import ( AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend -from cryptography.hazmat.primitives import hashes, mac +from cryptography.hazmat.primitives import hashes -@utils.register_interface(mac.MACContext) @utils.register_interface(hashes.HashContext) class HMAC(object): def __init__(self, key, algorithm, backend, ctx=None): @@ -38,8 +37,7 @@ class HMAC(object): def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + utils._check_byteslike("data", data) self._ctx.update(data) def copy(self): @@ -60,8 +58,7 @@ class HMAC(object): return digest def verify(self, signature): - if not isinstance(signature, bytes): - raise TypeError("signature must be bytes.") + utils._check_bytes("signature", signature) if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/concatkdf.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/concatkdf.py index c6399e4..7cb6385 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/concatkdf.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -24,17 +24,15 @@ def _common_args_checks(algorithm, length, otherinfo): max_length = algorithm.digest_size * (2 ** 32 - 1) if length > max_length: raise ValueError( - "Can not derive keys larger than {0} bits.".format( + "Can not derive keys larger than {} bits.".format( max_length )) - if not (otherinfo is None or isinstance(otherinfo, bytes)): - raise TypeError("otherinfo must be bytes.") + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) def _concatkdf_derive(key_material, length, auxfn, otherinfo): - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") - + utils._check_byteslike("key_material", key_material) output = [b""] outlen = 0 counter = 1 @@ -96,10 +94,11 @@ class ConcatKDFHMAC(object): if self._otherinfo is None: self._otherinfo = b"" - if not (salt is None or isinstance(salt, bytes)): - raise TypeError("salt must be bytes.") if salt is None: salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + self._salt = salt if not isinstance(backend, HMACBackend): diff --git 
a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/hkdf.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/hkdf.py index 917b4e9..01f0f28 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/hkdf.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/hkdf.py @@ -26,11 +26,10 @@ class HKDF(object): self._algorithm = algorithm - if not (salt is None or isinstance(salt, bytes)): - raise TypeError("salt must be bytes.") - if salt is None: salt = b"\x00" * self._algorithm.digest_size + else: + utils._check_bytes("salt", salt) self._salt = salt @@ -44,9 +43,7 @@ class HKDF(object): return h.finalize() def derive(self, key_material): - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") - + utils._check_byteslike("key_material", key_material) return self._hkdf_expand.derive(self._extract(key_material)) def verify(self, key_material, expected_key): @@ -71,17 +68,16 @@ class HKDFExpand(object): if length > max_length: raise ValueError( - "Can not derive keys larger than {0} octets.".format( + "Can not derive keys larger than {} octets.".format( max_length )) self._length = length - if not (info is None or isinstance(info, bytes)): - raise TypeError("info must be bytes.") - if info is None: info = b"" + else: + utils._check_bytes("info", info) self._info = info @@ -102,9 +98,7 @@ class HKDFExpand(object): return b"".join(output)[:self._length] def derive(self, key_material): - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") - + utils._check_byteslike("key_material", key_material) if self._used: raise AlreadyFinalized diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/kbkdf.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/kbkdf.py index 14de56e..56783a8 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/kbkdf.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -73,10 +73,8 @@ class KBKDFHMAC(object): if context is None: context = b'' - if (not isinstance(label, bytes) or - not isinstance(context, bytes)): - raise TypeError('label and context must be of type bytes') - + utils._check_bytes("label", label) + utils._check_bytes("context", context) self._algorithm = algorithm self._mode = mode self._length = length @@ -102,8 +100,7 @@ class KBKDFHMAC(object): if self._used: raise AlreadyFinalized - if not isinstance(key_material, bytes): - raise TypeError('key_material must be bytes') + utils._check_byteslike("key_material", key_material) self._used = True # inverse floor division (equivalent to ceiling) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/pbkdf2.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/pbkdf2.py index f8ce7a3..07d8ac6 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -24,15 +24,14 @@ class PBKDF2HMAC(object): if not backend.pbkdf2_hmac_supported(algorithm): raise UnsupportedAlgorithm( - "{0} is not supported for PBKDF2 by this backend.".format( + "{} is not supported for PBKDF2 by this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) self._used = False self._algorithm = algorithm self._length = length - if not isinstance(salt, bytes): 
- raise TypeError("salt must be bytes.") + utils._check_bytes("salt", salt) self._salt = salt self._iterations = iterations self._backend = backend @@ -42,8 +41,7 @@ class PBKDF2HMAC(object): raise AlreadyFinalized("PBKDF2 instances can only be used once.") self._used = True - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") + utils._check_byteslike("key_material", key_material) return self._backend.derive_pbkdf2_hmac( self._algorithm, self._length, diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/scrypt.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/scrypt.py index 77dcf9a..df9745e 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/scrypt.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/scrypt.py @@ -30,9 +30,7 @@ class Scrypt(object): ) self._length = length - if not isinstance(salt, bytes): - raise TypeError("salt must be bytes.") - + utils._check_bytes("salt", salt) if n < 2 or (n & (n - 1)) != 0: raise ValueError("n must be greater than 1 and be a power of 2.") @@ -54,8 +52,7 @@ class Scrypt(object): raise AlreadyFinalized("Scrypt instances can only be used once.") self._used = True - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") + utils._check_byteslike("key_material", key_material) return self._backend.derive_scrypt( key_material, self._salt, self._length, self._n, self._r, self._p ) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/x963kdf.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/x963kdf.py index 83789b3..9eb50b0 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/x963kdf.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -26,9 +26,10 @@ class X963KDF(object): max_len = algorithm.digest_size * (2 ** 32 - 1) if length > max_len: raise ValueError( - "Can not derive keys larger than {0} bits.".format(max_len)) - if not (sharedinfo is None or isinstance(sharedinfo, bytes)): - raise TypeError("sharedinfo must be bytes.") + "Can not derive keys larger than {} bits.".format(max_len)) + if sharedinfo is not None: + utils._check_bytes("sharedinfo", sharedinfo) + self._algorithm = algorithm self._length = length self._sharedinfo = sharedinfo @@ -45,10 +46,7 @@ class X963KDF(object): if self._used: raise AlreadyFinalized self._used = True - - if not isinstance(key_material, bytes): - raise TypeError("key_material must be bytes.") - + utils._check_byteslike("key_material", key_material) output = [b""] outlen = 0 counter = 1 diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/mac.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/mac.py deleted file mode 100644 index 4c95190..0000000 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/mac.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class MACContext(object): - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes. 
- """ - - @abc.abstractmethod - def finalize(self): - """ - Returns the message authentication code as bytes. - """ - - @abc.abstractmethod - def copy(self): - """ - Return a MACContext that is a copy of the current context. - """ - - @abc.abstractmethod - def verify(self, signature): - """ - Checks if the generated message authentication code matches the - signature. - """ diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/padding.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/padding.py index a081976..170c802 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/padding.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/padding.py @@ -40,8 +40,7 @@ def _byte_padding_update(buffer_, data, block_size): if buffer_ is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + utils._check_bytes("data", data) buffer_ += data @@ -65,8 +64,7 @@ def _byte_unpadding_update(buffer_, data, block_size): if buffer_ is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(data, bytes): - raise TypeError("data must be bytes.") + utils._check_bytes("data", data) buffer_ += data diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/poly1305.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..d92f62a --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) + + +class Poly1305(object): + def __init__(self, key): + from cryptography.hazmat.backends.openssl.backend import backend + if not backend.poly1305_supported(): + raise UnsupportedAlgorithm( + "poly1305 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_MAC + ) + self._ctx = backend.create_poly1305_ctx(key) + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + mac = self._ctx.finalize() + self._ctx = None + return mac + + def verify(self, tag): + utils._check_bytes("tag", tag) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(tag) + + @classmethod + def generate_tag(cls, key, data): + p = Poly1305(key) + p.update(data) + return p.finalize() + + @classmethod + def verify_tag(cls, key, data, tag): + p = Poly1305(key) + p.update(data) + p.verify(tag) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/__init__.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..f6d4ce9 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.serialization.base import ( + BestAvailableEncryption, Encoding, KeySerializationEncryption, + NoEncryption, ParameterFormat, PrivateFormat, PublicFormat, + load_der_parameters, load_der_private_key, load_der_public_key, + load_pem_parameters, load_pem_private_key, load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + load_ssh_public_key +) + + +_PEM_DER = (Encoding.PEM, Encoding.DER) + +__all__ = [ + "load_der_parameters", "load_der_private_key", "load_der_public_key", + "load_pem_parameters", "load_pem_private_key", "load_pem_public_key", + "load_ssh_public_key", "Encoding", "PrivateFormat", "PublicFormat", + "ParameterFormat", "KeySerializationEncryption", "BestAvailableEncryption", + "NoEncryption", +] diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/base.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..4218ea8 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,82 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
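The new poly1305.py implements exactly that shape, plus one-shot classmethods. Poly1305 is a one-time authenticator: a 32-byte key must never be reused across messages. Minimal usage, assuming backend.poly1305_supported() is true (OpenSSL 1.1.1+):

import os
from cryptography.hazmat.primitives import poly1305

key = os.urandom(32)  # one-time key; never authenticate two messages with it

tag = poly1305.Poly1305.generate_tag(key, b"message to authenticate")

# verify_tag recomputes the tag and raises InvalidSignature on mismatch.
poly1305.Poly1305.verify_tag(key, b"message to authenticate", tag)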
+ +from __future__ import absolute_import, division, print_function + +import abc +from enum import Enum + +import six + +from cryptography import utils + + +def load_pem_private_key(data, password, backend): + return backend.load_pem_private_key(data, password) + + +def load_pem_public_key(data, backend): + return backend.load_pem_public_key(data) + + +def load_pem_parameters(data, backend): + return backend.load_pem_parameters(data) + + +def load_der_private_key(data, password, backend): + return backend.load_der_private_key(data, password) + + +def load_der_public_key(data, backend): + return backend.load_der_public_key(data) + + +def load_der_parameters(data, backend): + return backend.load_der_parameters(data) + + +class Encoding(Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + + +class PrivateFormat(Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + + +class PublicFormat(Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(Enum): + PKCS3 = "PKCS3" + + +@six.add_metaclass(abc.ABCMeta) +class KeySerializationEncryption(object): + pass + + +@utils.register_interface(KeySerializationEncryption) +class BestAvailableEncryption(object): + def __init__(self, password): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +@utils.register_interface(KeySerializationEncryption) +class NoEncryption(object): + pass diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/pkcs12.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..98161d5 --- /dev/null +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
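serialization is likewise promoted from a module to a package: base.py carries the loaders and enums (note the new Raw and X962 encodings and the CompressedPoint/UncompressedPoint public formats), ssh.py keeps the OpenSSH parsing, and pkcs12.py is new. Serializing one of the new key types exercises both the classic and the Raw paths; a sketch:

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed448

key = ed448.Ed448PrivateKey.generate()

# Traditional encrypted PKCS#8 PEM.
pem = key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.PKCS8,
    serialization.BestAvailableEncryption(b"passphrase"),
)

# Raw/Raw/NoEncryption is the only combination allowed for the bare
# 57-byte Ed448 private key.
raw = key.private_bytes(
    serialization.Encoding.Raw,
    serialization.PrivateFormat.Raw,
    serialization.NoEncryption(),
)
assert len(raw) == 57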
+ +from __future__ import absolute_import, division, print_function + + +def load_key_and_certificates(data, password, backend): + return backend.load_key_and_certificates_from_pkcs12(data, password) diff --git a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization.py b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/ssh.py similarity index 70% rename from server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization.py rename to server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/ssh.py index bd09e6e..a1d6c8c 100644 --- a/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization.py +++ b/server/www/packages/packages-linux/x64/cryptography/hazmat/primitives/serialization/ssh.py @@ -4,40 +4,14 @@ from __future__ import absolute_import, division, print_function -import abc import base64 import struct -from enum import Enum import six from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa - - -def load_pem_private_key(data, password, backend): - return backend.load_pem_private_key(data, password) - - -def load_pem_public_key(data, backend): - return backend.load_pem_public_key(data) - - -def load_pem_parameters(data, backend): - return backend.load_pem_parameters(data) - - -def load_der_private_key(data, password, backend): - return backend.load_der_private_key(data, password) - - -def load_der_public_key(data, backend): - return backend.load_der_public_key(data) - - -def load_der_parameters(data, backend): - return backend.load_der_parameters(data) +from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa def load_ssh_public_key(data, backend): @@ -57,6 +31,8 @@ def load_ssh_public_key(data, backend): b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521', ]: loader = _load_ssh_ecdsa_public_key + elif key_type == b'ssh-ed25519': + loader = _load_ssh_ed25519_public_key else: raise UnsupportedAlgorithm('Key type is not supported.') @@ -125,8 +101,16 @@ def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend): "Compressed elliptic curve points are not supported" ) - numbers = ec.EllipticCurvePublicNumbers.from_encoded_point(curve, data) - return numbers.public_key(backend) + return ec.EllipticCurvePublicKey.from_encoded_point(curve, data) + + +def _load_ssh_ed25519_public_key(expected_key_type, decoded_data, backend): + data, rest = _ssh_read_next_string(decoded_data) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return ed25519.Ed25519PublicKey.from_public_bytes(data) def _ssh_read_next_string(data): @@ -167,43 +151,3 @@ def _ssh_write_mpint(value): if six.indexbytes(data, 0) & 0x80: data = b"\x00" + data return _ssh_write_string(data) - - -class Encoding(Enum): - PEM = "PEM" - DER = "DER" - OpenSSH = "OpenSSH" - - -class PrivateFormat(Enum): - PKCS8 = "PKCS8" - TraditionalOpenSSL = "TraditionalOpenSSL" - - -class PublicFormat(Enum): - SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" - PKCS1 = "Raw PKCS#1" - OpenSSH = "OpenSSH" - - -class ParameterFormat(Enum): - PKCS3 = "PKCS3" - - -@six.add_metaclass(abc.ABCMeta) -class KeySerializationEncryption(object): - pass - - -@utils.register_interface(KeySerializationEncryption) -class BestAvailableEncryption(object): - def __init__(self, password): - if not isinstance(password, bytes) or len(password) == 0: - raise 
ValueError("Password must be 1 or more bytes.") - - self.password = password - - -@utils.register_interface(KeySerializationEncryption) -class NoEncryption(object): - pass diff --git a/server/www/packages/packages-linux/x64/cryptography/utils.py b/server/www/packages/packages-linux/x64/cryptography/utils.py index 3d45a77..e895aa0 100644 --- a/server/www/packages/packages-linux/x64/cryptography/utils.py +++ b/server/www/packages/packages-linux/x64/cryptography/utils.py @@ -20,14 +20,22 @@ class CryptographyDeprecationWarning(UserWarning): # Several APIs were deprecated with no specific end-of-life date because of the # ubiquity of their use. They should not be removed until we agree on when that # cycle ends. -PersistentlyDeprecated = CryptographyDeprecationWarning -DeprecatedIn21 = CryptographyDeprecationWarning -DeprecatedIn23 = CryptographyDeprecationWarning +PersistentlyDeprecated2017 = CryptographyDeprecationWarning +PersistentlyDeprecated2018 = CryptographyDeprecationWarning +PersistentlyDeprecated2019 = CryptographyDeprecationWarning +DeprecatedIn27 = CryptographyDeprecationWarning def _check_bytes(name, value): if not isinstance(value, bytes): - raise TypeError("{0} must be bytes".format(name)) + raise TypeError("{} must be bytes".format(name)) + + +def _check_byteslike(name, value): + try: + memoryview(value) + except TypeError: + raise TypeError("{} must be bytes-like".format(name)) def read_only_property(name): @@ -90,7 +98,7 @@ def verify_interface(iface, klass): for method in iface.__abstractmethods__: if not hasattr(klass, method): raise InterfaceNotImplemented( - "{0} is missing a {1!r} method".format(klass, method) + "{} is missing a {!r} method".format(klass, method) ) if isinstance(getattr(iface, method), abc.abstractproperty): # Can't properly verify these yet. @@ -99,8 +107,8 @@ def verify_interface(iface, klass): actual = signature(getattr(klass, method)) if sig != actual: raise InterfaceNotImplemented( - "{0}.{1}'s signature differs from the expected. Expected: " - "{2!r}. Received: {3!r}".format( + "{}.{}'s signature differs from the expected. Expected: " + "{!r}. 
Received: {!r}".format( klass, method, sig, actual ) ) @@ -152,7 +160,7 @@ def deprecated(value, module_name, message, warning_class): def cached_property(func): - cached_name = "_cached_{0}".format(func) + cached_name = "_cached_{}".format(func) sentinel = object() def inner(instance): diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/__init__.py b/server/www/packages/packages-linux/x64/cryptography/x509/__init__.py index fd01945..b761e26 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/__init__.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/__init__.py @@ -21,9 +21,9 @@ from cryptography.x509.extensions import ( DeltaCRLIndicator, DistributionPoint, DuplicateExtension, ExtendedKeyUsage, Extension, ExtensionNotFound, ExtensionType, Extensions, FreshestCRL, GeneralNames, InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName, - KeyUsage, NameConstraints, NoticeReference, OCSPNoCheck, OCSPNonce, - PolicyConstraints, PolicyInformation, PrecertPoison, - PrecertificateSignedCertificateTimestamps, ReasonFlags, + IssuingDistributionPoint, KeyUsage, NameConstraints, NoticeReference, + OCSPNoCheck, OCSPNonce, PolicyConstraints, PolicyInformation, + PrecertPoison, PrecertificateSignedCertificateTimestamps, ReasonFlags, SubjectAlternativeName, SubjectKeyIdentifier, TLSFeature, TLSFeatureType, UnrecognizedExtension, UserNotice ) @@ -134,6 +134,7 @@ __all__ = [ "Extension", "ExtendedKeyUsage", "FreshestCRL", + "IssuingDistributionPoint", "TLSFeature", "TLSFeatureType", "OCSPNoCheck", diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/base.py b/server/www/packages/packages-linux/x64/cryptography/x509/base.py index a3b334a..3983c9b 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/base.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/base.py @@ -12,12 +12,14 @@ from enum import Enum import six from cryptography import utils -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric import ( + dsa, ec, ed25519, ed448, rsa +) from cryptography.x509.extensions import Extension, ExtensionType from cryptography.x509.name import Name -_UNIX_EPOCH = datetime.datetime(1970, 1, 1) +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) def _reject_duplicate_extension(extension, extensions): @@ -474,9 +476,12 @@ class CertificateBuilder(object): Sets the requestor's public key (as found in the signing request). 
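utils._check_byteslike, added above, is the single helper behind all of those call-site changes: rather than an isinstance() check it simply attempts to take a memoryview, which accepts any buffer-protocol object. An equivalent standalone sketch (check_byteslike is an illustrative re-implementation):

def check_byteslike(name, value):
    # bytes, bytearray, memoryview, array.array, mmap, ... all pass;
    # str, int, None, ... raise TypeError from memoryview().
    try:
        memoryview(value)
    except TypeError:
        raise TypeError("{} must be bytes-like".format(name))

check_byteslike("key", b"\x00" * 32)    # ok
check_byteslike("key", bytearray(32))   # ok
# check_byteslike("key", u"not bytes")  # would raise TypeError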
""" if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey, - ec.EllipticCurvePublicKey)): + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey)): raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' - ' or EllipticCurvePublicKey.') + ' EllipticCurvePublicKey, Ed25519PublicKey or' + ' Ed448PublicKey.') if self._public_key is not None: raise ValueError('The public key may only be set once.') return CertificateBuilder( @@ -516,9 +521,9 @@ class CertificateBuilder(object): if self._not_valid_before is not None: raise ValueError('The not valid before may only be set once.') time = _convert_to_naive_utc_time(time) - if time <= _UNIX_EPOCH: - raise ValueError('The not valid before date must be after the unix' - ' epoch (1970 January 1).') + if time < _EARLIEST_UTC_TIME: + raise ValueError('The not valid before date must be on or after' + ' 1950 January 1).') if self._not_valid_after is not None and time > self._not_valid_after: raise ValueError( 'The not valid before date must be before the not valid after ' @@ -539,9 +544,9 @@ class CertificateBuilder(object): if self._not_valid_after is not None: raise ValueError('The not valid after may only be set once.') time = _convert_to_naive_utc_time(time) - if time <= _UNIX_EPOCH: - raise ValueError('The not valid after date must be after the unix' - ' epoch (1970 January 1).') + if time < _EARLIEST_UTC_TIME: + raise ValueError('The not valid after date must be on or after' + ' 1950 January 1.') if (self._not_valid_before is not None and time < self._not_valid_before): raise ValueError( @@ -620,9 +625,9 @@ class CertificateRevocationListBuilder(object): if self._last_update is not None: raise ValueError('Last update may only be set once.') last_update = _convert_to_naive_utc_time(last_update) - if last_update <= _UNIX_EPOCH: - raise ValueError('The last update date must be after the unix' - ' epoch (1970 January 1).') + if last_update < _EARLIEST_UTC_TIME: + raise ValueError('The last update date must be on or after' + ' 1950 January 1.') if self._next_update is not None and last_update > self._next_update: raise ValueError( 'The last update date must be before the next update date.' @@ -638,9 +643,9 @@ class CertificateRevocationListBuilder(object): if self._next_update is not None: raise ValueError('Last update may only be set once.') next_update = _convert_to_naive_utc_time(next_update) - if next_update <= _UNIX_EPOCH: - raise ValueError('The last update date must be after the unix' - ' epoch (1970 January 1).') + if next_update < _EARLIEST_UTC_TIME: + raise ValueError('The last update date must be on or after' + ' 1950 January 1.') if self._last_update is not None and next_update < self._last_update: raise ValueError( 'The next update date must be after the last update date.' 
@@ -720,9 +725,9 @@ class RevokedCertificateBuilder(object): if self._revocation_date is not None: raise ValueError('The revocation date may only be set once.') time = _convert_to_naive_utc_time(time) - if time <= _UNIX_EPOCH: - raise ValueError('The revocation date must be after the unix' - ' epoch (1970 January 1).') + if time < _EARLIEST_UTC_TIME: + raise ValueError('The revocation date must be on or after' + ' 1950 January 1.') return RevokedCertificateBuilder( self._serial_number, time, self._extensions ) diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/extensions.py b/server/www/packages/packages-linux/x64/cryptography/x509/extensions.py index 6301af5..ad90e9b 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/extensions.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/extensions.py @@ -8,13 +8,15 @@ import abc import datetime import hashlib import ipaddress +import warnings from enum import Enum -from asn1crypto.keys import PublicKeyInfo - import six from cryptography import utils +from cryptography.hazmat._der import ( + BIT_STRING, DERReader, OBJECT_IDENTIFIER, SEQUENCE +) from cryptography.hazmat.primitives import constant_time, serialization from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey @@ -35,7 +37,10 @@ def _key_identifier_from_public_key(public_key): serialization.PublicFormat.PKCS1, ) elif isinstance(public_key, EllipticCurvePublicKey): - data = public_key.public_numbers().encode_point() + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint + ) else: # This is a very slow way to do this. serialized = public_key.public_bytes( @@ -43,11 +48,41 @@ def _key_identifier_from_public_key(public_key): serialization.PublicFormat.SubjectPublicKeyInfo ) - data = six.binary_type(PublicKeyInfo.load(serialized)['public_key']) + reader = DERReader(serialized) + with reader.read_single_element(SEQUENCE) as public_key_info: + algorithm = public_key_info.read_element(SEQUENCE) + public_key = public_key_info.read_element(BIT_STRING) + + # Double-check the algorithm structure. + with algorithm: + algorithm.read_element(OBJECT_IDENTIFIER) + if not algorithm.is_empty(): + # Skip the optional parameters field. + algorithm.read_any_element() + + # BIT STRING contents begin with the number of padding bytes added. It + # must be zero for SubjectPublicKeyInfo structures. 
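_key_identifier_from_public_key, reworked above, now parses the DER-encoded SubjectPublicKeyInfo with the in-tree DERReader instead of asn1crypto: SPKI is SEQUENCE { AlgorithmIdentifier, BIT STRING }, and per RFC 5280's method 1 the key identifier is the SHA-1 digest of the BIT STRING contents. The public entry point is unchanged; a sketch:

from cryptography import x509
from cryptography.hazmat.primitives.asymmetric import ed448

public_key = ed448.Ed448PrivateKey.generate().public_key()

# Internally this serializes the key to DER SPKI, strips the BIT STRING
# header (checking that the padding byte is zero), and SHA-1 hashes the rest.
ski = x509.SubjectKeyIdentifier.from_public_key(public_key)
assert len(ski.digest) == 20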
+ if public_key.read_byte() != 0: + raise ValueError('Invalid public key encoding') + + data = public_key.data return hashlib.sha1(data).digest() +def _make_sequence_methods(field_name): + def len_method(self): + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + class DuplicateExtension(Exception): def __init__(self, msg, oid): super(DuplicateExtension, self).__init__(msg) @@ -78,7 +113,7 @@ class Extensions(object): if ext.oid == oid: return ext - raise ExtensionNotFound("No {0} extension was found".format(oid), oid) + raise ExtensionNotFound("No {} extension was found".format(oid), oid) def get_extension_for_class(self, extclass): if extclass is UnrecognizedExtension: @@ -93,21 +128,14 @@ class Extensions(object): return ext raise ExtensionNotFound( - "No {0} extension was found".format(extclass), extclass.oid + "No {} extension was found".format(extclass), extclass.oid ) - def __iter__(self): - return iter(self._extensions) - - def __len__(self): - return len(self._extensions) - - def __getitem__(self, idx): - return self._extensions[idx] + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") def __repr__(self): return ( - "".format(self._extensions) + "".format(self._extensions) ) @@ -134,7 +162,7 @@ class CRLNumber(object): return hash(self.crl_number) def __repr__(self): - return "".format(self.crl_number) + return "".format(self.crl_number) crl_number = utils.read_only_property("_crl_number") @@ -185,8 +213,21 @@ class AuthorityKeyIdentifier(object): @classmethod def from_issuer_subject_key_identifier(cls, ski): + if isinstance(ski, SubjectKeyIdentifier): + digest = ski.digest + else: + digest = ski.value.digest + warnings.warn( + "Extension objects are deprecated as arguments to " + "from_issuer_subject_key_identifier and support will be " + "removed soon. 
Please migrate to passing a " + "SubjectKeyIdentifier directly.", + utils.DeprecatedIn27, + stacklevel=2, + ) + return cls( - key_identifier=ski.value.digest, + key_identifier=digest, authority_cert_issuer=None, authority_cert_serial_number=None ) @@ -272,14 +313,10 @@ class AuthorityInformationAccess(object): self._descriptions = descriptions - def __iter__(self): - return iter(self._descriptions) - - def __len__(self): - return len(self._descriptions) + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") def __repr__(self): - return "".format(self._descriptions) + return "".format(self._descriptions) def __eq__(self, other): if not isinstance(other, AuthorityInformationAccess): @@ -290,9 +327,6 @@ class AuthorityInformationAccess(object): def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._descriptions[idx] - def __hash__(self): return hash(tuple(self._descriptions)) @@ -419,14 +453,12 @@ class CRLDistributionPoints(object): self._distribution_points = distribution_points - def __iter__(self): - return iter(self._distribution_points) - - def __len__(self): - return len(self._distribution_points) + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) def __repr__(self): - return "".format(self._distribution_points) + return "".format(self._distribution_points) def __eq__(self, other): if not isinstance(other, CRLDistributionPoints): @@ -437,9 +469,6 @@ class CRLDistributionPoints(object): def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._distribution_points[idx] - def __hash__(self): return hash(tuple(self._distribution_points)) @@ -460,14 +489,12 @@ class FreshestCRL(object): self._distribution_points = distribution_points - def __iter__(self): - return iter(self._distribution_points) - - def __len__(self): - return len(self._distribution_points) + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) def __repr__(self): - return "".format(self._distribution_points) + return "".format(self._distribution_points) def __eq__(self, other): if not isinstance(other, FreshestCRL): @@ -478,9 +505,6 @@ class FreshestCRL(object): def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._distribution_points[idx] - def __hash__(self): return hash(tuple(self._distribution_points)) @@ -541,8 +565,8 @@ class DistributionPoint(object): def __repr__(self): return ( "".format(self) + "tive_name}, reasons={0.reasons}, crl_issuer={0.crl_issuer})>" + .format(self) ) def __eq__(self, other): @@ -666,14 +690,10 @@ class CertificatePolicies(object): self._policies = policies - def __iter__(self): - return iter(self._policies) - - def __len__(self): - return len(self._policies) + __len__, __iter__, __getitem__ = _make_sequence_methods("_policies") def __repr__(self): - return "".format(self._policies) + return "".format(self._policies) def __eq__(self, other): if not isinstance(other, CertificatePolicies): @@ -684,9 +704,6 @@ class CertificatePolicies(object): def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._policies[idx] - def __hash__(self): return hash(tuple(self._policies)) @@ -827,14 +844,10 @@ class ExtendedKeyUsage(object): self._usages = usages - def __iter__(self): - return iter(self._usages) - - def __len__(self): - return len(self._usages) + __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") def __repr__(self): - return 
"".format(self._usages) + return "".format(self._usages) def __eq__(self, other): if not isinstance(other, ExtendedKeyUsage): @@ -853,11 +866,41 @@ class ExtendedKeyUsage(object): class OCSPNoCheck(object): oid = ExtensionOID.OCSP_NO_CHECK + def __eq__(self, other): + if not isinstance(other, OCSPNoCheck): + return NotImplemented + + return True + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(OCSPNoCheck) + + def __repr__(self): + return "" + @utils.register_interface(ExtensionType) class PrecertPoison(object): oid = ExtensionOID.PRECERT_POISON + def __eq__(self, other): + if not isinstance(other, PrecertPoison): + return NotImplemented + + return True + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(PrecertPoison) + + def __repr__(self): + return "" + @utils.register_interface(ExtensionType) class TLSFeature(object): @@ -876,11 +919,7 @@ class TLSFeature(object): self._features = features - def __iter__(self): - return iter(self._features) - - def __len__(self): - return len(self._features) + __len__, __iter__, __getitem__ = _make_sequence_methods("_features") def __repr__(self): return "".format(self) @@ -891,9 +930,6 @@ class TLSFeature(object): return self._features == other._features - def __getitem__(self, idx): - return self._features[idx] - def __ne__(self, other): return not self == other @@ -912,7 +948,7 @@ class TLSFeatureType(Enum): status_request_v2 = 17 -_TLS_FEATURE_TYPE_TO_ENUM = dict((x.value, x) for x in TLSFeatureType) +_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} @utils.register_interface(ExtensionType) @@ -1172,12 +1208,7 @@ class GeneralNames(object): ) self._general_names = general_names - - def __iter__(self): - return iter(self._general_names) - - def __len__(self): - return len(self._general_names) + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") def get_values_for_type(self, type): # Return the value of each GeneralName, except for OtherName instances @@ -1189,7 +1220,7 @@ class GeneralNames(object): return list(objs) def __repr__(self): - return "".format(self._general_names) + return "".format(self._general_names) def __eq__(self, other): if not isinstance(other, GeneralNames): @@ -1200,9 +1231,6 @@ class GeneralNames(object): def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._general_names[idx] - def __hash__(self): return hash(tuple(self._general_names)) @@ -1214,17 +1242,13 @@ class SubjectAlternativeName(object): def __init__(self, general_names): self._general_names = GeneralNames(general_names) - def __iter__(self): - return iter(self._general_names) - - def __len__(self): - return len(self._general_names) + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") def get_values_for_type(self, type): return self._general_names.get_values_for_type(type) def __repr__(self): - return "".format(self._general_names) + return "".format(self._general_names) def __eq__(self, other): if not isinstance(other, SubjectAlternativeName): @@ -1232,9 +1256,6 @@ class SubjectAlternativeName(object): return self._general_names == other._general_names - def __getitem__(self, idx): - return self._general_names[idx] - def __ne__(self, other): return not self == other @@ -1249,17 +1270,13 @@ class IssuerAlternativeName(object): def __init__(self, general_names): self._general_names = GeneralNames(general_names) - def __iter__(self): - return iter(self._general_names) - - def 
@@ -1249,17 +1270,13 @@ class IssuerAlternativeName(object): def __init__(self, general_names): self._general_names = GeneralNames(general_names) - def __iter__(self): - return iter(self._general_names) - - def __len__(self): - return len(self._general_names) + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") def get_values_for_type(self, type): return self._general_names.get_values_for_type(type) def __repr__(self): - return "<IssuerAlternativeName({0})>".format(self._general_names) + return "<IssuerAlternativeName({})>".format(self._general_names) def __eq__(self, other): if not isinstance(other, IssuerAlternativeName): @@ -1270,9 +1287,6 @@ def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._general_names[idx] - def __hash__(self): return hash(self._general_names) @@ -1284,17 +1298,13 @@ class CertificateIssuer(object): def __init__(self, general_names): self._general_names = GeneralNames(general_names) - def __iter__(self): - return iter(self._general_names) - - def __len__(self): - return len(self._general_names) + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") def get_values_for_type(self, type): return self._general_names.get_values_for_type(type) def __repr__(self): - return "<CertificateIssuer({0})>".format(self._general_names) + return "<CertificateIssuer({})>".format(self._general_names) def __eq__(self, other): if not isinstance(other, CertificateIssuer): @@ -1305,9 +1315,6 @@ def __ne__(self, other): return not self == other - def __getitem__(self, idx): - return self._general_names[idx] - def __hash__(self): return hash(self._general_names) @@ -1323,7 +1330,7 @@ class CRLReason(object): self._reason = reason def __repr__(self): - return "<CRLReason(reason={0})>".format(self._reason) + return "<CRLReason(reason={})>".format(self._reason) def __eq__(self, other): if not isinstance(other, CRLReason): @@ -1351,7 +1358,7 @@ class InvalidityDate(object): self._invalidity_date = invalidity_date def __repr__(self): - return "<InvalidityDate(invalidity_date={0})>".format( + return "<InvalidityDate(invalidity_date={})>".format( self._invalidity_date ) @@ -1386,18 +1393,13 @@ class PrecertificateSignedCertificateTimestamps(object): ) self._signed_certificate_timestamps = signed_certificate_timestamps - def __iter__(self): - return iter(self._signed_certificate_timestamps) - - def __len__(self): - return len(self._signed_certificate_timestamps) - - def __getitem__(self, idx): - return self._signed_certificate_timestamps[idx] + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) def __repr__(self): return ( - "<PrecertificateSignedCertificateTimestamps({0})>".format( + "<PrecertificateSignedCertificateTimestamps({})>".format( list(self) ) ) @@ -1446,6 +1448,136 @@ class OCSPNonce(object): nonce = utils.read_only_property("_nonce")
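PrecertificateSignedCertificateTimestamps gains the same indexing support, so embedded SCTs can be addressed directly. A usage sketch, again assuming a hypothetical loaded certificate `cert`:

from cryptography import x509

scts = cert.extensions.get_extension_for_class(
    x509.PrecertificateSignedCertificateTimestamps
).value
if len(scts) > 0:
    sct = scts[0]  # indexing via the shared sequence methods
    print(sct.log_id, sct.timestamp)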
"indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + crl_constraints = [ + only_contains_user_certs, only_contains_ca_certs, + indirect_crl, only_contains_attribute_certs + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, only_contains_attribute_certs" + ) + + if ( + not any([ + only_contains_user_certs, only_contains_ca_certs, + indirect_crl, only_contains_attribute_certs, full_name, + relative_name, only_some_reasons + ]) + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value." + ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.only_contains_user_certs == other.only_contains_user_certs and + self.only_contains_ca_certs == other.only_contains_ca_certs and + self.only_some_reasons == other.only_some_reasons and + self.indirect_crl == other.indirect_crl and + self.only_contains_attribute_certs == + other.only_contains_attribute_certs + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + )) + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + only_contains_user_certs = utils.read_only_property( + "_only_contains_user_certs" + ) + only_contains_ca_certs = utils.read_only_property( + "_only_contains_ca_certs" + ) + only_some_reasons = utils.read_only_property("_only_some_reasons") + indirect_crl = utils.read_only_property("_indirect_crl") + only_contains_attribute_certs = utils.read_only_property( + "_only_contains_attribute_certs" + ) + + @utils.register_interface(ExtensionType) class UnrecognizedExtension(object): def __init__(self, oid, value): diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py b/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py index 26f389a..1233841 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py @@ -9,8 +9,6 @@ import ipaddress import warnings from email.utils import parseaddr -import idna - import six from six.moves import urllib_parse @@ -32,6 +30,20 @@ _GENERAL_NAMES = { } +def _lazy_import_idna(): + # Import idna lazily becase it allocates a decent amount of memory, and + # we're only using it in deprecated paths. 
@utils.register_interface(ExtensionType) class UnrecognizedExtension(object): def __init__(self, oid, value): diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py b/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py index 26f389a..1233841 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/general_name.py @@ -9,8 +9,6 @@ import ipaddress import warnings from email.utils import parseaddr -import idna - import six from six.moves import urllib_parse @@ -32,6 +30,20 @@ _GENERAL_NAMES = { } +def _lazy_import_idna(): + # Import idna lazily because it allocates a decent amount of memory, and + # we're only using it in deprecated paths. + try: + import idna + return idna + except ImportError: + raise ImportError( + "idna is not installed, but a deprecated feature that requires it" + " was used. See: https://cryptography.io/en/latest/faq/#importe" + "rror-idna-is-not-installed" + ) + + class UnsupportedGeneralNameType(Exception): def __init__(self, msg, type): super(UnsupportedGeneralNameType, self).__init__(msg) @@ -60,7 +72,7 @@ class RFC822Name(object): "This means unicode characters should be encoded via " "idna. Support for passing unicode strings (aka U-label) " "will be removed in a future version.", - utils.DeprecatedIn21, + utils.PersistentlyDeprecated2017, stacklevel=2, ) else: @@ -83,6 +95,7 @@ class RFC822Name(object): return instance def _idna_encode(self, value): + idna = _lazy_import_idna() _, address = parseaddr(value) parts = address.split(u"@") return parts[0] + "@" + idna.encode(parts[1]).decode("ascii") @@ -104,6 +117,7 @@ def _idna_encode(value): + idna = _lazy_import_idna() # Retain prefixes '*.' for common/alt names and '.' for name constraints for prefix in ['*.', '.']: if value.startswith(prefix): @@ -125,7 +139,7 @@ class DNSName(object): "This means unicode characters should be encoded via " "idna. Support for passing unicode strings (aka U-label) " "will be removed in a future version.", - utils.DeprecatedIn21, + utils.PersistentlyDeprecated2017, stacklevel=2, ) else: @@ -170,7 +184,7 @@ class UniformResourceIdentifier(object): "This means unicode characters should be encoded via " "idna. Support for passing unicode strings (aka U-label) " " will be removed in a future version.", - utils.DeprecatedIn21, + utils.PersistentlyDeprecated2017, stacklevel=2, ) else: @@ -187,11 +201,12 @@ class UniformResourceIdentifier(object): return instance def _idna_encode(self, value): + idna = _lazy_import_idna() parsed = urllib_parse.urlparse(value) if parsed.port: netloc = ( idna.encode(parsed.hostname) + - ":{0}".format(parsed.port).encode("ascii") + ":{}".format(parsed.port).encode("ascii") ).decode("ascii") else: netloc = idna.encode(parsed.hostname).decode("ascii") @@ -235,7 +250,7 @@ class DirectoryName(object): value = utils.read_only_property("_value") def __repr__(self): - return "<DirectoryName(value={0})>".format(self.value) + return "<DirectoryName(value={})>".format(self.value) def __eq__(self, other): if not isinstance(other, DirectoryName): @@ -261,7 +276,7 @@ class RegisteredID(object): value = utils.read_only_property("_value") def __repr__(self): - return "<RegisteredID(value={0})>".format(self.value) + return "<RegisteredID(value={})>".format(self.value) def __eq__(self, other): if not isinstance(other, RegisteredID): @@ -299,7 +314,7 @@ class IPAddress(object): value = utils.read_only_property("_value") def __repr__(self): - return "<IPAddress(value={0})>".format(self.value) + return "<IPAddress(value={})>".format(self.value) def __eq__(self, other): if not isinstance(other, IPAddress): @@ -329,7 +344,7 @@ class OtherName(object): value = utils.read_only_property("_value") def __repr__(self): - return "<OtherName(type_id={0}, value={1!r})>".format( + return "<OtherName(type_id={}, value={!r})>".format( self.type_id, self.value) def __eq__(self, other):
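With U-labels persistently deprecated, callers are expected to IDNA-encode internationalized names themselves; only the already-encoded A-label form avoids the warning. A sketch:

from cryptography import x509

x509.DNSName(u"bücher.example")         # U-label: warns (PersistentlyDeprecated2017)
x509.DNSName(u"xn--bcher-kva.example")  # A-label: accepted silently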
diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/name.py b/server/www/packages/packages-linux/x64/cryptography/x509/name.py index 5548eda..6816e06 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/name.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/name.py @@ -25,7 +25,7 @@ class _ASN1Type(Enum): BMPString = 30 -_ASN1_TYPE_TO_ENUM = dict((i.value, i) for i in _ASN1Type) +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} _SENTINEL = object() _NAMEOID_DEFAULT_TYPE = { NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, @@ -36,6 +36,44 @@ _NAMEOID_DEFAULT_TYPE = { NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, } +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME = { + NameOID.COMMON_NAME: 'CN', + NameOID.LOCALITY_NAME: 'L', + NameOID.STATE_OR_PROVINCE_NAME: 'ST', + NameOID.ORGANIZATION_NAME: 'O', + NameOID.ORGANIZATIONAL_UNIT_NAME: 'OU', + NameOID.COUNTRY_NAME: 'C', + NameOID.STREET_ADDRESS: 'STREET', + NameOID.DOMAIN_COMPONENT: 'DC', + NameOID.USER_ID: 'UID', +} + + +def _escape_dn_value(val): + """Escape special characters in RFC4514 Distinguished Name value.""" + + if not val: + return '' + + # See https://tools.ietf.org/html/rfc4514#section-2.4 + val = val.replace('\\', '\\\\') + val = val.replace('"', '\\"') + val = val.replace('+', '\\+') + val = val.replace(',', '\\,') + val = val.replace(';', '\\;') + val = val.replace('<', '\\<') + val = val.replace('>', '\\>') + val = val.replace('\0', '\\00') + + if val[0] in ('#', ' '): + val = '\\' + val + if val[-1] == ' ': + val = val[:-1] + '\\ ' + + return val + class NameAttribute(object): def __init__(self, oid, value, _type=_SENTINEL): @@ -58,9 +96,6 @@ class NameAttribute(object): "Country name must be a 2 character country code" ) - if len(value) == 0: - raise ValueError("Value cannot be an empty string") - # The appropriate ASN1 string type varies by OID and is defined across # multiple RFCs including 2459, 3280, and 5280. In general UTF8String # is preferred (2459), but 3280 and 5280 specify several OIDs with @@ -80,6 +115,16 @@ class NameAttribute(object): oid = utils.read_only_property("_oid") value = utils.read_only_property("_value") + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + + Use short attribute name if available, otherwise fall back to OID + dotted string. + """ + key = _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) + return '%s=%s' % (key, _escape_dn_value(self.value)) + def __eq__(self, other): if not isinstance(other, NameAttribute): return NotImplemented @@ -117,6 +162,15 @@ class RelativeDistinguishedName(object): def get_attributes_for_oid(self, oid): return [i for i in self if i.oid == oid] + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return '+'.join(attr.rfc4514_string() for attr in self._attributes) + def __eq__(self, other): if not isinstance(other, RelativeDistinguishedName): return NotImplemented @@ -136,7 +190,7 @@ class RelativeDistinguishedName(object): return len(self._attributes) def __repr__(self): - return "<RelativeDistinguishedName({0!r})>".format(list(self)) + return "<RelativeDistinguishedName({})>".format(self.rfc4514_string()) class Name(object): @@ -154,6 +208,20 @@ class Name(object): " or a list RelativeDistinguishedName" ) + def rfc4514_string(self): + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. According to RFC4514 section 2.1 the + RDNSequence must be reversed when converting to string representation. + """ + return ','.join( attr.rfc4514_string() for attr in reversed(self._attributes))
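Because the RDNSequence is reversed on output, a Name built in the usual country-first order renders most-specific-first, matching the docstring's example. A sketch:

from cryptography import x509
from cryptography.x509.oid import NameOID

name = x509.Name([
    x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
    x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"Foo Corp"),
    x509.NameAttribute(NameOID.COMMON_NAME, u"foobar.com"),
])
assert name.rfc4514_string() == 'CN=foobar.com,O=Foo Corp,C=US'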
+ """ + return ','.join( + attr.rfc4514_string() for attr in reversed(self._attributes)) + def get_attributes_for_oid(self, oid): return [i for i in self if i.oid == oid] @@ -187,4 +255,9 @@ class Name(object): return sum(len(rdn) for rdn in self._attributes) def __repr__(self): - return "".format(list(self)) + rdns = ','.join(attr.rfc4514_string() for attr in self._attributes) + + if six.PY2: + return "".format(rdns.encode('utf8')) + else: + return "".format(rdns) diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/ocsp.py b/server/www/packages/packages-linux/x64/cryptography/x509/ocsp.py index 2b0b1dc..4e0c985 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/ocsp.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/ocsp.py @@ -12,8 +12,9 @@ import six from cryptography import x509 from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ed25519, ed448 from cryptography.x509.base import ( - _UNIX_EPOCH, _convert_to_naive_utc_time, _reject_duplicate_extension + _EARLIEST_UTC_TIME, _convert_to_naive_utc_time, _reject_duplicate_extension ) @@ -40,7 +41,7 @@ class OCSPResponseStatus(Enum): UNAUTHORIZED = 6 -_RESPONSE_STATUS_TO_ENUM = dict((x.value, x) for x in OCSPResponseStatus) +_RESPONSE_STATUS_TO_ENUM = {x.value: x for x in OCSPResponseStatus} _ALLOWED_HASHES = ( hashes.SHA1, hashes.SHA224, hashes.SHA256, hashes.SHA384, hashes.SHA512 @@ -60,7 +61,7 @@ class OCSPCertStatus(Enum): UNKNOWN = 2 -_CERT_STATUS_TO_ENUM = dict((x.value, x) for x in OCSPCertStatus) +_CERT_STATUS_TO_ENUM = {x.value: x for x in OCSPCertStatus} def load_der_ocsp_request(data): @@ -154,9 +155,9 @@ class _SingleResponse(object): raise TypeError("revocation_time must be a datetime object") revocation_time = _convert_to_naive_utc_time(revocation_time) - if revocation_time <= _UNIX_EPOCH: - raise ValueError('The revocation_time must be after the unix' - ' epoch (1970 January 1).') + if revocation_time < _EARLIEST_UTC_TIME: + raise ValueError('The revocation_time must be on or after' + ' 1950 January 1.') if ( revocation_reason is not None and @@ -241,7 +242,13 @@ class OCSPResponseBuilder(object): if self._responder_id is None: raise ValueError("You must add a responder_id before signing") - if not isinstance(algorithm, hashes.HashAlgorithm): + if isinstance(private_key, + (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)): + if algorithm is not None: + raise ValueError( + "algorithm must be None when signing via ed25519 or ed448" + ) + elif not isinstance(algorithm, hashes.HashAlgorithm): raise TypeError("Algorithm must be a registered hash algorithm.") return backend.create_ocsp_response( @@ -314,6 +321,12 @@ class OCSPResponse(object): The ObjectIdentifier of the signature algorithm """ + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + """ + @abc.abstractproperty def signature(self): """ @@ -414,3 +427,9 @@ class OCSPResponse(object): """ The list of response extensions. Not single response extensions. """ + + @abc.abstractproperty + def single_extensions(self): + """ + The list of single response extensions. Not response extensions. 
+ """ diff --git a/server/www/packages/packages-linux/x64/cryptography/x509/oid.py b/server/www/packages/packages-linux/x64/cryptography/x509/oid.py index ec19007..c1e5dc5 100644 --- a/server/www/packages/packages-linux/x64/cryptography/x509/oid.py +++ b/server/www/packages/packages-linux/x64/cryptography/x509/oid.py @@ -96,6 +96,8 @@ class SignatureAlgorithmOID(object): DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") _SIG_OIDS_TO_HASH = { @@ -113,7 +115,9 @@ _SIG_OIDS_TO_HASH = { SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256() + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, } @@ -181,6 +185,8 @@ _OID_NAMES = { SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", @@ -196,6 +202,7 @@ _OID_NAMES = { ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( "signedCertificateTimestampList" ), + ExtensionOID.PRECERT_POISON: "ctPoison", CRLEntryExtensionOID.CRL_REASON: "cRLReason", CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", diff --git a/server/www/packages/packages-linux/x64/ldap3/__init__.py b/server/www/packages/packages-linux/x64/ldap3/__init__.py index f6f1666..9bb5435 100644 --- a/server/www/packages/packages-linux/x64/ldap3/__init__.py +++ b/server/www/packages/packages-linux/x64/ldap3/__init__.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -89,6 +89,7 @@ SCHEMA = 'SCHEMA' ALL = 'ALL' OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8' +OFFLINE_EDIR_9_1_4 = 'EDIR_9_1_4' OFFLINE_AD_2012_R2 = 'AD_2012_R2' OFFLINE_SLAPD_2_4 = 'SLAPD_2_4' OFFLINE_DS389_1_3_3 = 'DS389_1_3_3' diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py b/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py index c40f838..29aabb3 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py b/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py index d954e25..caffb53 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
diff --git a/server/www/packages/packages-linux/x64/ldap3/__init__.py b/server/www/packages/packages-linux/x64/ldap3/__init__.py index f6f1666..9bb5435 100644 --- a/server/www/packages/packages-linux/x64/ldap3/__init__.py +++ b/server/www/packages/packages-linux/x64/ldap3/__init__.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -89,6 +89,7 @@ SCHEMA = 'SCHEMA' ALL = 'ALL' OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8' +OFFLINE_EDIR_9_1_4 = 'EDIR_9_1_4' OFFLINE_AD_2012_R2 = 'AD_2012_R2' OFFLINE_SLAPD_2_4 = 'SLAPD_2_4' OFFLINE_DS389_1_3_3 = 'DS389_1_3_3' diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py b/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py index c40f838..29aabb3 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/__init__.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py b/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py index d954e25..caffb53 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/attrDef.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/attribute.py b/server/www/packages/packages-linux/x64/ldap3/abstract/attribute.py index 24f682c..5d33cc7 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/attribute.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/attribute.py @@ -1,285 +1,290 @@ -""" -""" - -# Created on 2014.01.06 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see <http://www.gnu.org/licenses/>. - -from os import linesep - -from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES -from ..core.exceptions import LDAPCursorError -from ..utils.repr import to_stdout_encoding -from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED - - -# noinspection PyUnresolvedReferences -class Attribute(object): - """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry - - Attribute object is read only - - - values: contain the processed attribute values - - raw_values': contain the unprocessed attribute values - - - """ - - def __init__(self, attr_def, entry, cursor): - self.key = attr_def.key - self.definition = attr_def - self.values = [] - self.raw_values = [] - self.response = None - self.entry = entry - self.cursor = cursor - other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None - self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names - - def __repr__(self): - if len(self.values) == 1: - r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) - elif len(self.values) > 1: - r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) - filler = ' ' * (len(self.key) + 6) - for value in self.values[1:]: - r += linesep + filler + to_stdout_encoding(value) - else: - r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('') - - return r - - def __str__(self): - if len(self.values) == 1: - return to_stdout_encoding(self.values[0]) - else: - return to_stdout_encoding(self.values) - - def __len__(self): - return len(self.values) - - def __iter__(self): - return self.values.__iter__() - - def __getitem__(self, item): - return self.values[item] - - def __eq__(self, other): - try: - if self.value == other: - return True - except Exception: - return False - - def __ne__(self, other): - return not self == other - - @property - def value(self): - """ - :return: The single value or a list of values of the attribute. 
- """ - if not self.values: - return None - - return self.values[0] if len(self.values) == 1 else self.values - - -class OperationalAttribute(Attribute): - """Operational attribute/values object. Include the search result of an - operational attribute in an entry - - OperationalAttribute object is read only - - - values: contains the processed attribute values - - raw_values: contains the unprocessed attribute values - - It may not have an AttrDef - - """ - - def __repr__(self): - if len(self.values) == 1: - r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) - elif len(self.values) > 1: - r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) - filler = ' ' * (len(self.key) + 6) - for value in sorted(self.values[1:]): - r += linesep + filler + to_stdout_encoding(value) - else: - r = '' - - return r - - -class WritableAttribute(Attribute): - def __repr__(self): - filler = ' ' * (len(self.key) + 6) - if len(self.values) == 1: - r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) - elif len(self.values) > 1: - r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) - for value in self.values[1:]: - r += linesep + filler + to_stdout_encoding(value) - else: - r = to_stdout_encoding(self.key) + to_stdout_encoding(': ') - if self.definition.name in self.entry._changes: - r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name]) - return r - - def __iadd__(self, other): - self.add(other) - return Ellipsis # hack to avoid calling set() in entry __setattr__ - - def __isub__(self, other): - self.delete(other) - return Ellipsis # hack to avoid calling set_value in entry __setattr__ - - def _update_changes(self, changes, remove_old=False): - # checks for friendly key in AttrDef and uses the real attribute name - if self.definition and self.definition.name: - key = self.definition.name - else: - key = self.key - - if key not in self.entry._changes or remove_old: # remove old changes (for removing attribute) - self.entry._changes[key] = [] - - self.entry._changes[key].append(changes) - if log_enabled(PROTOCOL): - log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn) - self.entry._state.set_status(STATUS_PENDING_CHANGES) - - def add(self, values): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) - # new value for attribute to commit with a MODIFY_ADD - if self.entry._state._initial_status == STATUS_VIRTUAL: - error_message = 'cannot add an attribute value in a new entry' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: - error_message = self.entry.entry_status + ' - cannot add attributes' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if values is None: - error_message = 'value to add cannot be None' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if values is not None: - validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values - if validated is False: - error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key) - if log_enabled(ERROR): - 
log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - elif validated is not True: # a valid LDAP value equivalent to the actual values - values = validated - self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values])) - - def set(self, values): - # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted - if log_enabled(PROTOCOL): - log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) - if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: - error_message = self.entry.entry_status + ' - cannot set attributes' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if values is None: - error_message = 'new value cannot be None' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values - if validated is False: - error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - elif validated is not True: # a valid LDAP value equivalent to the actual values - values = validated - self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True) - - def delete(self, values): - # value for attribute to delete in commit with a MODIFY_DELETE - if log_enabled(PROTOCOL): - log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) - if self.entry._state._initial_status == STATUS_VIRTUAL: - error_message = 'cannot delete an attribute value in a new entry' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: - error_message = self.entry.entry_status + ' - cannot delete attributes' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if values is None: - error_message = 'value to delete cannot be None' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if not isinstance(values, SEQUENCE_TYPES): - values = [values] - for single_value in values: - if single_value not in self.values: - error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values)) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - self._update_changes((MODIFY_DELETE, values)) - - def remove(self): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) - if self.entry._state._initial_status == STATUS_VIRTUAL: - error_message = 'cannot remove an attribute in a new entry' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: - error_message = self.entry.entry_status + ' - cannot remove attributes' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise 
LDAPCursorError(error_message) - self._update_changes((MODIFY_REPLACE, []), True) - - def discard(self): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) - del self.entry._changes[self.key] - if not self.entry._changes: - self.entry._state.set_status(self.entry._state._initial_status) - - @property - def virtual(self): - return False if len(self.values) else True - - @property - def changes(self): - if self.key in self.entry._changes: - return self.entry._changes[self.key] - return None +""" +""" + +# Created on 2014.01.06 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see <http://www.gnu.org/licenses/>. + +from os import linesep + +from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES +from ..core.exceptions import LDAPCursorError +from ..utils.repr import to_stdout_encoding +from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED + + +# noinspection PyUnresolvedReferences +class Attribute(object): + """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry + + Attribute object is read only + + - values: contain the processed attribute values + - raw_values': contain the unprocessed attribute values + + + """ + + def __init__(self, attr_def, entry, cursor): + self.key = attr_def.key + self.definition = attr_def + self.values = [] + self.raw_values = [] + self.response = None + self.entry = entry + self.cursor = cursor + other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None + self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names + + def __repr__(self): + if len(self.values) == 1: + r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) + elif len(self.values) > 1: + r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) + filler = ' ' * (len(self.key) + 6) + for value in self.values[1:]: + r += linesep + filler + to_stdout_encoding(value) + else: + r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('') + + return r + + def __str__(self): + if len(self.values) == 1: + return to_stdout_encoding(self.values[0]) + else: + return to_stdout_encoding(self.values) + + def __len__(self): + return len(self.values) + + def __iter__(self): + return self.values.__iter__() + + def __getitem__(self, item): + return self.values[item] + + def __getstate__(self): + cpy = dict(self.__dict__) + cpy['cursor'] = None + return cpy + + def __eq__(self, other): + try: + if self.value == other: + return True + except Exception: 
+ return False + + def __ne__(self, other): + return not self == other + + @property + def value(self): + """ + :return: The single value or a list of values of the attribute. + """ + if not self.values: + return None + + return self.values[0] if len(self.values) == 1 else self.values + + +class OperationalAttribute(Attribute): + """Operational attribute/values object. Include the search result of an + operational attribute in an entry + + OperationalAttribute object is read only + + - values: contains the processed attribute values + - raw_values: contains the unprocessed attribute values + + It may not have an AttrDef + + """ + + def __repr__(self): + if len(self.values) == 1: + r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) + elif len(self.values) > 1: + r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) + filler = ' ' * (len(self.key) + 6) + for value in sorted(self.values[1:]): + r += linesep + filler + to_stdout_encoding(value) + else: + r = '' + + return r + + +class WritableAttribute(Attribute): + def __repr__(self): + filler = ' ' * (len(self.key) + 6) + if len(self.values) == 1: + r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) + elif len(self.values) > 1: + r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) + for value in self.values[1:]: + r += linesep + filler + to_stdout_encoding(value) + else: + r = to_stdout_encoding(self.key) + to_stdout_encoding(': ') + if self.definition.name in self.entry._changes: + r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name]) + return r + + def __iadd__(self, other): + self.add(other) + return Ellipsis # hack to avoid calling set() in entry __setattr__ + + def __isub__(self, other): + self.delete(other) + return Ellipsis # hack to avoid calling set_value in entry __setattr__ + + def _update_changes(self, changes, remove_old=False): + # checks for friendly key in AttrDef and uses the real attribute name + if self.definition and self.definition.name: + key = self.definition.name + else: + key = self.key + + if key not in self.entry._changes or remove_old: # remove old changes (for removing attribute) + self.entry._changes[key] = [] + + self.entry._changes[key].append(changes) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn) + self.entry._state.set_status(STATUS_PENDING_CHANGES) + + def add(self, values): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) + # new value for attribute to commit with a MODIFY_ADD + if self.entry._state._initial_status == STATUS_VIRTUAL: + error_message = 'cannot perform a modify operation in a new entry' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: + error_message = self.entry.entry_status + ' - cannot add attributes' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if values is None: + error_message = 'value to add cannot be None' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if values is not None: + validated = self.definition.validate(values) # returns True, False or a 
value to substitute to the actual values + if validated is False: + error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + elif validated is not True: # a valid LDAP value equivalent to the actual values + values = validated + self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values])) + + def set(self, values): + # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted + if log_enabled(PROTOCOL): + log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) + if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: + error_message = self.entry.entry_status + ' - cannot set attributes' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if values is None: + error_message = 'new value cannot be None' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values + if validated is False: + error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + elif validated is not True: # a valid LDAP value equivalent to the actual values + values = validated + self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True) + + def delete(self, values): + # value for attribute to delete in commit with a MODIFY_DELETE + if log_enabled(PROTOCOL): + log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) + if self.entry._state._initial_status == STATUS_VIRTUAL: + error_message = 'cannot delete an attribute value in a new entry' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: + error_message = self.entry.entry_status + ' - cannot delete attributes' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if values is None: + error_message = 'value to delete cannot be None' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if not isinstance(values, SEQUENCE_TYPES): + values = [values] + for single_value in values: + if single_value not in self.values: + error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values)) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self._update_changes((MODIFY_DELETE, values)) + + def remove(self): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) + if self.entry._state._initial_status == STATUS_VIRTUAL: + error_message = 'cannot remove an attribute in a new entry' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, 
STATUS_READY_FOR_RENAMING]: + error_message = self.entry.entry_status + ' - cannot remove attributes' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self._update_changes((MODIFY_REPLACE, []), True) + + def discard(self): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) + del self.entry._changes[self.key] + if not self.entry._changes: + self.entry._state.set_status(self.entry._state._initial_status) + + @property + def virtual(self): + return False if len(self.values) else True + + @property + def changes(self): + if self.key in self.entry._changes: + return self.entry._changes[self.key] + return None diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/cursor.py b/server/www/packages/packages-linux/x64/ldap3/abstract/cursor.py index 275a384..9259a2c 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/cursor.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/cursor.py @@ -1,904 +1,906 @@ -""" -""" - -# Created on 2014.01.06 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see <http://www.gnu.org/licenses/>. -from collections import namedtuple -from copy import deepcopy -from datetime import datetime -from os import linesep -from time import sleep - -from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE -from .. 
import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter -from ..abstract import STATUS_PENDING_CHANGES -from .attribute import Attribute, OperationalAttribute, WritableAttribute -from .attrDef import AttrDef -from .objectDef import ObjectDef -from .entry import Entry, WritableEntry -from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError -from ..core.results import RESULT_SUCCESS -from ..utils.ciDict import CaseInsensitiveWithAliasDict -from ..utils.dn import safe_dn, safe_rdn -from ..utils.conv import to_raw -from ..utils.config import get_config_parameter -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED -from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY - -Operation = namedtuple('Operation', ('request', 'result', 'response')) - - -def _ret_search_value(value): - return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value - - -def _create_query_dict(query_text): - """ - Create a dictionary with query key:value definitions - query_text is a comma delimited key:value sequence - """ - query_dict = dict() - if query_text: - for arg_value_str in query_text.split(','): - if ':' in arg_value_str: - arg_value_list = arg_value_str.split(':') - query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip() - - return query_dict - - -class Cursor(object): - # entry_class and attribute_class define the type of entry and attribute used by the cursor - # entry_initial_status defines the initial status of a entry - # entry_class = Entry, must be defined in subclasses - # attribute_class = Attribute, must be defined in subclasses - # entry_initial_status = STATUS, must be defined in subclasses - - def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): - conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] - self.connection = connection - self.get_operational_attributes = get_operational_attributes - if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind - connection._fire_deferred() - - if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)): - object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class) - self.definition = object_def - if attributes: # checks if requested attributes are defined in ObjectDef - not_defined_attributes = [] - if isinstance(attributes, STRING_TYPES): - attributes = [attributes] - - for attribute in attributes: - if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def: - not_defined_attributes.append(attribute) - - if not_defined_attributes: - error_message = 'Attributes \'%s\' non in definition' % ', '.join(not_defined_attributes) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition]) - self.controls = controls - self.execution_time = None - self.entries = [] - self.schema = self.connection.server.schema - self._do_not_reset = False # used for refreshing entry in entry_refresh() without removing all entries from the Cursor - self._operation_history = list() # a list storing all the requests, results and responses for the last 
cursor operation - - def __repr__(self): - r = 'CURSOR : ' + self.__class__.__name__ + linesep - r += 'CONN : ' + str(self.connection) + linesep - r += 'DEFS : ' + ', '.join(self.definition._object_class) - if self.definition._auxiliary_class: - r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']' - r += linesep - # for attr_def in sorted(self.definition): - # r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', ' - # if r[-2] == ',': - # r = r[:-2] - # r += ']' + linesep - if hasattr(self, 'attributes'): - r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep - if isinstance(self, Reader): - if hasattr(self, 'base'): - r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep - if hasattr(self, '_query') and self._query: - r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep - if hasattr(self, 'validated_query') and self.validated_query: - r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep - if hasattr(self, 'query_filter') and self.query_filter: - r += 'FILTER : ' + repr(self.query_filter) + linesep - - if hasattr(self, 'execution_time') and self.execution_time: - r += 'ENTRIES: ' + str(len(self.entries)) - r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep - - if self.failed: - r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']' - - return r - - def __str__(self): - return self.__repr__() - - def __iter__(self): - return self.entries.__iter__() - - def __getitem__(self, item): - """Return indexed item, if index is not found then try to sequentially search in DN of entries. - If only one entry is found return it else raise a KeyError exception. The exception message - includes the number of entries that matches, if less than 10 entries match then show the DNs - in the exception message. - """ - try: - return self.entries[item] - except TypeError: - pass - - if isinstance(item, STRING_TYPES): - found = self.match_dn(item) - - if len(found) == 1: - return found[0] - elif len(found) > 1: - error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']')) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise KeyError(error_message) - - error_message = 'no entry found' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise KeyError(error_message) - - def __len__(self): - return len(self.entries) - - if str is not bytes: # Python 3 - def __bool__(self): # needed to make the cursor appears as existing in "if cursor:" even if there are no entries - return True - else: # Python 2 - def __nonzero__(self): - return True - - def _get_attributes(self, response, attr_defs, entry): - """Assign the result of the LDAP query to the Entry object dictionary. - - If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute. 
- - Returns the default value for missing attributes. - If the 'dereference_dn' in AttrDef is a ObjectDef then the attribute values are treated as distinguished name and the relevant entry is retrieved and stored in the attribute value. - - """ - conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') - conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] - attributes = CaseInsensitiveWithAliasDict() - used_attribute_names = set() - for attr in attr_defs: - attr_def = attr_defs[attr] - attribute_name = None - for attr_name in response['attributes']: - if attr_def.name.lower() == attr_name.lower(): - attribute_name = attr_name - break - - if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default - attribute = self.attribute_class(attr_def, entry, self) - attribute.response = response - attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None - if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list(): - attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name]) - else: - if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()): - attribute.values = response['attributes'][attribute_name] - else: - attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default] - if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued) - attribute.values = [attribute.values] - if attr_def.dereference_dn: # try to get object referenced in value - if attribute.values: - temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls) - temp_values = [] - for element in attribute.values: - if entry.entry_dn != element: - temp_values.append(temp_reader.search_object(element)) - else: - error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPObjectDereferenceError(error_message) - del temp_reader # remove the temporary Reader - attribute.values = temp_values - attributes[attribute.key] = attribute - if attribute.other_names: - attributes.set_alias(attribute.key, attribute.other_names) - if attr_def.other_names: - attributes.set_alias(attribute.key, attr_def.other_names) - used_attribute_names.add(attribute_name) - - if self.attributes: - used_attribute_names.update(self.attributes) - - for attribute_name in response['attributes']: - if attribute_name not in used_attribute_names: - operational_attribute = False - # check if the type is an operational attribute - if attribute_name in self.schema.attribute_types: - if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]: - operational_attribute = True - else: - operational_attribute = True - if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def: - error_message = 'attribute \'%s\' not in object class 
\'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self) - attribute.raw_values = response['raw_attributes'][attribute_name] - attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]] - if (conf_operational_attribute_prefix + attribute_name) not in attributes: - attributes[conf_operational_attribute_prefix + attribute_name] = attribute - - return attributes - - def match_dn(self, dn): - """Return entries with text in DN""" - matched = [] - for entry in self.entries: - if dn.lower() in entry.entry_dn.lower(): - matched.append(entry) - return matched - - def match(self, attributes, value): - """Return entries with text in one of the specified attributes""" - matched = [] - if not isinstance(attributes, SEQUENCE_TYPES): - attributes = [attributes] - - for entry in self.entries: - found = False - for attribute in attributes: - if attribute in entry: - for attr_value in entry[attribute].values: - if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower(): - found = True - elif value == attr_value: - found = True - if found: - matched.append(entry) - break - if found: - break - # checks raw values, tries to convert value to byte - raw_value = to_raw(value) - if isinstance(raw_value, (bytes, bytearray)): - for attr_value in entry[attribute].raw_values: - if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower(): - found = True - elif raw_value == attr_value: - found = True - if found: - matched.append(entry) - break - if found: - break - return matched - - def _create_entry(self, response): - if not response['type'] == 'searchResEntry': - return None - - entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition - entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry) - entry._state.entry_raw_attributes = deepcopy(response['raw_attributes']) - - entry._state.response = response - entry._state.read_time = datetime.now() - entry._state.set_status(self.entry_initial_status) - for attr in entry: # returns the whole attribute object - entry.__dict__[attr.key] = attr - - return entry - - def _execute_query(self, query_scope, attributes): - if not self.connection: - error_message = 'no connection established' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - old_query_filter = None - if query_scope == BASE: # requesting a single object so an always-valid filter is set - if hasattr(self, 'query_filter'): # only Reader has a query filter - old_query_filter = self.query_filter - self.query_filter = '(objectclass=*)' - else: - self._create_query_filter() - if log_enabled(PROTOCOL): - log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self) - with self.connection: - result = self.connection.search(search_base=self.base, - search_filter=self.query_filter, - search_scope=query_scope, - dereference_aliases=self.dereference_aliases, - attributes=attributes if attributes else list(self.attributes), - 
get_operational_attributes=self.get_operational_attributes, - controls=self.controls) - if not self.connection.strategy.sync: - response, result, request = self.connection.get_response(result, get_request=True) - else: - response = self.connection.response - result = self.connection.result - request = self.connection.request - - self._store_operation_in_history(request, result, response) - - if self._do_not_reset: # trick to not remove entries when using _refresh() - return self._create_entry(response[0]) - - self.entries = [] - for r in response: - entry = self._create_entry(r) - if entry is not None: - self.entries.append(entry) - if 'objectClass' in entry: - for object_class in entry.objectClass: - if self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class: - # add auxiliary class to object definition - self.definition._auxiliary_class.append(object_class) - self.definition._populate_attr_defs(object_class) - self.execution_time = datetime.now() - - if old_query_filter: # requesting a single object so an always-valid filter is set - self.query_filter = old_query_filter - - def remove(self, entry): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self) - self.entries.remove(entry) - - def _reset_history(self): - self._operation_history = list() - - def _store_operation_in_history(self, request, result, response): - self._operation_history.append(Operation(request, result, response)) - - @property - def operations(self): - return self._operation_history - - @property - def errors(self): - return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS] - - @property - def failed(self): - if hasattr(self, '_operation_history'): - return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history]) - - -class Reader(Cursor): - """Reader object to perform searches: - - :param connection: the LDAP connection object to use - :type connection: LDAPConnection - :param object_def: the ObjectDef of the LDAP object returned - :type object_def: ObjectDef - :param query: the simplified query (will be transformed in an LDAP filter) - :type query: str - :param base: starting base of the search - :type base: str - :param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR) - :type components_in_and: bool - :param sub_tree: specify if the search must be performed ad Single Level (False) or Whole SubTree (True) - :type sub_tree: bool - :param get_operational_attributes: specify if operational attributes are returned or not - :type get_operational_attributes: bool - :param controls: controls to be used in search - :type controls: tuple - - """ - entry_class = Entry # entries are read_only - attribute_class = Attribute # attributes are read_only - entry_initial_status = STATUS_READ - - def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): - Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) - self._components_in_and = components_in_and - self.sub_tree = sub_tree - self._query = query - self.base = base - self.dereference_aliases = DEREF_ALWAYS - self.validated_query = None - self._query_dict = dict() - self._validated_query_dict = dict() - self.query_filter = None - self.reset() - - if log_enabled(BASIC): - log(BASIC, 
'instantiated Reader Cursor: <%r>', self) - - @property - def query(self): - return self._query - - @query.setter - def query(self, value): - self._query = value - self.reset() - - @property - def components_in_and(self): - return self._components_in_and - - @components_in_and.setter - def components_in_and(self, value): - self._components_in_and = value - self.reset() - - def clear(self): - """Clear the Reader search parameters - - """ - self.dereference_aliases = DEREF_ALWAYS - self._reset_history() - - def reset(self): - """Clear all the Reader parameters - - """ - self.clear() - self.validated_query = None - self._query_dict = dict() - self._validated_query_dict = dict() - self.execution_time = None - self.query_filter = None - self.entries = [] - self._create_query_filter() - - def _validate_query(self): - """Processes the text query and verifies that the requested friendly names are in the Reader dictionary - If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised - - """ - if not self._query_dict: - self._query_dict = _create_query_dict(self._query) - - query = '' - for d in sorted(self._query_dict): - attr = d[1:] if d[0] in '&|' else d - for attr_def in self.definition: - if ''.join(attr.split()).lower() == attr_def.key.lower(): - attr = attr_def.key - break - if attr in self.definition: - vals = sorted(self._query_dict[d].split(';')) - - query += (d[0] + attr if d[0] in '&|' else attr) + ': ' - for val in vals: - val = val.strip() - val_not = True if val[0] == '!' else False - val_search_operator = '=' # default - if val_not: - if val[1:].lstrip()[0] not in '=<>~': - value = val[1:].lstrip() - else: - val_search_operator = val[1:].lstrip()[0] - value = val[1:].lstrip()[1:] - else: - if val[0] not in '=<>~': - value = val.lstrip() - else: - val_search_operator = val[0] - value = val[1:].lstrip() - - if self.definition[attr].validate: - validated = self.definition[attr].validate(value) # returns True, False or a value to substitute to the actual values - if validated is False: - error_message = 'validation failed for attribute %s and value %s' % (d, val) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - elif validated is not True: # a valid LDAP value equivalent to the actual values - value = validated - if val_not: - query += '!' 
+ val_search_operator + str(value) - else: - query += val_search_operator + str(value) - - query += ';' - query = query[:-1] + ', ' - else: - error_message = 'attribute \'%s\' not in definition' % attr - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - self.validated_query = query[:-2] - self._validated_query_dict = _create_query_dict(self.validated_query) - - def _create_query_filter(self): - """Converts the query dictionary to the filter text""" - self.query_filter = '' - - if self.definition._object_class: - self.query_filter += '(&' - if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1: - self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')' - elif isinstance(self.definition._object_class, SEQUENCE_TYPES): - self.query_filter += '(&' - for object_class in self.definition._object_class: - self.query_filter += '(objectClass=' + object_class + ')' - self.query_filter += ')' - else: - error_message = 'object class must be a string or a list' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter - if 'objectclass' not in self._query.lower(): - self.query_filter += self._query + ')' # if objectclass not in filter adds from definition - else: - self.query_filter = self._query - return - elif self._query: # if a simplified filter is present - if not self.components_in_and: - self.query_filter += '(|' - elif not self.definition._object_class: - self.query_filter += '(&' - - self._validate_query() - - attr_counter = 0 - for attr in sorted(self._validated_query_dict): - attr_counter += 1 - multi = True if ';' in self._validated_query_dict[attr] else False - vals = sorted(self._validated_query_dict[attr].split(';')) - attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr] - if attr_def.pre_query: - modvals = [] - for val in vals: - modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:])) - vals = modvals - if multi: - if attr[0] in '&|': - self.query_filter += '(' + attr[0] - else: - self.query_filter += '(|' - - for val in vals: - if val[0] == '!': - self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))' - else: - self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')' - if multi: - self.query_filter += ')' - - if not self.components_in_and: - self.query_filter += '))' - else: - self.query_filter += ')' - - if not self.definition._object_class and attr_counter == 1: # removes unneeded starting filter - self.query_filter = self.query_filter[2: -1] - - if self.query_filter == '(|)' or self.query_filter == '(&)': # removes empty filter - self.query_filter = '' - else: # no query, remove unneeded leading (& - self.query_filter = self.query_filter[2:] - - def search(self, attributes=None): - """Perform the LDAP search - - :return: Entries found in search - - """ - self.clear() - query_scope = SUBTREE if self.sub_tree else LEVEL - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing search in <%s>', self) - self._execute_query(query_scope, attributes) - - return self.entries - - def search_object(self, entry_dn=None, attributes=None): # base must be a single dn - """Perform the LDAP search operation SINGLE_OBJECT scope - - :return: Entry found in search - - """ - if log_enabled(PROTOCOL): - log(PROTOCOL, 
'performing object search in <%s>', self) - self.clear() - if entry_dn: - old_base = self.base - self.base = entry_dn - self._execute_query(BASE, attributes) - self.base = old_base - else: - self._execute_query(BASE, attributes) - - return self.entries[0] if len(self.entries) > 0 else None - - def search_level(self, attributes=None): - """Perform the LDAP search operation with SINGLE_LEVEL scope - - :return: Entries found in search - - """ - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing single level search in <%s>', self) - self.clear() - self._execute_query(LEVEL, attributes) - - return self.entries - - def search_subtree(self, attributes=None): - """Perform the LDAP search operation WHOLE_SUBTREE scope - - :return: Entries found in search - - """ - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing whole subtree search in <%s>', self) - self.clear() - self._execute_query(SUBTREE, attributes) - - return self.entries - - def _entries_generator(self, responses): - for response in responses: - yield self._create_entry(response) - - def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None): - """Perform a paged search, can be called as an Iterator - - :param attributes: optional attributes to search - :param paged_size: number of entries returned in each search - :type paged_size: int - :param paged_criticality: specify if server must not execute the search if it is not capable of paging searches - :type paged_criticality: bool - :param generator: if True the paged searches are executed while generating the entries, - if False all the paged searches are execute before returning the generator - :type generator: bool - :return: Entries found in search - - """ - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size)) - if not self.connection: - error_message = 'no connection established' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - self.clear() - self._create_query_filter() - self.entries = [] - self.execution_time = datetime.now() - response = self.connection.extend.standard.paged_search(search_base=self.base, - search_filter=self.query_filter, - search_scope=SUBTREE if self.sub_tree else LEVEL, - dereference_aliases=self.dereference_aliases, - attributes=attributes if attributes else self.attributes, - get_operational_attributes=self.get_operational_attributes, - controls=self.controls, - paged_size=paged_size, - paged_criticality=paged_criticality, - generator=generator) - if generator: - return self._entries_generator(response) - else: - return list(self._entries_generator(response)) - - -class Writer(Cursor): - entry_class = WritableEntry - attribute_class = WritableAttribute - entry_initial_status = STATUS_WRITABLE - - @staticmethod - def from_cursor(cursor, connection=None, object_def=None, custom_validator=None): - if connection is None: - connection = cursor.connection - if object_def is None: - object_def = cursor.definition - writer = Writer(connection, object_def, attributes=cursor.attributes) - for entry in cursor.entries: - if isinstance(cursor, Reader): - entry.entry_writable(object_def, writer, custom_validator=custom_validator) - elif isinstance(cursor, Writer): - pass - else: - error_message = 'unknown cursor type %s' % str(type(cursor)) - if log_enabled(ERROR): - log(ERROR, '%s', error_message) - raise LDAPCursorError(error_message) - writer.execution_time = cursor.execution_time - if 
log_enabled(BASIC): - log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor) - return writer - - @staticmethod - def from_response(connection, object_def, response=None): - if response is None: - if not connection.strategy.sync: - error_message = 'with asynchronous strategies response must be specified' - if log_enabled(ERROR): - log(ERROR, '%s', error_message) - raise LDAPCursorError(error_message) - elif connection.response: - response = connection.response - else: - error_message = 'response not present' - if log_enabled(ERROR): - log(ERROR, '%s', error_message) - raise LDAPCursorError(error_message) - writer = Writer(connection, object_def) - - for resp in response: - if resp['type'] == 'searchResEntry': - entry = writer._create_entry(resp) - writer.entries.append(entry) - if log_enabled(BASIC): - log(BASIC, 'instantiated Writer Cursor <%r> from response', writer) - return writer - - def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): - Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) - self.dereference_aliases = DEREF_NEVER - - if log_enabled(BASIC): - log(BASIC, 'instantiated Writer Cursor: <%r>', self) - - def commit(self, refresh=True): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'committed changes for <%s>', self) - self._reset_history() - successful = True - for entry in self.entries: - if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False): - successful = False - - self.execution_time = datetime.now() - - return successful - - def discard(self): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'discarded changes for <%s>', self) - for entry in self.entries: - entry.entry_discard_changes() - - def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn - """Performs the LDAP search operation SINGLE_OBJECT scope - - :return: Entry found in search - - """ - if log_enabled(PROTOCOL): - log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self) - if not self.connection: - error_message = 'no connection established' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - response = [] - with self.connection: - counter = 0 - while counter < tries: - result = self.connection.search(search_base=entry_dn, - search_filter='(objectclass=*)', - search_scope=BASE, - dereference_aliases=DEREF_NEVER, - attributes=attributes if attributes else self.attributes, - get_operational_attributes=self.get_operational_attributes, - controls=controls) - if not self.connection.strategy.sync: - response, result, request = self.connection.get_response(result, get_request=True) - else: - response = self.connection.response - result = self.connection.result - request = self.connection.request - - if result['result'] in [RESULT_SUCCESS]: - break - sleep(seconds) - counter += 1 - self._store_operation_in_history(request, result, response) - - if len(response) == 1: - return self._create_entry(response[0]) - elif len(response) == 0: - return None - - error_message = 'more than 1 entry returned for a single object search' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - def new(self, dn): - if log_enabled(BASIC): - log(BASIC, 'creating new entry <%s> for <%s>', dn, self) - dn = safe_dn(dn) - for entry in self.entries: # checks 
if dn is already used in an cursor entry - if entry.entry_dn == dn: - error_message = 'dn already present in cursor' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - rdns = safe_rdn(dn, decompose=True) - entry = self.entry_class(dn, self) # defines a new empty Entry - for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual - entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self) - entry.__dict__[attr] = entry._state.attributes[attr] - entry.objectclass.set(self.definition._object_class) - for rdn in rdns: # adds virtual attributes from rdns in entry name (should be more than one with + syntax) - if rdn[0] in entry._state.definition._attributes: - rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding - if rdn_name not in entry._state.attributes: - entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self) - entry.__dict__[rdn_name] = entry._state.attributes[rdn_name] - entry.__dict__[rdn_name].set(rdn[1]) - else: - error_message = 'rdn type \'%s\' not in object class definition' % rdn[0] - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - entry._state.set_status(STATUS_VIRTUAL) # set intial status - entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING - self.entries.append(entry) - return entry - - def refresh_entry(self, entry, tries=4, seconds=2): - conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') - - self._do_not_reset = True - attr_list = [] - if log_enabled(PROTOCOL): - log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self) - for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes - if attr.lower().startswith(conf_operational_attribute_prefix.lower()): - continue - if entry._state.definition[attr].name: - attr_list.append(entry._state.definition[attr].name) - else: - attr_list.append(entry._state.definition[attr].key) - - temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds) # if any attributes is added adds only to the entry not to the definition - self._do_not_reset = False - if temp_entry: - temp_entry._state.origin = entry._state.origin - entry.__dict__.clear() - entry.__dict__['_state'] = temp_entry._state - for attr in entry._state.attributes: # returns the attribute key - entry.__dict__[attr] = entry._state.attributes[attr] - - for attr in entry.entry_attributes: # if any attribute of the class was deleted makes it virtual - if attr not in entry._state.attributes and attr in entry.entry_definition._attributes: - entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self) - entry.__dict__[attr] = entry._state.attributes[attr] - entry._state.set_status(entry._state._initial_status) - return True - return False +""" +""" + +# Created on 2014.01.06 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see <http://www.gnu.org/licenses/>. +from collections import namedtuple +from copy import deepcopy +from datetime import datetime +from os import linesep +from time import sleep + +from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE +from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter +from ..abstract import STATUS_PENDING_CHANGES +from .attribute import Attribute, OperationalAttribute, WritableAttribute +from .attrDef import AttrDef +from .objectDef import ObjectDef +from .entry import Entry, WritableEntry +from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError +from ..core.results import RESULT_SUCCESS +from ..utils.ciDict import CaseInsensitiveWithAliasDict +from ..utils.dn import safe_dn, safe_rdn +from ..utils.conv import to_raw +from ..utils.config import get_config_parameter +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED +from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY + +Operation = namedtuple('Operation', ('request', 'result', 'response')) + + +def _ret_search_value(value): + return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value + + +def _create_query_dict(query_text): + """ + Create a dictionary with query key:value definitions + query_text is a comma delimited key:value sequence + """ + query_dict = dict() + if query_text: + for arg_value_str in query_text.split(','): + if ':' in arg_value_str: + arg_value_list = arg_value_str.split(':') + query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip() + + return query_dict + + +class Cursor(object): + # entry_class and attribute_class define the type of entry and attribute used by the cursor + # entry_initial_status defines the initial status of an entry + # entry_class = Entry, must be defined in subclasses + # attribute_class = Attribute, must be defined in subclasses + # entry_initial_status = STATUS, must be defined in subclasses + + def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): + conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] + self.connection = connection + self.get_operational_attributes = get_operational_attributes + if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind + connection._fire_deferred() + + if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)): + if connection.closed: # try to open connection if closed to read schema + connection.bind() + object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class) + self.definition = object_def + if attributes: # checks if requested attributes are defined in ObjectDef + not_defined_attributes = [] + if isinstance(attributes, STRING_TYPES): + attributes = [attributes] + + for attribute in attributes: + if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def: +
not_defined_attributes.append(attribute) + + if not_defined_attributes: + error_message = 'Attributes \'%s\' not in definition' % ', '.join(not_defined_attributes) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition]) + self.controls = controls + self.execution_time = None + self.entries = [] + self.schema = self.connection.server.schema + self._do_not_reset = False # used for refreshing entry in entry_refresh() without removing all entries from the Cursor + self._operation_history = list() # a list storing all the requests, results and responses for the last cursor operation + + def __repr__(self): + r = 'CURSOR : ' + self.__class__.__name__ + linesep + r += 'CONN : ' + str(self.connection) + linesep + r += 'DEFS : ' + ', '.join(self.definition._object_class) + if self.definition._auxiliary_class: + r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']' + r += linesep + # for attr_def in sorted(self.definition): + # r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', ' + # if r[-2] == ',': + # r = r[:-2] + # r += ']' + linesep + if hasattr(self, 'attributes'): + r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep + if isinstance(self, Reader): + if hasattr(self, 'base'): + r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep + if hasattr(self, '_query') and self._query: + r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep + if hasattr(self, 'validated_query') and self.validated_query: + r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep + if hasattr(self, 'query_filter') and self.query_filter: + r += 'FILTER : ' + repr(self.query_filter) + linesep + + if hasattr(self, 'execution_time') and self.execution_time: + r += 'ENTRIES: ' + str(len(self.entries)) + r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep + + if self.failed: + r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']' + + return r + + def __str__(self): + return self.__repr__() + + def __iter__(self): + return self.entries.__iter__() + + def __getitem__(self, item): + """Return the indexed item; if the index is not found then sequentially search for the text in the DN of the entries. + If only one entry is found return it, else raise a KeyError exception. The exception message + includes the number of entries that match; if fewer than 10 entries match then the DNs are shown + in the exception message.
+ """ + try: + return self.entries[item] + except TypeError: + pass + + if isinstance(item, STRING_TYPES): + found = self.match_dn(item) + + if len(found) == 1: + return found[0] + elif len(found) > 1: + error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']')) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise KeyError(error_message) + + error_message = 'no entry found' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise KeyError(error_message) + + def __len__(self): + return len(self.entries) + + if str is not bytes: # Python 3 + def __bool__(self): # needed to make the cursor appears as existing in "if cursor:" even if there are no entries + return True + else: # Python 2 + def __nonzero__(self): + return True + + def _get_attributes(self, response, attr_defs, entry): + """Assign the result of the LDAP query to the Entry object dictionary. + + If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute. + + Returns the default value for missing attributes. + If the 'dereference_dn' in AttrDef is a ObjectDef then the attribute values are treated as distinguished name and the relevant entry is retrieved and stored in the attribute value. + + """ + conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') + conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] + attributes = CaseInsensitiveWithAliasDict() + used_attribute_names = set() + for attr in attr_defs: + attr_def = attr_defs[attr] + attribute_name = None + for attr_name in response['attributes']: + if attr_def.name.lower() == attr_name.lower(): + attribute_name = attr_name + break + + if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default + attribute = self.attribute_class(attr_def, entry, self) + attribute.response = response + attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None + if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list(): + attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name]) + else: + if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()): + attribute.values = response['attributes'][attribute_name] + else: + attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default] + if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued) + attribute.values = [attribute.values] + if attr_def.dereference_dn: # try to get object referenced in value + if attribute.values: + temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls) + temp_values = [] + for element in attribute.values: + if entry.entry_dn != element: + temp_values.append(temp_reader.search_object(element)) + else: + error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', 
error_message, self) + raise LDAPObjectDereferenceError(error_message) + del temp_reader # remove the temporary Reader + attribute.values = temp_values + attributes[attribute.key] = attribute + if attribute.other_names: + attributes.set_alias(attribute.key, attribute.other_names) + if attr_def.other_names: + attributes.set_alias(attribute.key, attr_def.other_names) + used_attribute_names.add(attribute_name) + + if self.attributes: + used_attribute_names.update(self.attributes) + + for attribute_name in response['attributes']: + if attribute_name not in used_attribute_names: + operational_attribute = False + # check if the type is an operational attribute + if attribute_name in self.schema.attribute_types: + if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]: + operational_attribute = True + else: + operational_attribute = True + if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def: + error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self) + attribute.raw_values = response['raw_attributes'][attribute_name] + attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]] + if (conf_operational_attribute_prefix + attribute_name) not in attributes: + attributes[conf_operational_attribute_prefix + attribute_name] = attribute + + return attributes + + def match_dn(self, dn): + """Return entries with text in DN""" + matched = [] + for entry in self.entries: + if dn.lower() in entry.entry_dn.lower(): + matched.append(entry) + return matched + + def match(self, attributes, value): + """Return entries with text in one of the specified attributes""" + matched = [] + if not isinstance(attributes, SEQUENCE_TYPES): + attributes = [attributes] + + for entry in self.entries: + found = False + for attribute in attributes: + if attribute in entry: + for attr_value in entry[attribute].values: + if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower(): + found = True + elif value == attr_value: + found = True + if found: + matched.append(entry) + break + if found: + break + # checks raw values, tries to convert value to byte + raw_value = to_raw(value) + if isinstance(raw_value, (bytes, bytearray)): + for attr_value in entry[attribute].raw_values: + if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower(): + found = True + elif raw_value == attr_value: + found = True + if found: + matched.append(entry) + break + if found: + break + return matched + + def _create_entry(self, response): + if not response['type'] == 'searchResEntry': + return None + + entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition + entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry) + entry._state.raw_attributes = deepcopy(response['raw_attributes']) + + 
entry._state.response = response + entry._state.read_time = datetime.now() + entry._state.set_status(self.entry_initial_status) + for attr in entry: # returns the whole attribute object + entry.__dict__[attr.key] = attr + + return entry + + def _execute_query(self, query_scope, attributes): + if not self.connection: + error_message = 'no connection established' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + old_query_filter = None + if query_scope == BASE: # requesting a single object so an always-valid filter is set + if hasattr(self, 'query_filter'): # only Reader has a query filter + old_query_filter = self.query_filter + self.query_filter = '(objectclass=*)' + else: + self._create_query_filter() + if log_enabled(PROTOCOL): + log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self) + with self.connection: + result = self.connection.search(search_base=self.base, + search_filter=self.query_filter, + search_scope=query_scope, + dereference_aliases=self.dereference_aliases, + attributes=attributes if attributes else list(self.attributes), + get_operational_attributes=self.get_operational_attributes, + controls=self.controls) + if not self.connection.strategy.sync: + response, result, request = self.connection.get_response(result, get_request=True) + else: + response = self.connection.response + result = self.connection.result + request = self.connection.request + + self._store_operation_in_history(request, result, response) + + if self._do_not_reset: # trick to not remove entries when using _refresh() + return self._create_entry(response[0]) + + self.entries = [] + for r in response: + entry = self._create_entry(r) + if entry is not None: + self.entries.append(entry) + if 'objectClass' in entry: + for object_class in entry.objectClass: + if self.schema and self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class: + # add auxiliary class to object definition + self.definition._auxiliary_class.append(object_class) + self.definition._populate_attr_defs(object_class) + self.execution_time = datetime.now() + + if old_query_filter: # requesting a single object so an always-valid filter is set + self.query_filter = old_query_filter + + def remove(self, entry): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self) + self.entries.remove(entry) + + def _reset_history(self): + self._operation_history = list() + + def _store_operation_in_history(self, request, result, response): + self._operation_history.append(Operation(request, result, response)) + + @property + def operations(self): + return self._operation_history + + @property + def errors(self): + return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS] + + @property + def failed(self): + if hasattr(self, '_operation_history'): + return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history]) + + +class Reader(Cursor): + """Reader object to perform searches: + + :param connection: the LDAP connection object to use + :type connection: LDAPConnection + :param object_def: the ObjectDef of the LDAP object returned + :type object_def: ObjectDef + :param query: the simplified query (will be transformed in an LDAP filter) + :type query: str + :param base: starting base of the search + :type base: str + :param components_in_and: specify if assertions in 
the query must all be satisfied or not (AND/OR) + :type components_in_and: bool + :param sub_tree: specify if the search must be performed at Single Level (False) or Whole SubTree (True) + :type sub_tree: bool + :param get_operational_attributes: specify if operational attributes are returned or not + :type get_operational_attributes: bool + :param controls: controls to be used in search + :type controls: tuple + + """ + entry_class = Entry # entries are read_only + attribute_class = Attribute # attributes are read_only + entry_initial_status = STATUS_READ + + def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): + Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) + self._components_in_and = components_in_and + self.sub_tree = sub_tree + self._query = query + self.base = base + self.dereference_aliases = DEREF_ALWAYS + self.validated_query = None + self._query_dict = dict() + self._validated_query_dict = dict() + self.query_filter = None + self.reset() + + if log_enabled(BASIC): + log(BASIC, 'instantiated Reader Cursor: <%r>', self) + + @property + def query(self): + return self._query + + @query.setter + def query(self, value): + self._query = value + self.reset() + + @property + def components_in_and(self): + return self._components_in_and + + @components_in_and.setter + def components_in_and(self, value): + self._components_in_and = value + self.reset() + + def clear(self): + """Clear the Reader search parameters + + """ + self.dereference_aliases = DEREF_ALWAYS + self._reset_history() + + def reset(self): + """Clear all the Reader parameters + + """ + self.clear() + self.validated_query = None + self._query_dict = dict() + self._validated_query_dict = dict() + self.execution_time = None + self.query_filter = None + self.entries = [] + self._create_query_filter() + + def _validate_query(self): + """Processes the text query and verifies that the requested friendly names are in the Reader dictionary. + If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised + + """ + if not self._query_dict: + self._query_dict = _create_query_dict(self._query) + + query = '' + for d in sorted(self._query_dict): + attr = d[1:] if d[0] in '&|' else d + for attr_def in self.definition: + if ''.join(attr.split()).lower() == attr_def.key.lower(): + attr = attr_def.key + break + if attr in self.definition: + vals = sorted(self._query_dict[d].split(';')) + + query += (d[0] + attr if d[0] in '&|' else attr) + ': ' + for val in vals: + val = val.strip() + val_not = True if val[0] == '!'
else False + val_search_operator = '=' # default + if val_not: + if val[1:].lstrip()[0] not in '=<>~': + value = val[1:].lstrip() + else: + val_search_operator = val[1:].lstrip()[0] + value = val[1:].lstrip()[1:] + else: + if val[0] not in '=<>~': + value = val.lstrip() + else: + val_search_operator = val[0] + value = val[1:].lstrip() + + if self.definition[attr].validate: + validated = self.definition[attr].validate(value) # returns True, False or a value to substitute to the actual values + if validated is False: + error_message = 'validation failed for attribute %s and value %s' % (d, val) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + elif validated is not True: # a valid LDAP value equivalent to the actual values + value = validated + if val_not: + query += '!' + val_search_operator + str(value) + else: + query += val_search_operator + str(value) + + query += ';' + query = query[:-1] + ', ' + else: + error_message = 'attribute \'%s\' not in definition' % attr + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self.validated_query = query[:-2] + self._validated_query_dict = _create_query_dict(self.validated_query) + + def _create_query_filter(self): + """Converts the query dictionary to the filter text""" + self.query_filter = '' + + if self.definition._object_class: + self.query_filter += '(&' + if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1: + self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')' + elif isinstance(self.definition._object_class, SEQUENCE_TYPES): + self.query_filter += '(&' + for object_class in self.definition._object_class: + self.query_filter += '(objectClass=' + object_class + ')' + self.query_filter += ')' + else: + error_message = 'object class must be a string or a list' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter + if 'objectclass' not in self._query.lower(): + self.query_filter += self._query + ')' # if objectclass not in filter adds from definition + else: + self.query_filter = self._query + return + elif self._query: # if a simplified filter is present + if not self.components_in_and: + self.query_filter += '(|' + elif not self.definition._object_class: + self.query_filter += '(&' + + self._validate_query() + + attr_counter = 0 + for attr in sorted(self._validated_query_dict): + attr_counter += 1 + multi = True if ';' in self._validated_query_dict[attr] else False + vals = sorted(self._validated_query_dict[attr].split(';')) + attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr] + if attr_def.pre_query: + modvals = [] + for val in vals: + modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:])) + vals = modvals + if multi: + if attr[0] in '&|': + self.query_filter += '(' + attr[0] + else: + self.query_filter += '(|' + + for val in vals: + if val[0] == '!': + self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))' + else: + self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')' + if multi: + self.query_filter += ')' + + if not self.components_in_and: + self.query_filter += '))' + else: + self.query_filter += ')' + + if not self.definition._object_class and attr_counter == 1: # removes 
unneeded starting filter + self.query_filter = self.query_filter[2: -1] + + if self.query_filter == '(|)' or self.query_filter == '(&)': # removes empty filter + self.query_filter = '' + else: # no query, remove unneeded leading (& + self.query_filter = self.query_filter[2:] + + def search(self, attributes=None): + """Perform the LDAP search + + :return: Entries found in search + + """ + self.clear() + query_scope = SUBTREE if self.sub_tree else LEVEL + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing search in <%s>', self) + self._execute_query(query_scope, attributes) + + return self.entries + + def search_object(self, entry_dn=None, attributes=None): # base must be a single dn + """Perform the LDAP search operation with SINGLE_OBJECT scope + + :return: Entry found in search + + """ + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing object search in <%s>', self) + self.clear() + if entry_dn: + old_base = self.base + self.base = entry_dn + self._execute_query(BASE, attributes) + self.base = old_base + else: + self._execute_query(BASE, attributes) + + return self.entries[0] if len(self.entries) > 0 else None + + def search_level(self, attributes=None): + """Perform the LDAP search operation with SINGLE_LEVEL scope + + :return: Entries found in search + + """ + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing single level search in <%s>', self) + self.clear() + self._execute_query(LEVEL, attributes) + + return self.entries + + def search_subtree(self, attributes=None): + """Perform the LDAP search operation with WHOLE_SUBTREE scope + + :return: Entries found in search + + """ + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing whole subtree search in <%s>', self) + self.clear() + self._execute_query(SUBTREE, attributes) + + return self.entries + + def _entries_generator(self, responses): + for response in responses: + yield self._create_entry(response) + + def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None): + """Perform a paged search; can be used as an iterator + + :param attributes: optional attributes to search + :param paged_size: number of entries returned in each search + :type paged_size: int + :param paged_criticality: specify if server must not execute the search if it is not capable of paging searches + :type paged_criticality: bool + :param generator: if True the paged searches are executed while generating the entries, + if False all the paged searches are executed before returning the generator + :type generator: bool + :return: Entries found in search + + """ + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size)) + if not self.connection: + error_message = 'no connection established' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + self.clear() + self._create_query_filter() + self.entries = [] + self.execution_time = datetime.now() + response = self.connection.extend.standard.paged_search(search_base=self.base, + search_filter=self.query_filter, + search_scope=SUBTREE if self.sub_tree else LEVEL, + dereference_aliases=self.dereference_aliases, + attributes=attributes if attributes else self.attributes, + get_operational_attributes=self.get_operational_attributes, + controls=self.controls, + paged_size=paged_size, + paged_criticality=paged_criticality, + generator=generator) + if generator: + return self._entries_generator(response) + else: + return list(self._entries_generator(response)) + +
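For orientation, here is a minimal usage sketch of the abstraction layer above (not part of this commit): a Reader is built from an ObjectDef and a simplified query, then promoted to the Writer defined next. The host, base DN, credentials and attribute values are hypothetical placeholders, and a reachable LDAP server whose schema defines inetOrgPerson is assumed.

    from ldap3 import Server, Connection, ObjectDef, Reader, Writer

    # Hypothetical directory and credentials.
    conn = Connection(Server('ldap://ldap.example.com'),
                      user='cn=admin,dc=example,dc=com',
                      password='secret',
                      auto_bind=True)

    person = ObjectDef('inetOrgPerson', conn)  # AttrDefs come from the server schema

    # 'sn: Smith' is the simplified query syntax parsed by _validate_query();
    # _create_query_filter() turns it into (&(objectClass=inetOrgPerson)(sn=Smith)).
    r = Reader(conn, person, 'ou=people,dc=example,dc=com', 'sn: Smith')
    entries = r.search()  # whole-subtree search by default (sub_tree=True)

    # Writer.from_cursor() promotes the read-only cursor to a writable one;
    # attribute changes are staged on the entries and sent with commit().
    w = Writer.from_cursor(r)
    if w.entries:
        w.entries[0].telephoneNumber += '555-0100'
        w.commit()
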
+class Writer(Cursor): + entry_class = WritableEntry + attribute_class = WritableAttribute + entry_initial_status = STATUS_WRITABLE + + @staticmethod + def from_cursor(cursor, connection=None, object_def=None, custom_validator=None): + if connection is None: + connection = cursor.connection + if object_def is None: + object_def = cursor.definition + writer = Writer(connection, object_def, attributes=cursor.attributes) + for entry in cursor.entries: + if isinstance(cursor, Reader): + entry.entry_writable(object_def, writer, custom_validator=custom_validator) + elif isinstance(cursor, Writer): + pass + else: + error_message = 'unknown cursor type %s' % str(type(cursor)) + if log_enabled(ERROR): + log(ERROR, '%s', error_message) + raise LDAPCursorError(error_message) + writer.execution_time = cursor.execution_time + if log_enabled(BASIC): + log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor) + return writer + + @staticmethod + def from_response(connection, object_def, response=None): + if response is None: + if not connection.strategy.sync: + error_message = 'with asynchronous strategies response must be specified' + if log_enabled(ERROR): + log(ERROR, '%s', error_message) + raise LDAPCursorError(error_message) + elif connection.response: + response = connection.response + else: + error_message = 'response not present' + if log_enabled(ERROR): + log(ERROR, '%s', error_message) + raise LDAPCursorError(error_message) + writer = Writer(connection, object_def) + + for resp in response: + if resp['type'] == 'searchResEntry': + entry = writer._create_entry(resp) + writer.entries.append(entry) + if log_enabled(BASIC): + log(BASIC, 'instantiated Writer Cursor <%r> from response', writer) + return writer + + def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): + Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) + self.dereference_aliases = DEREF_NEVER + + if log_enabled(BASIC): + log(BASIC, 'instantiated Writer Cursor: <%r>', self) + + def commit(self, refresh=True): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'committed changes for <%s>', self) + self._reset_history() + successful = True + for entry in self.entries: + if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False): + successful = False + + self.execution_time = datetime.now() + + return successful + + def discard(self): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'discarded changes for <%s>', self) + for entry in self.entries: + entry.entry_discard_changes() + + def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn + """Performs the LDAP search operation SINGLE_OBJECT scope + + :return: Entry found in search + + """ + if log_enabled(PROTOCOL): + log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self) + if not self.connection: + error_message = 'no connection established' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + response = [] + with self.connection: + counter = 0 + while counter < tries: + result = self.connection.search(search_base=entry_dn, + search_filter='(objectclass=*)', + search_scope=BASE, + dereference_aliases=DEREF_NEVER, + attributes=attributes if attributes else self.attributes, + get_operational_attributes=self.get_operational_attributes, + controls=controls) + if not 
self.connection.strategy.sync: + response, result, request = self.connection.get_response(result, get_request=True) + else: + response = self.connection.response + result = self.connection.result + request = self.connection.request + + if result['result'] in [RESULT_SUCCESS]: + break + sleep(seconds) + counter += 1 + self._store_operation_in_history(request, result, response) + + if len(response) == 1: + return self._create_entry(response[0]) + elif len(response) == 0: + return None + + error_message = 'more than 1 entry returned for a single object search' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + def new(self, dn): + if log_enabled(BASIC): + log(BASIC, 'creating new entry <%s> for <%s>', dn, self) + dn = safe_dn(dn) + for entry in self.entries: # checks if dn is already used in a cursor entry + if entry.entry_dn == dn: + error_message = 'dn already present in cursor' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + rdns = safe_rdn(dn, decompose=True) + entry = self.entry_class(dn, self) # defines a new empty Entry + for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual + entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self) + entry.__dict__[attr] = entry._state.attributes[attr] + entry.objectclass.set(self.definition._object_class) + for rdn in rdns: # adds virtual attributes from rdns in entry name (may be more than one with + syntax) + if rdn[0] in entry._state.definition._attributes: + rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding + if rdn_name not in entry._state.attributes: + entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self) + entry.__dict__[rdn_name] = entry._state.attributes[rdn_name] + entry.__dict__[rdn_name].set(rdn[1]) + else: + error_message = 'rdn type \'%s\' not in object class definition' % rdn[0] + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + entry._state.set_status(STATUS_VIRTUAL) # set initial status + entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES.
If mandatory attributes are missing, status is reverted to MANDATORY_MISSING + self.entries.append(entry) + return entry + + def refresh_entry(self, entry, tries=4, seconds=2): + conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') + + self._do_not_reset = True + attr_list = [] + if log_enabled(PROTOCOL): + log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self) + for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes + if attr.lower().startswith(conf_operational_attribute_prefix.lower()): + continue + if entry._state.definition[attr].name: + attr_list.append(entry._state.definition[attr].name) + else: + attr_list.append(entry._state.definition[attr].key) + + temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds) # if any attribute is added, it is added only to the entry, not to the definition + self._do_not_reset = False + if temp_entry: + temp_entry._state.origin = entry._state.origin + entry.__dict__.clear() + entry.__dict__['_state'] = temp_entry._state + for attr in entry._state.attributes: # returns the attribute key + entry.__dict__[attr] = entry._state.attributes[attr] + + for attr in entry.entry_attributes: # if any attribute of the class was deleted, make it virtual + if attr not in entry._state.attributes and attr in entry.entry_definition._attributes: + entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self) + entry.__dict__[attr] = entry._state.attributes[attr] + entry._state.set_status(entry._state._initial_status) + return True + return False diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/entry.py b/server/www/packages/packages-linux/x64/ldap3/abstract/entry.py index 18c0420..b73c50f 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/entry.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/entry.py @@ -1,671 +1,699 @@ -""" -""" - -# Created on 2016.08.19 -# -# Author: Giovanni Cannata -# -# Copyright 2016 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see <http://www.gnu.org/licenses/>. - - -import json -try: - from collections import OrderedDict -except ImportError: - from ..utils.ordDict import OrderedDict # for Python 2.6 - -from os import linesep - -from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE -from .attribute import WritableAttribute -from .objectDef import ObjectDef -from .attrDef import AttrDef -from ..core.exceptions import LDAPKeyError, LDAPCursorError -from ..utils.conv import check_json_dict, format_json, prepare_for_stream -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header -from ..utils.dn import safe_dn, safe_rdn, to_dn -from ..utils.repr import to_stdout_encoding -from ..utils.ciDict import CaseInsensitiveWithAliasDict -from ..utils.config import get_config_parameter -from .
import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\ - STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES -from ..core.results import RESULT_SUCCESS -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED - - -class EntryState(object): - """Contains data on the status of the entry. Does not pollute the Entry __dict__. - - """ - - def __init__(self, dn, cursor): - self.dn = dn - self._initial_status = None - self._to = None # used for move and rename - self.status = STATUS_INIT - self.attributes = CaseInsensitiveWithAliasDict() - self.raw_attributes = CaseInsensitiveWithAliasDict() - self.response = None - self.cursor = cursor - self.origin = None # reference to the original read-only entry (set when made writable). Needed to update attributes in read-only when modified (only if both refer the same server) - self.read_time = None - self.changes = OrderedDict() # includes changes to commit in a writable entry - if cursor.definition: - self.definition = cursor.definition - else: - self.definition = None - - def __repr__(self): - if self.__dict__ and self.dn is not None: - r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '') + linesep - r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep - r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '') + linesep - r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep - r += 'response: ' + ('present' if self.response else '') + linesep - r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '') + linesep - return r - else: - return object.__repr__(self) - - def __str__(self): - return self.__repr__() - - def set_status(self, status): - conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')] - if status not in STATUSES: - error_message = 'invalid entry status ' + str(status) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - if status in INITIAL_STATUSES: - self._initial_status = status - self.status = status - if status == STATUS_DELETED: - self._initial_status = STATUS_VIRTUAL - if status == STATUS_COMMITTED: - self._initial_status = STATUS_WRITABLE - if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries - for attr in self.definition._attributes: - if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def: - if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes: - self.status = STATUS_MANDATORY_MISSING - break - - -class EntryBase(object): - """The Entry object contains a single LDAP entry. - Attributes can be accessed either by sequence, by assignment - or as dictionary keys. Keys are not case sensitive. 
- - The Entry object is read only - - - The DN is retrieved by entry_dn - - The cursor reference is in _cursor - - Raw attributes values are retrieved with _raw_attributes and the _raw_attribute() methods - """ - - def __init__(self, dn, cursor): - self.__dict__['_state'] = EntryState(dn, cursor) - - def __repr__(self): - if self.__dict__ and self.entry_dn is not None: - r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '') + linesep - if self._state.attributes: - for attr in sorted(self._state.attributes): - if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes): - r += ' ' + repr(self._state.attributes[attr]) + linesep - return r - else: - return object.__repr__(self) - - def __str__(self): - return self.__repr__() - - def __iter__(self): - for attribute in self._state.attributes: - yield self._state.attributes[attribute] - # raise StopIteration # deprecated in PEP 479 - return - - def __contains__(self, item): - try: - self.__getitem__(item) - return True - except LDAPKeyError: - return False - - def __getattr__(self, item): - if isinstance(item, STRING_TYPES): - if item == '_state': - return self.__dict__['_state'] - item = ''.join(item.split()).lower() - attr_found = None - for attr in self._state.attributes.keys(): - if item == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.aliases(): - if item == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.keys(): - if item + ';binary' == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.aliases(): - if item + ';binary' == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.keys(): - if item + ';range' in attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.aliases(): - if item + ';range' in attr.lower(): - attr_found = attr - break - if not attr_found: - error_message = 'attribute \'%s\' not found' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - return self._state.attributes[attr] - error_message = 'attribute name must be a string' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - def __setattr__(self, item, value): - if item in self._state.attributes: - error_message = 'attribute \'%s\' is read only' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - else: - error_message = 'entry is read only, cannot add \'%s\'' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - def __getitem__(self, item): - if isinstance(item, STRING_TYPES): - item = ''.join(item.split()).lower() - attr_found = None - for attr in self._state.attributes.keys(): - if item == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.aliases(): - if item == attr.lower(): - attr_found = attr - break - if not attr_found: - for attr in self._state.attributes.keys(): - if item + ';binary' == attr.lower(): - attr_found = attr - 
break - if not attr_found: - for attr in self._state.attributes.aliases(): - if item + ';binary' == attr.lower(): - attr_found = attr - break - if not attr_found: - error_message = 'key \'%s\' not found' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPKeyError(error_message) - return self._state.attributes[attr] - - error_message = 'key must be a string' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPKeyError(error_message) - - def __eq__(self, other): - if isinstance(other, EntryBase): - return self.entry_dn == other.entry_dn - - return False - - def __lt__(self, other): - if isinstance(other, EntryBase): - return self.entry_dn <= other.entry_dn - - return False - - @property - def entry_dn(self): - return self._state.dn - - @property - def entry_cursor(self): - return self._state.cursor - - @property - def entry_status(self): - return self._state.status - - @property - def entry_definition(self): - return self._state.definition - - @property - def entry_raw_attributes(self): - return self._state.entry_raw_attributes - - def entry_raw_attribute(self, name): - """ - - :param name: name of the attribute - :return: raw (unencoded) value of the attribute, None if attribute is not found - """ - return self._state.entry_raw_attributes[name] if name in self._state.entry_raw_attributes else None - - @property - def entry_mandatory_attributes(self): - return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory] - - @property - def entry_attributes(self): - return list(self._state.attributes.keys()) - - @property - def entry_attributes_as_dict(self): - return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items()) - - @property - def entry_read_time(self): - return self._state.read_time - - @property - def _changes(self): - return self._state.changes - - def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True): - json_entry = dict() - json_entry['dn'] = self.entry_dn - if checked_attributes: - if not include_empty: - # needed for python 2.6 compatibility - json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key]) - else: - json_entry['attributes'] = self.entry_attributes_as_dict - if raw: - if not include_empty: - # needed for python 2.6 compatibility - json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key]) - else: - json_entry['raw'] = dict(self.entry_raw_attributes) - - if str is bytes: # Python 2 - check_json_dict(json_entry) - - json_output = json.dumps(json_entry, - ensure_ascii=True, - sort_keys=sort, - indent=indent, - check_circular=True, - default=format_json, - separators=(',', ': ')) - - if stream: - stream.write(json_output) - - return json_output - - def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None): - ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order) - ldif_lines = add_ldif_header(ldif_lines) - line_separator = line_separator or linesep - ldif_output = line_separator.join(ldif_lines) - if stream: - if stream.tell() == 0: - header = add_ldif_header(['-'])[0] - stream.write(prepare_for_stream(header + line_separator + line_separator)) - 
stream.write(prepare_for_stream(ldif_output + line_separator + line_separator)) - return ldif_output - - -class Entry(EntryBase): - """The Entry object contains a single LDAP entry. - Attributes can be accessed either by sequence, by assignment - or as dictionary keys. Keys are not case sensitive. - - The Entry object is read only - - - The DN is retrieved by entry_dn - - The Reader reference is in _cursor() - - Raw attributes values are retrieved by the _ra_attributes and - _raw_attribute() methods - - """ - def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None): - if not self.entry_cursor.schema: - error_message = 'schema must be available to make an entry writable' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - # returns a new WritableEntry and its Writer cursor - if object_def is None: - if self.entry_cursor.definition._object_class: - object_def = self.entry_definition._object_class - auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else []) - elif 'objectclass' in self: - object_def = self.objectclass.values - - if not object_def: - error_message = 'object class must be specified to make an entry writable' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - if not isinstance(object_def, ObjectDef): - object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class) - - if attributes: - if isinstance(attributes, STRING_TYPES): - attributes = [attributes] - - if isinstance(attributes, SEQUENCE_TYPES): - for attribute in attributes: - if attribute not in object_def._attributes: - error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - else: - attributes = [] - - if not writer_cursor: - from .cursor import Writer # local import to avoid circular reference in import at startup - writable_cursor = Writer(self.entry_cursor.connection, object_def) - else: - writable_cursor = writer_cursor - - if attributes: # force reading of attributes - writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes) - else: - writable_entry = writable_cursor._create_entry(self._state.response) - writable_cursor.entries.append(writable_entry) - writable_entry._state.read_time = self.entry_read_time - writable_entry._state.origin = self # reference to the original read-only entry - # checks original entry for custom definitions in AttrDefs - for attr in writable_entry._state.origin.entry_definition._attributes: - original_attr = writable_entry._state.origin.entry_definition._attributes[attr] - if attr != original_attr.name and attr not in writable_entry._state.attributes: - old_attr_def = writable_entry.entry_definition._attributes[original_attr.name] - new_attr_def = AttrDef(original_attr.name, - key=attr, - validate=original_attr.validate, - pre_query=original_attr.pre_query, - post_query=original_attr.post_query, - default=original_attr.default, - dereference_dn=original_attr.dereference_dn, - description=original_attr.description, - mandatory=old_attr_def.mandatory, # keeps value read from schema - single_value=old_attr_def.single_value, # keeps value read from schema - alias=original_attr.other_names) - object_def = 
writable_entry.entry_definition - object_def -= old_attr_def - object_def += new_attr_def - # updates attribute name in entry attributes - new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor) - if original_attr.name in writable_entry._state.attributes: - new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names - new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values - new_attr.values = writable_entry._state.attributes[original_attr.name].values - new_attr.response = writable_entry._state.attributes[original_attr.name].response - writable_entry._state.attributes[attr] = new_attr - # writable_entry._state.attributes.set_alias(attr, new_attr.other_names) - del writable_entry._state.attributes[original_attr.name] - - writable_entry._state.set_status(STATUS_WRITABLE) - return writable_entry - - -class WritableEntry(EntryBase): - def __setitem__(self, key, value): - if value is not Ellipsis: # hack for using implicit operators in writable attributes - self.__setattr__(key, value) - - def __setattr__(self, item, value): - conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] - if item == '_state' and isinstance(value, EntryState): - self.__dict__['_state'] = value - return - - if value is not Ellipsis: # hack for using implicit operators in writable attributes - # checks if using an alias - if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def: - if item not in self._state.attributes: # setting value to an attribute still without values - new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor) - self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict - self._state.attributes[item].set(value) # try to add to new_values - else: - error_message = 'attribute \'%s\' not defined' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - def __getattr__(self, item): - if isinstance(item, STRING_TYPES): - if item == '_state': - return self.__dict__['_state'] - item = ''.join(item.split()).lower() - for attr in self._state.attributes.keys(): - if item == attr.lower(): - return self._state.attributes[attr] - for attr in self._state.attributes.aliases(): - if item == attr.lower(): - return self._state.attributes[attr] - if item in self.entry_definition._attributes: # item is a new attribute to commit, creates the AttrDef and add to the attributes to retrive - self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor) - self.entry_cursor.attributes.add(item) - return self._state.attributes[item] - error_message = 'attribute \'%s\' not defined' % item - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - else: - error_message = 'attribute name must be a string' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - - @property - def entry_virtual_attributes(self): - return [attr for attr in self.entry_attributes if self[attr].virtual] - - def entry_commit_changes(self, refresh=True, controls=None, clear_history=True): - if clear_history: - self.entry_cursor._reset_history() - - if self.entry_status == STATUS_READY_FOR_DELETION: - result = 
self.entry_cursor.connection.delete(self.entry_dn, controls) - if not self.entry_cursor.connection.strategy.sync: - response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) - else: - response = self.entry_cursor.connection.response - result = self.entry_cursor.connection.result - request = self.entry_cursor.connection.request - self.entry_cursor._store_operation_in_history(request, result, response) - if result['result'] == RESULT_SUCCESS: - dn = self.entry_dn - if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry - cursor = self._state.origin.entry_cursor - self._state.origin.__dict__.clear() - self._state.origin.__dict__['_state'] = EntryState(dn, cursor) - self._state.origin._state.set_status(STATUS_DELETED) - cursor = self.entry_cursor - self.__dict__.clear() - self._state = EntryState(dn, cursor) - self._state.set_status(STATUS_DELETED) - return True - return False - elif self.entry_status == STATUS_READY_FOR_MOVING: - result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to) - if not self.entry_cursor.connection.strategy.sync: - response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) - else: - response = self.entry_cursor.connection.response - result = self.entry_cursor.connection.result - request = self.entry_cursor.connection.request - self.entry_cursor._store_operation_in_history(request, result, response) - if result['result'] == RESULT_SUCCESS: - self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to) - if refresh: - if self.entry_refresh(): - if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin - self._state.origin._state.dn = self.entry_dn - self._state.set_status(STATUS_COMMITTED) - self._state._to = None - return True - return False - elif self.entry_status == STATUS_READY_FOR_RENAMING: - rdn = '+'.join(safe_rdn(self._state._to)) - result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn) - if not self.entry_cursor.connection.strategy.sync: - response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) - else: - response = self.entry_cursor.connection.response - result = self.entry_cursor.connection.result - request = self.entry_cursor.connection.request - self.entry_cursor._store_operation_in_history(request, result, response) - if result['result'] == RESULT_SUCCESS: - self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:]) - if refresh: - if self.entry_refresh(): - if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin - self._state.origin._state.dn = self.entry_dn - self._state.set_status(STATUS_COMMITTED) - self._state._to = None - return True - return False - elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]: - missing_attributes = [] - for attr in self.entry_mandatory_attributes: - if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes: - missing_attributes.append('\'' + attr + '\'') - error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - 
elif self.entry_status == STATUS_PENDING_CHANGES: - if self._changes: - if self.entry_definition._auxiliary_class: # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present - for attr in self._changes: - # checks schema to see if attribute is defined in one of the already present object classes - attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in - for object_class in self.objectclass: - if object_class in attr_classes: - break - else: # executed only if the attribute class is not present in the objectClass attribute - # checks if attribute is defined in one of the possible auxiliary classes - for aux_class in self.entry_definition._auxiliary_class: - if aux_class in attr_classes: - if self._state._initial_status == STATUS_VIRTUAL: # entry is new, there must be a pending objectClass MODIFY_REPLACE - self._changes['objectClass'][0][1].append(aux_class) - else: - self.objectclass += aux_class - if self._state._initial_status == STATUS_VIRTUAL: - new_attributes = dict() - for attr in self._changes: - new_attributes[attr] = self._changes[attr][0][1] - result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls) - else: - result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls) - - if not self.entry_cursor.connection.strategy.sync: # asynchronous request - response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) - else: - response = self.entry_cursor.connection.response - result = self.entry_cursor.connection.result - request = self.entry_cursor.connection.request - self.entry_cursor._store_operation_in_history(request, result, response) - - if result['result'] == RESULT_SUCCESS: - if refresh: - if self.entry_refresh(): - if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present - for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing - if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes: - self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing - temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response) - self._state.origin.__dict__.clear() - self._state.origin.__dict__['_state'] = temp_entry._state - for attr in self: # returns the whole attribute object - if not attr.virtual: - self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key] - self._state.origin._state.read_time = self.entry_read_time - else: - self.entry_discard_changes() # if not refreshed remove committed changes - self._state.set_status(STATUS_COMMITTED) - return True - return False - - def entry_discard_changes(self): - self._changes.clear() - self._state.set_status(self._state._initial_status) - - def entry_delete(self): - if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]: - error_message = 'cannot delete entry, invalid status: ' + self.entry_status - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - self._state.set_status(STATUS_READY_FOR_DELETION) - - def entry_refresh(self, tries=4, seconds=2): - """ - - Refreshes the 
entry from the LDAP Server - """ - if self.entry_cursor.connection: - if self.entry_cursor.refresh_entry(self, tries, seconds): - return True - - return False - - def entry_move(self, destination_dn): - if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]: - error_message = 'cannot move entry, invalid status: ' + self.entry_status - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - self._state._to = safe_dn(destination_dn) - self._state.set_status(STATUS_READY_FOR_MOVING) - - def entry_rename(self, new_name): - if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]: - error_message = 'cannot rename entry, invalid status: ' + self.entry_status - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', error_message, self) - raise LDAPCursorError(error_message) - self._state._to = new_name - self._state.set_status(STATUS_READY_FOR_RENAMING) - - @property - def entry_changes(self): - return self._changes +""" +""" + +# Created on 2016.08.19 +# +# Author: Giovanni Cannata +# +# Copyright 2016 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + + +import json +try: + from collections import OrderedDict +except ImportError: + from ..utils.ordDict import OrderedDict # for Python 2.6 + +from os import linesep + +from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE +from .attribute import WritableAttribute +from .objectDef import ObjectDef +from .attrDef import AttrDef +from ..core.exceptions import LDAPKeyError, LDAPCursorError, LDAPCursorAttributeError +from ..utils.conv import check_json_dict, format_json, prepare_for_stream +from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header +from ..utils.dn import safe_dn, safe_rdn, to_dn +from ..utils.repr import to_stdout_encoding +from ..utils.ciDict import CaseInsensitiveWithAliasDict +from ..utils.config import get_config_parameter +from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\ + STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES +from ..core.results import RESULT_SUCCESS +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED + + +class EntryState(object): + """Contains data on the status of the entry. Does not pollute the Entry __dict__. + + """ + + def __init__(self, dn, cursor): + self.dn = dn + self._initial_status = None + self._to = None # used for move and rename + self.status = STATUS_INIT + self.attributes = CaseInsensitiveWithAliasDict() + self.raw_attributes = CaseInsensitiveWithAliasDict() + self.response = None + self.cursor = cursor + self.origin = None # reference to the original read-only entry (set when made writable). 
Needed to update attributes in read-only when modified (only if both refer the same server) + self.read_time = None + self.changes = OrderedDict() # includes changes to commit in a writable entry + if cursor.definition: + self.definition = cursor.definition + else: + self.definition = None + + def __repr__(self): + if self.__dict__ and self.dn is not None: + r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '') + linesep + r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep + r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '') + linesep + r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep + r += 'response: ' + ('present' if self.response else '') + linesep + r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '') + linesep + return r + else: + return object.__repr__(self) + + def __str__(self): + return self.__repr__() + + def __getstate__(self): + cpy = dict(self.__dict__) + cpy['cursor'] = None + return cpy + + def set_status(self, status): + conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')] + if status not in STATUSES: + error_message = 'invalid entry status ' + str(status) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + if status in INITIAL_STATUSES: + self._initial_status = status + self.status = status + if status == STATUS_DELETED: + self._initial_status = STATUS_VIRTUAL + if status == STATUS_COMMITTED: + self._initial_status = STATUS_WRITABLE + if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries + for attr in self.definition._attributes: + if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def: + if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes: + self.status = STATUS_MANDATORY_MISSING + break + + @property + def entry_raw_attributes(self): + return self.raw_attributes + + +class EntryBase(object): + """The Entry object contains a single LDAP entry. + Attributes can be accessed either by sequence, by assignment + or as dictionary keys. Keys are not case sensitive. 
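EntryBase resolves attribute names case-insensitively, ignores embedded whitespace, and falls back to the ';binary' (and, for attribute access, ';range') variants of a name before giving up, as the __getattr__ and __getitem__ bodies below show. Assuming a prior successful search on a hypothetical connection 'conn', the same attribute is reachable in several ways:

    entry = conn.entries[0]
    entry.givenName                  # attribute-style access
    entry.GIVENNAME                  # same attribute: keys are not case sensitive
    entry['given name']              # item-style access; whitespace is stripped
    'givenname' in entry             # membership test via __contains__
    entry.entry_attributes_as_dict   # plain dict of attribute names to value lists
    entry.entry_to_json()            # JSON rendering (entry_to_ldif() for LDIF)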
+ + The Entry object is read only + + - The DN is retrieved by entry_dn + - The cursor reference is in _cursor + - Raw attributes values are retrieved with _raw_attributes and the _raw_attribute() methods + """ + + def __init__(self, dn, cursor): + self._state = EntryState(dn, cursor) + + def __repr__(self): + if self.__dict__ and self.entry_dn is not None: + r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '') + linesep + if self._state.attributes: + for attr in sorted(self._state.attributes): + if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes): + r += ' ' + repr(self._state.attributes[attr]) + linesep + return r + else: + return object.__repr__(self) + + def __str__(self): + return self.__repr__() + + def __iter__(self): + for attribute in self._state.attributes: + yield self._state.attributes[attribute] + # raise StopIteration # deprecated in PEP 479 + return + + def __contains__(self, item): + try: + self.__getitem__(item) + return True + except LDAPKeyError: + return False + + def __getattr__(self, item): + if isinstance(item, STRING_TYPES): + if item == '_state': + return object.__getattr__(self, item) + item = ''.join(item.split()).lower() + attr_found = None + for attr in self._state.attributes.keys(): + if item == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.aliases(): + if item == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.keys(): + if item + ';binary' == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.aliases(): + if item + ';binary' == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.keys(): + if item + ';range' in attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.aliases(): + if item + ';range' in attr.lower(): + attr_found = attr + break + if not attr_found: + error_message = 'attribute \'%s\' not found' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + return self._state.attributes[attr] + error_message = 'attribute name must be a string' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + + def __setattr__(self, item, value): + if item == '_state': + object.__setattr__(self, item, value) + elif item in self._state.attributes: + error_message = 'attribute \'%s\' is read only' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + else: + error_message = 'entry is read only, cannot add \'%s\'' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + + def __getitem__(self, item): + if isinstance(item, STRING_TYPES): + item = ''.join(item.split()).lower() + attr_found = None + for attr in self._state.attributes.keys(): + if item == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.aliases(): + if item == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in 
self._state.attributes.keys(): + if item + ';binary' == attr.lower(): + attr_found = attr + break + if not attr_found: + for attr in self._state.attributes.aliases(): + if item + ';binary' == attr.lower(): + attr_found = attr + break + if not attr_found: + error_message = 'key \'%s\' not found' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPKeyError(error_message) + return self._state.attributes[attr] + + error_message = 'key must be a string' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPKeyError(error_message) + + def __eq__(self, other): + if isinstance(other, EntryBase): + return self.entry_dn == other.entry_dn + + return False + + def __lt__(self, other): + if isinstance(other, EntryBase): + return self.entry_dn <= other.entry_dn + + return False + + @property + def entry_dn(self): + return self._state.dn + + @property + def entry_cursor(self): + return self._state.cursor + + @property + def entry_status(self): + return self._state.status + + @property + def entry_definition(self): + return self._state.definition + + @property + def entry_raw_attributes(self): + return self._state.raw_attributes + + def entry_raw_attribute(self, name): + """ + + :param name: name of the attribute + :return: raw (unencoded) value of the attribute, None if attribute is not found + """ + return self._state.raw_attributes[name] if name in self._state.raw_attributes else None + + @property + def entry_mandatory_attributes(self): + return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory] + + @property + def entry_attributes(self): + return list(self._state.attributes.keys()) + + @property + def entry_attributes_as_dict(self): + return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items()) + + @property + def entry_read_time(self): + return self._state.read_time + + @property + def _changes(self): + return self._state.changes + + def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True): + json_entry = dict() + json_entry['dn'] = self.entry_dn + if checked_attributes: + if not include_empty: + # needed for python 2.6 compatibility + json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key]) + else: + json_entry['attributes'] = self.entry_attributes_as_dict + if raw: + if not include_empty: + # needed for python 2.6 compatibility + json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key]) + else: + json_entry['raw'] = dict(self.entry_raw_attributes) + + if str is bytes: # Python 2 + check_json_dict(json_entry) + + json_output = json.dumps(json_entry, + ensure_ascii=True, + sort_keys=sort, + indent=indent, + check_circular=True, + default=format_json, + separators=(',', ': ')) + + if stream: + stream.write(json_output) + + return json_output + + def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None): + ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order) + ldif_lines = add_ldif_header(ldif_lines) + line_separator = line_separator or linesep + ldif_output = line_separator.join(ldif_lines) + if stream: + if stream.tell() == 0: + header = add_ldif_header(['-'])[0] + 
stream.write(prepare_for_stream(header + line_separator + line_separator)) + stream.write(prepare_for_stream(ldif_output + line_separator + line_separator)) + return ldif_output + + +class Entry(EntryBase): + """The Entry object contains a single LDAP entry. + Attributes can be accessed either by sequence, by assignment + or as dictionary keys. Keys are not case sensitive. + + The Entry object is read only + + - The DN is retrieved by entry_dn + - The Reader reference is in _cursor() + - Raw attributes values are retrieved by the _ra_attributes and + _raw_attribute() methods + + """ + def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None): + conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') + if not self.entry_cursor.schema: + error_message = 'schema must be available to make an entry writable' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + # returns a new WritableEntry and its Writer cursor + if object_def is None: + if self.entry_cursor.definition._object_class: + object_def = self.entry_definition._object_class + auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else []) + elif 'objectclass' in self: + object_def = self.objectclass.values + + if not object_def: + error_message = 'object class must be specified to make an entry writable' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + + if not isinstance(object_def, ObjectDef): + object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class) + + if attributes: + if isinstance(attributes, STRING_TYPES): + attributes = [attributes] + + if isinstance(attributes, SEQUENCE_TYPES): + for attribute in attributes: + if attribute not in object_def._attributes: + error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + else: + attributes = [] + + if not writer_cursor: + from .cursor import Writer # local import to avoid circular reference in import at startup + writable_cursor = Writer(self.entry_cursor.connection, object_def) + else: + writable_cursor = writer_cursor + + if attributes: # force reading of attributes + writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes) + else: + writable_entry = writable_cursor._create_entry(self._state.response) + writable_cursor.entries.append(writable_entry) + writable_entry._state.read_time = self.entry_read_time + writable_entry._state.origin = self # reference to the original read-only entry + # checks original entry for custom definitions in AttrDefs + attr_to_add = [] + attr_to_remove = [] + object_def_to_add = [] + object_def_to_remove = [] + for attr in writable_entry._state.origin.entry_definition._attributes: + original_attr = writable_entry._state.origin.entry_definition._attributes[attr] + if attr != original_attr.name and (attr not in writable_entry._state.attributes or conf_operational_attribute_prefix + original_attr.name not in writable_entry._state.attributes): + old_attr_def = writable_entry.entry_definition._attributes[original_attr.name] + new_attr_def = AttrDef(original_attr.name, + key=attr, + validate=original_attr.validate, + 
pre_query=original_attr.pre_query, + post_query=original_attr.post_query, + default=original_attr.default, + dereference_dn=original_attr.dereference_dn, + description=original_attr.description, + mandatory=old_attr_def.mandatory, # keeps value read from schema + single_value=old_attr_def.single_value, # keeps value read from schema + alias=original_attr.other_names) + od = writable_entry.entry_definition + object_def_to_remove.append(old_attr_def) + object_def_to_add.append(new_attr_def) + # updates attribute name in entry attributes + new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor) + if original_attr.name in writable_entry._state.attributes: + new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names + new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values + new_attr.values = writable_entry._state.attributes[original_attr.name].values + new_attr.response = writable_entry._state.attributes[original_attr.name].response + attr_to_add.append((attr, new_attr)) + attr_to_remove.append(original_attr.name) + # writable_entry._state.attributes[attr] = new_attr + ## writable_entry._state.attributes.set_alias(attr, new_attr.other_names) + # del writable_entry._state.attributes[original_attr.name] + for attr, new_attr in attr_to_add: + writable_entry._state.attributes[attr] = new_attr + for attr in attr_to_remove: + del writable_entry._state.attributes[attr] + for object_def in object_def_to_remove: + o = writable_entry.entry_definition + o -= object_def + for object_def in object_def_to_add: + o = writable_entry.entry_definition + o += object_def + + writable_entry._state.set_status(STATUS_WRITABLE) + return writable_entry + + +class WritableEntry(EntryBase): + def __setitem__(self, key, value): + if value is not Ellipsis: # hack for using implicit operators in writable attributes + self.__setattr__(key, value) + + def __setattr__(self, item, value): + conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] + if item == '_state' and isinstance(value, EntryState): + self.__dict__['_state'] = value + return + + if value is not Ellipsis: # hack for using implicit operators in writable attributes + # checks if using an alias + if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def: + if item not in self._state.attributes: # setting value to an attribute still without values + new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor) + self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict + self._state.attributes[item].set(value) # try to add to new_values + else: + error_message = 'attribute \'%s\' not defined' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + + def __getattr__(self, item): + if isinstance(item, STRING_TYPES): + if item == '_state': + return self.__dict__['_state'] + item = ''.join(item.split()).lower() + for attr in self._state.attributes.keys(): + if item == attr.lower(): + return self._state.attributes[attr] + for attr in self._state.attributes.aliases(): + if item == attr.lower(): + return self._state.attributes[attr] + if item in self.entry_definition._attributes: # item is a new attribute to commit, creates the AttrDef and add to the attributes to retrive + self._state.attributes[item] = 
WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor) + self.entry_cursor.attributes.add(item) + return self._state.attributes[item] + error_message = 'attribute \'%s\' not defined' % item + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + else: + error_message = 'attribute name must be a string' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorAttributeError(error_message) + + @property + def entry_virtual_attributes(self): + return [attr for attr in self.entry_attributes if self[attr].virtual] + + def entry_commit_changes(self, refresh=True, controls=None, clear_history=True): + if clear_history: + self.entry_cursor._reset_history() + + if self.entry_status == STATUS_READY_FOR_DELETION: + result = self.entry_cursor.connection.delete(self.entry_dn, controls) + if not self.entry_cursor.connection.strategy.sync: + response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) + else: + response = self.entry_cursor.connection.response + result = self.entry_cursor.connection.result + request = self.entry_cursor.connection.request + self.entry_cursor._store_operation_in_history(request, result, response) + if result['result'] == RESULT_SUCCESS: + dn = self.entry_dn + if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry + cursor = self._state.origin.entry_cursor + self._state.origin.__dict__.clear() + self._state.origin.__dict__['_state'] = EntryState(dn, cursor) + self._state.origin._state.set_status(STATUS_DELETED) + cursor = self.entry_cursor + self.__dict__.clear() + self._state = EntryState(dn, cursor) + self._state.set_status(STATUS_DELETED) + return True + return False + elif self.entry_status == STATUS_READY_FOR_MOVING: + result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to) + if not self.entry_cursor.connection.strategy.sync: + response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) + else: + response = self.entry_cursor.connection.response + result = self.entry_cursor.connection.result + request = self.entry_cursor.connection.request + self.entry_cursor._store_operation_in_history(request, result, response) + if result['result'] == RESULT_SUCCESS: + self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to) + if refresh: + if self.entry_refresh(): + if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin + self._state.origin._state.dn = self.entry_dn + self._state.set_status(STATUS_COMMITTED) + self._state._to = None + return True + return False + elif self.entry_status == STATUS_READY_FOR_RENAMING: + rdn = '+'.join(safe_rdn(self._state._to)) + result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn) + if not self.entry_cursor.connection.strategy.sync: + response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) + else: + response = self.entry_cursor.connection.response + result = self.entry_cursor.connection.result + request = self.entry_cursor.connection.request + self.entry_cursor._store_operation_in_history(request, result, response) + if result['result'] == RESULT_SUCCESS: + self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:]) + if 
refresh: + if self.entry_refresh(): + if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin + self._state.origin._state.dn = self.entry_dn + self._state.set_status(STATUS_COMMITTED) + self._state._to = None + return True + return False + elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]: + missing_attributes = [] + for attr in self.entry_mandatory_attributes: + if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes: + missing_attributes.append('\'' + attr + '\'') + error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + elif self.entry_status == STATUS_PENDING_CHANGES: + if self._changes: + if self.entry_definition._auxiliary_class: # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present + for attr in self._changes: + # checks schema to see if attribute is defined in one of the already present object classes + attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in + for object_class in self.objectclass: + if object_class in attr_classes: + break + else: # executed only if the attribute class is not present in the objectClass attribute + # checks if attribute is defined in one of the possible auxiliary classes + for aux_class in self.entry_definition._auxiliary_class: + if aux_class in attr_classes: + if self._state._initial_status == STATUS_VIRTUAL: # entry is new, there must be a pending objectClass MODIFY_REPLACE + self._changes['objectClass'][0][1].append(aux_class) + else: + self.objectclass += aux_class + if self._state._initial_status == STATUS_VIRTUAL: + new_attributes = dict() + for attr in self._changes: + new_attributes[attr] = self._changes[attr][0][1] + result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls) + else: + result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls) + + if not self.entry_cursor.connection.strategy.sync: # asynchronous request + response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) + else: + response = self.entry_cursor.connection.response + result = self.entry_cursor.connection.result + request = self.entry_cursor.connection.request + self.entry_cursor._store_operation_in_history(request, result, response) + + if result['result'] == RESULT_SUCCESS: + if refresh: + if self.entry_refresh(): + if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present + for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing + if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes: + self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing + temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response) + self._state.origin.__dict__.clear() + self._state.origin.__dict__['_state'] = temp_entry._state + for attr in self: # returns the whole attribute object + if not 
hasattr(attr,'virtual'): + self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key] + self._state.origin._state.read_time = self.entry_read_time + else: + self.entry_discard_changes() # if not refreshed remove committed changes + self._state.set_status(STATUS_COMMITTED) + return True + return False + + def entry_discard_changes(self): + self._changes.clear() + self._state.set_status(self._state._initial_status) + + def entry_delete(self): + if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]: + error_message = 'cannot delete entry, invalid status: ' + self.entry_status + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self._state.set_status(STATUS_READY_FOR_DELETION) + + def entry_refresh(self, tries=4, seconds=2): + """ + + Refreshes the entry from the LDAP Server + """ + if self.entry_cursor.connection: + if self.entry_cursor.refresh_entry(self, tries, seconds): + return True + + return False + + def entry_move(self, destination_dn): + if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]: + error_message = 'cannot move entry, invalid status: ' + self.entry_status + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self._state._to = safe_dn(destination_dn) + self._state.set_status(STATUS_READY_FOR_MOVING) + + def entry_rename(self, new_name): + if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]: + error_message = 'cannot rename entry, invalid status: ' + self.entry_status + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', error_message, self) + raise LDAPCursorError(error_message) + self._state._to = new_name + self._state.set_status(STATUS_READY_FOR_RENAMING) + + @property + def entry_changes(self): + return self._changes diff --git a/server/www/packages/packages-linux/x64/ldap3/abstract/objectDef.py b/server/www/packages/packages-linux/x64/ldap3/abstract/objectDef.py index 5af64d5..1f8609c 100644 --- a/server/www/packages/packages-linux/x64/ldap3/abstract/objectDef.py +++ b/server/www/packages/packages-linux/x64/ldap3/abstract/objectDef.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/core/connection.py b/server/www/packages/packages-linux/x64/ldap3/core/connection.py index b8ed002..0f148e8 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/connection.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/connection.py @@ -1,1504 +1,1549 @@ -""" -""" - -# Created on 2014.05.31 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. 
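The WritableEntry methods in the new entry.py implement a small state machine: entry_delete, entry_move and entry_rename only flip the status (STATUS_READY_FOR_DELETION, _MOVING, _RENAMING), and the LDAP operation itself is sent by entry_commit_changes. A sketch of the round trip, with attribute names and DNs invented for illustration:

    w = entry.entry_writable()        # from a read-only search result
    w.givenName = 'John'              # staged in _changes, status becomes STATUS_PENDING_CHANGES
    w.entry_commit_changes()          # sends the MODIFY, then refreshes the entry

    w.entry_rename('cn=John A. Doe')  # marks STATUS_READY_FOR_RENAMING only...
    w.entry_commit_changes()          # ...this call performs the MODIFY_DN

    w.entry_delete()                  # marks STATUS_READY_FOR_DELETION
    w.entry_commit_changes()          # sends the DELETE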
-# If not, see . -from copy import deepcopy -from os import linesep -from threading import RLock, Lock -from functools import reduce -import json - -from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \ - SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \ - RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, \ - STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL, DIGEST_MD5, GSSAPI, PLAIN - -from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE -from ..extend import ExtendedOperationsRoot -from .pooling import ServerPool -from .server import Server -from ..operation.abandon import abandon_operation, abandon_request_to_dict -from ..operation.add import add_operation, add_request_to_dict -from ..operation.bind import bind_operation, bind_request_to_dict -from ..operation.compare import compare_operation, compare_request_to_dict -from ..operation.delete import delete_operation, delete_request_to_dict -from ..operation.extended import extended_operation, extended_request_to_dict -from ..operation.modify import modify_operation, modify_request_to_dict -from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict -from ..operation.search import search_operation, search_request_to_dict -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header -from ..protocol.sasl.digestMd5 import sasl_digest_md5 -from ..protocol.sasl.external import sasl_external -from ..protocol.sasl.plain import sasl_plain -from ..strategy.sync import SyncStrategy -from ..strategy.mockAsync import MockAsyncStrategy -from ..strategy.asynchronous import AsyncStrategy -from ..strategy.reusable import ReusableStrategy -from ..strategy.restartable import RestartableStrategy -from ..strategy.ldifProducer import LdifProducerStrategy -from ..strategy.mockSync import MockSyncStrategy -from ..strategy.asyncStream import AsyncStreamStrategy -from ..operation.unbind import unbind_operation -from ..protocol.rfc2696 import paged_search_control -from .usage import ConnectionUsage -from .tls import Tls -from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \ - LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \ - LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError - -from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data -from ..utils.dn import safe_dn - - -SASL_AVAILABLE_MECHANISMS = [EXTERNAL, - DIGEST_MD5, - GSSAPI, - PLAIN] - -CLIENT_STRATEGIES = [SYNC, - ASYNC, - LDIF, - RESTARTABLE, - REUSABLE, - MOCK_SYNC, - MOCK_ASYNC, - ASYNC_STREAM] - - -def _format_socket_endpoint(endpoint): - if endpoint and len(endpoint) == 2: # IPv4 - return str(endpoint[0]) + ':' + str(endpoint[1]) - elif endpoint and len(endpoint) == 4: # IPv6 - return '[' + str(endpoint[0]) + ']:' + str(endpoint[1]) - - try: - return str(endpoint) - except Exception: - return '?' 
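Each name in the CLIENT_STRATEGIES list above is mapped to a strategy class further down in Connection.__init__. With an asynchronous strategy, an operation returns a message id rather than a result, and get_response collects the outcome later; host and search base here are hypothetical:

    from ldap3 import Server, Connection, ASYNC

    conn = Connection(Server('ldap.example.com'), client_strategy=ASYNC, auto_bind=True)
    msg_id = conn.search('dc=example,dc=com', '(objectClass=person)', attributes=['sn'])
    response, result = conn.get_response(msg_id)   # blocks until the search completes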
- - -def _format_socket_endpoints(sock): - if sock: - try: - local = sock.getsockname() - except Exception: - local = (None, None, None, None) - try: - remote = sock.getpeername() - except Exception: - remote = (None, None, None, None) - - return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>' - return '<no socket>' - - -# noinspection PyProtectedMember -class Connection(object): - """Main ldap connection class. - - Controls, if used, must be a list of tuples. Each tuple must have 3 - elements, the control OID, a boolean meaning if the control is - critical, a value. - - If the boolean is set to True the server must honor the control or - refuse the operation - - Mixing controls must be defined in controls specification (as per - RFC 4511) - - :param server: the Server object to connect to - :type server: Server, str - :param user: the user name for simple authentication - :type user: str - :param password: the password for simple authentication - :type password: str - :param auto_bind: specify if the bind will be performed automatically when defining the Connection object - :type auto_bind: int, can be one of AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3 - :param version: LDAP version, default to 3 - :type version: int - :param authentication: type of authentication - :type authentication: int, can be one of AUTH_ANONYMOUS, AUTH_SIMPLE or AUTH_SASL, as specified in ldap3 - :param client_strategy: communication strategy used in the Connection - :type client_strategy: can be one of STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, STRATEGY_LDIF_PRODUCER, STRATEGY_SYNC_RESTARTABLE, STRATEGY_REUSABLE_THREADED as specified in ldap3 - :param auto_referrals: specify if the connection object must automatically follow referrals - :type auto_referrals: bool - :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN' - :type sasl_mechanism: str - :param sasl_credentials: credentials for SASL mechanism - :type sasl_credentials: tuple - :param check_names: if True the library will check names of attributes and object classes against the schema.
-    :type check_names: bool
-    :param collect_usage: collect usage metrics in the usage attribute
-    :type collect_usage: bool
-    :param read_only: disable operations that modify data in the LDAP server
-    :type read_only: bool
-    :param lazy: open and bind the connection only when an actual operation is performed
-    :type lazy: bool
-    :param raise_exceptions: raise exceptions when operations are not successful, if False operations return False if not successful but not raise exceptions
-    :type raise_exceptions: bool
-    :param pool_name: pool name for pooled strategies
-    :type pool_name: str
-    :param pool_size: pool size for pooled strategies
-    :type pool_size: int
-    :param pool_lifetime: pool lifetime for pooled strategies
-    :type pool_lifetime: int
-    :param use_referral_cache: keep referral connections open and reuse them
-    :type use_referral_cache: bool
-    :param auto_escape: automatic escaping of filter values
-    :param auto_encode: automatic encoding of attribute values
-    :type use_referral_cache: bool
-    """
-
-    def __init__(self,
-                 server,
-                 user=None,
-                 password=None,
-                 auto_bind=AUTO_BIND_DEFAULT,
-                 version=3,
-                 authentication=None,
-                 client_strategy=SYNC,
-                 auto_referrals=True,
-                 auto_range=True,
-                 sasl_mechanism=None,
-                 sasl_credentials=None,
-                 check_names=True,
-                 collect_usage=False,
-                 read_only=False,
-                 lazy=False,
-                 raise_exceptions=False,
-                 pool_name=None,
-                 pool_size=None,
-                 pool_lifetime=None,
-                 fast_decoder=True,
-                 receive_timeout=None,
-                 return_empty_attributes=True,
-                 use_referral_cache=False,
-                 auto_escape=True,
-                 auto_encode=True,
-                 pool_keepalive=None):
-
-        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
-        self.connection_lock = RLock()  # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread
-        with self.connection_lock:
-            if client_strategy not in CLIENT_STRATEGIES:
-                self.last_error = 'unknown client connection strategy'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPUnknownStrategyError(self.last_error)
-
-            self.strategy_type = client_strategy
-            self.user = user
-            self.password = password
-
-            if not authentication and self.user:
-                self.authentication = SIMPLE
-            elif not authentication:
-                self.authentication = ANONYMOUS
-            elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
-                self.authentication = authentication
-            else:
-                self.last_error = 'unknown authentication method'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
-            self.version = version
-            self.auto_referrals = True if auto_referrals else False
-            self.request = None
-            self.response = None
-            self.result = None
-            self.bound = False
-            self.listening = False
-            self.closed = True
-            self.last_error = None
-            if auto_bind is False:  # compatibility with older version where auto_bind was a boolean
-                self.auto_bind = AUTO_BIND_DEFAULT
-            elif auto_bind is True:
-                self.auto_bind = AUTO_BIND_NO_TLS
-            else:
-                self.auto_bind = auto_bind
-            self.sasl_mechanism = sasl_mechanism
-            self.sasl_credentials = sasl_credentials
-            self._usage = ConnectionUsage() if collect_usage else None
-            self.socket = None
-            self.tls_started = False
-            self.sasl_in_progress = False
-            self.read_only = read_only
-            self._context_state = []
-            self._deferred_open = False
-            self._deferred_bind = False
-            self._deferred_start_tls = False
-            self._bind_controls = None
-            self._executing_deferred = False
-            self.lazy = lazy
-            self.pool_name = pool_name if pool_name else conf_default_pool_name
-            self.pool_size = pool_size
-            self.pool_lifetime = pool_lifetime
-            self.pool_keepalive = pool_keepalive
-            self.starting_tls = False
-            self.check_names = check_names
-            self.raise_exceptions = raise_exceptions
-            self.auto_range = True if auto_range else False
-            self.extend = ExtendedOperationsRoot(self)
-            self._entries = []
-            self.fast_decoder = fast_decoder
-            self.receive_timeout = receive_timeout
-            self.empty_attributes = return_empty_attributes
-            self.use_referral_cache = use_referral_cache
-            self.auto_escape = auto_escape
-            self.auto_encode = auto_encode
-
-            if isinstance(server, STRING_TYPES):
-                server = Server(server)
-            if isinstance(server, SEQUENCE_TYPES):
-                server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True)
-
-            if isinstance(server, ServerPool):
-                self.server_pool = server
-                self.server_pool.initialize(self)
-                self.server = self.server_pool.get_current_server(self)
-            else:
-                self.server_pool = None
-                self.server = server
-
-            # if self.authentication == SIMPLE and self.user and self.check_names:
-            #     self.user = safe_dn(self.user)
-            #     if log_enabled(EXTENDED):
-            #         log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self)
-
-            if self.strategy_type == SYNC:
-                self.strategy = SyncStrategy(self)
-            elif self.strategy_type == ASYNC:
-                self.strategy = AsyncStrategy(self)
-            elif self.strategy_type == LDIF:
-                self.strategy = LdifProducerStrategy(self)
-            elif self.strategy_type == RESTARTABLE:
-                self.strategy = RestartableStrategy(self)
-            elif self.strategy_type == REUSABLE:
-                self.strategy = ReusableStrategy(self)
-                self.lazy = False
-            elif self.strategy_type == MOCK_SYNC:
-                self.strategy = MockSyncStrategy(self)
-            elif self.strategy_type == MOCK_ASYNC:
-                self.strategy = MockAsyncStrategy(self)
-            elif self.strategy_type == ASYNC_STREAM:
-                self.strategy = AsyncStreamStrategy(self)
-            else:
-                self.last_error = 'unknown strategy'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPUnknownStrategyError(self.last_error)
-
-            # maps strategy functions to connection functions
-            self.send = self.strategy.send
-            self.open = self.strategy.open
-            self.get_response = self.strategy.get_response
-            self.post_send_single_response = self.strategy.post_send_single_response
-            self.post_send_search = self.strategy.post_send_search
-
-            if not self.strategy.no_real_dsa:
-                self.do_auto_bind()
-            # else:  # for strategies with a fake server set get_info to NONE if server hasn't a schema
-            #     if self.server and not self.server.schema:
-            #         self.server.get_info = NONE
-            if log_enabled(BASIC):
-                if get_library_log_hide_sensitive_data():
-                    log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped())
-                else:
-                    log(BASIC, 'instantiated Connection: <%r>', self)
-
-    def do_auto_bind(self):
-        if self.auto_bind and self.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
-            if log_enabled(BASIC):
-                log(BASIC, 'performing automatic bind for <%s>', self)
-            if self.closed:
-                self.open(read_server_info=False)
-            if self.auto_bind == AUTO_BIND_NO_TLS:
-                self.bind(read_server_info=True)
-            elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
-                self.start_tls(read_server_info=False)
-                self.bind(read_server_info=True)
-            elif self.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
-                self.bind(read_server_info=False)
-                self.start_tls(read_server_info=True)
-            if not self.bound:
-                self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '')
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPBindError(self.last_error)
-
-    def __str__(self):
-        s = [
-            str(self.server) if self.server else 'None',
-            'user: ' + str(self.user),
-            'lazy' if self.lazy else 'not lazy',
-            'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'),
-            'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'),
-            _format_socket_endpoints(self.socket),
-            'tls not started' if not self.tls_started else('deferred start_tls' if self._deferred_start_tls else 'tls started'),
-            'listening' if self.listening else 'not listening',
-            self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy',
-            'internal decoder' if self.fast_decoder else 'pyasn1 decoder'
-        ]
-        return ' - '.join(s)
-
-    def __repr__(self):
-        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
-        if self.server_pool:
-            r = 'Connection(server={0.server_pool!r}'.format(self)
-        else:
-            r = 'Connection(server={0.server!r}'.format(self)
-        r += '' if self.user is None else ', user={0.user!r}'.format(self)
-        r += '' if self.password is None else ', password={0.password!r}'.format(self)
-        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
-        r += '' if self.version is None else ', version={0.version!r}'.format(self)
-        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
-        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
-        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
-        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
-        r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
-        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
-        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
-        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
-        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
-        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
-        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
-        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
-        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
-        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
-        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
-        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
-        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
-        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
-        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
-        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
-        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
-        r += ')'
-
-        return r
-
-    def repr_with_sensitive_data_stripped(self):
-        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
-        if self.server_pool:
-            r = 'Connection(server={0.server_pool!r}'.format(self)
-        else:
-            r = 'Connection(server={0.server!r}'.format(self)
-        r += '' if self.user is None else ', user={0.user!r}'.format(self)
-        r += '' if self.password is None else ", password='{0}'".format('<stripped %d characters of sensitive data>' % len(self.password))
-        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
-        r += '' if self.version is None else ', version={0.version!r}'.format(self)
-        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
-        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
-        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
-        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
-        if self.sasl_mechanism == DIGEST_MD5:
-            r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
-        else:
-            r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
-        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
-        r += '' if self.usage is None else (', collect_usage=' + 'True' if self.usage else 'False')
-        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
-        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
-        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
-        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
-        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
-        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
-        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
-        r += '' if self.fast_decoder is None else (', fast_decoder=' + 'True' if self.fast_decoder else 'False')
-        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
-        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
-        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + 'True' if self.empty_attributes else 'False')
-        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
-        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
-        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
-        r += ')'
-
-        return r
-
-    @property
-    def stream(self):
-        """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
-        :return: reference to the response stream if defined in the strategy.
- """ - return self.strategy.get_stream() if self.strategy.can_stream else None - - @stream.setter - def stream(self, value): - with self.connection_lock: - if self.strategy.can_stream: - self.strategy.set_stream(value) - - @property - def usage(self): - """Usage statistics for the connection. - :return: Usage object - """ - if not self._usage: - return None - if self.strategy.pooled: # update master connection usage from pooled connections - self._usage.reset() - for worker in self.strategy.pool.workers: - self._usage += worker.connection.usage - self._usage += self.strategy.pool.terminated_usage - return self._usage - - def __enter__(self): - with self.connection_lock: - self._context_state.append((self.bound, self.closed)) # save status out of context as a tuple in a list - if self.auto_bind != AUTO_BIND_NONE: - if self.auto_bind == AUTO_BIND_DEFAULT: - self.auto_bind = AUTO_BIND_NO_TLS - if self.closed: - self.open() - if not self.bound: - self.bind() - - return self - - # noinspection PyUnusedLocal - def __exit__(self, exc_type, exc_val, exc_tb): - with self.connection_lock: - context_bound, context_closed = self._context_state.pop() - if (not context_bound and self.bound) or self.stream: # restore status prior to entering context - try: - self.unbind() - except LDAPExceptionError: - pass - - if not context_closed and self.closed: - self.open() - - if exc_type is not None: - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', exc_type, self) - return False # re-raise LDAPExceptionError - - def bind(self, - read_server_info=True, - controls=None): - """Bind to ldap Server with the authentication method and the user defined in the connection - - :param read_server_info: reads info from server - :param controls: LDAP controls to send along with the bind operation - :type controls: list of tuple - :return: bool - - """ - if log_enabled(BASIC): - log(BASIC, 'start BIND operation via <%s>', self) - self.last_error = None - with self.connection_lock: - if self.lazy and not self._executing_deferred: - if self.strategy.pooled: - self.strategy.validate_bind(controls) - self._deferred_bind = True - self._bind_controls = controls - self.bound = True - if log_enabled(BASIC): - log(BASIC, 'deferring bind for <%s>', self) - else: - self._deferred_bind = False - self._bind_controls = None - if self.closed: # try to open connection if closed - self.open(read_server_info=False) - if self.authentication == ANONYMOUS: - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing anonymous BIND for <%s>', self) - if not self.strategy.pooled: - request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode) - if log_enabled(PROTOCOL): - log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self) - response = self.post_send_single_response(self.send('bindRequest', request, controls)) - else: - response = self.strategy.validate_bind(controls) # only for REUSABLE - elif self.authentication == SIMPLE: - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing simple BIND for <%s>', self) - if not self.strategy.pooled: - request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode) - if log_enabled(PROTOCOL): - log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self) - response = self.post_send_single_response(self.send('bindRequest', request, controls)) - else: - response = self.strategy.validate_bind(controls) # only for REUSABLE - elif 
-                    if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS:
-                        if log_enabled(PROTOCOL):
-                            log(PROTOCOL, 'performing SASL BIND for <%s>', self)
-                        if not self.strategy.pooled:
-                            response = self.do_sasl_bind(controls)
-                        else:
-                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
-                    else:
-                        self.last_error = 'requested SASL mechanism not supported'
-                        if log_enabled(ERROR):
-                            log(ERROR, '%s for <%s>', self.last_error, self)
-                        raise LDAPSASLMechanismNotSupportedError(self.last_error)
-                elif self.authentication == NTLM:
-                    if self.user and self.password and len(self.user.split('\\')) == 2:
-                        if log_enabled(PROTOCOL):
-                            log(PROTOCOL, 'performing NTLM BIND for <%s>', self)
-                        if not self.strategy.pooled:
-                            response = self.do_ntlm_bind(controls)
-                        else:
-                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
-                    else:  # user or password missing
-                        self.last_error = 'NTLM needs domain\\username and a password'
-                        if log_enabled(ERROR):
-                            log(ERROR, '%s for <%s>', self.last_error, self)
-                        raise LDAPUnknownAuthenticationMethodError(self.last_error)
-                else:
-                    self.last_error = 'unknown authentication method'
-                    if log_enabled(ERROR):
-                        log(ERROR, '%s for <%s>', self.last_error, self)
-                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
-                if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM):  # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy
-                    _, result = self.get_response(response)
-                    if log_enabled(PROTOCOL):
-                        log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self)
-                elif self.strategy.sync:
-                    result = self.result
-                    if log_enabled(PROTOCOL):
-                        log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self)
-                elif self.strategy.pooled or self.authentication in (SASL, NTLM):  # asynchronous SASL and NTLM or reusable strategy get the bind result synchronously
-                    result = response
-                else:
-                    self.last_error = 'unknown authentication method'
-                    if log_enabled(ERROR):
-                        log(ERROR, '%s for <%s>', self.last_error, self)
-                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
-                if result is None:
-                    # self.bound = True if self.strategy_type == REUSABLE else False
-                    self.bound = False
-                elif result is True:
-                    self.bound = True
-                elif result is False:
-                    self.bound = False
-                else:
-                    self.bound = True if result['result'] == RESULT_SUCCESS else False
-                    if not self.bound and result and result['description'] and not self.last_error:
-                        self.last_error = result['description']
-
-            if read_server_info and self.bound:
-                self.refresh_server_info()
-            self._entries = []
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done BIND operation, result <%s>', self.bound)
-
-        return self.bound
-
-    def rebind(self,
-               user=None,
-               password=None,
-               authentication=None,
-               sasl_mechanism=None,
-               sasl_credentials=None,
-               read_server_info=True,
-               controls=None
-               ):
-
-        if log_enabled(BASIC):
-            log(BASIC, 'start (RE)BIND operation via <%s>', self)
-        self.last_error = None
-        with self.connection_lock:
-            if user:
-                self.user = user
-            if password is not None:
-                self.password = password
-            if not authentication and user:
-                self.authentication = SIMPLE
-            if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
-                self.authentication = authentication
-            elif authentication is not None:
-                self.last_error = 'unknown authentication method'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPUnknownAuthenticationMethodError(self.last_error)
-            if sasl_mechanism:
-                self.sasl_mechanism = sasl_mechanism
-            if sasl_credentials:
-                self.sasl_credentials = sasl_credentials
-
-            # if self.authentication == SIMPLE and self.user and self.check_names:
-            #     self.user = safe_dn(self.user)
-            #     if log_enabled(EXTENDED):
-            #         log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
-
-            if not self.strategy.pooled:
-                try:
-                    return self.bind(read_server_info, controls)
-                except LDAPSocketReceiveError:
-                    raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
-            else:
-                self.strategy.pool.rebind_pool()
-                return True
-
-    def unbind(self,
-               controls=None):
-        """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
-
-        :param controls: LDAP controls to send along with the bind operation
-
-        """
-        if log_enabled(BASIC):
-            log(BASIC, 'start UNBIND operation via <%s>', self)
-
-        if self.use_referral_cache:
-            self.strategy.unbind_referral_cache()
-
-        self.last_error = None
-        with self.connection_lock:
-            if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open):  # clear deferred status
-                self.strategy.close()
-                self._deferred_open = False
-                self._deferred_bind = False
-                self._deferred_start_tls = False
-            elif not self.closed:
-                request = unbind_operation()
-                if log_enabled(PROTOCOL):
-                    log(PROTOCOL, 'UNBIND request sent via <%s>', self)
-                self.send('unbindRequest', request, controls)
-                self.strategy.close()
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done UNBIND operation, result <%s>', True)
-
-        return True
-
-    def search(self,
-               search_base,
-               search_filter,
-               search_scope=SUBTREE,
-               dereference_aliases=DEREF_ALWAYS,
-               attributes=None,
-               size_limit=0,
-               time_limit=0,
-               types_only=False,
-               get_operational_attributes=False,
-               controls=None,
-               paged_size=None,
-               paged_criticality=False,
-               paged_cookie=None,
-               auto_escape=None):
-        """
-        Perform an ldap search:
-
-        - If attributes is empty no attribute is returned
-        - If attributes is ALL_ATTRIBUTES all attributes are returned
-        - If paged_size is an int greater than 0 a simple paged search is tried as described in RFC2696 with the specified size
-        - If paged is 0 and cookie is present the search is abandoned on server
-        - Cookie is an opaque string received in the last paged search and must be used on the next paged search response
-        - If lazy == True open and bind will be deferred until another LDAP operation is performed
-        - If missing_attributes == True then an attribute not returned by the server is set to None
-        - If auto_escape is set it overrides the Connection auto_escape
-        """
-        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-        if log_enabled(BASIC):
-            log(BASIC, 'start SEARCH operation via <%s>', self)
-
-        if self.check_names and search_base:
-            search_base = safe_dn(search_base)
-            if log_enabled(EXTENDED):
-                log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
-
-        with self.connection_lock:
-            self._fire_deferred()
-            if not attributes:
-                attributes = [NO_ATTRIBUTES]
-            elif attributes == ALL_ATTRIBUTES:
-                attributes = [ALL_ATTRIBUTES]
-
-            if isinstance(attributes, STRING_TYPES):
-                attributes = [attributes]
-
-            if get_operational_attributes and isinstance(attributes, list):
-                attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
-            elif get_operational_attributes and isinstance(attributes, tuple):
-                attributes += (ALL_OPERATIONAL_ATTRIBUTES, )  # concatenate tuple
-
-            if isinstance(paged_size, int):
-                if log_enabled(PROTOCOL):
-                    log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self)
-
-                if controls is None:
-                    controls = []
-                else:
-                    # Copy the controls to prevent modifying the original object
-                    controls = list(controls)
-                controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie))
-
-            if self.server and self.server.schema and self.check_names:
-                for attribute_name in attributes:
-                    if ';' in attribute_name:  # remove tags
-                        attribute_name_to_check = attribute_name.split(';')[0]
-                    else:
-                        attribute_name_to_check = attribute_name
-                    if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
-                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
-            request = search_operation(search_base,
-                                       search_filter,
-                                       search_scope,
-                                       dereference_aliases,
-                                       attributes,
-                                       size_limit,
-                                       time_limit,
-                                       types_only,
-                                       self.auto_escape if auto_escape is None else auto_escape,
-                                       self.auto_encode,
-                                       self.server.schema if self.server else None,
-                                       validator=self.server.custom_validator,
-                                       check_names=self.check_names)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self)
-            response = self.post_send_search(self.send('searchRequest', request, controls))
-            self._entries = []
-
-            if isinstance(response, int):  # asynchronous strategy
-                return_value = response
-                if log_enabled(PROTOCOL):
-                    log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self)
-            else:
-                return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False
-                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
-                    self.last_error = self.result['description']
-
-                if log_enabled(PROTOCOL):
-                    for entry in response:
-                        if entry['type'] == 'searchResEntry':
-                            log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self)
-                        elif entry['type'] == 'searchResRef':
-                            log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self)
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done SEARCH operation, result <%s>', return_value)
-
-        return return_value
-
-    def compare(self,
-                dn,
-                attribute,
-                value,
-                controls=None):
-        """
-        Perform a compare operation
-        """
-        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-
-        if log_enabled(BASIC):
-            log(BASIC, 'start COMPARE operation via <%s>', self)
-        self.last_error = None
-        if self.check_names:
-            dn = safe_dn(dn)
-            if log_enabled(EXTENDED):
-                log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self)
-
-        if self.server and self.server.schema and self.check_names:
-            if ';' in attribute:  # remove tags for checking
-                attribute_name_to_check = attribute.split(';')[0]
-            else:
-                attribute_name_to_check = attribute
-
-            if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
-                raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
-        if isinstance(value, SEQUENCE_TYPES):  # value can't be a sequence
-            raise LDAPInvalidValueError('value cannot be a sequence')
-
-        with self.connection_lock:
-            self._fire_deferred()
-            request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self)
-            response = self.post_send_single_response(self.send('compareRequest', request, controls))
-            self._entries = []
-        if isinstance(response, int):
-            return_value = response
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self)
-        else:
-            return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False
-            if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error:
-                self.last_error = self.result['description']
-
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self)
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done COMPARE operation, result <%s>', return_value)
-
-        return return_value
-
-    def add(self,
-            dn,
-            object_class=None,
-            attributes=None,
-            controls=None):
-        """
-        Add dn to the DIT, object_class is None, a class name or a list
-        of class names.
-
-        Attributes is a dictionary in the form 'attr': 'val' or 'attr':
-        ['val1', 'val2', ...] for multivalued attributes
-        """
-        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-        conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
-        if log_enabled(BASIC):
-            log(BASIC, 'start ADD operation via <%s>', self)
-        self.last_error = None
-        _attributes = deepcopy(attributes)  # dict could change when adding objectClass values
-        if self.check_names:
-            dn = safe_dn(dn)
-            if log_enabled(EXTENDED):
-                log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self)
-
-        with self.connection_lock:
-            self._fire_deferred()
-            attr_object_class = []
-            if object_class is None:
-                parm_object_class = []
-            else:
-                parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class]
-
-            object_class_attr_name = ''
-            if _attributes:
-                for attr in _attributes:
-                    if attr.lower() == 'objectclass':
-                        object_class_attr_name = attr
-                        attr_object_class = list(_attributes[object_class_attr_name]) if isinstance(_attributes[object_class_attr_name], SEQUENCE_TYPES) else [_attributes[object_class_attr_name]]
-                        break
-            else:
-                _attributes = dict()
-
-            if not object_class_attr_name:
-                object_class_attr_name = 'objectClass'
-
-            attr_object_class = [to_unicode(object_class) for object_class in attr_object_class]  # converts objectclass to unicode in case of bytes value
-            _attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, [])  # remove duplicate ObjectClasses
-
-            if not _attributes[object_class_attr_name]:
-                self.last_error = 'objectClass attribute is mandatory'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPObjectClassError(self.last_error)
-
-            if self.server and self.server.schema and self.check_names:
-                for object_class_name in _attributes[object_class_attr_name]:
-                    if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes:
-                        raise LDAPObjectClassError('invalid object class ' + str(object_class_name))
-
-                for attribute_name in _attributes:
-                    if ';' in attribute_name:  # remove tags for checking
-                        attribute_name_to_check = attribute_name.split(';')[0]
-                    else:
-                        attribute_name_to_check = attribute_name
-
-                    if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
-                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
-            request = add_operation(dn, _attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self)
-            response = self.post_send_single_response(self.send('addRequest', request, controls))
-            self._entries = []
-
-        if isinstance(response, STRING_TYPES + (int, )):
-            return_value = response
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self)
-        else:
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self)
-            return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False
-            if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
-                self.last_error = self.result['description']
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done ADD operation, result <%s>', return_value)
-
-        return return_value
-
-    def delete(self,
-               dn,
-               controls=None):
-        """
-        Delete the entry identified by the DN from the DIB.
-        """
-        if log_enabled(BASIC):
-            log(BASIC, 'start DELETE operation via <%s>', self)
-        self.last_error = None
-        if self.check_names:
-            dn = safe_dn(dn)
-            if log_enabled(EXTENDED):
-                log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self)
-
-        with self.connection_lock:
-            self._fire_deferred()
-            if self.read_only:
-                self.last_error = 'connection is read-only'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPConnectionIsReadOnlyError(self.last_error)
-
-            request = delete_operation(dn)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self)
-            response = self.post_send_single_response(self.send('delRequest', request, controls))
-            self._entries = []
-
-        if isinstance(response, STRING_TYPES + (int, )):
-            return_value = response
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self)
-        else:
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self)
-            return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False
-            if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
-                self.last_error = self.result['description']
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done DELETE operation, result <%s>', return_value)
-
-        return return_value
-
-    def modify(self,
-               dn,
-               changes,
-               controls=None):
-        """
-        Modify attributes of entry
-
-        - changes is a dictionary in the form {'attribute1': change, 'attribute2': [change, change, ...], ...}
-        - change is (operation, [value1, value2, ...])
-        - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE), 3 (MODIFY_INCREMENT)
-        """
-        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-
-        if log_enabled(BASIC):
-            log(BASIC, 'start MODIFY operation via <%s>', self)
-        self.last_error = None
-        if self.check_names:
-            dn = safe_dn(dn)
-            if log_enabled(EXTENDED):
-                log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self)
-
-        with self.connection_lock:
-            self._fire_deferred()
-            if self.read_only:
-                self.last_error = 'connection is read-only'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPConnectionIsReadOnlyError(self.last_error)
-
-            if not isinstance(changes, dict):
-                self.last_error = 'changes must be a dictionary'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPChangeError(self.last_error)
-
-            if not changes:
-                self.last_error = 'no changes in modify request'
-                if log_enabled(ERROR):
-                    log(ERROR, '%s for <%s>', self.last_error, self)
-                raise LDAPChangeError(self.last_error)
-
-            for attribute_name in changes:
-                if self.server and self.server.schema and self.check_names:
-                    if ';' in attribute_name:  # remove tags for checking
-                        attribute_name_to_check = attribute_name.split(';')[0]
-                    else:
-                        attribute_name_to_check = attribute_name
-
-                    if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
-                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-                change = changes[attribute_name]
-                if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
-                    if len(change) != 2:
-                        self.last_error = 'malformed change'
-                        if log_enabled(ERROR):
-                            log(ERROR, '%s for <%s>', self.last_error, self)
-                        raise LDAPChangeError(self.last_error)
-
-                    changes[attribute_name] = [change]  # insert change in a tuple
-                else:
-                    for change_operation in change:
-                        if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
-                            self.last_error = 'invalid change list'
-                            if log_enabled(ERROR):
-                                log(ERROR, '%s for <%s>', self.last_error, self)
-                            raise LDAPChangeError(self.last_error)
-            request = modify_operation(dn, changes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self)
-            response = self.post_send_single_response(self.send('modifyRequest', request, controls))
-            self._entries = []
-
-        if isinstance(response, STRING_TYPES + (int, )):
-            return_value = response
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self)
-        else:
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self)
-            return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False
-            if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
-                self.last_error = self.result['description']
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done MODIFY operation, result <%s>', return_value)
-
-        return return_value
-
-    def modify_dn(self,
-                  dn,
-                  relative_dn,
-                  delete_old_dn=True,
-                  new_superior=None,
-                  controls=None):
-        """
-        Modify DN of the entry or performs a move of the entry in the
-        DIT.
- """ - if log_enabled(BASIC): - log(BASIC, 'start MODIFY DN operation via <%s>', self) - self.last_error = None - if self.check_names: - dn = safe_dn(dn) - if log_enabled(EXTENDED): - log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self) - relative_dn = safe_dn(relative_dn) - if log_enabled(EXTENDED): - log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self) - - with self.connection_lock: - self._fire_deferred() - if self.read_only: - self.last_error = 'connection is read-only' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', self.last_error, self) - raise LDAPConnectionIsReadOnlyError(self.last_error) - - if new_superior and not dn.startswith(relative_dn): # as per RFC4511 (4.9) - self.last_error = 'DN cannot change while performing moving' - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', self.last_error, self) - raise LDAPChangeError(self.last_error) - - request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior) - if log_enabled(PROTOCOL): - log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self) - response = self.post_send_single_response(self.send('modDNRequest', request, controls)) - self._entries = [] - - if isinstance(response, STRING_TYPES + (int, )): - return_value = response - if log_enabled(PROTOCOL): - log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self) - else: - if log_enabled(PROTOCOL): - log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self) - return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False - if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: - self.last_error = self.result['description'] - - if log_enabled(BASIC): - log(BASIC, 'done MODIFY DN operation, result <%s>', return_value) - - return return_value - - def abandon(self, - message_id, - controls=None): - """ - Abandon the operation indicated by message_id - """ - if log_enabled(BASIC): - log(BASIC, 'start ABANDON operation via <%s>', self) - self.last_error = None - with self.connection_lock: - self._fire_deferred() - return_value = False - if self.strategy._outstanding or message_id == 0: - # only current operation should be abandoned, abandon, bind and unbind cannot ever be abandoned, - # messagiId 0 is invalid and should be used as a "ping" to keep alive the connection - if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0: - request = abandon_operation(message_id) - if log_enabled(PROTOCOL): - log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self) - self.send('abandonRequest', request, controls) - self.result = None - self.response = None - self._entries = [] - return_value = True - else: - if log_enabled(ERROR): - log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self) - - if log_enabled(BASIC): - log(BASIC, 'done ABANDON operation, result <%s>', return_value) - - return return_value - - def extended(self, - request_name, - request_value=None, - controls=None, - no_encode=None): - """ - Performs an extended operation - """ - if log_enabled(BASIC): - log(BASIC, 'start EXTENDED operation via <%s>', self) - self.last_error = None - with self.connection_lock: 
-            self._fire_deferred()
-            request = extended_operation(request_name, request_value, no_encode=no_encode)
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self)
-            response = self.post_send_single_response(self.send('extendedReq', request, controls))
-            self._entries = []
-        if isinstance(response, int):
-            return_value = response
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self)
-        else:
-            if log_enabled(PROTOCOL):
-                log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self)
-            return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False
-            if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
-                self.last_error = self.result['description']
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done EXTENDED operation, result <%s>', return_value)
-
-        return return_value
-
-    def start_tls(self, read_server_info=True):  # as per RFC4511. Removal of TLS is defined as MAY in RFC4511 so the client can't implement a generic stop_tls method
-
-        if log_enabled(BASIC):
-            log(BASIC, 'start START TLS operation via <%s>', self)
-
-        with self.connection_lock:
-            return_value = False
-            if not self.server.tls:
-                self.server.tls = Tls()
-
-            if self.lazy and not self._executing_deferred:
-                self._deferred_start_tls = True
-                self.tls_started = True
-                return_value = True
-                if log_enabled(BASIC):
-                    log(BASIC, 'deferring START TLS for <%s>', self)
-            else:
-                self._deferred_start_tls = False
-                if self.server.tls.start_tls(self) and self.strategy.sync:  # for asynchronous connections _start_tls is run by the strategy
-                    if read_server_info:
-                        self.refresh_server_info()  # refresh server info as per RFC4515 (3.1.5)
-                    return_value = True
-                elif not self.strategy.sync:
-                    return_value = True
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done START TLS operation, result <%s>', return_value)
-
-        return return_value
-
-    def do_sasl_bind(self,
-                     controls):
-        if log_enabled(BASIC):
-            log(BASIC, 'start SASL BIND operation via <%s>', self)
-        self.last_error = None
-        with self.connection_lock:
-            result = None
-
-            if not self.sasl_in_progress:
-                self.sasl_in_progress = True
-                try:
-                    if self.sasl_mechanism == EXTERNAL:
-                        result = sasl_external(self, controls)
-                    elif self.sasl_mechanism == DIGEST_MD5:
-                        result = sasl_digest_md5(self, controls)
-                    elif self.sasl_mechanism == GSSAPI:
-                        from ..protocol.sasl.kerberos import sasl_gssapi  # needs the gssapi package
-                        result = sasl_gssapi(self, controls)
-                    elif self.sasl_mechanism == 'PLAIN':
-                        result = sasl_plain(self, controls)
-                finally:
-                    self.sasl_in_progress = False
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done SASL BIND operation, result <%s>', result)
-
-        return result
-
-    def do_ntlm_bind(self,
-                     controls):
-        if log_enabled(BASIC):
-            log(BASIC, 'start NTLM BIND operation via <%s>', self)
-        self.last_error = None
-        with self.connection_lock:
-            result = None
-            if not self.sasl_in_progress:
-                self.sasl_in_progress = True  # ntlm is same of sasl authentication
-                # additional import for NTLM
-                from ..utils.ntlm import NtlmClient
-                domain_name, user_name = self.user.split('\\', 1)
-                ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password)
-
-                # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx
-                # send a sicilyPackageDiscovery request (in the bindRequest)
-                request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client)
-                if log_enabled(PROTOCOL):
-                    log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self)
-                response = self.post_send_single_response(self.send('bindRequest', request, controls))
-                if not self.strategy.sync:
-                    _, result = self.get_response(response)
-                else:
-                    result = response[0]
-                if 'server_creds' in result:
-                    sicily_packages = result['server_creds'].decode('ascii').split(';')
-                    if 'NTLM' in sicily_packages:  # NTLM available on server
-                        request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client)
-                        if log_enabled(PROTOCOL):
-                            log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self)
-                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
-                        if not self.strategy.sync:
-                            _, result = self.get_response(response)
-                        else:
-                            if log_enabled(PROTOCOL):
-                                log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0], self)
-                            result = response[0]
-
-                        if result['result'] == RESULT_SUCCESS:
-                            request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client, result['server_creds'])
-                            if log_enabled(PROTOCOL):
-                                log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self)
-                            response = self.post_send_single_response(self.send('bindRequest', request, controls))
-                            if not self.strategy.sync:
-                                _, result = self.get_response(response)
-                            else:
-                                if log_enabled(PROTOCOL):
-                                    log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self)
-                                result = response[0]
-                else:
-                    result = None
-                self.sasl_in_progress = False
-
-        if log_enabled(BASIC):
-            log(BASIC, 'done SASL NTLM operation, result <%s>', result)
-
-        return result
-
-    def refresh_server_info(self):
-        # if self.strategy.no_real_dsa:  # do not refresh for mock strategies
-        #     return
-
-        if not self.strategy.pooled:
-            with self.connection_lock:
-                if not self.closed:
-                    if log_enabled(BASIC):
-                        log(BASIC, 'refreshing server info for <%s>', self)
-                    previous_response = self.response
-                    previous_result = self.result
-                    previous_entries = self._entries
-                    self.server.get_info_from_server(self)
-                    self.response = previous_response
-                    self.result = previous_result
-                    self._entries = previous_entries
-        else:
-            if log_enabled(BASIC):
-                log(BASIC, 'refreshing server info from pool for <%s>', self)
-            self.strategy.pool.get_info_from_server()
-
-    def response_to_ldif(self,
-                         search_result=None,
-                         all_base64=False,
-                         line_separator=None,
-                         sort_order=None,
-                         stream=None):
-        with self.connection_lock:
-            if search_result is None:
-                search_result = self.response
-
-            if isinstance(search_result, SEQUENCE_TYPES):
-                ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order)
-                ldif_lines = add_ldif_header(ldif_lines)
-                line_separator = line_separator or linesep
-                ldif_output = line_separator.join(ldif_lines)
-                if stream:
-                    if stream.tell() == 0:
-                        header = add_ldif_header(['-'])[0]
-                        stream.write(prepare_for_stream(header + line_separator + line_separator))
-                    stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
-                if log_enabled(BASIC):
-                    log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self)
-                return ldif_output
-
-            return None
-
-    def response_to_json(self,
-                         raw=False,
-                         search_result=None,
-                         indent=4,
-                         sort=True,
-                         stream=None,
-                         checked_attributes=True,
-                         include_empty=True):
-
-        with self.connection_lock:
-            if search_result is None:
-                search_result = self.response
-
-            if isinstance(search_result, SEQUENCE_TYPES):
-                json_dict = dict()
-                json_dict['entries'] = []
-
-                for response in search_result:
-                    if response['type'] == 'searchResEntry':
-                        entry = dict()
-
-                        entry['dn'] = response['dn']
-                        if checked_attributes:
-                            if not include_empty:
-                                # needed for python 2.6 compatibility
-                                entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
-                            else:
-                                entry['attributes'] = dict(response['attributes'])
-                        if raw:
-                            if not include_empty:
-                                # needed for python 2.6 compatibility
-                                entry['raw_attributes'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
-                            else:
-                                entry['raw'] = dict(response['raw_attributes'])
-                        json_dict['entries'].append(entry)
-
-                if str is bytes:  # Python 2
-                    check_json_dict(json_dict)
-
-                json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
-
-                if log_enabled(BASIC):
-                    log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
-                if stream:
-                    stream.write(json_output)
-
-                return json_output
-
-    def response_to_file(self,
-                         target,
-                         raw=False,
-                         indent=4,
-                         sort=True):
-        with self.connection_lock:
-            if self.response:
-                if isinstance(target, STRING_TYPES):
-                    target = open(target, 'w+')
-
-                if log_enabled(BASIC):
-                    log(BASIC, 'writing response to file for <%s>', self)
-
-                target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
-                target.close()
-
-    def _fire_deferred(self, read_info=True):
-        with self.connection_lock:
-            if self.lazy and not self._executing_deferred:
-                self._executing_deferred = True
-
-                if log_enabled(BASIC):
-                    log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
-                try:
-                    if self._deferred_open:
-                        self.open(read_server_info=False)
-                    if self._deferred_start_tls:
-                        self.start_tls(read_server_info=False)
-                    if self._deferred_bind:
-                        self.bind(read_server_info=False, controls=self._bind_controls)
-                    if read_info:
-                        self.refresh_server_info()
-                except LDAPExceptionError as e:
-                    if log_enabled(ERROR):
-                        log(ERROR, '%s for <%s>', e, self)
-                    raise  # re-raise LDAPExceptionError
-                finally:
-                    self._executing_deferred = False
-
-    @property
-    def entries(self):
-        if self.response:
-            if not self._entries:
-                self._entries = self._get_entries(self.response)
-        return self._entries
-
-    def _get_entries(self, search_response):
-        with self.connection_lock:
-            from .. import ObjectDef, Reader
-
-            # build a table of ObjectDefs, grouping the entries found in search_response for their attributes set, subset will be included in superset
-            attr_sets = []
-            for response in search_response:
-                if response['type'] == 'searchResEntry':
-                    resp_attr_set = set(response['attributes'].keys())
-                    if resp_attr_set not in attr_sets:
-                        attr_sets.append(resp_attr_set)
-            attr_sets.sort(key=lambda x: -len(x))  # sorts the list in descending length order
-            unique_attr_sets = []
-            for attr_set in attr_sets:
-                for unique_set in unique_attr_sets:
-                    if unique_set >= attr_set:  # checks if unique set is a superset of attr_set
-                        break
-                else:  # the attr_set is not a subset of any element in unique_attr_sets
-                    unique_attr_sets.append(attr_set)
-            object_defs = []
-            for attr_set in unique_attr_sets:
-                object_def = ObjectDef(schema=self.server.schema)
-                object_def += list(attr_set)  # converts the set in a list to be added to the object definition
-                object_defs.append((attr_set,
-                                    object_def,
-                                    Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set))
-                                   )  # objects_defs contains a tuple with the set, the ObjectDef and a cursor
-
-            entries = []
-            for response in search_response:
-                if response['type'] == 'searchResEntry':
-                    resp_attr_set = set(response['attributes'].keys())
-                    for object_def in object_defs:
-                        if resp_attr_set <= object_def[0]:  # finds the ObjectDef for the attribute set of this entry
-                            entry = object_def[2]._create_entry(response)
-                            entries.append(entry)
-                            break
-                    else:
-                        if log_enabled(ERROR):
-                            log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self)
-                        raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set))
-
-        return entries
+"""
+"""
+
+# Created on 2014.05.31
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+from copy import deepcopy
+from os import linesep
+from threading import RLock, Lock
+from functools import reduce
+import json
+
+from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \
+    SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \
+    RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND,\
+    AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL,\
+    DIGEST_MD5, GSSAPI, PLAIN
+
+from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE
+from ..extend import ExtendedOperationsRoot
+from .pooling import ServerPool
+from .server import Server
+from ..operation.abandon import abandon_operation, abandon_request_to_dict
+from ..operation.add import add_operation, add_request_to_dict
+from ..operation.bind import bind_operation, bind_request_to_dict
+from ..operation.compare import compare_operation, compare_request_to_dict
+from ..operation.delete import delete_operation, delete_request_to_dict
+from ..operation.extended import extended_operation, extended_request_to_dict
+from ..operation.modify import modify_operation, modify_request_to_dict
+from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict
+from ..operation.search import search_operation, search_request_to_dict
+from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
+from ..protocol.sasl.digestMd5 import sasl_digest_md5
+from ..protocol.sasl.external import sasl_external
+from ..protocol.sasl.plain import sasl_plain
+from ..strategy.sync import SyncStrategy
+from ..strategy.mockAsync import MockAsyncStrategy
+from ..strategy.asynchronous import AsyncStrategy
+from ..strategy.reusable import ReusableStrategy
+from ..strategy.restartable import RestartableStrategy
+from ..strategy.ldifProducer import LdifProducerStrategy
+from ..strategy.mockSync import MockSyncStrategy
+from ..strategy.asyncStream import AsyncStreamStrategy
+from ..operation.unbind import unbind_operation
+from ..protocol.rfc2696 import paged_search_control
+from .usage import ConnectionUsage
+from .tls import Tls
+from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \
+    LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \
+    LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError, \
+    LDAPInvalidPortError
+
+from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data
+from ..utils.dn import safe_dn
+from ..utils.port_validators import check_port_and_port_list
+
+
+SASL_AVAILABLE_MECHANISMS = [EXTERNAL,
+                             DIGEST_MD5,
+                             GSSAPI,
+                             PLAIN]
+
+CLIENT_STRATEGIES = [SYNC,
+                     ASYNC,
+                     LDIF,
+                     RESTARTABLE,
+                     REUSABLE,
+                     MOCK_SYNC,
+                     MOCK_ASYNC,
+                     ASYNC_STREAM]
+
+
+def _format_socket_endpoint(endpoint):
+    if endpoint and len(endpoint) == 2:  # IPv4
+        return str(endpoint[0]) + ':' + str(endpoint[1])
+    elif endpoint and len(endpoint) == 4:  # IPv6
+        return '[' + str(endpoint[0]) + ']:' + str(endpoint[1])
+
+    try:
+        return str(endpoint)
+    except Exception:
+        return '?'
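This helper is unchanged between the removed and the added version of the module. A quick illustration of its behaviour, with made-up endpoint values that are not taken from the diff:

    _format_socket_endpoint(('192.168.0.1', 389))    # IPv4 2-tuple -> '192.168.0.1:389'
    _format_socket_endpoint(('::1', 636, 0, 0))      # IPv6 4-tuple -> '[::1]:636'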
+
+
+def _format_socket_endpoints(sock):
+    if sock:
+        try:
+            local = sock.getsockname()
+        except Exception:
+            local = (None, None, None, None)
+        try:
+            remote = sock.getpeername()
+        except Exception:
+            remote = (None, None, None, None)
+
+        return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>'
+    return '<no socket>'
+
+
+# noinspection PyProtectedMember
+class Connection(object):
+    """Main ldap connection class.
+
+    Controls, if used, must be a list of tuples. Each tuple must have 3
+    elements: the control OID, a boolean meaning if the control is
+    critical, and a value.
+
+    If the boolean is set to True the server must honor the control or
+    refuse the operation.
+
+    Mixing controls must be defined in controls specification (as per
+    RFC 4511).
+
+    :param server: the Server object to connect to
+    :type server: Server, str
+    :param user: the user name for simple authentication
+    :type user: str
+    :param password: the password for simple authentication
+    :type password: str
+    :param auto_bind: specify if the bind will be performed automatically when defining the Connection object
+    :type auto_bind: int, can be one of AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3
+    :param version: LDAP version, default to 3
+    :type version: int
+    :param authentication: type of authentication
+    :type authentication: int, can be one of AUTH_ANONYMOUS, AUTH_SIMPLE or AUTH_SASL, as specified in ldap3
+    :param client_strategy: communication strategy used in the Connection
+    :type client_strategy: can be one of STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, STRATEGY_LDIF_PRODUCER, STRATEGY_SYNC_RESTARTABLE, STRATEGY_REUSABLE_THREADED as specified in ldap3
+    :param auto_referrals: specify if the connection object must automatically follow referrals
+    :type auto_referrals: bool
+    :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN'
+    :type sasl_mechanism: str
+    :param sasl_credentials: credentials for SASL mechanism
+    :type sasl_credentials: tuple
+    :param check_names: if True the library will check names of attributes and object classes against the schema. Also values found in entries will be formatted as indicated by the schema
+    :type check_names: bool
+    :param collect_usage: collect usage metrics in the usage attribute
+    :type collect_usage: bool
+    :param read_only: disable operations that modify data in the LDAP server
+    :type read_only: bool
+    :param lazy: open and bind the connection only when an actual operation is performed
+    :type lazy: bool
+    :param raise_exceptions: raise exceptions when operations are not successful; if False operations return False when not successful but do not raise exceptions
+    :type raise_exceptions: bool
+    :param pool_name: pool name for pooled strategies
+    :type pool_name: str
+    :param pool_size: pool size for pooled strategies
+    :type pool_size: int
+    :param pool_lifetime: pool lifetime for pooled strategies
+    :type pool_lifetime: int
+    :param cred_store: credential store for gssapi
+    :type cred_store: dict
+    :param use_referral_cache: keep referral connections open and reuse them
+    :type use_referral_cache: bool
+    :param auto_escape: automatic escaping of filter values
+    :type auto_escape: bool
+    :param auto_encode: automatic encoding of attribute values
+    :type auto_encode: bool
+    :param source_address: the ip address or hostname to use as the source when opening the connection to the server
+    :type source_address: str
+    :param source_port: the source port to use when opening the connection to the server.
Cannot be specified with source_port_list + :type source_port: int + :param source_port_list: a list of source ports to choose from when opening the connection to the server. Cannot be specified with source_port + :type source_port_list: list + """ + + def __init__(self, + server, + user=None, + password=None, + auto_bind=AUTO_BIND_DEFAULT, + version=3, + authentication=None, + client_strategy=SYNC, + auto_referrals=True, + auto_range=True, + sasl_mechanism=None, + sasl_credentials=None, + check_names=True, + collect_usage=False, + read_only=False, + lazy=False, + raise_exceptions=False, + pool_name=None, + pool_size=None, + pool_lifetime=None, + cred_store=None, + fast_decoder=True, + receive_timeout=None, + return_empty_attributes=True, + use_referral_cache=False, + auto_escape=True, + auto_encode=True, + pool_keepalive=None, + source_address=None, + source_port=None, + source_port_list=None): + + conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME') + self.connection_lock = RLock() # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread + with self.connection_lock: + if client_strategy not in CLIENT_STRATEGIES: + self.last_error = 'unknown client connection strategy' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownStrategyError(self.last_error) + + self.strategy_type = client_strategy + self.user = user + self.password = password + + if not authentication and self.user: + self.authentication = SIMPLE + elif not authentication: + self.authentication = ANONYMOUS + elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]: + self.authentication = authentication + else: + self.last_error = 'unknown authentication method' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownAuthenticationMethodError(self.last_error) + + self.version = version + self.auto_referrals = True if auto_referrals else False + self.request = None + self.response = None + self.result = None + self.bound = False + self.listening = False + self.closed = True + self.last_error = None + if auto_bind is False: # compatibility with older version where auto_bind was a boolean + self.auto_bind = AUTO_BIND_DEFAULT + elif auto_bind is True: + self.auto_bind = AUTO_BIND_NO_TLS + else: + self.auto_bind = auto_bind + self.sasl_mechanism = sasl_mechanism + self.sasl_credentials = sasl_credentials + self._usage = ConnectionUsage() if collect_usage else None + self.socket = None + self.tls_started = False + self.sasl_in_progress = False + self.read_only = read_only + self._context_state = [] + self._deferred_open = False + self._deferred_bind = False + self._deferred_start_tls = False + self._bind_controls = None + self._executing_deferred = False + self.lazy = lazy + self.pool_name = pool_name if pool_name else conf_default_pool_name + self.pool_size = pool_size + self.cred_store = cred_store + self.pool_lifetime = pool_lifetime + self.pool_keepalive = pool_keepalive + self.starting_tls = False + self.check_names = check_names + self.raise_exceptions = raise_exceptions + self.auto_range = True if auto_range else False + self.extend = ExtendedOperationsRoot(self) + self._entries = [] + self.fast_decoder = fast_decoder + self.receive_timeout = receive_timeout + self.empty_attributes = return_empty_attributes + self.use_referral_cache = use_referral_cache + self.auto_escape = auto_escape + self.auto_encode = auto_encode + + port_err = check_port_and_port_list(source_port, 
source_port_list) + if port_err: + if log_enabled(ERROR): + log(ERROR, port_err) + raise LDAPInvalidPortError(port_err) + # using an empty string to bind a socket means "use the default as if this wasn't provided" because socket + # binding requires that you pass something for the ip if you want to pass a specific port + self.source_address = source_address if source_address is not None else '' + # using 0 as the source port to bind a socket means "use the default behavior of picking a random port from + # all ports as if this wasn't provided" because socket binding requires that you pass something for the port + # if you want to pass a specific ip + self.source_port_list = [0] + if source_port is not None: + self.source_port_list = [source_port] + elif source_port_list is not None: + self.source_port_list = source_port_list[:] + + if isinstance(server, STRING_TYPES): + server = Server(server) + if isinstance(server, SEQUENCE_TYPES): + server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True) + + if isinstance(server, ServerPool): + self.server_pool = server + self.server_pool.initialize(self) + self.server = self.server_pool.get_current_server(self) + else: + self.server_pool = None + self.server = server + + # if self.authentication == SIMPLE and self.user and self.check_names: + # self.user = safe_dn(self.user) + # if log_enabled(EXTENDED): + # log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self) + + if self.strategy_type == SYNC: + self.strategy = SyncStrategy(self) + elif self.strategy_type == ASYNC: + self.strategy = AsyncStrategy(self) + elif self.strategy_type == LDIF: + self.strategy = LdifProducerStrategy(self) + elif self.strategy_type == RESTARTABLE: + self.strategy = RestartableStrategy(self) + elif self.strategy_type == REUSABLE: + self.strategy = ReusableStrategy(self) + self.lazy = False + elif self.strategy_type == MOCK_SYNC: + self.strategy = MockSyncStrategy(self) + elif self.strategy_type == MOCK_ASYNC: + self.strategy = MockAsyncStrategy(self) + elif self.strategy_type == ASYNC_STREAM: + self.strategy = AsyncStreamStrategy(self) + else: + self.last_error = 'unknown strategy' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownStrategyError(self.last_error) + + # maps strategy functions to connection functions + self.send = self.strategy.send + self.open = self.strategy.open + self.get_response = self.strategy.get_response + self.post_send_single_response = self.strategy.post_send_single_response + self.post_send_search = self.strategy.post_send_search + + if not self.strategy.no_real_dsa: + self.do_auto_bind() + # else: # for strategies with a fake server set get_info to NONE if server hasn't a schema + # if self.server and not self.server.schema: + # self.server.get_info = NONE + if log_enabled(BASIC): + if get_library_log_hide_sensitive_data(): + log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped()) + else: + log(BASIC, 'instantiated Connection: <%r>', self) + + def do_auto_bind(self): + if self.auto_bind and self.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]: + if log_enabled(BASIC): + log(BASIC, 'performing automatic bind for <%s>', self) + if self.closed: + self.open(read_server_info=False) + if self.auto_bind == AUTO_BIND_NO_TLS: + self.bind(read_server_info=True) + elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND: + self.start_tls(read_server_info=False) + self.bind(read_server_info=True) + elif self.auto_bind == 
AUTO_BIND_TLS_AFTER_BIND: + self.bind(read_server_info=False) + self.start_tls(read_server_info=True) + if not self.bound: + self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '') + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + self.unbind() + raise LDAPBindError(self.last_error) + + def __str__(self): + s = [ + str(self.server) if self.server else 'None', + 'user: ' + str(self.user), + 'lazy' if self.lazy else 'not lazy', + 'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'), + 'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'), + _format_socket_endpoints(self.socket), + 'tls not started' if not self.tls_started else('deferred start_tls' if self._deferred_start_tls else 'tls started'), + 'listening' if self.listening else 'not listening', + self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy', + 'internal decoder' if self.fast_decoder else 'pyasn1 decoder' + ] + return ' - '.join(s) + + def __repr__(self): + conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME') + if self.server_pool: + r = 'Connection(server={0.server_pool!r}'.format(self) + else: + r = 'Connection(server={0.server!r}'.format(self) + r += '' if self.user is None else ', user={0.user!r}'.format(self) + r += '' if self.password is None else ', password={0.password!r}'.format(self) + r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self) + r += '' if self.version is None else ', version={0.version!r}'.format(self) + r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self) + r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self) + r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self) + r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self) + r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self) + r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self) + r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False')) + r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self) + r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self) + r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self) + r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self) + r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self) + r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self) + r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self) + r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store)) + r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False')) + r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False')) + r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self) + r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False')) + r += '' if self.auto_encode is None else (', 
auto_encode=' + ('True' if self.auto_encode else 'False'))
+        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
+        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
+        r += ')'
+
+        return r
+
+    def repr_with_sensitive_data_stripped(self):
+        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
+        if self.server_pool:
+            r = 'Connection(server={0.server_pool!r}'.format(self)
+        else:
+            r = 'Connection(server={0.server!r}'.format(self)
+        r += '' if self.user is None else ', user={0.user!r}'.format(self)
+        r += '' if self.password is None else ", password='{0}'".format('<stripped %d characters of sensitive data>' % len(self.password))
+        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
+        r += '' if self.version is None else ', version={0.version!r}'.format(self)
+        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
+        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
+        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
+        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
+        if self.sasl_mechanism == DIGEST_MD5:
+            r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
+        else:
+            r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
+        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
+        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
+        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
+        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
+        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
+        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
+        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
+        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
+        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
+        r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store))
+        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
+        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
+        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
+        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
+        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
+        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
+        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
+        r += ')'
+
+        return r
+
+    @property
+    def stream(self):
+        """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
:return: reference to the response stream if defined in the strategy. + """ + return self.strategy.get_stream() if self.strategy.can_stream else None + + @stream.setter + def stream(self, value): + with self.connection_lock: + if self.strategy.can_stream: + self.strategy.set_stream(value) + + @property + def usage(self): + """Usage statistics for the connection. + :return: Usage object + """ + if not self._usage: + return None + if self.strategy.pooled: # update master connection usage from pooled connections + self._usage.reset() + for worker in self.strategy.pool.workers: + self._usage += worker.connection.usage + self._usage += self.strategy.pool.terminated_usage + return self._usage + + def __enter__(self): + with self.connection_lock: + self._context_state.append((self.bound, self.closed)) # save status out of context as a tuple in a list + if self.auto_bind != AUTO_BIND_NONE: + if self.auto_bind == AUTO_BIND_DEFAULT: + self.auto_bind = AUTO_BIND_NO_TLS + if self.closed: + self.open() + if not self.bound: + self.bind() + + return self + + # noinspection PyUnusedLocal + def __exit__(self, exc_type, exc_val, exc_tb): + with self.connection_lock: + context_bound, context_closed = self._context_state.pop() + if (not context_bound and self.bound) or self.stream: # restore status prior to entering context + try: + self.unbind() + except LDAPExceptionError: + pass + + if not context_closed and self.closed: + self.open() + + if exc_type is not None: + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', exc_type, self) + return False # re-raise LDAPExceptionError + + def bind(self, + read_server_info=True, + controls=None): + """Bind to ldap Server with the authentication method and the user defined in the connection + + :param read_server_info: reads info from server + :param controls: LDAP controls to send along with the bind operation + :type controls: list of tuple + :return: bool + + """ + if log_enabled(BASIC): + log(BASIC, 'start BIND operation via <%s>', self) + self.last_error = None + with self.connection_lock: + if self.lazy and not self._executing_deferred: + if self.strategy.pooled: + self.strategy.validate_bind(controls) + self._deferred_bind = True + self._bind_controls = controls + self.bound = True + if log_enabled(BASIC): + log(BASIC, 'deferring bind for <%s>', self) + else: + self._deferred_bind = False + self._bind_controls = None + if self.closed: # try to open connection if closed + self.open(read_server_info=False) + if self.authentication == ANONYMOUS: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing anonymous BIND for <%s>', self) + if not self.strategy.pooled: + request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self) + response = self.post_send_single_response(self.send('bindRequest', request, controls)) + else: + response = self.strategy.validate_bind(controls) # only for REUSABLE + elif self.authentication == SIMPLE: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing simple BIND for <%s>', self) + if not self.strategy.pooled: + request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self) + response = self.post_send_single_response(self.send('bindRequest', request, controls)) + else: + response = 
self.strategy.validate_bind(controls) # only for REUSABLE + elif self.authentication == SASL: + if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing SASL BIND for <%s>', self) + if not self.strategy.pooled: + response = self.do_sasl_bind(controls) + else: + response = self.strategy.validate_bind(controls) # only for REUSABLE + else: + self.last_error = 'requested SASL mechanism not supported' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPSASLMechanismNotSupportedError(self.last_error) + elif self.authentication == NTLM: + if self.user and self.password and len(self.user.split('\\')) == 2: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing NTLM BIND for <%s>', self) + if not self.strategy.pooled: + response = self.do_ntlm_bind(controls) + else: + response = self.strategy.validate_bind(controls) # only for REUSABLE + else: # user or password missing + self.last_error = 'NTLM needs domain\\username and a password' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownAuthenticationMethodError(self.last_error) + else: + self.last_error = 'unknown authentication method' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownAuthenticationMethodError(self.last_error) + + if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM): # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy + _, result = self.get_response(response) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self) + elif self.strategy.sync: + result = self.result + if log_enabled(PROTOCOL): + log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self) + elif self.strategy.pooled or self.authentication in (SASL, NTLM): # asynchronous SASL and NTLM or reusable strtegy get the bind result synchronously + result = response + else: + self.last_error = 'unknown authentication method' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPUnknownAuthenticationMethodError(self.last_error) + + if result is None: + # self.bound = True if self.strategy_type == REUSABLE else False + self.bound = False + elif result is True: + self.bound = True + elif result is False: + self.bound = False + else: + self.bound = True if result['result'] == RESULT_SUCCESS else False + if not self.bound and result and result['description'] and not self.last_error: + self.last_error = result['description'] + + if read_server_info and self.bound: + self.refresh_server_info() + self._entries = [] + + if log_enabled(BASIC): + log(BASIC, 'done BIND operation, result <%s>', self.bound) + + return self.bound + + def rebind(self, + user=None, + password=None, + authentication=None, + sasl_mechanism=None, + sasl_credentials=None, + read_server_info=True, + controls=None + ): + + if log_enabled(BASIC): + log(BASIC, 'start (RE)BIND operation via <%s>', self) + self.last_error = None + with self.connection_lock: + if user: + self.user = user + if password is not None: + self.password = password + if not authentication and user: + self.authentication = SIMPLE + if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]: + self.authentication = authentication + elif authentication is not None: + self.last_error = 'unknown authentication method' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, 
self)
+                raise LDAPUnknownAuthenticationMethodError(self.last_error)
+            if sasl_mechanism:
+                self.sasl_mechanism = sasl_mechanism
+            if sasl_credentials:
+                self.sasl_credentials = sasl_credentials
+
+            # if self.authentication == SIMPLE and self.user and self.check_names:
+            #     self.user = safe_dn(self.user)
+            #     if log_enabled(EXTENDED):
+            #         log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
+
+            if not self.strategy.pooled:
+                try:
+                    return self.bind(read_server_info, controls)
+                except LDAPSocketReceiveError:
+                    raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
+            else:
+                self.strategy.pool.rebind_pool()
+                return True
+
+    def unbind(self,
+               controls=None):
+        """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
+
+        :param controls: LDAP controls to send along with the unbind operation
+
+        """
+        if log_enabled(BASIC):
+            log(BASIC, 'start UNBIND operation via <%s>', self)
+
+        if self.use_referral_cache:
+            self.strategy.unbind_referral_cache()
+
+        self.last_error = None
+        with self.connection_lock:
+            if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open):  # clear deferred status
+                self.strategy.close()
+                self._deferred_open = False
+                self._deferred_bind = False
+                self._deferred_start_tls = False
+            elif not self.closed:
+                request = unbind_operation()
+                if log_enabled(PROTOCOL):
+                    log(PROTOCOL, 'UNBIND request sent via <%s>', self)
+                self.send('unbindRequest', request, controls)
+                self.strategy.close()
+
+            if log_enabled(BASIC):
+                log(BASIC, 'done UNBIND operation, result <%s>', True)
+
+            return True
+
+    def search(self,
+               search_base,
+               search_filter,
+               search_scope=SUBTREE,
+               dereference_aliases=DEREF_ALWAYS,
+               attributes=None,
+               size_limit=0,
+               time_limit=0,
+               types_only=False,
+               get_operational_attributes=False,
+               controls=None,
+               paged_size=None,
+               paged_criticality=False,
+               paged_cookie=None,
+               auto_escape=None):
+        """
+        Perform an ldap search:
+
+        - If attributes is empty no attribute is returned
+        - If attributes is ALL_ATTRIBUTES all attributes are returned
+        - If paged_size is an int greater than 0 a simple paged search is tried as described in RFC2696 with the specified size
+        - If paged is 0 and cookie is present the search is abandoned on server
+        - Cookie is an opaque string received in the last paged search and must be sent back on the next paged search request
+        - If lazy == True open and bind will be deferred until another LDAP operation is performed
+        - If missing_attributes == True then an attribute not returned by the server is set to None
+        - If auto_escape is set it overrides the Connection auto_escape
+        """
+        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
+        if log_enabled(BASIC):
+            log(BASIC, 'start SEARCH operation via <%s>', self)
+
+        if self.check_names and search_base:
+            search_base = safe_dn(search_base)
+            if log_enabled(EXTENDED):
+                log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
+
+        with self.connection_lock:
+            self._fire_deferred()
+            if not attributes:
+                attributes = [NO_ATTRIBUTES]
+            elif attributes == ALL_ATTRIBUTES:
+                attributes = [ALL_ATTRIBUTES]
+
+            if isinstance(attributes, STRING_TYPES):
+                attributes = [attributes]
+
+            if get_operational_attributes and isinstance(attributes, list):
+                attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
+            elif get_operational_attributes and isinstance(attributes,
tuple): + attributes += (ALL_OPERATIONAL_ATTRIBUTES, ) # concatenate tuple + + if isinstance(paged_size, int): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self) + + if controls is None: + controls = [] + else: + # Copy the controls to prevent modifying the original object + controls = list(controls) + controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie)) + + if self.server and self.server.schema and self.check_names: + for attribute_name in attributes: + if ';' in attribute_name: # remove tags + attribute_name_to_check = attribute_name.split(';')[0] + else: + attribute_name_to_check = attribute_name + if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types: + raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check) + + request = search_operation(search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + types_only, + self.auto_escape if auto_escape is None else auto_escape, + self.auto_encode, + self.server.schema if self.server else None, + validator=self.server.custom_validator, + check_names=self.check_names) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self) + response = self.post_send_search(self.send('searchRequest', request, controls)) + self._entries = [] + + if isinstance(response, int): # asynchronous strategy + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self) + else: + return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(PROTOCOL): + for entry in response: + if entry['type'] == 'searchResEntry': + log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self) + elif entry['type'] == 'searchResRef': + log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self) + + if log_enabled(BASIC): + log(BASIC, 'done SEARCH operation, result <%s>', return_value) + + return return_value + + def compare(self, + dn, + attribute, + value, + controls=None): + """ + Perform a compare operation + """ + conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')] + + if log_enabled(BASIC): + log(BASIC, 'start COMPARE operation via <%s>', self) + self.last_error = None + if self.check_names: + dn = safe_dn(dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self) + + if self.server and self.server.schema and self.check_names: + if ';' in attribute: # remove tags for checking + attribute_name_to_check = attribute.split(';')[0] + else: + attribute_name_to_check = attribute + + if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types: + raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check) + + if isinstance(value, SEQUENCE_TYPES): # value can't be a sequence + raise LDAPInvalidValueError('value cannot be a sequence') + + with 
self.connection_lock: + self._fire_deferred() + request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self) + response = self.post_send_single_response(self.send('compareRequest', request, controls)) + self._entries = [] + if isinstance(response, int): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self) + else: + return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False + if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(PROTOCOL): + log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self) + + if log_enabled(BASIC): + log(BASIC, 'done COMPARE operation, result <%s>', return_value) + + return return_value + + def add(self, + dn, + object_class=None, + attributes=None, + controls=None): + """ + Add dn to the DIT, object_class is None, a class name or a list + of class names. + + Attributes is a dictionary in the form 'attr': 'val' or 'attr': + ['val1', 'val2', ...] for multivalued attributes + """ + conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')] + conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')] + if log_enabled(BASIC): + log(BASIC, 'start ADD operation via <%s>', self) + self.last_error = None + _attributes = deepcopy(attributes) # dict could change when adding objectClass values + if self.check_names: + dn = safe_dn(dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self) + + with self.connection_lock: + self._fire_deferred() + attr_object_class = [] + if object_class is None: + parm_object_class = [] + else: + parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class] + + object_class_attr_name = '' + if _attributes: + for attr in _attributes: + if attr.lower() == 'objectclass': + object_class_attr_name = attr + attr_object_class = list(_attributes[object_class_attr_name]) if isinstance(_attributes[object_class_attr_name], SEQUENCE_TYPES) else [_attributes[object_class_attr_name]] + break + else: + _attributes = dict() + + if not object_class_attr_name: + object_class_attr_name = 'objectClass' + + attr_object_class = [to_unicode(object_class) for object_class in attr_object_class] # converts objectclass to unicode in case of bytes value + _attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, []) # remove duplicate ObjectClasses + + if not _attributes[object_class_attr_name]: + self.last_error = 'objectClass attribute is mandatory' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPObjectClassError(self.last_error) + + if self.server and self.server.schema and self.check_names: + for object_class_name in _attributes[object_class_attr_name]: + if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes: + raise 
LDAPObjectClassError('invalid object class ' + str(object_class_name)) + + for attribute_name in _attributes: + if ';' in attribute_name: # remove tags for checking + attribute_name_to_check = attribute_name.split(';')[0] + else: + attribute_name_to_check = attribute_name + + if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types: + raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check) + + request = add_operation(dn, _attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self) + response = self.post_send_single_response(self.send('addRequest', request, controls)) + self._entries = [] + + if isinstance(response, STRING_TYPES + (int, )): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self) + return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(BASIC): + log(BASIC, 'done ADD operation, result <%s>', return_value) + + return return_value + + def delete(self, + dn, + controls=None): + """ + Delete the entry identified by the DN from the DIB. + """ + if log_enabled(BASIC): + log(BASIC, 'start DELETE operation via <%s>', self) + self.last_error = None + if self.check_names: + dn = safe_dn(dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self) + + with self.connection_lock: + self._fire_deferred() + if self.read_only: + self.last_error = 'connection is read-only' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPConnectionIsReadOnlyError(self.last_error) + + request = delete_operation(dn) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self) + response = self.post_send_single_response(self.send('delRequest', request, controls)) + self._entries = [] + + if isinstance(response, STRING_TYPES + (int, )): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self) + return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(BASIC): + log(BASIC, 'done DELETE operation, result <%s>', return_value) + + return return_value + + def modify(self, + dn, + changes, + controls=None): + """ + Modify attributes of entry + + - changes is a dictionary in the form {'attribute1': change), 'attribute2': [change, change, ...], ...} + - change is (operation, [value1, value2, ...]) + - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE), 3 (MODIFY_INCREMENT) + """ + conf_attributes_excluded_from_check = 
[v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')] + + if log_enabled(BASIC): + log(BASIC, 'start MODIFY operation via <%s>', self) + self.last_error = None + if self.check_names: + dn = safe_dn(dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self) + + with self.connection_lock: + self._fire_deferred() + if self.read_only: + self.last_error = 'connection is read-only' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPConnectionIsReadOnlyError(self.last_error) + + if not isinstance(changes, dict): + self.last_error = 'changes must be a dictionary' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPChangeError(self.last_error) + + if not changes: + self.last_error = 'no changes in modify request' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPChangeError(self.last_error) + + changelist = dict() + for attribute_name in changes: + if self.server and self.server.schema and self.check_names: + if ';' in attribute_name: # remove tags for checking + attribute_name_to_check = attribute_name.split(';')[0] + else: + attribute_name_to_check = attribute_name + + if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types: + raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check) + change = changes[attribute_name] + if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]: + if len(change) != 2: + self.last_error = 'malformed change' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPChangeError(self.last_error) + + changelist[attribute_name] = [change] # insert change in a list + else: + for change_operation in change: + if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]: + self.last_error = 'invalid change list' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPChangeError(self.last_error) + changelist[attribute_name] = change + request = modify_operation(dn, changelist, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self) + response = self.post_send_single_response(self.send('modifyRequest', request, controls)) + self._entries = [] + + if isinstance(response, STRING_TYPES + (int, )): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self) + return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(BASIC): + log(BASIC, 'done MODIFY operation, result <%s>', return_value) + + return return_value + + def modify_dn(self, + dn, + relative_dn, + delete_old_dn=True, + new_superior=None, + controls=None): + """ 
+ Modify DN of the entry or performs a move of the entry in the + DIT. + """ + if log_enabled(BASIC): + log(BASIC, 'start MODIFY DN operation via <%s>', self) + self.last_error = None + if self.check_names: + dn = safe_dn(dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self) + relative_dn = safe_dn(relative_dn) + if log_enabled(EXTENDED): + log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self) + + with self.connection_lock: + self._fire_deferred() + if self.read_only: + self.last_error = 'connection is read-only' + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', self.last_error, self) + raise LDAPConnectionIsReadOnlyError(self.last_error) + + # if new_superior and not dn.startswith(relative_dn): # as per RFC4511 (4.9) + # self.last_error = 'DN cannot change while performing moving' + # if log_enabled(ERROR): + # log(ERROR, '%s for <%s>', self.last_error, self) + # raise LDAPChangeError(self.last_error) + + request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self) + response = self.post_send_single_response(self.send('modDNRequest', request, controls)) + self._entries = [] + + if isinstance(response, STRING_TYPES + (int, )): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self) + return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(BASIC): + log(BASIC, 'done MODIFY DN operation, result <%s>', return_value) + + return return_value + + def abandon(self, + message_id, + controls=None): + """ + Abandon the operation indicated by message_id + """ + if log_enabled(BASIC): + log(BASIC, 'start ABANDON operation via <%s>', self) + self.last_error = None + with self.connection_lock: + self._fire_deferred() + return_value = False + if self.strategy._outstanding or message_id == 0: + # only current operation should be abandoned, abandon, bind and unbind cannot ever be abandoned, + # messagiId 0 is invalid and should be used as a "ping" to keep alive the connection + if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0: + request = abandon_operation(message_id) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self) + self.send('abandonRequest', request, controls) + self.result = None + self.response = None + self._entries = [] + return_value = True + else: + if log_enabled(ERROR): + log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self) + + if log_enabled(BASIC): + log(BASIC, 'done ABANDON operation, result <%s>', return_value) + + return return_value + + def extended(self, + request_name, + request_value=None, + controls=None, + no_encode=None): + """ + Performs an extended operation + """ + if log_enabled(BASIC): + log(BASIC, 'start EXTENDED 
operation via <%s>', self) + self.last_error = None + with self.connection_lock: + self._fire_deferred() + request = extended_operation(request_name, request_value, no_encode=no_encode) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self) + response = self.post_send_single_response(self.send('extendedReq', request, controls)) + self._entries = [] + if isinstance(response, int): + return_value = response + if log_enabled(PROTOCOL): + log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self) + return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False + if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error: + self.last_error = self.result['description'] + + if log_enabled(BASIC): + log(BASIC, 'done EXTENDED operation, result <%s>', return_value) + + return return_value + + def start_tls(self, read_server_info=True): # as per RFC4511. Removal of TLS is defined as MAY in RFC4511 so the client can't implement a generic stop_tls method0 + + if log_enabled(BASIC): + log(BASIC, 'start START TLS operation via <%s>', self) + + with self.connection_lock: + return_value = False + if not self.server.tls: + self.server.tls = Tls() + + if self.lazy and not self._executing_deferred: + self._deferred_start_tls = True + self.tls_started = True + return_value = True + if log_enabled(BASIC): + log(BASIC, 'deferring START TLS for <%s>', self) + else: + self._deferred_start_tls = False + if self.closed: + self.open() + if self.server.tls.start_tls(self) and self.strategy.sync: # for asynchronous connections _start_tls is run by the strategy + if read_server_info: + self.refresh_server_info() # refresh server info as per RFC4515 (3.1.5) + return_value = True + elif not self.strategy.sync: + return_value = True + + if log_enabled(BASIC): + log(BASIC, 'done START TLS operation, result <%s>', return_value) + + return return_value + + def do_sasl_bind(self, + controls): + if log_enabled(BASIC): + log(BASIC, 'start SASL BIND operation via <%s>', self) + self.last_error = None + with self.connection_lock: + result = None + + if not self.sasl_in_progress: + self.sasl_in_progress = True + try: + if self.sasl_mechanism == EXTERNAL: + result = sasl_external(self, controls) + elif self.sasl_mechanism == DIGEST_MD5: + result = sasl_digest_md5(self, controls) + elif self.sasl_mechanism == GSSAPI: + from ..protocol.sasl.kerberos import sasl_gssapi # needs the gssapi package + result = sasl_gssapi(self, controls) + elif self.sasl_mechanism == 'PLAIN': + result = sasl_plain(self, controls) + finally: + self.sasl_in_progress = False + + if log_enabled(BASIC): + log(BASIC, 'done SASL BIND operation, result <%s>', result) + + return result + + def do_ntlm_bind(self, + controls): + if log_enabled(BASIC): + log(BASIC, 'start NTLM BIND operation via <%s>', self) + self.last_error = None + with self.connection_lock: + result = None + if not self.sasl_in_progress: + self.sasl_in_progress = True # ntlm is same of sasl authentication + try: + # additional import for NTLM + from ..utils.ntlm import NtlmClient + domain_name, user_name = self.user.split('\\', 1) + ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password) + + # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx + # send a 
sicilyPackageDiscovery request (in the bindRequest) + request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self) + response = self.post_send_single_response(self.send('bindRequest', request, controls)) + if not self.strategy.sync: + _, result = self.get_response(response) + else: + result = response[0] + if 'server_creds' in result: + sicily_packages = result['server_creds'].decode('ascii').split(';') + if 'NTLM' in sicily_packages: # NTLM available on server + request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self) + response = self.post_send_single_response(self.send('bindRequest', request, controls)) + if not self.strategy.sync: + _, result = self.get_response(response) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0], + self) + result = response[0] + + if result['result'] == RESULT_SUCCESS: + request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client, + result['server_creds']) + if log_enabled(PROTOCOL): + log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self) + response = self.post_send_single_response(self.send('bindRequest', request, controls)) + if not self.strategy.sync: + _, result = self.get_response(response) + else: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self) + result = response[0] + else: + result = None + finally: + self.sasl_in_progress = False + + if log_enabled(BASIC): + log(BASIC, 'done SASL NTLM operation, result <%s>', result) + + return result + + def refresh_server_info(self): + # if self.strategy.no_real_dsa: # do not refresh for mock strategies + # return + + if not self.strategy.pooled: + with self.connection_lock: + if not self.closed: + if log_enabled(BASIC): + log(BASIC, 'refreshing server info for <%s>', self) + previous_response = self.response + previous_result = self.result + previous_entries = self._entries + self.server.get_info_from_server(self) + self.response = previous_response + self.result = previous_result + self._entries = previous_entries + else: + if log_enabled(BASIC): + log(BASIC, 'refreshing server info from pool for <%s>', self) + self.strategy.pool.get_info_from_server() + + def response_to_ldif(self, + search_result=None, + all_base64=False, + line_separator=None, + sort_order=None, + stream=None): + with self.connection_lock: + if search_result is None: + search_result = self.response + + if isinstance(search_result, SEQUENCE_TYPES): + ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order) + ldif_lines = add_ldif_header(ldif_lines) + line_separator = line_separator or linesep + ldif_output = line_separator.join(ldif_lines) + if stream: + if stream.tell() == 0: + header = add_ldif_header(['-'])[0] + stream.write(prepare_for_stream(header + line_separator + line_separator)) + stream.write(prepare_for_stream(ldif_output + line_separator + line_separator)) + if log_enabled(BASIC): + log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self) + return ldif_output + + return None + + def response_to_json(self, + raw=False, + search_result=None, + indent=4, + sort=True, + stream=None, + checked_attributes=True, + include_empty=True): + + with self.connection_lock: + if search_result is 
None:
+                search_result = self.response
+
+            if isinstance(search_result, SEQUENCE_TYPES):
+                json_dict = dict()
+                json_dict['entries'] = []
+
+                for response in search_result:
+                    if response['type'] == 'searchResEntry':
+                        entry = dict()
+
+                        entry['dn'] = response['dn']
+                        if checked_attributes:
+                            if not include_empty:
+                                # needed for python 2.6 compatibility
+                                entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
+                            else:
+                                entry['attributes'] = dict(response['attributes'])
+                        if raw:
+                            if not include_empty:
+                                # needed for python 2.6 compatibility
+                                entry['raw_attributes'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
+                            else:
+                                entry['raw'] = dict(response['raw_attributes'])
+                        json_dict['entries'].append(entry)
+
+                if str is bytes:  # Python 2
+                    check_json_dict(json_dict)
+
+                json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
+
+                if log_enabled(BASIC):
+                    log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
+                if stream:
+                    stream.write(json_output)
+
+                return json_output
+
+    def response_to_file(self,
+                         target,
+                         raw=False,
+                         indent=4,
+                         sort=True):
+        with self.connection_lock:
+            if self.response:
+                if isinstance(target, STRING_TYPES):
+                    target = open(target, 'w+')
+
+                if log_enabled(BASIC):
+                    log(BASIC, 'writing response to file for <%s>', self)
+
+                target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
+                target.close()
+
+    def _fire_deferred(self, read_info=True):
+        with self.connection_lock:
+            if self.lazy and not self._executing_deferred:
+                self._executing_deferred = True
+
+                if log_enabled(BASIC):
+                    log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
+                try:
+                    if self._deferred_open:
+                        self.open(read_server_info=False)
+                    if self._deferred_start_tls:
+                        self.start_tls(read_server_info=False)
+                    if self._deferred_bind:
+                        self.bind(read_server_info=False, controls=self._bind_controls)
+                    if read_info:
+                        self.refresh_server_info()
+                except LDAPExceptionError as e:
+                    if log_enabled(ERROR):
+                        log(ERROR, '%s for <%s>', e, self)
+                    raise  # re-raise LDAPExceptionError
+                finally:
+                    self._executing_deferred = False
+
+    @property
+    def entries(self):
+        if self.response:
+            if not self._entries:
+                self._entries = self._get_entries(self.response)
+        return self._entries
+
+    def _get_entries(self, search_response):
+        with self.connection_lock:
+            from ..
import ObjectDef, Reader + + # build a table of ObjectDefs, grouping the entries found in search_response for their attributes set, subset will be included in superset + attr_sets = [] + for response in search_response: + if response['type'] == 'searchResEntry': + resp_attr_set = set(response['attributes'].keys()) + if resp_attr_set not in attr_sets: + attr_sets.append(resp_attr_set) + attr_sets.sort(key=lambda x: -len(x)) # sorts the list in descending length order + unique_attr_sets = [] + for attr_set in attr_sets: + for unique_set in unique_attr_sets: + if unique_set >= attr_set: # checks if unique set is a superset of attr_set + break + else: # the attr_set is not a subset of any element in unique_attr_sets + unique_attr_sets.append(attr_set) + object_defs = [] + for attr_set in unique_attr_sets: + object_def = ObjectDef(schema=self.server.schema) + object_def += list(attr_set) # converts the set in a list to be added to the object definition + object_defs.append((attr_set, + object_def, + Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set)) + ) # objects_defs contains a tuple with the set, the ObjectDef and a cursor + + entries = [] + for response in search_response: + if response['type'] == 'searchResEntry': + resp_attr_set = set(response['attributes'].keys()) + for object_def in object_defs: + if resp_attr_set <= object_def[0]: # finds the ObjectDef for the attribute set of this entry + entry = object_def[2]._create_entry(response) + entries.append(entry) + break + else: + if log_enabled(ERROR): + log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self) + raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set)) + + return entries diff --git a/server/www/packages/packages-linux/x64/ldap3/core/exceptions.py b/server/www/packages/packages-linux/x64/ldap3/core/exceptions.py index cfefb6d..29aed26 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/exceptions.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/exceptions.py @@ -1,599 +1,609 @@ -""" -""" - -# Created on 2014.05.14 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . 
- -from os import sep -from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \ - RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \ - RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \ - RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \ - RESULT_CONSTRAINT_VIOLATION, \ - RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \ - RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \ - RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \ - RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \ - RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \ - RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \ - RESULT_NOT_ALLOWED_ON_NON_LEAF, \ - RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \ - RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \ - RESULT_INAPPROPRIATE_AUTHENTICATION -import socket - - -# LDAPException hierarchy -class LDAPException(Exception): - pass - - -class LDAPOperationResult(LDAPException): - def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None): - if cls is LDAPOperationResult and result and result in exception_table: - exc = super(LDAPOperationResult, exception_table[result]).__new__( - exception_table[result]) # create an exception of the required result error - exc.result = result - exc.description = description - exc.dn = dn - exc.message = message - exc.type = response_type - exc.response = response - else: - exc = super(LDAPOperationResult, cls).__new__(cls) - return exc - - def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None): - self.result = result - self.description = description - self.dn = dn - self.message = message - self.type = response_type - self.response = response - - def __str__(self): - s = [self.__class__.__name__, - str(self.result) if self.result else None, - self.description if self.description else None, - self.dn if self.dn else None, - self.message if self.message else None, - self.type if self.type else None, - self.response if self.response else None] - - return ' - '.join([str(item) for item in s if s is not None]) - - def __repr__(self): - return self.__str__() - - -class LDAPOperationsErrorResult(LDAPOperationResult): - pass - - -class LDAPProtocolErrorResult(LDAPOperationResult): - pass - - -class LDAPTimeLimitExceededResult(LDAPOperationResult): - pass - - -class LDAPSizeLimitExceededResult(LDAPOperationResult): - pass - - -class LDAPAuthMethodNotSupportedResult(LDAPOperationResult): - pass - - -class LDAPStrongerAuthRequiredResult(LDAPOperationResult): - pass - - -class LDAPReferralResult(LDAPOperationResult): - pass - - -class LDAPAdminLimitExceededResult(LDAPOperationResult): - pass - - -class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult): - pass - - -class LDAPConfidentialityRequiredResult(LDAPOperationResult): - pass - - -class LDAPSASLBindInProgressResult(LDAPOperationResult): 
- pass - - -class LDAPNoSuchAttributeResult(LDAPOperationResult): - pass - - -class LDAPUndefinedAttributeTypeResult(LDAPOperationResult): - pass - - -class LDAPInappropriateMatchingResult(LDAPOperationResult): - pass - - -class LDAPConstraintViolationResult(LDAPOperationResult): - pass - - -class LDAPAttributeOrValueExistsResult(LDAPOperationResult): - pass - - -class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult): - pass - - -class LDAPNoSuchObjectResult(LDAPOperationResult): - pass - - -class LDAPAliasProblemResult(LDAPOperationResult): - pass - - -class LDAPInvalidDNSyntaxResult(LDAPOperationResult): - pass - - -class LDAPAliasDereferencingProblemResult(LDAPOperationResult): - pass - - -class LDAPInappropriateAuthenticationResult(LDAPOperationResult): - pass - - -class LDAPInvalidCredentialsResult(LDAPOperationResult): - pass - - -class LDAPInsufficientAccessRightsResult(LDAPOperationResult): - pass - - -class LDAPBusyResult(LDAPOperationResult): - pass - - -class LDAPUnavailableResult(LDAPOperationResult): - pass - - -class LDAPUnwillingToPerformResult(LDAPOperationResult): - pass - - -class LDAPLoopDetectedResult(LDAPOperationResult): - pass - - -class LDAPNamingViolationResult(LDAPOperationResult): - pass - - -class LDAPObjectClassViolationResult(LDAPOperationResult): - pass - - -class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult): - pass - - -class LDAPNotAllowedOnRDNResult(LDAPOperationResult): - pass - - -class LDAPEntryAlreadyExistsResult(LDAPOperationResult): - pass - - -class LDAPObjectClassModsProhibitedResult(LDAPOperationResult): - pass - - -class LDAPAffectMultipleDSASResult(LDAPOperationResult): - pass - - -class LDAPOtherResult(LDAPOperationResult): - pass - - -class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult): - pass - - -class LDAPLCUPSecurityViolationResult(LDAPOperationResult): - pass - - -class LDAPLCUPInvalidDataResult(LDAPOperationResult): - pass - - -class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult): - pass - - -class LDAPLCUPReloadRequiredResult(LDAPOperationResult): - pass - - -class LDAPCanceledResult(LDAPOperationResult): - pass - - -class LDAPNoSuchOperationResult(LDAPOperationResult): - pass - - -class LDAPTooLateResult(LDAPOperationResult): - pass - - -class LDAPCannotCancelResult(LDAPOperationResult): - pass - - -class LDAPAssertionFailedResult(LDAPOperationResult): - pass - - -class LDAPAuthorizationDeniedResult(LDAPOperationResult): - pass - - -class LDAPESyncRefreshRequiredResult(LDAPOperationResult): - pass - - -exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult, - RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult, - RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult, - RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult, - RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult, - RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult, - RESULT_REFERRAL: LDAPReferralResult, - RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult, - RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult, - RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult, - RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult, - RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult, - RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult, - RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult, - RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult, - RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult, - 
RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult, - RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult, - RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult, - RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult, - RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult, - RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult, - RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult, - RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult, - RESULT_BUSY: LDAPBusyResult, - RESULT_UNAVAILABLE: LDAPUnavailableResult, - RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult, - RESULT_LOOP_DETECTED: LDAPLoopDetectedResult, - RESULT_NAMING_VIOLATION: LDAPNamingViolationResult, - RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult, - RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult, - RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult, - RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult, - RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult, - RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult, - RESULT_OTHER: LDAPOtherResult, - RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult, - RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult, - RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult, - RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult, - RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult, - RESULT_CANCELED: LDAPCanceledResult, - RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult, - RESULT_TOO_LATE: LDAPTooLateResult, - RESULT_CANNOT_CANCEL: LDAPCannotCancelResult, - RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult, - RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult, - RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult} - - -class LDAPExceptionError(LDAPException): - pass - - -# configuration exceptions -class LDAPConfigurationError(LDAPExceptionError): - pass - - -class LDAPUnknownStrategyError(LDAPConfigurationError): - pass - - -class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError): - pass - - -class LDAPSSLConfigurationError(LDAPConfigurationError): - pass - - -class LDAPDefinitionError(LDAPConfigurationError): - pass - - -class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError): - pass - - -class LDAPConfigurationParameterError(LDAPConfigurationError): - pass - - -# abstract layer exceptions -class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError): - pass - - -class LDAPObjectError(LDAPExceptionError, ValueError): - pass - - -class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError): - pass - - -class LDAPCursorError(LDAPExceptionError): - pass - -class LDAPObjectDereferenceError(LDAPExceptionError): - pass - -# security exceptions -class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError): - pass - - -class LDAPInvalidTlsSpecificationError(LDAPExceptionError): - pass - - -class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError): - pass - - -# connection exceptions -class LDAPBindError(LDAPExceptionError): - pass - - -class LDAPInvalidServerError(LDAPExceptionError): - pass - - -class LDAPSASLMechanismNotSupportedError(LDAPExceptionError): - pass - - -class LDAPConnectionIsReadOnlyError(LDAPExceptionError): - pass - - -class LDAPChangeError(LDAPExceptionError, ValueError): - pass - - -class LDAPServerPoolError(LDAPExceptionError): - pass - - -class LDAPServerPoolExhaustedError(LDAPExceptionError): - pass - - -class 
LDAPInvalidPortError(LDAPExceptionError): - pass - - -class LDAPStartTLSError(LDAPExceptionError): - pass - - -class LDAPCertificateError(LDAPExceptionError): - pass - - -class LDAPUserNameNotAllowedError(LDAPExceptionError): - pass - - -class LDAPUserNameIsMandatoryError(LDAPExceptionError): - pass - - -class LDAPPasswordIsMandatoryError(LDAPExceptionError): - pass - - -class LDAPInvalidFilterError(LDAPExceptionError): - pass - - -class LDAPInvalidScopeError(LDAPExceptionError, ValueError): - pass - - -class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError): - pass - - -class LDAPInvalidValueError(LDAPExceptionError, ValueError): - pass - - -class LDAPControlError(LDAPExceptionError, ValueError): - pass - - -class LDAPExtensionError(LDAPExceptionError, ValueError): - pass - - -class LDAPLDIFError(LDAPExceptionError): - pass - - -class LDAPSchemaError(LDAPExceptionError): - pass - - -class LDAPSASLPrepError(LDAPExceptionError): - pass - - -class LDAPSASLBindInProgressError(LDAPExceptionError): - pass - - -class LDAPMetricsError(LDAPExceptionError): - pass - - -class LDAPObjectClassError(LDAPExceptionError): - pass - - -class LDAPInvalidDnError(LDAPExceptionError): - pass - - -class LDAPResponseTimeoutError(LDAPExceptionError): - pass - - -class LDAPTransactionError(LDAPExceptionError): - pass - - -# communication exceptions -class LDAPCommunicationError(LDAPExceptionError): - pass - - -class LDAPSocketOpenError(LDAPCommunicationError): - pass - - -class LDAPSocketCloseError(LDAPCommunicationError): - pass - - -class LDAPSocketReceiveError(LDAPCommunicationError, socket.error): - pass - - -class LDAPSocketSendError(LDAPCommunicationError, socket.error): - pass - - -class LDAPSessionTerminatedByServerError(LDAPCommunicationError): - pass - - -class LDAPUnknownResponseError(LDAPCommunicationError): - pass - - -class LDAPUnknownRequestError(LDAPCommunicationError): - pass - - -class LDAPReferralError(LDAPCommunicationError): - pass - - -# pooling exceptions -class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError): - pass - - -class LDAPConnectionPoolNotStartedError(LDAPExceptionError): - pass - - -# restartable strategy -class LDAPMaximumRetriesError(LDAPExceptionError): - def __str__(self): - s = [] - if self.args: - if isinstance(self.args, tuple): - if len(self.args) > 0: - s.append('LDAPMaximumRetriesError: ' + str(self.args[0])) - if len(self.args) > 1: - s.append('Exception history:') - prev_exc = '' - for i, exc in enumerate(self.args[1]): # args[1] contains exception history - # if str(exc[1]) != prev_exc: - # s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2]))) - # prev_exc = str(exc[1]) - if str(exc) != prev_exc: - s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc))) - prev_exc = str(exc) - if len(self.args) > 2: - s.append('Maximum number of retries reached: ' + str(self.args[2])) - else: - s = [LDAPExceptionError.__str__(self)] - - return sep.join(s) - - -# exception factories -def communication_exception_factory(exc_to_raise, exc): - """ - Generates a new exception class of the requested type (subclass of LDAPCommunication) merged with the exception raised by the interpreter - """ - if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]: - return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) - else: - raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) - - -def start_tls_exception_factory(exc_to_raise, exc): - """ - 
Generates a new exception class of the requested type merged with the exception raised by the interpreter - """ - - if exc_to_raise.__name__ == 'LDAPStartTLSError': - return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) - else: - raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) +""" +""" + +# Created on 2014.05.14 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from os import sep +from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \ + RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \ + RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \ + RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \ + RESULT_CONSTRAINT_VIOLATION, \ + RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \ + RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \ + RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \ + RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \ + RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \ + RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \ + RESULT_NOT_ALLOWED_ON_NON_LEAF, \ + RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \ + RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \ + RESULT_INAPPROPRIATE_AUTHENTICATION +import socket + + +# LDAPException hierarchy +class LDAPException(Exception): + pass + + +class LDAPOperationResult(LDAPException): + def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None): + if cls is LDAPOperationResult and result and result in exception_table: + exc = super(LDAPOperationResult, exception_table[result]).__new__( + exception_table[result]) # create an exception of the required result error + exc.result = result + exc.description = description + exc.dn = dn + exc.message = message + exc.type = response_type + exc.response = response + else: + exc = super(LDAPOperationResult, cls).__new__(cls) + return exc + + def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None): + self.result = result + self.description = description + self.dn = dn + self.message = message + self.type = response_type + self.response = response + + def __str__(self): + s = 
[self.__class__.__name__, + str(self.result) if self.result else None, + self.description if self.description else None, + self.dn if self.dn else None, + self.message if self.message else None, + self.type if self.type else None, + self.response if self.response else None] + + return ' - '.join([str(item) for item in s if s is not None]) + + def __repr__(self): + return self.__str__() + + +class LDAPOperationsErrorResult(LDAPOperationResult): + pass + + +class LDAPProtocolErrorResult(LDAPOperationResult): + pass + + +class LDAPTimeLimitExceededResult(LDAPOperationResult): + pass + + +class LDAPSizeLimitExceededResult(LDAPOperationResult): + pass + + +class LDAPAuthMethodNotSupportedResult(LDAPOperationResult): + pass + + +class LDAPStrongerAuthRequiredResult(LDAPOperationResult): + pass + + +class LDAPReferralResult(LDAPOperationResult): + pass + + +class LDAPAdminLimitExceededResult(LDAPOperationResult): + pass + + +class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult): + pass + + +class LDAPConfidentialityRequiredResult(LDAPOperationResult): + pass + + +class LDAPSASLBindInProgressResult(LDAPOperationResult): + pass + + +class LDAPNoSuchAttributeResult(LDAPOperationResult): + pass + + +class LDAPUndefinedAttributeTypeResult(LDAPOperationResult): + pass + + +class LDAPInappropriateMatchingResult(LDAPOperationResult): + pass + + +class LDAPConstraintViolationResult(LDAPOperationResult): + pass + + +class LDAPAttributeOrValueExistsResult(LDAPOperationResult): + pass + + +class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult): + pass + + +class LDAPNoSuchObjectResult(LDAPOperationResult): + pass + + +class LDAPAliasProblemResult(LDAPOperationResult): + pass + + +class LDAPInvalidDNSyntaxResult(LDAPOperationResult): + pass + + +class LDAPAliasDereferencingProblemResult(LDAPOperationResult): + pass + + +class LDAPInappropriateAuthenticationResult(LDAPOperationResult): + pass + + +class LDAPInvalidCredentialsResult(LDAPOperationResult): + pass + + +class LDAPInsufficientAccessRightsResult(LDAPOperationResult): + pass + + +class LDAPBusyResult(LDAPOperationResult): + pass + + +class LDAPUnavailableResult(LDAPOperationResult): + pass + + +class LDAPUnwillingToPerformResult(LDAPOperationResult): + pass + + +class LDAPLoopDetectedResult(LDAPOperationResult): + pass + + +class LDAPNamingViolationResult(LDAPOperationResult): + pass + + +class LDAPObjectClassViolationResult(LDAPOperationResult): + pass + + +class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult): + pass + + +class LDAPNotAllowedOnRDNResult(LDAPOperationResult): + pass + + +class LDAPEntryAlreadyExistsResult(LDAPOperationResult): + pass + + +class LDAPObjectClassModsProhibitedResult(LDAPOperationResult): + pass + + +class LDAPAffectMultipleDSASResult(LDAPOperationResult): + pass + + +class LDAPOtherResult(LDAPOperationResult): + pass + + +class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult): + pass + + +class LDAPLCUPSecurityViolationResult(LDAPOperationResult): + pass + + +class LDAPLCUPInvalidDataResult(LDAPOperationResult): + pass + + +class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult): + pass + + +class LDAPLCUPReloadRequiredResult(LDAPOperationResult): + pass + + +class LDAPCanceledResult(LDAPOperationResult): + pass + + +class LDAPNoSuchOperationResult(LDAPOperationResult): + pass + + +class LDAPTooLateResult(LDAPOperationResult): + pass + + +class LDAPCannotCancelResult(LDAPOperationResult): + pass + + +class LDAPAssertionFailedResult(LDAPOperationResult): + pass + + +class 
LDAPAuthorizationDeniedResult(LDAPOperationResult): + pass + + +class LDAPESyncRefreshRequiredResult(LDAPOperationResult): + pass + + +exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult, + RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult, + RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult, + RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult, + RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult, + RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult, + RESULT_REFERRAL: LDAPReferralResult, + RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult, + RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult, + RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult, + RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult, + RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult, + RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult, + RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult, + RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult, + RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult, + RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult, + RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult, + RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult, + RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult, + RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult, + RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult, + RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult, + RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult, + RESULT_BUSY: LDAPBusyResult, + RESULT_UNAVAILABLE: LDAPUnavailableResult, + RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult, + RESULT_LOOP_DETECTED: LDAPLoopDetectedResult, + RESULT_NAMING_VIOLATION: LDAPNamingViolationResult, + RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult, + RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult, + RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult, + RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult, + RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult, + RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult, + RESULT_OTHER: LDAPOtherResult, + RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult, + RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult, + RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult, + RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult, + RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult, + RESULT_CANCELED: LDAPCanceledResult, + RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult, + RESULT_TOO_LATE: LDAPTooLateResult, + RESULT_CANNOT_CANCEL: LDAPCannotCancelResult, + RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult, + RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult, + RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult} + + +class LDAPExceptionError(LDAPException): + pass + + +# configuration exceptions +class LDAPConfigurationError(LDAPExceptionError): + pass + + +class LDAPUnknownStrategyError(LDAPConfigurationError): + pass + + +class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError): + pass + + +class LDAPSSLConfigurationError(LDAPConfigurationError): + pass + + +class LDAPDefinitionError(LDAPConfigurationError): + pass + + +class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError): + pass + + +class 
LDAPConfigurationParameterError(LDAPConfigurationError): + pass + + +# abstract layer exceptions +class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError): + pass + + +class LDAPObjectError(LDAPExceptionError, ValueError): + pass + + +class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError): + pass + + +class LDAPCursorError(LDAPExceptionError): + pass + + +class LDAPCursorAttributeError(LDAPCursorError, AttributeError): + pass + + +class LDAPObjectDereferenceError(LDAPExceptionError): + pass + + +# security exceptions +class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError): + pass + + +class LDAPInvalidTlsSpecificationError(LDAPExceptionError): + pass + + +class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError): + pass + + +# connection exceptions +class LDAPBindError(LDAPExceptionError): + pass + + +class LDAPInvalidServerError(LDAPExceptionError): + pass + + +class LDAPSASLMechanismNotSupportedError(LDAPExceptionError): + pass + + +class LDAPConnectionIsReadOnlyError(LDAPExceptionError): + pass + + +class LDAPChangeError(LDAPExceptionError, ValueError): + pass + + +class LDAPServerPoolError(LDAPExceptionError): + pass + + +class LDAPServerPoolExhaustedError(LDAPExceptionError): + pass + + +class LDAPInvalidPortError(LDAPExceptionError): + pass + + +class LDAPStartTLSError(LDAPExceptionError): + pass + + +class LDAPCertificateError(LDAPExceptionError): + pass + + +class LDAPUserNameNotAllowedError(LDAPExceptionError): + pass + + +class LDAPUserNameIsMandatoryError(LDAPExceptionError): + pass + + +class LDAPPasswordIsMandatoryError(LDAPExceptionError): + pass + + +class LDAPInvalidFilterError(LDAPExceptionError): + pass + + +class LDAPInvalidScopeError(LDAPExceptionError, ValueError): + pass + + +class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError): + pass + + +class LDAPInvalidValueError(LDAPExceptionError, ValueError): + pass + + +class LDAPControlError(LDAPExceptionError, ValueError): + pass + + +class LDAPExtensionError(LDAPExceptionError, ValueError): + pass + + +class LDAPLDIFError(LDAPExceptionError): + pass + + +class LDAPSchemaError(LDAPExceptionError): + pass + + +class LDAPSASLPrepError(LDAPExceptionError): + pass + + +class LDAPSASLBindInProgressError(LDAPExceptionError): + pass + + +class LDAPMetricsError(LDAPExceptionError): + pass + + +class LDAPObjectClassError(LDAPExceptionError): + pass + + +class LDAPInvalidDnError(LDAPExceptionError): + pass + + +class LDAPResponseTimeoutError(LDAPExceptionError): + pass + + +class LDAPTransactionError(LDAPExceptionError): + pass + + +class LDAPInfoError(LDAPExceptionError): + pass + + +# communication exceptions +class LDAPCommunicationError(LDAPExceptionError): + pass + + +class LDAPSocketOpenError(LDAPCommunicationError): + pass + + +class LDAPSocketCloseError(LDAPCommunicationError): + pass + + +class LDAPSocketReceiveError(LDAPCommunicationError, socket.error): + pass + + +class LDAPSocketSendError(LDAPCommunicationError, socket.error): + pass + + +class LDAPSessionTerminatedByServerError(LDAPCommunicationError): + pass + + +class LDAPUnknownResponseError(LDAPCommunicationError): + pass + + +class LDAPUnknownRequestError(LDAPCommunicationError): + pass + + +class LDAPReferralError(LDAPCommunicationError): + pass + + +# pooling exceptions +class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError): + pass + + +class LDAPConnectionPoolNotStartedError(LDAPExceptionError): + pass + + +# restartable strategy +class LDAPMaximumRetriesError(LDAPExceptionError): + def 
__str__(self): + s = [] + if self.args: + if isinstance(self.args, tuple): + if len(self.args) > 0: + s.append('LDAPMaximumRetriesError: ' + str(self.args[0])) + if len(self.args) > 1: + s.append('Exception history:') + prev_exc = '' + for i, exc in enumerate(self.args[1]): # args[1] contains exception history + # if str(exc[1]) != prev_exc: + # s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2]))) + # prev_exc = str(exc[1]) + if str(exc) != prev_exc: + s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc))) + prev_exc = str(exc) + if len(self.args) > 2: + s.append('Maximum number of retries reached: ' + str(self.args[2])) + else: + s = [LDAPExceptionError.__str__(self)] + + return sep.join(s) + + +# exception factories +def communication_exception_factory(exc_to_raise, exc): + """ + Generates a new exception class of the requested type (subclass of LDAPCommunication) merged with the exception raised by the interpreter + """ + if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]: + return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) + else: + raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) + + +def start_tls_exception_factory(exc_to_raise, exc): + """ + Generates a new exception class of the requested type merged with the exception raised by the interpreter + """ + + if exc_to_raise.__name__ == 'LDAPStartTLSError': + return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) + else: + raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) diff --git a/server/www/packages/packages-linux/x64/ldap3/core/pooling.py b/server/www/packages/packages-linux/x64/ldap3/core/pooling.py index 66a0bbd..24a5b0f 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/pooling.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/pooling.py @@ -1,306 +1,329 @@ -""" -""" - -# Created on 2014.03.14 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from datetime import datetime, MINYEAR -from os import linesep -from random import randint -from time import sleep - -from .. 
import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter -from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError -from .server import Server -from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK - -POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM] - - -class ServerPoolState(object): - def __init__(self, server_pool): - self.servers = [] # each element is a list: [server, last_checked_time, available] - self.strategy = server_pool.strategy - self.server_pool = server_pool - self.last_used_server = 0 - self.refresh() - self.initialize_time = datetime.now() - - if log_enabled(BASIC): - log(BASIC, 'instantiated ServerPoolState: <%r>', self) - - def __str__(self): - s = 'servers: ' + linesep - if self.servers: - for server in self.servers: - s += str(server[0]) + linesep - else: - s += 'None' + linesep - s += 'Pool strategy: ' + str(self.strategy) + linesep - s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.servers[self.last_used_server][0])) - - return s - - def refresh(self): - self.servers = [] - for server in self.server_pool.servers: - self.servers.append([server, datetime(MINYEAR, 1, 1), True]) # server, smallest date ever, supposed available - self.last_used_server = randint(0, len(self.servers) - 1) - - def get_current_server(self): - return self.servers[self.last_used_server][0] - - def get_server(self): - if self.servers: - if self.server_pool.strategy == FIRST: - if self.server_pool.active: - # returns the first active server - self.last_used_server = self.find_active_server(starting=0) - else: - # returns always the first server - no pooling - self.last_used_server = 0 - elif self.server_pool.strategy == ROUND_ROBIN: - if self.server_pool.active: - # returns the next active server in a circular range - self.last_used_server = self.find_active_server(self.last_used_server + 1) - else: - # returns the next server in a circular range - self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.servers) else 0 - elif self.server_pool.strategy == RANDOM: - if self.server_pool.active: - self.last_used_server = self.find_active_random_server() - else: - # returns a random server in the pool - self.last_used_server = randint(0, len(self.servers) - 1) - else: - if log_enabled(ERROR): - log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy) - raise LDAPUnknownStrategyError('unknown server pooling strategy') - if log_enabled(BASIC): - log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server) - return self.servers[self.last_used_server][0] - else: - if log_enabled(ERROR): - log(ERROR, 'no servers in Server Pool <%s>', self) - raise LDAPServerPoolError('no servers in server pool') - - def find_active_random_server(self): - counter = self.server_pool.active # can be True for "forever" or the number of cycles to try - while counter: - if log_enabled(NETWORK): - log(NETWORK, 'entering loop for finding active server in pool <%s>', self) - temp_list = self.servers[:] # copy - while temp_list: - # pops a random server from a temp list and checks its - # availability, if not available tries another one - server = temp_list.pop(randint(0, len(temp_list) - 1)) - if not server[2]: # server is offline - if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server[1]).seconds < self.server_pool.exhaust: # keeps server offline - if log_enabled(NETWORK): - log(NETWORK, 
'server <%s> excluded from checking because it is offline', server[0]) - continue - if log_enabled(NETWORK): - log(NETWORK, 'server <%s> reinserted in pool', server[0]) - server[1] = datetime.now() - if log_enabled(NETWORK): - log(NETWORK, 'checking server <%s> for availability', server[0]) - if server[0].check_availability(): - # returns a random active server in the pool - server[2] = True - return self.servers.index(server) - else: - server[2] = False - if not isinstance(self.server_pool.active, bool): - counter -= 1 - if log_enabled(ERROR): - log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self) - raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries') - - def find_active_server(self, starting): - conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT') - counter = self.server_pool.active # can be True for "forever" or the number of cycles to try - if starting >= len(self.servers): - starting = 0 - - while counter: - if log_enabled(NETWORK): - log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self) - index = -1 - pool_size = len(self.servers) - while index < pool_size - 1: - index += 1 - offset = index + starting if index + starting < pool_size else index + starting - pool_size - if not self.servers[offset][2]: # server is offline - if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - self.servers[offset][1]).seconds < self.server_pool.exhaust: # keeps server offline - if log_enabled(NETWORK): - if isinstance(self.server_pool.exhaust, bool): - log(NETWORK, 'server <%s> excluded from checking because is offline', self.servers[offset][0]) - else: - log(NETWORK, 'server <%s> excluded from checking because is offline for %d seconds', self.servers[offset][0], (self.server_pool.exhaust - (datetime.now() - self.servers[offset][1]).seconds)) - continue - if log_enabled(NETWORK): - log(NETWORK, 'server <%s> reinserted in pool', self.servers[offset][0]) - self.servers[offset][1] = datetime.now() - if log_enabled(NETWORK): - log(NETWORK, 'checking server <%s> for availability', self.servers[offset][0]) - if self.servers[offset][0].check_availability(): - self.servers[offset][2] = True - return offset - else: - self.servers[offset][2] = False # sets server offline - - if not isinstance(self.server_pool.active, bool): - counter -= 1 - if log_enabled(NETWORK): - log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout) - sleep(conf_pool_timeout) - - if log_enabled(ERROR): - log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self) - raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries') - - def __len__(self): - return len(self.servers) - - -class ServerPool(object): - def __init__(self, - servers=None, - pool_strategy=ROUND_ROBIN, - active=True, - exhaust=False): - - if pool_strategy not in POOLING_STRATEGIES: - if log_enabled(ERROR): - log(ERROR, 'unknown pooling strategy <%s>', pool_strategy) - raise LDAPUnknownStrategyError('unknown pooling strategy') - if exhaust and not active: - if log_enabled(ERROR): - log(ERROR, 'cannot instantiate pool with exhaust and not active') - raise LDAPServerPoolError('pools can be exhausted only when checking for active servers') - self.servers = [] - self.pool_states = dict() - self.active = active - self.exhaust = exhaust - if 
isinstance(servers, SEQUENCE_TYPES + (Server, )): - self.add(servers) - elif isinstance(servers, STRING_TYPES): - self.add(Server(servers)) - self.strategy = pool_strategy - - if log_enabled(BASIC): - log(BASIC, 'instantiated ServerPool: <%r>', self) - - def __str__(self): - s = 'servers: ' + linesep - if self.servers: - for server in self.servers: - s += str(server) + linesep - else: - s += 'None' + linesep - s += 'Pool strategy: ' + str(self.strategy) - s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False') - s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False') - return s - - def __repr__(self): - r = 'ServerPool(servers=' - if self.servers: - r += '[' - for server in self.servers: - r += server.__repr__() + ', ' - r = r[:-2] + ']' - else: - r += 'None' - r += ', pool_strategy={0.strategy!r}'.format(self) - r += ', active={0.active!r}'.format(self) - r += ', exhaust={0.exhaust!r}'.format(self) - r += ')' - - return r - - def __len__(self): - return len(self.servers) - - def __getitem__(self, item): - return self.servers[item] - - def __iter__(self): - return self.servers.__iter__() - - def add(self, servers): - if isinstance(servers, Server): - if servers not in self.servers: - self.servers.append(servers) - elif isinstance(servers, STRING_TYPES): - self.servers.append(Server(servers)) - elif isinstance(servers, SEQUENCE_TYPES): - for server in servers: - if isinstance(server, Server): - self.servers.append(server) - elif isinstance(server, STRING_TYPES): - self.servers.append(Server(server)) - else: - if log_enabled(ERROR): - log(ERROR, 'element must be a server in Server Pool <%s>', self) - raise LDAPServerPoolError('server in ServerPool must be a Server') - else: - if log_enabled(ERROR): - log(ERROR, 'server must be a Server of a list of Servers when adding to Server Pool <%s>', self) - raise LDAPServerPoolError('server must be a Server or a list of Server') - - for connection in self.pool_states: - # notifies connections using this pool to refresh - self.pool_states[connection].refresh() - - def remove(self, server): - if server in self.servers: - self.servers.remove(server) - else: - if log_enabled(ERROR): - log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self) - raise LDAPServerPoolError('server not in server pool') - - for connection in self.pool_states: - # notifies connections using this pool to refresh - self.pool_states[connection].refresh() - - def initialize(self, connection): - pool_state = ServerPoolState(self) - # registers pool_state in ServerPool object - self.pool_states[connection] = pool_state - - def get_server(self, connection): - if connection in self.pool_states: - return self.pool_states[connection].get_server() - else: - if log_enabled(ERROR): - log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) - raise LDAPServerPoolError('connection not in ServerPoolState') - - def get_current_server(self, connection): - if connection in self.pool_states: - return self.pool_states[connection].get_current_server() - else: - if log_enabled(ERROR): - log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) - raise LDAPServerPoolError('connection not in ServerPoolState') +""" +""" + +# Created on 2014.03.14 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. 
+# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from datetime import datetime, MINYEAR +from os import linesep +from random import randint +from time import sleep + +from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter +from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError +from .server import Server +from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK + +POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM] + + +class ServerState(object): + def __init__(self, server, last_checked_time, available): + self.server = server + self.last_checked_time = last_checked_time + self.available = available + + +class ServerPoolState(object): + def __init__(self, server_pool): + self.server_states = [] # each element is a ServerState + self.strategy = server_pool.strategy + self.server_pool = server_pool + self.last_used_server = 0 + self.refresh() + self.initialize_time = datetime.now() + + if log_enabled(BASIC): + log(BASIC, 'instantiated ServerPoolState: <%r>', self) + + def __str__(self): + s = 'servers: ' + linesep + if self.server_states: + for state in self.server_states: + s += str(state.server) + linesep + else: + s += 'None' + linesep + s += 'Pool strategy: ' + str(self.strategy) + linesep + s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.server_states[self.last_used_server].server)) + + return s + + def refresh(self): + self.server_states = [] + for server in self.server_pool.servers: + self.server_states.append(ServerState(server, datetime(MINYEAR, 1, 1), True)) # server, smallest date ever, supposed available + self.last_used_server = randint(0, len(self.server_states) - 1) + + def get_current_server(self): + return self.server_states[self.last_used_server].server + + def get_server(self): + if self.server_states: + if self.server_pool.strategy == FIRST: + if self.server_pool.active: + # returns the first active server + self.last_used_server = self.find_active_server(starting=0) + else: + # returns always the first server - no pooling + self.last_used_server = 0 + elif self.server_pool.strategy == ROUND_ROBIN: + if self.server_pool.active: + # returns the next active server in a circular range + self.last_used_server = self.find_active_server(self.last_used_server + 1) + else: + # returns the next server in a circular range + self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.server_states) else 0 + elif self.server_pool.strategy == RANDOM: + if self.server_pool.active: + self.last_used_server = self.find_active_random_server() + else: + # returns a random server in the pool + self.last_used_server = randint(0, len(self.server_states) - 1) + else: + if log_enabled(ERROR): + log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy) + raise LDAPUnknownStrategyError('unknown server pooling strategy') 
+ if log_enabled(BASIC): + log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server) + return self.server_states[self.last_used_server].server + else: + if log_enabled(ERROR): + log(ERROR, 'no servers in Server Pool <%s>', self) + raise LDAPServerPoolError('no servers in server pool') + + def find_active_random_server(self): + counter = self.server_pool.active # can be True for "forever" or the number of cycles to try + while counter: + if log_enabled(NETWORK): + log(NETWORK, 'entering loop for finding active server in pool <%s>', self) + temp_list = self.server_states[:] # copy + while temp_list: + # pops a random server from a temp list and checks its + # availability, if not available tries another one + server_state = temp_list.pop(randint(0, len(temp_list) - 1)) + if not server_state.available: # server is offline + if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust: # keeps server offline + if log_enabled(NETWORK): + log(NETWORK, 'server <%s> excluded from checking because it is offline', server_state.server) + continue + if log_enabled(NETWORK): + log(NETWORK, 'server <%s> reinserted in pool', server_state.server) + server_state.last_checked_time = datetime.now() + if log_enabled(NETWORK): + log(NETWORK, 'checking server <%s> for availability', server_state.server) + if server_state.server.check_availability(): + # returns a random active server in the pool + server_state.available = True + return self.server_states.index(server_state) + else: + server_state.available = False + if not isinstance(self.server_pool.active, bool): + counter -= 1 + if log_enabled(ERROR): + log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self) + raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries') + + def find_active_server(self, starting): + conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT') + counter = self.server_pool.active # can be True for "forever" or the number of cycles to try + if starting >= len(self.server_states): + starting = 0 + + while counter: + if log_enabled(NETWORK): + log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self) + index = -1 + pool_size = len(self.server_states) + while index < pool_size - 1: + index += 1 + offset = index + starting if index + starting < pool_size else index + starting - pool_size + server_state = self.server_states[offset] + if not server_state.available: # server is offline + if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust: # keeps server offline + if log_enabled(NETWORK): + if isinstance(self.server_pool.exhaust, bool): + log(NETWORK, 'server <%s> excluded from checking because is offline', server_state.server) + else: + log(NETWORK, 'server <%s> excluded from checking because is offline for %d seconds', server_state.server, (self.server_pool.exhaust - (datetime.now() - server_state.last_checked_time).seconds)) + continue + if log_enabled(NETWORK): + log(NETWORK, 'server <%s> reinserted in pool', server_state.server) + server_state.last_checked_time = datetime.now() + if log_enabled(NETWORK): + log(NETWORK, 'checking server <%s> for availability', server_state.server) + if server_state.server.check_availability(): + server_state.available = True 
+ return offset + else: + server_state.available = False # sets server offline + + if not isinstance(self.server_pool.active, bool): + counter -= 1 + if log_enabled(NETWORK): + log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout) + sleep(conf_pool_timeout) + + if log_enabled(ERROR): + log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self) + raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries') + + def __len__(self): + return len(self.server_states) + + +class ServerPool(object): + def __init__(self, + servers=None, + pool_strategy=ROUND_ROBIN, + active=True, + exhaust=False, + single_state=True): + + if pool_strategy not in POOLING_STRATEGIES: + if log_enabled(ERROR): + log(ERROR, 'unknown pooling strategy <%s>', pool_strategy) + raise LDAPUnknownStrategyError('unknown pooling strategy') + if exhaust and not active: + if log_enabled(ERROR): + log(ERROR, 'cannot instantiate pool with exhaust and not active') + raise LDAPServerPoolError('pools can be exhausted only when checking for active servers') + self.servers = [] + self.pool_states = dict() + self.active = active + self.exhaust = exhaust + self.single = single_state + self._pool_state = None # used for storing the global state of the pool + if isinstance(servers, SEQUENCE_TYPES + (Server, )): + self.add(servers) + elif isinstance(servers, STRING_TYPES): + self.add(Server(servers)) + self.strategy = pool_strategy + + if log_enabled(BASIC): + log(BASIC, 'instantiated ServerPool: <%r>', self) + + def __str__(self): + s = 'servers: ' + linesep + if self.servers: + for server in self.servers: + s += str(server) + linesep + else: + s += 'None' + linesep + s += 'Pool strategy: ' + str(self.strategy) + s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False') + s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False') + return s + + def __repr__(self): + r = 'ServerPool(servers=' + if self.servers: + r += '[' + for server in self.servers: + r += server.__repr__() + ', ' + r = r[:-2] + ']' + else: + r += 'None' + r += ', pool_strategy={0.strategy!r}'.format(self) + r += ', active={0.active!r}'.format(self) + r += ', exhaust={0.exhaust!r}'.format(self) + r += ')' + + return r + + def __len__(self): + return len(self.servers) + + def __getitem__(self, item): + return self.servers[item] + + def __iter__(self): + return self.servers.__iter__() + + def add(self, servers): + if isinstance(servers, Server): + if servers not in self.servers: + self.servers.append(servers) + elif isinstance(servers, STRING_TYPES): + self.servers.append(Server(servers)) + elif isinstance(servers, SEQUENCE_TYPES): + for server in servers: + if isinstance(server, Server): + self.servers.append(server) + elif isinstance(server, STRING_TYPES): + self.servers.append(Server(server)) + else: + if log_enabled(ERROR): + log(ERROR, 'element must be a server in Server Pool <%s>', self) + raise LDAPServerPoolError('server in ServerPool must be a Server') + else: + if log_enabled(ERROR): + log(ERROR, 'server must be a Server of a list of Servers when adding to Server Pool <%s>', self) + raise LDAPServerPoolError('server must be a Server or a list of Server') + + if self.single: + if self._pool_state: + self._pool_state.refresh() + else: + for connection in self.pool_states: + # notifies connections using this pool to refresh + self.pool_states[connection].refresh() + + def remove(self, server): + if server in 
self.servers: + self.servers.remove(server) + else: + if log_enabled(ERROR): + log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self) + raise LDAPServerPoolError('server not in server pool') + + if self.single: + if self._pool_state: + self._pool_state.refresh() + else: + for connection in self.pool_states: + # notifies connections using this pool to refresh + self.pool_states[connection].refresh() + + def initialize(self, connection): + # registers pool_state in ServerPool object + if self.single: + if not self._pool_state: + self._pool_state = ServerPoolState(self) + self.pool_states[connection] = self._pool_state + else: + self.pool_states[connection] = ServerPoolState(self) + + def get_server(self, connection): + if connection in self.pool_states: + return self.pool_states[connection].get_server() + else: + if log_enabled(ERROR): + log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) + raise LDAPServerPoolError('connection not in ServerPoolState') + + def get_current_server(self, connection): + if connection in self.pool_states: + return self.pool_states[connection].get_current_server() + else: + if log_enabled(ERROR): + log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) + raise LDAPServerPoolError('connection not in ServerPoolState') diff --git a/server/www/packages/packages-linux/x64/ldap3/core/results.py b/server/www/packages/packages-linux/x64/ldap3/core/results.py index 6f10643..14f8f73 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/results.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/results.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -134,4 +134,4 @@ RESULT_CODES = { } # do not raise exception for (in raise_exceptions connection mode) -DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS] +DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS, RESULT_SIZE_LIMIT_EXCEEDED, RESULT_TIME_LIMIT_EXCEEDED] diff --git a/server/www/packages/packages-linux/x64/ldap3/core/server.py b/server/www/packages/packages-linux/x64/ldap3/core/server.py index 36c782b..43189ef 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/server.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/server.py @@ -1,572 +1,663 @@ -""" -""" - -# Created on 2014.05.31 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -import socket -from threading import Lock -from datetime import datetime, MINYEAR - -from .. 
import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES -from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError -from ..protocol.formatters.standard import format_attribute_values -from ..protocol.rfc4511 import LDAP_MAX_INT -from ..protocol.rfc4512 import SchemaInfo, DsaInfo -from .tls import Tls -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL -from ..utils.conv import to_unicode - -try: - from urllib.parse import unquote # Python 3 -except ImportError: - from urllib import unquote # Python 2 - -try: # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme) - # noinspection PyUnresolvedReferences - from socket import AF_UNIX - unix_socket_available = True -except ImportError: - unix_socket_available = False - - -class Server(object): - """ - LDAP Server definition class - - Allowed_referral_hosts can be None (default), or a list of tuples of - allowed servers ip address or names to contact while redirecting - search to referrals. - - The second element of the tuple is a boolean to indicate if - authentication to that server is allowed; if False only anonymous - bind will be used. - - Per RFC 4516. Use [('*', False)] to allow any host with anonymous - bind, use [('*', True)] to allow any host with same authentication of - Server. - """ - - _message_counter = 0 - _message_id_lock = Lock() # global lock for message_id shared by all Server objects - - - def __init__(self, - host, - port=None, - use_ssl=False, - allowed_referral_hosts=None, - get_info=SCHEMA, - tls=None, - formatter=None, - connect_timeout=None, - mode=IP_V6_PREFERRED, - validator=None): - - self.ipc = False - url_given = False - host = host.strip() - if host.lower().startswith('ldap://'): - self.host = host[7:] - use_ssl = False - url_given = True - elif host.lower().startswith('ldaps://'): - self.host = host[8:] - use_ssl = True - url_given = True - elif host.lower().startswith('ldapi://') and unix_socket_available: - self.ipc = True - use_ssl = False - url_given = True - elif host.lower().startswith('ldapi://') and not unix_socket_available: - raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets non present') - else: - self.host = host - - if self.ipc: - if str is bytes: # Python 2 - self.host = unquote(host[7:]).decode('utf-8') - else: # Python 3 - self.host = unquote(host[7:]) # encoding defaults to utf-8 in python3 - self.port = None - elif ':' in self.host and self.host.count(':') == 1: - hostname, _, hostport = self.host.partition(':') - try: - port = int(hostport) or port - except ValueError: - if log_enabled(ERROR): - log(ERROR, 'port <%s> must be an integer', port) - raise LDAPInvalidPortError('port must be an integer') - self.host = hostname - elif url_given and self.host.startswith('['): - hostname, sep, hostport = self.host[1:].partition(']') - if sep != ']' or not self._is_ipv6(hostname): - if log_enabled(ERROR): - log(ERROR, 'invalid IPv6 server address for <%s>', self.host) - raise LDAPInvalidServerError() - if len(hostport): - if not hostport.startswith(':'): - if log_enabled(ERROR): - log(ERROR, 'invalid URL in server name for <%s>', self.host) - raise LDAPInvalidServerError('invalid URL in server name') - if not hostport[1:].isdecimal(): - if log_enabled(ERROR): - log(ERROR, 
'port must be an integer for <%s>', self.host) - raise LDAPInvalidPortError('port must be an integer') - port = int(hostport[1:]) - self.host = hostname - elif not url_given and self._is_ipv6(self.host): - pass - elif self.host.count(':') > 1: - if log_enabled(ERROR): - log(ERROR, 'invalid server address for <%s>', self.host) - raise LDAPInvalidServerError() - - if not self.ipc: - self.host.rstrip('/') - if not use_ssl and not port: - port = 389 - elif use_ssl and not port: - port = 636 - - if isinstance(port, int): - if port in range(0, 65535): - self.port = port - else: - if log_enabled(ERROR): - log(ERROR, 'port <%s> must be in range from 0 to 65535', port) - raise LDAPInvalidPortError('port must in range from 0 to 65535') - else: - if log_enabled(ERROR): - log(ERROR, 'port <%s> must be an integer', port) - raise LDAPInvalidPortError('port must be an integer') - - if allowed_referral_hosts is None: # defaults to any server with authentication - allowed_referral_hosts = [('*', True)] - - if isinstance(allowed_referral_hosts, SEQUENCE_TYPES): - self.allowed_referral_hosts = [] - for referral_host in allowed_referral_hosts: - if isinstance(referral_host, tuple): - if isinstance(referral_host[1], bool): - self.allowed_referral_hosts.append(referral_host) - elif isinstance(allowed_referral_hosts, tuple): - if isinstance(allowed_referral_hosts[1], bool): - self.allowed_referral_hosts = [allowed_referral_hosts] - else: - self.allowed_referral_hosts = [] - - self.ssl = True if use_ssl else False - if tls and not isinstance(tls, Tls): - if log_enabled(ERROR): - log(ERROR, 'invalid tls specification: <%s>', tls) - raise LDAPInvalidTlsSpecificationError('invalid Tls object') - - self.tls = Tls() if self.ssl and not tls else tls - - if not self.ipc: - if self._is_ipv6(self.host): - self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port) - else: - self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port) - else: - self.name = host - - self.get_info = get_info - self._dsa_info = None - self._schema_info = None - self.dit_lock = Lock() - self.custom_formatter = formatter - self.custom_validator = validator - self._address_info = [] # property self.address_info resolved at open time (or when check_availability is called) - self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date ever - self.current_address = None - self.connect_timeout = connect_timeout - self.mode = mode - - self.get_info_from_server(None) # load offline schema if needed - - if log_enabled(BASIC): - log(BASIC, 'instantiated Server: <%r>', self) - - @staticmethod - def _is_ipv6(host): - try: - socket.inet_pton(socket.AF_INET6, host) - except (socket.error, AttributeError, ValueError): - return False - return True - - def __str__(self): - if self.host: - s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '') - else: - s = object.__str__(self) - return s - - def __repr__(self): - r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self) - r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self) - r += '' if self.tls is None else ', tls={0.tls!r}'.format(self) - r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self) - r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self) - r += '' if not self.mode else ', mode={0.mode!r}'.format(self) - r += ')' - - return r - - @property 
- def address_info(self): - conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME') - if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval: - # converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time - addresses = None - try: - if self.ipc: - addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)] - else: - addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) - except (socket.gaierror, AttributeError): - pass - - if not addresses: # if addresses not found or raised an exception (for example for bad flags) tries again without flags - try: - addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP) - except socket.gaierror: - pass - - if addresses: - self._address_info = [list(address) + [None, None] for address in addresses] - self._address_info_resolved_time = datetime.now() - else: - self._address_info = [] - self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date - - if log_enabled(BASIC): - for address in self._address_info: - log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2]) - return self._address_info - - def update_availability(self, address, available): - cont = 0 - while cont < len(self._address_info): - if self.address_info[cont] == address: - self._address_info[cont][5] = True if available else False - self._address_info[cont][6] = datetime.now() - break - cont += 1 - - def reset_availability(self): - for address in self._address_info: - address[5] = None - address[6] = None - - def check_availability(self): - """ - Tries to open, connect and close a socket to specified address - and port to check availability. 
Timeout in seconds is specified in CHECK_AVAILABITY_TIMEOUT if not specified in - the Server object - """ - conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT') - available = False - self.reset_availability() - for address in self.candidate_addresses(): - available = True - try: - temp_socket = socket.socket(*address[:3]) - if self.connect_timeout: - temp_socket.settimeout(self.connect_timeout) - else: - temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default - try: - temp_socket.connect(address[4]) - except socket.error: - available = False - finally: - try: - temp_socket.shutdown(socket.SHUT_RDWR) - except socket.error: - available = False - finally: - temp_socket.close() - except socket.gaierror: - available = False - - if available: - if log_enabled(BASIC): - log(BASIC, 'server <%s> available at <%r>', self, address) - self.update_availability(address, True) - break # if an available address is found exits immediately - else: - self.update_availability(address, False) - if log_enabled(ERROR): - log(ERROR, 'server <%s> not available at <%r>', self, address) - - return available - - @staticmethod - def next_message_id(): - """ - LDAP messageId is unique for all connections to same server - """ - with Server._message_id_lock: - Server._message_counter += 1 - if Server._message_counter >= LDAP_MAX_INT: - Server._message_counter = 1 - if log_enabled(PROTOCOL): - log(PROTOCOL, 'new message id <%d> generated', Server._message_counter) - - return Server._message_counter - - def _get_dsa_info(self, connection): - """ - Retrieve DSE operational attribute as per RFC4512 (5.1). - """ - if connection.strategy.no_real_dsa: # do not try for mock strategies - return - - if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads - result = connection.search(search_base='', - search_filter='(objectClass=*)', - search_scope=BASE, - attributes=['altServer', # requests specific dsa info attributes - 'namingContexts', - 'supportedControl', - 'supportedExtension', - 'supportedFeatures', - 'supportedCapabilities', - 'supportedLdapVersion', - 'supportedSASLMechanisms', - 'vendorName', - 'vendorVersion', - 'subschemaSubentry', - '*', - '+'], # requests all remaining attributes (other), - get_operational_attributes=True) - - with self.dit_lock: - if isinstance(result, bool): # sync request - self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info - elif result: # asynchronous request, must check if attributes in response - results, _ = connection.get_response(result) - if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: - self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes']) - - if log_enabled(BASIC): - log(BASIC, 'DSA info read for <%s> via <%s>', self, connection) - - def _get_schema_info(self, connection, entry=''): - """ - Retrieve schema from subschemaSubentry DSE attribute, per RFC - 4512 (4.4 and 5.1); entry = '' means DSE. 
- """ - if connection.strategy.no_real_dsa: # do not try for mock strategies - return - - schema_entry = None - if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo - if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES): - schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None - else: - schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None - else: - result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True) - if isinstance(result, bool): # sync request - if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']: - if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0: - schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0] - else: # asynchronous request, must check if subschemaSubentry in attributes - results, _ = connection.get_response(result) - if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']: - if len(results[0]['raw_attributes']['subschemaSubentry']) > 0: - schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0] - - if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads - if isinstance(schema_entry, bytes) and str is not bytes: # Python 3 - schema_entry = to_unicode(schema_entry, from_server=True) - result = connection.search(schema_entry, - search_filter='(objectClass=subschema)', - search_scope=BASE, - attributes=['objectClasses', # requests specific subschema attributes - 'attributeTypes', - 'ldapSyntaxes', - 'matchingRules', - 'matchingRuleUse', - 'dITContentRules', - 'dITStructureRules', - 'nameForms', - 'createTimestamp', - 'modifyTimestamp', - '*'], # requests all remaining attributes (other) - get_operational_attributes=True - ) - with self.dit_lock: - self._schema_info = None - if result: - if isinstance(result, bool): # sync request - self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None - else: # asynchronous request, must check if attributes in response - results, result = connection.get_response(result) - if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: - self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes']) - if self._schema_info and not self._schema_info.is_valid(): # flaky servers can return an empty schema, checks if it is so and set schema to None - self._schema_info = None - if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info - for attribute in self._schema_info.other: - self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter) - if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values - for attribute in self._dsa_info.other: - self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter) - if log_enabled(BASIC): - log(BASIC, 'schema read for <%s> via <%s>', self, connection) - - def get_info_from_server(self, connection): - """ - reads info from DSE and from subschema - """ - if connection and not connection.closed: - if self.get_info in [DSA, ALL]: - 
self._get_dsa_info(connection) - if self.get_info in [SCHEMA, ALL]: - self._get_schema_info(connection) - elif self.get_info == OFFLINE_EDIR_8_8_8: - from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info - self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema)) - self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info)) - elif self.get_info == OFFLINE_AD_2012_R2: - from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info - self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema)) - self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info)) - elif self.get_info == OFFLINE_SLAPD_2_4: - from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info - self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema)) - self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info)) - elif self.get_info == OFFLINE_DS389_1_3_3: - from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info - self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema)) - self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info)) - - def attach_dsa_info(self, dsa_info=None): - if isinstance(dsa_info, DsaInfo): - self._dsa_info = dsa_info - if log_enabled(BASIC): - log(BASIC, 'attached DSA info to Server <%s>', self) - - def attach_schema_info(self, dsa_schema=None): - if isinstance(dsa_schema, SchemaInfo): - self._schema_info = dsa_schema - if log_enabled(BASIC): - log(BASIC, 'attached schema info to Server <%s>', self) - - @property - def info(self): - return self._dsa_info - - @property - def schema(self): - return self._schema_info - - @staticmethod - def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None): - """ - Define a dummy server with preloaded schema and info - :param host: host name - :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name - :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name - :param port: dummy port - :param use_ssl: use_ssl - :param formatter: custom formatter - :return: Server object - """ - if isinstance(host, SEQUENCE_TYPES): - dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) # for ServerPool object - else: - dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) - if isinstance(dsa_info, DsaInfo): - dummy._dsa_info = dsa_info - elif isinstance(dsa_info, STRING_TYPES): - try: - dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string - except Exception: - dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name - - if not dummy.info: - if log_enabled(ERROR): - log(ERROR, 'invalid DSA info for %s', host) - raise LDAPDefinitionError('invalid dsa info') - - if isinstance(dsa_schema, SchemaInfo): - dummy._schema_info = dsa_schema - elif isinstance(dsa_schema, STRING_TYPES): - try: - dummy._schema_info = SchemaInfo.from_json(dsa_schema) - except Exception: - dummy._schema_info = SchemaInfo.from_file(dsa_schema) - - if not dummy.schema: - if log_enabled(ERROR): - log(ERROR, 'invalid schema info for %s', host) - raise LDAPDefinitionError('invalid schema info') - - if log_enabled(BASIC): - log(BASIC, 'created server <%s> from definition', dummy) - - return dummy - - def candidate_addresses(self): - conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT') - if 
self.ipc: - candidates = self.address_info - if log_enabled(BASIC): - log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name) - else: - # checks reset availability timeout - for address in self.address_info: - if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout): - address[5] = None - address[6] = None - - # selects server address based on server mode and availability (in address[5]) - addresses = self.address_info[:] # copy to avoid refreshing while searching candidates - candidates = [] - if addresses: - if self.mode == IP_SYSTEM_DEFAULT: - candidates.append(addresses[0]) - elif self.mode == IP_V4_ONLY: - candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] - elif self.mode == IP_V6_ONLY: - candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] - elif self.mode == IP_V4_PREFERRED: - candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] - candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] - elif self.mode == IP_V6_PREFERRED: - candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] - candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] - else: - if log_enabled(ERROR): - log(ERROR, 'invalid server mode for <%s>', self) - raise LDAPInvalidServerError('invalid server mode') - - if log_enabled(BASIC): - for candidate in candidates: - log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode) - return candidates +""" +""" + +# Created on 2014.05.31 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +import socket +from threading import Lock +from datetime import datetime, MINYEAR + +from .. 
import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_EDIR_9_1_4, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
+from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError, LDAPInfoError
+from ..protocol.formatters.standard import format_attribute_values
+from ..protocol.rfc4511 import LDAP_MAX_INT
+from ..protocol.rfc4512 import SchemaInfo, DsaInfo
+from .tls import Tls
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK
+from ..utils.conv import to_unicode
+from ..utils.port_validators import check_port, check_port_and_port_list
+
+try:
+    from urllib.parse import unquote  # Python 3
+except ImportError:
+    from urllib import unquote  # Python 2
+
+try:  # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
+    # noinspection PyUnresolvedReferences
+    from socket import AF_UNIX
+    unix_socket_available = True
+except ImportError:
+    unix_socket_available = False
+
+
+class Server(object):
+    """
+    LDAP Server definition class
+
+    Allowed_referral_hosts can be None (default), or a list of tuples of
+    allowed server IP addresses or names to contact while redirecting
+    search to referrals.
+
+    The second element of the tuple is a boolean to indicate if
+    authentication to that server is allowed; if False only anonymous
+    bind will be used.
+
+    Per RFC 4516. Use [('*', False)] to allow any host with anonymous
+    bind, use [('*', True)] to allow any host with the same authentication
+    as the Server.
+    """
+
+    _message_counter = 0
+    _message_id_lock = Lock()  # global lock for message_id shared by all Server objects
+
+    def __init__(self,
+                 host,
+                 port=None,
+                 use_ssl=False,
+                 allowed_referral_hosts=None,
+                 get_info=SCHEMA,
+                 tls=None,
+                 formatter=None,
+                 connect_timeout=None,
+                 mode=IP_V6_PREFERRED,
+                 validator=None):
+
+        self.ipc = False
+        url_given = False
+        host = host.strip()
+        if host.lower().startswith('ldap://'):
+            self.host = host[7:]
+            use_ssl = False
+            url_given = True
+        elif host.lower().startswith('ldaps://'):
+            self.host = host[8:]
+            use_ssl = True
+            url_given = True
+        elif host.lower().startswith('ldapi://') and unix_socket_available:
+            self.ipc = True
+            use_ssl = False
+            url_given = True
+        elif host.lower().startswith('ldapi://') and not unix_socket_available:
+            raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets not present')
+        else:
+            self.host = host
+
+        if self.ipc:
+            if str is bytes:  # Python 2
+                self.host = unquote(host[7:]).decode('utf-8')
+            else:  # Python 3
+                self.host = unquote(host[7:])  # encoding defaults to utf-8 in python3
+            self.port = None
+        elif ':' in self.host and self.host.count(':') == 1:
+            hostname, _, hostport = self.host.partition(':')
+            try:
+                port = int(hostport) or port
+            except ValueError:
+                if log_enabled(ERROR):
+                    log(ERROR, 'port <%s> must be an integer', port)
+                raise LDAPInvalidPortError('port must be an integer')
+            self.host = hostname
+        elif url_given and self.host.startswith('['):
+            hostname, sep, hostport = self.host[1:].partition(']')
+            if sep != ']' or not self._is_ipv6(hostname):
+                if log_enabled(ERROR):
+                    log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
+                raise LDAPInvalidServerError()
+            if len(hostport):
+                if not hostport.startswith(':'):
+                    if log_enabled(ERROR):
+                        log(ERROR, 'invalid URL in server name for <%s>', self.host)
+                    raise LDAPInvalidServerError('invalid URL in server name')
+                if not hostport[1:].isdecimal():
+                    if log_enabled(ERROR):
+                        log(ERROR, 'port must be an integer for <%s>', self.host)
+                    raise LDAPInvalidPortError('port must be an integer')
+                port = int(hostport[1:])
+            self.host = hostname
+        elif not url_given and self._is_ipv6(self.host):
+            pass
+        elif self.host.count(':') > 1:
+            if log_enabled(ERROR):
+                log(ERROR, 'invalid server address for <%s>', self.host)
+            raise LDAPInvalidServerError()
+
+        if not self.ipc:
+            self.host = self.host.rstrip('/')  # rstrip returns a new string, so the result must be assigned back
+            if not use_ssl and not port:
+                port = 389
+            elif use_ssl and not port:
+                port = 636
+
+            port_err = check_port(port)
+            if port_err:
+                if log_enabled(ERROR):
+                    log(ERROR, port_err)
+                raise LDAPInvalidPortError(port_err)
+            self.port = port
+
+        if allowed_referral_hosts is None:  # defaults to any server with authentication
+            allowed_referral_hosts = [('*', True)]
+
+        if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
+            self.allowed_referral_hosts = []
+            for referral_host in allowed_referral_hosts:
+                if isinstance(referral_host, tuple):
+                    if isinstance(referral_host[1], bool):
+                        self.allowed_referral_hosts.append(referral_host)
+        elif isinstance(allowed_referral_hosts, tuple):
+            if isinstance(allowed_referral_hosts[1], bool):
+                self.allowed_referral_hosts = [allowed_referral_hosts]
+        else:
+            self.allowed_referral_hosts = []
+
+        self.ssl = True if use_ssl else False
+        if tls and not isinstance(tls, Tls):
+            if log_enabled(ERROR):
+                log(ERROR, 'invalid tls specification: <%s>', tls)
+            raise LDAPInvalidTlsSpecificationError('invalid Tls object')
+
+        self.tls = Tls() if self.ssl and not tls else tls
+
+        if not self.ipc:
+            if self._is_ipv6(self.host):
+                self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
+            else:
+                self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
+        else:
+            self.name = host
+
+        self.get_info = get_info
+        self._dsa_info = None
+        self._schema_info = None
+        self.dit_lock = Lock()
+        self.custom_formatter = formatter
+        self.custom_validator = validator
+        self._address_info = []  # property self.address_info resolved at open time (or when check_availability is called)
+        self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date ever
+        self.current_address = None
+        self.connect_timeout = connect_timeout
+        self.mode = mode
+
+        self.get_info_from_server(None)  # load offline schema if needed
+
+        if log_enabled(BASIC):
+            log(BASIC, 'instantiated Server: <%r>', self)
+
+    @staticmethod
+    def _is_ipv6(host):
+        try:
+            socket.inet_pton(socket.AF_INET6, host)
+        except (socket.error, AttributeError, ValueError):
+            return False
+        return True
+
+    def __str__(self):
+        if self.host:
+            s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
+        else:
+            s = object.__str__(self)
+        return s
+
+    def __repr__(self):
+        r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
+        r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
+        r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
+        r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
+        r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
+        r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
+        r += ')'
+
+        return r
+
+    @property
+    def address_info(self):
+        conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
+        if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
+            # converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False = not available) and a 7th parameter for the checking time
+            addresses = None
+            try:
+                if self.ipc:
+                    addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
+                else:
+                    if self.mode == IP_V4_ONLY:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+                    elif self.mode == IP_V6_ONLY:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+                    else:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+            except (socket.gaierror, AttributeError):
+                pass
+
+            if not addresses:  # if addresses not found or raised an exception (for example for bad flags) tries again without flags
+                try:
+                    if self.mode == IP_V4_ONLY:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+                    elif self.mode == IP_V6_ONLY:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+                    else:
+                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+                except socket.gaierror:
+                    pass
+
+            if addresses:
+                self._address_info = [list(address) + [None, None] for address in addresses]
+                self._address_info_resolved_time = datetime.now()
+            else:
+                self._address_info = []
+                self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date
+
+            if log_enabled(BASIC):
+                for address in self._address_info:
+                    log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
+        return self._address_info
+
+    def update_availability(self, address, available):
+        cont = 0
+        while cont < len(self._address_info):
+            if self.address_info[cont] == address:
+                self._address_info[cont][5] = True if available else False
+                self._address_info[cont][6] = datetime.now()
+                break
+            cont += 1
+
+    def reset_availability(self):
+        for address in self._address_info:
+            address[5] = None
+            address[6] = None
+
+    def check_availability(self, source_address=None, source_port=None, source_port_list=None):
+        """
+        Tries to open, connect and close a socket to the specified address and port to check availability.
+        Timeout in seconds is specified in CHECK_AVAILABILITY_TIMEOUT if not specified in
+        the Server object.
+        If specified, use a specific address, port, or list of possible ports when attempting to check availability.
+        NOTE: This will only consider multiple ports from the source port list if the first ones we try to bind to are
+        already in use. This will not attempt using different ports in the list if the server is unavailable,
+        as that could result in the runtime of check_availability significantly exceeding the connection timeout.
+ """ + source_port_err = check_port_and_port_list(source_port, source_port_list) + if source_port_err: + if log_enabled(ERROR): + log(ERROR, source_port_err) + raise LDAPInvalidPortError(source_port_err) + + # using an empty string to bind a socket means "use the default as if this wasn't provided" because socket + # binding requires that you pass something for the ip if you want to pass a specific port + bind_address = source_address if source_address is not None else '' + # using 0 as the source port to bind a socket means "use the default behavior of picking a random port from + # all ports as if this wasn't provided" because socket binding requires that you pass something for the port + # if you want to pass a specific ip + candidate_bind_ports = [0] + + # if we have either a source port or source port list, convert that into our candidate list + if source_port is not None: + candidate_bind_ports = [source_port] + elif source_port_list is not None: + candidate_bind_ports = source_port_list[:] + + conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT') + available = False + self.reset_availability() + for address in self.candidate_addresses(): + available = True + try: + temp_socket = socket.socket(*address[:3]) + + # Go through our candidate bind ports and try to bind our socket to our source address with them. + # if no source address or ports were specified, this will have the same success/fail result as if we + # tried to connect to the remote server without binding locally first. + # This is actually a little bit better, as it lets us distinguish the case of "issue binding the socket + # locally" from "remote server is unavailable" with more clarity, though this will only really be an + # issue when no source address/port is specified if the system checking server availability is running + # as a very unprivileged user. 
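For illustration, a minimal standalone sketch of the bind-then-connect probe this comment block describes, assuming a plain TCP/IPv4 target; the function name probe_availability and its defaults are hypothetical and not part of ldap3:

import socket

def probe_availability(host, port, source_address='', source_ports=(0,), timeout=5.0):
    # '' and 0 mirror the defaults above: let the OS pick the local address and port
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        for bind_port in source_ports:
            try:
                sock.bind((source_address, bind_port))
                break  # bound a local source port, stop trying candidates
            except OSError:
                continue  # source port already in use, try the next candidate
        else:
            return False  # no candidate source port could be bound
        sock.settimeout(timeout)
        try:
            sock.connect((host, port))
            return True  # remote endpoint reachable
        except socket.error:
            return False
    finally:
        sock.close()

The method in the lines that follow keeps the two failure modes apart, raising LDAPSocketOpenError when no source port can be bound and merely marking the address unavailable when the connect fails; the sketch collapses both into a boolean.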
+ last_bind_exc = None + socket_bind_succeeded = False + for bind_port in candidate_bind_ports: + try: + temp_socket.bind((bind_address, bind_port)) + socket_bind_succeeded = True + break + except Exception as bind_ex: + last_bind_exc = bind_ex + if log_enabled(NETWORK): + log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>', + bind_address, bind_port, bind_ex) + if not socket_bind_succeeded: + if log_enabled(ERROR): + log(ERROR, 'Unable to locally bind to local address <%s> with any of the source ports <%s> due to <%s>', + bind_address, candidate_bind_ports, last_bind_exc) + raise LDAPSocketOpenError('Unable to bind socket locally to address {} with any of the source ports {} due to {}' + .format(bind_address, candidate_bind_ports, last_bind_exc)) + + if self.connect_timeout: + temp_socket.settimeout(self.connect_timeout) + else: + temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default + try: + temp_socket.connect(address[4]) + except socket.error: + available = False + finally: + try: + temp_socket.shutdown(socket.SHUT_RDWR) + except socket.error: + available = False + finally: + temp_socket.close() + except socket.gaierror: + available = False + + if available: + if log_enabled(BASIC): + log(BASIC, 'server <%s> available at <%r>', self, address) + self.update_availability(address, True) + break # if an available address is found exits immediately + else: + self.update_availability(address, False) + if log_enabled(ERROR): + log(ERROR, 'server <%s> not available at <%r>', self, address) + + return available + + @staticmethod + def next_message_id(): + """ + LDAP messageId is unique for all connections to same server + """ + with Server._message_id_lock: + Server._message_counter += 1 + if Server._message_counter >= LDAP_MAX_INT: + Server._message_counter = 1 + if log_enabled(PROTOCOL): + log(PROTOCOL, 'new message id <%d> generated', Server._message_counter) + + return Server._message_counter + + def _get_dsa_info(self, connection): + """ + Retrieve DSE operational attribute as per RFC4512 (5.1). 
+ """ + if connection.strategy.no_real_dsa: # do not try for mock strategies + return + + if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads + result = connection.search(search_base='', + search_filter='(objectClass=*)', + search_scope=BASE, + attributes=['altServer', # requests specific dsa info attributes + 'namingContexts', + 'supportedControl', + 'supportedExtension', + 'supportedFeatures', + 'supportedCapabilities', + 'supportedLdapVersion', + 'supportedSASLMechanisms', + 'vendorName', + 'vendorVersion', + 'subschemaSubentry', + '*', + '+'], # requests all remaining attributes (other), + get_operational_attributes=True) + + with self.dit_lock: + if isinstance(result, bool): # sync request + self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info + elif result: # asynchronous request, must check if attributes in response + results, _ = connection.get_response(result) + if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: + self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes']) + + if log_enabled(BASIC): + log(BASIC, 'DSA info read for <%s> via <%s>', self, connection) + + def _get_schema_info(self, connection, entry=''): + """ + Retrieve schema from subschemaSubentry DSE attribute, per RFC + 4512 (4.4 and 5.1); entry = '' means DSE. + """ + if connection.strategy.no_real_dsa: # do not try for mock strategies + return + + schema_entry = None + if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo + if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES): + schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None + else: + schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None + else: + result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True) + if isinstance(result, bool): # sync request + if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']: + if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0: + schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0] + else: # asynchronous request, must check if subschemaSubentry in attributes + results, _ = connection.get_response(result) + if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']: + if len(results[0]['raw_attributes']['subschemaSubentry']) > 0: + schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0] + + if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads + if isinstance(schema_entry, bytes) and str is not bytes: # Python 3 + schema_entry = to_unicode(schema_entry, from_server=True) + result = connection.search(schema_entry, + search_filter='(objectClass=subschema)', + search_scope=BASE, + attributes=['objectClasses', # requests specific subschema attributes + 'attributeTypes', + 'ldapSyntaxes', + 'matchingRules', + 'matchingRuleUse', + 'dITContentRules', + 'dITStructureRules', + 'nameForms', + 'createTimestamp', + 'modifyTimestamp', + '*'], # requests all remaining attributes (other) + get_operational_attributes=True + ) + with self.dit_lock: + self._schema_info = None + if result: + if isinstance(result, bool): # sync request + self._schema_info = SchemaInfo(schema_entry, 
connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None + else: # asynchronous request, must check if attributes in response + results, result = connection.get_response(result) + if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: + self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes']) + if self._schema_info and not self._schema_info.is_valid(): # flaky servers can return an empty schema, checks if it is so and set schema to None + self._schema_info = None + if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info + for attribute in self._schema_info.other: + self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter) + if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values + for attribute in self._dsa_info.other: + self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter) + if log_enabled(BASIC): + log(BASIC, 'schema read for <%s> via <%s>', self, connection) + + def get_info_from_server(self, connection): + """ + reads info from DSE and from subschema + """ + if connection and not connection.closed: + if self.get_info in [DSA, ALL]: + self._get_dsa_info(connection) + if self.get_info in [SCHEMA, ALL]: + self._get_schema_info(connection) + elif self.get_info == OFFLINE_EDIR_8_8_8: + from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info + self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema)) + self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info)) + elif self.get_info == OFFLINE_EDIR_9_1_4: + from ..protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info + self.attach_schema_info(SchemaInfo.from_json(edir_9_1_4_schema)) + self.attach_dsa_info(DsaInfo.from_json(edir_9_1_4_dsa_info)) + elif self.get_info == OFFLINE_AD_2012_R2: + from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info + self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema)) + self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info)) + elif self.get_info == OFFLINE_SLAPD_2_4: + from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info + self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema)) + self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info)) + elif self.get_info == OFFLINE_DS389_1_3_3: + from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info + self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema)) + self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info)) + + def attach_dsa_info(self, dsa_info=None): + if isinstance(dsa_info, DsaInfo): + self._dsa_info = dsa_info + if log_enabled(BASIC): + log(BASIC, 'attached DSA info to Server <%s>', self) + + def attach_schema_info(self, dsa_schema=None): + if isinstance(dsa_schema, SchemaInfo): + self._schema_info = dsa_schema + if log_enabled(BASIC): + log(BASIC, 'attached schema info to Server <%s>', self) + + @property + def info(self): + return self._dsa_info + + @property + def schema(self): + return self._schema_info + + @staticmethod + def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None): + """ + Define a dummy server with preloaded schema and info + :param host: host name 
+ :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name + :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name + :param port: fake port + :param use_ssl: use_ssl + :param formatter: custom formatters + :return: Server object + """ + if isinstance(host, SEQUENCE_TYPES): + dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) # for ServerPool object + else: + dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) + if isinstance(dsa_info, DsaInfo): + dummy._dsa_info = dsa_info + elif isinstance(dsa_info, STRING_TYPES): + try: + dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string + except Exception: + dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name + + if not dummy.info: + if log_enabled(ERROR): + log(ERROR, 'invalid DSA info for %s', host) + raise LDAPDefinitionError('invalid dsa info') + + if isinstance(dsa_schema, SchemaInfo): + dummy._schema_info = dsa_schema + elif isinstance(dsa_schema, STRING_TYPES): + try: + dummy._schema_info = SchemaInfo.from_json(dsa_schema) + except Exception: + dummy._schema_info = SchemaInfo.from_file(dsa_schema) + + if not dummy.schema: + if log_enabled(ERROR): + log(ERROR, 'invalid schema info for %s', host) + raise LDAPDefinitionError('invalid schema info') + + if log_enabled(BASIC): + log(BASIC, 'created server <%s> from definition', dummy) + + return dummy + + def candidate_addresses(self): + conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT') + if self.ipc: + candidates = self.address_info + if log_enabled(BASIC): + log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name) + else: + # checks reset availability timeout + for address in self.address_info: + if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout): + address[5] = None + address[6] = None + + # selects server address based on server mode and availability (in address[5]) + addresses = self.address_info[:] # copy to avoid refreshing while searching candidates + candidates = [] + if addresses: + if self.mode == IP_SYSTEM_DEFAULT: + candidates.append(addresses[0]) + elif self.mode == IP_V4_ONLY: + candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] + elif self.mode == IP_V6_ONLY: + candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] + elif self.mode == IP_V4_PREFERRED: + candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] + candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] + elif self.mode == IP_V6_PREFERRED: + candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] + candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] + else: + if log_enabled(ERROR): + log(ERROR, 'invalid server mode for <%s>', self) + raise LDAPInvalidServerError('invalid server mode') + + if log_enabled(BASIC): + for candidate in candidates: + log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode) + return candidates + + def 
_check_info_property(self, kind, name): + if not self._dsa_info: + raise LDAPInfoError('server info not loaded') + + if kind == 'control': + properties = self.info.supported_controls + elif kind == 'extension': + properties = self.info.supported_extensions + elif kind == 'feature': + properties = self.info.supported_features + else: + raise LDAPInfoError('invalid info category') + + for prop in properties: + if name == prop[0] or (prop[2] and name.lower() == prop[2].lower()): # checks oid and description + return True + + return False + + def has_control(self, control): + return self._check_info_property('control', control) + + def has_extension(self, extension): + return self._check_info_property('extension', extension) + + def has_feature(self, feature): + return self._check_info_property('feature', feature) + + + diff --git a/server/www/packages/packages-linux/x64/ldap3/core/timezone.py b/server/www/packages/packages-linux/x64/ldap3/core/timezone.py index 728f73b..0c24a77 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/timezone.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/timezone.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/core/tls.py b/server/www/packages/packages-linux/x64/ldap3/core/tls.py index befb019..1539b9f 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/tls.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/tls.py @@ -1,321 +1,327 @@ -""" -""" - -# Created on 2013.08.05 -# -# Author: Giovanni Cannata -# -# Copyright 2013 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory -from .. 
import SEQUENCE_TYPES -from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK - -try: - # noinspection PyUnresolvedReferences - import ssl -except ImportError: - if log_enabled(ERROR): - log(ERROR, 'SSL not supported in this Python interpreter') - raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter') - -try: - from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities -except ImportError: - from ..utils.tls_backport import CertificateError - from ..utils.tls_backport import match_hostname - if log_enabled(BASIC): - log(BASIC, 'using tls_backport') - -try: # try to use SSLContext - # noinspection PyUnresolvedReferences - from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9 - use_ssl_context = True -except ImportError: - use_ssl_context = False - if log_enabled(BASIC): - log(BASIC, 'SSLContext unavailable') - -from os import path - - -# noinspection PyProtectedMember -class Tls(object): - """ - tls/ssl configuration for Server object - Starting from python 2.7.9 and python 3.4 uses the SSLContext object - that tries to read the CAs defined at system level - ca_certs_path and ca_certs_data are valid only when using SSLContext - local_private_key_password is valid only when using SSLContext - sni is the server name for Server Name Indication (when available) - """ - - def __init__(self, - local_private_key_file=None, - local_certificate_file=None, - validate=ssl.CERT_NONE, - version=None, - ca_certs_file=None, - valid_names=None, - ca_certs_path=None, - ca_certs_data=None, - local_private_key_password=None, - ciphers=None, - sni=None): - - if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]: - self.validate = validate - elif validate: - if log_enabled(ERROR): - log(ERROR, 'invalid validate parameter <%s>', validate) - raise LDAPSSLConfigurationError('invalid validate parameter') - if ca_certs_file and path.exists(ca_certs_file): - self.ca_certs_file = ca_certs_file - elif ca_certs_file: - if log_enabled(ERROR): - log(ERROR, 'invalid CA public key file <%s>', ca_certs_file) - raise LDAPSSLConfigurationError('invalid CA public key file') - else: - self.ca_certs_file = None - - if ca_certs_path and use_ssl_context and path.exists(ca_certs_path): - self.ca_certs_path = ca_certs_path - elif ca_certs_path and not use_ssl_context: - if log_enabled(ERROR): - log(ERROR, 'cannot use CA public keys path, SSLContext not available') - raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available') - elif ca_certs_path: - if log_enabled(ERROR): - log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path) - raise LDAPSSLConfigurationError('invalid CA public keys path') - else: - self.ca_certs_path = None - - if ca_certs_data and use_ssl_context: - self.ca_certs_data = ca_certs_data - elif ca_certs_data: - if log_enabled(ERROR): - log(ERROR, 'cannot use CA data, SSLContext not available') - raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available') - else: - self.ca_certs_data = None - - if local_private_key_password and use_ssl_context: - self.private_key_password = local_private_key_password - elif local_private_key_password: - if log_enabled(ERROR): - log(ERROR, 'cannot use local private key password, SSLContext not available') - raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available') - else: - self.private_key_password = None - - self.version = version - self.private_key_file = 
local_private_key_file - self.certificate_file = local_certificate_file - self.valid_names = valid_names - self.ciphers = ciphers - self.sni = sni - - if log_enabled(BASIC): - log(BASIC, 'instantiated Tls: <%r>' % self) - - def __str__(self): - s = [ - 'protocol: ' + str(self.version), - 'client private key: ' + ('present ' if self.private_key_file else 'not present'), - 'client certificate: ' + ('present ' if self.certificate_file else 'not present'), - 'private key password: ' + ('present ' if self.private_key_password else 'not present'), - 'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'), - 'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'), - 'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'), - 'verify mode: ' + str(self.validate), - 'valid names: ' + str(self.valid_names), - 'ciphers: ' + str(self.ciphers), - 'sni: ' + str(self.sni) - ] - return ' - '.join(s) - - def __repr__(self): - r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self) - r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self) - r += '' if self.validate is None else ', validate={0.validate!r}'.format(self) - r += '' if self.version is None else ', version={0.version!r}'.format(self) - r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self) - r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self) - r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self) - r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self) - r += '' if self.sni is None else ', sni={0.sni!r}'.format(self) - r = 'Tls(' + r[2:] + ')' - return r - - def wrap_socket(self, connection, do_handshake=False): - """ - Adds TLS to the connection socket - """ - if use_ssl_context: - if self.version is None: # uses the default ssl context for reasonable security - ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH, - cafile=self.ca_certs_file, - capath=self.ca_certs_path, - cadata=self.ca_certs_data) - else: # code from create_default_context in the Python standard library 3.5.1, creates a ssl context with the specificd protocol version - ssl_context = ssl.SSLContext(self.version) - if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data: - ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data) - elif self.validate != ssl.CERT_NONE: - ssl_context.load_default_certs(Purpose.SERVER_AUTH) - - if self.certificate_file: - ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password) - ssl_context.check_hostname = False - ssl_context.verify_mode = self.validate - - if self.ciphers: - try: - ssl_context.set_ciphers(self.ciphers) - except ssl.SSLError: - pass - - if self.sni: - wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni) - else: - wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake) - if log_enabled(NETWORK): - log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection) - else: - if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'): - self.version = ssl.PROTOCOL_SSLv23 - if self.ciphers: - try: - - wrapped_socket = 
ssl.wrap_socket(connection.socket, - keyfile=self.private_key_file, - certfile=self.certificate_file, - server_side=False, - cert_reqs=self.validate, - ssl_version=self.version, - ca_certs=self.ca_certs_file, - do_handshake_on_connect=do_handshake, - ciphers=self.ciphers) - except ssl.SSLError: - raise - except TypeError: # in python2.6 no ciphers argument is present, failback to self.ciphers=None - self.ciphers = None - - if not self.ciphers: - wrapped_socket = ssl.wrap_socket(connection.socket, - keyfile=self.private_key_file, - certfile=self.certificate_file, - server_side=False, - cert_reqs=self.validate, - ssl_version=self.version, - ca_certs=self.ca_certs_file, - do_handshake_on_connect=do_handshake) - if log_enabled(NETWORK): - log(NETWORK, 'socket wrapped with SSL for <%s>', connection) - - if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL): - check_hostname(wrapped_socket, connection.server.host, self.valid_names) - - connection.socket = wrapped_socket - return - - def start_tls(self, connection): - if connection.server.ssl: # ssl already established at server level - return False - - if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress: - # Per RFC 4513 (3.1.1) - if log_enabled(ERROR): - log(ERROR, "can't start tls because operations are in progress for <%s>", self) - return False - connection.starting_tls = True - if log_enabled(BASIC): - log(BASIC, 'starting tls for <%s>', connection) - if not connection.strategy.sync: - connection._awaiting_for_async_start_tls = True # some flaky servers (OpenLDAP) doesn't return the extended response name in response - result = connection.extended('1.3.6.1.4.1.1466.20037') - if not connection.strategy.sync: - # asynchronous - _start_tls must be executed by the strategy - response = connection.get_response(result) - if response != (None, None): - if log_enabled(BASIC): - log(BASIC, 'tls started for <%s>', connection) - return True - else: - if log_enabled(BASIC): - log(BASIC, 'tls not started for <%s>', connection) - return False - else: - if connection.result['description'] not in ['success']: - # startTLS failed - connection.last_error = 'startTLS failed - ' + str(connection.result['description']) - if log_enabled(ERROR): - log(ERROR, '%s for <%s>', connection.last_error, connection) - raise LDAPStartTLSError(connection.last_error) - if log_enabled(BASIC): - log(BASIC, 'tls started for <%s>', connection) - return self._start_tls(connection) - - def _start_tls(self, connection): - try: - self.wrap_socket(connection, do_handshake=True) - except Exception as e: - connection.last_error = 'wrap socket error: ' + str(e) - if log_enabled(ERROR): - log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection) - raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error) - finally: - connection.starting_tls = False - - if connection.usage: - connection._usage.wrapped_sockets += 1 - connection.tls_started = True - return True - - -def check_hostname(sock, server_name, additional_names): - server_certificate = sock.getpeercert() - if log_enabled(NETWORK): - log(NETWORK, 'certificate found for %s: %s', sock, server_certificate) - if additional_names: - host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names]) - else: - host_names = [server_name] - - for host_name in host_names: - if not host_name: - continue - elif 
host_name == '*': - if log_enabled(NETWORK): - log(NETWORK, 'certificate matches * wildcard') - return # valid - - try: - match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name - if log_enabled(NETWORK): - log(NETWORK, 'certificate matches host name <%s>', host_name) - return # valid - except CertificateError as e: - if log_enabled(NETWORK): - log(NETWORK, str(e)) - - if log_enabled(ERROR): - log(ERROR, "hostname doesn't match certificate") - raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names))) +""" +""" + +# Created on 2013.08.05 +# +# Author: Giovanni Cannata +# +# Copyright 2013 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory +from .. import SEQUENCE_TYPES +from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK + +try: + # noinspection PyUnresolvedReferences + import ssl +except ImportError: + if log_enabled(ERROR): + log(ERROR, 'SSL not supported in this Python interpreter') + raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter') + +try: + from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities +except ImportError: + from ..utils.tls_backport import CertificateError + from ..utils.tls_backport import match_hostname + if log_enabled(BASIC): + log(BASIC, 'using tls_backport') + +try: # try to use SSLContext + # noinspection PyUnresolvedReferences + from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9 + use_ssl_context = True +except ImportError: + use_ssl_context = False + if log_enabled(BASIC): + log(BASIC, 'SSLContext unavailable') + +from os import path + + +# noinspection PyProtectedMember +class Tls(object): + """ + tls/ssl configuration for Server object + Starting from python 2.7.9 and python 3.4 uses the SSLContext object + that tries to read the CAs defined at system level + ca_certs_path and ca_certs_data are valid only when using SSLContext + local_private_key_password is valid only when using SSLContext + ssl_options is valid only when using SSLContext + sni is the server name for Server Name Indication (when available) + """ + + def __init__(self, + local_private_key_file=None, + local_certificate_file=None, + validate=ssl.CERT_NONE, + version=None, + ssl_options=None, + ca_certs_file=None, + valid_names=None, + ca_certs_path=None, + ca_certs_data=None, + local_private_key_password=None, + ciphers=None, + sni=None): + if ssl_options is None: + ssl_options = [] + self.ssl_options = ssl_options + if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]: + self.validate = validate + elif validate: + if log_enabled(ERROR): + log(ERROR, 
'invalid validate parameter <%s>', validate) + raise LDAPSSLConfigurationError('invalid validate parameter') + if ca_certs_file and path.exists(ca_certs_file): + self.ca_certs_file = ca_certs_file + elif ca_certs_file: + if log_enabled(ERROR): + log(ERROR, 'invalid CA public key file <%s>', ca_certs_file) + raise LDAPSSLConfigurationError('invalid CA public key file') + else: + self.ca_certs_file = None + + if ca_certs_path and use_ssl_context and path.exists(ca_certs_path): + self.ca_certs_path = ca_certs_path + elif ca_certs_path and not use_ssl_context: + if log_enabled(ERROR): + log(ERROR, 'cannot use CA public keys path, SSLContext not available') + raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available') + elif ca_certs_path: + if log_enabled(ERROR): + log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path) + raise LDAPSSLConfigurationError('invalid CA public keys path') + else: + self.ca_certs_path = None + + if ca_certs_data and use_ssl_context: + self.ca_certs_data = ca_certs_data + elif ca_certs_data: + if log_enabled(ERROR): + log(ERROR, 'cannot use CA data, SSLContext not available') + raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available') + else: + self.ca_certs_data = None + + if local_private_key_password and use_ssl_context: + self.private_key_password = local_private_key_password + elif local_private_key_password: + if log_enabled(ERROR): + log(ERROR, 'cannot use local private key password, SSLContext not available') + raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available') + else: + self.private_key_password = None + + self.version = version + self.private_key_file = local_private_key_file + self.certificate_file = local_certificate_file + self.valid_names = valid_names + self.ciphers = ciphers + self.sni = sni + + if log_enabled(BASIC): + log(BASIC, 'instantiated Tls: <%r>' % self) + + def __str__(self): + s = [ + 'protocol: ' + str(self.version), + 'client private key: ' + ('present ' if self.private_key_file else 'not present'), + 'client certificate: ' + ('present ' if self.certificate_file else 'not present'), + 'private key password: ' + ('present ' if self.private_key_password else 'not present'), + 'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'), + 'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'), + 'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'), + 'verify mode: ' + str(self.validate), + 'valid names: ' + str(self.valid_names), + 'ciphers: ' + str(self.ciphers), + 'sni: ' + str(self.sni) + ] + return ' - '.join(s) + + def __repr__(self): + r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self) + r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self) + r += '' if self.validate is None else ', validate={0.validate!r}'.format(self) + r += '' if self.version is None else ', version={0.version!r}'.format(self) + r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self) + r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self) + r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self) + r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self) + r += '' if self.sni is None else ', sni={0.sni!r}'.format(self) + r = 'Tls(' + r[2:] + ')' 
+ return r + + def wrap_socket(self, connection, do_handshake=False): + """ + Adds TLS to the connection socket + """ + if use_ssl_context: + if self.version is None: # uses the default ssl context for reasonable security + ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH, + cafile=self.ca_certs_file, + capath=self.ca_certs_path, + cadata=self.ca_certs_data) + else: # code from create_default_context in the Python standard library 3.5.1, creates an SSL context with the specified protocol version + ssl_context = ssl.SSLContext(self.version) + if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data: + ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data) + elif self.validate != ssl.CERT_NONE: + ssl_context.load_default_certs(Purpose.SERVER_AUTH) + + if self.certificate_file: + ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password) + ssl_context.check_hostname = False + ssl_context.verify_mode = self.validate + for option in self.ssl_options: + ssl_context.options |= option + + if self.ciphers: + try: + ssl_context.set_ciphers(self.ciphers) + except ssl.SSLError: + pass + + if self.sni: + wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni) + else: + wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake) + if log_enabled(NETWORK): + log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection) + else: + if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'): + self.version = ssl.PROTOCOL_SSLv23 + if self.ciphers: + try: + + wrapped_socket = ssl.wrap_socket(connection.socket, + keyfile=self.private_key_file, + certfile=self.certificate_file, + server_side=False, + cert_reqs=self.validate, + ssl_version=self.version, + ca_certs=self.ca_certs_file, + do_handshake_on_connect=do_handshake, + ciphers=self.ciphers) + except ssl.SSLError: + raise + except TypeError: # in python2.6 no ciphers argument is present, fall back to self.ciphers=None + self.ciphers = None + + if not self.ciphers: + wrapped_socket = ssl.wrap_socket(connection.socket, + keyfile=self.private_key_file, + certfile=self.certificate_file, + server_side=False, + cert_reqs=self.validate, + ssl_version=self.version, + ca_certs=self.ca_certs_file, + do_handshake_on_connect=do_handshake) + if log_enabled(NETWORK): + log(NETWORK, 'socket wrapped with SSL for <%s>', connection) + + if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL): + check_hostname(wrapped_socket, connection.server.host, self.valid_names) + + connection.socket = wrapped_socket + return + + def start_tls(self, connection): + if connection.server.ssl: # ssl already established at server level + return False + + if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress: + # Per RFC 4513 (3.1.1) + if log_enabled(ERROR): + log(ERROR, "can't start tls because operations are in progress for <%s>", self) + return False + connection.starting_tls = True + if log_enabled(BASIC): + log(BASIC, 'starting tls for <%s>', connection) + if not connection.strategy.sync: + connection._awaiting_for_async_start_tls = True # some flaky servers (OpenLDAP) don't return the extended response name in the response + result = connection.extended('1.3.6.1.4.1.1466.20037') + if not
connection.strategy.sync: + # asynchronous - _start_tls must be executed by the strategy + response = connection.get_response(result) + if response != (None, None): + if log_enabled(BASIC): + log(BASIC, 'tls started for <%s>', connection) + return True + else: + if log_enabled(BASIC): + log(BASIC, 'tls not started for <%s>', connection) + return False + else: + if connection.result['description'] not in ['success']: + # startTLS failed + connection.last_error = 'startTLS failed - ' + str(connection.result['description']) + if log_enabled(ERROR): + log(ERROR, '%s for <%s>', connection.last_error, connection) + raise LDAPStartTLSError(connection.last_error) + if log_enabled(BASIC): + log(BASIC, 'tls started for <%s>', connection) + return self._start_tls(connection) + + def _start_tls(self, connection): + try: + self.wrap_socket(connection, do_handshake=True) + except Exception as e: + connection.last_error = 'wrap socket error: ' + str(e) + if log_enabled(ERROR): + log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection) + raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error) + finally: + connection.starting_tls = False + + if connection.usage: + connection._usage.wrapped_sockets += 1 + connection.tls_started = True + return True + + +def check_hostname(sock, server_name, additional_names): + server_certificate = sock.getpeercert() + if log_enabled(NETWORK): + log(NETWORK, 'certificate found for %s: %s', sock, server_certificate) + if additional_names: + host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names]) + else: + host_names = [server_name] + + for host_name in host_names: + if not host_name: + continue + elif host_name == '*': + if log_enabled(NETWORK): + log(NETWORK, 'certificate matches * wildcard') + return # valid + + try: + match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name + if log_enabled(NETWORK): + log(NETWORK, 'certificate matches host name <%s>', host_name) + return # valid + except CertificateError as e: + if log_enabled(NETWORK): + log(NETWORK, str(e)) + + if log_enabled(ERROR): + log(ERROR, "hostname doesn't match certificate") + raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names))) diff --git a/server/www/packages/packages-linux/x64/ldap3/core/usage.py b/server/www/packages/packages-linux/x64/ldap3/core/usage.py index 187d415..7748c76 100644 --- a/server/www/packages/packages-linux/x64/ldap3/core/usage.py +++ b/server/www/packages/packages-linux/x64/ldap3/core/usage.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/__init__.py b/server/www/packages/packages-linux/x64/ldap3/extend/__init__.py index 24f426e..32795ef 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/__init__.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/__init__.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
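The reworked tls.py above adds SNI support, in-memory CA data (ca_certs_data) and SSLContext-based socket wrapping. As a minimal sketch of how these options are typically wired together from application code — the host name, DN, password and CA path below are placeholders, not part of this patch:

import ssl
from ldap3 import Server, Connection, Tls

# 'valid_names' feeds check_hostname(); 'sni' is passed to
# SSLContext.wrap_socket() as server_hostname (see wrap_socket above).
tls = Tls(validate=ssl.CERT_REQUIRED,
          ca_certs_file='/etc/ssl/certs/ca.pem',
          valid_names=['ldap.example.com'],
          sni='ldap.example.com')
server = Server('ldap.example.com', port=389, tls=tls)
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret')
conn.open()
conn.start_tls()  # issues extended operation 1.3.6.1.4.1.1466.20037, then wraps the socket
conn.bind()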
# @@ -169,6 +169,33 @@ class StandardExtendedOperations(ExtendedOperationContainer): streaming, callback) + def funnel_search(self, + search_base='', + search_filter='', + search_scope=SUBTREE, + dereference_aliases=DEREF_NEVER, + attributes=ALL_ATTRIBUTES, + size_limit=0, + time_limit=0, + controls=None, + streaming=False, + callback=None + ): + return PersistentSearch(self._connection, + search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + controls, + None, + None, + None, + streaming, + callback) + class NovellExtendedOperations(ExtendedOperationContainer): def get_bind_dn(self, controls=None): diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/addMembersToGroups.py b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/addMembersToGroups.py index 28c409f..eaf6cfd 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/addMembersToGroups.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/addMembersToGroups.py @@ -1,81 +1,93 @@ -""" -""" - -# Created on 2016.12.26 -# -# Author: Giovanni Cannata -# -# Copyright 2016 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . -from ...core.exceptions import LDAPInvalidDnError -from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER - - -def ad_add_members_to_groups(connection, - members_dn, - groups_dn, - fix=True): - """ - :param connection: a bound Connection object - :param members_dn: the list of members to add to groups - :param groups_dn: the list of groups where members are to be added - :param fix: checks for group existence and already assigned members - :return: a boolean where True means that the operation was successful and False means an error has happened - Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups. - Raises LDAPInvalidDnError if members or groups are not found in the DIT. 
- """ - - if not isinstance(members_dn, SEQUENCE_TYPES): - members_dn = [members_dn] - - if not isinstance(groups_dn, SEQUENCE_TYPES): - groups_dn = [groups_dn] - - error = False - for group in groups_dn: - if fix: # checks for existance of group and for already assigned members - result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member']) - - if not connection.strategy.sync: - response, result = connection.get_response(result) - else: - response, result = connection.response, connection.result - - if not result['description'] == 'success': - raise LDAPInvalidDnError(group + ' not found') - - existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] - existing_members = [element.lower() for element in existing_members] - else: - existing_members = [] - - changes = dict() - member_to_add = [element for element in members_dn if element.lower() not in existing_members] - if member_to_add: - changes['member'] = (MODIFY_ADD, member_to_add) - if changes: - result = connection.modify(group, changes) - if not connection.strategy.sync: - _, result = connection.get_response(result) - else: - result = connection.result - if result['description'] != 'success': - error = True - break - - return not error # returns True if no error is raised in the LDAP operations +""" +""" + +# Created on 2016.12.26 +# +# Author: Giovanni Cannata +# +# Copyright 2016 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER +from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult +from ...utils.dn import safe_dn + + +def ad_add_members_to_groups(connection, + members_dn, + groups_dn, + fix=True, + raise_error=False): + """ + :param connection: a bound Connection object + :param members_dn: the list of members to add to groups + :param groups_dn: the list of groups where members are to be added + :param fix: checks for group existence and already assigned members + :param raise_error: If the operation fails it raises an error instead of returning False + :return: a boolean where True means that the operation was successful and False means an error has happened + Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups. + Raises LDAPInvalidDnError if members or groups are not found in the DIT. 
+ """ + + if not isinstance(members_dn, SEQUENCE_TYPES): + members_dn = [members_dn] + + if not isinstance(groups_dn, SEQUENCE_TYPES): + groups_dn = [groups_dn] + + if connection.check_names: # builds new lists with sanitized dn + members_dn = [safe_dn(member_dn) for member_dn in members_dn] + groups_dn = [safe_dn(group_dn) for group_dn in groups_dn] + + error = False + for group in groups_dn: + if fix: # checks for existance of group and for already assigned members + result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, + attributes=['member']) + + if not connection.strategy.sync: + response, result = connection.get_response(result) + else: + response, result = connection.response, connection.result + + if not result['description'] == 'success': + raise LDAPInvalidDnError(group + ' not found') + + existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] + existing_members = [element.lower() for element in existing_members] + else: + existing_members = [] + + changes = dict() + member_to_add = [element for element in members_dn if element.lower() not in existing_members] + if member_to_add: + changes['member'] = (MODIFY_ADD, member_to_add) + if changes: + result = connection.modify(group, changes) + if not connection.strategy.sync: + _, result = connection.get_response(result) + else: + result = connection.result + if result['description'] != 'success': + error = True + result_error_params = ['result', 'description', 'dn', 'message'] + if raise_error: + raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params]) + break + + return not error # returns True if no error is raised in the LDAP operations diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/dirSync.py b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/dirSync.py index cb18e7a..db403a1 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/dirSync.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/dirSync.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/modifyPassword.py b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/modifyPassword.py index 4a17fb0..0bf1c06 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/modifyPassword.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/modifyPassword.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/removeMembersFromGroups.py b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/removeMembersFromGroups.py index 1b7feb3..0998713 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/removeMembersFromGroups.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/removeMembersFromGroups.py @@ -1,93 +1,92 @@ -""" -""" - -# Created on 2016.12.26 -# -# Author: Giovanni Cannata -# -# Copyright 2016 - 2018 Giovanni Cannata -# -# This file is part of ldap3. 
-# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . -from ...core.exceptions import LDAPInvalidDnError -from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER -from ...utils.dn import safe_dn - - -def ad_remove_members_from_groups(connection, - members_dn, - groups_dn, - fix): - """ - :param connection: a bound Connection object - :param members_dn: the list of members to remove from groups - :param groups_dn: the list of groups where members are to be removed - :param fix: checks for group existence and existing members - :return: a boolean where True means that the operation was successful and False means an error has happened - Removes users-groups relations following the Activwe Directory rules: users are removed from groups' member attribute - - """ - if not isinstance(members_dn, SEQUENCE_TYPES): - members_dn = [members_dn] - - if not isinstance(groups_dn, SEQUENCE_TYPES): - groups_dn = [groups_dn] - - if connection.check_names: # builds new lists with sanitized dn - safe_members_dn = [] - safe_groups_dn = [] - for member_dn in members_dn: - safe_members_dn.append(safe_dn(member_dn)) - for group_dn in groups_dn: - safe_groups_dn.append(safe_dn(group_dn)) - - members_dn = safe_members_dn - groups_dn = safe_groups_dn - - error = False - - for group in groups_dn: - if fix: # checks for existance of group and for already assigned members - result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member']) - - if not connection.strategy.sync: - response, result = connection.get_response(result) - else: - response, result = connection.response, connection.result - - if not result['description'] == 'success': - raise LDAPInvalidDnError(group + ' not found') - - existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] - else: - existing_members = members_dn - - existing_members = [element.lower() for element in existing_members] - changes = dict() - member_to_remove = [element for element in members_dn if element.lower() in existing_members] - if member_to_remove: - changes['member'] = (MODIFY_DELETE, member_to_remove) - if changes: - result = connection.modify(group, changes) - if not connection.strategy.sync: - _, result = connection.get_response(result) - else: - result = connection.result - if result['description'] != 'success': - error = True - break - - return not error +""" +""" + +# Created on 2016.12.26 +# +# Author: Giovanni Cannata +# +# Copyright 2016 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult +from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER +from ...utils.dn import safe_dn + + +def ad_remove_members_from_groups(connection, + members_dn, + groups_dn, + fix, + raise_error=False): + """ + :param connection: a bound Connection object + :param members_dn: the list of members to remove from groups + :param groups_dn: the list of groups where members are to be removed + :param fix: checks for group existence and existing members + :param raise_error: If the operation fails it raises an error instead of returning False + :return: a boolean where True means that the operation was successful and False means an error has happened + Removes users-groups relations following the Active Directory rules: users are removed from groups' member attribute + + """ + if not isinstance(members_dn, SEQUENCE_TYPES): + members_dn = [members_dn] + + if not isinstance(groups_dn, SEQUENCE_TYPES): + groups_dn = [groups_dn] + + if connection.check_names: # builds new lists with sanitized dn + members_dn = [safe_dn(member_dn) for member_dn in members_dn] + groups_dn = [safe_dn(group_dn) for group_dn in groups_dn] + + error = False + + for group in groups_dn: + if fix: # checks for existence of group and for already assigned members + result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member']) + + if not connection.strategy.sync: + response, result = connection.get_response(result) + else: + response, result = connection.response, connection.result + + if not result['description'] == 'success': + raise LDAPInvalidDnError(group + ' not found') + + existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] + else: + existing_members = members_dn + + existing_members = [element.lower() for element in existing_members] + changes = dict() + member_to_remove = [element for element in members_dn if element.lower() in existing_members] + if member_to_remove: + changes['member'] = (MODIFY_DELETE, member_to_remove) + if changes: + result = connection.modify(group, changes) + if not connection.strategy.sync: + _, result = connection.get_response(result) + else: + result = connection.result + if result['description'] != 'success': + error = True + result_error_params = ['result', 'description', 'dn', 'message'] + if raise_error: + raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params]) + break + + return not error diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/unlockAccount.py b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/unlockAccount.py index 393e08c..bc59b58 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/unlockAccount.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/microsoft/unlockAccount.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3.
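The removal helper just shown mirrors the add helper, including the new raise_error behaviour; note that fix is positional in its signature. Continuing the previous sketch (same hypothetical connection and DNs):

from ldap3.extend.microsoft.removeMembersFromGroups import ad_remove_members_from_groups

# fix=True reads the group first so only members actually present are removed
ad_remove_members_from_groups(conn,
                              'cn=jdoe,ou=users,dc=example,dc=com',
                              'cn=staff,ou=groups,dc=example,dc=com',
                              True,  # fix (positional)
                              raise_error=True)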
# diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/addMembersToGroups.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/addMembersToGroups.py index 5583549..d649dc8 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/addMembersToGroups.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/addMembersToGroups.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/checkGroupsMemberships.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/checkGroupsMemberships.py index 1013fde..c51dbf2 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/checkGroupsMemberships.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/checkGroupsMemberships.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/endTransaction.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/endTransaction.py index 0e9a58c..18bc041 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/endTransaction.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/endTransaction.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/getBindDn.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/getBindDn.py index 39fae2b..492bcdd 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/getBindDn.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/getBindDn.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/listReplicas.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/listReplicas.py index fdc6d08..8ccf2ff 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/listReplicas.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/listReplicas.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
# @@ -45,6 +45,6 @@ class ListReplicas(ExtendedOperation): def populate_result(self): try: - self.result['replicas'] = str(self.decoded_response['replicaList']) if self.decoded_response['replicaList'] else None + self.result['replicas'] = [str(replica) for replica in self.decoded_response] if self.decoded_response else None except TypeError: self.result['replicas'] = None diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasGetUniversalPassword.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasGetUniversalPassword.py index b8b045b..291ae92 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasGetUniversalPassword.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasGetUniversalPassword.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasSetUniversalPassword.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasSetUniversalPassword.py index 65ea0d6..dadab59 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasSetUniversalPassword.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/nmasSetUniversalPassword.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/partition_entry_count.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/partition_entry_count.py index 8218aea..3d46c7a 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/partition_entry_count.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/partition_entry_count.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/removeMembersFromGroups.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/removeMembersFromGroups.py index df493ba..c46c275 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/removeMembersFromGroups.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/removeMembersFromGroups.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/replicaInfo.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/replicaInfo.py index 45bd0e9..057f934 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/replicaInfo.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/replicaInfo.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
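The ListReplicas change above means the operation now populates result['replicas'] as a list of strings, one per replica, instead of a single stringified value. Assuming the usual extended-operation container plumbing returns that populated value, usage would look roughly like this (hypothetical server DN, connection as in the earlier sketches):

replicas = conn.extend.novell.list_replicas('cn=server1,o=example')
# e.g. ['ou=sales,o=example', 'ou=hr,o=example'], or None when no replicas are returned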
# diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/novell/startTransaction.py b/server/www/packages/packages-linux/x64/ldap3/extend/novell/startTransaction.py index 2ed21c2..6179cb0 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/novell/startTransaction.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/novell/startTransaction.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/operation.py b/server/www/packages/packages-linux/x64/ldap3/extend/operation.py index 9906885..c1d478c 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/operation.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/operation.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/standard/PagedSearch.py b/server/www/packages/packages-linux/x64/ldap3/extend/standard/PagedSearch.py index 1b5df49..f8bc7e6 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/standard/PagedSearch.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/standard/PagedSearch.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -47,7 +47,11 @@ def paged_search_generator(connection, search_base = safe_dn(search_base) responses = [] - cookie = True # performs search at least one time + original_connection = None + original_auto_referrals = connection.auto_referrals + connection.auto_referrals = False # disable auto referrals because they cannot be handled during paged searches + cookie = True # performs search operation at least one time + cachekey = None # for referrals cache while cookie: result = connection.search(search_base, search_filter, @@ -69,13 +73,19 @@ def paged_search_generator(connection, response = connection.response result = connection.result + if result['referrals'] and original_auto_referrals: # if referrals are returned, start the loop over with a new connection to the referral + if not original_connection: + original_connection = connection + _, connection, cachekey = connection.strategy.create_referral_connection(result['referrals']) # switch the connection to one of the referrals + continue + responses.extend(response) try: cookie = result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] except KeyError: cookie = None - if result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS: + if connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS: if log_enabled(PROTOCOL): log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection) if result['result'] == RESULT_SIZE_LIMIT_EXCEEDED: @@ -86,6 +96,14 @@ def paged_search_generator(connection, while responses: yield responses.pop() + if original_connection: + connection = original_connection + if connection.use_referral_cache and cachekey: + connection.strategy.referral_cache[cachekey] = connection + else: + connection.unbind() + + connection.auto_referrals = original_auto_referrals connection.response = None diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/standard/PersistentSearch.py b/server/www/packages/packages-linux/x64/ldap3/extend/standard/PersistentSearch.py index
62286e1..b25ec68 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/standard/PersistentSearch.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/standard/PersistentSearch.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -80,7 +80,8 @@ class PersistentSearch(object): else: self.controls = controls - self.controls.append(persistent_search_control(events_type, changes_only, notifications)) + if events_type and changes_only and notifications: + self.controls.append(persistent_search_control(events_type, changes_only, notifications)) self.start() def start(self): @@ -101,9 +102,10 @@ class PersistentSearch(object): controls=self.controls) self.connection.strategy.persistent_search_message_id = self.message_id - def stop(self): + def stop(self, unbind=True): self.connection.abandon(self.message_id) - self.connection.unbind() + if unbind: + self.connection.unbind() if self.message_id in self.connection.strategy._responses: del self.connection.strategy._responses[self.message_id] if hasattr(self.connection.strategy, '_requests') and self.message_id in self.connection.strategy._requests: # asynchronous strategy has a dict of request that could be returned by get_response() @@ -111,11 +113,25 @@ class PersistentSearch(object): self.connection.strategy.persistent_search_message_id = None self.message_id = None - def next(self): + def next(self, block=False, timeout=None): if not self.connection.strategy.streaming and not self.connection.strategy.callback: try: - return self.connection.strategy.events.get_nowait() + return self.connection.strategy.events.get(block, timeout) except Empty: return None raise LDAPExtensionError('Persistent search is not accumulating events in queue') + + def funnel(self, block=False, timeout=None): + done = False + while not done: + try: + entry = self.connection.strategy.events.get(block, timeout) + except Empty: + yield None + if entry['type'] == 'searchResEntry': + yield entry + else: + done = True + + yield entry diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/standard/modifyPassword.py b/server/www/packages/packages-linux/x64/ldap3/extend/standard/modifyPassword.py index 167816e..7837355 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/standard/modifyPassword.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/standard/modifyPassword.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
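Taken together, the PersistentSearch changes above let a consumer block with a timeout while draining the event queue and stop the search without tearing down the connection. A sketch with a placeholder base DN (connection as in the earlier sketches):

ps = conn.extend.standard.persistent_search(search_base='dc=example,dc=com',
                                            search_filter='(objectclass=*)',
                                            streaming=False)
entry = ps.next(block=True, timeout=5)  # now waits up to 5 seconds instead of returning immediately
if entry:
    print(entry['dn'])
ps.stop(unbind=False)  # new: abandon the search but keep the connection bound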
# @@ -67,6 +67,6 @@ class ModifyPassword(ExtendedOperation): self.result[self.response_attribute] = True else: # change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result'] self.result[self.response_attribute] = False - if not self.connection.raise_exceptions: + if self.connection.raise_exceptions: from ...core.exceptions import LDAPOperationResult raise LDAPOperationResult(result=self.result['result'], description=self.result['description'], dn=self.result['dn'], message=self.result['message'], response_type=self.result['type']) diff --git a/server/www/packages/packages-linux/x64/ldap3/extend/standard/whoAmI.py b/server/www/packages/packages-linux/x64/ldap3/extend/standard/whoAmI.py index 0eda5c4..a6c08a8 100644 --- a/server/www/packages/packages-linux/x64/ldap3/extend/standard/whoAmI.py +++ b/server/www/packages/packages-linux/x64/ldap3/extend/standard/whoAmI.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -24,11 +24,9 @@ # If not, see . # implements RFC4532 -from pyasn1.type.univ import NoValue - from ...extend.operation import ExtendedOperation from ...utils.conv import to_unicode -from ...protocol.rfc4511 import OctetString + class WhoAmI(ExtendedOperation): def config(self): diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/abandon.py b/server/www/packages/packages-linux/x64/ldap3/operation/abandon.py index ccc3e88..66fcb6c 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/abandon.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/abandon.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/add.py b/server/www/packages/packages-linux/x64/ldap3/operation/add.py index a08e463..d0b95b4 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/add.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/add.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/bind.py b/server/www/packages/packages-linux/x64/ldap3/operation/bind.py index 0eecc4e..43ad1fb 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/bind.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/bind.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. 
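The one-line modifyPassword.py change above fixes an inverted condition: LDAPOperationResult is now raised only when the connection was opened with raise_exceptions=True, as the adjacent comment always intended. A sketch with placeholder credentials:

from ldap3 import Server, Connection
from ldap3.core.exceptions import LDAPOperationResult

conn = Connection(Server('ldap.example.com'), user='cn=admin,dc=example,dc=com',
                  password='secret', auto_bind=True, raise_exceptions=True)
try:
    conn.extend.standard.modify_password(user='cn=jdoe,dc=example,dc=com',
                                         old_password='old', new_password='new')
except LDAPOperationResult as e:  # raised on failure only because raise_exceptions=True
    print('password change rejected:', e.description)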
# @@ -122,7 +122,7 @@ def bind_response_to_dict(response): 'description': ResultCode().getNamedValues().getName(response['resultCode']), 'dn': str(response['matchedDN']), 'message': str(response['diagnosticMessage']), - 'referrals': referrals_to_list(response['referral']), + 'referrals': referrals_to_list(response['referral']) if response['referral'] is not None and response['referral'].hasValue() else [], 'saslCreds': bytes(response['serverSaslCreds']) if response['serverSaslCreds'] is not None and response['serverSaslCreds'].hasValue() else None} diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/compare.py b/server/www/packages/packages-linux/x64/ldap3/operation/compare.py index 5ee03d5..2232f61 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/compare.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/compare.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/delete.py b/server/www/packages/packages-linux/x64/ldap3/operation/delete.py index df0aee8..2db40f4 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/delete.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/delete.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/extended.py b/server/www/packages/packages-linux/x64/ldap3/operation/extended.py index 3bbdd87..4b1ebc7 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/extended.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/extended.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/modify.py b/server/www/packages/packages-linux/x64/ldap3/operation/modify.py index 363e1ef..31867e9 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/modify.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/modify.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/modifyDn.py b/server/www/packages/packages-linux/x64/ldap3/operation/modifyDn.py index 174bb36..73c6da3 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/modifyDn.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/modifyDn.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/search.py b/server/www/packages/packages-linux/x64/ldap3/operation/search.py index 7cf2fb3..b78d86d 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/search.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/search.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. 
# @@ -38,7 +38,7 @@ from ..operation.bind import referrals_to_list from ..protocol.convert import ava_to_dict, attributes_to_list, search_refs_to_list, validate_assertion_value, prepare_filter_for_sending, search_refs_to_list_fast from ..protocol.formatters.standard import format_attribute_values from ..utils.conv import to_unicode, to_raw - +from pyasn1.error import PyAsn1UnicodeDecodeError ROOT = 0 AND = 1 @@ -379,8 +379,10 @@ def search_operation(search_base, def decode_vals(vals): - return [str(val) for val in vals if val] if vals else None - + try: + return [str(val) for val in vals if val] if vals else None + except PyAsn1UnicodeDecodeError: + return decode_raw_vals(vals) def decode_vals_fast(vals): try: @@ -393,8 +395,7 @@ def attributes_to_dict(attribute_list): conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES') attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict() for attribute in attribute_list: - attributes[str(attribute['type'])] = decode_vals(attribute['vals']) - + attributes[str(attribute['type'])] = decode_vals(attribute['vals']) return attributes @@ -525,10 +526,11 @@ def search_result_entry_response_to_dict(response, schema, custom_formatter, che entry = dict() # entry['dn'] = str(response['object']) if response['object']: - entry['raw_dn'] = to_raw(response['object']) if isinstance(response['object'], STRING_TYPES): # mock strategies return string not a PyAsn1 object + entry['raw_dn'] = to_raw(response['object']) entry['dn'] = to_unicode(response['object']) else: + entry['raw_dn'] = str(response['object']) entry['dn'] = to_unicode(bytes(response['object']), from_server=True) else: entry['raw_dn'] = b'' @@ -555,6 +557,8 @@ def search_result_done_response_to_dict(response): result['controls'][control[0]] = control[1] return result + + def search_result_reference_response_to_dict(response): return {'uri': search_refs_to_list(response)} diff --git a/server/www/packages/packages-linux/x64/ldap3/operation/unbind.py b/server/www/packages/packages-linux/x64/ldap3/operation/unbind.py index 6f1e713..4d418fb 100644 --- a/server/www/packages/packages-linux/x64/ldap3/operation/unbind.py +++ b/server/www/packages/packages-linux/x64/ldap3/operation/unbind.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/controls.py b/server/www/packages/packages-linux/x64/ldap3/protocol/controls.py index 197777e..658867b 100644 --- a/server/www/packages/packages-linux/x64/ldap3/protocol/controls.py +++ b/server/www/packages/packages-linux/x64/ldap3/protocol/controls.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/convert.py b/server/www/packages/packages-linux/x64/ldap3/protocol/convert.py index 319f36d..af3a6f8 100644 --- a/server/www/packages/packages-linux/x64/ldap3/protocol/convert.py +++ b/server/www/packages/packages-linux/x64/ldap3/protocol/convert.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2013 - 2018 Giovanni Cannata +# Copyright 2013 - 2020 Giovanni Cannata # # This file is part of ldap3. 
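The decode_vals change above makes attribute decoding degrade gracefully: when a value is not valid UTF-8, the search no longer dies with PyAsn1UnicodeDecodeError and the values come back raw instead. The same decode-or-keep-raw pattern in isolation (illustration only, not ldap3 internals):

def decode_or_raw(vals):
    # Try to decode every value as UTF-8; on any failure fall back to bytes,
    # mirroring decode_vals() delegating to decode_raw_vals() above.
    try:
        return [v.decode('utf-8') for v in vals]
    except UnicodeDecodeError:
        return [bytes(v) for v in vals]

print(decode_or_raw([b'caf\xc3\xa9']))  # ['café']
print(decode_or_raw([b'\xff\xfe']))     # [b'\xff\xfe']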
# @@ -37,6 +37,7 @@ def attribute_to_dict(attribute): except PyAsn1Error: # invalid encoding, return bytes value return {'type': str(attribute['type']), 'values': [bytes(val) for val in attribute['vals']]} + def attributes_to_dict(attributes): attributes_dict = dict() for attribute in attributes: @@ -46,7 +47,10 @@ def attributes_to_dict(attributes): def referrals_to_list(referrals): - return [str(referral) for referral in referrals if referral] if referrals else None + if isinstance(referrals, list): + return [str(referral) for referral in referrals if referral] if referrals else None + else: + return [str(referral) for referral in referrals if referral] if referrals is not None and referrals.hasValue() else None def search_refs_to_list(search_refs): @@ -93,6 +97,7 @@ def ava_to_dict(ava): except Exception: return {'attribute': str(ava['attributeDesc']), 'value': bytes(ava['assertionValue'])} + def substring_to_dict(substring): return {'initial': substring['initial'] if substring['initial'] else '', 'any': [middle for middle in substring['any']] if substring['any'] else '', 'final': substring['final'] if substring['final'] else ''} @@ -183,7 +188,7 @@ def prepare_filter_for_sending(raw_string): ints = [] raw_string = to_raw(raw_string) while i < len(raw_string): - if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2: # 92 is backslash + if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2: # 92 (0x5C) is backslash try: ints.append(int(raw_string[i + 1: i + 3], 16)) i += 2 diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/formatters.py b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/formatters.py index 36cd9c8..2638d52 100644 --- a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/formatters.py +++ b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/formatters.py @@ -1,407 +1,436 @@ -""" -""" - -# Created on 2014.10.28 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . 
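prepare_filter_for_sending above folds \xx escape pairs in a filter back into single raw bytes before the request goes on the wire (the "92 (0x5C) is backslash" check); its public counterpart escape_filter_chars produces those pairs from user input. For instance:

from ldap3.utils.conv import escape_filter_chars

# RFC 4515 metacharacters become \xx pairs, which prepare_filter_for_sending
# later converts back to the raw bytes 0x28, 0x29, 0x2a.
print(escape_filter_chars('(admin)*'))  # \28admin\29\2a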
- -import re - -from binascii import hexlify -from uuid import UUID -from datetime import datetime, timedelta -from ...utils.conv import to_unicode - -from ...core.timezone import OffsetTzInfo - -def format_unicode(raw_value): - try: - if str is not bytes: # Python 3 - return str(raw_value, 'utf-8', errors='strict') - else: # Python 2 - return unicode(raw_value, 'utf-8', errors='strict') - except (TypeError, UnicodeDecodeError): - pass - - return raw_value - - -def format_integer(raw_value): - try: - return int(raw_value) - except (TypeError, ValueError): # expected exceptions - pass - except Exception: # any other exception should be investigated, anyway the formatter return the raw_value - pass - - return raw_value - - -def format_binary(raw_value): - try: - return bytes(raw_value) - except TypeError: # expected exceptions - pass - except Exception: # any other exception should be investigated, anyway the formatter return the raw_value - pass - - return raw_value - - -def format_uuid(raw_value): - try: - return str(UUID(bytes=raw_value)) - except (TypeError, ValueError): - return format_unicode(raw_value) - except Exception: # any other exception should be investigated, anyway the formatter return the raw_value - pass - - return raw_value - - -def format_uuid_le(raw_value): - try: - return '{' + str(UUID(bytes_le=raw_value)) + '}' - except (TypeError, ValueError): - return format_unicode(raw_value) - except Exception: # any other exception should be investigated, anyway the formatter return the raw_value - pass - - return raw_value - - -def format_boolean(raw_value): - if raw_value in [b'TRUE', b'true', b'True']: - return True - if raw_value in [b'FALSE', b'false', b'False']: - return False - - return raw_value - - -def format_ad_timestamp(raw_value): - """ - Active Directory stores date/time values as the number of 100-nanosecond intervals - that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored. - The time is always stored in Greenwich Mean Time (GMT) in the Active Directory. - """ - if raw_value == b'9223372036854775807': # max value to be stored in a 64 bit signed int - return datetime.max # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999) - try: - timestamp = int(raw_value) - if timestamp < 0: # ad timestamp cannot be negative - return raw_value - except Exception: - return raw_value - - try: - return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600, tz=OffsetTzInfo(0, 'UTC')) # forces true division in python 2 - except (OSError, OverflowError, ValueError): # on Windows backwards timestamps are not allowed - try: - unix_epoch = datetime.fromtimestamp(0, tz=OffsetTzInfo(0, 'UTC')) - diff_seconds = timedelta(seconds=timestamp/10000000.0 - 11644473600) - return unix_epoch + diff_seconds - except Exception: - pass - except Exception: - pass - - return raw_value - - -try: # uses regular expressions and the timezone class (python3.2 and later) - from datetime import timezone - time_format = re.compile( - r''' - ^ - (?P<Year>[0-9]{4}) - (?P<Month>0[1-9]|1[0-2]) - (?P<Day>0[1-9]|[12][0-9]|3[01]) - (?P<Hour>[01][0-9]|2[0-3]) - (?: - (?P<Minute>[0-5][0-9]) - (?P<Second>[0-5][0-9]|60)? - )? - (?: - [.,] - (?P<Fraction>[0-9]+) - )? - (?: - Z - | - (?: - (?P<Offset>[+-]) - (?P<OffHour>[01][0-9]|2[0-3]) - (?P<OffMinute>[0-5][0-9])?
- ) - ) - $ - ''', - re.VERBOSE - ) - - def format_time(raw_value): - try: - match = time_format.fullmatch(to_unicode(raw_value)) - if match is None: - return raw_value - matches = match.groupdict() - - offset = timedelta( - hours=int(matches['OffHour'] or 0), - minutes=int(matches['OffMinute'] or 0) - ) - - if matches['Offset'] == '-': - offset *= -1 - - # Python does not support leap second in datetime (!) - if matches['Second'] == '60': - matches['Second'] = '59' - - # According to RFC, fraction may be applied to an Hour/Minute (!) - fraction = float('0.' + (matches['Fraction'] or '0')) - - if matches['Minute'] is None: - fraction *= 60 - minute = int(fraction) - fraction -= minute - else: - minute = int(matches['Minute']) - - if matches['Second'] is None: - fraction *= 60 - second = int(fraction) - fraction -= second - else: - second = int(matches['Second']) - - microseconds = int(fraction * 1000000) - - return datetime( - int(matches['Year']), - int(matches['Month']), - int(matches['Day']), - int(matches['Hour']), - minute, - second, - microseconds, - timezone(offset), - ) - except Exception: # exceptions should be investigated, anyway the formatter return the raw_value - pass - return raw_value - -except ImportError: - def format_time(raw_value): - """ - From RFC4517: - A value of the Generalized Time syntax is a character string - representing a date and time. The LDAP-specific encoding of a value - of this syntax is a restriction of the format defined in [ISO8601], - and is described by the following ABNF: - - GeneralizedTime = century year month day hour - [ minute [ second / leap-second ] ] - [ fraction ] - g-time-zone - - century = 2(%x30-39) ; "00" to "99" - year = 2(%x30-39) ; "00" to "99" - month = ( %x30 %x31-39 ) ; "01" (January) to "09" - / ( %x31 %x30-32 ) ; "10" to "12" - day = ( %x30 %x31-39 ) ; "01" to "09" - / ( %x31-32 %x30-39 ) ; "10" to "29" - / ( %x33 %x30-31 ) ; "30" to "31" - hour = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23" - minute = %x30-35 %x30-39 ; "00" to "59" - second = ( %x30-35 %x30-39 ) ; "00" to "59" - leap-second = ( %x36 %x30 ) ; "60" - fraction = ( DOT / COMMA ) 1*(%x30-39) - g-time-zone = %x5A ; "Z" - / g-differential - g-differential = ( MINUS / PLUS ) hour [ minute ] - MINUS = %x2D ; minus sign ("-") - """ - - if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (b'Z' in raw_value and not raw_value.endswith(b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction - return raw_value - - # sets position for fixed values - year = int(raw_value[0: 4]) - month = int(raw_value[4: 6]) - day = int(raw_value[6: 8]) - hour = int(raw_value[8: 10]) - minute = 0 - second = 0 - microsecond = 0 - - remain = raw_value[10:] - if remain and remain.endswith(b'Z'): # uppercase 'Z' - sep = b'Z' - elif b'+' in remain: # timezone can be specified with +hh[mm] or -hh[mm] - sep = b'+' - elif b'-' in remain: - sep = b'-' - else: # timezone not specified - return raw_value - - time, _, offset = remain.partition(sep) - - if time and (b'.' 
in time or b',' in time): - # fraction time - if time[0] in b',.': - minute = 6 * int(time[1] if str is bytes else chr(time[1])) # Python 2 / Python 3 - elif time[2] in b',.': - minute = int(raw_value[10: 12]) - second = 6 * int(time[3] if str is bytes else chr(time[3])) # Python 2 / Python 3 - elif time[4] in b',.': - minute = int(raw_value[10: 12]) - second = int(raw_value[12: 14]) - microsecond = 100000 * int(time[5] if str is bytes else chr(time[5])) # Python 2 / Python 3 - elif len(time) == 2: # mmZ format - minute = int(raw_value[10: 12]) - elif len(time) == 0: # Z format - pass - elif len(time) == 4: # mmssZ - minute = int(raw_value[10: 12]) - second = int(raw_value[12: 14]) - else: - return raw_value - - if sep == b'Z': # UTC - timezone = OffsetTzInfo(0, 'UTC') - else: # build timezone - try: - if len(offset) == 2: - timezone_hour = int(offset[:2]) - timezone_minute = 0 - elif len(offset) == 4: - timezone_hour = int(offset[:2]) - timezone_minute = int(offset[2:4]) - else: # malformed timezone - raise ValueError - except ValueError: - return raw_value - if timezone_hour > 23 or timezone_minute > 59: # invalid timezone - return raw_value - - if str is not bytes: # Python 3 - timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), 'UTC' + str(sep + offset, encoding='utf-8')) - else: # Python 2 - timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), unicode('UTC' + sep + offset, encoding='utf-8')) - - try: - return datetime(year=year, - month=month, - day=day, - hour=hour, - minute=minute, - second=second, - microsecond=microsecond, - tzinfo=timezone) - except (TypeError, ValueError): - pass - - return raw_value - - -def format_time_with_0_year(raw_value): - try: - if raw_value.startswith(b'0000'): - return raw_value - except Exception: - try: - if raw_value.startswith('0000'): - return raw_value - except Exception: - pass - - return format_time(raw_value) - - -def format_sid(raw_value): - """ - SID= "S-1-" IdentifierAuthority 1*SubAuthority - IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex - ; If the identifier authority is < 2^32, the - ; identifier authority is represented as a decimal - ; number - ; If the identifier authority is >= 2^32, - ; the identifier authority is represented in - ; hexadecimal - IdentifierAuthorityDec = 1*10DIGIT - ; IdentifierAuthorityDec, top level authority of a - ; security identifier is represented as a decimal number - IdentifierAuthorityHex = "0x" 12HEXDIG - ; IdentifierAuthorityHex, the top-level authority of a - ; security identifier is represented as a hexadecimal number - SubAuthority= "-" 1*10DIGIT - ; Sub-Authority is always represented as a decimal number - ; No leading "0" characters are allowed when IdentifierAuthority - ; or SubAuthority is represented as a decimal number - ; All hexadecimal digits must be output in string format, - ; pre-pended by "0x" - - Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01. - SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15. - IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority. 
-    SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
-    """
-    try:
-        if raw_value.startswith(b'S-1-'):
-            return raw_value
-    except Exception:
-        try:
-            if raw_value.startswith('S-1-'):
-                return raw_value
-        except Exception:
-            pass
-    try:
-        if str is not bytes:  # Python 3
-            revision = int(raw_value[0])
-            sub_authority_count = int(raw_value[1])
-            identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
-            if identifier_authority >= 4294967296:  # 2 ^ 32
-                identifier_authority = hex(identifier_authority)
-
-            sub_authority = ''
-            i = 0
-            while i < sub_authority_count:
-                sub_authority += '-' + str(int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little'))  # little endian
-                i += 1
-        else:  # Python 2
-            revision = int(ord(raw_value[0]))
-            sub_authority_count = int(ord(raw_value[1]))
-            identifier_authority = int(hexlify(raw_value[2:8]), 16)
-            if identifier_authority >= 4294967296:  # 2 ^ 32
-                identifier_authority = hex(identifier_authority)
-
-            sub_authority = ''
-            i = 0
-            while i < sub_authority_count:
-                sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16))  # little endian
-                i += 1
-        return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
-    except Exception:  # any exception should be investigated, anyway the formatter return the raw_value
-        pass
-
-    return raw_value
+"""
+"""
+
+# Created on 2014.10.28
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
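Reviewer note (not part of the patch): this hunk replaces the bundled ldap3 formatters with a newer upstream revision (the header's copyright moves from 2018 to 2020). The behavioural change to watch is in format_ad_timestamp below: a negative AD FILETIME value, which the old code returned unformatted, now has its sign flipped and is converted like any other timestamp. A minimal standalone sketch of the conversion, with illustrative names that are not part of ldap3:

    from datetime import datetime, timezone

    def filetime_to_datetime(filetime):
        # A FILETIME counts 100-nanosecond intervals since 1601-01-01 00:00 UTC.
        # 11644473600 is the number of seconds from 1601-01-01 to the Unix epoch.
        seconds = abs(filetime) / 10000000.0 - 11644473600
        return datetime.fromtimestamp(seconds, tz=timezone.utc)

    print(filetime_to_datetime(132223104000000000))  # 2020-01-01 00:00:00+00:00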
+
+import re
+
+from binascii import hexlify
+from uuid import UUID
+from datetime import datetime, timedelta
+from ...utils.conv import to_unicode
+
+from ...core.timezone import OffsetTzInfo
+
+
+def format_unicode(raw_value):
+    try:
+        if str is not bytes:  # Python 3
+            return str(raw_value, 'utf-8', errors='strict')
+        else:  # Python 2
+            return unicode(raw_value, 'utf-8', errors='strict')
+    except (TypeError, UnicodeDecodeError):
+        pass
+
+    return raw_value
+
+
+def format_integer(raw_value):
+    try:
+        return int(raw_value)
+    except (TypeError, ValueError):  # expected exceptions
+        pass
+    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
+        pass
+
+    return raw_value
+
+
+def format_binary(raw_value):
+    try:
+        return bytes(raw_value)
+    except TypeError:  # expected exceptions
+        pass
+    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
+        pass
+
+    return raw_value
+
+
+def format_uuid(raw_value):
+    try:
+        return str(UUID(bytes=raw_value))
+    except (TypeError, ValueError):
+        return format_unicode(raw_value)
+    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
+        pass
+
+    return raw_value
+
+
+def format_uuid_le(raw_value):
+    try:
+        return '{' + str(UUID(bytes_le=raw_value)) + '}'
+    except (TypeError, ValueError):
+        return format_unicode(raw_value)
+    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
+        pass
+
+    return raw_value
+
+
+def format_boolean(raw_value):
+    if raw_value in [b'TRUE', b'true', b'True']:
+        return True
+    if raw_value in [b'FALSE', b'false', b'False']:
+        return False
+
+    return raw_value
+
+
+def format_ad_timestamp(raw_value):
+    """
+    Active Directory stores date/time values as the number of 100-nanosecond intervals
+    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
+    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
+    """
+    utc_timezone = OffsetTzInfo(0, 'UTC')
+    if raw_value == b'9223372036854775807':  # max value to be stored in a 64 bit signed int
+        return datetime.max.replace(tzinfo=utc_timezone)  # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, tzinfo=OffsetTzInfo(offset=0, name='UTC'))
+    try:
+        timestamp = int(raw_value)
+        if timestamp < 0:  # ad timestamp cannot be negative
+            timestamp = timestamp * -1
+    except Exception:
+        return raw_value
+
+    try:
+        return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600,
+                                      tz=utc_timezone)  # forces true division in python 2
+    except (OSError, OverflowError, ValueError):  # on Windows backwards timestamps are not allowed
+        try:
+            unix_epoch = datetime.fromtimestamp(0, tz=utc_timezone)
+            diff_seconds = timedelta(seconds=timestamp / 10000000.0 - 11644473600)
+            return unix_epoch + diff_seconds
+        except Exception:
+            pass
+    except Exception:
+        pass
+
+    return raw_value
+
+
+try:  # uses regular expressions and the timezone class (python3.2 and later)
+    from datetime import timezone
+
+    time_format = re.compile(
+        r'''
+        ^
+        (?P<Year>[0-9]{4})
+        (?P<Month>0[1-9]|1[0-2])
+        (?P<Day>0[1-9]|[12][0-9]|3[01])
+        (?P<Hour>[01][0-9]|2[0-3])
+        (?:
+            (?P<Minute>[0-5][0-9])
+            (?P<Second>[0-5][0-9]|60)?
+        )?
+        (?:
+            [.,]
+            (?P<Fraction>[0-9]+)
+        )?
+        (?:
+            Z
+            |
+            (?:
+                (?P<Offset>[+-])
+                (?P<OffHour>[01][0-9]|2[0-3])
+                (?P<OffMinute>[0-5][0-9])?
+ ) + ) + $ + ''', + re.VERBOSE + ) + + + def format_time(raw_value): + try: + match = time_format.fullmatch(to_unicode(raw_value)) + if match is None: + return raw_value + matches = match.groupdict() + + offset = timedelta( + hours=int(matches['OffHour'] or 0), + minutes=int(matches['OffMinute'] or 0) + ) + + if matches['Offset'] == '-': + offset *= -1 + + # Python does not support leap second in datetime (!) + if matches['Second'] == '60': + matches['Second'] = '59' + + # According to RFC, fraction may be applied to an Hour/Minute (!) + fraction = float('0.' + (matches['Fraction'] or '0')) + + if matches['Minute'] is None: + fraction *= 60 + minute = int(fraction) + fraction -= minute + else: + minute = int(matches['Minute']) + + if matches['Second'] is None: + fraction *= 60 + second = int(fraction) + fraction -= second + else: + second = int(matches['Second']) + + microseconds = int(fraction * 1000000) + + return datetime( + int(matches['Year']), + int(matches['Month']), + int(matches['Day']), + int(matches['Hour']), + minute, + second, + microseconds, + timezone(offset), + ) + except Exception: # exceptions should be investigated, anyway the formatter return the raw_value + pass + return raw_value + +except ImportError: + def format_time(raw_value): + """ + From RFC4517: + A value of the Generalized Time syntax is a character string + representing a date and time. The LDAP-specific encoding of a value + of this syntax is a restriction of the format defined in [ISO8601], + and is described by the following ABNF: + + GeneralizedTime = century year month day hour + [ minute [ second / leap-second ] ] + [ fraction ] + g-time-zone + + century = 2(%x30-39) ; "00" to "99" + year = 2(%x30-39) ; "00" to "99" + month = ( %x30 %x31-39 ) ; "01" (January) to "09" + / ( %x31 %x30-32 ) ; "10" to "12" + day = ( %x30 %x31-39 ) ; "01" to "09" + / ( %x31-32 %x30-39 ) ; "10" to "29" + / ( %x33 %x30-31 ) ; "30" to "31" + hour = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23" + minute = %x30-35 %x30-39 ; "00" to "59" + second = ( %x30-35 %x30-39 ) ; "00" to "59" + leap-second = ( %x36 %x30 ) ; "60" + fraction = ( DOT / COMMA ) 1*(%x30-39) + g-time-zone = %x5A ; "Z" + / g-differential + g-differential = ( MINUS / PLUS ) hour [ minute ] + MINUS = %x2D ; minus sign ("-") + """ + + if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or ( + b'Z' in raw_value and not raw_value.endswith( + b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction + return raw_value + + # sets position for fixed values + year = int(raw_value[0: 4]) + month = int(raw_value[4: 6]) + day = int(raw_value[6: 8]) + hour = int(raw_value[8: 10]) + minute = 0 + second = 0 + microsecond = 0 + + remain = raw_value[10:] + if remain and remain.endswith(b'Z'): # uppercase 'Z' + sep = b'Z' + elif b'+' in remain: # timezone can be specified with +hh[mm] or -hh[mm] + sep = b'+' + elif b'-' in remain: + sep = b'-' + else: # timezone not specified + return raw_value + + time, _, offset = remain.partition(sep) + + if time and (b'.' 
in time or b',' in time): + # fraction time + if time[0] in b',.': + minute = 6 * int(time[1] if str is bytes else chr(time[1])) # Python 2 / Python 3 + elif time[2] in b',.': + minute = int(raw_value[10: 12]) + second = 6 * int(time[3] if str is bytes else chr(time[3])) # Python 2 / Python 3 + elif time[4] in b',.': + minute = int(raw_value[10: 12]) + second = int(raw_value[12: 14]) + microsecond = 100000 * int(time[5] if str is bytes else chr(time[5])) # Python 2 / Python 3 + elif len(time) == 2: # mmZ format + minute = int(raw_value[10: 12]) + elif len(time) == 0: # Z format + pass + elif len(time) == 4: # mmssZ + minute = int(raw_value[10: 12]) + second = int(raw_value[12: 14]) + else: + return raw_value + + if sep == b'Z': # UTC + timezone = OffsetTzInfo(0, 'UTC') + else: # build timezone + try: + if len(offset) == 2: + timezone_hour = int(offset[:2]) + timezone_minute = 0 + elif len(offset) == 4: + timezone_hour = int(offset[:2]) + timezone_minute = int(offset[2:4]) + else: # malformed timezone + raise ValueError + except ValueError: + return raw_value + if timezone_hour > 23 or timezone_minute > 59: # invalid timezone + return raw_value + + if str is not bytes: # Python 3 + timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), + 'UTC' + str(sep + offset, encoding='utf-8')) + else: # Python 2 + timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), + unicode('UTC' + sep + offset, encoding='utf-8')) + + try: + return datetime(year=year, + month=month, + day=day, + hour=hour, + minute=minute, + second=second, + microsecond=microsecond, + tzinfo=timezone) + except (TypeError, ValueError): + pass + + return raw_value + + +def format_ad_timedelta(raw_value): + """ + Convert a negative filetime value to a timedelta. + """ + # Active Directory stores attributes like "minPwdAge" as a negative + # "filetime" timestamp, which is the number of 100-nanosecond intervals that + # have elapsed since the 0 hour on January 1, 1601. + # + # Handle the minimum value that can be stored in a 64 bit signed integer. + # See https://docs.microsoft.com/en-us/dotnet/api/system.int64.minvalue + # In attributes like "maxPwdAge", this signifies never. + if raw_value == b'-9223372036854775808': + return timedelta.max + # We can reuse format_ad_timestamp to get a datetime object from the + # timestamp. Afterwards, we can subtract a datetime representing 0 hour on + # January 1, 1601 from the returned datetime to get the timedelta. 
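Reviewer note (not part of the patch): a worked example of the subtraction on the line just below, assuming format_ad_timestamp behaves as in the hunk above. An AD duration of one day, e.g. a minPwdAge of b'-864000000000' (-86400 seconds expressed in 100-nanosecond units), works out as:

    # illustrative values, not patch content
    format_ad_timestamp(b'-864000000000') - format_ad_timestamp(0)
    # == datetime(1601, 1, 2, tzinfo=UTC) - datetime(1601, 1, 1, tzinfo=UTC)
    # == timedelta(days=1)

Because of the sign flip in format_ad_timestamp, the result is the magnitude of the duration, not a negative timedelta.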
+ return format_ad_timestamp(raw_value) - format_ad_timestamp(0) + + +def format_time_with_0_year(raw_value): + try: + if raw_value.startswith(b'0000'): + return raw_value + except Exception: + try: + if raw_value.startswith('0000'): + return raw_value + except Exception: + pass + + return format_time(raw_value) + + +def format_sid(raw_value): + """ + SID= "S-1-" IdentifierAuthority 1*SubAuthority + IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex + ; If the identifier authority is < 2^32, the + ; identifier authority is represented as a decimal + ; number + ; If the identifier authority is >= 2^32, + ; the identifier authority is represented in + ; hexadecimal + IdentifierAuthorityDec = 1*10DIGIT + ; IdentifierAuthorityDec, top level authority of a + ; security identifier is represented as a decimal number + IdentifierAuthorityHex = "0x" 12HEXDIG + ; IdentifierAuthorityHex, the top-level authority of a + ; security identifier is represented as a hexadecimal number + SubAuthority= "-" 1*10DIGIT + ; Sub-Authority is always represented as a decimal number + ; No leading "0" characters are allowed when IdentifierAuthority + ; or SubAuthority is represented as a decimal number + ; All hexadecimal digits must be output in string format, + ; pre-pended by "0x" + + Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01. + SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15. + IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority. + SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount. 
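Reviewer note (not part of the patch): a concrete instance of the layout described above, useful for checking the decoder that follows. The well-known Administrators SID S-1-5-32-544 is the 16-byte sequence

    01                   Revision
    02                   SubAuthorityCount
    00 00 00 00 00 05    IdentifierAuthority (big endian)
    20 00 00 00          SubAuthority 32 (little endian)
    20 02 00 00          SubAuthority 544 (little endian)

for which format_sid returns 'S-1-5-32-544'.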
+ """ + try: + if raw_value.startswith(b'S-1-'): + return raw_value + except Exception: + try: + if raw_value.startswith('S-1-'): + return raw_value + except Exception: + pass + try: + if str is not bytes: # Python 3 + revision = int(raw_value[0]) + sub_authority_count = int(raw_value[1]) + identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big') + if identifier_authority >= 4294967296: # 2 ^ 32 + identifier_authority = hex(identifier_authority) + + sub_authority = '' + i = 0 + while i < sub_authority_count: + sub_authority += '-' + str( + int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little')) # little endian + i += 1 + else: # Python 2 + revision = int(ord(raw_value[0])) + sub_authority_count = int(ord(raw_value[1])) + identifier_authority = int(hexlify(raw_value[2:8]), 16) + if identifier_authority >= 4294967296: # 2 ^ 32 + identifier_authority = hex(identifier_authority) + + sub_authority = '' + i = 0 + while i < sub_authority_count: + sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16)) # little endian + i += 1 + return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority + except Exception: # any exception should be investigated, anyway the formatter return the raw_value + pass + + return raw_value diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/standard.py b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/standard.py index 77f7b2e..42f6c26 100644 --- a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/standard.py +++ b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/standard.py @@ -1,232 +1,238 @@ -""" -""" - -# Created on 2014.10.28 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from ... 
import SEQUENCE_TYPES -from .formatters import format_ad_timestamp, format_binary, format_boolean,\ - format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year -from .validators import validate_integer, validate_time, always_valid,\ - validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\ - validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year - -# for each syntax can be specified a format function and a input validation function - -standard_formatter = { - '1.2.840.113556.1.4.903': (format_binary, None), # Object (DN-binary) - Microsoft - '1.2.840.113556.1.4.904': (format_unicode, None), # Object (DN-string) - Microsoft - '1.2.840.113556.1.4.905': (format_unicode, None), # String (Teletex) - Microsoft - '1.2.840.113556.1.4.906': (format_integer, validate_integer), # Large integer - Microsoft - '1.2.840.113556.1.4.907': (format_binary, None), # String (NT-sec-desc) - Microsoft - '1.2.840.113556.1.4.1221': (format_binary, None), # Object (OR-name) - Microsoft - '1.2.840.113556.1.4.1362': (format_unicode, None), # String (Case) - Microsoft - '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None), # ACI item [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None), # Access point [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None), # Attribute type description - '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None), # Audio [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None), # Binary [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None), # Bit String - '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean), # Boolean - '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None), # Certificate [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None), # Certificate List [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None), # Certificate Pair [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None), # Country String - '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None), # Distinguished name (DN) - '1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None), # Data Quality Syntax [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None), # Delivery method - '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None), # Directory string - '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None), # DIT Content Rule Description - '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None), # DIT Structure Rule Description - '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None), # DL Submit Permission [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None), # DSA Quality Syntax [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None), # DSE Type [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None), # Enhanced Guide - '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None), # Facsimile Telephone Number - '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None), # Fax - '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time), # Generalized time - '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None), # Guide [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None), # IA5 string - '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer), # Integer - '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None), # JPEG - '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None), # Master and Shadow 
Access Points [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None), # Matching rule description - '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None), # Matching rule use description - '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None), # Mail Preference [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None), # MHS OR Address [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None), # Name and optional UID - '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None), # Name form description - '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None), # Numeric string - '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None), # Object class description - '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None), # OID - '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None), # Other mailbox - '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None), # Octet string - '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None), # Postal address - '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None), # Protocol Information [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None), # Presentation Address [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None), # Printable string - '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None), # Subtree specification [OBSOLETE - '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None), # Supplier Information [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None), # Supplier Or Consumer [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None), # Supplier And Consumer [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None), # Supported Algorithm [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None), # Telephone number - '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None), # Teletex terminal identifier - '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None), # Teletex number - '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time), # Utc time (deprecated) - '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None), # LDAP syntax description - '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None), # Modify rights [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None), # LDAP Schema Definition [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None), # LDAP Schema Description [OBSOLETE] - '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None), # Substring assertion - '1.3.6.1.1.16.1': (format_uuid, validate_uuid), # UUID - '1.3.6.1.1.16.4': (format_uuid, validate_uuid), # entryUUID (RFC 4530) - '2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid), # GUID (Novell) - '2.16.840.1.113719.1.1.5.1.0': (format_binary, None), # Unknown (Novell) - '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None), # Case Ignore List (Novell) - '2.16.840.1.113719.1.1.5.1.12': (format_binary, None), # Tagged Data (Novell) - '2.16.840.1.113719.1.1.5.1.13': (format_binary, None), # Octet List (Novell) - '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None), # Tagged String (Novell) - '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None), # Tagged Name And String (Novell) - '2.16.840.1.113719.1.1.5.1.16': (format_binary, None), # NDS Replica Pointer (Novell) - '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None), # NDS ACL (Novell) - '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time), # NDS Timestamp (Novell) - '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer), # Counter (Novell) - 
'2.16.840.1.113719.1.1.5.1.23': (format_unicode, None), # Tagged Name (Novell) - '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None), # Typed Name (Novell) - 'supportedldapversion': (format_integer, None), # supportedLdapVersion (Microsoft) - 'octetstring': (format_binary, validate_uuid_le), # octect string (Microsoft) - '1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le), # object guid (Microsoft) - '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp), # builtinCreationTime (Microsoft) - '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp), # creationTime (Microsoft) - '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp), # badPasswordTime (Microsoft) - '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp), # lastLogoff (Microsoft) - '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp), # lastLogon (Microsoft) - '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int), # pwdLastSet (Microsoft, can be set to -1 only) - '1.2.840.113556.1.4.146': (format_sid, validate_sid), # objectSid (Microsoft) - '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp), # accountExpires (Microsoft) - '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp), # lockoutTime (Microsoft) - '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp), # lastLogonTimestamp (Microsoft) - '1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year) # pwdAccountLockedTime (Novell) -} - - -def find_attribute_helpers(attr_type, name, custom_formatter): - """ - Tries to format following the OIDs info and format_helper specification. - Search for attribute oid, then attribute name (can be multiple), then attribute syntax - Precedence is: - 1. attribute name - 2. attribute oid(from schema) - 3. attribute names (from oid_info) - 4. attribute syntax (from schema) - Custom formatters can be defined in Server object and have precedence over the standard_formatters - If no formatter is found the raw_value is returned as bytes. 
- Attributes defined as SINGLE_VALUE in schema are returned as a single object, otherwise are returned as a list of object - Formatter functions can return any kind of object - return a tuple (formatter, validator) - """ - formatter = None - if custom_formatter and isinstance(custom_formatter, dict): # if custom formatters are defined they have precedence over the standard formatters - if name in custom_formatter: # search for attribute name, as returned by the search operation - formatter = custom_formatter[name] - - if not formatter and attr_type and attr_type.oid in custom_formatter: # search for attribute oid as returned by schema - formatter = custom_formatter[attr_type.oid] - if not formatter and attr_type and attr_type.oid_info: - if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info - for attr_name in attr_type.oid_info[2]: - if attr_name in custom_formatter: - formatter = custom_formatter[attr_name] - break - elif attr_type.oid_info[2] in custom_formatter: # search for name defined in oid_info - formatter = custom_formatter[attr_type.oid_info[2]] - - if not formatter and attr_type and attr_type.syntax in custom_formatter: # search for syntax defined in schema - formatter = custom_formatter[attr_type.syntax] - - if not formatter and name in standard_formatter: # search for attribute name, as returned by the search operation - formatter = standard_formatter[name] - - if not formatter and attr_type and attr_type.oid in standard_formatter: # search for attribute oid as returned by schema - formatter = standard_formatter[attr_type.oid] - - if not formatter and attr_type and attr_type.oid_info: - if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info - for attr_name in attr_type.oid_info[2]: - if attr_name in standard_formatter: - formatter = standard_formatter[attr_name] - break - elif attr_type.oid_info[2] in standard_formatter: # search for name defined in oid_info - formatter = standard_formatter[attr_type.oid_info[2]] - if not formatter and attr_type and attr_type.syntax in standard_formatter: # search for syntax defined in schema - formatter = standard_formatter[attr_type.syntax] - - if formatter is None: - return None, None - - return formatter - - -def format_attribute_values(schema, name, values, custom_formatter): - if not values: # RFCs states that attributes must always have values, but a flaky server returns empty values too - return [] - - if not isinstance(values, SEQUENCE_TYPES): - values = [values] - - if schema and schema.attribute_types and name in schema.attribute_types: - attr_type = schema.attribute_types[name] - else: - attr_type = None - - attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter) - if not isinstance(attribute_helpers, tuple): # custom formatter - formatter = attribute_helpers - else: - formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0] - - formatted_values = [formatter(raw_value) for raw_value in values] # executes formatter - if formatted_values: - return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values - else: # RFCs states that attributes must always have values, but AD return empty values in DirSync - return [] - - -def find_attribute_validator(schema, name, custom_validator): - if schema and schema.attribute_types and name in schema.attribute_types: - attr_type = schema.attribute_types[name] - else: - attr_type = None - - attribute_helpers = 
find_attribute_helpers(attr_type, name, custom_validator)
-    if not isinstance(attribute_helpers, tuple):  # custom validator
-        validator = attribute_helpers
-    else:
-        if not attribute_helpers[1]:
-            if attr_type and attr_type.single_value:
-                validator = validate_generic_single_value  # validate only single value
-            else:
-                validator = always_valid  # unknown syntax, accepts single and multi value
-        else:
-            validator = attribute_helpers[1]
-    return validator
+"""
+"""
+
+# Created on 2014.10.28
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from ... import SEQUENCE_TYPES
+from .formatters import format_ad_timestamp, format_binary, format_boolean,\
+    format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year,\
+    format_ad_timedelta
+from .validators import validate_integer, validate_time, always_valid,\
+    validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\
+    validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year,\
+    validate_ad_timedelta
+
+# for each syntax can be specified a format function and a input validation function
+
+standard_formatter = {
+    '1.2.840.113556.1.4.903': (format_binary, None),  # Object (DN-binary) - Microsoft
+    '1.2.840.113556.1.4.904': (format_unicode, None),  # Object (DN-string) - Microsoft
+    '1.2.840.113556.1.4.905': (format_unicode, None),  # String (Teletex) - Microsoft
+    '1.2.840.113556.1.4.906': (format_integer, validate_integer),  # Large integer - Microsoft
+    '1.2.840.113556.1.4.907': (format_binary, None),  # String (NT-sec-desc) - Microsoft
+    '1.2.840.113556.1.4.1221': (format_binary, None),  # Object (OR-name) - Microsoft
+    '1.2.840.113556.1.4.1362': (format_unicode, None),  # String (Case) - Microsoft
+    '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None),  # ACI item [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None),  # Access point [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None),  # Attribute type description
+    '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None),  # Audio [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None),  # Binary [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None),  # Bit String
+    '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean),  # Boolean
+    '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None),  # Certificate [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None),  # Certificate List [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None),  # Certificate Pair [OBSOLETE]
+    '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None),  # Country String
+    '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None),  # Distinguished name (DN)
+    '1.3.6.1.4.1.1466.115.121.1.13': (format_binary,
None), # Data Quality Syntax [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None), # Delivery method + '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None), # Directory string + '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None), # DIT Content Rule Description + '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None), # DIT Structure Rule Description + '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None), # DL Submit Permission [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None), # DSA Quality Syntax [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None), # DSE Type [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None), # Enhanced Guide + '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None), # Facsimile Telephone Number + '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None), # Fax + '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time), # Generalized time + '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None), # Guide [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None), # IA5 string + '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer), # Integer + '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None), # JPEG + '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None), # Master and Shadow Access Points [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None), # Matching rule description + '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None), # Matching rule use description + '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None), # Mail Preference [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None), # MHS OR Address [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None), # Name and optional UID + '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None), # Name form description + '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None), # Numeric string + '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None), # Object class description + '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None), # OID + '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None), # Other mailbox + '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None), # Octet string + '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None), # Postal address + '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None), # Protocol Information [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None), # Presentation Address [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None), # Printable string + '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None), # Subtree specification [OBSOLETE + '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None), # Supplier Information [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None), # Supplier Or Consumer [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None), # Supplier And Consumer [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None), # Supported Algorithm [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None), # Telephone number + '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None), # Teletex terminal identifier + '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None), # Teletex number + '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time), # Utc time (deprecated) + '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None), # LDAP syntax description + '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None), # Modify rights [OBSOLETE] + 
'1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None), # LDAP Schema Definition [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None), # LDAP Schema Description [OBSOLETE] + '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None), # Substring assertion + '1.3.6.1.1.16.1': (format_uuid, validate_uuid), # UUID + '1.3.6.1.1.16.4': (format_uuid, validate_uuid), # entryUUID (RFC 4530) + '2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid), # GUID (Novell) + '2.16.840.1.113719.1.1.5.1.0': (format_binary, None), # Unknown (Novell) + '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None), # Case Ignore List (Novell) + '2.16.840.1.113719.1.1.5.1.12': (format_binary, None), # Tagged Data (Novell) + '2.16.840.1.113719.1.1.5.1.13': (format_binary, None), # Octet List (Novell) + '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None), # Tagged String (Novell) + '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None), # Tagged Name And String (Novell) + '2.16.840.1.113719.1.1.5.1.16': (format_binary, None), # NDS Replica Pointer (Novell) + '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None), # NDS ACL (Novell) + '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time), # NDS Timestamp (Novell) + '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer), # Counter (Novell) + '2.16.840.1.113719.1.1.5.1.23': (format_unicode, None), # Tagged Name (Novell) + '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None), # Typed Name (Novell) + 'supportedldapversion': (format_integer, None), # supportedLdapVersion (Microsoft) + 'octetstring': (format_binary, validate_uuid_le), # octect string (Microsoft) + '1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le), # objectGUID (Microsoft) + '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp), # builtinCreationTime (Microsoft) + '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp), # creationTime (Microsoft) + '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp), # badPasswordTime (Microsoft) + '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp), # lastLogoff (Microsoft) + '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp), # lastLogon (Microsoft) + '1.2.840.113556.1.4.60': (format_ad_timedelta, validate_ad_timedelta), # lockoutDuration (Microsoft) + '1.2.840.113556.1.4.61': (format_ad_timedelta, validate_ad_timedelta), # lockOutObservationWindow (Microsoft) + '1.2.840.113556.1.4.74': (format_ad_timedelta, validate_ad_timedelta), # maxPwdAge (Microsoft) + '1.2.840.113556.1.4.78': (format_ad_timedelta, validate_ad_timedelta), # minPwdAge (Microsoft) + '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int), # pwdLastSet (Microsoft, can be set to -1 only) + '1.2.840.113556.1.4.146': (format_sid, validate_sid), # objectSid (Microsoft) + '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp), # accountExpires (Microsoft) + '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp), # lockoutTime (Microsoft) + '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp), # lastLogonTimestamp (Microsoft) + '1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year) # pwdAccountLockedTime (Novell) +} + + +def find_attribute_helpers(attr_type, name, custom_formatter): + """ + Tries to format following the OIDs info and format_helper specification. 
+ Search for attribute oid, then attribute name (can be multiple), then attribute syntax + Precedence is: + 1. attribute name + 2. attribute oid(from schema) + 3. attribute names (from oid_info) + 4. attribute syntax (from schema) + Custom formatters can be defined in Server object and have precedence over the standard_formatters + If no formatter is found the raw_value is returned as bytes. + Attributes defined as SINGLE_VALUE in schema are returned as a single object, otherwise are returned as a list of object + Formatter functions can return any kind of object + return a tuple (formatter, validator) + """ + formatter = None + if custom_formatter and isinstance(custom_formatter, dict): # if custom formatters are defined they have precedence over the standard formatters + if name in custom_formatter: # search for attribute name, as returned by the search operation + formatter = custom_formatter[name] + + if not formatter and attr_type and attr_type.oid in custom_formatter: # search for attribute oid as returned by schema + formatter = custom_formatter[attr_type.oid] + if not formatter and attr_type and attr_type.oid_info: + if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info + for attr_name in attr_type.oid_info[2]: + if attr_name in custom_formatter: + formatter = custom_formatter[attr_name] + break + elif attr_type.oid_info[2] in custom_formatter: # search for name defined in oid_info + formatter = custom_formatter[attr_type.oid_info[2]] + + if not formatter and attr_type and attr_type.syntax in custom_formatter: # search for syntax defined in schema + formatter = custom_formatter[attr_type.syntax] + + if not formatter and name in standard_formatter: # search for attribute name, as returned by the search operation + formatter = standard_formatter[name] + + if not formatter and attr_type and attr_type.oid in standard_formatter: # search for attribute oid as returned by schema + formatter = standard_formatter[attr_type.oid] + + if not formatter and attr_type and attr_type.oid_info: + if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info + for attr_name in attr_type.oid_info[2]: + if attr_name in standard_formatter: + formatter = standard_formatter[attr_name] + break + elif attr_type.oid_info[2] in standard_formatter: # search for name defined in oid_info + formatter = standard_formatter[attr_type.oid_info[2]] + if not formatter and attr_type and attr_type.syntax in standard_formatter: # search for syntax defined in schema + formatter = standard_formatter[attr_type.syntax] + + if formatter is None: + return None, None + + return formatter + + +def format_attribute_values(schema, name, values, custom_formatter): + if not values: # RFCs states that attributes must always have values, but a flaky server returns empty values too + return [] + + if not isinstance(values, SEQUENCE_TYPES): + values = [values] + + if schema and schema.attribute_types and name in schema.attribute_types: + attr_type = schema.attribute_types[name] + else: + attr_type = None + + attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter) + if not isinstance(attribute_helpers, tuple): # custom formatter + formatter = attribute_helpers + else: + formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0] + + formatted_values = [formatter(raw_value) for raw_value in values] # executes formatter + if formatted_values: + return formatted_values[0] if (attr_type and attr_type.single_value) 
else formatted_values
+    else:  # RFCs states that attributes must always have values, but AD return empty values in DirSync
+        return []
+
+
+def find_attribute_validator(schema, name, custom_validator):
+    if schema and schema.attribute_types and name in schema.attribute_types:
+        attr_type = schema.attribute_types[name]
+    else:
+        attr_type = None
+
+    attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
+    if not isinstance(attribute_helpers, tuple):  # custom validator
+        validator = attribute_helpers
+    else:
+        if not attribute_helpers[1]:
+            if attr_type and attr_type.single_value:
+                validator = validate_generic_single_value  # validate only single value
+            else:
+                validator = always_valid  # unknown syntax, accepts single and multi value
+        else:
+            validator = attribute_helpers[1]
+    return validator
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/validators.py b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/validators.py
index fff2198..3ab300d 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/validators.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/formatters/validators.py
@@ -1,461 +1,503 @@
-"""
-"""
-
-# Created on 2016.08.09
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2016 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-from binascii import a2b_hex
-from datetime import datetime
-from calendar import timegm
-from uuid import UUID
-from struct import pack
-
-
-from ...
import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES -from .formatters import format_time, format_ad_timestamp -from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes - -# Validators return True if value is valid, False if value is not valid, -# or a value different from True and False that is a valid value to substitute to the input value - - -def check_type(input_value, value_type): - if isinstance(input_value, value_type): - return True - - if isinstance(input_value, SEQUENCE_TYPES): - for value in input_value: - if not isinstance(value, value_type): - return False - return True - - return False - - -# noinspection PyUnusedLocal -def always_valid(input_value): - return True - - -def validate_generic_single_value(input_value): - if not isinstance(input_value, SEQUENCE_TYPES): - return True - - try: # object couldn't have a __len__ method - if len(input_value) == 1: - return True - except Exception: - pass - - return False - - -def validate_zero_and_minus_one_and_positive_int(input_value): - """Accept -1 only (used by pwdLastSet in AD) - """ - if not isinstance(input_value, SEQUENCE_TYPES): - if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES): - return True if int(input_value) >= -1 else False - return False - else: - if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)): - return True if int(input_value[0]) >= -1 else False - - return False - - -def validate_integer(input_value): - if check_type(input_value, (float, bool)): - return False - if check_type(input_value, INTEGER_TYPES): - return True - - if not isinstance(input_value, SEQUENCE_TYPES): - sequence = False - input_value = [input_value] - else: - sequence = True # indicates if a sequence must be returned - - valid_values = [] # builds a list of valid int values - from decimal import Decimal, InvalidOperation - for element in input_value: - try: # try to convert any type to int, an invalid conversion raise TypeError or ValueError, doublecheck with Decimal type, if both are valid and equal then then int() value is used - value = to_unicode(element) if isinstance(element, bytes) else element - decimal_value = Decimal(value) - int_value = int(value) - if decimal_value == int_value: - valid_values.append(int_value) - else: - return False - except (ValueError, TypeError, InvalidOperation): - return False - - if sequence: - return valid_values - else: - return valid_values[0] - - -def validate_bytes(input_value): - return check_type(input_value, bytes) - - -def validate_boolean(input_value): - # it could be a real bool or the string TRUE or FALSE, # only a single valued is allowed - if validate_generic_single_value(input_value): # valid only if a single value or a sequence with a single element - if isinstance(input_value, SEQUENCE_TYPES): - input_value = input_value[0] - if isinstance(input_value, bool): - if input_value: - return 'TRUE' - else: - return 'FALSE' - if str is not bytes and isinstance(input_value, bytes): # python3 try to converts bytes to string - input_value = to_unicode(input_value) - if isinstance(input_value, STRING_TYPES): - if input_value.lower() == 'true': - return 'TRUE' - elif input_value.lower() == 'false': - return 'FALSE' - return False - - -def validate_time_with_0_year(input_value): - # validates generalized time but accept a 0000 year too - # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC - if not isinstance(input_value, SEQUENCE_TYPES): - 
        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
-            element = to_unicode(element)
-        if isinstance(element, STRING_TYPES):  # tries to check if it is already be a Generalized Time
-            if element.startswith('0000') or isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
-                valid_values.append(element)
-            else:
-                return False
-        elif isinstance(element, datetime):
-            changed = True
-            if element.tzinfo:  # a datetime with a timezone
-                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
-            else:  # datetime without timezone, assumed local and adjusted to UTC
-                offset = datetime.now() - datetime.utcnow()
-                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-
-def validate_time(input_value):
-    # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
-            element = to_unicode(element)
-        if isinstance(element, STRING_TYPES):  # tries to check if it is already be a Generalized Time
-            if isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
-                valid_values.append(element)
-            else:
-                return False
-        elif isinstance(element, datetime):
-            changed = True
-            if element.tzinfo:  # a datetime with a timezone
-                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
-            else:  # datetime without timezone, assumed local and adjusted to UTC
-                offset = datetime.now() - datetime.utcnow()
-                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-
-def validate_ad_timestamp(input_value):
-    """
-    Active Directory stores date/time values as the number of 100-nanosecond intervals
-    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
-    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
-    """
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
-            element = to_unicode(element)
-        if isinstance(element, NUMERIC_TYPES):
-            if 0 <= element <= 9223372036854775807:  # min and max for the AD timestamp starting from 12:00 AM January 1, 1601
-                valid_values.append(element)
-            else:
-                return False
-        elif isinstance(element, STRING_TYPES):  # tries to check if it is already be a AD timestamp
-            if isinstance(format_ad_timestamp(to_raw(element)), datetime):  # valid Generalized Time string
-                valid_values.append(element)
-            else:
-                return False
-        elif isinstance(element, datetime):
-            changed = True
-            if element.tzinfo:  # a datetime with a timezone
-                valid_values.append(to_raw((timegm(element.utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
-            else:  # datetime without timezone, assumed local and adjusted to UTC
-                offset = datetime.now() - datetime.utcnow()
-                valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-
-def validate_guid(input_value):
-    """
-    object guid in uuid format (Novell eDirectory)
-    """
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if isinstance(element, STRING_TYPES):
-            try:
-                valid_values.append(UUID(element).bytes)
-                changed = True
-            except ValueError:  # try if the value is an escaped byte sequence
-                try:
-                    valid_values.append(UUID(element.replace('\\', '')).bytes)
-                    changed = True
-                    continue
-                except ValueError:
-                    if str is not bytes:  # python 3
-                        pass
-                    else:
-                        valid_values.append(element)
-                        continue
-                return False
-        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
-            valid_values.append(element)
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-def validate_uuid(input_value):
-    """
-    object entryUUID in uuid format
-    """
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if isinstance(element, STRING_TYPES):
-            try:
-                valid_values.append(str(UUID(element)))
-                changed = True
-            except ValueError:  # try if the value is an escaped byte sequence
-                try:
-                    valid_values.append(str(UUID(element.replace('\\', ''))))
-                    changed = True
-                    continue
-                except ValueError:
-                    if str is not bytes:  # python 3
-                        pass
-                    else:
-                        valid_values.append(element)
-                        continue
-                return False
-        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
-            valid_values.append(element)
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-
-def validate_uuid_le(input_value):
-    """
-    Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP:
-    "{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles)
-    "689e030773434d26a7a007039e684373": packet representation, already in little endian
-    "\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian
-    byte sequence: already in little endian
-
-    """
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if isinstance(element, STRING_TYPES):
-            if element[0] == '{' and element[-1] == '}':
-                valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
-                changed = True
-            elif '-' in element:
-                valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
-                changed = True
-            elif '\\' in element:
-                valid_values.append(UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le)  # byte representation, value in little endian
-                changed = True
-            elif '-' not in element:  # value in little endian
-                valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le)  # packet representation, value in little endian, converts to little endian
-                changed = True
-        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid uuid
-            valid_values.append(element)  # value is untouched, must be in little endian
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
-
-
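The objectGUID forms listed in the docstring above can be checked with nothing but the standard library; a minimal sketch of the big-endian/little-endian round trip (illustrative only, not part of this patch):

    # Minimal sketch (not part of the patch): the objectGUID representations
    # from the validate_uuid_le docstring, using only the stdlib uuid module.
    from uuid import UUID

    guid = UUID('{07039e68-4373-264d-a0a7-07039e684373}')   # big-endian string form
    packed_le = guid.bytes_le                               # little-endian bytes form
    assert packed_le.hex() == '689e030773434d26a7a007039e684373'  # packet form
    assert UUID(bytes_le=packed_le) == guid                 # round-trips losslessly
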
-def validate_sid(input_value):
-    """
-    SID= "S-1-" IdentifierAuthority 1*SubAuthority
-    IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
-        ; If the identifier authority is < 2^32, the
-        ; identifier authority is represented as a decimal
-        ; number
-        ; If the identifier authority is >= 2^32,
-        ; the identifier authority is represented in
-        ; hexadecimal
-    IdentifierAuthorityDec = 1*10DIGIT
-        ; IdentifierAuthorityDec, top level authority of a
-        ; security identifier is represented as a decimal number
-    IdentifierAuthorityHex = "0x" 12HEXDIG
-        ; IdentifierAuthorityHex, the top-level authority of a
-        ; security identifier is represented as a hexadecimal number
-    SubAuthority= "-" 1*10DIGIT
-        ; Sub-Authority is always represented as a decimal number
-        ; No leading "0" characters are allowed when IdentifierAuthority
-        ; or SubAuthority is represented as a decimal number
-        ; All hexadecimal digits must be output in string format,
-        ; pre-pended by "0x"
-
-    Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
-    SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
-    IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
-    SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
-
-    If you have a SID like S-a-b-c-d-e-f-g-...
-
-    Then the bytes are
-    a      (revision)
-    N      (number of dashes minus two)
-    bbbbbb (six bytes of "b" treated as a 48-bit number in big-endian format)
-    cccc   (four bytes of "c" treated as a 32-bit number in little-endian format)
-    dddd   (four bytes of "d" treated as a 32-bit number in little-endian format)
-    eeee   (four bytes of "e" treated as a 32-bit number in little-endian format)
-    ffff   (four bytes of "f" treated as a 32-bit number in little-endian format)
-
-    """
-    if not isinstance(input_value, SEQUENCE_TYPES):
-        sequence = False
-        input_value = [input_value]
-    else:
-        sequence = True  # indicates if a sequence must be returned
-
-    valid_values = []
-    changed = False
-    for element in input_value:
-        if isinstance(element, STRING_TYPES):
-            if element.startswith('S-'):
-                parts = element.split('-')
-                sid_bytes = pack('q', int(parts[2]))[2:]  # authority (in dec)
-            else:
-                sid_bytes += pack('>q', int(parts[2], 16))[2:]  # authority (in hex)
-            for sub_auth in parts[3:]:
-                sid_bytes += pack('.
+from binascii import a2b_hex, hexlify
+from datetime import datetime
+from calendar import timegm
+from uuid import UUID
+from struct import pack
+
+
+from ... import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES
+from .formatters import format_time, format_ad_timestamp
+from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes, escape_bytes
+
+# Validators return True if value is valid, False if value is not valid,
+# or a value different from True and False that is a valid value to substitute for the input value
+
+
+def check_backslash(value):
+    if isinstance(value, (bytearray, bytes)):
+        if b'\\' in value:
+            value = value.replace(b'\\', b'\\5C')
+    elif isinstance(value, STRING_TYPES):
+        if '\\' in value:
+            value = value.replace('\\', '\\5C')
+    return value
+
+
+def check_type(input_value, value_type):
+    if isinstance(input_value, value_type):
+        return True
+
+    if isinstance(input_value, SEQUENCE_TYPES):
+        for value in input_value:
+            if not isinstance(value, value_type):
+                return False
+        return True
+
+    return False
+
+
+# noinspection PyUnusedLocal
+def always_valid(input_value):
+    return True
+
+
+def validate_generic_single_value(input_value):
+    if not isinstance(input_value, SEQUENCE_TYPES):
+        return True
+
+    try:  # object might not have a __len__ method
+        if len(input_value) == 1:
+            return True
+    except Exception:
+        pass
+
+    return False
+
+
+def validate_zero_and_minus_one_and_positive_int(input_value):
+    """Accept -1, 0 and any positive int (used by pwdLastSet in AD)
+    """
+    if not isinstance(input_value, SEQUENCE_TYPES):
+        if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES):
+            return True if int(input_value) >= -1 else False
+        return False
+    else:
+        if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)):
+            return True if int(input_value[0]) >= -1 else False
+
+    return False
+
+
+def validate_integer(input_value):
+    if check_type(input_value, (float, bool)):
+        return False
+    if check_type(input_value, INTEGER_TYPES):
+        return True
+
+    if not isinstance(input_value, SEQUENCE_TYPES):
+        sequence = False
+        input_value = [input_value]
+    else:
+        sequence = True  # indicates if a sequence must be returned
+
+    valid_values = []  # builds a list of valid int values
+    from decimal import Decimal, InvalidOperation
+    for element in input_value:
+        try:  # try to convert any type to int; an invalid conversion raises TypeError or ValueError. Double-check with the Decimal type: if both are valid and equal then the int() value is used
+            value = to_unicode(element) if isinstance(element, bytes) else element
+            decimal_value = Decimal(value)
+            int_value = int(value)
+            if decimal_value == int_value:
+                valid_values.append(int_value)
+            else:
+                return False
+        except (ValueError, TypeError, InvalidOperation):
+            return False
+
+    if sequence:
+        return valid_values
+    else:
+        return valid_values[0]
+
+
+def validate_bytes(input_value):
+    return check_type(input_value, bytes)
+
+
+def validate_boolean(input_value):
+    # it could be a real bool or the string TRUE or FALSE; only a single value is allowed
+    if validate_generic_single_value(input_value):  # valid only if a single value or a sequence with a single element
+        if isinstance(input_value, SEQUENCE_TYPES):
+            input_value = input_value[0]
+        if isinstance(input_value, bool):
+            if input_value:
+                return 'TRUE'
+            else:
+                return 'FALSE'
+        if str is not bytes and isinstance(input_value, bytes):  # python3: try to convert bytes to string
+            input_value = to_unicode(input_value)
+        if isinstance(input_value, STRING_TYPES):
+            if input_value.lower() == 'true':
+                return 'TRUE'
+            elif input_value.lower() == 'false':
+                return 'FALSE'
+    return False
+
+
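Taken together, the validators above either approve a value (True), reject it (False), or hand back a normalized replacement value; a rough usage sketch, with the expected results inferred from the code above (assuming the bundled package is importable as ldap3):

    # Rough sketch of the validator return convention described above.
    from ldap3.protocol.formatters.validators import validate_boolean, validate_integer

    validate_boolean(True)      # -> 'TRUE'  (a replacement value, not just approval)
    validate_boolean('false')   # -> 'FALSE'
    validate_boolean('maybe')   # -> False   (rejected)
    validate_integer('123')     # -> 123     (string accepted after the Decimal double-check)
    validate_integer(1.5)       # -> False   (floats are rejected outright)
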
+ """ + if not isinstance(input_value, INTEGER_TYPES) or input_value > 0: + return False + return validate_ad_timestamp(input_value * -1) + + +def validate_guid(input_value): + """ + object guid in uuid format (Novell eDirectory) + """ + if not isinstance(input_value, SEQUENCE_TYPES): + sequence = False + input_value = [input_value] + else: + sequence = True # indicates if a sequence must be returned + + valid_values = [] + changed = False + for element in input_value: + if isinstance(element, STRING_TYPES): + try: + valid_values.append(UUID(element).bytes) + changed = True + except ValueError: # try if the value is an escaped byte sequence + try: + valid_values.append(UUID(element.replace('\\', '')).bytes) + changed = True + continue + except ValueError: + if str is not bytes: # python 3 + pass + else: + valid_values.append(element) + continue + return False + elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid + valid_values.append(element) + else: + return False + + if changed: + valid_values = [check_backslash(value) for value in valid_values] + if sequence: + return valid_values + else: + return valid_values[0] + else: + return True + + +def validate_uuid(input_value): + """ + object entryUUID in uuid format + """ + if not isinstance(input_value, SEQUENCE_TYPES): + sequence = False + input_value = [input_value] + else: + sequence = True # indicates if a sequence must be returned + + valid_values = [] + changed = False + for element in input_value: + if isinstance(element, STRING_TYPES): + try: + valid_values.append(str(UUID(element))) + changed = True + except ValueError: # try if the value is an escaped byte sequence + try: + valid_values.append(str(UUID(element.replace('\\', '')))) + changed = True + continue + except ValueError: + if str is not bytes: # python 3 + pass + else: + valid_values.append(element) + continue + return False + elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid + valid_values.append(element) + else: + return False + + if changed: + valid_values = [check_backslash(value) for value in valid_values] + if sequence: + return valid_values + else: + return valid_values[0] + else: + return True + + +def validate_uuid_le(input_value): + """ + Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP: + "{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles) + "689e030773434d26a7a007039e684373": packet representation, already in little endian + "\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian + byte sequence: already in little endian + + """ + if not isinstance(input_value, SEQUENCE_TYPES): + sequence = False + input_value = [input_value] + else: + sequence = True # indicates if a sequence must be returned + + valid_values = [] + changed = False + for element in input_value: + error = False + if isinstance(element, STRING_TYPES): + if element[0] == '{' and element[-1] == '}': + try: + valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian + changed = True + except ValueError: + error = True + elif '-' in element: + try: + valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian + changed = True + except ValueError: + error = True + elif '\\' in element: + try: + uuid = UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le + uuid = escape_bytes(uuid) + 
valid_values.append(uuid) # byte representation, value in little endian + changed = True + except ValueError: + error = True + elif '-' not in element: # value in little endian + try: + valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le) # packet representation, value in little endian, converts to little endian + changed = True + except ValueError: + error = True + if error and str == bytes: # python2 only assume value is bytes and valid + valid_values.append(element) # value is untouched, must be in little endian + elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid uuid + valid_values.append(element) # value is untouched, must be in little endian + else: + return False + + if changed: + valid_values = [check_backslash(value) for value in valid_values] + if sequence: + return valid_values + else: + return valid_values[0] + else: + return True + + +def validate_sid(input_value): + """ + SID= "S-1-" IdentifierAuthority 1*SubAuthority + IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex + ; If the identifier authority is < 2^32, the + ; identifier authority is represented as a decimal + ; number + ; If the identifier authority is >= 2^32, + ; the identifier authority is represented in + ; hexadecimal + IdentifierAuthorityDec = 1*10DIGIT + ; IdentifierAuthorityDec, top level authority of a + ; security identifier is represented as a decimal number + IdentifierAuthorityHex = "0x" 12HEXDIG + ; IdentifierAuthorityHex, the top-level authority of a + ; security identifier is represented as a hexadecimal number + SubAuthority= "-" 1*10DIGIT + ; Sub-Authority is always represented as a decimal number + ; No leading "0" characters are allowed when IdentifierAuthority + ; or SubAuthority is represented as a decimal number + ; All hexadecimal digits must be output in string format, + ; pre-pended by "0x" + + Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01. + SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15. + IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority. + SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount. + + If you have a SID like S-a-b-c-d-e-f-g-... 
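The 11644473600 and 10000000 constants in validate_ad_timestamp a few functions above encode the gap between the Unix and Windows FILETIME epochs; a worked example (the datetime chosen is arbitrary, the arithmetic is taken from the code above):

    # Worked example of the AD timestamp arithmetic used above: FILETIME counts
    # 100-nanosecond ticks since 1601-01-01, timegm() counts seconds since
    # 1970-01-01, and the two epochs are 11644473600 seconds apart.
    from calendar import timegm
    from datetime import datetime, timezone

    dt = datetime(2020, 1, 1, tzinfo=timezone.utc)
    filetime = (timegm(dt.utctimetuple()) + 11644473600) * 10000000
    assert filetime == 132223104000000000
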
+
+    Then the bytes are
+    a      (revision)
+    N      (number of dashes minus two)
+    bbbbbb (six bytes of "b" treated as a 48-bit number in big-endian format)
+    cccc   (four bytes of "c" treated as a 32-bit number in little-endian format)
+    dddd   (four bytes of "d" treated as a 32-bit number in little-endian format)
+    eeee   (four bytes of "e" treated as a 32-bit number in little-endian format)
+    ffff   (four bytes of "f" treated as a 32-bit number in little-endian format)
+
+    """
+    if not isinstance(input_value, SEQUENCE_TYPES):
+        sequence = False
+        input_value = [input_value]
+    else:
+        sequence = True  # indicates if a sequence must be returned
+
+    valid_values = []
+    changed = False
+    for element in input_value:
+        if isinstance(element, STRING_TYPES):
+            if element.startswith('S-'):
+                parts = element.split('-')
+                sid_bytes = pack('q', int(parts[2]))[2:]  # authority (in dec)
+            else:
+                sid_bytes += pack('>q', int(parts[2], 16))[2:]  # authority (in hex)
+            for sub_auth in parts[3:]:
+                sid_bytes += pack('= 1 and connection.sasl_credentials[0]:
             if connection.sasl_credentials[0] is True:
@@ -70,9 +75,15 @@
             target_name = gssapi.Name('ldap@' + connection.sasl_credentials[0], gssapi.NameType.hostbased_service)
         if len(connection.sasl_credentials) >= 2 and connection.sasl_credentials[1]:
             authz_id = connection.sasl_credentials[1].encode("utf-8")
+        if len(connection.sasl_credentials) >= 3 and connection.sasl_credentials[2]:
+            raw_creds = connection.sasl_credentials[2]
     if target_name is None:
         target_name = gssapi.Name('ldap@' + connection.server.host, gssapi.NameType.hostbased_service)
-    creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate') if connection.user else None
+
+    if raw_creds is not None:
+        creds = gssapi.Credentials(base=raw_creds, usage='initiate', store=connection.cred_store)
+    else:
+        creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate', store=connection.cred_store) if connection.user else None
     ctx = gssapi.SecurityContext(name=target_name, mech=gssapi.MechType.kerberos, creds=creds)
     in_token = None
     try:
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/plain.py b/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/plain.py
index 1de2a36..f7f7456 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/plain.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/plain.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/sasl.py b/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/sasl.py
index 375b235..30fe0e9 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/sasl.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/sasl/sasl.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
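The kerberos.py hunk above wires an optional third element of sasl_credentials through to gssapi.Credentials(base=...), so a caller can hand over pre-acquired GSSAPI credentials instead of relying on the connection user; a hedged sketch (the server name and the way `creds` is acquired are illustrative, not from this patch):

    # Hedged sketch of the new third sasl_credentials slot added above.
    # Assumes the python-gssapi package; obtaining `creds` this way (e.g. from
    # a ccache or keytab) is illustrative only.
    import gssapi
    from ldap3 import Connection, Server, SASL, KERBEROS

    creds = gssapi.Credentials(usage='initiate')
    conn = Connection(Server('ldap.example.com'),
                      authentication=SASL,
                      sasl_mechanism=KERBEROS,
                      # (target host, authorization id, raw GSSAPI credentials)
                      sasl_credentials=('ldap.example.com', None, creds))
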
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ad2012R2.py b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ad2012R2.py
index f583973..1712613 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ad2012R2.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ad2012R2.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ds389.py b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ds389.py
index 0ede92f..f0e19dc 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ds389.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/ds389.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir888.py b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir888.py
index 630d7dc..8243a7e 100644
--- a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir888.py
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir888.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
@@ -939,12 +939,7 @@ edir_8_8_8_dsa_info = """
         "addEntryOps": [
             "947"
         ],
-        "altServer": [
-            "ldap://192.168.137.102:389/",
-            "ldaps://192.168.137.102:636/",
-            "ldap://192.168.137.103:389/",
-            "ldaps://192.168.137.103:636/"
-        ],
+        "altServer": [],
         "bindSecurityErrors": [
             "3"
         ],
diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir914.py b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir914.py
new file mode 100644
index 0000000..0a1d2e6
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/edir914.py
@@ -0,0 +1,1157 @@
+"""
+"""
+
+# Created on 2019.08.31
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
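Schema modules like the one introduced above carry a JSON dump of the directory schema and DSA info so that a Server object can be populated without touching the network; a usage sketch against the already-released eDirectory 8.8.8 constant (a matching OFFLINE_EDIR_9_1_4 constant for this new file is an assumption, not visible in this diff):

    # Sketch: bundled offline schema data backs mocked/offline Server definitions.
    # OFFLINE_EDIR_8_8_8 exists in released ldap3; the counterpart constant for
    # the edir914 module added here is assumed to be registered elsewhere.
    from ldap3 import Server, OFFLINE_EDIR_8_8_8

    server = Server('fake_edir', get_info=OFFLINE_EDIR_8_8_8)  # no network I/O
    print(server.schema)  # schema parsed from the bundled JSON string
    print(server.info)    # static DSA info from the same module
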
+ +edir_9_1_4_schema = """ +{ + "raw": { + "attributeTypes": [ + "( 2.5.4.35 NAME 'userPassword' DESC 'Internal NDS policy forces this to be single-valued' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} USAGE directoryOperation )", + "( 2.5.18.1 NAME 'createTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.5.18.2 NAME 'modifyTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.5.18.10 NAME 'subschemaSubentry' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation )", + "( 2.5.21.9 NAME 'structuralObjectClass' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.16.840.1.113719.1.27.4.49 NAME 'subordinateCount' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.16.840.1.113719.1.27.4.48 NAME 'entryFlags' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.16.840.1.113719.1.27.4.51 NAME 'federationBoundary' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.5.21.5 NAME 'attributeTypes' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.3 USAGE directoryOperation )", + "( 2.5.21.6 NAME 'objectClasses' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.37 USAGE directoryOperation )", + "( 1.3.6.1.1.20 NAME 'entryDN' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )", + "( 2.16.840.1.113719.1.1.4.1.2 NAME 'ACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )", + "( 2.5.4.1 NAME 'aliasedObjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Aliased Object Name' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )", + "( 2.16.840.1.113719.1.1.4.1.6 NAME 'backLink' SYNTAX 2.16.840.1.113719.1.1.5.1.23 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Back Link' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.8 NAME 'binderyProperty' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Property' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.7 NAME 'binderyObjectRestriction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Object Restriction' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.9 NAME 'binderyType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Type' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.11 NAME 'cAPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.12 NAME 'cAPublicKey' 
SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.10 NAME 'Cartridge' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.3 NAME ( 'cn' 'commonName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'CN' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.78 NAME 'printerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Printer Configuration' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.15 NAME 'Convergence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{1} SINGLE-VALUE X-NDS_UPPER_BOUND '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.6 NAME ( 'c' 'countryName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{2} SINGLE-VALUE X-NDS_NAME 'C' X-NDS_LOWER_BOUND '2' X-NDS_UPPER_BOUND '2' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.18 NAME 'defaultQueue' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Default Queue' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.13 NAME ( 'description' 'multiLineDescription' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} X-NDS_NAME 'Description' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '1024' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.64 NAME 'partitionCreationTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Creation Time' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.5.4.23 NAME 'facsimileTelephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.22{64512} X-NDS_NAME 'Facsimile Telephone Number' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.117 NAME 'highConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'High Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.25 NAME 'groupMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Group Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.26 NAME 'ndsHomeDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{255} SINGLE-VALUE X-NDS_NAME 'Home Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.27 NAME 'hostDevice' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Device' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.28 NAME 'hostResourceName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Host Resource Name' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.29 NAME 'hostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Server' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.30 NAME 'inheritedACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Inherited ACL' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.5.4.7 NAME ( 'l' 'localityname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'L' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.39 NAME 'loginAllowedTimeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{42} SINGLE-VALUE X-NDS_NAME 'Login Allowed Time Map' X-NDS_LOWER_BOUND '42' X-NDS_UPPER_BOUND '42' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.40 NAME 'loginDisabled' SYNTAX 
1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Login Disabled' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.41 NAME 'loginExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Expiration Time' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.42 NAME 'loginGraceLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Grace Limit' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.43 NAME 'loginGraceRemaining' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Grace Remaining' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.44 NAME 'loginIntruderAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 SINGLE-VALUE X-NDS_NAME 'Login Intruder Address' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.45 NAME 'loginIntruderAttempts' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Intruder Attempts' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.46 NAME 'loginIntruderLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Intruder Limit' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.31 NAME 'intruderAttemptResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Attempt Reset Interval' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.47 NAME 'loginIntruderResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Intruder Reset Time' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.48 NAME 'loginMaximumSimultaneous' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Maximum Simultaneous' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.49 NAME 'loginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Login Script' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.50 NAME 'loginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.31 NAME ( 'member' 'uniqueMember' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Member' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.52 NAME 'Memory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.22 NAME 'eMailAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.14{64512} X-NDS_NAME 'EMail Address' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.55 NAME 'networkAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.56 NAME 'networkAddressRestriction' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address Restriction' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.57 NAME 'notify' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Notify' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.114 NAME 'Obituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )", + "( 2.5.4.0 NAME 'objectClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_NAME 'Object Class' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )", + "( 2.16.840.1.113719.1.1.4.1.59 NAME 'operator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Operator' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} 
X-NDS_NAME 'OU' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.10 NAME ( 'o' 'organizationname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'O' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.32 NAME 'owner' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Owner' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.63 NAME 'pageDescriptionLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Page Description Language' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.65 NAME 'passwordsUsed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'Passwords Used' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.66 NAME 'passwordAllowChange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Allow Change' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.67 NAME 'passwordExpirationInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Password Expiration Interval' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.68 NAME 'passwordExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Password Expiration Time' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.69 NAME 'passwordMinimumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Password Minimum Length' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.70 NAME 'passwordRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Required' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.71 NAME 'passwordUniqueRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Unique Required' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.72 NAME 'path' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Path' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.19 NAME 'physicalDeliveryOfficeName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'Physical Delivery Office Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.16 NAME 'postalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'Postal Address' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.17 NAME 'postalCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Code' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.18 NAME 'postOfficeBox' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Office Box' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.80 NAME 'printJobConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Print Job Configuration' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.79 NAME 'printerControl' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Printer Control' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.82 NAME 'privateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.83 NAME 'Profile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.84 NAME 'publicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Public Key' X-NDS_PUBLIC_READ 
'1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.85 NAME 'queue' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Queue' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.86 NAME 'queueDirectory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{255} SINGLE-VALUE X-NDS_NAME 'Queue Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.115 NAME 'Reference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.88 NAME 'Replica' SYNTAX 2.16.840.1.113719.1.1.5.1.16{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.89 NAME 'Resource' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.33 NAME 'roleOccupant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Role Occupant' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.116 NAME 'higherPrivileges' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Higher Privileges' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.92 NAME 'securityEquals' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Security Equals' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )", + "( 2.5.4.34 NAME 'seeAlso' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'See Also' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.5 NAME 'serialNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Serial Number' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.95 NAME 'server' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Server' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'S' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.98 NAME 'status' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Status' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )", + "( 2.5.4.9 NAME 'street' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'SA' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.102 NAME 'supportedTypefaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Typefaces' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.101 NAME 'supportedServices' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Services' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.4 NAME ( 'sn' 'surname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Surname' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.20 NAME 'telephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} X-NDS_NAME 'Telephone Number' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.12 NAME 'title' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Title' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.111 NAME 'User' SYNTAX 
1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.112 NAME 'Version' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.1 NAME 'accountBalance' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Account Balance' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.4 NAME 'allowUnlimitedCredit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Allow Unlimited Credit' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.118 NAME 'lowConvergenceResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Low Convergence Reset Time' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.54 NAME 'minimumAccountBalance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Minimum Account Balance' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.104 NAME 'lowConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Low Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.21 NAME 'Device' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.53 NAME 'messageServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Message Server' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.34 NAME 'Language' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.100 NAME 'supportedConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Supported Connections' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.107 NAME 'typeCreatorMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Type Creator Map' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.108 NAME 'ndsUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'UID' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.24 NAME 'groupID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'GID' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.110 NAME 'unknownBaseClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Unknown Base Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.87 NAME 'receivedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.33 NAME 'synchronizedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Synchronized Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.5 NAME 'authorityRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Authority Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.13 NAME 'certificateRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Certificate Revocation' X-NDS_NONREMOVABLE '1' 
X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.17 NAME 'ndsCrossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.37 NAME 'lockedByIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Locked By Intruder' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.77 NAME 'printer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Printer' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.20 NAME 'detectIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Detect Intruder' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.38 NAME 'lockoutAfterDetection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Lockout After Detection' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.32 NAME 'intruderLockoutResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Lockout Reset Interval' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.96 NAME 'serverHolds' SYNTAX 2.16.840.1.113719.1.1.5.1.26 X-NDS_NAME 'Server Holds' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.91 NAME 'sAPName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{47} SINGLE-VALUE X-NDS_NAME 'SAP Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '47' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.113 NAME 'Volume' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.35 NAME 'lastLoginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Last Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.81 NAME 'printServer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 SINGLE-VALUE X-NDS_NAME 'Print Server' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.119 NAME 'nNSDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'NNS Domain' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.120 NAME 'fullName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{127} X-NDS_NAME 'Full Name' X-NDS_UPPER_BOUND '127' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.121 NAME 'partitionControl' SYNTAX 2.16.840.1.113719.1.1.5.1.25 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Control' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.122 NAME 'revision' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Revision' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.123 NAME 'certificateValidityInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'Certificate Validity Interval' X-NDS_LOWER_BOUND '60' X-NDS_UPPER_BOUND '-1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.124 NAME 'externalSynchronizer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'External Synchronizer' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.125 NAME 'messagingDatabaseLocation' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Messaging Database Location' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.126 NAME 'messageRoutingGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Message Routing Group' 
X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.127 NAME 'messagingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Messaging Server' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.128 NAME 'Postmaster' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.162 NAME 'mailboxLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Mailbox Location' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.163 NAME 'mailboxID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Mailbox ID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.164 NAME 'externalName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'External Name' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.165 NAME 'securityFlags' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Security Flags' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.166 NAME 'messagingServerType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE X-NDS_NAME 'Messaging Server Type' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.167 NAME 'lastReferencedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Last Referenced Time' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.5.4.42 NAME 'givenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_NAME 'Given Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.43 NAME 'initials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} X-NDS_NAME 'Initials' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.4.44 NAME 'generationQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Generational Qualifier' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.171 NAME 'profileMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Profile Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.172 NAME 'dsRevision' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'DS Revision' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )", + "( 2.16.840.1.113719.1.1.4.1.173 NAME 'supportedGateway' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{4096} X-NDS_NAME 'Supported Gateway' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '4096' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.4.1.174 NAME 'equivalentToMe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Equivalent To Me' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )", + "( 2.16.840.1.113719.1.1.4.1.175 NAME 'replicaUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Replica Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 2.16.840.1.113719.1.1.4.1.176 NAME 'partitionStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Status' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )", + "( 
2.16.840.1.113719.1.1.4.1.177 NAME 'permanentConfigParms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Permanent Config Parms' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.178 NAME 'Timezone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.179 NAME 'binderyRestrictionLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Bindery Restriction Level' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.180 NAME 'transitiveVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Transitive Vector' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.181 NAME 'T' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.183 NAME 'purgeVector' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Purge Vector' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.184 NAME 'synchronizationTolerance' SYNTAX 2.16.840.1.113719.1.1.5.1.19 USAGE directoryOperation X-NDS_NAME 'Synchronization Tolerance' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.185 NAME 'passwordManagement' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Password Management' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.186 NAME 'usedBy' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Used By' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.187 NAME 'Uses' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.500 NAME 'obituaryNotify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Obituary Notify' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.501 NAME 'GUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.502 NAME 'otherGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} USAGE directoryOperation X-NDS_NAME 'Other GUID' X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.503 NAME 'auxiliaryClassFlag' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Auxiliary Class Flag' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.504 NAME 'unknownAuxiliaryClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} USAGE directoryOperation X-NDS_NAME 'Unknown Auxiliary Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 0.9.2342.19200300.100.1.1 NAME ( 'uid' 'userId' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'uniqueID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 0.9.2342.19200300.100.1.25 NAME 'dc' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64} X-NDS_NAME 'dc' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.507 NAME 'auxClassObjectClassBackup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'AuxClass Object Class Backup' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.508 NAME 'localReceivedUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Local Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.4 NAME 'federationControl' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.2 NAME 'federationSearchPath' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.3 NAME 'federationDNSName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.1 NAME 'federationBoundaryType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.14.4.1.4 NAME 'DirXML-Associations' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.5.18.3 NAME 'creatorsName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.18.4 NAME 'modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.300 NAME 'languageId' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.35 NAME 'ndsPredicate' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.36 NAME 'ndsPredicateState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.37 NAME 'ndsPredicateFlush' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.38 NAME 'ndsPredicateTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.40 NAME 'ndsPredicateStatsDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.39 NAME 'ndsPredicateUseValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.601 NAME 'syncPanePoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.600 NAME 'syncWindowVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.602 NAME 'objectVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.27.4.52 NAME 'memberQueryURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'memberQuery' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.302 NAME 'excludedMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.525 NAME 'auxClassCompatibility' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.518 NAME 'ndsAgentPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.519 NAME 'ndsOperationCheckpoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.520 NAME 'localReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.521 NAME 'treeReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.522 NAME 'schemaResetLock' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.523 NAME 'modifiedACLEntry' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.524 NAME 'monitoredConnection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.526 NAME 'localFederationBoundary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.527 NAME 'replicationFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.721 NAME 'ServerEBAEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.716 NAME 'EBATreeConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.722 NAME 'EBAPartitionConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.723 NAME 'EBAServerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.296 NAME 'loginActivationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.687 NAME 'UpdateInProgress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.720 NAME 'dsContainerReadyAttrs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.400.1 NAME 'edirSchemaFlagVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.512 NAME 'indexDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.513 NAME 'ndsStatusRepair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.514 NAME 'ndsStatusExternalReference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.515 NAME 'ndsStatusObituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.516 NAME 'ndsStatusSchema' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.517 NAME 'ndsStatusLimber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.511 NAME 'authoritative' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113730.3.1.34 NAME 'ref' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.546 NAME 'CachedAttrsOnExtRefs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.547 NAME 'ExtRefLastUpdatedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.688 NAME 'NCPKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.713 NAME 'UTF8LoginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.714 NAME 'loginScriptCharset' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.721 NAME 'NDSRightsToMonitor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.1.192 NAME 'lDAPLogLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Log Level' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.16.840.1.113719.1.27.4.12 NAME 'lDAPUDPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP UDP Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.1.4.1.204 NAME 'lDAPLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Log Filename' )",
+ "( 2.16.840.1.113719.1.1.4.1.205 NAME 'lDAPBackupLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Backup Log Filename' )",
+ "( 2.16.840.1.113719.1.1.4.1.206 NAME 'lDAPLogSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Log Size Limit' X-NDS_LOWER_BOUND '2048' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.1.4.1.194 NAME 'lDAPSearchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Size Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.1.4.1.195 NAME 'lDAPSearchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Time Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.1.4.1.207 NAME 'lDAPSuffix' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Suffix' )",
+ "( 2.16.840.1.113719.1.27.4.70 NAME 'ldapConfigVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.14 NAME 'ldapReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Referral' )",
+ "( 2.16.840.1.113719.1.27.4.73 NAME 'ldapDefaultReferralBehavior' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.23 NAME 'ldapSearchReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:searchReferralUsage' )",
+ "( 2.16.840.1.113719.1.27.4.24 NAME 'lDAPOtherReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:otherReferralUsage' )",
+ "( 2.16.840.1.113719.1.27.4.1 NAME 'ldapHostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Host Server' )",
+ "( 2.16.840.1.113719.1.27.4.2 NAME 'ldapGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Group' )",
+ "( 2.16.840.1.113719.1.27.4.3 NAME 'ldapTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Screen Level' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.16.840.1.113719.1.27.4.4 NAME 'searchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.27.4.5 NAME 'searchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.27.4.6 NAME 'ldapServerBindLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Bind Limit' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.27.4.7 NAME 'ldapServerIdleTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Idle Timeout' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.27.4.8 NAME 'ldapEnableTCP' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable TCP' )",
+ "( 2.16.840.1.113719.1.27.4.10 NAME 'ldapEnableSSL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable SSL' )",
+ "( 2.16.840.1.113719.1.27.4.11 NAME 'ldapTCPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP TCP Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.27.4.13 NAME 'ldapSSLPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP SSL Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.27.4.21 NAME 'filteredReplicaUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.22 NAME 'ldapKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP:keyMaterialName' )",
+ "( 2.16.840.1.113719.1.27.4.42 NAME 'extensionInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.27.4.45 NAME 'nonStdClientSchemaCompatMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.46 NAME 'sslEnableMutualAuthentication' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.62 NAME 'ldapEnablePSearch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.63 NAME 'ldapMaximumPSearchOperations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.64 NAME 'ldapIgnorePSearchLimitsForEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.65 NAME 'ldapTLSTrustedRootContainer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.27.4.66 NAME 'ldapEnableMonitorEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.67 NAME 'ldapMaximumMonitorEventsLoad' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.68 NAME 'ldapTLSRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.69 NAME 'ldapTLSVerifyClientCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.71 NAME 'ldapDerefAlias' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.72 NAME 'ldapNonStdAllUserAttrsMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.75 NAME 'ldapBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.79 NAME 'ldapInterfaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.27.4.80 NAME 'ldapChainSecureRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.82 NAME 'ldapStdCompliance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.83 NAME 'ldapDerefAliasOnAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.84 NAME 'ldapGeneralizedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.85 NAME 'ldapPermissiveModify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.86 NAME 'ldapSSLConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.27.4.15 NAME 'ldapServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server List' )",
+ "( 2.16.840.1.113719.1.27.4.16 NAME 'ldapAttributeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Attribute Map v11' )",
+ "( 2.16.840.1.113719.1.27.4.17 NAME 'ldapClassMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Class Map v11' )",
+ "( 2.16.840.1.113719.1.27.4.18 NAME 'ldapAllowClearTextPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Allow Clear Text Password' )",
+ "( 2.16.840.1.113719.1.27.4.19 NAME 'ldapAnonymousIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Anonymous Identity' )",
+ "( 2.16.840.1.113719.1.27.4.52 NAME 'ldapAttributeList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
+ "( 2.16.840.1.113719.1.27.4.53 NAME 'ldapClassList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
+ "( 2.16.840.1.113719.1.27.4.56 NAME 'transitionGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.74 NAME 'ldapTransitionBackLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.78 NAME 'ldapLBURPNumWriterThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.20 NAME 'ldapServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server' )",
+ "( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'Internet EMail Address' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113730.3.1.3 NAME 'employeeNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'NSCP:employeeNumber' )",
+ "( 2.16.840.1.113719.1.27.4.76 NAME 'referralExcludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.27.4.77 NAME 'referralIncludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.5.4.36 NAME 'userCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'userCertificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.37 NAME 'cACertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'cACertificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.40 NAME 'crossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'crossCertificatePair' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.58 NAME 'attributeCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.2 NAME 'knowledgeInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.5.4.14 NAME 'searchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.25{64512} X-NDS_NAME 'searchGuide' )",
+ "( 2.5.4.15 NAME 'businessCategory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
+ "( 2.5.4.21 NAME 'telexNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.52{64512} X-NDS_NAME 'telexNumber' )",
+ "( 2.5.4.22 NAME 'teletexTerminalIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.51{64512} X-NDS_NAME 'teletexTerminalIdentifier' )",
+ "( 2.5.4.24 NAME 'x121Address' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '15' )",
+ "( 2.5.4.25 NAME 'internationaliSDNNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '16' )",
+ "( 2.5.4.26 NAME 'registeredAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'registeredAddress' )",
+ "( 2.5.4.27 NAME 'destinationIndicator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
+ "( 2.5.4.28 NAME 'preferredDeliveryMethod' SYNTAX 1.3.6.1.4.1.1466.115.121.1.14{64512} SINGLE-VALUE X-NDS_NAME 'preferredDeliveryMethod' )",
+ "( 2.5.4.29 NAME 'presentationAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.43{64512} SINGLE-VALUE X-NDS_NAME 'presentationAddress' )",
+ "( 2.5.4.30 NAME 'supportedApplicationContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38{64512} X-NDS_NAME 'supportedApplicationContext' )",
+ "( 2.5.4.45 NAME 'x500UniqueIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.6{64512} X-NDS_NAME 'x500UniqueIdentifier' )",
+ "( 2.5.4.46 NAME 'dnQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64512} )",
+ "( 2.5.4.47 NAME 'enhancedSearchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.21{64512} X-NDS_NAME 'enhancedSearchGuide' )",
+ "( 2.5.4.48 NAME 'protocolInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.42{64512} X-NDS_NAME 'protocolInformation' )",
+ "( 2.5.4.51 NAME 'houseIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.5.4.52 NAME 'supportedAlgorithms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.49{64512} X-NDS_NAME 'supportedAlgorithms' )",
+ "( 2.5.4.54 NAME 'dmdName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.38 NAME 'associatedName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.5.4.49 NAME 'dn' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.1 NAME 'httpServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.3.4.2 NAME 'httpHostServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.3 NAME 'httpThreadsPerCPU' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.4 NAME 'httpIOBufferSize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.5 NAME 'httpRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.6 NAME 'httpKeepAliveRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.7 NAME 'httpSessionTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.8 NAME 'httpKeyMaterialObject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.9 NAME 'httpTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.10 NAME 'httpAuthRequiresTLS' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.11 NAME 'httpDefaultClearPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.12 NAME 'httpDefaultTLSPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.13 NAME 'httpBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.295 NAME 'emboxConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.54.4.1.1 NAME 'trusteesOfNewObject' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'Trustees Of New Object' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.55.4.1.1 NAME 'newObjectSDSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's DS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.56.4.1.1 NAME 'newObjectSFSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'New Object's FS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.57.4.1.1 NAME 'setupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.58.4.1.1 NAME 'runSetupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Run Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.59.4.1.1 NAME 'membersOfTemplate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Members Of Template' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.60.4.1.1 NAME 'volumeSpaceRestrictions' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Volume Space Restrictions' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.61.4.1.1 NAME 'setPasswordAfterCreate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Set Password After Create' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.62.4.1.1 NAME 'homeDirectoryRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NAME 'Home Directory Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.63.4.1.1 NAME 'newObjectSSelfRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's Self Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.8.4.1 NAME 'digitalMeID' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.8.4.2 NAME 'assistant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.8.4.3 NAME 'assistantPhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.4 NAME 'city' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.5 NAME 'company' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.43 NAME 'co' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.6 NAME 'directReports' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 0.9.2342.19200300.100.1.10 NAME 'manager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.8.4.7 NAME 'mailstop' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 0.9.2342.19200300.100.1.40 NAME 'personalTitle' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.42 NAME 'pager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.8 NAME 'workforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.9 NAME 'instantMessagingID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.10 NAME 'preferredName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.7 NAME 'photo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.8.4.11 NAME 'jobCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.12 NAME 'siteLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.13 NAME 'employeeStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.4 NAME 'employeeType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.14 NAME 'costCenter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.15 NAME 'costCenterDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.16 NAME 'tollFreePhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.17 NAME 'otherPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.18 NAME 'managerWorkforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.19 NAME 'jackNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.20 NAME 'vehicleInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.21 NAME 'accessCardNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.32 NAME 'isManager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.8.4.22 NAME 'homeCity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.23 NAME 'homeEmailAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 1.3.6.1.4.1.1466.101.120.31 NAME 'homeFax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 0.9.2342.19200300.100.1.20 NAME 'homePhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.24 NAME 'homeState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.39 NAME 'homePostalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} )",
+ "( 2.16.840.1.113719.1.8.4.25 NAME 'homeZipCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.26 NAME 'personalMobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.27 NAME 'children' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.28 NAME 'spouse' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.29 NAME 'vendorName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.30 NAME 'vendorAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.31 NAME 'vendorPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.1.4.1.303 NAME 'dgIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME_VALUE_ACCESS '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.304 NAME 'dgTimeOut' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.305 NAME 'dgAllowUnknown' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.306 NAME 'dgAllowDuplicates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.546 NAME 'allowAliasToAncestor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.39.4.1.1 NAME 'sASSecurityDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Security DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.2 NAME 'sASServiceDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Service DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.3 NAME 'sASSecretStore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:SecretStore' )",
+ "( 2.16.840.1.113719.1.39.4.1.4 NAME 'sASSecretStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Key' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.5 NAME 'sASSecretStoreData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Data' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.6 NAME 'sASPKIStoreKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:PKIStore:Keys' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.1 NAME 'nDSPKIPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.2 NAME 'nDSPKIPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key' )",
+ "( 2.16.840.1.113719.1.48.4.1.3 NAME 'nDSPKIPublicKeyCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.4 NAME 'nDSPKICertificateChain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.16 NAME 'nDSPKIPublicKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.17 NAME 'nDSPKIPrivateKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key EC' )",
+ "( 2.16.840.1.113719.1.48.4.1.18 NAME 'nDSPKIPublicKeyCertificateEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.19 NAME 'crossCertificatePairEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKICertificateChainEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.5 NAME 'nDSPKIParentCA' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA' )",
+ "( 2.16.840.1.113719.1.48.4.1.6 NAME 'nDSPKIParentCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA DN' )",
+ "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKISuiteBMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'NDSPKI:SuiteBMode' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.7 NAME 'nDSPKIKeyFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Key File' )",
+ "( 2.16.840.1.113719.1.48.4.1.8 NAME 'nDSPKISubjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Subject Name' )",
+ "( 2.16.840.1.113719.1.48.4.1.11 NAME 'nDSPKIGivenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Given Name' )",
+ "( 2.16.840.1.113719.1.48.4.1.9 NAME 'nDSPKIKeyMaterialDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Key Material DN' )",
+ "( 2.16.840.1.113719.1.48.4.1.10 NAME 'nDSPKITreeCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Tree CA DN' )",
+ "( 2.5.4.59 NAME 'cAECCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.12 NAME 'nDSPKIUserCertificateInfo' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'NDSPKI:userCertificateInfo' )",
+ "( 2.16.840.1.113719.1.48.4.1.13 NAME 'nDSPKITrustedRootCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Trusted Root Certificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.14 NAME 'nDSPKINotBefore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not Before' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.15 NAME 'nDSPKINotAfter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not After' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.101 NAME 'nDSPKISDKeyServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:SD Key Server DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.102 NAME 'nDSPKISDKeyStruct' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:SD Key Struct' )",
+ "( 2.16.840.1.113719.1.48.4.1.103 NAME 'nDSPKISDKeyCert' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key Cert' )",
+ "( 2.16.840.1.113719.1.48.4.1.104 NAME 'nDSPKISDKeyID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key ID' )",
+ "( 2.16.840.1.113719.1.39.4.1.105 NAME 'nDSPKIKeystore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'NDSPKI:Keystore' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.106 NAME 'ndspkiAdditionalRoots' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.3 NAME 'masvLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.4 NAME 'masvProposedLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.5 NAME 'masvDefaultRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.6 NAME 'masvAuthorizedRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.7 NAME 'masvDomainPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.8 NAME 'masvClearanceNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.9 NAME 'masvLabelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.10 NAME 'masvLabelSecrecyLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.11 NAME 'masvLabelSecrecyCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.12 NAME 'masvLabelIntegrityLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.13 NAME 'masvLabelIntegrityCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.14 NAME 'masvPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.16 NAME 'masvNDSAttributeLabels' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.15 NAME 'masvPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.2 NAME 'sASLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'SAS:Login Sequence' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.8 NAME 'sASLoginPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy Update' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'sasNMASProductOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.74 NAME 'sasAuditConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.14 NAME 'sASNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.15 NAME 'sASPolicyCredentials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Policy Credentials' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.16 NAME 'sASPolicyMethods' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Methods' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.17 NAME 'sASPolicyObjectVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Policy Object Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.18 NAME 'sASPolicyServiceSubtypes' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Service Subtypes' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.19 NAME 'sASPolicyServices' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Services' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.20 NAME 'sASPolicyUsers' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Users' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.21 NAME 'sASAllowNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:Allow NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.9 NAME 'sASMethodIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Identifier' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.10 NAME 'sASMethodVendor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Vendor' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.11 NAME 'sASAdvisoryMethodGrade' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Advisory Method Grade' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.12 NAME 'sASVendorSupport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Vendor Support' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.13 NAME 'sasCertificateSearchContainers' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.70 NAME 'sasNMASMethodConfigData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.22 NAME 'sASLoginClientMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.23 NAME 'sASLoginServerMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.24 NAME 'sASLoginClientMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.25 NAME 'sASLoginServerMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.26 NAME 'sasLoginClientMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.27 NAME 'sasLoginServerMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.28 NAME 'sasLoginClientMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.29 NAME 'sasLoginServerMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.30 NAME 'sasLoginClientMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.31 NAME 'sasLoginServerMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.32 NAME 'sasLoginClientMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.33 NAME 'sasLoginServerMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.34 NAME 'sasLoginClientMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.35 NAME 'sasLoginServerMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1000 NAME 'sasLoginClientMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1001 NAME 'sasLoginServerMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1002 NAME 'sasLoginClientMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1003 NAME 'sasLoginServerMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1004 NAME 'sasLoginClientMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1005 NAME 'sasLoginServerMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1006 NAME 'sasLoginClientMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1007 NAME 'sasLoginServerMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1008 NAME 'sasLoginClientMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1009 NAME 'sasLoginServerMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasLoginServerMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasLoginClientMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.78 NAME 'sasUnsignedMethodModules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.79 NAME 'sasServerModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.80 NAME 'sasServerModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.81 NAME 'sasSASLMechanismName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.82 NAME 'sasSASLMechanismEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.83 NAME 'sasClientModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.84 NAME 'sasClientModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.36 NAME 'sASLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Method Container DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.37 NAME 'sASLoginPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.63 NAME 'sasPostLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'rADIUSActiveConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Active Connections' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.39 NAME 'rADIUSAgedInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Aged Interval' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.40 NAME 'rADIUSAttributeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Attribute List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.41 NAME 'rADIUSAttributeLists' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Attribute Lists' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.42 NAME 'rADIUSClient' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Client' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.43 NAME 'rADIUSCommonNameResolution' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Common Name Resolution' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.44 NAME 'rADIUSConcurrentLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Concurrent Limit' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.45 NAME 'rADIUSConnectionHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Connection History' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.46 NAME 'rADIUSDASVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:DAS Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.47 NAME 'rADIUSDefaultProfile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Default Profile' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.48 NAME 'rADIUSDialAccessGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'RADIUS:Dial Access Group' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.49 NAME 'rADIUSEnableCommonNameLogin' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Common Name Login' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.50 NAME 'rADIUSEnableDialAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Dial Access' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.51 NAME 'rADIUSInterimAcctingTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Interim Accting Timeout' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.52 NAME 'rADIUSLookupContexts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'RADIUS:Lookup Contexts' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.53 NAME 'rADIUSMaxDASHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Max DAS History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.54 NAME 'rADIUSMaximumHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Maximum History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.55 NAME 'rADIUSPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Password' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.56 NAME 'rADIUSPasswordPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Password Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.57 NAME 'rADIUSPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Private Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.58 NAME 'rADIUSProxyContext' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Proxy Context' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.59 NAME 'rADIUSProxyDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Domain' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.60 NAME 'rADIUSProxyTarget' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Target' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.61 NAME 'rADIUSPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Public Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.62 NAME 'rADIUSServiceList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Service List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.3 NAME 'sASLoginSecret' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.4 NAME 'sASLoginSecretKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret Key' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.5 NAME 'sASEncryptionType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Encryption Type' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.6 NAME 'sASLoginConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.7 NAME 'sASLoginConfigurationKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration Key' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.73 NAME 'sasDefaultLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.64 NAME 'sasAuthorizedLoginSequences' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.69 NAME 'sasAllowableSubjectNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.71 NAME 'sasLoginFailureDelay' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.72 NAME 'sasMethodVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1010 NAME 'sasUpdateLoginInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasOTPEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasOTPCounter' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1013 NAME 'sasOTPLookAheadWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1014 NAME 'sasOTPDigits' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1015 NAME 'sasOTPReSync' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1016 NAME 'sasUpdateLoginTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.6.4.1 NAME 'snmpGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.2 NAME 'snmpServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.6.4.3 NAME 'snmpTrapConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.4 NAME 'snmpTrapDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.5 NAME 'snmpTrapInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.6 NAME 'snmpTrapDisable' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.528 NAME 'ndapPartitionPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.529 NAME 'ndapClassPasswordMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.530 NAME 'ndapPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.537 NAME 'ndapPartitionLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.538 NAME 'ndapClassLoginMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.539 NAME 'ndapLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.1 NAME 'nspmPasswordKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.2 NAME 'nspmPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.3 NAME 'nspmDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.4 NAME 'nspmPasswordHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.5 NAME 'nspmAdministratorChangeCount' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.6 NAME 'nspmPasswordPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.7 NAME 'nspmPreviousDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.8 NAME 'nspmDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.16 NAME 'pwdChangedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.17 NAME 'pwdAccountLockedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.19 NAME 'pwdFailureTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.39.43.4.100 NAME 'nspmConfigurationOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.102 NAME 'nspmChangePasswordMessage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.103 NAME 'nspmPasswordHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.104 NAME 'nspmPasswordHistoryExpiration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.4 NAME 'pwdInHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.105 NAME 'nspmMinPasswordLifetime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.106 NAME 'nspmAdminsDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.107 NAME 'nspmPasswordACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 )",
+ "( 2.16.840.1.113719.1.39.43.4.200 NAME 'nspmMaximumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.201 NAME 'nspmMinUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.202 NAME 'nspmMaxUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.203 NAME 'nspmMinLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.204 NAME 'nspmMaxLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.205 NAME 'nspmNumericCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.206 NAME 'nspmNumericAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.207 NAME 'nspmNumericAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.208 NAME 'nspmMinNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.209 NAME 'nspmMaxNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.210 NAME 'nspmSpecialCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.211 NAME 'nspmSpecialAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.212 NAME 'nspmSpecialAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.213 NAME 'nspmMinSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.214 NAME 'nspmMaxSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.215 NAME 'nspmMaxRepeatedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.216 NAME 'nspmMaxConsecutiveCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.217 NAME 'nspmMinUniqueCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.218 NAME 'nspmDisallowedAttributeValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.219 NAME 'nspmExcludeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.220 NAME 'nspmCaseSensitive' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.221 NAME 'nspmPolicyPrecedence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.222 NAME 'nspmExtendedCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.223 NAME 'nspmExtendedAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.224 NAME 'nspmExtendedAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.225 NAME 'nspmMinExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.226 NAME 'nspmMaxExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.227 NAME 'nspmUpperAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.228 NAME 'nspmUpperAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.229 NAME 'nspmLowerAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.230 NAME 'nspmLowerAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.231 NAME 'nspmComplexityRules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.233 NAME 'nspmAD2K8Syntax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.234 NAME 'nspmAD2K8maxViolation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.235 NAME 'nspmXCharLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.236 NAME 'nspmXCharHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.237 NAME 'nspmUnicodeAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.238 NAME 'nspmNonAlphaCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.239 NAME 'nspmMinNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.240 NAME 'nspmMaxNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.241 NAME 'nspmGraceLoginHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.300 NAME 'nspmPolicyAgentContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.301 NAME 'nspmPolicyAgentNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.302 NAME 'nspmPolicyAgentWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.303 NAME 'nspmPolicyAgentSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.304 NAME 'nspmPolicyAgentLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.305 NAME 'nspmPolicyAgentAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.306 NAME 'nspmPolicyAgentHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 0.9.2342.19200300.100.1.55 NAME 'audio' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.1 NAME 'carLicense' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.241 NAME 'displayName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 1.3.6.1.4.1.250.1.57 NAME 'labeledUri' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.7 NAME 'ldapPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
+ "( 0.9.2342.19200300.100.1.21 NAME 'secretary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113730.3.1.40 NAME 'userSMIMECertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.12.4.1.0 NAME 'auditAEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:A Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.2.0 NAME 'auditBEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:B Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.3.0 NAME 'auditContents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Contents' )",
+ "( 2.16.840.1.113719.1.12.4.4.0 NAME 'auditType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Type' )",
+ "( 2.16.840.1.113719.1.12.4.5.0 NAME 'auditCurrentEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Current Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.6.0 NAME 'auditFileLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Audit:File Link' )",
+ "( 2.16.840.1.113719.1.12.4.7.0 NAME 'auditLinkList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Audit:Link List' )",
+ "( 2.16.840.1.113719.1.12.4.8.0 NAME 'auditPath' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Path' )",
+ "( 2.16.840.1.113719.1.12.4.9.0 NAME 'auditPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Policy' )",
+ "( 2.16.840.1.113719.1.38.4.1.1 NAME 'wANMANWANPolicy' SYNTAX 2.16.840.1.113719.1.1.5.1.13{64512} X-NDS_NAME 'WANMAN:WAN Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.2 NAME 'wANMANLANAreaMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'WANMAN:LAN Area Membership' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.3 NAME 'wANMANCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'WANMAN:Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.4 NAME 'wANMANDefaultCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'WANMAN:Default Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.135.4.30 NAME 'rbsAssignedRoles' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.31 NAME 'rbsContent' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.32 NAME 'rbsContentMembership' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.33 NAME 'rbsEntryPoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.34 NAME 'rbsMember' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "(
2.16.840.1.113719.1.135.4.35 NAME 'rbsOwnedCollections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )", + "( 2.16.840.1.113719.1.135.4.36 NAME 'rbsPath' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )", + "( 2.16.840.1.113719.1.135.4.37 NAME 'rbsParameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )", + "( 2.16.840.1.113719.1.135.4.38 NAME 'rbsTaskRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )", + "( 2.16.840.1.113719.1.135.4.39 NAME 'rbsTrusteeOf' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )", + "( 2.16.840.1.113719.1.135.4.40 NAME 'rbsType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '256' )", + "( 2.16.840.1.113719.1.135.4.41 NAME 'rbsURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.135.4.42 NAME 'rbsTaskTemplates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )", + "( 2.16.840.1.113719.1.135.4.43 NAME 'rbsTaskTemplatesURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.135.4.44 NAME 'rbsGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.135.4.45 NAME 'rbsPageMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )", + "( 2.16.840.1.113719.1.135.4.46 NAME 'rbsTargetObjectType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.135.4.47 NAME 'rbsContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.135.4.48 NAME 'rbsXMLInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.135.4.51 NAME 'rbsAssignedRoles2' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )", + "( 2.16.840.1.113719.1.135.4.52 NAME 'rbsOwnedCollections2' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )", + "( 2.16.840.1.113719.1.1.4.1.540 NAME 'prSyncPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_SERVER_READ '1' )", + "( 2.16.840.1.113719.1.1.4.1.541 NAME 'prSyncAttributes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )", + "( 2.16.840.1.113719.1.1.4.1.542 NAME 'dsEncryptedReplicationConfig' SYNTAX 2.16.840.1.113719.1.1.5.1.19 )", + "( 2.16.840.1.113719.1.1.4.1.543 NAME 'encryptionPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.1.4.1.544 NAME 'attrEncryptionRequiresSecure' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.1.4.1.545 NAME 'attrEncryptionDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.48.4.1.16 NAME 'ndspkiCRLFileName' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.17 NAME 'ndspkiStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.18 NAME 'ndspkiIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.19 NAME 'ndspkiNextIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.20 NAME 'ndspkiAttemptTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.21 NAME 'ndspkiTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.22 NAME 'ndspkiCRLMaxProcessingInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.23 NAME 'ndspkiCRLNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 
SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.24 NAME 'ndspkiDistributionPoints' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.25 NAME 'ndspkiCRLProcessData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.26 NAME 'ndspkiCRLConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.27 NAME 'ndspkiCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.28 NAME 'ndspkiCRLContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.29 NAME 'ndspkiIssuedCertContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.30 NAME 'ndspkiDistributionPointDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.31 NAME 'ndspkiCRLConfigurationDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.32 NAME 'ndspkiDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} )", + "( 2.5.4.38 NAME 'authorityRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiAuthorityRevocationList' X-NDS_PUBLIC_READ '1' )", + "( 2.5.4.39 NAME 'certificateRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiCertificateRevocationList' X-NDS_PUBLIC_READ '1' )", + "( 2.5.4.53 NAME 'deltaRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiDeltaRevocationList' X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.36 NAME 'ndspkiTrustedRootList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.37 NAME 'ndspkiSecurityRightsLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.48.4.1.38 NAME 'ndspkiKMOExport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.48.4.1.39 NAME 'ndspkiCRLECConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.40 NAME 'ndspkiCRLType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.41 NAME 'ndspkiCRLExtendValidity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.42 NAME 'ndspkiDefaultRSAKeySize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.43 NAME 'ndspkiDefaultECCurve' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.48.4.1.44 NAME 'ndspkiDefaultCertificateLife' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.7.4.1 NAME 'notfSMTPEmailHost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.7.4.2 NAME 'notfSMTPEmailFrom' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.7.4.3 NAME 'notfSMTPEmailUserName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.7.4.5 NAME 'notfMergeTemplateData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE 
X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.7.4.6 NAME 'notfMergeTemplateSubject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.1 NAME 'nsimRequiredQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.2 NAME 'nsimRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.3 NAME 'nsimNumberRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.4 NAME 'nsimMinResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.5 NAME 'nsimMaxResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.6 NAME 'nsimForgottenLoginConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.7 NAME 'nsimForgottenAction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.8 NAME 'nsimAssignments' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.9 NAME 'nsimChallengeSetDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.10 NAME 'nsimChallengeSetGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.11 NAME 'nsimPwdRuleEnforcement' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.39.44.4.12 NAME 'nsimHint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.39.44.4.13 NAME 'nsimPasswordReminder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.4 NAME 'sssProxyStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )", + "( 2.16.840.1.113719.1.266.4.5 NAME 'sssProxyStoreSecrets' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )", + "( 2.16.840.1.113719.1.266.4.6 NAME 'sssActiveServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )", + "( 2.16.840.1.113719.1.266.4.7 NAME 'sssCacheRefreshInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.8 NAME 'sssAdminList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )", + "( 2.16.840.1.113719.1.266.4.9 NAME 'sssAdminGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.10 NAME 'sssEnableReadTimestamps' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.11 NAME 'sssDisableMasterPasswords' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.12 NAME 'sssEnableAdminAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.266.4.13 NAME 'sssReadSecretPolicies' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )", + "( 2.16.840.1.113719.1.266.4.14 NAME 'sssServerPolicyOverrideDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.1.4.1.531 NAME 
'eDirCloneSource' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.1.4.1.532 NAME 'eDirCloneKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_HIDDEN '1' )", + "( 2.16.840.1.113719.1.1.4.1.533 NAME 'eDirCloneLock' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )", + "( 2.16.840.1.113719.1.1.4.711 NAME 'groupMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )", + "( 2.16.840.1.113719.1.1.4.712 NAME 'nestedConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )", + "( 2.16.840.1.113719.1.1.4.717 NAME 'xdasDSConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.1.4.718 NAME 'xdasConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.1.4.719 NAME 'xdasVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )", + "( 2.16.840.1.113719.1.347.4.79 NAME 'NAuditInstrumentation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.347.4.2 NAME 'NAuditLoggingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )", + "( 2.16.840.1.113719.1.1.4.724 NAME 'cefConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )", + "( 2.16.840.1.113719.1.1.4.725 NAME 'cefVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )" + ], + "createTimestamp": [], + "dITContentRules": [], + "dITStructureRules": [], + "ldapSyntaxes": [ + "( 1.3.6.1.4.1.1466.115.121.1.1 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.2 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.3 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.4 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.5 X-NDS_SYNTAX '21' )", + "( 1.3.6.1.4.1.1466.115.121.1.6 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.7 X-NDS_SYNTAX '7' )", + "( 2.16.840.1.113719.1.1.5.1.6 X-NDS_SYNTAX '6' )", + "( 1.3.6.1.4.1.1466.115.121.1.8 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.9 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.10 X-NDS_SYNTAX '9' )", + "( 2.16.840.1.113719.1.1.5.1.22 X-NDS_SYNTAX '22' )", + "( 1.3.6.1.4.1.1466.115.121.1.11 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SYNTAX '1' )", + "( 1.3.6.1.4.1.1466.115.121.1.13 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.14 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.15 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.16 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.17 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.18 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.19 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.20 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.21 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.22 X-NDS_SYNTAX '11' )", + "( 1.3.6.1.4.1.1466.115.121.1.23 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.24 X-NDS_SYNTAX '24' )", + "( 1.3.6.1.4.1.1466.115.121.1.25 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.26 X-NDS_SYNTAX '2' )", + "( 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_SYNTAX '8' )", + "( 1.3.6.1.4.1.1466.115.121.1.28 X-NDS_SYNTAX '9' )", + "( 1.2.840.113556.1.4.906 X-NDS_SYNTAX '29' )", + "( 1.3.6.1.4.1.1466.115.121.1.54 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.56 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.57 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.29 X-NDS_SYNTAX '9' )", + "( 
1.3.6.1.4.1.1466.115.121.1.30 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.31 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.32 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.33 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.55 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.34 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.35 X-NDS_SYNTAX '3' )", + "( 2.16.840.1.113719.1.1.5.1.19 X-NDS_SYNTAX '19' )", + "( 1.3.6.1.4.1.1466.115.121.1.36 X-NDS_SYNTAX '5' )", + "( 2.16.840.1.113719.1.1.5.1.17 X-NDS_SYNTAX '17' )", + "( 1.3.6.1.4.1.1466.115.121.1.37 X-NDS_SYNTAX '3' )", + "( 2.16.840.1.113719.1.1.5.1.13 X-NDS_SYNTAX '13' )", + "( 1.3.6.1.4.1.1466.115.121.1.40 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_SYNTAX '20' )", + "( 1.3.6.1.4.1.1466.115.121.1.39 X-NDS_SYNTAX '3' )", + "( 1.3.6.1.4.1.1466.115.121.1.41 X-NDS_SYNTAX '18' )", + "( 1.3.6.1.4.1.1466.115.121.1.43 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.44 X-NDS_SYNTAX '4' )", + "( 1.3.6.1.4.1.1466.115.121.1.42 X-NDS_SYNTAX '9' )", + "( 2.16.840.1.113719.1.1.5.1.16 X-NDS_SYNTAX '16' )", + "( 1.3.6.1.4.1.1466.115.121.1.58 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.45 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.46 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.47 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.48 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.49 X-NDS_SYNTAX '9' )", + "( 2.16.840.1.113719.1.1.5.1.12 X-NDS_SYNTAX '12' )", + "( 2.16.840.1.113719.1.1.5.1.23 X-NDS_SYNTAX '23' )", + "( 2.16.840.1.113719.1.1.5.1.15 X-NDS_SYNTAX '15' )", + "( 2.16.840.1.113719.1.1.5.1.14 X-NDS_SYNTAX '14' )", + "( 1.3.6.1.4.1.1466.115.121.1.50 X-NDS_SYNTAX '10' )", + "( 1.3.6.1.4.1.1466.115.121.1.51 X-NDS_SYNTAX '9' )", + "( 1.3.6.1.4.1.1466.115.121.1.52 X-NDS_SYNTAX '9' )", + "( 2.16.840.1.113719.1.1.5.1.25 X-NDS_SYNTAX '25' )", + "( 1.3.6.1.4.1.1466.115.121.1.53 X-NDS_SYNTAX '9' )", + "( 2.16.840.1.113719.1.1.5.1.26 X-NDS_SYNTAX '26' )", + "( 2.16.840.1.113719.1.1.5.1.27 X-NDS_SYNTAX '27' )" + ], + "matchingRuleUse": [], + "matchingRules": [], + "modifyTimestamp": [ + "20190831135835Z" + ], + "nameForms": [], + "objectClass": [ + "top", + "subschema" + ], + "objectClasses": [ + "( 2.5.6.0 NAME 'Top' STRUCTURAL MUST objectClass MAY ( cAPublicKey $ cAPrivateKey $ certificateValidityInterval $ authorityRevocation $ lastReferencedTime $ equivalentToMe $ ACL $ backLink $ binderyProperty $ Obituary $ Reference $ revision $ ndsCrossCertificatePair $ certificateRevocation $ usedBy $ GUID $ otherGUID $ DirXML-Associations $ creatorsName $ modifiersName $ objectVersion $ auxClassCompatibility $ unknownBaseClass $ unknownAuxiliaryClass $ masvProposedLabel $ masvDefaultRange $ masvAuthorizedRange $ auditFileLink $ rbsAssignedRoles $ rbsOwnedCollections $ rbsAssignedRoles2 $ rbsOwnedCollections2 ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '16#subtree#[Creator]#[Entry Rights]' )", + "( 1.3.6.1.4.1.42.2.27.1.2.1 NAME 'aliasObject' SUP Top STRUCTURAL MUST aliasedObjectName X-NDS_NAME 'Alias' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.2 NAME 'Country' SUP Top STRUCTURAL MUST c MAY ( description $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING 'c' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'domain' ) X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.3 NAME 'Locality' SUP Top STRUCTURAL MAY ( description $ l $ seeAlso $ st $ street $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING ( 'l' 'st' ) 
X-NDS_CONTAINMENT ( 'Country' 'organizationalUnit' 'Locality' 'Organization' 'domain' ) X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.4 NAME 'Organization' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST o MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'o' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'domain' ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )", + "( 2.5.6.5 NAME 'organizationalUnit' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST ou MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'ou' X-NDS_CONTAINMENT ( 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Unit' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )", + "( 2.5.6.8 NAME 'organizationalRole' SUP Top STRUCTURAL MUST cn MAY ( description $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ roleOccupant $ seeAlso $ st $ street $ telephoneNumber $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Role' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.9 NAME ( 'groupOfNames' 'group' 'groupOfUniqueNames' ) SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ owner $ seeAlso $ groupID $ fullName $ eMailAddress $ mailboxLocation $ mailboxID $ Profile $ profileMembership $ loginScript $ businessCategory $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.6 NAME 'Person' SUP ndsLoginProperties STRUCTURAL MUST ( cn $ sn ) MAY ( description $ seeAlso $ telephoneNumber $ fullName $ givenName $ 
initials $ generationQualifier $ uid $ assistant $ assistantPhone $ city $ st $ company $ co $ directReports $ manager $ mailstop $ mobile $ personalTitle $ pager $ workforceID $ instantMessagingID $ preferredName $ photo $ jobCode $ siteLocation $ employeeStatus $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ otherPhoneNumber $ managerWorkforceID $ roomNumber $ jackNumber $ departmentNumber $ vehicleInformation $ accessCardNumber $ isManager $ userPassword ) X-NDS_NAMING ( 'cn' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.5.6.7 NAME 'organizationalPerson' SUP Person STRUCTURAL MAY ( facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ mailboxLocation $ mailboxID $ uid $ mail $ employeeNumber $ destinationIndicator $ internationaliSDNNumber $ preferredDeliveryMethod $ registeredAddress $ teletexTerminalIdentifier $ telexNumber $ x121Address $ businessCategory $ roomNumber $ x500UniqueIdentifier ) X-NDS_NAMING ( 'cn' 'ou' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Person' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' SUP organizationalPerson STRUCTURAL MAY ( groupMembership $ ndsHomeDirectory $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ printJobConfiguration $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ messageServer $ Language $ ndsUID $ lockedByIntruder $ serverHolds $ lastLoginTime $ typeCreatorMap $ higherPrivileges $ printerControl $ securityFlags $ profileMembership $ Timezone $ sASServiceDN $ sASSecretStore $ sASSecretStoreKey $ sASSecretStoreData $ sASPKIStoreKeys $ userCertificate $ nDSPKIUserCertificateInfo $ nDSPKIKeystore $ rADIUSActiveConnections $ rADIUSAttributeLists $ rADIUSConcurrentLimit $ rADIUSConnectionHistory $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSPassword $ rADIUSServiceList $ audio $ businessCategory $ carLicense $ departmentNumber $ employeeNumber $ employeeType $ displayName $ givenName $ homePhone $ homePostalAddress $ initials $ jpegPhoto $ labeledUri $ mail $ manager $ mobile $ o $ pager $ ldapPhoto $ preferredLanguage $ roomNumber $ secretary $ uid $ userSMIMECertificate $ x500UniqueIdentifier $ userPKCS12 $ sssProxyStoreKey $ sssProxyStoreSecrets $ sssServerPolicyOverrideDN ) X-NDS_NAME 'User' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#loginScript' '1#subtree#[Root Template]#[Entry Rights]' '2#entry#[Public]#messageServer' '2#entry#[Root Template]#groupMembership' '6#entry#[Self]#printJobConfiguration' '2#entry#[Root Template]#networkAddress') )", + "( 2.5.6.14 NAME 'Device' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ networkAddress $ ou $ o $ owner $ seeAlso $ serialNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) 
X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.4 NAME 'Computer' SUP Device STRUCTURAL MAY ( operator $ server $ status ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.17 NAME 'Printer' SUP Device STRUCTURAL MAY ( Cartridge $ printerConfiguration $ defaultQueue $ hostDevice $ printServer $ Memory $ networkAddressRestriction $ notify $ operator $ pageDescriptionLanguage $ queue $ status $ supportedTypefaces ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.21 NAME 'Resource' SUP Top ABSTRACT MUST cn MAY ( description $ hostResourceName $ l $ ou $ o $ seeAlso $ Uses ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.20 NAME 'Queue' SUP Resource STRUCTURAL MUST queueDirectory MAY ( Device $ operator $ server $ User $ networkAddress $ Volume $ hostServer ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )", + "( 2.16.840.1.113719.1.1.6.1.3 NAME 'binderyQueue' SUP Queue STRUCTURAL MUST binderyType X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_NAME 'Bindery Queue' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )", + "( 2.16.840.1.113719.1.1.6.1.26 NAME 'Volume' SUP Resource STRUCTURAL MUST hostServer MAY status X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Root Template]#hostResourceName' '2#entry#[Root Template]#hostServer') )", + "( 2.16.840.1.113719.1.1.6.1.7 NAME 'directoryMap' SUP Resource STRUCTURAL MUST hostServer MAY path X-NDS_NAME 'Directory Map' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.19 NAME 'Profile' SUP Top STRUCTURAL MUST ( cn $ loginScript ) MAY ( description $ l $ ou $ o $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.22 NAME 'Server' SUP Top ABSTRACT MUST cn MAY ( description $ hostDevice $ l $ ou $ o $ privateKey $ publicKey $ Resource $ seeAlso $ status $ User $ Version $ networkAddress $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ fullName $ securityEquals $ securityFlags $ Timezone $ ndapClassPasswordMgmt $ ndapClassLoginMgmt ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Public]#networkAddress' '16#subtree#[Self]#[Entry Rights]') )", + "( 2.16.840.1.113719.1.1.6.1.10 NAME 'ncpServer' SUP Server STRUCTURAL MAY ( operator $ supportedServices $ messagingServer $ dsRevision $ permanentConfigParms $ ndsPredicateStatsDN $ languageId $ indexDefinition $ CachedAttrsOnExtRefs $ NCPKeyMaterialName $ NDSRightsToMonitor $ ldapServerDN $ httpServerDN $ emboxConfig $ sASServiceDN $ cACertificate $ cAECCertificate $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKICertificateChain $ nDSPKIParentCADN $ nDSPKISDKeyID $ nDSPKISDKeyStruct $ snmpGroupDN $ wANMANWANPolicy $ wANMANLANAreaMembership $ wANMANCost $ wANMANDefaultCost $ encryptionPolicyDN $ eDirCloneSource $ eDirCloneLock $ xdasDSConfiguration $ xdasConfiguration $ xdasVersion $ NAuditLoggingServer $ NAuditInstrumentation $ cefConfiguration $ cefVersion ) X-NDS_NAME 'NCP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' 
X-NDS_ACL_TEMPLATES '2#entry#[Public]#messagingServer' )", + "( 2.16.840.1.113719.1.1.6.1.18 NAME 'printServer' SUP Server STRUCTURAL MAY ( operator $ printer $ sAPName ) X-NDS_NAME 'Print Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )", + "( 2.16.840.1.113719.1.1.6.1.31 NAME 'CommExec' SUP Server STRUCTURAL MAY networkAddressRestriction X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.2 NAME 'binderyObject' SUP Top STRUCTURAL MUST ( binderyObjectRestriction $ binderyType $ cn ) X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Bindery Object' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.15 NAME 'Partition' AUXILIARY MAY ( Convergence $ partitionCreationTime $ Replica $ inheritedACL $ lowConvergenceSyncInterval $ receivedUpTo $ synchronizedUpTo $ authorityRevocation $ certificateRevocation $ cAPrivateKey $ cAPublicKey $ ndsCrossCertificatePair $ lowConvergenceResetTime $ highConvergenceSyncInterval $ partitionControl $ replicaUpTo $ partitionStatus $ transitiveVector $ purgeVector $ synchronizationTolerance $ obituaryNotify $ localReceivedUpTo $ federationControl $ syncPanePoint $ syncWindowVector $ EBAPartitionConfiguration $ authoritative $ allowAliasToAncestor $ sASSecurityDN $ masvLabel $ ndapPartitionPasswordMgmt $ ndapPartitionLoginMgmt $ prSyncPolicyDN $ dsEncryptedReplicationConfig ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.0 NAME 'aFPServer' SUP Server STRUCTURAL MAY ( serialNumber $ supportedConnections ) X-NDS_NAME 'AFP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.27 NAME 'messagingServer' SUP Server STRUCTURAL MAY ( messagingDatabaseLocation $ messageRoutingGroup $ Postmaster $ supportedServices $ messagingServerType $ supportedGateway ) X-NDS_NAME 'Messaging Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#status' '2#entry#[Public]#messagingServerType' '2#entry#[Public]#messagingDatabaseLocation') )", + "( 2.16.840.1.113719.1.1.6.1.28 NAME 'messageRoutingGroup' SUP groupOfNames STRUCTURAL X-NDS_NAME 'Message Routing Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]') )", + "( 2.16.840.1.113719.1.1.6.1.29 NAME 'externalEntity' SUP Top STRUCTURAL MUST cn MAY ( description $ seeAlso $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ externalName $ mailboxLocation $ mailboxID ) X-NDS_NAMING ( 'cn' 'ou' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'External Entity' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#externalName' )", + "( 2.16.840.1.113719.1.1.6.1.30 NAME 'List' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ eMailAddress $ mailboxLocation $ mailboxID $ owner $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Root Template]#member' )", + "( 2.16.840.1.113719.1.1.6.1.32 NAME 'treeRoot' SUP Top STRUCTURAL MUST T MAY ( 
EBATreeConfiguration $ sssActiveServerList ) X-NDS_NAMING 'T' X-NDS_NAME 'Tree Root' X-NDS_NONREMOVABLE '1' )", + "( 0.9.2342.19200300.100.4.13 NAME 'domain' SUP ( Top $ ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST dc MAY ( searchGuide $ o $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ associatedName $ description $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'dc' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NONREMOVABLE '1' )", + "( 1.3.6.1.4.1.1466.344 NAME 'dcObject' AUXILIARY MUST dc X-NDS_NAMING 'dc' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.33 NAME 'ndsLoginProperties' SUP Top ABSTRACT MAY ( groupMembership $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ Language $ lockedByIntruder $ serverHolds $ lastLoginTime $ higherPrivileges $ securityFlags $ profileMembership $ Timezone $ loginActivationTime $ UTF8LoginScript $ loginScriptCharset $ sASNDSPasswordWindow $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAllowableSubjectNames $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPCounter $ sasOTPDigits $ sasOTPReSync $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordKey $ nspmPassword $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ nsimHint $ nsimPasswordReminder $ userPassword ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.141.6.1 NAME 'federationBoundary' AUXILIARY MUST federationBoundaryType MAY ( federationControl $ federationDNSName $ federationSearchPath ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.34 NAME 'ndsContainerLoginProperties' SUP Top ABSTRACT MAY ( loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPDigits $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordPolicyDN ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.27.6.3 NAME 'ndsPredicateStats' SUP Top STRUCTURAL MUST ( cn $ ndsPredicateState $ ndsPredicateFlush ) MAY ( ndsPredicate $ ndsPredicateTimeout $ ndsPredicateUseValues ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' 
X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.400.1 NAME 'edirSchemaVersion' SUP Top ABSTRACT MAY edirSchemaFlagVersion X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )", + "( 2.16.840.1.113719.1.1.6.1.47 NAME 'immediateSuperiorReference' AUXILIARY MAY ref X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.27.6.1 NAME 'ldapServer' SUP Top STRUCTURAL MUST cn MAY ( ldapHostServer $ ldapGroupDN $ ldapTraceLevel $ ldapServerBindLimit $ ldapServerIdleTimeout $ lDAPUDPPort $ lDAPSearchSizeLimit $ lDAPSearchTimeLimit $ lDAPLogLevel $ lDAPLogFilename $ lDAPBackupLogFilename $ lDAPLogSizeLimit $ Version $ searchSizeLimit $ searchTimeLimit $ ldapEnableTCP $ ldapTCPPort $ ldapEnableSSL $ ldapSSLPort $ ldapKeyMaterialName $ filteredReplicaUsage $ extensionInfo $ nonStdClientSchemaCompatMode $ sslEnableMutualAuthentication $ ldapEnablePSearch $ ldapMaximumPSearchOperations $ ldapIgnorePSearchLimitsForEvents $ ldapTLSTrustedRootContainer $ ldapEnableMonitorEvents $ ldapMaximumMonitorEventsLoad $ ldapTLSRequired $ ldapTLSVerifyClientCertificate $ ldapConfigVersion $ ldapDerefAlias $ ldapNonStdAllUserAttrsMode $ ldapBindRestrictions $ ldapDefaultReferralBehavior $ ldapReferral $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ ldapLBURPNumWriterThreads $ ldapInterfaces $ ldapChainSecureRequired $ ldapStdCompliance $ ldapDerefAliasOnAuth $ ldapGeneralizedTime $ ldapPermissiveModify $ ldapSSLConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Server' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.27.6.2 NAME 'ldapGroup' SUP Top STRUCTURAL MUST cn MAY ( ldapReferral $ ldapServerList $ ldapAllowClearTextPassword $ ldapAnonymousIdentity $ lDAPSuffix $ ldapAttributeMap $ ldapClassMap $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ transitionGroupDN $ ldapAttributeList $ ldapClassList $ ldapConfigVersion $ Version $ ldapDefaultReferralBehavior $ ldapTransitionBackLink $ ldapSSLConfig $ referralIncludeFilter $ referralExcludeFilter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Group' X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.22 NAME 'pkiCA' AUXILIARY MAY ( cACertificate $ certificateRevocationList $ authorityRevocationList $ crossCertificatePair $ attributeCertificate $ publicKey $ privateKey $ networkAddress $ loginTime $ lastLoginTime $ cAECCertificate $ crossCertificatePairEC ) X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.21 NAME 'pkiUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.15 NAME 'strongAuthenticationUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.11 NAME 'applicationProcess' SUP Top STRUCTURAL MUST cn MAY ( seeAlso $ ou $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )", + "( 2.5.6.12 NAME 'applicationEntity' SUP Top STRUCTURAL MUST ( presentationAddress $ cn ) MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )", + "( 2.5.6.13 NAME 'dSA' SUP applicationEntity STRUCTURAL MAY knowledgeInformation X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )", + "( 2.5.6.16 NAME 'certificationAuthority' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY crossCertificatePair X-NDS_NOT_CONTAINER '1' )", + "( 
2.5.6.18 NAME 'userSecurityInformation' AUXILIARY MAY supportedAlgorithms X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.20 NAME 'dmd' SUP ndsLoginProperties AUXILIARY MUST dmdName MAY ( searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ description $ userPassword ) X-NDS_NOT_CONTAINER '1' )", + "( 2.5.6.16.2 NAME 'certificationAuthority-V2' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY ( crossCertificatePair $ deltaRevocationList ) X-NDS_NAME 'certificationAuthorityVer2' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.3.6.1 NAME 'httpServer' SUP Top STRUCTURAL MUST cn MAY ( httpHostServerDN $ httpThreadsPerCPU $ httpIOBufferSize $ httpRequestTimeout $ httpKeepAliveRequestTimeout $ httpSessionTimeout $ httpKeyMaterialObject $ httpTraceLevel $ httpAuthRequiresTLS $ httpDefaultClearPort $ httpDefaultTLSPort $ httpBindRestrictions ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.64.6.1.1 NAME 'Template' SUP Top STRUCTURAL MUST cn MAY ( trusteesOfNewObject $ newObjectSDSRights $ newObjectSFSRights $ setupScript $ runSetupScript $ membersOfTemplate $ volumeSpaceRestrictions $ setPasswordAfterCreate $ homeDirectoryRights $ accountBalance $ allowUnlimitedCredit $ description $ eMailAddress $ facsimileTelephoneNumber $ groupMembership $ higherPrivileges $ ndsHomeDirectory $ l $ Language $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginMaximumSimultaneous $ loginScript $ mailboxID $ mailboxLocation $ member $ messageServer $ minimumAccountBalance $ networkAddressRestriction $ newObjectSSelfRights $ ou $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ Profile $ st $ street $ securityEquals $ securityFlags $ seeAlso $ telephoneNumber $ title $ assistant $ assistantPhone $ city $ company $ co $ manager $ managerWorkforceID $ mailstop $ siteLocation $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ departmentNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.8.6.1 NAME 'homeInfo' AUXILIARY MAY ( homeCity $ homeEmailAddress $ homeFax $ homePhone $ homeState $ homePostalAddress $ homeZipCode $ personalMobile $ spouse $ children ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.8.6.2 NAME 'contingentWorker' AUXILIARY MAY ( vendorName $ vendorAddress $ vendorPhoneNumber ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.1.6.1.45 NAME 'dynamicGroup' SUP ( groupOfNames $ ndsLoginProperties ) STRUCTURAL MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.1.6.1.46 NAME 'dynamicGroupAux' SUP ( groupOfNames $ ndsLoginProperties ) AUXILIARY MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.6.1.1 NAME 'sASSecurity' SUP Top STRUCTURAL 
MUST cn MAY ( nDSPKITreeCADN $ masvPolicyDN $ sASLoginPolicyDN $ sASLoginMethodContainerDN $ sasPostLoginMethodContainerDN $ nspmPolicyAgentContainerDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Organization' 'domain' ) X-NDS_NAME 'SAS:Security' )", + "( 2.16.840.1.113719.1.39.6.1.2 NAME 'sASService' SUP Resource STRUCTURAL MAY ( hostServer $ privateKey $ publicKey $ allowUnlimitedCredit $ fullName $ lastLoginTime $ lockedByIntruder $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginTime $ networkAddress $ networkAddressRestriction $ notify $ operator $ owner $ path $ securityEquals $ securityFlags $ status $ Version $ nDSPKIKeyMaterialDN $ ndspkiKMOExport ) X-NDS_NAMING 'cn' X-NDS_NAME 'SAS:Service' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.1 NAME 'nDSPKICertificateAuthority' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKICertificateChainEC $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ nDSPKIPublicKeyEC $ nDSPKIPrivateKeyEC $ nDSPKIPublicKeyCertificateEC $ crossCertificatePairEC $ nDSPKISuiteBMode $ cACertificate $ cAECCertificate $ ndspkiCRLContainerDN $ ndspkiIssuedCertContainerDN $ ndspkiCRLConfigurationDNList $ ndspkiCRLECConfigurationDNList $ ndspkiSecurityRightsLevel $ ndspkiDefaultRSAKeySize $ ndspkiDefaultECCurve $ ndspkiDefaultCertificateLife ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:Certificate Authority' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.2 NAME 'nDSPKIKeyMaterial' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIKeyFile $ nDSPKIPrivateKey $ nDSPKIPublicKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKISubjectName $ nDSPKIGivenName $ ndspkiAdditionalRoots $ nDSPKINotBefore $ nDSPKINotAfter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'NDSPKI:Key Material' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.3 NAME 'nDSPKITrustedRoot' SUP Top STRUCTURAL MUST cn MAY ndspkiTrustedRootList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NAME 'NDSPKI:Trusted Root' )", + "( 2.16.840.1.113719.1.48.6.1.4 NAME 'nDSPKITrustedRootObject' SUP Top STRUCTURAL MUST ( cn $ nDSPKITrustedRootCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKITrustedRoot' X-NDS_NAME 'NDSPKI:Trusted Root Object' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.101 NAME 'nDSPKISDKeyAccessPartition' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:SD Key Access Partition' )", + "( 2.16.840.1.113719.1.48.6.1.102 NAME 'nDSPKISDKeyList' SUP Top STRUCTURAL MUST cn MAY ( nDSPKISDKeyServerDN $ nDSPKISDKeyStruct $ nDSPKISDKeyCert ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKISDKeyAccessPartition' X-NDS_NAME 'NDSPKI:SD Key List' )", + "( 2.16.840.1.113719.1.31.6.2.1 NAME 'mASVSecurityPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ masvDomainPolicy $ masvPolicyUpdate $ masvClearanceNames $ masvLabelNames $ masvLabelSecrecyLevelNames $ masvLabelSecrecyCategoryNames $ masvLabelIntegrityLevelNames $ masvLabelIntegrityCategoryNames $ masvNDSAttributeLabels ) X-NDS_NAMING 'cn' 
X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'MASV:Security Policy' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.42.2.0.1 NAME 'sASLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'SAS:Login Method Container' )", + "( 2.16.840.1.113719.1.39.42.2.0.4 NAME 'sASLoginPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ privateKey $ publicKey $ sASAllowNDSPasswordWindow $ sASPolicyCredentials $ sASPolicyMethods $ sASPolicyObjectVersion $ sASPolicyServiceSubtypes $ sASPolicyServices $ sASPolicyUsers $ sASLoginSequence $ sASLoginPolicyUpdate $ sasNMASProductOptions $ sasPolicyMethods $ sasPolicyServices $ sasPolicyUsers $ sasAllowNDSPasswordWindow $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAuditConfiguration $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPLookAheadWindow $ sasOTPDigits $ sasUpdateLoginTimeInterval $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'SAS:Login Policy' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.42.2.0.7 NAME 'sASNMASBaseLoginMethod' SUP Top ABSTRACT MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasCertificateSearchContainers $ sasNMASMethodConfigData $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASLoginMethodContainer' X-NDS_NAME 'SAS:NMAS Base Login Method' )", + "( 2.16.840.1.113719.1.39.42.2.0.8 NAME 'sASNMASLoginMethod' SUP sASNMASBaseLoginMethod STRUCTURAL X-NDS_NAME 'SAS:NMAS Login Method' )", + "( 2.16.840.1.113719.1.39.42.2.0.9 NAME 'rADIUSDialAccessSystem' SUP Top STRUCTURAL MUST cn MAY ( publicKey $ privateKey $ rADIUSAgedInterval $ rADIUSClient $ rADIUSCommonNameResolution $ rADIUSConcurrentLimit $ rADIUSDASVersion $ rADIUSEnableCommonNameLogin $ rADIUSEnableDialAccess $ rADIUSInterimAcctingTimeout $ rADIUSLookupContexts $ rADIUSMaxDASHistoryRecord $ rADIUSMaximumHistoryRecord $ rADIUSPasswordPolicy $ rADIUSPrivateKey $ rADIUSProxyContext $ rADIUSProxyDomain $ rADIUSProxyTarget $ rADIUSPublicKey $ sASLoginConfiguration $ sASLoginConfigurationKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Dial Access System' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.42.2.0.10 NAME 'rADIUSProfile' SUP Top STRUCTURAL MUST 
cn MAY rADIUSAttributeList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Profile' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.42.2.0.11 NAME 'sasPostLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )", + "( 2.16.840.1.113719.1.39.42.2.0.12 NAME 'sasPostLoginMethod' SUP Top STRUCTURAL MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sasPostLoginMethodContainer' )", + "( 2.16.840.1.113719.1.6.6.1 NAME 'snmpGroup' SUP Top STRUCTURAL MUST cn MAY ( Version $ snmpServerList $ snmpTrapDisable $ snmpTrapInterval $ snmpTrapDescription $ snmpTrapConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.43.6.2 NAME 'nspmPasswordPolicyContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) )", + "( 2.16.840.1.113719.1.39.43.6.3 NAME 'nspmPolicyAgent' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyAgentNetWare $ nspmPolicyAgentWINNT $ nspmPolicyAgentSolaris $ nspmPolicyAgentLinux $ nspmPolicyAgentAIX $ nspmPolicyAgentHPUX ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nspmPasswordPolicyContainer' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.43.6.1 NAME 'nspmPasswordPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyPrecedence $ nspmConfigurationOptions $ nspmChangePasswordMessage $ passwordExpirationInterval $ loginGraceLimit $ nspmMinPasswordLifetime $ passwordUniqueRequired $ nspmPasswordHistoryLimit $ nspmPasswordHistoryExpiration $ passwordAllowChange $ passwordRequired $ passwordMinimumLength $ nspmMaximumLength $ nspmCaseSensitive $ nspmMinUpperCaseCharacters $ nspmMaxUpperCaseCharacters $ nspmMinLowerCaseCharacters $ nspmMaxLowerCaseCharacters $ nspmNumericCharactersAllowed $ nspmNumericAsFirstCharacter $ nspmNumericAsLastCharacter $ nspmMinNumericCharacters $ nspmMaxNumericCharacters $ nspmSpecialCharactersAllowed $ nspmSpecialAsFirstCharacter $ nspmSpecialAsLastCharacter $ nspmMinSpecialCharacters $ nspmMaxSpecialCharacters $ nspmMaxRepeatedCharacters $ nspmMaxConsecutiveCharacters $ nspmMinUniqueCharacters $ 
nspmDisallowedAttributeValues $ nspmExcludeList $ nspmExtendedCharactersAllowed $ nspmExtendedAsFirstCharacter $ nspmExtendedAsLastCharacter $ nspmMinExtendedCharacters $ nspmMaxExtendedCharacters $ nspmUpperAsFirstCharacter $ nspmUpperAsLastCharacter $ nspmLowerAsFirstCharacter $ nspmLowerAsLastCharacter $ nspmComplexityRules $ nspmAD2K8Syntax $ nspmAD2K8maxViolation $ nspmXCharLimit $ nspmXCharHistoryLimit $ nspmUnicodeAllowed $ nspmNonAlphaCharactersAllowed $ nspmMinNonAlphaCharacters $ nspmMaxNonAlphaCharacters $ pwdInHistory $ nspmAdminsDoNotExpirePassword $ nspmPasswordACL $ nsimChallengeSetDN $ nsimForgottenAction $ nsimForgottenLoginConfig $ nsimAssignments $ nsimChallengeSetGUID $ nsimPwdRuleEnforcement ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'domain' 'Locality' 'Organization' 'organizationalUnit' 'Country' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.43.6.4 NAME 'nspmPasswordAux' AUXILIARY MAY ( publicKey $ privateKey $ loginGraceLimit $ loginGraceRemaining $ passwordExpirationTime $ passwordRequired $ nspmPasswordKey $ nspmPassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.12.6.1.0 NAME 'auditFileObject' SUP Top STRUCTURAL MUST ( cn $ auditPolicy $ auditContents ) MAY ( description $ auditPath $ auditLinkList $ auditType $ auditCurrentEncryptionKey $ auditAEncryptionKey $ auditBEncryptionKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'treeRoot' 'domain' ) X-NDS_NAME 'Audit:File Object' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.38.6.1.4 NAME 'wANMANLANArea' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ o $ ou $ owner $ seeAlso $ wANMANWANPolicy $ wANMANCost $ wANMANDefaultCost ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NAME 'WANMAN:LAN Area' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.37.1 NAME 'rbsCollection' SUP Top STRUCTURAL MUST cn MAY ( owner $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )", + "( 2.16.840.1.113719.1.135.6.30.1 NAME 'rbsExternalScope' SUP Top ABSTRACT MUST cn MAY ( rbsURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.31.1 NAME 'rbsModule' SUP Top STRUCTURAL MUST cn MAY ( rbsURL $ rbsPath $ rbsType $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )", + "( 2.16.840.1.113719.1.135.6.32.1 NAME 'rbsRole' SUP Top STRUCTURAL MUST cn MAY ( rbsContent $ rbsMember $ rbsTrusteeOf $ rbsGALabel $ rbsParameters $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )", + "( 2.16.840.1.113719.1.135.6.33.1 NAME 'rbsTask' SUP Top STRUCTURAL MUST cn MAY ( rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ rbsTaskTemplates $ rbsTaskTemplatesURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.34.1 NAME 'rbsBook' SUP rbsTask STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.35.1 NAME 'rbsScope' SUP groupOfNames STRUCTURAL MAY ( 
rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.45.1 NAME 'rbsCollection2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsParameters $ owner $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )", + "( 2.16.840.1.113719.1.135.6.38.1 NAME 'rbsExternalScope2' SUP Top ABSTRACT MUST cn MAY ( rbsXMLInfo $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.39.1 NAME 'rbsModule2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsPath $ rbsType $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )", + "( 2.16.840.1.113719.1.135.6.40.1 NAME 'rbsRole2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContent $ rbsMember $ rbsTrusteeOf $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )", + "( 2.16.840.1.113719.1.135.6.41.1 NAME 'rbsTask2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule2' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.42.1 NAME 'rbsBook2' SUP rbsTask2 STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.135.6.43.1 NAME 'rbsScope2' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole2' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.1.6.1.49 NAME 'prSyncPolicy' SUP Top STRUCTURAL MUST cn MAY prSyncAttributes X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.1.6.1.50 NAME 'encryptionPolicy' SUP Top STRUCTURAL MUST cn MAY ( attrEncryptionDefinition $ attrEncryptionRequiresSecure ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.5 NAME 'ndspkiContainer' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'ndspkiContainer' 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'nDSPKITrustedRoot' ) )", + "( 2.16.840.1.113719.1.48.6.1.6 NAME 'ndspkiCertificate' SUP Top STRUCTURAL MUST ( cn $ userCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'ndspkiContainer' 'nDSPKITrustedRoot' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.48.6.1.7 NAME 'ndspkiCRLConfiguration' SUP Top STRUCTURAL MUST cn MAY ( ndspkiCRLFileName $ ndspkiDirectory $ ndspkiStatus $ ndspkiIssueTime $ ndspkiNextIssueTime $ ndspkiAttemptTime $ ndspkiTimeInterval $ ndspkiCRLMaxProcessingInterval $ ndspkiCRLNumber $ ndspkiDistributionPoints $ ndspkiDistributionPointDN $ ndspkiCADN $ ndspkiCRLProcessData $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ cACertificate $ hostServer $ ndspkiCRLType $ ndspkiCRLExtendValidity ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'ndspkiContainer' )", + "( 2.5.6.19 NAME 'cRLDistributionPoint' SUP Top STRUCTURAL MUST cn MAY ( authorityRevocationList $ authorityRevocationList $ cACertificate $ certificateRevocationList $ certificateRevocationList $ crossCertificatePair $ 
deltaRevocationList $ deltaRevocationList $ ndspkiCRLConfigurationDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'sASSecurity' 'domain' 'ndspkiCRLConfiguration' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.7.6.1 NAME 'notfTemplateCollection' SUP Top STRUCTURAL MUST cn MAY ( notfSMTPEmailHost $ notfSMTPEmailFrom $ notfSMTPEmailUserName $ sASSecretStore ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )", + "( 2.16.840.1.113719.1.7.6.2 NAME 'notfMergeTemplate' SUP Top STRUCTURAL MUST cn MAY ( notfMergeTemplateData $ notfMergeTemplateSubject ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'notfTemplateCollection' X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.39.44.6.1 NAME 'nsimChallengeSet' SUP Top STRUCTURAL MUST cn MAY ( description $ nsimRequiredQuestions $ nsimRandomQuestions $ nsimNumberRandomQuestions $ nsimMinResponseLength $ nsimMaxResponseLength ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.266.6.1 NAME 'sssServerPolicies' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )", + "( 2.16.840.1.113719.1.266.6.2 NAME 'sssServerPolicyOverride' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sssServerPolicies' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NOT_CONTAINER '1' )", + "( 2.16.840.1.113719.1.1.6.1.91 NAME 'nestedGroupAux' AUXILIARY MAY ( groupMember $ excludedMember $ nestedConfig $ groupMembership ) X-NDS_NOT_CONTAINER '1' )" + ] + }, + "schema_entry": "cn=schema", + "type": "SchemaInfo" +} +""" + +edir_9_1_4_dsa_info = """ +{ + "raw": { + "abandonOps": [ + "0" + ], + "addEntryOps": [ + "0" + ], + "altServer": [], + "bindSecurityErrors": [ + "0" + ], + "chainings": [ + "0" + ], + "compareOps": [ + "0" + ], + "directoryTreeName": [ + "TEST_TREE" + ], + "dsaName": [ + "cn=MYSERVER,o=resources" + ], + "errors": [ + "0" + ], + "extendedOps": [ + "0" + ], + "inBytes": [ + "293" + ], + "inOps": [ + "3" + ], + "listOps": [ + "0" + ], + "modifyEntryOps": [ + "0" + ], + "modifyRDNOps": [ + "0" + ], + "namingContexts": [ + "" + ], + "oneLevelSearchOps": [ + "0" + ], + "outBytes": [ + "14" + ], + "readOps": [ + "1" + ], + "referralsReturned": [ + "0" + ], + "removeEntryOps": [ + "0" + ], + "repUpdatesIn": [ + "0" + ], + "repUpdatesOut": [ + "0" + ], + "searchOps": [ + "1" + ], + "securityErrors": [ + "0" + ], + "simpleAuthBinds": [ + "1" + ], + "strongAuthBinds": [ + "0" + ], + "subschemaSubentry": [ + "cn=schema" + ], + "supportedCapabilities": [], + "supportedControl": [ + "2.16.840.1.113719.1.27.101.6", + "2.16.840.1.113719.1.27.101.5", + "1.2.840.113556.1.4.319", + "2.16.840.1.113730.3.4.3", + "2.16.840.1.113730.3.4.2", + "2.16.840.1.113719.1.27.101.57", + "2.16.840.1.113719.1.27.103.7", + "2.16.840.1.113719.1.27.101.40", + "2.16.840.1.113719.1.27.101.41", + "1.2.840.113556.1.4.1413", + "1.2.840.113556.1.4.805", + "2.16.840.1.113730.3.4.18", + "1.2.840.113556.1.4.529" + ], + "supportedExtension": [ + "2.16.840.1.113719.1.148.100.1", + "2.16.840.1.113719.1.148.100.3", + 
"2.16.840.1.113719.1.148.100.5", + "2.16.840.1.113719.1.148.100.7", + "2.16.840.1.113719.1.148.100.9", + "2.16.840.1.113719.1.148.100.11", + "2.16.840.1.113719.1.148.100.13", + "2.16.840.1.113719.1.148.100.15", + "2.16.840.1.113719.1.148.100.17", + "2.16.840.1.113719.1.39.42.100.1", + "2.16.840.1.113719.1.39.42.100.3", + "2.16.840.1.113719.1.39.42.100.5", + "2.16.840.1.113719.1.39.42.100.7", + "2.16.840.1.113719.1.39.42.100.9", + "2.16.840.1.113719.1.39.42.100.11", + "2.16.840.1.113719.1.39.42.100.13", + "2.16.840.1.113719.1.39.42.100.15", + "2.16.840.1.113719.1.39.42.100.17", + "2.16.840.1.113719.1.39.42.100.19", + "2.16.840.1.113719.1.39.42.100.21", + "2.16.840.1.113719.1.39.42.100.23", + "2.16.840.1.113719.1.39.42.100.25", + "2.16.840.1.113719.1.39.42.100.27", + "2.16.840.1.113719.1.39.42.100.29", + "1.3.6.1.4.1.4203.1.11.1", + "2.16.840.1.113719.1.27.100.1", + "2.16.840.1.113719.1.27.100.3", + "2.16.840.1.113719.1.27.100.5", + "2.16.840.1.113719.1.27.100.7", + "2.16.840.1.113719.1.27.100.11", + "2.16.840.1.113719.1.27.100.13", + "2.16.840.1.113719.1.27.100.15", + "2.16.840.1.113719.1.27.100.17", + "2.16.840.1.113719.1.27.100.19", + "2.16.840.1.113719.1.27.100.21", + "2.16.840.1.113719.1.27.100.23", + "2.16.840.1.113719.1.27.100.25", + "2.16.840.1.113719.1.27.100.27", + "2.16.840.1.113719.1.27.100.29", + "2.16.840.1.113719.1.27.100.31", + "2.16.840.1.113719.1.27.100.33", + "2.16.840.1.113719.1.27.100.35", + "2.16.840.1.113719.1.27.100.37", + "2.16.840.1.113719.1.27.100.39", + "2.16.840.1.113719.1.27.100.41", + "2.16.840.1.113719.1.27.100.96", + "2.16.840.1.113719.1.27.100.98", + "2.16.840.1.113719.1.27.100.101", + "2.16.840.1.113719.1.27.100.103", + "2.16.840.1.113719.1.142.100.1", + "2.16.840.1.113719.1.142.100.4", + "2.16.840.1.113719.1.142.100.6", + "2.16.840.1.113719.1.27.100.9", + "2.16.840.1.113719.1.27.100.43", + "2.16.840.1.113719.1.27.100.45", + "2.16.840.1.113719.1.27.100.47", + "2.16.840.1.113719.1.27.100.49", + "2.16.840.1.113719.1.27.100.51", + "2.16.840.1.113719.1.27.100.53", + "2.16.840.1.113719.1.27.100.55", + "1.3.6.1.4.1.1466.20037", + "2.16.840.1.113719.1.27.100.79", + "2.16.840.1.113719.1.27.100.84", + "2.16.840.1.113719.1.27.103.1", + "2.16.840.1.113719.1.27.103.2" + ], + "supportedFeatures": [ + "1.3.6.1.4.1.4203.1.5.1", + "2.16.840.1.113719.1.27.99.1" + ], + "supportedGroupingTypes": [ + "2.16.840.1.113719.1.27.103.8" + ], + "supportedLDAPVersion": [ + "2", + "3" + ], + "supportedSASLMechanisms": [ + "NMAS_LOGIN" + ], + "unAuthBinds": [ + "0" + ], + "vendorName": [ + "NetIQ Corporation" + ], + "vendorVersion": [ + "LDAP Agent for NetIQ eDirectory 9.1.4 (40105.09)" + ], + "wholeSubtreeSearchOps": [ + "0" + ] + }, + "type": "DsaInfo" +} +""" \ No newline at end of file diff --git a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/slapd24.py b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/slapd24.py index 30e1795..1c66332 100644 --- a/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/slapd24.py +++ b/server/www/packages/packages-linux/x64/ldap3/protocol/schemas/slapd24.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
# diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/asyncStream.py b/server/www/packages/packages-linux/x64/ldap3/strategy/asyncStream.py index 7977d7e..631331c 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/asyncStream.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/asyncStream.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -55,7 +55,11 @@ class AsyncStreamStrategy(AsyncStrategy): self.persistent_search_message_id = None self.streaming = False self.callback = None - self.events = Queue() + if ldap_connection.pool_size: + self.events = Queue(ldap_connection.pool_size) + else: + self.events = Queue() + del self._requests # remove _requests dict from Async Strategy def _start_listen(self): @@ -77,7 +81,6 @@ class AsyncStreamStrategy(AsyncStrategy): if not self._header_added and self.stream.tell() == 0: header = add_ldif_header(['-'])[0] self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator)) - ldif_lines = persistent_search_response_to_ldif(change) if self.stream and ldif_lines and not self.connection.closed: fragment = self.line_separator.join(ldif_lines) diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/asynchronous.py b/server/www/packages/packages-linux/x64/ldap3/strategy/asynchronous.py index 8ac79ee..b772ad2 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/asynchronous.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/asynchronous.py @@ -1,221 +1,253 @@ -""" -""" - -# Created on 2013.07.15 -# -# Author: Giovanni Cannata -# -# Copyright 2013 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from threading import Thread, Lock -import socket - -from .. import get_config_parameter -from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult -from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE -from ..protocol.rfc4511 import LDAPMessage -from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED -from ..utils.asn1 import decoder, decode_message_fast - - -# noinspection PyProtectedMember -class AsyncStrategy(BaseStrategy): - """ - This strategy is asynchronous. 
You send the request and get the messageId of the request sent - Receiving data from socket is managed in a separated thread in a blocking mode - Requests return an int value to indicate the messageId of the requested Operation - You get the response with get_response, it has a timeout to wait for response to appear - Connection.response will contain the whole LDAP response for the messageId requested in a dict form - Connection.request will contain the result LDAP message in a dict form - Response appear in strategy._responses dictionary - """ - - # noinspection PyProtectedMember - class ReceiverSocketThread(Thread): - """ - The thread that actually manage the receiver socket - """ - - def __init__(self, ldap_connection): - Thread.__init__(self) - self.connection = ldap_connection - self.socket_size = get_config_parameter('SOCKET_SIZE') - - def run(self): - """ - Wait for data on socket, compute the length of the message and wait for enough bytes to decode the message - Message are appended to strategy._responses - """ - unprocessed = b'' - get_more_data = True - listen = True - data = b'' - while listen: - if get_more_data: - try: - data = self.connection.socket.recv(self.socket_size) - except (OSError, socket.error, AttributeError): - if self.connection.receive_timeout: # a receive timeout has been detected - keep kistening on the socket - continue - except Exception as e: - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', str(e), self.connection) - raise # unexpected exception - re-raise - if len(data) > 0: - unprocessed += data - data = b'' - else: - listen = False - length = BaseStrategy.compute_ldap_message_size(unprocessed) - if length == -1 or len(unprocessed) < length: - get_more_data = True - elif len(unprocessed) >= length: # add message to message list - if self.connection.usage: - self.connection._usage.update_received_message(length) - if log_enabled(NETWORK): - log(NETWORK, 'received %d bytes via <%s>', length, self.connection) - if self.connection.fast_decoder: - ldap_resp = decode_message_fast(unprocessed[:length]) - dict_response = self.connection.strategy.decode_response_fast(ldap_resp) - else: - ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0] - dict_response = self.connection.strategy.decode_response(ldap_resp) - message_id = int(ldap_resp['messageID']) - if log_enabled(NETWORK): - log(NETWORK, 'received 1 ldap message via <%s>', self.connection) - if log_enabled(EXTENDED): - log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<')) - if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')): - if dict_response['result'] == 0: # StartTls in progress - if self.connection.server.tls: - self.connection.server.tls._start_tls(self.connection) - else: - self.connection.last_error = 'no Tls object defined in Server' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSSLConfigurationError(self.connection.last_error) - else: - self.connection.last_error = 'asynchronous StartTls failed' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPStartTLSError(self.connection.last_error) - del self.connection._awaiting_for_async_start_tls - if message_id != 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4) - with 
self.connection.strategy.async_lock: - if message_id in self.connection.strategy._responses: - self.connection.strategy._responses[message_id].append(dict_response) - else: - self.connection.strategy._responses[message_id] = [dict_response] - if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']: - self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE) - if self.connection.strategy.can_stream: # for AsyncStreamStrategy, used for PersistentSearch - self.connection.strategy.accumulate_stream(message_id, dict_response) - unprocessed = unprocessed[length:] - get_more_data = False if unprocessed else True - listen = True if self.connection.listening or unprocessed else False - else: # Unsolicited Notification - if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1) - listen = False - else: - self.connection.last_error = 'unknown unsolicited notification from server' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPStartTLSError(self.connection.last_error) - self.connection.strategy.close() - - def __init__(self, ldap_connection): - BaseStrategy.__init__(self, ldap_connection) - self.sync = False - self.no_real_dsa = False - self.pooled = False - self._responses = None - self._requests = None - self.can_stream = False - self.receiver = None - self.async_lock = Lock() - - def open(self, reset_usage=True, read_server_info=True): - """ - Open connection and start listen on the socket in a different thread - """ - with self.connection.connection_lock: - self._responses = dict() - self._requests = dict() - BaseStrategy.open(self, reset_usage, read_server_info) - - if read_server_info: - try: - self.connection.refresh_server_info() - except LDAPOperationResult: # catch errors from server if raise_exception = True - self.connection.server._dsa_info = None - self.connection.server._schema_info = None - - def close(self): - """ - Close connection and stop socket thread - """ - with self.connection.connection_lock: - BaseStrategy.close(self) - - def post_send_search(self, message_id): - """ - Clears connection.response and returns messageId - """ - self.connection.response = None - self.connection.request = None - self.connection.result = None - return message_id - - def post_send_single_response(self, message_id): - """ - Clears connection.response and returns messageId. - """ - self.connection.response = None - self.connection.request = None - self.connection.result = None - return message_id - - def _start_listen(self): - """ - Start thread in daemon mode - """ - if not self.connection.listening: - self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection) - self.connection.listening = True - self.receiver.daemon = True - self.receiver.start() - - def _get_response(self, message_id): - """ - Performs the capture of LDAP response for this strategy - Checks lock to avoid race condition with receiver thread - """ - with self.async_lock: - responses = self._responses.pop(message_id) if message_id in self._responses and self._responses[message_id][-1] == RESPONSE_COMPLETE else None - - return responses - - def receiving(self): - raise NotImplementedError - - def get_stream(self): - raise NotImplementedError - - def set_stream(self, value): - raise NotImplementedError +""" +""" + +# Created on 2013.07.15 +# +# Author: Giovanni Cannata +# +# Copyright 2013 - 2020 Giovanni Cannata +# +# This file is part of ldap3. 
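The rewritten asynchronous.py that follows replaces the old poll-and-sleep response collection with one threading.Event per outstanding message id: the sender registers an event in post_send_*, the receiver thread sets it once RESPONSE_COMPLETE is appended, and _get_response() blocks on event.wait(timeout). A condensed, self-contained sketch of that pattern (simplified names, not the actual ldap3 classes):

from threading import Event, Lock

class ResponseRegistry:
    """Minimal model of the sender/receiver handshake used below."""
    def __init__(self):
        self._lock = Lock()
        self._events = {}     # message_id -> Event
        self._responses = {}  # message_id -> list of decoded messages

    def _event_for(self, message_id):
        # get-or-create under the lock: sender and receiver may race,
        # so whichever side arrives first creates the Event
        with self._lock:
            if message_id not in self._events:
                self._events[message_id] = Event()
            return self._events[message_id]

    def deliver(self, message_id, response):
        # receiver thread: store the response, then wake the waiter
        self._responses.setdefault(message_id, []).append(response)
        self._event_for(message_id).set()

    def wait_for(self, message_id, timeout):
        # caller thread: block until the receiver signals completion
        if not self._event_for(message_id).wait(timeout):
            return None  # timed out, like _get_response() returning None
        with self._lock:
            del self._events[message_id]
        return self._responses.pop(message_id)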
+# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see <http://www.gnu.org/licenses/>. + +from threading import Thread, Lock, Event +import socket + +from .. import get_config_parameter +from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult +from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE +from ..protocol.rfc4511 import LDAPMessage +from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED +from ..utils.asn1 import decoder, decode_message_fast + + +# noinspection PyProtectedMember +class AsyncStrategy(BaseStrategy): + """ + This strategy is asynchronous. You send the request and get the messageId of the request sent + Receiving data from socket is managed in a separate thread in a blocking mode + Requests return an int value to indicate the messageId of the requested Operation + You get the response with get_response, it has a timeout to wait for response to appear + Connection.response will contain the whole LDAP response for the messageId requested in a dict form + Connection.request will contain the result LDAP message in a dict form + Responses appear in strategy._responses dictionary + """ + + # noinspection PyProtectedMember + class ReceiverSocketThread(Thread): + """ + The thread that actually manages the receiver socket + """ + + def __init__(self, ldap_connection): + Thread.__init__(self) + self.connection = ldap_connection + self.socket_size = get_config_parameter('SOCKET_SIZE') + + def run(self): + """ + Waits for data on socket, computes the length of the message and waits for enough bytes to decode the message + Messages are appended to strategy._responses + """ + unprocessed = b'' + get_more_data = True + listen = True + data = b'' + while listen: + if get_more_data: + try: + data = self.connection.socket.recv(self.socket_size) + except (OSError, socket.error, AttributeError): + if self.connection.receive_timeout: # a receive timeout has been detected - keep listening on the socket + continue + except Exception as e: + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', str(e), self.connection) + raise # unexpected exception - re-raise + if len(data) > 0: + unprocessed += data + data = b'' + else: + listen = False + length = BaseStrategy.compute_ldap_message_size(unprocessed) + if length == -1 or len(unprocessed) < length: + get_more_data = True + elif len(unprocessed) >= length: # add message to message list + if self.connection.usage: + self.connection._usage.update_received_message(length) + if log_enabled(NETWORK): + log(NETWORK, 'received %d bytes via <%s>', length, self.connection) + if self.connection.fast_decoder: + ldap_resp = decode_message_fast(unprocessed[:length]) + dict_response = self.connection.strategy.decode_response_fast(ldap_resp) + else: + ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0] + dict_response = self.connection.strategy.decode_response(ldap_resp) + message_id =
int(ldap_resp['messageID']) + if log_enabled(NETWORK): + log(NETWORK, 'received 1 ldap message via <%s>', self.connection) + if log_enabled(EXTENDED): + log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<')) + if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')): + if dict_response['result'] == 0: # StartTls in progress + if self.connection.server.tls: + self.connection.server.tls._start_tls(self.connection) + else: + self.connection.last_error = 'no Tls object defined in Server' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPSSLConfigurationError(self.connection.last_error) + else: + self.connection.last_error = 'asynchronous StartTls failed' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPStartTLSError(self.connection.last_error) + del self.connection._awaiting_for_async_start_tls + if message_id != 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4) + with self.connection.strategy.async_lock: + if message_id in self.connection.strategy._responses: + self.connection.strategy._responses[message_id].append(dict_response) + else: + self.connection.strategy._responses[message_id] = [dict_response] + if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']: + self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE) + self.connection.strategy.set_event_for_message(message_id) + + if self.connection.strategy.can_stream: # for AsyncStreamStrategy, used for PersistentSearch + self.connection.strategy.accumulate_stream(message_id, dict_response) + unprocessed = unprocessed[length:] + get_more_data = False if unprocessed else True + listen = True if self.connection.listening or unprocessed else False + else: # Unsolicited Notification + if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1) + listen = False + else: + self.connection.last_error = 'unknown unsolicited notification from server' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPStartTLSError(self.connection.last_error) + self.connection.strategy.close() + + def __init__(self, ldap_connection): + BaseStrategy.__init__(self, ldap_connection) + self.sync = False + self.no_real_dsa = False + self.pooled = False + self._responses = None + self._requests = None + self.can_stream = False + self.receiver = None + self.async_lock = Lock() + self.event_lock = Lock() + self._events = {} + + def open(self, reset_usage=True, read_server_info=True): + """ + Open connection and start listening on the socket in a different thread + """ + with self.connection.connection_lock: + self._responses = dict() + self._requests = dict() + BaseStrategy.open(self, reset_usage, read_server_info) + + if read_server_info: + try: + self.connection.refresh_server_info() + except LDAPOperationResult: # catch errors from server if raise_exceptions = True + self.connection.server._dsa_info = None + self.connection.server._schema_info = None + + def close(self): + """ + Close connection and stop socket thread + """ + with self.connection.connection_lock: + BaseStrategy.close(self) + + def _add_event_for_message(self, message_id): + with self.event_lock: + # Should have
the check here because the receiver thread may have created it + if message_id not in self._events: + self._events[message_id] = Event() + + def set_event_for_message(self, message_id): + with self.event_lock: + # The receiver thread may receive the response before the sender sets the event for the message_id, + # so we have to check if the event exists + if message_id not in self._events: + self._events[message_id] = Event() + self._events[message_id].set() + + def _get_event_for_message(self, message_id): + with self.event_lock: + if message_id not in self._events: + raise RuntimeError('Event for message[{}] should have been created before accessing'.format(message_id)) + return self._events[message_id] + + def post_send_search(self, message_id): + """ + Clears connection.response and returns messageId + """ + self.connection.response = None + self.connection.request = None + self.connection.result = None + self._add_event_for_message(message_id) + return message_id + + def post_send_single_response(self, message_id): + """ + Clears connection.response and returns messageId. + """ + self.connection.response = None + self.connection.request = None + self.connection.result = None + self._add_event_for_message(message_id) + return message_id + + def _start_listen(self): + """ + Start thread in daemon mode + """ + if not self.connection.listening: + self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection) + self.connection.listening = True + self.receiver.daemon = True + self.receiver.start() + + def _get_response(self, message_id, timeout): + """ + Performs the capture of LDAP response for this strategy + The response is only complete after the event has been set + """ + event = self._get_event_for_message(message_id) + flag = event.wait(timeout) + if not flag: + # timeout + return None + + # At this stage the response is guaranteed to be there + self._events.pop(message_id) + with self.async_lock: + return self._responses.pop(message_id) + + def receiving(self): + raise NotImplementedError + + def get_stream(self): + raise NotImplementedError + + def set_stream(self, value): + raise NotImplementedError diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/base.py b/server/www/packages/packages-linux/x64/ldap3/strategy/base.py index 0506703..568459e 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/base.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/base.py @@ -1,867 +1,902 @@ -""" -""" - -# Created on 2013.07.15 -# -# Author: Giovanni Cannata -# -# Copyright 2013 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see <http://www.gnu.org/licenses/>. - -import socket -from struct import pack -from platform import system -from time import sleep -from random import choice -from datetime import datetime - -from ..
import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES -from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL -from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\ - LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \ - LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError -from ..utils.uri import parse_uri -from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry -from ..operation.add import add_response_to_dict, add_request_to_dict -from ..operation.modify import modify_request_to_dict, modify_response_to_dict -from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\ - search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\ - search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast -from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \ - sicily_bind_response_to_dict_fast -from ..operation.compare import compare_response_to_dict, compare_request_to_dict -from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast -from ..core.server import Server -from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict -from ..operation.delete import delete_response_to_dict, delete_request_to_dict -from ..protocol.convert import prepare_changes_for_request, build_controls_list -from ..operation.abandon import abandon_request_to_dict -from ..core.tls import Tls -from ..protocol.oid import Oids -from ..protocol.rfc2696 import RealSearchControlValue -from ..protocol.microsoft import DirSyncControlResponseValue -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message -from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence -from ..utils.conv import to_unicode - -SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER' -TRANSACTION_ERROR = 'TRANSACTION_ERROR' -RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE' - - -# noinspection PyProtectedMember -class BaseStrategy(object): - """ - Base class for connection strategy - """ - - def __init__(self, ldap_connection): - self.connection = ldap_connection - self._outstanding = None - self._referrals = [] - self.sync = None # indicates a synchronous connection - self.no_real_dsa = None # indicates a connection to a fake LDAP server - self.pooled = None # Indicates a connection with a connection pool - self.can_stream = None # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). 
Stream must be initialized and closed in _start_listen() and _stop_listen() - self.referral_cache = {} - if log_enabled(BASIC): - log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self) - - def __str__(self): - s = [ - str(self.connection) if self.connection else 'None', - 'sync' if self.sync else 'async', - 'no real DSA' if self.no_real_dsa else 'real DSA', - 'pooled' if self.pooled else 'not pooled', - 'can stream output' if self.can_stream else 'cannot stream output', - ] - return ' - '.join(s) - - def open(self, reset_usage=True, read_server_info=True): - """ - Open a socket to a server. Choose a server from the server pool if available - """ - if log_enabled(NETWORK): - log(NETWORK, 'opening connection for <%s>', self.connection) - if self.connection.lazy and not self.connection._executing_deferred: - self.connection._deferred_open = True - self.connection.closed = False - if log_enabled(NETWORK): - log(NETWORK, 'deferring open connection for <%s>', self.connection) - else: - if not self.connection.closed and not self.connection._executing_deferred: # try to close connection if still open - self.close() - - self._outstanding = dict() - if self.connection.usage: - if reset_usage or not self.connection._usage.initial_connection_start_time: - self.connection._usage.start() - - if self.connection.server_pool: - new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available - if self.connection.server != new_server: - self.connection.server = new_server - if self.connection.usage: - self.connection._usage.servers_from_pool += 1 - - exception_history = [] - if not self.no_real_dsa: # tries to connect to a real server - for candidate_address in self.connection.server.candidate_addresses(): - try: - if log_enabled(BASIC): - log(BASIC, 'try to open candidate address %s', candidate_address[:-2]) - self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc) - self.connection.server.current_address = candidate_address - self.connection.server.update_availability(candidate_address, True) - break - except Exception as e: - self.connection.server.update_availability(candidate_address, False) - # exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4])) - exception_history.append((type(e)(str(e)), candidate_address[4])) - if not self.connection.server.current_address and exception_history: - # if len(exception_history) == 1: # only one exception, reraise - # if log_enabled(ERROR): - # log(ERROR, '<%s> for <%s>', exception_history[0][1](exception_history[0][2]), self.connection) - # raise exception_history[0][1](exception_history[0][2]) - # else: - # if log_enabled(ERROR): - # log(ERROR, 'unable to open socket for <%s>', self.connection) - # raise LDAPSocketOpenError('unable to open socket', exception_history) - if log_enabled(ERROR): - log(ERROR, 'unable to open socket for <%s>', self.connection) - raise LDAPSocketOpenError('unable to open socket', exception_history) - elif not self.connection.server.current_address: - if log_enabled(ERROR): - log(ERROR, 'invalid server address for <%s>', self.connection) - raise LDAPSocketOpenError('invalid server address') - - self.connection._deferred_open = False - self._start_listen() - # self.connection.do_auto_bind() - if log_enabled(NETWORK): - log(NETWORK, 'connection open for <%s>', self.connection) - - def close(self): - """ - Close connection - """ - if log_enabled(NETWORK): - log(NETWORK, 'closing connection for 
<%s>', self.connection) - if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open): - self.connection.listening = False - self.connection.closed = True - if log_enabled(NETWORK): - log(NETWORK, 'deferred connection closed for <%s>', self.connection) - else: - if not self.connection.closed: - self._stop_listen() - if not self. no_real_dsa: - self._close_socket() - if log_enabled(NETWORK): - log(NETWORK, 'connection closed for <%s>', self.connection) - - self.connection.bound = False - self.connection.request = None - self.connection.response = None - self.connection.tls_started = False - self._outstanding = None - self._referrals = [] - - if not self.connection.strategy.no_real_dsa: - self.connection.server.current_address = None - if self.connection.usage: - self.connection._usage.stop() - - def _open_socket(self, address, use_ssl=False, unix_socket=False): - """ - Tries to open and connect a socket to a Server - raise LDAPExceptionError if unable to open or connect socket - """ - try: - self.connection.socket = socket.socket(*address[:3]) - except Exception as e: - self.connection.last_error = 'socket creation error: ' + str(e) - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error) - raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error) - try: # set socket timeout for opening connection - if self.connection.server.connect_timeout: - self.connection.socket.settimeout(self.connection.server.connect_timeout) - self.connection.socket.connect(address[4]) - except socket.error as e: - self.connection.last_error = 'socket connection error while opening: ' + str(e) - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error) - raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error) - - # Set connection recv timeout (must be set after connect, - # because socket.settimeout() affects both, connect() as - # well as recv(). Set it before tls.wrap_socket() because - # the recv timeout should take effect during the TLS - # handshake. 
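The receive-timeout handling documented in the comment above is the same in old and new versions of this file: settimeout() governs connect() and recv() at the Python level, while SO_RCVTIMEO is additionally set at the OS level, as milliseconds on Windows and as a struct timeval on POSIX. A standalone sketch of that dual setup (host and port are placeholders):

import socket
from platform import system
from struct import pack

receive_timeout = 5  # seconds

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('ldap.example.com', 389))  # connect first, then set timeouts
sock.settimeout(receive_timeout)         # Python-level timeout for recv()
if system().lower() == 'windows':
    # Windows expects the timeout as an int of milliseconds
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * receive_timeout))
else:
    # POSIX expects a struct timeval: (seconds, microseconds)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', receive_timeout, 0))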
- if self.connection.receive_timeout is not None: - try: # set receive timeout for the connection socket - self.connection.socket.settimeout(self.connection.receive_timeout) - if system().lower() == 'windows': - self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout)) - else: - self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0)) - except socket.error as e: - self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e) - - # if exc: - # if log_enabled(ERROR): - # log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error) - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error) - - if use_ssl: - try: - self.connection.server.tls.wrap_socket(self.connection, do_handshake=True) - if self.connection.usage: - self.connection._usage.wrapped_sockets += 1 - except Exception as e: - self.connection.last_error = 'socket ssl wrapping error: ' + str(e) - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error) - raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error) - if self.connection.usage: - self.connection._usage.open_sockets += 1 - - self.connection.closed = False - - def _close_socket(self): - """ - Try to close a socket - don't raise exception if unable to close socket, assume socket is already closed - """ - - try: - self.connection.socket.shutdown(socket.SHUT_RDWR) - except Exception: - pass - - try: - self.connection.socket.close() - except Exception: - pass - - self.connection.socket = None - self.connection.closed = True - - if self.connection.usage: - self.connection._usage.closed_sockets += 1 - - def _stop_listen(self): - self.connection.listening = False - - def send(self, message_type, request, controls=None): - """ - Send an LDAP message - Returns the message_id - """ - self.connection.request = None - if self.connection.listening: - if self.connection.sasl_in_progress and message_type not in ['bindRequest']: # as per RFC4511 (4.2.1) - self.connection.last_error = 'cannot send operation requests while SASL bind is in progress' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSASLBindInProgressError(self.connection.last_error) - message_id = self.connection.server.next_message_id() - ldap_message = LDAPMessage() - ldap_message['messageID'] = MessageID(message_id) - ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request) - message_controls = build_controls_list(controls) - if message_controls is not None: - ldap_message['controls'] = message_controls - self.connection.request = BaseStrategy.decode_request(message_type, request, controls) - self._outstanding[message_id] = self.connection.request - self.sending(ldap_message) - else: - self.connection.last_error = 'unable to send message, socket is not open' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSocketOpenError(self.connection.last_error) - - return message_id - - def 
get_response(self, message_id, timeout=None, get_request=False): - """ - Get response LDAP messages - Responses are returned by the underlying connection strategy - Check if message_id LDAP message is still outstanding and wait for timeout to see if it appears in _get_response - Result is stored in connection.result - Responses without result is stored in connection.response - A tuple (responses, result) is returned - """ - conf_sleep_interval = get_config_parameter('RESPONSE_SLEEPTIME') - if timeout is None: - timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT') - response = None - result = None - request = None - if self._outstanding and message_id in self._outstanding: - while timeout >= 0: # waiting for completed message to appear in responses - responses = self._get_response(message_id) - if not responses: - sleep(conf_sleep_interval) - timeout -= conf_sleep_interval - continue - - if responses == SESSION_TERMINATED_BY_SERVER: - try: # try to close the session but don't raise any error if server has already closed the session - self.close() - except (socket.error, LDAPExceptionError): - pass - self.connection.last_error = 'session terminated by server' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSessionTerminatedByServerError(self.connection.last_error) - elif responses == TRANSACTION_ERROR: # Novell LDAP Transaction unsolicited notification - self.connection.last_error = 'transaction error' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPTransactionError(self.connection.last_error) - - # if referral in response opens a new connection to resolve referrals if requested - - if responses[-2]['result'] == RESULT_REFERRAL: - if self.connection.usage: - self.connection._usage.referrals_received += 1 - if self.connection.auto_referrals: - ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals']) - if ref_response is not None: - responses = ref_response + [ref_result] - responses.append(RESPONSE_COMPLETE) - elif ref_result is not None: - responses = [ref_result, RESPONSE_COMPLETE] - - self._referrals = [] - - if responses: - result = responses[-2] - response = responses[:-2] - self.connection.result = None - self.connection.response = None - break - - if timeout <= 0: - if log_enabled(ERROR): - log(ERROR, 'socket timeout, no response from server for <%s>', self.connection) - raise LDAPResponseTimeoutError('no response from server') - - if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS: - if log_enabled(PROTOCOL): - log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection) - self._outstanding.pop(message_id) - self.connection.result = result.copy() - raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type']) - - # checks if any response has a range tag - # self._auto_range_searching is set as a flag to avoid recursive searches - if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)): - self._auto_range_searching = result.copy() - temp_response = response[:] # copy - if self.do_search_on_auto_range(self._outstanding[message_id], response): - for resp in temp_response: - if resp['type'] == 
'searchResEntry': - keys = [key for key in resp['raw_attributes'] if ';range=' in key] - for key in keys: - del resp['raw_attributes'][key] - del resp['attributes'][key] - response = temp_response - result = self._auto_range_searching - del self._auto_range_searching - - if self.connection.empty_attributes: - for entry in response: - if entry['type'] == 'searchResEntry': - for attribute_type in self._outstanding[message_id]['attributes']: - if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES): - entry['raw_attributes'][attribute_type] = list() - entry['attributes'][attribute_type] = list() - if log_enabled(PROTOCOL): - log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self) - if not self.connection.auto_range: - attrs_to_remove = [] - # removes original empty attribute in case a range tag is returned - for attribute_type in entry['attributes']: - if ';range' in attribute_type.lower(): - orig_attr, _, _ = attribute_type.partition(';') - attrs_to_remove.append(orig_attr) - for attribute_type in attrs_to_remove: - if log_enabled(PROTOCOL): - log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self) - del entry['raw_attributes'][attribute_type] - del entry['attributes'][attribute_type] - - request = self._outstanding.pop(message_id) - else: - if log_enabled(ERROR): - log(ERROR, 'message id not in outstanding queue for <%s>', self.connection) - raise(LDAPResponseTimeoutError('message id not in outstanding queue')) - - if get_request: - return response, result, request - else: - return response, result - - @staticmethod - def compute_ldap_message_size(data): - """ - Compute LDAP Message size according to BER definite length rules - Returns -1 if too few data to compute message length - """ - if isinstance(data, str): # fix for Python 2, data is string not bytes - data = bytearray(data) # Python 2 bytearray is equivalent to Python 3 bytes - - ret_value = -1 - if len(data) > 2: - if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long - ret_value = data[1] + 2 - else: # BER definite length - long form. 
Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length - bytes_length = data[1] - 128 - if len(data) >= bytes_length + 2: - value_length = 0 - cont = bytes_length - for byte in data[2:2 + bytes_length]: - cont -= 1 - value_length += byte * (256 ** cont) - ret_value = value_length + 2 + bytes_length - - return ret_value - - def decode_response(self, ldap_message): - """ - Convert received LDAPMessage to a dict - """ - message_type = ldap_message.getComponentByName('protocolOp').getName() - component = ldap_message['protocolOp'].getComponent() - controls = ldap_message['controls'] - if message_type == 'bindResponse': - if not bytes(component['matchedDN']).startswith(b'NTLM'): # patch for microsoft ntlm authentication - result = bind_response_to_dict(component) - else: - result = sicily_bind_response_to_dict(component) - elif message_type == 'searchResEntry': - result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names) - elif message_type == 'searchResDone': - result = search_result_done_response_to_dict(component) - elif message_type == 'searchResRef': - result = search_result_reference_response_to_dict(component) - elif message_type == 'modifyResponse': - result = modify_response_to_dict(component) - elif message_type == 'addResponse': - result = add_response_to_dict(component) - elif message_type == 'delResponse': - result = delete_response_to_dict(component) - elif message_type == 'modDNResponse': - result = modify_dn_response_to_dict(component) - elif message_type == 'compareResponse': - result = compare_response_to_dict(component) - elif message_type == 'extendedResp': - result = extended_response_to_dict(component) - elif message_type == 'intermediateResponse': - result = intermediate_response_to_dict(component) - else: - if log_enabled(ERROR): - log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection) - raise LDAPUnknownResponseError('unknown response') - result['type'] = message_type - if controls: - result['controls'] = dict() - for control in controls: - decoded_control = self.decode_control(control) - result['controls'][decoded_control[0]] = decoded_control[1] - return result - - def decode_response_fast(self, ldap_message): - """ - Convert received LDAPMessage from fast ber decoder to a dict - """ - if ldap_message['protocolOp'] == 1: # bindResponse - if not ldap_message['payload'][1][3].startswith(b'NTLM'): # patch for microsoft ntlm authentication - result = bind_response_to_dict_fast(ldap_message['payload']) - else: - result = sicily_bind_response_to_dict_fast(ldap_message['payload']) - result['type'] = 'bindResponse' - elif ldap_message['protocolOp'] == 4: # searchResEntry' - result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names) - result['type'] = 'searchResEntry' - elif ldap_message['protocolOp'] == 5: # searchResDone - result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'searchResDone' - elif ldap_message['protocolOp'] == 19: # searchResRef - result = search_result_reference_response_to_dict_fast(ldap_message['payload']) - result['type'] = 'searchResRef' - elif ldap_message['protocolOp'] == 7: # modifyResponse - result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'modifyResponse' - elif ldap_message['protocolOp'] == 9: # addResponse - 
result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'addResponse' - elif ldap_message['protocolOp'] == 11: # delResponse - result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'delResponse' - elif ldap_message['protocolOp'] == 13: # modDNResponse - result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'modDNResponse' - elif ldap_message['protocolOp'] == 15: # compareResponse - result = ldap_result_to_dict_fast(ldap_message['payload']) - result['type'] = 'compareResponse' - elif ldap_message['protocolOp'] == 24: # extendedResp - result = extended_response_to_dict_fast(ldap_message['payload']) - result['type'] = 'extendedResp' - elif ldap_message['protocolOp'] == 25: # intermediateResponse - result = intermediate_response_to_dict_fast(ldap_message['payload']) - result['type'] = 'intermediateResponse' - else: - if log_enabled(ERROR): - log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection) - raise LDAPUnknownResponseError('unknown response') - if ldap_message['controls']: - result['controls'] = dict() - for control in ldap_message['controls']: - decoded_control = self.decode_control_fast(control[3]) - result['controls'][decoded_control[0]] = decoded_control[1] - return result - - @staticmethod - def decode_control(control): - """ - decode control, return a 2-element tuple where the first element is the control oid - and the second element is a dictionary with description (from Oids), criticality and decoded control value - """ - control_type = str(control['controlType']) - criticality = bool(control['criticality']) - control_value = bytes(control['controlValue']) - unprocessed = None - if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696 - control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue()) - control_value = dict() - control_value['size'] = int(control_resp['size']) - control_value['cookie'] = bytes(control_resp['cookie']) - elif control_type == '1.2.840.113556.1.4.841': # DirSync AD - control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue()) - control_value = dict() - control_value['more_results'] = bool(control_resp['MoreResults']) # more_result if nonzero - control_value['cookie'] = bytes(control_resp['CookieServer']) - elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527 - control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry()) - control_value = dict() - control_value['result'] = attributes_to_dict(control_resp['attributes']) - if unprocessed: - if log_enabled(ERROR): - log(ERROR, 'unprocessed control response in substrate') - raise LDAPControlError('unprocessed control response in substrate') - return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value} - - @staticmethod - def decode_control_fast(control): - """ - decode control, return a 2-element tuple where the first element is the control oid - and the second element is a dictionary with description (from Oids), criticality and decoded control value - """ - control_type = str(to_unicode(control[0][3], from_server=True)) - criticality = False - control_value = None - for r in control[1:]: - if r[2] == 4: # controlValue - control_value = r[3] - else: - criticality = False if r[3] == 0 else True # criticality (booleand default to False) - if control_type == 
'1.2.840.113556.1.4.319': # simple paged search as per RFC2696 - control_resp = decode_sequence(control_value, 0, len(control_value)) - control_value = dict() - control_value['size'] = int(control_resp[0][3][0][3]) - control_value['cookie'] = bytes(control_resp[0][3][1][3]) - elif control_type == '1.2.840.113556.1.4.841': # DirSync AD - control_resp = decode_sequence(control_value, 0, len(control_value)) - control_value = dict() - control_value['more_results'] = True if control_resp[0][3][0][3] else False # more_result if nonzero - control_value['cookie'] = control_resp[0][3][2][3] - elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527 - control_resp = decode_sequence(control_value, 0, len(control_value)) - control_value = dict() - control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3]) - return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value} - - @staticmethod - def decode_request(message_type, component, controls=None): - # message_type = ldap_message.getComponentByName('protocolOp').getName() - # component = ldap_message['protocolOp'].getComponent() - if message_type == 'bindRequest': - result = bind_request_to_dict(component) - elif message_type == 'unbindRequest': - result = dict() - elif message_type == 'addRequest': - result = add_request_to_dict(component) - elif message_type == 'compareRequest': - result = compare_request_to_dict(component) - elif message_type == 'delRequest': - result = delete_request_to_dict(component) - elif message_type == 'extendedReq': - result = extended_request_to_dict(component) - elif message_type == 'modifyRequest': - result = modify_request_to_dict(component) - elif message_type == 'modDNRequest': - result = modify_dn_request_to_dict(component) - elif message_type == 'searchRequest': - result = search_request_to_dict(component) - elif message_type == 'abandonRequest': - result = abandon_request_to_dict(component) - else: - if log_enabled(ERROR): - log(ERROR, 'unknown request <%s>', message_type) - raise LDAPUnknownRequestError('unknown request') - result['type'] = message_type - result['controls'] = controls - - return result - - def valid_referral_list(self, referrals): - referral_list = [] - for referral in referrals: - candidate_referral = parse_uri(referral) - if candidate_referral: - for ref_host in self.connection.server.allowed_referral_hosts: - if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*': - if candidate_referral['host'] not in self._referrals: - candidate_referral['anonymousBindOnly'] = not ref_host[1] - referral_list.append(candidate_referral) - break - - return referral_list - - def do_next_range_search(self, request, response, attr_name): - done = False - current_response = response - while not done: - attr_type, _, returned_range = attr_name.partition(';range=') - _, _, high_range = returned_range.partition('-') - response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name] - response['attributes'][attr_type] += current_response['attributes'][attr_name] - if high_range != '*': - if log_enabled(PROTOCOL): - log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection) - requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*' - result = self.connection.search(search_base=response['dn'], - search_filter='(objectclass=*)', - search_scope=BASE, - 
dereference_aliases=request['dereferenceAlias'], - attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*']) - if isinstance(result, bool): - if result: - current_response = self.connection.response[0] - else: - done = True - else: - current_response, _ = self.get_response(result) - current_response = current_response[0] - - if not done: - if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0: - del current_response['raw_attributes'][requested_range] - del current_response['attributes'][requested_range] - attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0] - continue - - done = True - - def do_search_on_auto_range(self, request, response): - for resp in [r for r in response if r['type'] == 'searchResEntry']: - for attr_name in list(resp['raw_attributes'].keys()): # generate list to avoid changing of dict size error - if ';range=' in attr_name: - attr_type, _, range_values = attr_name.partition(';range=') - if range_values in ('1-1', '0-0'): # DirSync returns these values for adding and removing members - return False - if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None: - resp['raw_attributes'][attr_type] = list() - if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None: - resp['attributes'][attr_type] = list() - self.do_next_range_search(request, resp, attr_name) - return True - def do_operation_on_referral(self, request, referrals): - if log_enabled(PROTOCOL): - log(PROTOCOL, 'following referral for <%s>', self.connection) - valid_referral_list = self.valid_referral_list(referrals) - if valid_referral_list: - preferred_referral_list = [referral for referral in valid_referral_list if referral['ssl'] == self.connection.server.ssl] - selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(valid_referral_list) - - cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl']) - if self.connection.use_referral_cache and cachekey in self.referral_cache: - referral_connection = self.referral_cache[cachekey] - else: - referral_server = Server(host=selected_referral['host'], - port=selected_referral['port'] or self.connection.server.port, - use_ssl=selected_referral['ssl'], - get_info=self.connection.server.get_info, - formatter=self.connection.server.custom_formatter, - connect_timeout=self.connection.server.connect_timeout, - mode=self.connection.server.mode, - allowed_referral_hosts=self.connection.server.allowed_referral_hosts, - tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file, - local_certificate_file=self.connection.server.tls.certificate_file, - validate=self.connection.server.tls.validate, - version=self.connection.server.tls.version, - ca_certs_file=self.connection.server.tls.ca_certs_file) if selected_referral['ssl'] else None) - - from ..core.connection import Connection - - referral_connection = Connection(server=referral_server, - user=self.connection.user if not selected_referral['anonymousBindOnly'] else None, - password=self.connection.password if not selected_referral['anonymousBindOnly'] else None, - version=self.connection.version, - authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS, - client_strategy=SYNC, - auto_referrals=True, - read_only=self.connection.read_only, - check_names=self.connection.check_names, - 
raise_exceptions=self.connection.raise_exceptions, - fast_decoder=self.connection.fast_decoder, - receive_timeout=self.connection.receive_timeout, - sasl_mechanism=self.connection.sasl_mechanism, - sasl_credentials=self.connection.sasl_credentials) - - if self.connection.usage: - self.connection._usage.referrals_connections += 1 - - referral_connection.open() - referral_connection.strategy._referrals = self._referrals - if self.connection.tls_started and not referral_server.ssl: # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection - referral_connection.start_tls() - - if self.connection.bound: - referral_connection.bind() - - if self.connection.usage: - self.connection._usage.referrals_followed += 1 - - if request['type'] == 'searchRequest': - referral_connection.search(selected_referral['base'] or request['base'], - selected_referral['filter'] or request['filter'], - selected_referral['scope'] or request['scope'], - request['dereferenceAlias'], - selected_referral['attributes'] or request['attributes'], - request['sizeLimit'], - request['timeLimit'], - request['typesOnly'], - controls=request['controls']) - elif request['type'] == 'addRequest': - referral_connection.add(selected_referral['base'] or request['entry'], - None, - request['attributes'], - controls=request['controls']) - elif request['type'] == 'compareRequest': - referral_connection.compare(selected_referral['base'] or request['entry'], - request['attribute'], - request['value'], - controls=request['controls']) - elif request['type'] == 'delRequest': - referral_connection.delete(selected_referral['base'] or request['entry'], - controls=request['controls']) - elif request['type'] == 'extendedReq': - referral_connection.extended(request['name'], - request['value'], - controls=request['controls'], - no_encode=True - ) - elif request['type'] == 'modifyRequest': - referral_connection.modify(selected_referral['base'] or request['entry'], - prepare_changes_for_request(request['changes']), - controls=request['controls']) - elif request['type'] == 'modDNRequest': - referral_connection.modify_dn(selected_referral['base'] or request['entry'], - request['newRdn'], - request['deleteOldRdn'], - request['newSuperior'], - controls=request['controls']) - else: - self.connection.last_error = 'referral operation not permitted' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPReferralError(self.connection.last_error) - - response = referral_connection.response - result = referral_connection.result - if self.connection.use_referral_cache: - self.referral_cache[cachekey] = referral_connection - else: - referral_connection.unbind() - else: - response = None - result = None - - return response, result - - def sending(self, ldap_message): - if log_enabled(NETWORK): - log(NETWORK, 'sending 1 ldap message for <%s>', self.connection) - try: - encoded_message = encode(ldap_message) - self.connection.socket.sendall(encoded_message) - if log_enabled(EXTENDED): - log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>')) - if log_enabled(NETWORK): - log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection) - except socket.error as e: - self.connection.last_error = 'socket sending error' + str(e) - encoded_message = None - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise 
communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
-            raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error)
-        if self.connection.usage:
-            self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
-
-    def _start_listen(self):
-        # overridden on strategy class
-        raise NotImplementedError
-
-    def _get_response(self, message_id):
-        # overridden in strategy class
-        raise NotImplementedError
-
-    def receiving(self):
-        # overridden in strategy class
-        raise NotImplementedError
-
-    def post_send_single_response(self, message_id):
-        # overridden in strategy class
-        raise NotImplementedError
-
-    def post_send_search(self, message_id):
-        # overridden in strategy class
-        raise NotImplementedError
-
-    def get_stream(self):
-        raise NotImplementedError
-
-    def set_stream(self, value):
-        raise NotImplementedError
-
-    def unbind_referral_cache(self):
-        while len(self.referral_cache) > 0:
-            cachekey, referral_connection = self.referral_cache.popitem()
-            referral_connection.unbind()
+"""
+"""
+
+# Created on 2013.07.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import socket
+from struct import pack
+from platform import system
+from time import sleep
+from random import choice
+
+from ..
import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES +from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL +from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\ + LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \ + LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError +from ..utils.uri import parse_uri +from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry +from ..operation.add import add_response_to_dict, add_request_to_dict +from ..operation.modify import modify_request_to_dict, modify_response_to_dict +from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\ + search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\ + search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast +from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \ + sicily_bind_response_to_dict_fast +from ..operation.compare import compare_response_to_dict, compare_request_to_dict +from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast +from ..core.server import Server +from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict +from ..operation.delete import delete_response_to_dict, delete_request_to_dict +from ..protocol.convert import prepare_changes_for_request, build_controls_list +from ..operation.abandon import abandon_request_to_dict +from ..core.tls import Tls +from ..protocol.oid import Oids +from ..protocol.rfc2696 import RealSearchControlValue +from ..protocol.microsoft import DirSyncControlResponseValue +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message +from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence +from ..utils.conv import to_unicode + +SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER' +TRANSACTION_ERROR = 'TRANSACTION_ERROR' +RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE' + + +# noinspection PyProtectedMember +class BaseStrategy(object): + """ + Base class for connection strategy + """ + + def __init__(self, ldap_connection): + self.connection = ldap_connection + self._outstanding = None + self._referrals = [] + self.sync = None # indicates a synchronous connection + self.no_real_dsa = None # indicates a connection to a fake LDAP server + self.pooled = None # Indicates a connection with a connection pool + self.can_stream = None # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). 
Stream must be initialized and closed in _start_listen() and _stop_listen()
+        self.referral_cache = {}
+        if log_enabled(BASIC):
+            log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
+
+    def __str__(self):
+        s = [
+            str(self.connection) if self.connection else 'None',
+            'sync' if self.sync else 'async',
+            'no real DSA' if self.no_real_dsa else 'real DSA',
+            'pooled' if self.pooled else 'not pooled',
+            'can stream output' if self.can_stream else 'cannot stream output',
+        ]
+        return ' - '.join(s)
+
+    def open(self, reset_usage=True, read_server_info=True):
+        """
+        Open a socket to a server. Choose a server from the server pool if available
+        """
+        if log_enabled(NETWORK):
+            log(NETWORK, 'opening connection for <%s>', self.connection)
+        if self.connection.lazy and not self.connection._executing_deferred:
+            self.connection._deferred_open = True
+            self.connection.closed = False
+            if log_enabled(NETWORK):
+                log(NETWORK, 'deferring open connection for <%s>', self.connection)
+        else:
+            if not self.connection.closed and not self.connection._executing_deferred:  # try to close connection if still open
+                self.close()
+
+            self._outstanding = dict()
+            if self.connection.usage:
+                if reset_usage or not self.connection._usage.initial_connection_start_time:
+                    self.connection._usage.start()
+
+            if self.connection.server_pool:
+                new_server = self.connection.server_pool.get_server(self.connection)  # get a server from the server_pool if available
+                if self.connection.server != new_server:
+                    self.connection.server = new_server
+                    if self.connection.usage:
+                        self.connection._usage.servers_from_pool += 1
+
+            exception_history = []
+            if not self.no_real_dsa:  # tries to connect to a real server
+                for candidate_address in self.connection.server.candidate_addresses():
+                    try:
+                        if log_enabled(BASIC):
+                            log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
+                        self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
+                        self.connection.server.current_address = candidate_address
+                        self.connection.server.update_availability(candidate_address, True)
+                        break
+                    except Exception as e:
+                        self.connection.server.update_availability(candidate_address, False)
+                        # exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4]))
+                        exception_history.append((type(e)(str(e)), candidate_address[4]))
+                if not self.connection.server.current_address and exception_history:
+                    if len(exception_history) == 1:  # only one exception, reraise
+                        if log_enabled(ERROR):
+                            log(ERROR, '<%s> for <%s>', str(exception_history[0][0]) + ' ' + str((exception_history[0][1])), self.connection)
+                        raise exception_history[0][0]
+                    else:
+                        if log_enabled(ERROR):
+                            log(ERROR, 'unable to open socket for <%s>', self.connection)
+                        raise LDAPSocketOpenError('unable to open socket', exception_history)
+                elif not self.connection.server.current_address:
+                    if log_enabled(ERROR):
+                        log(ERROR, 'invalid server address for <%s>', self.connection)
+                    raise LDAPSocketOpenError('invalid server address')
+
+            self.connection._deferred_open = False
+            self._start_listen()
+            # self.connection.do_auto_bind()
+            if log_enabled(NETWORK):
+                log(NETWORK, 'connection open for <%s>', self.connection)
+
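# A minimal standalone sketch of the connect-with-fallback pattern that open()
# above uses: iterate candidate addresses, remember every failure, and raise an
# aggregate error only when all candidates fail. Function and variable names are
# illustrative, not part of the ldap3 API.
import socket

def open_first_reachable(host, port, connect_timeout=5):
    """Try every address from getaddrinfo; raise with the full history if none works."""
    exception_history = []
    for family, kind, proto, _, sockaddr in socket.getaddrinfo(host, port, proto=socket.IPPROTO_TCP):
        sock = socket.socket(family, kind, proto)
        sock.settimeout(connect_timeout)
        try:
            sock.connect(sockaddr)
            return sock  # first reachable candidate wins, as in BaseStrategy.open()
        except OSError as e:
            sock.close()
            exception_history.append((sockaddr, e))  # remember which address failed and why
    raise ConnectionError('unable to open socket', exception_history)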
+    def close(self):
+        """
+        Close connection
+        """
+        if log_enabled(NETWORK):
+            log(NETWORK, 'closing connection for <%s>', self.connection)
+        if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
+            self.connection.listening = False
+            self.connection.closed = True
+            if log_enabled(NETWORK):
+                log(NETWORK, 'deferred connection closed for <%s>', self.connection)
+        else:
+            if not self.connection.closed:
+                self._stop_listen()
+                if not self.no_real_dsa:
+                    self._close_socket()
+            if log_enabled(NETWORK):
+                log(NETWORK, 'connection closed for <%s>', self.connection)
+
+        self.connection.bound = False
+        self.connection.request = None
+        self.connection.response = None
+        self.connection.tls_started = False
+        self._outstanding = None
+        self._referrals = []
+
+        if not self.connection.strategy.no_real_dsa:
+            self.connection.server.current_address = None
+        if self.connection.usage:
+            self.connection._usage.stop()
+
+    def _open_socket(self, address, use_ssl=False, unix_socket=False):
+        """
+        Tries to open and connect a socket to a Server
+        raise LDAPExceptionError if unable to open or connect socket
+        """
+        try:
+            self.connection.socket = socket.socket(*address[:3])
+        except Exception as e:
+            self.connection.last_error = 'socket creation error: ' + str(e)
+            if log_enabled(ERROR):
+                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+        # Try to bind the socket locally before connecting to the remote address
+        # We go through our connection's source ports and try to bind our socket to our connection's source address
+        # with them.
+        # If no source address or ports were specified, this will have the same success/fail result as if we
+        # tried to connect to the remote server without binding locally first.
+        # This is actually a little bit better, as it lets us distinguish the case of "issue binding the socket
+        # locally" from "remote server is unavailable" with more clarity, though this will only really be an
+        # issue when no source address/port is specified if the system checking server availability is running
+        # as a very unprivileged user.
+        last_bind_exc = None
+        socket_bind_succeeded = False
+        for source_port in self.connection.source_port_list:
+            try:
+                self.connection.socket.bind((self.connection.source_address, source_port))
+                socket_bind_succeeded = True
+                break
+            except Exception as bind_ex:
+                last_bind_exc = bind_ex
+                # we'll always end up logging at error level if we cannot bind any ports to the address locally.
+                # but if some work and some don't you probably don't want the ones that don't at ERROR level
+                if log_enabled(NETWORK):
+                    log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>',
+                        self.connection.source_address, source_port, bind_ex)
+        if not socket_bind_succeeded:
+            self.connection.last_error = 'socket connection error while locally binding: ' + str(last_bind_exc)
+            if log_enabled(ERROR):
+                log(ERROR, 'Unable to locally bind to local address <%s> with any of the source ports <%s> for connection <%s> due to <%s>',
+                    self.connection.source_address, self.connection.source_port_list, self.connection, last_bind_exc)
+            raise communication_exception_factory(LDAPSocketOpenError, type(last_bind_exc)(str(last_bind_exc)))(last_bind_exc)
+
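# The comment block above motivates binding the socket to a local source
# address/port before connect(), so a local bind failure is not mistaken for an
# unreachable server. A rough sketch of that pattern (helper name is made up;
# ldap3 drives the same loop from connection.source_address and
# connection.source_port_list, where port 0 means "let the OS pick"):
import socket

def bind_to_source(sock, source_address='', source_ports=(0,)):
    last_exc = None
    for port in source_ports:  # port 0 asks the OS for an ephemeral port
        try:
            sock.bind((source_address, port))
            return port  # first port the OS lets us claim
        except OSError as e:
            last_exc = e  # e.g. EACCES on privileged ports, EADDRINUSE if taken
    raise OSError('could not bind any source port') from last_exc

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
bind_to_source(sock)  # bind first...
# sock.connect(('ldap.example.com', 389))  # ...then connect: bind errors stay distinguishable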
+        try:  # set socket timeout for opening connection
+            if self.connection.server.connect_timeout:
+                self.connection.socket.settimeout(self.connection.server.connect_timeout)
+            self.connection.socket.connect(address[4])
+        except socket.error as e:
+            self.connection.last_error = 'socket connection error while opening: ' + str(e)
+            if log_enabled(ERROR):
+                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+        # Set connection recv timeout (must be set after connect,
+        # because socket.settimeout() affects both connect() as
+        # well as recv()). Set it before tls.wrap_socket() because
+        # the recv timeout should take effect during the TLS
+        # handshake.
+        if self.connection.receive_timeout is not None:
+            try:  # set receive timeout for the connection socket
+                self.connection.socket.settimeout(self.connection.receive_timeout)
+                if system().lower() == 'windows':
+                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
+                else:
+                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
+            except socket.error as e:
+                self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
+
+                # if exc:
+                #     if log_enabled(ERROR):
+                #         log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                #     raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+                if log_enabled(ERROR):
+                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+        if use_ssl:
+            try:
+                self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
+                if self.connection.usage:
+                    self.connection._usage.wrapped_sockets += 1
+            except Exception as e:
+                self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
+                if log_enabled(ERROR):
+                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+        if self.connection.usage:
+            self.connection._usage.open_sockets += 1
+
+        self.connection.closed = False
+
+    def _close_socket(self):
+        """
+        Try to close a socket
+        don't raise exception if unable to close socket, assume socket is already closed
+        """
+
+        try:
+            self.connection.socket.shutdown(socket.SHUT_RDWR)
+        except Exception:
+            pass
+
+        try:
+            self.connection.socket.close()
+        except Exception:
+            pass
+
+        self.connection.socket = None
+        self.connection.closed = True
+
+        if self.connection.usage:
+            self.connection._usage.closed_sockets += 1
+
+    def _stop_listen(self):
+        self.connection.listening = False
+
+    def send(self, message_type, request, controls=None):
+        """
+        Send an LDAP message
+        Returns the message_id
+        """
+        self.connection.request = None
+        if self.connection.listening:
+            if self.connection.sasl_in_progress and message_type not in ['bindRequest']:  # as per RFC4511 (4.2.1)
+                self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
+                if log_enabled(ERROR):
+                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                raise LDAPSASLBindInProgressError(self.connection.last_error)
+            message_id = self.connection.server.next_message_id()
+            ldap_message = LDAPMessage()
+            ldap_message['messageID'] = MessageID(message_id)
+            ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
+            message_controls = build_controls_list(controls)
+            if message_controls is not None:
+                ldap_message['controls'] = message_controls
+            self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
+            self._outstanding[message_id] = self.connection.request
+            self.sending(ldap_message)
+        else:
+            self.connection.last_error = 'unable to send message, socket is not open'
+            if log_enabled(ERROR):
+                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+            raise LDAPSocketOpenError(self.connection.last_error)
+
+        return message_id
+
+    def get_response(self, message_id, timeout=None, get_request=False):
+        """
+        Get response LDAP messages
+        Responses are returned by the underlying connection strategy
+        Check if the LDAP message for message_id is still outstanding and wait up to timeout for it to appear in _get_response
+        Result is stored in connection.result
+        Responses without a result are stored in connection.response
+        A tuple (responses, result) is returned
+        """
+        if timeout is None:
+            timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
+        response = None
+        result = None
+        request = None
+        if self._outstanding and message_id in self._outstanding:
+            responses = self._get_response(message_id, timeout)
+
+            if not responses:
+                if log_enabled(ERROR):
+                    log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
+                raise LDAPResponseTimeoutError('no response from server')
+
+            if responses == SESSION_TERMINATED_BY_SERVER:
+                try:  # try to close the session but don't raise any error if server has already closed the session
+                    self.close()
+                except (socket.error, LDAPExceptionError):
+                    pass
+                self.connection.last_error = 'session terminated by server'
+                if log_enabled(ERROR):
+                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                raise LDAPSessionTerminatedByServerError(self.connection.last_error)
+            elif responses == TRANSACTION_ERROR:  # Novell LDAP Transaction unsolicited notification
+                self.connection.last_error = 'transaction error'
+                if log_enabled(ERROR):
+                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                raise LDAPTransactionError(self.connection.last_error)
+
+            # if referral in response opens a new connection to resolve referrals if requested
+
+            if responses[-2]['result'] == RESULT_REFERRAL:
+                if self.connection.usage:
+                    self.connection._usage.referrals_received += 1
+                if self.connection.auto_referrals:
+                    ref_response, ref_result =
self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals']) + if ref_response is not None: + responses = ref_response + [ref_result] + responses.append(RESPONSE_COMPLETE) + elif ref_result is not None: + responses = [ref_result, RESPONSE_COMPLETE] + + self._referrals = [] + + if responses: + result = responses[-2] + response = responses[:-2] + self.connection.result = None + self.connection.response = None + + if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection) + self._outstanding.pop(message_id) + self.connection.result = result.copy() + raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type']) + + # checks if any response has a range tag + # self._auto_range_searching is set as a flag to avoid recursive searches + if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)): + self._auto_range_searching = result.copy() + temp_response = response[:] # copy + if self.do_search_on_auto_range(self._outstanding[message_id], response): + for resp in temp_response: + if resp['type'] == 'searchResEntry': + keys = [key for key in resp['raw_attributes'] if ';range=' in key] + for key in keys: + del resp['raw_attributes'][key] + del resp['attributes'][key] + response = temp_response + result = self._auto_range_searching + del self._auto_range_searching + + if self.connection.empty_attributes: + for entry in response: + if entry['type'] == 'searchResEntry': + for attribute_type in self._outstanding[message_id]['attributes']: + if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES): + entry['raw_attributes'][attribute_type] = list() + entry['attributes'][attribute_type] = list() + if log_enabled(PROTOCOL): + log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self) + if not self.connection.auto_range: + attrs_to_remove = [] + # removes original empty attribute in case a range tag is returned + for attribute_type in entry['attributes']: + if ';range' in attribute_type.lower(): + orig_attr, _, _ = attribute_type.partition(';') + attrs_to_remove.append(orig_attr) + for attribute_type in attrs_to_remove: + if log_enabled(PROTOCOL): + log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self) + del entry['raw_attributes'][attribute_type] + del entry['attributes'][attribute_type] + + request = self._outstanding.pop(message_id) + else: + if log_enabled(ERROR): + log(ERROR, 'message id not in outstanding queue for <%s>', self.connection) + raise(LDAPResponseTimeoutError('message id not in outstanding queue')) + + if get_request: + return response, result, request + else: + return response, result + + @staticmethod + def compute_ldap_message_size(data): + """ + Compute LDAP Message size according to BER definite length rules + Returns -1 if too few data to compute message length + """ + if isinstance(data, str): # fix for Python 2, data is string not bytes + data = bytearray(data) # Python 2 bytearray is equivalent to Python 3 bytes + + ret_value = -1 + if len(data) > 2: + if data[1] <= 127: 
# BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long + ret_value = data[1] + 2 + else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length + bytes_length = data[1] - 128 + if len(data) >= bytes_length + 2: + value_length = 0 + cont = bytes_length + for byte in data[2:2 + bytes_length]: + cont -= 1 + value_length += byte * (256 ** cont) + ret_value = value_length + 2 + bytes_length + + return ret_value + + def decode_response(self, ldap_message): + """ + Convert received LDAPMessage to a dict + """ + message_type = ldap_message.getComponentByName('protocolOp').getName() + component = ldap_message['protocolOp'].getComponent() + controls = ldap_message['controls'] if ldap_message['controls'].hasValue() else None + if message_type == 'bindResponse': + if not bytes(component['matchedDN']).startswith(b'NTLM'): # patch for microsoft ntlm authentication + result = bind_response_to_dict(component) + else: + result = sicily_bind_response_to_dict(component) + elif message_type == 'searchResEntry': + result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names) + elif message_type == 'searchResDone': + result = search_result_done_response_to_dict(component) + elif message_type == 'searchResRef': + result = search_result_reference_response_to_dict(component) + elif message_type == 'modifyResponse': + result = modify_response_to_dict(component) + elif message_type == 'addResponse': + result = add_response_to_dict(component) + elif message_type == 'delResponse': + result = delete_response_to_dict(component) + elif message_type == 'modDNResponse': + result = modify_dn_response_to_dict(component) + elif message_type == 'compareResponse': + result = compare_response_to_dict(component) + elif message_type == 'extendedResp': + result = extended_response_to_dict(component) + elif message_type == 'intermediateResponse': + result = intermediate_response_to_dict(component) + else: + if log_enabled(ERROR): + log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection) + raise LDAPUnknownResponseError('unknown response') + result['type'] = message_type + if controls: + result['controls'] = dict() + for control in controls: + decoded_control = self.decode_control(control) + result['controls'][decoded_control[0]] = decoded_control[1] + return result + + def decode_response_fast(self, ldap_message): + """ + Convert received LDAPMessage from fast ber decoder to a dict + """ + if ldap_message['protocolOp'] == 1: # bindResponse + if not ldap_message['payload'][1][3].startswith(b'NTLM'): # patch for microsoft ntlm authentication + result = bind_response_to_dict_fast(ldap_message['payload']) + else: + result = sicily_bind_response_to_dict_fast(ldap_message['payload']) + result['type'] = 'bindResponse' + elif ldap_message['protocolOp'] == 4: # searchResEntry' + result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names) + result['type'] = 'searchResEntry' + elif ldap_message['protocolOp'] == 5: # searchResDone + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'searchResDone' + elif ldap_message['protocolOp'] == 19: # searchResRef + result = 
search_result_reference_response_to_dict_fast(ldap_message['payload']) + result['type'] = 'searchResRef' + elif ldap_message['protocolOp'] == 7: # modifyResponse + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'modifyResponse' + elif ldap_message['protocolOp'] == 9: # addResponse + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'addResponse' + elif ldap_message['protocolOp'] == 11: # delResponse + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'delResponse' + elif ldap_message['protocolOp'] == 13: # modDNResponse + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'modDNResponse' + elif ldap_message['protocolOp'] == 15: # compareResponse + result = ldap_result_to_dict_fast(ldap_message['payload']) + result['type'] = 'compareResponse' + elif ldap_message['protocolOp'] == 24: # extendedResp + result = extended_response_to_dict_fast(ldap_message['payload']) + result['type'] = 'extendedResp' + elif ldap_message['protocolOp'] == 25: # intermediateResponse + result = intermediate_response_to_dict_fast(ldap_message['payload']) + result['type'] = 'intermediateResponse' + else: + if log_enabled(ERROR): + log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection) + raise LDAPUnknownResponseError('unknown response') + if ldap_message['controls']: + result['controls'] = dict() + for control in ldap_message['controls']: + decoded_control = self.decode_control_fast(control[3]) + result['controls'][decoded_control[0]] = decoded_control[1] + return result + + @staticmethod + def decode_control(control): + """ + decode control, return a 2-element tuple where the first element is the control oid + and the second element is a dictionary with description (from Oids), criticality and decoded control value + """ + control_type = str(control['controlType']) + criticality = bool(control['criticality']) + control_value = bytes(control['controlValue']) + unprocessed = None + if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696 + control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue()) + control_value = dict() + control_value['size'] = int(control_resp['size']) + control_value['cookie'] = bytes(control_resp['cookie']) + elif control_type == '1.2.840.113556.1.4.841': # DirSync AD + control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue()) + control_value = dict() + control_value['more_results'] = bool(control_resp['MoreResults']) # more_result if nonzero + control_value['cookie'] = bytes(control_resp['CookieServer']) + elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527 + control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry()) + control_value = dict() + control_value['result'] = attributes_to_dict(control_resp['attributes']) + if unprocessed: + if log_enabled(ERROR): + log(ERROR, 'unprocessed control response in substrate') + raise LDAPControlError('unprocessed control response in substrate') + return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value} + + @staticmethod + def decode_control_fast(control, from_server=True): + """ + decode control, return a 2-element tuple where the first element is the control oid + and the second element is a dictionary with description (from Oids), criticality and 
decoded control value
+        """
+        control_type = str(to_unicode(control[0][3], from_server=from_server))
+        criticality = False
+        control_value = None
+        for r in control[1:]:
+            if r[2] == 4:  # controlValue
+                control_value = r[3]
+            else:
+                criticality = False if r[3] == 0 else True  # criticality (boolean, defaults to False)
+        if control_type == '1.2.840.113556.1.4.319':  # simple paged search as per RFC2696
+            control_resp = decode_sequence(control_value, 0, len(control_value))
+            control_value = dict()
+            control_value['size'] = int(control_resp[0][3][0][3])
+            control_value['cookie'] = bytes(control_resp[0][3][1][3])
+        elif control_type == '1.2.840.113556.1.4.841':  # DirSync AD
+            control_resp = decode_sequence(control_value, 0, len(control_value))
+            control_value = dict()
+            control_value['more_results'] = True if control_resp[0][3][0][3] else False  # more_result if nonzero
+            control_value['cookie'] = control_resp[0][3][2][3]
+        elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2':  # Pre-Read control, Post-Read Control as per RFC 4527
+            control_resp = decode_sequence(control_value, 0, len(control_value))
+            control_value = dict()
+            control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
+        return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
+
+    @staticmethod
+    def decode_request(message_type, component, controls=None):
+        # message_type = ldap_message.getComponentByName('protocolOp').getName()
+        # component = ldap_message['protocolOp'].getComponent()
+        if message_type == 'bindRequest':
+            result = bind_request_to_dict(component)
+        elif message_type == 'unbindRequest':
+            result = dict()
+        elif message_type == 'addRequest':
+            result = add_request_to_dict(component)
+        elif message_type == 'compareRequest':
+            result = compare_request_to_dict(component)
+        elif message_type == 'delRequest':
+            result = delete_request_to_dict(component)
+        elif message_type == 'extendedReq':
+            result = extended_request_to_dict(component)
+        elif message_type == 'modifyRequest':
+            result = modify_request_to_dict(component)
+        elif message_type == 'modDNRequest':
+            result = modify_dn_request_to_dict(component)
+        elif message_type == 'searchRequest':
+            result = search_request_to_dict(component)
+        elif message_type == 'abandonRequest':
+            result = abandon_request_to_dict(component)
+        else:
+            if log_enabled(ERROR):
+                log(ERROR, 'unknown request <%s>', message_type)
+            raise LDAPUnknownRequestError('unknown request')
+        result['type'] = message_type
+        result['controls'] = controls
+
+        return result
+
+    def valid_referral_list(self, referrals):
+        referral_list = []
+        for referral in referrals:
+            candidate_referral = parse_uri(referral)
+            if candidate_referral:
+                for ref_host in self.connection.server.allowed_referral_hosts:
+                    if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
+                        if candidate_referral['host'] not in self._referrals:
+                            candidate_referral['anonymousBindOnly'] = not ref_host[1]
+                            referral_list.append(candidate_referral)
+                            break
+
+        return referral_list
+
+    def do_next_range_search(self, request, response, attr_name):
+        done = False
+        current_response = response
+        while not done:
+            attr_type, _, returned_range = attr_name.partition(';range=')
+            _, _, high_range = returned_range.partition('-')
+            response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
+            response['attributes'][attr_type] += current_response['attributes'][attr_name]
+            if high_range != '*':
+                if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection) + requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*' + result = self.connection.search(search_base=response['dn'], + search_filter='(objectclass=*)', + search_scope=BASE, + dereference_aliases=request['dereferenceAlias'], + attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*']) + if isinstance(result, bool): + if result: + current_response = self.connection.response[0] + else: + done = True + else: + current_response, _ = self.get_response(result) + current_response = current_response[0] + + if not done: + if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0: + del current_response['raw_attributes'][requested_range] + del current_response['attributes'][requested_range] + attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0] + continue + + done = True + + def do_search_on_auto_range(self, request, response): + for resp in [r for r in response if r['type'] == 'searchResEntry']: + for attr_name in list(resp['raw_attributes'].keys()): # generate list to avoid changing of dict size error + if ';range=' in attr_name: + attr_type, _, range_values = attr_name.partition(';range=') + if range_values in ('1-1', '0-0'): # DirSync returns these values for adding and removing members + return False + if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None: + resp['raw_attributes'][attr_type] = list() + if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None: + resp['attributes'][attr_type] = list() + self.do_next_range_search(request, resp, attr_name) + return True + + def create_referral_connection(self, referrals): + referral_connection = None + selected_referral = None + cachekey = None + valid_referral_list = self.valid_referral_list(referrals) + if valid_referral_list: + preferred_referral_list = [referral for referral in valid_referral_list if + referral['ssl'] == self.connection.server.ssl] + selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice( + valid_referral_list) + + cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl']) + if self.connection.use_referral_cache and cachekey in self.referral_cache: + referral_connection = self.referral_cache[cachekey] + else: + referral_server = Server(host=selected_referral['host'], + port=selected_referral['port'] or self.connection.server.port, + use_ssl=selected_referral['ssl'], + get_info=self.connection.server.get_info, + formatter=self.connection.server.custom_formatter, + connect_timeout=self.connection.server.connect_timeout, + mode=self.connection.server.mode, + allowed_referral_hosts=self.connection.server.allowed_referral_hosts, + tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file, + local_certificate_file=self.connection.server.tls.certificate_file, + validate=self.connection.server.tls.validate, + version=self.connection.server.tls.version, + ca_certs_file=self.connection.server.tls.ca_certs_file) if + selected_referral['ssl'] else None) + + from ..core.connection import Connection + + referral_connection = Connection(server=referral_server, + user=self.connection.user if not selected_referral['anonymousBindOnly'] else None, + password=self.connection.password if not selected_referral['anonymousBindOnly'] 
else None, + version=self.connection.version, + authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS, + client_strategy=SYNC, + auto_referrals=True, + read_only=self.connection.read_only, + check_names=self.connection.check_names, + raise_exceptions=self.connection.raise_exceptions, + fast_decoder=self.connection.fast_decoder, + receive_timeout=self.connection.receive_timeout, + sasl_mechanism=self.connection.sasl_mechanism, + sasl_credentials=self.connection.sasl_credentials) + + if self.connection.usage: + self.connection._usage.referrals_connections += 1 + + referral_connection.open() + referral_connection.strategy._referrals = self._referrals + if self.connection.tls_started and not referral_server.ssl: # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection + referral_connection.start_tls() + + if self.connection.bound: + referral_connection.bind() + + if self.connection.usage: + self.connection._usage.referrals_followed += 1 + + return selected_referral, referral_connection, cachekey + + def do_operation_on_referral(self, request, referrals): + if log_enabled(PROTOCOL): + log(PROTOCOL, 'following referral for <%s>', self.connection) + selected_referral, referral_connection, cachekey = self.create_referral_connection(referrals) + if selected_referral: + if request['type'] == 'searchRequest': + referral_connection.search(selected_referral['base'] or request['base'], + selected_referral['filter'] or request['filter'], + selected_referral['scope'] or request['scope'], + request['dereferenceAlias'], + selected_referral['attributes'] or request['attributes'], + request['sizeLimit'], + request['timeLimit'], + request['typesOnly'], + controls=request['controls']) + elif request['type'] == 'addRequest': + referral_connection.add(selected_referral['base'] or request['entry'], + None, + request['attributes'], + controls=request['controls']) + elif request['type'] == 'compareRequest': + referral_connection.compare(selected_referral['base'] or request['entry'], + request['attribute'], + request['value'], + controls=request['controls']) + elif request['type'] == 'delRequest': + referral_connection.delete(selected_referral['base'] or request['entry'], + controls=request['controls']) + elif request['type'] == 'extendedReq': + referral_connection.extended(request['name'], + request['value'], + controls=request['controls'], + no_encode=True + ) + elif request['type'] == 'modifyRequest': + referral_connection.modify(selected_referral['base'] or request['entry'], + prepare_changes_for_request(request['changes']), + controls=request['controls']) + elif request['type'] == 'modDNRequest': + referral_connection.modify_dn(selected_referral['base'] or request['entry'], + request['newRdn'], + request['deleteOldRdn'], + request['newSuperior'], + controls=request['controls']) + else: + self.connection.last_error = 'referral operation not permitted' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPReferralError(self.connection.last_error) + + response = referral_connection.response + result = referral_connection.result + if self.connection.use_referral_cache: + self.referral_cache[cachekey] = referral_connection + else: + referral_connection.unbind() + else: + response = None + result = None + + return response, result + + def sending(self, ldap_message): + if log_enabled(NETWORK): + log(NETWORK, 'sending 1 ldap message for 
<%s>', self.connection) + try: + encoded_message = encode(ldap_message) + self.connection.socket.sendall(encoded_message) + if log_enabled(EXTENDED): + log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>')) + if log_enabled(NETWORK): + log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection) + except socket.error as e: + self.connection.last_error = 'socket sending error' + str(e) + encoded_message = None + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + # raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error) + raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error) + if self.connection.usage: + self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message)) + + def _start_listen(self): + # overridden on strategy class + raise NotImplementedError + + def _get_response(self, message_id, timeout): + # overridden in strategy class + raise NotImplementedError + + def receiving(self): + # overridden in strategy class + raise NotImplementedError + + def post_send_single_response(self, message_id): + # overridden in strategy class + raise NotImplementedError + + def post_send_search(self, message_id): + # overridden in strategy class + raise NotImplementedError + + def get_stream(self): + raise NotImplementedError + + def set_stream(self, value): + raise NotImplementedError + + def unbind_referral_cache(self): + while len(self.referral_cache) > 0: + cachekey, referral_connection = self.referral_cache.popitem() + referral_connection.unbind() diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/ldifProducer.py b/server/www/packages/packages-linux/x64/ldap3/strategy/ldifProducer.py index 119e172..392239e 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/ldifProducer.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/ldifProducer.py @@ -1,148 +1,150 @@ -""" -""" - -# Created on 2013.07.15 -# -# Author: Giovanni Cannata -# -# Copyright 2013 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from io import StringIO -from os import linesep -import random - -from ..core.exceptions import LDAPLDIFError -from ..utils.conv import prepare_for_stream -from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header -from ..protocol.convert import build_controls_list -from .base import BaseStrategy - - -class LdifProducerStrategy(BaseStrategy): - """ - This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations. - You send the request and get the request in the ldif-change representation of the operation. - NO OPERATION IS SENT TO THE LDAP SERVER! 
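# The docstring above is the whole contract of this strategy: operations are
# rendered as LDIF change records and nothing is sent on the wire. A usage
# sketch (server name and DN are invented; based on the documented
# client_strategy=LDIF pattern):
from ldap3 import Server, Connection, LDIF

conn = Connection(Server('fake_server'), client_strategy=LDIF)
conn.open()  # initializes the in-memory LDIF stream instead of a socket
conn.add('cn=test-add,o=lab',
         attributes={'objectClass': 'inetOrgPerson', 'sn': 'test-add'})
print(conn.response)  # should hold the ldif-change text of the add operation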
- Connection.request will contain the result LDAP message in a dict form - Connection.response will contain the ldif-change format of the requested operation if available - You don't need a real server to connect to for this strategy - """ - - def __init__(self, ldap_connection): - BaseStrategy.__init__(self, ldap_connection) - self.sync = True - self.no_real_dsa = True - self.pooled = False - self.can_stream = True - self.line_separator = linesep - self.all_base64 = False - self.stream = None - self.order = dict() - self._header_added = False - random.seed() - - def _open_socket(self, address, use_ssl=False, unix_socket=False): # fake open socket - self.connection.socket = NotImplemented # placeholder for a dummy socket - if self.connection.usage: - self.connection._usage.open_sockets += 1 - - self.connection.closed = False - - def _close_socket(self): - if self.connection.usage: - self.connection._usage.closed_sockets += 1 - - self.connection.socket = None - self.connection.closed = True - - def _start_listen(self): - self.connection.listening = True - self.connection.closed = False - self._header_added = False - if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed): - self.set_stream(StringIO()) - - def _stop_listen(self): - self.stream.close() - self.connection.listening = False - self.connection.closed = True - - def receiving(self): - return None - - def send(self, message_type, request, controls=None): - """ - Build the LDAPMessage without sending to server - """ - message_id = random.randint(0, LDAP_MAX_INT) - ldap_message = LDAPMessage() - ldap_message['messageID'] = MessageID(message_id) - ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request) - message_controls = build_controls_list(controls) - if message_controls is not None: - ldap_message['controls'] = message_controls - - self.connection.request = BaseStrategy.decode_request(message_type, request, controls) - self.connection.request['controls'] = controls - self._outstanding[message_id] = self.connection.request - return message_id - - def post_send_single_response(self, message_id): - self.connection.response = None - self.connection.result = None - if self._outstanding and message_id in self._outstanding: - request = self._outstanding.pop(message_id) - ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type'])) - if self.stream and ldif_lines and not self.connection.closed: - self.accumulate_stream(self.line_separator.join(ldif_lines)) - ldif_lines = add_ldif_header(ldif_lines) - self.connection.response = self.line_separator.join(ldif_lines) - return self.connection.response - - return None - - def post_send_search(self, message_id): - raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations') - - def _get_response(self, message_id): - pass - - def accumulate_stream(self, fragment): - if not self._header_added and self.stream.tell() == 0: - self._header_added = True - header = add_ldif_header(['-'])[0] - self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator)) - self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator)) - - def get_stream(self): - return self.stream - - def set_stream(self, value): - error = False - try: - if not value.writable(): - error = True - except (ValueError, AttributeError): - error = True - - if error: - raise LDAPLDIFError('stream must be writable') - - self.stream = value +""" 
+""" + +# Created on 2013.07.15 +# +# Author: Giovanni Cannata +# +# Copyright 2013 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from io import StringIO +from os import linesep +import random + +from ..core.exceptions import LDAPLDIFError +from ..utils.conv import prepare_for_stream +from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT +from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header +from ..protocol.convert import build_controls_list +from .base import BaseStrategy + + +class LdifProducerStrategy(BaseStrategy): + """ + This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations. + You send the request and get the request in the ldif-change representation of the operation. + NO OPERATION IS SENT TO THE LDAP SERVER! + Connection.request will contain the result LDAP message in a dict form + Connection.response will contain the ldif-change format of the requested operation if available + You don't need a real server to connect to for this strategy + """ + + def __init__(self, ldap_connection): + BaseStrategy.__init__(self, ldap_connection) + self.sync = True + self.no_real_dsa = True + self.pooled = False + self.can_stream = True + self.line_separator = linesep + self.all_base64 = False + self.stream = None + self.order = dict() + self._header_added = False + random.seed() + + def _open_socket(self, address, use_ssl=False, unix_socket=False): # fake open socket + self.connection.socket = NotImplemented # placeholder for a dummy socket + if self.connection.usage: + self.connection._usage.open_sockets += 1 + + self.connection.closed = False + + def _close_socket(self): + if self.connection.usage: + self.connection._usage.closed_sockets += 1 + + self.connection.socket = None + self.connection.closed = True + + def _start_listen(self): + self.connection.listening = True + self.connection.closed = False + self._header_added = False + if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed): + self.set_stream(StringIO()) + + def _stop_listen(self): + self.stream.close() + self.connection.listening = False + self.connection.closed = True + + def receiving(self): + return None + + def send(self, message_type, request, controls=None): + """ + Build the LDAPMessage without sending to server + """ + message_id = random.randint(0, LDAP_MAX_INT) + ldap_message = LDAPMessage() + ldap_message['messageID'] = MessageID(message_id) + ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request) + message_controls = build_controls_list(controls) + if message_controls is not None: + ldap_message['controls'] = message_controls + + self.connection.request = BaseStrategy.decode_request(message_type, request, controls) + self.connection.request['controls'] = controls + if self._outstanding is None: + self._outstanding = dict() 
+ self._outstanding[message_id] = self.connection.request + return message_id + + def post_send_single_response(self, message_id): + self.connection.response = None + self.connection.result = None + if self._outstanding and message_id in self._outstanding: + request = self._outstanding.pop(message_id) + ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type'])) + if self.stream and ldif_lines and not self.connection.closed: + self.accumulate_stream(self.line_separator.join(ldif_lines)) + ldif_lines = add_ldif_header(ldif_lines) + self.connection.response = self.line_separator.join(ldif_lines) + return self.connection.response + + return None + + def post_send_search(self, message_id): + raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations') + + def _get_response(self, message_id, timeout): + pass + + def accumulate_stream(self, fragment): + if not self._header_added and self.stream.tell() == 0: + self._header_added = True + header = add_ldif_header(['-'])[0] + self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator)) + self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator)) + + def get_stream(self): + return self.stream + + def set_stream(self, value): + error = False + try: + if not value.writable(): + error = True + except (ValueError, AttributeError): + error = True + + if error: + raise LDAPLDIFError('stream must be writable') + + self.stream = value diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/mockAsync.py b/server/www/packages/packages-linux/x64/ldap3/strategy/mockAsync.py index 2891506..f9965dc 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/mockAsync.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/mockAsync.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/mockBase.py b/server/www/packages/packages-linux/x64/ldap3/strategy/mockBase.py index f07c7c2..7acf706 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/mockBase.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/mockBase.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2016 - 2018 Giovanni Cannata +# Copyright 2016 - 2020 Giovanni Cannata # # This file is part of ldap3. 
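The LdifProducerStrategy hunk above builds the ldif-change representation of Add/Delete/Modify/ModifyDn requests without ever contacting a DSA. A minimal usage sketch, assuming the public `client_strategy=LDIF` wiring; the server name and DN are placeholders:

```python
# Hedged sketch: produce LDIF for an Add operation with no real LDAP server.
from ldap3 import Server, Connection, LDIF

conn = Connection(Server('fake_server'), client_strategy=LDIF)
conn.open()   # fake socket: _open_socket() only flips connection.closed
conn.add('cn=user1,ou=users,o=test', 'inetOrgPerson',
         {'sn': 'User', 'cn': 'user1'})
print(conn.response)  # ldif-change text built by post_send_single_response()
```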
# @@ -26,7 +26,6 @@ import json import re -from threading import Lock from random import SystemRandom from pyasn1.type.univ import OctetString @@ -224,7 +223,7 @@ class MockBaseStrategy(object): return False if attribute.lower() == 'objectclass' and self.connection.server.schema: # builds the objectClass hierarchy only if schema is present class_set = set() - for object_class in attributes['objectClass']: + for object_class in attributes[attribute]: if self.connection.server.schema.object_classes and object_class not in self.connection.server.schema.object_classes: return False # walkups the class hierarchy and buils a set of all classes in it @@ -654,6 +653,7 @@ class MockBaseStrategy(object): if '+' in attributes: # operational attributes requested attributes.extend(self.operational_attributes) attributes.remove('+') + attributes = [attr.lower() for attr in request['attributes']] filter_root = parse_filter(request['filter'], self.connection.server.schema, auto_escape=True, auto_encode=False, validator=self.connection.server.custom_validator, check_names=self.connection.check_names) @@ -687,7 +687,11 @@ class MockBaseStrategy(object): for attribute in self.connection.server.dit[match] if attribute.lower() in attributes or ALL_ATTRIBUTES in attributes] }) - + if '+' not in attributes: # remove operational attributes + for op_attr in self.operational_attributes: + for i, attr in enumerate(responses[len(responses)-1]['attributes']): + if attr['type'] == op_attr: + del responses[len(responses)-1]['attributes'][i] result_code = 0 message = '' @@ -724,12 +728,12 @@ class MockBaseStrategy(object): if extension[0] == '2.16.840.1.113719.1.27.100.31': # getBindDNRequest [NOVELL] result_code = 0 message = '' - response_name = '2.16.840.1.113719.1.27.100.32' # getBindDNResponse [NOVELL] + response_name = OctetString('2.16.840.1.113719.1.27.100.32') # getBindDNResponse [NOVELL] response_value = OctetString(self.bound) elif extension[0] == '1.3.6.1.4.1.4203.1.11.3': # WhoAmI [RFC4532] result_code = 0 message = '' - response_name = '1.3.6.1.4.1.4203.1.11.3' # WhoAmI [RFC4532] + response_name = OctetString('1.3.6.1.4.1.4203.1.11.3') # WhoAmI [RFC4532] response_value = OctetString(self.bound) break @@ -845,7 +849,6 @@ class MockBaseStrategy(object): attr_name = node.assertion['attr'] attr_value = node.assertion['value'] for candidate in candidates: - # if attr_name in self.connection.server.dit[candidate] and attr_value in self.connection.server.dit[candidate][attr_name]: if attr_name in self.connection.server.dit[candidate] and self.equal(candidate, attr_name, attr_value): node.matched.add(candidate) else: diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/mockSync.py b/server/www/packages/packages-linux/x64/ldap3/strategy/mockSync.py index b155781..efd2c15 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/mockSync.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/mockSync.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. 
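The mockBase hunks above fix objectClass hierarchy matching to use the requested attribute, strip operational attributes from plain searches, and wrap the Novell getBindDN and WhoAmI response names in OctetString. For context, a minimal mock session (hedged sketch; DNs and values are illustrative, while `strategy.add_entry()` is the MockBaseStrategy seeding hook defined in this file):

```python
from ldap3 import Server, Connection, MOCK_SYNC

server = Server('mock_server')
conn = Connection(server, user='cn=admin,o=test', password='secret',
                  client_strategy=MOCK_SYNC)
# Seed the in-memory DIT that the patched search code walks.
conn.strategy.add_entry('cn=admin,o=test',
                        {'objectClass': ['person'],
                         'userPassword': 'secret', 'sn': 'admin'})
conn.bind()
conn.search('o=test', '(objectClass=person)', attributes=['sn'])
print(conn.entries)
```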
# diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/restartable.py b/server/www/packages/packages-linux/x64/ldap3/strategy/restartable.py index 77ef4cd..d739f41 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/restartable.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/restartable.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2014 - 2018 Giovanni Cannata +# Copyright 2014 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/reusable.py b/server/www/packages/packages-linux/x64/ldap3/strategy/reusable.py index d70c4d9..01bd9d3 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/reusable.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/reusable.py @@ -1,493 +1,495 @@ -""" -""" - -# Created on 2014.03.23 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from datetime import datetime -from os import linesep -from threading import Thread, Lock -from time import sleep - -from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND -from .base import BaseStrategy -from ..core.usage import ConnectionUsage -from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError -from ..utils.log import log, log_enabled, ERROR, BASIC -from ..protocol.rfc4511 import LDAP_MAX_INT - -TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION' - -BOGUS_BIND = -1 -BOGUS_UNBIND = -2 -BOGUS_EXTENDED = -3 -BOGUS_ABANDON = -4 - -try: - from queue import Queue, Empty -except ImportError: # Python 2 - # noinspection PyUnresolvedReferences - from Queue import Queue, Empty - - -# noinspection PyProtectedMember -class ReusableStrategy(BaseStrategy): - """ - A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime. - The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of - connections that can be used as needed. Each connection lives in its own thread and has a busy/available status. - The strategy performs the requested operation on the first available connection. - The pool of connections is instantiated at strategy initialization. - Strategy has two customizable properties, the total number of connections in the pool and the lifetime of each connection. - When lifetime is expired the connection is closed and will be open again when needed. 
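The ReusableStrategy docstring above describes a pool of worker connections keyed by pool_name. A hedged sketch of the sharing behaviour implied by ConnectionPool.__new__ in this hunk (host, credentials, and pool name are placeholders):

```python
from ldap3 import Server, Connection, REUSABLE

server = Server('ldap.example.com')
c1 = Connection(server, user='cn=admin,o=test', password='secret',
                client_strategy=REUSABLE, pool_name='tp_pool', pool_size=4)
c1.open()   # starts the pool and spawns its worker threads
c2 = Connection(server, user='cn=admin,o=test', password='secret',
                client_strategy=REUSABLE, pool_name='tp_pool')
assert c1.strategy.pool is c2.strategy.pool  # a started pool is reused
```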
- """ - pools = dict() - - def receiving(self): - raise NotImplementedError - - def _start_listen(self): - raise NotImplementedError - - def _get_response(self, message_id): - raise NotImplementedError - - def get_stream(self): - raise NotImplementedError - - def set_stream(self, value): - raise NotImplementedError - - # noinspection PyProtectedMember - class ConnectionPool(object): - """ - Container for the Connection Threads - """ - def __new__(cls, connection): - if connection.pool_name in ReusableStrategy.pools: # returns existing connection pool - pool = ReusableStrategy.pools[connection.pool_name] - if not pool.started: # if pool is not started remove it from the pools singleton and create a new onw - del ReusableStrategy.pools[connection.pool_name] - return object.__new__(cls) - if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive: # change lifetime - pool.keepalive = connection.pool_keepalive - if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime: # change keepalive - pool.lifetime = connection.pool_lifetime - if connection.pool_size and pool.pool_size != connection.pool_size: # if pool size has changed terminate and recreate the connections - pool.terminate_pool() - pool.pool_size = connection.pool_size - return pool - else: - return object.__new__(cls) - - def __init__(self, connection): - if not hasattr(self, 'workers'): - self.name = connection.pool_name - self.master_connection = connection - self.workers = [] - self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE') - self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME') - self.keepalive = connection.pool_keepalive - self.request_queue = Queue() - self.open_pool = False - self.bind_pool = False - self.tls_pool = False - self._incoming = dict() - self.counter = 0 - self.terminated_usage = ConnectionUsage() if connection._usage else None - self.terminated = False - self.pool_lock = Lock() - ReusableStrategy.pools[self.name] = self - self.started = False - if log_enabled(BASIC): - log(BASIC, 'instantiated ConnectionPool: <%r>', self) - - def __str__(self): - s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated') - s += ' - responses in queue: ' + str(len(self._incoming)) - s += ' - pool size: ' + str(self.pool_size) - s += ' - lifetime: ' + str(self.lifetime) - s += ' - keepalive: ' + str(self.keepalive) - s += ' - open: ' + str(self.open_pool) - s += ' - bind: ' + str(self.bind_pool) - s += ' - tls: ' + str(self.tls_pool) + linesep - s += 'MASTER CONN: ' + str(self.master_connection) + linesep - s += 'WORKERS:' - if self.workers: - for i, worker in enumerate(self.workers): - s += linesep + str(i).rjust(5) + ': ' + str(worker) - else: - s += linesep + ' no active workers in pool' - - return s - - def __repr__(self): - return self.__str__() - - def get_info_from_server(self): - for worker in self.workers: - with worker.worker_lock: - if not worker.connection.server.schema or not worker.connection.server.info: - worker.get_info_from_server = True - else: - worker.get_info_from_server = False - - def rebind_pool(self): - for worker in self.workers: - with worker.worker_lock: - worker.connection.rebind(self.master_connection.user, - self.master_connection.password, - self.master_connection.authentication, - self.master_connection.sasl_mechanism, - self.master_connection.sasl_credentials) - - def start_pool(self): - if not self.started: - self.create_pool() - for worker in 
self.workers: - with worker.worker_lock: - worker.thread.start() - self.started = True - self.terminated = False - if log_enabled(BASIC): - log(BASIC, 'worker started for pool <%s>', self) - return True - return False - - def create_pool(self): - if log_enabled(BASIC): - log(BASIC, 'created pool <%s>', self) - self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)] - - def terminate_pool(self): - if not self.terminated: - if log_enabled(BASIC): - log(BASIC, 'terminating pool <%s>', self) - self.started = False - self.request_queue.join() # waits for all queue pending operations - for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])): # put a TERMINATE signal on the queue for each active thread - self.request_queue.put((TERMINATE_REUSABLE, None, None, None)) - self.request_queue.join() # waits for all queue terminate operations - self.terminated = True - if log_enabled(BASIC): - log(BASIC, 'pool terminated for <%s>', self) - - class PooledConnectionThread(Thread): - """ - The thread that holds the Reusable connection and receive operation request via the queue - Result are sent back in the pool._incoming list when ready - """ - def __init__(self, worker, master_connection): - Thread.__init__(self) - self.daemon = True - self.worker = worker - self.master_connection = master_connection - if log_enabled(BASIC): - log(BASIC, 'instantiated PooledConnectionThread: <%r>', self) - - # noinspection PyProtectedMember - def run(self): - self.worker.running = True - terminate = False - pool = self.master_connection.strategy.pool - while not terminate: - try: - counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive) - except Empty: # issue an Abandon(0) operation to keep the connection live - Abandon(0) is a harmless operation - if not self.worker.connection.closed: - self.worker.connection.abandon(0) - continue - - with self.worker.worker_lock: - self.worker.busy = True - if counter == TERMINATE_REUSABLE: - terminate = True - if self.worker.connection.bound: - try: - self.worker.connection.unbind() - if log_enabled(BASIC): - log(BASIC, 'thread terminated') - except LDAPExceptionError: - pass - else: - if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime: # destroy and create a new connection - try: - self.worker.connection.unbind() - except LDAPExceptionError: - pass - self.worker.new_connection() - if log_enabled(BASIC): - log(BASIC, 'thread respawn') - if message_type not in ['bindRequest', 'unbindRequest']: - try: - if pool.open_pool and self.worker.connection.closed: - self.worker.connection.open(read_server_info=False) - if pool.tls_pool and not self.worker.connection.tls_started: - self.worker.connection.start_tls(read_server_info=False) - if pool.bind_pool and not self.worker.connection.bound: - self.worker.connection.bind(read_server_info=False) - elif pool.open_pool and not self.worker.connection.closed: # connection already open, issues a start_tls - if pool.tls_pool and not self.worker.connection.tls_started: - self.worker.connection.start_tls(read_server_info=False) - if self.worker.get_info_from_server and counter: - self.worker.connection._fire_deferred() - self.worker.get_info_from_server = False - response = None - result = None - if message_type == 'searchRequest': - response = 
self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls)) - else: - response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls)) - result = self.worker.connection.result - with pool.pool_lock: - pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls)) - except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread - with pool.pool_lock: - pool._incoming[counter] = (type(e)(str(e)), None, None) - # except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread - # exc = e - # with pool.pool_lock: - # if exc: - # pool._incoming[counter] = (exc, None, None) - # else: - # pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls)) - - self.worker.busy = False - pool.request_queue.task_done() - self.worker.task_counter += 1 - if log_enabled(BASIC): - log(BASIC, 'thread terminated') - if self.master_connection.usage: - pool.terminated_usage += self.worker.connection.usage - self.worker.running = False - - class PooledConnectionWorker(object): - """ - Container for the restartable connection. it includes a thread and a lock to execute the connection in the pool - """ - def __init__(self, connection, request_queue): - self.master_connection = connection - self.request_queue = request_queue - self.running = False - self.busy = False - self.get_info_from_server = False - self.connection = None - self.creation_time = None - self.task_counter = 0 - self.new_connection() - self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection) - self.worker_lock = Lock() - if log_enabled(BASIC): - log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self) - - def __str__(self): - s = 'CONN: ' + str(self.connection) + linesep + ' THREAD: ' - s += 'running' if self.running else 'halted' - s += ' - ' + ('busy' if self.busy else 'available') - s += ' - ' + ('created at: ' + self.creation_time.isoformat()) - s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds) - s += ' - requests served: ' + str(self.task_counter) - - return s - - def new_connection(self): - from ..core.connection import Connection - # noinspection PyProtectedMember - self.creation_time = datetime.now() - self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server, - user=self.master_connection.user, - password=self.master_connection.password, - auto_bind=AUTO_BIND_NONE, # do not perform auto_bind because it reads again the schema - version=self.master_connection.version, - authentication=self.master_connection.authentication, - client_strategy=RESTARTABLE, - auto_referrals=self.master_connection.auto_referrals, - auto_range=self.master_connection.auto_range, - sasl_mechanism=self.master_connection.sasl_mechanism, - sasl_credentials=self.master_connection.sasl_credentials, - check_names=self.master_connection.check_names, - collect_usage=self.master_connection._usage, - read_only=self.master_connection.read_only, - raise_exceptions=self.master_connection.raise_exceptions, - lazy=False, - fast_decoder=self.master_connection.fast_decoder, - receive_timeout=self.master_connection.receive_timeout, - 
return_empty_attributes=self.master_connection.empty_attributes) - - # simulates auto_bind, always with read_server_info=False - if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]: - if log_enabled(BASIC): - log(BASIC, 'performing automatic bind for <%s>', self.connection) - self.connection.open(read_server_info=False) - if self.master_connection.auto_bind == AUTO_BIND_NO_TLS: - self.connection.bind(read_server_info=False) - elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND: - self.connection.start_tls(read_server_info=False) - self.connection.bind(read_server_info=False) - elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND: - self.connection.bind(read_server_info=False) - self.connection.start_tls(read_server_info=False) - - if self.master_connection.server_pool: - self.connection.server_pool = self.master_connection.server_pool - self.connection.server_pool.initialize(self.connection) - - # ReusableStrategy methods - def __init__(self, ldap_connection): - BaseStrategy.__init__(self, ldap_connection) - self.sync = False - self.no_real_dsa = False - self.pooled = True - self.can_stream = False - if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name: - self.pool = ReusableStrategy.ConnectionPool(ldap_connection) - else: - if log_enabled(ERROR): - log(ERROR, 'reusable connection must have a pool_name') - raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name') - - def open(self, reset_usage=True, read_server_info=True): - # read_server_info not used - self.pool.open_pool = True - self.pool.start_pool() - self.connection.closed = False - if self.connection.usage: - if reset_usage or not self.connection._usage.initial_connection_start_time: - self.connection._usage.start() - - def terminate(self): - self.pool.terminate_pool() - self.pool.open_pool = False - self.connection.bound = False - self.connection.closed = True - self.pool.bind_pool = False - self.pool.tls_pool = False - - def _close_socket(self): - """ - Doesn't really close the socket - """ - self.connection.closed = True - - if self.connection.usage: - self.connection._usage.closed_sockets += 1 - - def send(self, message_type, request, controls=None): - if self.pool.started: - if message_type == 'bindRequest': - self.pool.bind_pool = True - counter = BOGUS_BIND - elif message_type == 'unbindRequest': - self.pool.bind_pool = False - counter = BOGUS_UNBIND - elif message_type == 'abandonRequest': - counter = BOGUS_ABANDON - elif message_type == 'extendedReq' and self.connection.starting_tls: - self.pool.tls_pool = True - counter = BOGUS_EXTENDED - else: - with self.pool.pool_lock: - self.pool.counter += 1 - if self.pool.counter > LDAP_MAX_INT: - self.pool.counter = 1 - counter = self.pool.counter - self.pool.request_queue.put((counter, message_type, request, controls)) - return counter - if log_enabled(ERROR): - log(ERROR, 'reusable connection pool not started') - raise LDAPConnectionPoolNotStartedError('reusable connection pool not started') - - def validate_bind(self, controls): - # in case of a new connection or different credentials - if (self.connection.user != self.pool.master_connection.user or - self.connection.password != self.pool.master_connection.password or - self.connection.authentication != self.pool.master_connection.authentication or - self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or - self.connection.sasl_credentials != 
self.pool.master_connection.sasl_credentials): - self.pool.master_connection.user = self.connection.user - self.pool.master_connection.password = self.connection.password - self.pool.master_connection.authentication = self.connection.authentication - self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism - self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials - self.pool.rebind_pool() - temp_connection = self.pool.workers[0].connection - temp_connection.lazy = False - if not self.connection.server.schema or not self.connection.server.info: - result = self.pool.workers[0].connection.bind(controls=controls) - else: - result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False) - - temp_connection.unbind() - temp_connection.lazy = True - if result: - self.pool.bind_pool = True # bind pool if bind is validated - return result - - def get_response(self, counter, timeout=None, get_request=False): - sleeptime = get_config_parameter('RESPONSE_SLEEPTIME') - request=None - if timeout is None: - timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT') - if counter == BOGUS_BIND: # send a bogus bindResponse - response = list() - result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '', 'saslCreds': None} - elif counter == BOGUS_UNBIND: # bogus unbind response - response = None - result = None - elif counter == BOGUS_ABANDON: # abandon cannot be executed because of multiple connections - response = list() - result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''} - elif counter == BOGUS_EXTENDED: # bogus startTls extended response - response = list() - result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''} - self.connection.starting_tls = False - else: - response = None - result = None - while timeout >= 0: # waiting for completed message to appear in _incoming - try: - with self.connection.strategy.pool.pool_lock: - response, result, request = self.connection.strategy.pool._incoming.pop(counter) - except KeyError: - sleep(sleeptime) - timeout -= sleeptime - continue - break - - if timeout <= 0: - if log_enabled(ERROR): - log(ERROR, 'no response from worker threads in Reusable connection') - raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection') - - if isinstance(response, LDAPOperationResult): - raise response # an exception has been raised with raise_exceptions - - if get_request: - return response, result, request - - return response, result - - def post_send_single_response(self, counter): - return counter - - def post_send_search(self, counter): - return counter +""" +""" + +# Created on 2014.03.23 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from datetime import datetime +from os import linesep +from threading import Thread, Lock +from time import sleep + +from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND +from .base import BaseStrategy +from ..core.usage import ConnectionUsage +from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError +from ..utils.log import log, log_enabled, ERROR, BASIC +from ..protocol.rfc4511 import LDAP_MAX_INT + +TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION' + +BOGUS_BIND = -1 +BOGUS_UNBIND = -2 +BOGUS_EXTENDED = -3 +BOGUS_ABANDON = -4 + +try: + from queue import Queue, Empty +except ImportError: # Python 2 + # noinspection PyUnresolvedReferences + from Queue import Queue, Empty + + +# noinspection PyProtectedMember +class ReusableStrategy(BaseStrategy): + """ + A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime. + The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of + connections that can be used as needed. Each connection lives in its own thread and has a busy/available status. + The strategy performs the requested operation on the first available connection. + The pool of connections is instantiated at strategy initialization. + Strategy has two customizable properties, the total number of connections in the pool and the lifetime of each connection. + When lifetime is expired the connection is closed and will be open again when needed. 
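When a Connection omits pool_size or pool_lifetime, the pool falls back to ldap3's configuration registry, as the get_config_parameter calls in this hunk show. A hedged sketch of tuning those defaults before creating pooled connections (assuming set_config_parameter in ldap3.utils.config is the matching public setter):

```python
from ldap3.utils.config import get_config_parameter, set_config_parameter

set_config_parameter('REUSABLE_THREADED_POOL_SIZE', 10)   # workers per pool
set_config_parameter('REUSABLE_THREADED_LIFETIME', 3600)  # seconds before a worker respawns
print(get_config_parameter('REUSABLE_THREADED_POOL_SIZE'))
```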
+ """ + pools = dict() + + def receiving(self): + raise NotImplementedError + + def _start_listen(self): + raise NotImplementedError + + def _get_response(self, message_id, timeout): + raise NotImplementedError + + def get_stream(self): + raise NotImplementedError + + def set_stream(self, value): + raise NotImplementedError + + # noinspection PyProtectedMember + class ConnectionPool(object): + """ + Container for the Connection Threads + """ + def __new__(cls, connection): + if connection.pool_name in ReusableStrategy.pools: # returns existing connection pool + pool = ReusableStrategy.pools[connection.pool_name] + if not pool.started: # if pool is not started remove it from the pools singleton and create a new onw + del ReusableStrategy.pools[connection.pool_name] + return object.__new__(cls) + if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive: # change lifetime + pool.keepalive = connection.pool_keepalive + if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime: # change keepalive + pool.lifetime = connection.pool_lifetime + if connection.pool_size and pool.pool_size != connection.pool_size: # if pool size has changed terminate and recreate the connections + pool.terminate_pool() + pool.pool_size = connection.pool_size + return pool + else: + return object.__new__(cls) + + def __init__(self, connection): + if not hasattr(self, 'workers'): + self.name = connection.pool_name + self.master_connection = connection + self.workers = [] + self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE') + self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME') + self.keepalive = connection.pool_keepalive + self.request_queue = Queue() + self.open_pool = False + self.bind_pool = False + self.tls_pool = False + self._incoming = dict() + self.counter = 0 + self.terminated_usage = ConnectionUsage() if connection._usage else None + self.terminated = False + self.pool_lock = Lock() + ReusableStrategy.pools[self.name] = self + self.started = False + if log_enabled(BASIC): + log(BASIC, 'instantiated ConnectionPool: <%r>', self) + + def __str__(self): + s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated') + s += ' - responses in queue: ' + str(len(self._incoming)) + s += ' - pool size: ' + str(self.pool_size) + s += ' - lifetime: ' + str(self.lifetime) + s += ' - keepalive: ' + str(self.keepalive) + s += ' - open: ' + str(self.open_pool) + s += ' - bind: ' + str(self.bind_pool) + s += ' - tls: ' + str(self.tls_pool) + linesep + s += 'MASTER CONN: ' + str(self.master_connection) + linesep + s += 'WORKERS:' + if self.workers: + for i, worker in enumerate(self.workers): + s += linesep + str(i).rjust(5) + ': ' + str(worker) + else: + s += linesep + ' no active workers in pool' + + return s + + def __repr__(self): + return self.__str__() + + def get_info_from_server(self): + for worker in self.workers: + with worker.worker_lock: + if not worker.connection.server.schema or not worker.connection.server.info: + worker.get_info_from_server = True + else: + worker.get_info_from_server = False + + def rebind_pool(self): + for worker in self.workers: + with worker.worker_lock: + worker.connection.rebind(self.master_connection.user, + self.master_connection.password, + self.master_connection.authentication, + self.master_connection.sasl_mechanism, + self.master_connection.sasl_credentials) + + def start_pool(self): + if not self.started: + self.create_pool() + for 
worker in self.workers: + with worker.worker_lock: + worker.thread.start() + self.started = True + self.terminated = False + if log_enabled(BASIC): + log(BASIC, 'worker started for pool <%s>', self) + return True + return False + + def create_pool(self): + if log_enabled(BASIC): + log(BASIC, 'created pool <%s>', self) + self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)] + + def terminate_pool(self): + if not self.terminated: + if log_enabled(BASIC): + log(BASIC, 'terminating pool <%s>', self) + self.started = False + self.request_queue.join() # waits for all queue pending operations + for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])): # put a TERMINATE signal on the queue for each active thread + self.request_queue.put((TERMINATE_REUSABLE, None, None, None)) + self.request_queue.join() # waits for all queue terminate operations + self.terminated = True + if log_enabled(BASIC): + log(BASIC, 'pool terminated for <%s>', self) + + class PooledConnectionThread(Thread): + """ + The thread that holds the Reusable connection and receive operation request via the queue + Result are sent back in the pool._incoming list when ready + """ + def __init__(self, worker, master_connection): + Thread.__init__(self) + self.daemon = True + self.worker = worker + self.master_connection = master_connection + if log_enabled(BASIC): + log(BASIC, 'instantiated PooledConnectionThread: <%r>', self) + + # noinspection PyProtectedMember + def run(self): + self.worker.running = True + terminate = False + pool = self.master_connection.strategy.pool + while not terminate: + try: + counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive) + except Empty: # issue an Abandon(0) operation to keep the connection live - Abandon(0) is a harmless operation + if not self.worker.connection.closed: + self.worker.connection.abandon(0) + continue + + with self.worker.worker_lock: + self.worker.busy = True + if counter == TERMINATE_REUSABLE: + terminate = True + if self.worker.connection.bound: + try: + self.worker.connection.unbind() + if log_enabled(BASIC): + log(BASIC, 'thread terminated') + except LDAPExceptionError: + pass + else: + if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime: # destroy and create a new connection + try: + self.worker.connection.unbind() + except LDAPExceptionError: + pass + self.worker.new_connection() + if log_enabled(BASIC): + log(BASIC, 'thread respawn') + if message_type not in ['bindRequest', 'unbindRequest']: + try: + if pool.open_pool and self.worker.connection.closed: + self.worker.connection.open(read_server_info=False) + if pool.tls_pool and not self.worker.connection.tls_started: + self.worker.connection.start_tls(read_server_info=False) + if pool.bind_pool and not self.worker.connection.bound: + self.worker.connection.bind(read_server_info=False) + elif pool.open_pool and not self.worker.connection.closed: # connection already open, issues a start_tls + if pool.tls_pool and not self.worker.connection.tls_started: + self.worker.connection.start_tls(read_server_info=False) + if self.worker.get_info_from_server and counter: + self.worker.connection.refresh_server_info() + self.worker.get_info_from_server = False + response = None + result = None + if message_type == 'searchRequest': + response = 
self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls)) + else: + response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls)) + result = self.worker.connection.result + with pool.pool_lock: + pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls)) + except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread + with pool.pool_lock: + pool._incoming[counter] = (e, None, None) + # pool._incoming[counter] = (type(e)(str(e)), None, None) + # except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread + # exc = e + # with pool.pool_lock: + # if exc: + # pool._incoming[counter] = (exc, None, None) + # else: + # pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls)) + + self.worker.busy = False + pool.request_queue.task_done() + self.worker.task_counter += 1 + if log_enabled(BASIC): + log(BASIC, 'thread terminated') + if self.master_connection.usage: + pool.terminated_usage += self.worker.connection.usage + self.worker.running = False + + class PooledConnectionWorker(object): + """ + Container for the restartable connection. it includes a thread and a lock to execute the connection in the pool + """ + def __init__(self, connection, request_queue): + self.master_connection = connection + self.request_queue = request_queue + self.running = False + self.busy = False + self.get_info_from_server = False + self.connection = None + self.creation_time = None + self.task_counter = 0 + self.new_connection() + self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection) + self.worker_lock = Lock() + if log_enabled(BASIC): + log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self) + + def __str__(self): + s = 'CONN: ' + str(self.connection) + linesep + ' THREAD: ' + s += 'running' if self.running else 'halted' + s += ' - ' + ('busy' if self.busy else 'available') + s += ' - ' + ('created at: ' + self.creation_time.isoformat()) + s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds) + s += ' - requests served: ' + str(self.task_counter) + + return s + + def new_connection(self): + from ..core.connection import Connection + # noinspection PyProtectedMember + self.creation_time = datetime.now() + self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server, + user=self.master_connection.user, + password=self.master_connection.password, + auto_bind=AUTO_BIND_NONE, # do not perform auto_bind because it reads again the schema + version=self.master_connection.version, + authentication=self.master_connection.authentication, + client_strategy=RESTARTABLE, + auto_referrals=self.master_connection.auto_referrals, + auto_range=self.master_connection.auto_range, + sasl_mechanism=self.master_connection.sasl_mechanism, + sasl_credentials=self.master_connection.sasl_credentials, + check_names=self.master_connection.check_names, + collect_usage=self.master_connection._usage, + read_only=self.master_connection.read_only, + raise_exceptions=self.master_connection.raise_exceptions, + lazy=False, + fast_decoder=self.master_connection.fast_decoder, + 
receive_timeout=self.master_connection.receive_timeout, + return_empty_attributes=self.master_connection.empty_attributes) + + # simulates auto_bind, always with read_server_info=False + if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]: + if log_enabled(BASIC): + log(BASIC, 'performing automatic bind for <%s>', self.connection) + self.connection.open(read_server_info=False) + if self.master_connection.auto_bind == AUTO_BIND_NO_TLS: + self.connection.bind(read_server_info=False) + elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND: + self.connection.start_tls(read_server_info=False) + self.connection.bind(read_server_info=False) + elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND: + self.connection.bind(read_server_info=False) + self.connection.start_tls(read_server_info=False) + + if self.master_connection.server_pool: + self.connection.server_pool = self.master_connection.server_pool + self.connection.server_pool.initialize(self.connection) + + # ReusableStrategy methods + def __init__(self, ldap_connection): + BaseStrategy.__init__(self, ldap_connection) + self.sync = False + self.no_real_dsa = False + self.pooled = True + self.can_stream = False + if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name: + self.pool = ReusableStrategy.ConnectionPool(ldap_connection) + else: + if log_enabled(ERROR): + log(ERROR, 'reusable connection must have a pool_name') + raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name') + + def open(self, reset_usage=True, read_server_info=True): + # read_server_info not used + self.pool.open_pool = True + self.pool.start_pool() + self.connection.closed = False + if self.connection.usage: + if reset_usage or not self.connection._usage.initial_connection_start_time: + self.connection._usage.start() + + def terminate(self): + self.pool.terminate_pool() + self.pool.open_pool = False + self.connection.bound = False + self.connection.closed = True + self.pool.bind_pool = False + self.pool.tls_pool = False + + def _close_socket(self): + """ + Doesn't really close the socket + """ + self.connection.closed = True + + if self.connection.usage: + self.connection._usage.closed_sockets += 1 + + def send(self, message_type, request, controls=None): + if self.pool.started: + if message_type == 'bindRequest': + self.pool.bind_pool = True + counter = BOGUS_BIND + elif message_type == 'unbindRequest': + self.pool.bind_pool = False + counter = BOGUS_UNBIND + elif message_type == 'abandonRequest': + counter = BOGUS_ABANDON + elif message_type == 'extendedReq' and self.connection.starting_tls: + self.pool.tls_pool = True + counter = BOGUS_EXTENDED + else: + with self.pool.pool_lock: + self.pool.counter += 1 + if self.pool.counter > LDAP_MAX_INT: + self.pool.counter = 1 + counter = self.pool.counter + self.pool.request_queue.put((counter, message_type, request, controls)) + return counter + if log_enabled(ERROR): + log(ERROR, 'reusable connection pool not started') + raise LDAPConnectionPoolNotStartedError('reusable connection pool not started') + + def validate_bind(self, controls): + # in case of a new connection or different credentials + if (self.connection.user != self.pool.master_connection.user or + self.connection.password != self.pool.master_connection.password or + self.connection.authentication != self.pool.master_connection.authentication or + self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or + 
self.connection.sasl_credentials != self.pool.master_connection.sasl_credentials): + self.pool.master_connection.user = self.connection.user + self.pool.master_connection.password = self.connection.password + self.pool.master_connection.authentication = self.connection.authentication + self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism + self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials + self.pool.rebind_pool() + temp_connection = self.pool.workers[0].connection + old_lazy = temp_connection.lazy + temp_connection.lazy = False + if not self.connection.server.schema or not self.connection.server.info: + result = self.pool.workers[0].connection.bind(controls=controls) + else: + result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False) + + temp_connection.unbind() + temp_connection.lazy = old_lazy + if result: + self.pool.bind_pool = True # bind pool if bind is validated + return result + + def get_response(self, counter, timeout=None, get_request=False): + sleeptime = get_config_parameter('RESPONSE_SLEEPTIME') + request=None + if timeout is None: + timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT') + if counter == BOGUS_BIND: # send a bogus bindResponse + response = list() + result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '', 'saslCreds': None} + elif counter == BOGUS_UNBIND: # bogus unbind response + response = None + result = None + elif counter == BOGUS_ABANDON: # abandon cannot be executed because of multiple connections + response = list() + result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''} + elif counter == BOGUS_EXTENDED: # bogus startTls extended response + response = list() + result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''} + self.connection.starting_tls = False + else: + response = None + result = None + while timeout >= 0: # waiting for completed message to appear in _incoming + try: + with self.connection.strategy.pool.pool_lock: + response, result, request = self.connection.strategy.pool._incoming.pop(counter) + except KeyError: + sleep(sleeptime) + timeout -= sleeptime + continue + break + + if timeout <= 0: + if log_enabled(ERROR): + log(ERROR, 'no response from worker threads in Reusable connection') + raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection') + + if isinstance(response, LDAPOperationResult): + raise response # an exception has been raised with raise_exceptions + + if get_request: + return response, result, request + + return response, result + + def post_send_single_response(self, counter): + return counter + + def post_send_search(self, counter): + return counter diff --git a/server/www/packages/packages-linux/x64/ldap3/strategy/sync.py b/server/www/packages/packages-linux/x64/ldap3/strategy/sync.py index b2c0257..fdb1441 100644 --- a/server/www/packages/packages-linux/x64/ldap3/strategy/sync.py +++ b/server/www/packages/packages-linux/x64/ldap3/strategy/sync.py @@ -1,212 +1,212 @@ -""" -""" - -# Created on 2013.07.15 -# -# Author: Giovanni Cannata -# -# Copyright 2013 - 2018 Giovanni Cannata -# -# This file is part of ldap3. 
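With ReusableStrategy the operation methods return the counter handed out by send(), and the get_response() shown above polls pool._incoming in RESPONSE_SLEEPTIME steps until a worker thread publishes the answer or the RESPONSE_WAITING_TIMEOUT budget runs out. A hedged usage sketch (placeholder host and base DN):

```python
from ldap3 import Server, Connection, REUSABLE

conn = Connection(Server('ldap.example.com'), user='cn=admin,o=test',
                  password='secret', client_strategy=REUSABLE,
                  pool_name='tp_pool', auto_bind=True)
counter = conn.search('o=test', '(objectClass=*)', attributes=['cn'])
response, result = conn.get_response(counter, timeout=10)
```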
-# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -import socket - -from .. import SEQUENCE_TYPES, get_config_parameter -from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult -from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR -from ..protocol.rfc4511 import LDAPMessage -from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message -from ..utils.asn1 import decoder, decode_message_fast - -LDAP_MESSAGE_TEMPLATE = LDAPMessage() - - -# noinspection PyProtectedMember -class SyncStrategy(BaseStrategy): - """ - This strategy is synchronous. You send the request and get the response - Requests return a boolean value to indicate the result of the requested Operation - Connection.response will contain the whole LDAP response for the messageId requested in a dict form - Connection.request will contain the result LDAP message in a dict form - """ - - def __init__(self, ldap_connection): - BaseStrategy.__init__(self, ldap_connection) - self.sync = True - self.no_real_dsa = False - self.pooled = False - self.can_stream = False - self.socket_size = get_config_parameter('SOCKET_SIZE') - - def open(self, reset_usage=True, read_server_info=True): - BaseStrategy.open(self, reset_usage, read_server_info) - if read_server_info: - try: - self.connection.refresh_server_info() - except LDAPOperationResult: # catch errors from server if raise_exception = True - self.connection.server._dsa_info = None - self.connection.server._schema_info = None - - def _start_listen(self): - if not self.connection.listening and not self.connection.closed: - self.connection.listening = True - - def receiving(self): - """ - Receive data over the socket - Checks if the socket is closed - """ - messages = [] - receiving = True - unprocessed = b'' - data = b'' - get_more_data = True - exc = None - while receiving: - if get_more_data: - try: - data = self.connection.socket.recv(self.socket_size) - except (OSError, socket.error, AttributeError) as e: - self.connection.last_error = 'error receiving data: ' + str(e) - try: # try to close the connection before raising exception - self.close() - except (socket.error, LDAPExceptionError): - pass - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error) - raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error) - unprocessed += data - if len(data) > 0: - length = BaseStrategy.compute_ldap_message_size(unprocessed) - if length == -1: # too few data to decode message length - get_more_data = True - continue - if len(unprocessed) < length: - get_more_data = True - else: - if log_enabled(NETWORK): - log(NETWORK, 'received %d bytes via 
<%s>', len(unprocessed[:length]), self.connection) - messages.append(unprocessed[:length]) - unprocessed = unprocessed[length:] - get_more_data = False - if len(unprocessed) == 0: - receiving = False - else: - receiving = False - - if log_enabled(NETWORK): - log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection) - return messages - - def post_send_single_response(self, message_id): - """ - Executed after an Operation Request (except Search) - Returns the result message or None - """ - responses, result = self.get_response(message_id) - self.connection.result = result - if result['type'] == 'intermediateResponse': # checks that all responses are intermediates (there should be only one) - for response in responses: - if response['type'] != 'intermediateResponse': - self.connection.last_error = 'multiple messages received error' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSocketReceiveError(self.connection.last_error) - - responses.append(result) - return responses - - def post_send_search(self, message_id): - """ - Executed after a search request - Returns the result message and store in connection.response the objects found - """ - responses, result = self.get_response(message_id) - self.connection.result = result - if isinstance(responses, SEQUENCE_TYPES): - self.connection.response = responses[:] # copy search result entries - return responses - - self.connection.last_error = 'error receiving response' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSocketReceiveError(self.connection.last_error) - - def _get_response(self, message_id): - """ - Performs the capture of LDAP response for SyncStrategy - """ - ldap_responses = [] - response_complete = False - while not response_complete: - responses = self.receiving() - if responses: - for response in responses: - if len(response) > 0: - if self.connection.usage: - self.connection._usage.update_received_message(len(response)) - if self.connection.fast_decoder: - ldap_resp = decode_message_fast(response) - dict_response = self.decode_response_fast(ldap_resp) - else: - ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE) # unprocessed unused because receiving() waits for the whole message - dict_response = self.decode_response(ldap_resp) - if log_enabled(EXTENDED): - log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<')) - if int(ldap_resp['messageID']) == message_id: - ldap_responses.append(dict_response) - if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']: - response_complete = True - elif int(ldap_resp['messageID']) == 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4) - if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1) - return SESSION_TERMINATED_BY_SERVER - elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4': # Novell LDAP transaction error unsolicited notification - return TRANSACTION_ERROR - else: - self.connection.last_error = 'unknown unsolicited notification from server' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSocketReceiveError(self.connection.last_error) - elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp': - 
self.connection.last_error = 'multiple extended responses to a single extended request' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPExtensionError(self.connection.last_error) - # pass # ignore message with invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP server do it - else: - self.connection.last_error = 'invalid messageId received' - if log_enabled(ERROR): - log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - raise LDAPSocketReceiveError(self.connection.last_error) - # response = unprocessed - # if response: # if this statement is removed unprocessed data will be processed as another message - # self.connection.last_error = 'unprocessed substrate error' - # if log_enabled(ERROR): - # log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) - # raise LDAPSocketReceiveError(self.connection.last_error) - else: - return SESSION_TERMINATED_BY_SERVER - ldap_responses.append(RESPONSE_COMPLETE) - - return ldap_responses - - def set_stream(self, value): - raise NotImplementedError - - def get_stream(self): - raise NotImplementedError +""" +""" + +# Created on 2013.07.15 +# +# Author: Giovanni Cannata +# +# Copyright 2013 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +import socket + +from .. import SEQUENCE_TYPES, get_config_parameter +from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult +from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR +from ..protocol.rfc4511 import LDAPMessage +from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message +from ..utils.asn1 import decoder, decode_message_fast + +LDAP_MESSAGE_TEMPLATE = LDAPMessage() + + +# noinspection PyProtectedMember +class SyncStrategy(BaseStrategy): + """ + This strategy is synchronous. 
You send the request and get the response + Requests return a boolean value to indicate the result of the requested Operation + Connection.response will contain the whole LDAP response for the messageId requested in a dict form + Connection.request will contain the result LDAP message in a dict form + """ + + def __init__(self, ldap_connection): + BaseStrategy.__init__(self, ldap_connection) + self.sync = True + self.no_real_dsa = False + self.pooled = False + self.can_stream = False + self.socket_size = get_config_parameter('SOCKET_SIZE') + + def open(self, reset_usage=True, read_server_info=True): + BaseStrategy.open(self, reset_usage, read_server_info) + if read_server_info: + try: + self.connection.refresh_server_info() + except LDAPOperationResult: # catch errors from server if raise_exception = True + self.connection.server._dsa_info = None + self.connection.server._schema_info = None + + def _start_listen(self): + if not self.connection.listening and not self.connection.closed: + self.connection.listening = True + + def receiving(self): + """ + Receives data over the socket + Checks if the socket is closed + """ + messages = [] + receiving = True + unprocessed = b'' + data = b'' + get_more_data = True + exc = None + while receiving: + if get_more_data: + try: + data = self.connection.socket.recv(self.socket_size) + except (OSError, socket.error, AttributeError) as e: + self.connection.last_error = 'error receiving data: ' + str(e) + try: # try to close the connection before raising exception + self.close() + except (socket.error, LDAPExceptionError): + pass + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + # raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error) + raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error) + unprocessed += data + if len(data) > 0: + length = BaseStrategy.compute_ldap_message_size(unprocessed) + if length == -1: # too few data to decode message length + get_more_data = True + continue + if len(unprocessed) < length: + get_more_data = True + else: + if log_enabled(NETWORK): + log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection) + messages.append(unprocessed[:length]) + unprocessed = unprocessed[length:] + get_more_data = False + if len(unprocessed) == 0: + receiving = False + else: + receiving = False + + if log_enabled(NETWORK): + log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection) + return messages + + def post_send_single_response(self, message_id): + """ + Executed after an Operation Request (except Search) + Returns the result message or None + """ + responses, result = self.get_response(message_id) + self.connection.result = result + if result['type'] == 'intermediateResponse': # checks that all responses are intermediates (there should be only one) + for response in responses: + if response['type'] != 'intermediateResponse': + self.connection.last_error = 'multiple messages received error' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPSocketReceiveError(self.connection.last_error) + + responses.append(result) + return responses + + def post_send_search(self, message_id): + """ + Executed after a search request + Returns the result message and store in connection.response the objects found + """ + responses, result = self.get_response(message_id) + self.connection.result = result 
+ if isinstance(responses, SEQUENCE_TYPES): + self.connection.response = responses[:] # copy search result entries + return responses + + self.connection.last_error = 'error receiving response' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPSocketReceiveError(self.connection.last_error) + + def _get_response(self, message_id, timeout): + """ + Performs the capture of LDAP response for SyncStrategy + """ + ldap_responses = [] + response_complete = False + while not response_complete: + responses = self.receiving() + if responses: + for response in responses: + if len(response) > 0: + if self.connection.usage: + self.connection._usage.update_received_message(len(response)) + if self.connection.fast_decoder: + ldap_resp = decode_message_fast(response) + dict_response = self.decode_response_fast(ldap_resp) + else: + ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE) # unprocessed unused because receiving() waits for the whole message + dict_response = self.decode_response(ldap_resp) + if log_enabled(EXTENDED): + log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<')) + if int(ldap_resp['messageID']) == message_id: + ldap_responses.append(dict_response) + if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']: + response_complete = True + elif int(ldap_resp['messageID']) == 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4) + if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1) + return SESSION_TERMINATED_BY_SERVER + elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4': # Novell LDAP transaction error unsolicited notification + return TRANSACTION_ERROR + else: + self.connection.last_error = 'unknown unsolicited notification from server' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPSocketReceiveError(self.connection.last_error) + elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp': + self.connection.last_error = 'multiple extended responses to a single extended request' + if log_enabled(ERROR): + log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection) + raise LDAPExtensionError(self.connection.last_error) + # pass # ignore message with invalid messageId when receiving multiple extendedResp. 
This is not allowed by RFC4511 but some LDAP servers do it
+                        else:
+                            self.connection.last_error = 'invalid messageId received'
+                            if log_enabled(ERROR):
+                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                            raise LDAPSocketReceiveError(self.connection.last_error)
+                # response = unprocessed
+                # if response:  # if this statement is removed unprocessed data will be processed as another message
+                #     self.connection.last_error = 'unprocessed substrate error'
+                #     if log_enabled(ERROR):
+                #         log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+                #     raise LDAPSocketReceiveError(self.connection.last_error)
+            else:
+                return SESSION_TERMINATED_BY_SERVER
+        ldap_responses.append(RESPONSE_COMPLETE)
+
+        return ldap_responses
+
+    def set_stream(self, value):
+        raise NotImplementedError
+
+    def get_stream(self):
+        raise NotImplementedError
diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/asn1.py b/server/www/packages/packages-linux/x64/ldap3/utils/asn1.py
index 6b0b0bb..1b6091d 100644
--- a/server/www/packages/packages-linux/x64/ldap3/utils/asn1.py
+++ b/server/www/packages/packages-linux/x64/ldap3/utils/asn1.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/ciDict.py b/server/www/packages/packages-linux/x64/ldap3/utils/ciDict.py
index f81ba1b..c51d7ff 100644
--- a/server/www/packages/packages-linux/x64/ldap3/utils/ciDict.py
+++ b/server/www/packages/packages-linux/x64/ldap3/utils/ciDict.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
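The sync.py rewrite above is ldap3's default synchronous client strategy: each operation blocks until _get_response() has assembled the complete reply. A minimal usage sketch, assuming a reachable directory — the host, bind DN, password and search base below are placeholders, not part of this patch:

    from ldap3 import Server, Connection, SYNC

    server = Server('ldap.example.com')                        # placeholder host
    conn = Connection(server,
                      user='cn=admin,dc=example,dc=com',       # placeholder bind DN
                      password='secret',                       # placeholder password
                      client_strategy=SYNC)                    # the strategy patched above
    conn.bind()                                                # result collected by post_send_single_response()
    conn.search('dc=example,dc=com', '(objectClass=person)')   # entries copied by post_send_search()
    print(conn.result['description'], len(conn.response))
    conn.unbind()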
 #
@@ -143,7 +143,7 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
         if ci_key in self._aliases:
             self.remove_alias(ci_key)
 
-    def set_alias(self, key, alias):
+    def set_alias(self, key, alias, ignore_duplicates=False):
         if not isinstance(alias, SEQUENCE_TYPES):
             alias = [alias]
         for alias_to_add in alias:
@@ -153,23 +153,28 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
                 if ci_alias not in self._case_insensitive_keymap:  # checks if alias is used a key
                     if ci_alias not in self._aliases:  # checks if alias is used as another alias
                         self._aliases[ci_alias] = ci_key
-                        if ci_key in self._alias_keymap:  # extend alias keymap
+                        if ci_key in self._alias_keymap:  # extends alias keymap
                             self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
                         else:
                             self._alias_keymap[ci_key] = list()
                             self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
                     else:
-                        if ci_key == self._ci_key(self._alias_keymap[ci_alias]):  # passes if alias is already defined to the same key
+                        if ci_key in self._alias_keymap and ci_alias in self._alias_keymap[ci_key]:  # passes if alias is already defined to the same key
                             pass
-                        else:
+                        elif not ignore_duplicates:
                             raise KeyError('\'' + str(alias_to_add) + '\' already used as alias')
                 else:
                     if ci_key == self._ci_key(self._case_insensitive_keymap[ci_alias]):  # passes if alias is already defined to the same key
                         pass
-                    else:
+                    elif not ignore_duplicates:
                         raise KeyError('\'' + str(alias_to_add) + '\' already used as key')
             else:
-                raise KeyError('\'' + str(ci_key) + '\' is not an existing key')
+                for keymap in self._alias_keymap:
+                    if ci_key in self._alias_keymap[keymap]:  # key is already aliased
+                        self.set_alias(keymap, alias + [ci_key], ignore_duplicates=ignore_duplicates)
+                        break
+                else:
+                    raise KeyError('\'' + str(ci_key) + '\' is not an existing alias or key')
 
     def remove_alias(self, alias):
         if not isinstance(alias, SEQUENCE_TYPES):
diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/config.py b/server/www/packages/packages-linux/x64/ldap3/utils/config.py
index 36b57a9..e3edbf8 100644
--- a/server/www/packages/packages-linux/x64/ldap3/utils/config.py
+++ b/server/www/packages/packages-linux/x64/ldap3/utils/config.py
@@ -5,7 +5,7 @@
 #
 # Author: Giovanni Cannata
 #
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
 #
 # This file is part of ldap3.
 #
diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/conv.py b/server/www/packages/packages-linux/x64/ldap3/utils/conv.py
index ee90c66..b000e30 100644
--- a/server/www/packages/packages-linux/x64/ldap3/utils/conv.py
+++ b/server/www/packages/packages-linux/x64/ldap3/utils/conv.py
@@ -1,278 +1,270 @@
-"""
-"""
-
-# Created on 2014.04.26
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from base64 import b64encode, b64decode
-import datetime
-import re
-
-from ..
import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter -from ..utils.ciDict import CaseInsensitiveDict -from ..core.exceptions import LDAPDefinitionError - - -def to_unicode(obj, encoding=None, from_server=False): - """Try to convert bytes (and str in python2) to unicode. - Return object unmodified if python3 string, else raise an exception - """ - conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING') - conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING') - conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS') - conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS') - if isinstance(obj, NUMERIC_TYPES): - obj = str(obj) - - if isinstance(obj, (bytes, bytearray)): - if from_server: # data from server - if encoding is None: - encoding = conf_default_server_encoding - try: - return obj.decode(encoding) - except UnicodeDecodeError: - for encoding in conf_additional_server_encodings: # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510) - try: - return obj.decode(encoding) - except UnicodeDecodeError: - pass - raise UnicodeError("Unable to convert server data to unicode: %r" % obj) - else: # data from client - if encoding is None: - encoding = conf_default_client_encoding - try: - return obj.decode(encoding) - except UnicodeDecodeError: - for encoding in conf_additional_client_encodings: # tries additional encodings - try: - return obj.decode(encoding) - except UnicodeDecodeError: - pass - raise UnicodeError("Unable to convert client data to unicode: %r" % obj) - - if isinstance(obj, STRING_TYPES): # python3 strings, python 2 unicode - return obj - - raise UnicodeError("Unable to convert type %s to unicode: %r" % (type(obj).__class__.__name__, obj)) - - -def to_raw(obj, encoding='utf-8'): - """Tries to convert to raw bytes from unicode""" - if isinstance(obj, NUMERIC_TYPES): - obj = str(obj) - - if not (isinstance(obj, bytes)): - if isinstance(obj, SEQUENCE_TYPES): - return [to_raw(element) for element in obj] - elif isinstance(obj, STRING_TYPES): - return obj.encode(encoding) - return obj - - -def escape_filter_chars(text, encoding=None): - """ Escape chars mentioned in RFC4515. """ - if encoding is None: - encoding = get_config_parameter('DEFAULT_ENCODING') - - try: - text = to_unicode(text, encoding) - escaped = text.replace('\\', '\\5c') - escaped = escaped.replace('*', '\\2a') - escaped = escaped.replace('(', '\\28') - escaped = escaped.replace(')', '\\29') - escaped = escaped.replace('\x00', '\\00') - except Exception: # probably raw bytes values, return escaped bytes value - escaped = to_unicode(escape_bytes(text)) - # escape all octets greater than 0x7F that are not part of a valid UTF-8 - # escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped) - return escaped - - -def unescape_filter_chars(text, encoding=None): - """ unescape chars mentioned in RFC4515. 
""" - if encoding is None: - encoding = get_config_parameter('DEFAULT_ENCODING') - - unescaped = to_raw(text, encoding) - unescaped = unescaped.replace(b'\\5c', b'\\') - unescaped = unescaped.replace(b'\\5C', b'\\') - unescaped = unescaped.replace(b'\\2a', b'*') - unescaped = unescaped.replace(b'\\2A', b'*') - unescaped = unescaped.replace(b'\\28', b'(') - unescaped = unescaped.replace(b'\\29', b')') - unescaped = unescaped.replace(b'\\00', b'\x00') - return unescaped - - -def escape_bytes(bytes_value): - """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string""" - if bytes_value: - if str is not bytes: # Python 3 - if isinstance(bytes_value, str): - bytes_value = bytearray(bytes_value, encoding='utf-8') - escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value]) - else: # Python 2 - if isinstance(bytes_value, unicode): - bytes_value = bytes_value.encode('utf-8') - escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value]) - else: - escaped = '' - - return ('\\' + escaped) if escaped else '' - - -def prepare_for_stream(value): - if str is not bytes: # Python 3 - return value - else: # Python 2 - return value.decode() - -def json_encode_b64(obj): - try: - return dict(encoding='base64', encoded=b64encode(obj)) - except Exception as e: - raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e)) - - -# noinspection PyProtectedMember -def check_json_dict(json_dict): - # needed for python 2 - - for k, v in json_dict.items(): - if isinstance(v, dict): - check_json_dict(v) - elif isinstance(v, CaseInsensitiveDict): - check_json_dict(v._store) - elif isinstance(v, SEQUENCE_TYPES): - for i, e in enumerate(v): - if isinstance(e, dict): - check_json_dict(e) - elif isinstance(e, CaseInsensitiveDict): - check_json_dict(e._store) - else: - v[i] = format_json(e) - else: - json_dict[k] = format_json(v) - - -def json_hook(obj): - if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys(): - return b64decode(obj['encoded']) - - return obj - - -# noinspection PyProtectedMember -def format_json(obj): - if isinstance(obj, CaseInsensitiveDict): - return obj._store - - if isinstance(obj, datetime.datetime): - return str(obj) - - if isinstance(obj, int): - return obj - - if str is bytes: # Python 2 - if isinstance(obj, long): # long exists only in python2 - return obj - - try: - if str is not bytes: # Python 3 - if isinstance(obj, bytes): - # return check_escape(str(obj, 'utf-8', errors='strict')) - return str(obj, 'utf-8', errors='strict') - raise LDAPDefinitionError('unable to serialize ' + str(obj)) - else: # Python 2 - if isinstance(obj, unicode): - return obj - else: - # return unicode(check_escape(obj)) - return unicode(obj) - except (TypeError, UnicodeDecodeError): - pass - - try: - return json_encode_b64(bytes(obj)) - except Exception: - pass - - raise LDAPDefinitionError('unable to serialize ' + str(obj)) - - -def is_filter_escaped(text): - if not type(text) == ((str is not bytes) and str or unicode): # requires str for Python 3 and unicode for Python 2 - raise ValueError('unicode input expected') - - return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text) - - -# def ldap_escape_to_bytes(text): -# bytesequence = bytearray() -# if text.startswith('\\'): -# byte_values = text.split('\\') -# for value in byte_values[1:]: -# if len(value) != 2 and not value.isdigit(): -# raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence') -# 
bytesequence.append(int(value, 16)) -# return bytes(bytesequence) -# raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence') - - -def ldap_escape_to_bytes(text): - bytesequence = bytearray() - i = 0 - try: - if isinstance(text, STRING_TYPES): - while i < len(text): - if text[i] == '\\': - if len(text) > i + 2: - try: - bytesequence.append(int(text[i+1:i+3], 16)) - i += 3 - continue - except ValueError: - pass - bytesequence.append(92) # "\" ASCII code - else: - raw = to_raw(text[i]) - for c in raw: - bytesequence.append(c) - i += 1 - elif isinstance(text, (bytes, bytearray)): - while i < len(text): - if text[i] == 92: # "\" ASCII code - if len(text) > i + 2: - try: - bytesequence.append(int(text[i + 1:i + 3], 16)) - i += 3 - continue - except ValueError: - pass - bytesequence.append(92) # "\" ASCII code - else: - bytesequence.append(text[i]) - i += 1 - except Exception: - raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence') - - return bytes(bytesequence) +""" +""" + +# Created on 2014.04.26 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from base64 import b64encode, b64decode +import datetime +import re + +from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter +from ..utils.ciDict import CaseInsensitiveDict +from ..core.exceptions import LDAPDefinitionError + + +def to_unicode(obj, encoding=None, from_server=False): + """Try to convert bytes (and str in python2) to unicode. 
+ Return object unmodified if python3 string, else raise an exception + """ + conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING') + conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING') + conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS') + conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS') + if isinstance(obj, NUMERIC_TYPES): + obj = str(obj) + + if isinstance(obj, (bytes, bytearray)): + if from_server: # data from server + if encoding is None: + encoding = conf_default_server_encoding + try: + return obj.decode(encoding) + except UnicodeDecodeError: + for encoding in conf_additional_server_encodings: # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510) + try: + return obj.decode(encoding) + except UnicodeDecodeError: + pass + raise UnicodeError("Unable to convert server data to unicode: %r" % obj) + else: # data from client + if encoding is None: + encoding = conf_default_client_encoding + try: + return obj.decode(encoding) + except UnicodeDecodeError: + for encoding in conf_additional_client_encodings: # tries additional encodings + try: + return obj.decode(encoding) + except UnicodeDecodeError: + pass + raise UnicodeError("Unable to convert client data to unicode: %r" % obj) + + if isinstance(obj, STRING_TYPES): # python3 strings, python 2 unicode + return obj + + raise UnicodeError("Unable to convert type %s to unicode: %r" % (obj.__class__.__name__, obj)) + + +def to_raw(obj, encoding='utf-8'): + """Tries to convert to raw bytes from unicode""" + if isinstance(obj, NUMERIC_TYPES): + obj = str(obj) + + if not (isinstance(obj, bytes)): + if isinstance(obj, SEQUENCE_TYPES): + return [to_raw(element) for element in obj] + elif isinstance(obj, STRING_TYPES): + return obj.encode(encoding) + return obj + + +def escape_filter_chars(text, encoding=None): + """ Escape chars mentioned in RFC4515. """ + if encoding is None: + encoding = get_config_parameter('DEFAULT_ENCODING') + + try: + text = to_unicode(text, encoding) + escaped = text.replace('\\', '\\5c') + escaped = escaped.replace('*', '\\2a') + escaped = escaped.replace('(', '\\28') + escaped = escaped.replace(')', '\\29') + escaped = escaped.replace('\x00', '\\00') + except Exception: # probably raw bytes values, return escaped bytes value + escaped = to_unicode(escape_bytes(text)) + # escape all octets greater than 0x7F that are not part of a valid UTF-8 + # escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped) + return escaped + + +def unescape_filter_chars(text, encoding=None): + """ unescape chars mentioned in RFC4515. 
""" + if encoding is None: + encoding = get_config_parameter('DEFAULT_ENCODING') + + unescaped = to_raw(text, encoding) + unescaped = unescaped.replace(b'\\5c', b'\\') + unescaped = unescaped.replace(b'\\5C', b'\\') + unescaped = unescaped.replace(b'\\2a', b'*') + unescaped = unescaped.replace(b'\\2A', b'*') + unescaped = unescaped.replace(b'\\28', b'(') + unescaped = unescaped.replace(b'\\29', b')') + unescaped = unescaped.replace(b'\\00', b'\x00') + return unescaped + + +def escape_bytes(bytes_value): + """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string""" + if bytes_value: + if str is not bytes: # Python 3 + if isinstance(bytes_value, str): + bytes_value = bytearray(bytes_value, encoding='utf-8') + escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value]) + else: # Python 2 + if isinstance(bytes_value, unicode): + bytes_value = bytes_value.encode('utf-8') + escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value]) + else: + escaped = '' + + return ('\\' + escaped) if escaped else '' + + +def prepare_for_stream(value): + if str is not bytes: # Python 3 + return value + else: # Python 2 + return value.decode() + + +def json_encode_b64(obj): + try: + return dict(encoding='base64', encoded=b64encode(obj)) + except Exception as e: + raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e)) + + +# noinspection PyProtectedMember +def check_json_dict(json_dict): + # needed for python 2 + + for k, v in json_dict.items(): + if isinstance(v, dict): + check_json_dict(v) + elif isinstance(v, CaseInsensitiveDict): + check_json_dict(v._store) + elif isinstance(v, SEQUENCE_TYPES): + for i, e in enumerate(v): + if isinstance(e, dict): + check_json_dict(e) + elif isinstance(e, CaseInsensitiveDict): + check_json_dict(e._store) + else: + v[i] = format_json(e) + else: + json_dict[k] = format_json(v) + + +def json_hook(obj): + if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys(): + return b64decode(obj['encoded']) + + return obj + + +# noinspection PyProtectedMember +def format_json(obj): + if isinstance(obj, CaseInsensitiveDict): + return obj._store + + if isinstance(obj, datetime.datetime): + return str(obj) + + if isinstance(obj, int): + return obj + + if isinstance(obj, datetime.timedelta): + return str(obj) + + if str is bytes: # Python 2 + if isinstance(obj, long): # long exists only in python2 + return obj + + try: + if str is not bytes: # Python 3 + if isinstance(obj, bytes): + # return check_escape(str(obj, 'utf-8', errors='strict')) + return str(obj, 'utf-8', errors='strict') + raise LDAPDefinitionError('unable to serialize ' + str(obj)) + else: # Python 2 + if isinstance(obj, unicode): + return obj + else: + # return unicode(check_escape(obj)) + return unicode(obj) + except (TypeError, UnicodeDecodeError): + pass + + try: + return json_encode_b64(bytes(obj)) + except Exception: + pass + + raise LDAPDefinitionError('unable to serialize ' + str(obj)) + + +def is_filter_escaped(text): + if not type(text) == ((str is not bytes) and str or unicode): # requires str for Python 3 and unicode for Python 2 + raise ValueError('unicode input expected') + + return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text) + + +def ldap_escape_to_bytes(text): + bytesequence = bytearray() + i = 0 + try: + if isinstance(text, STRING_TYPES): + while i < len(text): + if text[i] == '\\': + if len(text) > i + 2: + try: + 
bytesequence.append(int(text[i+1:i+3], 16)) + i += 3 + continue + except ValueError: + pass + bytesequence.append(92) # "\" ASCII code + else: + raw = to_raw(text[i]) + for c in raw: + bytesequence.append(c) + i += 1 + elif isinstance(text, (bytes, bytearray)): + while i < len(text): + if text[i] == 92: # "\" ASCII code + if len(text) > i + 2: + try: + bytesequence.append(int(text[i + 1:i + 3], 16)) + i += 3 + continue + except ValueError: + pass + bytesequence.append(92) # "\" ASCII code + else: + bytesequence.append(text[i]) + i += 1 + except Exception: + raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence') + + return bytes(bytesequence) diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/dn.py b/server/www/packages/packages-linux/x64/ldap3/utils/dn.py index d1a50a9..c2a1e66 100644 --- a/server/www/packages/packages-linux/x64/ldap3/utils/dn.py +++ b/server/www/packages/packages-linux/x64/ldap3/utils/dn.py @@ -1,375 +1,405 @@ -""" -""" - -# Created on 2014.09.08 -# -# Author: Giovanni Cannata -# -# Copyright 2014 - 2018 Giovanni Cannata -# -# This file is part of ldap3. -# -# ldap3 is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# ldap3 is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with ldap3 in the COPYING and COPYING.LESSER files. -# If not, see . - -from string import hexdigits, ascii_letters, digits - -from .. 
import SEQUENCE_TYPES -from ..core.exceptions import LDAPInvalidDnError - - -STATE_ANY = 0 -STATE_ESCAPE = 1 -STATE_ESCAPE_HEX = 2 - - -def _add_ava(ava, decompose, remove_space, space_around_equal): - if not ava: - return '' - - space = ' ' if space_around_equal else '' - attr_name, _, value = ava.partition('=') - if decompose: - if remove_space: - component = (attr_name.strip(), value.strip()) - else: - component = (attr_name, value) - else: - if remove_space: - component = attr_name.strip() + space + '=' + space + value.strip() - else: - component = attr_name + space + '=' + space + value - - return component - - -def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False): - """ - Convert an iterator to a list of dn parts - if decompose=True return a list of tuple (one for each dn component) else return a list of strings - if remove_space=True removes unneeded spaces - if space_around_equal=True add spaces around equal in returned strings - if separate_rdn=True consider multiple RDNs as different component of DN - """ - dn = [] - component = '' - escape_sequence = False - for c in iterator: - if c == '\\': # escape sequence - escape_sequence = True - elif escape_sequence and c != ' ': - escape_sequence = False - elif c == '+' and separate_rdn: - dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) - component = '' - continue - elif c == ',': - if '=' in component: - dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) - component = '' - continue - - component += c - - dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) - return dn - - -def _find_first_unescaped(dn, char, pos): - while True: - pos = dn.find(char, pos) - if pos == -1: - break # no char found - if pos > 0 and dn[pos - 1] != '\\': # unescaped char - break - - pos += 1 - - return pos - - -def _find_last_unescaped(dn, char, start, stop=0): - while True: - stop = dn.rfind(char, start, stop) - if stop == -1: - break - if stop >= 0 and dn[stop - 1] != '\\': - break - - if stop < start: - stop = -1 - break - - return stop - - -def _get_next_ava(dn): - comma = _find_first_unescaped(dn, ',', 0) - plus = _find_first_unescaped(dn, '+', 0) - - if plus > 0 and (plus < comma or comma == -1): - equal = _find_first_unescaped(dn, '=', plus + 1) - if equal > plus + 1: - plus = _find_last_unescaped(dn, '+', plus, equal) - return dn[:plus], '+' - - if comma > 0: - equal = _find_first_unescaped(dn, '=', comma + 1) - if equal > comma + 1: - comma = _find_last_unescaped(dn, ',', comma, equal) - return dn[:comma], ',' - - return dn, '' - - -def _split_ava(ava, escape=False, strip=True): - equal = ava.find('=') - while equal > 0: # not first character - if ava[equal - 1] != '\\': # not an escaped equal so it must be an ava separator - # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal] - if strip: - attribute_type = ava[0:equal].strip() - attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip() - else: - attribute_type = ava[0:equal] - attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:] - - return attribute_type, attribute_value - equal = ava.find('=', equal + 1) - - return '', (ava.strip if strip else ava) # if no equal found return only value - - -def _validate_attribute_type(attribute_type): - if not attribute_type: - raise LDAPInvalidDnError('attribute type not present') - - if attribute_type == ' pairs') - if attribute_value[0] 
== ' ': # space cannot be used as first or last character - raise LDAPInvalidDnError('SPACE not allowed as first character of attribute value') - if attribute_value[-1] == ' ': - raise LDAPInvalidDnError('SPACE not allowed as last character of attribute value') - - state = STATE_ANY - for c in attribute_value: - if state == STATE_ANY: - if c == '\\': - state = STATE_ESCAPE - elif c in '"#+,;<=>\00': - raise LDAPInvalidDnError('special characters ' + c + ' must be escaped') - elif state == STATE_ESCAPE: - if c in hexdigits: - state = STATE_ESCAPE_HEX - elif c in ' "#+,;<=>\\\00': - state = STATE_ANY - else: - raise LDAPInvalidDnError('invalid escaped character ' + c) - elif state == STATE_ESCAPE_HEX: - if c in hexdigits: - state = STATE_ANY - else: - raise LDAPInvalidDnError('invalid escaped character ' + c) - - # final state - if state != STATE_ANY: - raise LDAPInvalidDnError('invalid final character') - - return True - - -def _escape_attribute_value(attribute_value): - if not attribute_value: - return '' - - if attribute_value[0] == '#': # with leading SHARP only pairs of hex characters are valid - valid_hex = True - if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars) - valid_hex = False - - if valid_hex: - for c in attribute_value: - if c not in hexdigits: # allowed only hex digits as per RFC 4514 - valid_hex = False - break - - if valid_hex: - return attribute_value - - state = STATE_ANY - escaped = '' - tmp_buffer = '' - for c in attribute_value: - if state == STATE_ANY: - if c == '\\': - state = STATE_ESCAPE - elif c in '"#+,;<=>\00': - escaped += '\\' + c - else: - escaped += c - elif state == STATE_ESCAPE: - if c in hexdigits: - tmp_buffer = c - state = STATE_ESCAPE_HEX - elif c in ' "#+,;<=>\\\00': - escaped += '\\' + c - state = STATE_ANY - else: - escaped += '\\\\' + c - elif state == STATE_ESCAPE_HEX: - if c in hexdigits: - escaped += '\\' + tmp_buffer + c - else: - escaped += '\\\\' + tmp_buffer + c - tmp_buffer = '' - state = STATE_ANY - - # final state - if state == STATE_ESCAPE: - escaped += '\\\\' - elif state == STATE_ESCAPE_HEX: - escaped += '\\\\' + tmp_buffer - - if escaped[0] == ' ': # leading SPACE must be escaped - escaped = '\\' + escaped - - if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\': # trailing SPACE must be escaped - escaped = escaped[:-1] + '\\ ' - - return escaped - - -def parse_dn(dn, escape=False, strip=True): - rdns = [] - avas = [] - while dn: - ava, separator = _get_next_ava(dn) # if returned ava doesn't containg any unescaped equal it'a appended to last ava in avas - - dn = dn[len(ava) + 1:] - if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0: - avas.append((ava, separator)) - else: - avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator) - - for ava, separator in avas: - attribute_type, attribute_value = _split_ava(ava, escape, strip) - - if not _validate_attribute_type(attribute_type): - raise LDAPInvalidDnError('unable to validate attribute type in ' + ava) - - if not _validate_attribute_value(attribute_value): - raise LDAPInvalidDnError('unable to validate attribute value in ' + ava) - - rdns.append((attribute_type, attribute_value, separator)) - dn = dn[len(ava) + 1:] - - if not rdns: - raise LDAPInvalidDnError('empty dn') - - return rdns - - -def safe_dn(dn, decompose=False, reverse=False): - """ - normalize and escape a dn, if dn is a sequence it is joined. 
- the reverse parameter changes the join direction of the sequence - """ - if isinstance(dn, SEQUENCE_TYPES): - components = [rdn for rdn in dn] - if reverse: - dn = ','.join(reversed(components)) - else: - dn = ','.join(components) - if decompose: - escaped_dn = [] - else: - escaped_dn = '' - - if dn.startswith(''): # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '') - escaped_dn = dn - elif '@' not in dn and '\\' not in dn: # active directory UPN (User Principal Name) consist of an account, the at sign (@) and a domain, or the domain level logn name domain\username - for component in parse_dn(dn, escape=True): - if decompose: - escaped_dn.append((component[0], component[1], component[2])) - else: - escaped_dn += component[0] + '=' + component[1] + component[2] - elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2: - raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain') - elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2: - raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain') - else: - escaped_dn = dn - - return escaped_dn - - -def safe_rdn(dn, decompose=False): - """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used""" - escaped_rdn = [] - one_more = True - for component in parse_dn(dn, escape=True): - if component[2] == '+' or one_more: - if decompose: - escaped_rdn.append((component[0], component[1])) - else: - escaped_rdn.append(component[0] + '=' + component[1]) - if component[2] == '+': - one_more = True - else: - one_more = False - break - - if one_more: - raise LDAPInvalidDnError('bad dn ' + str(dn)) - - return escaped_rdn - - -def escape_rdn(rdn): - """ - Escape rdn characters to prevent injection according to RFC 4514. - """ - - # '/' must be handled first or the escape slashes will be escaped! - for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']: - rdn = rdn.replace(char, '\\' + char) - - if rdn[0] == '#' or rdn[0] == ' ': - rdn = ''.join(('\\', rdn)) - - if rdn[-1] == ' ': - rdn = ''.join((rdn[:-1], '\\ ')) - - return rdn +""" +""" + +# Created on 2014.09.08 +# +# Author: Giovanni Cannata +# +# Copyright 2014 - 2020 Giovanni Cannata +# +# This file is part of ldap3. +# +# ldap3 is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# ldap3 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with ldap3 in the COPYING and COPYING.LESSER files. +# If not, see . + +from string import hexdigits, ascii_letters, digits + +from .. 
import SEQUENCE_TYPES +from ..core.exceptions import LDAPInvalidDnError + + +STATE_ANY = 0 +STATE_ESCAPE = 1 +STATE_ESCAPE_HEX = 2 + + +def _add_ava(ava, decompose, remove_space, space_around_equal): + if not ava: + return '' + + space = ' ' if space_around_equal else '' + attr_name, _, value = ava.partition('=') + if decompose: + if remove_space: + component = (attr_name.strip(), value.strip()) + else: + component = (attr_name, value) + else: + if remove_space: + component = attr_name.strip() + space + '=' + space + value.strip() + else: + component = attr_name + space + '=' + space + value + + return component + + +def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False): + """ + Convert an iterator to a list of dn parts + if decompose=True return a list of tuple (one for each dn component) else return a list of strings + if remove_space=True removes unneeded spaces + if space_around_equal=True add spaces around equal in returned strings + if separate_rdn=True consider multiple RDNs as different component of DN + """ + dn = [] + component = '' + escape_sequence = False + for c in iterator: + if c == '\\': # escape sequence + escape_sequence = True + elif escape_sequence and c != ' ': + escape_sequence = False + elif c == '+' and separate_rdn: + dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) + component = '' + continue + elif c == ',': + if '=' in component: + dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) + component = '' + continue + + component += c + + dn.append(_add_ava(component, decompose, remove_space, space_around_equal)) + return dn + + +def _find_first_unescaped(dn, char, pos): + while True: + pos = dn.find(char, pos) + if pos == -1: + break # no char found + if pos > 0 and dn[pos - 1] != '\\': # unescaped char + break + elif pos > 1 and dn[pos - 1] == '\\': # may be unescaped + escaped = True + for c in dn[pos - 2:0:-1]: + if c == '\\': + escaped = not escaped + else: + break + if not escaped: + break + pos += 1 + + return pos + + +def _find_last_unescaped(dn, char, start, stop=0): + while True: + stop = dn.rfind(char, start, stop) + if stop == -1: + break + if stop >= 0 and dn[stop - 1] != '\\': + break + elif stop > 1 and dn[stop - 1] == '\\': # may be unescaped + escaped = True + for c in dn[stop - 2:0:-1]: + if c == '\\': + escaped = not escaped + else: + break + if not escaped: + break + if stop < start: + stop = -1 + break + + return stop + + +def _get_next_ava(dn): + comma = _find_first_unescaped(dn, ',', 0) + plus = _find_first_unescaped(dn, '+', 0) + + if plus > 0 and (plus < comma or comma == -1): + equal = _find_first_unescaped(dn, '=', plus + 1) + if equal > plus + 1: + plus = _find_last_unescaped(dn, '+', plus, equal) + return dn[:plus], '+' + + if comma > 0: + equal = _find_first_unescaped(dn, '=', comma + 1) + if equal > comma + 1: + comma = _find_last_unescaped(dn, ',', comma, equal) + return dn[:comma], ',' + + return dn, '' + + +def _split_ava(ava, escape=False, strip=True): + equal = ava.find('=') + while equal > 0: # not first character + if ava[equal - 1] != '\\': # not an escaped equal so it must be an ava separator + # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal] + if strip: + attribute_type = ava[0:equal].strip() + attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip() + else: + attribute_type = ava[0:equal] + attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape 
else ava[equal + 1:] + + return attribute_type, attribute_value + equal = ava.find('=', equal + 1) + + return '', (ava.strip if strip else ava) # if no equal found return only value + + +def _validate_attribute_type(attribute_type): + if not attribute_type: + raise LDAPInvalidDnError('attribute type not present') + + if attribute_type == ' pairs') + if attribute_value[0] == ' ': # unescaped space cannot be used as leading or last character + raise LDAPInvalidDnError('SPACE must be escaped as leading character of attribute value') + if attribute_value.endswith(' ') and not attribute_value.endswith('\\ '): + raise LDAPInvalidDnError('SPACE must be escaped as trailing character of attribute value') + + state = STATE_ANY + for c in attribute_value: + if state == STATE_ANY: + if c == '\\': + state = STATE_ESCAPE + elif c in '"#+,;<=>\00': + raise LDAPInvalidDnError('special character ' + c + ' must be escaped') + elif state == STATE_ESCAPE: + if c in hexdigits: + state = STATE_ESCAPE_HEX + elif c in ' "#+,;<=>\\\00': + state = STATE_ANY + else: + raise LDAPInvalidDnError('invalid escaped character ' + c) + elif state == STATE_ESCAPE_HEX: + if c in hexdigits: + state = STATE_ANY + else: + raise LDAPInvalidDnError('invalid escaped character ' + c) + + # final state + if state != STATE_ANY: + raise LDAPInvalidDnError('invalid final character') + + return True + + +def _escape_attribute_value(attribute_value): + if not attribute_value: + return '' + + if attribute_value[0] == '#': # with leading SHARP only pairs of hex characters are valid + valid_hex = True + if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars) + valid_hex = False + + if valid_hex: + for c in attribute_value: + if c not in hexdigits: # allowed only hex digits as per RFC 4514 + valid_hex = False + break + + if valid_hex: + return attribute_value + + state = STATE_ANY + escaped = '' + tmp_buffer = '' + for c in attribute_value: + if state == STATE_ANY: + if c == '\\': + state = STATE_ESCAPE + elif c in '"#+,;<=>\00': + escaped += '\\' + c + else: + escaped += c + elif state == STATE_ESCAPE: + if c in hexdigits: + tmp_buffer = c + state = STATE_ESCAPE_HEX + elif c in ' "#+,;<=>\\\00': + escaped += '\\' + c + state = STATE_ANY + else: + escaped += '\\\\' + c + elif state == STATE_ESCAPE_HEX: + if c in hexdigits: + escaped += '\\' + tmp_buffer + c + else: + escaped += '\\\\' + tmp_buffer + c + tmp_buffer = '' + state = STATE_ANY + + # final state + if state == STATE_ESCAPE: + escaped += '\\\\' + elif state == STATE_ESCAPE_HEX: + escaped += '\\\\' + tmp_buffer + + if escaped[0] == ' ': # leading SPACE must be escaped + escaped = '\\' + escaped + + if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\': # trailing SPACE must be escaped + escaped = escaped[:-1] + '\\ ' + + return escaped + + +def parse_dn(dn, escape=False, strip=False): + """ + Parses a DN into syntactic components + :param dn: + :param escape: + :param strip: + :return: + a list of tripels representing `attributeTypeAndValue` elements + containing `attributeType`, `attributeValue` and the following separator (`COMMA` or `PLUS`) if given, else an empty `str`. + in their original representation, still containing escapes or encoded as hex. 
+ """ + rdns = [] + avas = [] + while dn: + ava, separator = _get_next_ava(dn) # if returned ava doesn't containg any unescaped equal it'a appended to last ava in avas + + dn = dn[len(ava) + 1:] + if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0: + avas.append((ava, separator)) + else: + avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator) + + for ava, separator in avas: + attribute_type, attribute_value = _split_ava(ava, escape, strip) + + if not _validate_attribute_type(attribute_type): + raise LDAPInvalidDnError('unable to validate attribute type in ' + ava) + + if not _validate_attribute_value(attribute_value): + raise LDAPInvalidDnError('unable to validate attribute value in ' + ava) + + rdns.append((attribute_type, attribute_value, separator)) + dn = dn[len(ava) + 1:] + + if not rdns: + raise LDAPInvalidDnError('empty dn') + + return rdns + + +def safe_dn(dn, decompose=False, reverse=False): + """ + normalize and escape a dn, if dn is a sequence it is joined. + the reverse parameter changes the join direction of the sequence + """ + if isinstance(dn, SEQUENCE_TYPES): + components = [rdn for rdn in dn] + if reverse: + dn = ','.join(reversed(components)) + else: + dn = ','.join(components) + if decompose: + escaped_dn = [] + else: + escaped_dn = '' + + if dn.startswith(''): # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '') + escaped_dn = dn + elif dn.startswith(''): # Active Directory allows Binding to Well-Known Objects Using WKGUID in a specially-formatted DN (e.g. ) + escaped_dn = dn + elif dn.startswith(''): # Active Directory allows looking up objects by putting its security identifier (SID) in a specially-formatted DN (e.g. '') + escaped_dn = dn + elif '@' not in dn: # active directory UPN (User Principal Name) consist of an account, the at sign (@) and a domain, or the domain level logn name domain\username + for component in parse_dn(dn, escape=True): + if decompose: + escaped_dn.append((component[0], component[1], component[2])) + else: + escaped_dn += component[0] + '=' + component[1] + component[2] + elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2: + raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain') + elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2: + raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain') + else: + escaped_dn = dn + + return escaped_dn + + +def safe_rdn(dn, decompose=False): + """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used""" + escaped_rdn = [] + one_more = True + for component in parse_dn(dn, escape=True): + if component[2] == '+' or one_more: + if decompose: + escaped_rdn.append((component[0], component[1])) + else: + escaped_rdn.append(component[0] + '=' + component[1]) + if component[2] == '+': + one_more = True + else: + one_more = False + break + + if one_more: + raise LDAPInvalidDnError('bad dn ' + str(dn)) + + return escaped_rdn + + +def escape_rdn(rdn): + """ + Escape rdn characters to prevent injection according to RFC 4514. + """ + + # '/' must be handled first or the escape slashes will be escaped! 
+ for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']: + rdn = rdn.replace(char, '\\' + char) + + if rdn[0] == '#' or rdn[0] == ' ': + rdn = ''.join(('\\', rdn)) + + if rdn[-1] == ' ': + rdn = ''.join((rdn[:-1], '\\ ')) + + return rdn diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/hashed.py b/server/www/packages/packages-linux/x64/ldap3/utils/hashed.py index 33a2b89..e58d67d 100644 --- a/server/www/packages/packages-linux/x64/ldap3/utils/hashed.py +++ b/server/www/packages/packages-linux/x64/ldap3/utils/hashed.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/log.py b/server/www/packages/packages-linux/x64/ldap3/utils/log.py index d65cc1b..228c745 100644 --- a/server/www/packages/packages-linux/x64/ldap3/utils/log.py +++ b/server/www/packages/packages-linux/x64/ldap3/utils/log.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # diff --git a/server/www/packages/packages-linux/x64/ldap3/utils/ntlm.py b/server/www/packages/packages-linux/x64/ldap3/utils/ntlm.py index 54efaae..f91776d 100644 --- a/server/www/packages/packages-linux/x64/ldap3/utils/ntlm.py +++ b/server/www/packages/packages-linux/x64/ldap3/utils/ntlm.py @@ -5,7 +5,7 @@ # # Author: Giovanni Cannata # -# Copyright 2015 - 2018 Giovanni Cannata +# Copyright 2015 - 2020 Giovanni Cannata # # This file is part of ldap3. # @@ -483,7 +483,7 @@ class NtlmClient(object): temp += self.server_target_info_raw temp += pack(' +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -__version__ = '1.0.7' +__version__ = '1.1.3' diff --git a/server/www/packages/packages-linux/x64/mako/_ast_util.py b/server/www/packages/packages-linux/x64/mako/_ast_util.py index c410287..bdcdbf6 100644 --- a/server/www/packages/packages-linux/x64/mako/_ast_util.py +++ b/server/www/packages/packages-linux/x64/mako/_ast_util.py @@ -1,5 +1,5 @@ # mako/_ast_util.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,69 +8,77 @@ ast ~~~ - The `ast` module helps Python applications to process trees of the Python - abstract syntax grammar. The abstract syntax itself might change with - each Python release; this module helps to find out programmatically what - the current grammar looks like and allows modifications of it. - - An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as - a flag to the `compile()` builtin function or by using the `parse()` - function from this module. The result will be a tree of objects whose - classes all inherit from `ast.AST`. - - A modified abstract syntax tree can be compiled into a Python code object - using the built-in `compile()` function. - - Additionally various helper functions are provided that make working with - the trees simpler. The main intention of the helper functions and this - module in general is to provide an easy to use interface for libraries - that work tightly with the python syntax (template engines for example). 
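The docstring removed above described the general _ast workflow that this stripped-down module still relies on: an AST is produced by passing PyCF_ONLY_AST to the built-in compile(), which is exactly what the kept parse() helper does. A small illustrative sketch (the expression and namespace are invented for the example):

    from _ast import PyCF_ONLY_AST

    # what mako._ast_util.parse(expr, mode='eval') boils down to
    tree = compile('a + b', '<unknown>', 'eval', PyCF_ONLY_AST)

    # a tree compiles just like source code does
    code = compile(tree, '<unknown>', 'eval')
    print(eval(code, {'a': 1, 'b': 2}))  # 3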
- + This is a stripped down version of Armin Ronacher's ast module. :copyright: Copyright 2008 by Armin Ronacher. :license: Python License. """ -from _ast import * # noqa + + +from _ast import Add +from _ast import And +from _ast import AST +from _ast import BitAnd +from _ast import BitOr +from _ast import BitXor +from _ast import Div +from _ast import Eq +from _ast import FloorDiv +from _ast import Gt +from _ast import GtE +from _ast import If +from _ast import In +from _ast import Invert +from _ast import Is +from _ast import IsNot +from _ast import LShift +from _ast import Lt +from _ast import LtE +from _ast import Mod +from _ast import Mult +from _ast import Name +from _ast import Not +from _ast import NotEq +from _ast import NotIn +from _ast import Or +from _ast import PyCF_ONLY_AST +from _ast import RShift +from _ast import Sub +from _ast import UAdd +from _ast import USub + from mako.compat import arg_stringname -BOOLOP_SYMBOLS = { - And: 'and', - Or: 'or' -} +BOOLOP_SYMBOLS = {And: "and", Or: "or"} BINOP_SYMBOLS = { - Add: '+', - Sub: '-', - Mult: '*', - Div: '/', - FloorDiv: '//', - Mod: '%', - LShift: '<<', - RShift: '>>', - BitOr: '|', - BitAnd: '&', - BitXor: '^' + Add: "+", + Sub: "-", + Mult: "*", + Div: "/", + FloorDiv: "//", + Mod: "%", + LShift: "<<", + RShift: ">>", + BitOr: "|", + BitAnd: "&", + BitXor: "^", } CMPOP_SYMBOLS = { - Eq: '==', - Gt: '>', - GtE: '>=', - In: 'in', - Is: 'is', - IsNot: 'is not', - Lt: '<', - LtE: '<=', - NotEq: '!=', - NotIn: 'not in' + Eq: "==", + Gt: ">", + GtE: ">=", + In: "in", + Is: "is", + IsNot: "is not", + Lt: "<", + LtE: "<=", + NotEq: "!=", + NotIn: "not in", } -UNARYOP_SYMBOLS = { - Invert: '~', - Not: 'not', - UAdd: '+', - USub: '-' -} +UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"} ALL_SYMBOLS = {} ALL_SYMBOLS.update(BOOLOP_SYMBOLS) @@ -79,105 +87,15 @@ ALL_SYMBOLS.update(CMPOP_SYMBOLS) ALL_SYMBOLS.update(UNARYOP_SYMBOLS) -def parse(expr, filename='', mode='exec'): +def parse(expr, filename="", mode="exec"): """Parse an expression into an AST node.""" return compile(expr, filename, mode, PyCF_ONLY_AST) -def to_source(node, indent_with=' ' * 4): - """ - This function can convert a node tree back into python sourcecode. This - is useful for debugging purposes, especially if you're dealing with custom - asts not generated by python itself. - - It could be that the sourcecode is evaluable when the AST itself is not - compilable / evaluable. The reason for this is that the AST contains some - more data than regular sourcecode does, which is dropped during - conversion. - - Each level of indentation is replaced with `indent_with`. Per default this - parameter is equal to four spaces as suggested by PEP 8, but it might be - adjusted to match the application's styleguide. - """ - generator = SourceGenerator(indent_with) - generator.visit(node) - return ''.join(generator.result) - - -def dump(node): - """ - A very verbose representation of the node passed. This is useful for - debugging purposes. 
- """ - def _format(node): - if isinstance(node, AST): - return '%s(%s)' % (node.__class__.__name__, - ', '.join('%s=%s' % (a, _format(b)) - for a, b in iter_fields(node))) - elif isinstance(node, list): - return '[%s]' % ', '.join(_format(x) for x in node) - return repr(node) - if not isinstance(node, AST): - raise TypeError('expected AST, got %r' % node.__class__.__name__) - return _format(node) - - -def copy_location(new_node, old_node): - """ - Copy the source location hint (`lineno` and `col_offset`) from the - old to the new node if possible and return the new one. - """ - for attr in 'lineno', 'col_offset': - if attr in old_node._attributes and attr in new_node._attributes \ - and hasattr(old_node, attr): - setattr(new_node, attr, getattr(old_node, attr)) - return new_node - - -def fix_missing_locations(node): - """ - Some nodes require a line number and the column offset. Without that - information the compiler will abort the compilation. Because it can be - a dull task to add appropriate line numbers and column offsets when - adding new nodes this function can help. It copies the line number and - column offset of the parent node to the child nodes without this - information. - - Unlike `copy_location` this works recursive and won't touch nodes that - already have a location information. - """ - def _fix(node, lineno, col_offset): - if 'lineno' in node._attributes: - if not hasattr(node, 'lineno'): - node.lineno = lineno - else: - lineno = node.lineno - if 'col_offset' in node._attributes: - if not hasattr(node, 'col_offset'): - node.col_offset = col_offset - else: - col_offset = node.col_offset - for child in iter_child_nodes(node): - _fix(child, lineno, col_offset) - _fix(node, 1, 0) - return node - - -def increment_lineno(node, n=1): - """ - Increment the line numbers of all nodes by `n` if they have line number - attributes. This is useful to "move code" to a different location in a - file. - """ - for node in zip((node,), walk(node)): - if 'lineno' in node._attributes: - node.lineno = getattr(node, 'lineno', 0) + n - - def iter_fields(node): """Iterate over all fields of a node, only yielding existing fields.""" # CPython 2.5 compat - if not hasattr(node, '_fields') or not node._fields: + if not hasattr(node, "_fields") or not node._fields: return for field in node._fields: try: @@ -186,65 +104,6 @@ def iter_fields(node): pass -def get_fields(node): - """Like `iter_fields` but returns a dict.""" - return dict(iter_fields(node)) - - -def iter_child_nodes(node): - """Iterate over all child nodes or a node.""" - for name, field in iter_fields(node): - if isinstance(field, AST): - yield field - elif isinstance(field, list): - for item in field: - if isinstance(item, AST): - yield item - - -def get_child_nodes(node): - """Like `iter_child_nodes` but returns a list.""" - return list(iter_child_nodes(node)) - - -def get_compile_mode(node): - """ - Get the mode for `compile` of a given node. If the node is not a `mod` - node (`Expression`, `Module` etc.) a `TypeError` is thrown. - """ - if not isinstance(node, mod): - raise TypeError('expected mod node, got %r' % node.__class__.__name__) - return { - Expression: 'eval', - Interactive: 'single' - }.get(node.__class__, 'expr') - - -def get_docstring(node): - """ - Return the docstring for the given node or `None` if no docstring can be - found. If the node provided does not accept docstrings a `TypeError` - will be raised. 
- """ - if not isinstance(node, (FunctionDef, ClassDef, Module)): - raise TypeError("%r can't have docstrings" % node.__class__.__name__) - if node.body and isinstance(node.body[0], Str): - return node.body[0].s - - -def walk(node): - """ - Iterate over all nodes. This is useful if you only want to modify nodes in - place and don't care about the context or the order the nodes are returned. - """ - from collections import deque - todo = deque([node]) - while todo: - node = todo.popleft() - todo.extend(iter_child_nodes(node)) - yield node - - class NodeVisitor(object): """ @@ -269,7 +128,7 @@ class NodeVisitor(object): exists for this node. In that case the generic visit function is used instead. """ - method = 'visit_' + node.__class__.__name__ + method = "visit_" + node.__class__.__name__ return getattr(self, method, None) def visit(self, node): @@ -367,7 +226,7 @@ class SourceGenerator(NodeVisitor): def write(self, x): if self.new_lines: if self.result: - self.result.append('\n' * self.new_lines) + self.result.append("\n" * self.new_lines) self.result.append(self.indent_with * self.indentation) self.new_lines = 0 self.result.append(x) @@ -386,7 +245,7 @@ class SourceGenerator(NodeVisitor): self.body(node.body) if node.orelse: self.newline() - self.write('else:') + self.write("else:") self.body(node.orelse) def signature(self, node): @@ -394,7 +253,7 @@ class SourceGenerator(NodeVisitor): def write_comma(): if want_comma: - self.write(', ') + self.write(", ") else: want_comma.append(True) @@ -403,19 +262,19 @@ class SourceGenerator(NodeVisitor): write_comma() self.visit(arg) if default is not None: - self.write('=') + self.write("=") self.visit(default) if node.vararg is not None: write_comma() - self.write('*' + arg_stringname(node.vararg)) + self.write("*" + arg_stringname(node.vararg)) if node.kwarg is not None: write_comma() - self.write('**' + arg_stringname(node.kwarg)) + self.write("**" + arg_stringname(node.kwarg)) def decorators(self, node): for decorator in node.decorator_list: self.newline() - self.write('@') + self.write("@") self.visit(decorator) # Statements @@ -424,29 +283,29 @@ class SourceGenerator(NodeVisitor): self.newline() for idx, target in enumerate(node.targets): if idx: - self.write(', ') + self.write(", ") self.visit(target) - self.write(' = ') + self.write(" = ") self.visit(node.value) def visit_AugAssign(self, node): self.newline() self.visit(node.target) - self.write(BINOP_SYMBOLS[type(node.op)] + '=') + self.write(BINOP_SYMBOLS[type(node.op)] + "=") self.visit(node.value) def visit_ImportFrom(self, node): self.newline() - self.write('from %s%s import ' % ('.' * node.level, node.module)) + self.write("from %s%s import " % ("." 
* node.level, node.module)) for idx, item in enumerate(node.names): if idx: - self.write(', ') + self.write(", ") self.write(item) def visit_Import(self, node): self.newline() for item in node.names: - self.write('import ') + self.write("import ") self.visit(item) def visit_Expr(self, node): @@ -457,9 +316,9 @@ class SourceGenerator(NodeVisitor): self.newline(n=2) self.decorators(node) self.newline() - self.write('def %s(' % node.name) + self.write("def %s(" % node.name) self.signature(node.args) - self.write('):') + self.write("):") self.body(node.body) def visit_ClassDef(self, node): @@ -467,200 +326,200 @@ class SourceGenerator(NodeVisitor): def paren_or_comma(): if have_args: - self.write(', ') + self.write(", ") else: have_args.append(True) - self.write('(') + self.write("(") self.newline(n=3) self.decorators(node) self.newline() - self.write('class %s' % node.name) + self.write("class %s" % node.name) for base in node.bases: paren_or_comma() self.visit(base) # XXX: the if here is used to keep this module compatible # with python 2.6. - if hasattr(node, 'keywords'): + if hasattr(node, "keywords"): for keyword in node.keywords: paren_or_comma() - self.write(keyword.arg + '=') + self.write(keyword.arg + "=") self.visit(keyword.value) if getattr(node, "starargs", None): paren_or_comma() - self.write('*') + self.write("*") self.visit(node.starargs) if getattr(node, "kwargs", None): paren_or_comma() - self.write('**') + self.write("**") self.visit(node.kwargs) - self.write(have_args and '):' or ':') + self.write(have_args and "):" or ":") self.body(node.body) def visit_If(self, node): self.newline() - self.write('if ') + self.write("if ") self.visit(node.test) - self.write(':') + self.write(":") self.body(node.body) while True: else_ = node.orelse if len(else_) == 1 and isinstance(else_[0], If): node = else_[0] self.newline() - self.write('elif ') + self.write("elif ") self.visit(node.test) - self.write(':') + self.write(":") self.body(node.body) else: self.newline() - self.write('else:') + self.write("else:") self.body(else_) break def visit_For(self, node): self.newline() - self.write('for ') + self.write("for ") self.visit(node.target) - self.write(' in ') + self.write(" in ") self.visit(node.iter) - self.write(':') + self.write(":") self.body_or_else(node) def visit_While(self, node): self.newline() - self.write('while ') + self.write("while ") self.visit(node.test) - self.write(':') + self.write(":") self.body_or_else(node) def visit_With(self, node): self.newline() - self.write('with ') + self.write("with ") self.visit(node.context_expr) if node.optional_vars is not None: - self.write(' as ') + self.write(" as ") self.visit(node.optional_vars) - self.write(':') + self.write(":") self.body(node.body) def visit_Pass(self, node): self.newline() - self.write('pass') + self.write("pass") def visit_Print(self, node): # XXX: python 2.6 only self.newline() - self.write('print ') + self.write("print ") want_comma = False if node.dest is not None: - self.write(' >> ') + self.write(" >> ") self.visit(node.dest) want_comma = True for value in node.values: if want_comma: - self.write(', ') + self.write(", ") self.visit(value) want_comma = True if not node.nl: - self.write(',') + self.write(",") def visit_Delete(self, node): self.newline() - self.write('del ') + self.write("del ") for idx, target in enumerate(node): if idx: - self.write(', ') + self.write(", ") self.visit(target) def visit_TryExcept(self, node): self.newline() - self.write('try:') + self.write("try:") self.body(node.body) for 
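# ---- editor's note, not part of the vendored diff ----
# visit_If above loops over node.orelse so that a chain parsed as nested If
# nodes regenerates as `elif` rather than nested `else:` blocks. The AST
# shape it is flattening:
import ast

tree = ast.parse("if a:\n    pass\nelif b:\n    pass\nelse:\n    pass")
outer = tree.body[0]
print(isinstance(outer.orelse[0], ast.If))    # True: elif is a nested If
print(outer.orelse[0].orelse[0])              # the final else's Pass node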
handler in node.handlers: self.visit(handler) def visit_TryFinally(self, node): self.newline() - self.write('try:') + self.write("try:") self.body(node.body) self.newline() - self.write('finally:') + self.write("finally:") self.body(node.finalbody) def visit_Global(self, node): self.newline() - self.write('global ' + ', '.join(node.names)) + self.write("global " + ", ".join(node.names)) def visit_Nonlocal(self, node): self.newline() - self.write('nonlocal ' + ', '.join(node.names)) + self.write("nonlocal " + ", ".join(node.names)) def visit_Return(self, node): self.newline() - self.write('return ') + self.write("return ") self.visit(node.value) def visit_Break(self, node): self.newline() - self.write('break') + self.write("break") def visit_Continue(self, node): self.newline() - self.write('continue') + self.write("continue") def visit_Raise(self, node): # XXX: Python 2.6 / 3.0 compatibility self.newline() - self.write('raise') - if hasattr(node, 'exc') and node.exc is not None: - self.write(' ') + self.write("raise") + if hasattr(node, "exc") and node.exc is not None: + self.write(" ") self.visit(node.exc) if node.cause is not None: - self.write(' from ') + self.write(" from ") self.visit(node.cause) - elif hasattr(node, 'type') and node.type is not None: + elif hasattr(node, "type") and node.type is not None: self.visit(node.type) if node.inst is not None: - self.write(', ') + self.write(", ") self.visit(node.inst) if node.tback is not None: - self.write(', ') + self.write(", ") self.visit(node.tback) # Expressions def visit_Attribute(self, node): self.visit(node.value) - self.write('.' + node.attr) + self.write("." + node.attr) def visit_Call(self, node): want_comma = [] def write_comma(): if want_comma: - self.write(', ') + self.write(", ") else: want_comma.append(True) self.visit(node.func) - self.write('(') + self.write("(") for arg in node.args: write_comma() self.visit(arg) for keyword in node.keywords: write_comma() - self.write(keyword.arg + '=') + self.write(keyword.arg + "=") self.visit(keyword.value) if getattr(node, "starargs", None): write_comma() - self.write('*') + self.write("*") self.visit(node.starargs) if getattr(node, "kwargs", None): write_comma() - self.write('**') + self.write("**") self.visit(node.kwargs) - self.write(')') + self.write(")") def visit_Name(self, node): self.write(node.id) @@ -680,106 +539,111 @@ class SourceGenerator(NodeVisitor): def visit_Num(self, node): self.write(repr(node.n)) + # newly needed in Python 3.8 + def visit_Constant(self, node): + self.write(repr(node.value)) + def visit_Tuple(self, node): - self.write('(') + self.write("(") idx = -1 for idx, item in enumerate(node.elts): if idx: - self.write(', ') + self.write(", ") self.visit(item) - self.write(idx and ')' or ',)') + self.write(idx and ")" or ",)") def sequence_visit(left, right): def visit(self, node): self.write(left) for idx, item in enumerate(node.elts): if idx: - self.write(', ') + self.write(", ") self.visit(item) self.write(right) + return visit - visit_List = sequence_visit('[', ']') - visit_Set = sequence_visit('{', '}') + visit_List = sequence_visit("[", "]") + visit_Set = sequence_visit("{", "}") del sequence_visit def visit_Dict(self, node): - self.write('{') + self.write("{") for idx, (key, value) in enumerate(zip(node.keys, node.values)): if idx: - self.write(', ') + self.write(", ") self.visit(key) - self.write(': ') + self.write(": ") self.visit(value) - self.write('}') + self.write("}") def visit_BinOp(self, node): - self.write('(') + self.write("(") 
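# ---- editor's note, not part of the vendored diff ----
# The visit_Constant method added above is required because Python 3.8
# parses every literal to ast.Constant, where older interpreters produced
# Num/Str/NameConstant; without it the generator has no handler for
# literals on 3.8+. Quick check:
import ast

print(type(ast.parse("42", mode="eval").body).__name__)
# "Constant" on Python 3.8+, "Num" on earlier versions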
self.visit(node.left) - self.write(' %s ' % BINOP_SYMBOLS[type(node.op)]) + self.write(" %s " % BINOP_SYMBOLS[type(node.op)]) self.visit(node.right) - self.write(')') + self.write(")") def visit_BoolOp(self, node): - self.write('(') + self.write("(") for idx, value in enumerate(node.values): if idx: - self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)]) + self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)]) self.visit(value) - self.write(')') + self.write(")") def visit_Compare(self, node): - self.write('(') + self.write("(") self.visit(node.left) for op, right in zip(node.ops, node.comparators): - self.write(' %s ' % CMPOP_SYMBOLS[type(op)]) + self.write(" %s " % CMPOP_SYMBOLS[type(op)]) self.visit(right) - self.write(')') + self.write(")") def visit_UnaryOp(self, node): - self.write('(') + self.write("(") op = UNARYOP_SYMBOLS[type(node.op)] self.write(op) - if op == 'not': - self.write(' ') + if op == "not": + self.write(" ") self.visit(node.operand) - self.write(')') + self.write(")") def visit_Subscript(self, node): self.visit(node.value) - self.write('[') + self.write("[") self.visit(node.slice) - self.write(']') + self.write("]") def visit_Slice(self, node): if node.lower is not None: self.visit(node.lower) - self.write(':') + self.write(":") if node.upper is not None: self.visit(node.upper) if node.step is not None: - self.write(':') - if not (isinstance(node.step, Name) and node.step.id == 'None'): + self.write(":") + if not (isinstance(node.step, Name) and node.step.id == "None"): self.visit(node.step) def visit_ExtSlice(self, node): for idx, item in node.dims: if idx: - self.write(', ') + self.write(", ") self.visit(item) def visit_Yield(self, node): - self.write('yield ') + self.write("yield ") self.visit(node.value) def visit_Lambda(self, node): - self.write('lambda ') + self.write("lambda ") self.signature(node.args) - self.write(': ') + self.write(": ") self.visit(node.body) def visit_Ellipsis(self, node): - self.write('Ellipsis') + self.write("Ellipsis") def generator_visit(left, right): def visit(self, node): @@ -788,64 +652,65 @@ class SourceGenerator(NodeVisitor): for comprehension in node.generators: self.visit(comprehension) self.write(right) + return visit - visit_ListComp = generator_visit('[', ']') - visit_GeneratorExp = generator_visit('(', ')') - visit_SetComp = generator_visit('{', '}') + visit_ListComp = generator_visit("[", "]") + visit_GeneratorExp = generator_visit("(", ")") + visit_SetComp = generator_visit("{", "}") del generator_visit def visit_DictComp(self, node): - self.write('{') + self.write("{") self.visit(node.key) - self.write(': ') + self.write(": ") self.visit(node.value) for comprehension in node.generators: self.visit(comprehension) - self.write('}') + self.write("}") def visit_IfExp(self, node): self.visit(node.body) - self.write(' if ') + self.write(" if ") self.visit(node.test) - self.write(' else ') + self.write(" else ") self.visit(node.orelse) def visit_Starred(self, node): - self.write('*') + self.write("*") self.visit(node.value) def visit_Repr(self, node): # XXX: python 2.6 only - self.write('`') + self.write("`") self.visit(node.value) - self.write('`') + self.write("`") # Helper Nodes def visit_alias(self, node): self.write(node.name) if node.asname is not None: - self.write(' as ' + node.asname) + self.write(" as " + node.asname) def visit_comprehension(self, node): - self.write(' for ') + self.write(" for ") self.visit(node.target) - self.write(' in ') + self.write(" in ") self.visit(node.iter) if node.ifs: for if_ in node.ifs: - 
self.write(' if ') + self.write(" if ") self.visit(if_) def visit_excepthandler(self, node): self.newline() - self.write('except') + self.write("except") if node.type is not None: - self.write(' ') + self.write(" ") self.visit(node.type) if node.name is not None: - self.write(' as ') + self.write(" as ") self.visit(node.name) - self.write(':') + self.write(":") self.body(node.body) diff --git a/server/www/packages/packages-linux/x64/mako/ast.py b/server/www/packages/packages-linux/x64/mako/ast.py index 8d2d150..cfae280 100644 --- a/server/www/packages/packages-linux/x64/mako/ast.py +++ b/server/www/packages/packages-linux/x64/mako/ast.py @@ -1,5 +1,5 @@ # mako/ast.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,9 +7,12 @@ """utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes""" -from mako import exceptions, pyparser, compat import re +from mako import compat +from mako import exceptions +from mako import pyparser + class PythonCode(object): @@ -72,36 +75,39 @@ class PythonFragment(PythonCode): """extends PythonCode to provide identifier lookups in partial control statements - e.g. + e.g.:: + for x in 5: elif y==9: except (MyException, e): - etc. + """ def __init__(self, code, **exception_kwargs): - m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S) + m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S) if not m: raise exceptions.CompileException( - "Fragment '%s' is not a partial control statement" % - code, **exception_kwargs) + "Fragment '%s' is not a partial control statement" % code, + **exception_kwargs + ) if m.group(3): - code = code[:m.start(3)] + code = code[: m.start(3)] (keyword, expr) = m.group(1, 2) - if keyword in ['for', 'if', 'while']: + if keyword in ["for", "if", "while"]: code = code + "pass" - elif keyword == 'try': + elif keyword == "try": code = code + "pass\nexcept:pass" - elif keyword == 'elif' or keyword == 'else': + elif keyword == "elif" or keyword == "else": code = "if False:pass\n" + code + "pass" - elif keyword == 'except': + elif keyword == "except": code = "try:pass\n" + code + "pass" - elif keyword == 'with': + elif keyword == "with": code = code + "pass" else: raise exceptions.CompileException( - "Unsupported control keyword: '%s'" % - keyword, **exception_kwargs) + "Unsupported control keyword: '%s'" % keyword, + **exception_kwargs + ) super(PythonFragment, self).__init__(code, **exception_kwargs) @@ -115,14 +121,17 @@ class FunctionDecl(object): f = pyparser.ParseFunc(self, **exception_kwargs) f.visit(expr) - if not hasattr(self, 'funcname'): + if not hasattr(self, "funcname"): raise exceptions.CompileException( "Code '%s' is not a function declaration" % code, - **exception_kwargs) + **exception_kwargs + ) if not allow_kwargs and self.kwargs: raise exceptions.CompileException( - "'**%s' keyword argument not allowed here" % - self.kwargnames[-1], **exception_kwargs) + "'**%s' keyword argument not allowed here" + % self.kwargnames[-1], + **exception_kwargs + ) def get_argument_expressions(self, as_call=False): """Return the argument declarations of this FunctionDecl as a printable @@ -157,8 +166,10 @@ class FunctionDecl(object): # `def foo(*, a=1, b, c=3)` namedecls.append(name) else: - namedecls.append("%s=%s" % ( - name, pyparser.ExpressionGenerator(default).value())) + 
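# ---- editor's sketch, not part of the vendored diff ----
# PythonFragment above pads a partial control line into compilable code
# before identifier analysis. A rough standalone mirror of its keyword
# table (simplified; the real class also strips trailing comments):
def complete_fragment(code):
    keyword = code.split(None, 1)[0].rstrip(":")
    if keyword in ("for", "if", "while", "with"):
        return code + "pass"
    if keyword == "try":
        return code + "pass\nexcept:pass"
    if keyword in ("elif", "else"):
        return "if False:pass\n" + code + "pass"
    if keyword == "except":
        return "try:pass\n" + code + "pass"
    raise ValueError("unsupported keyword: %r" % keyword)

compile(complete_fragment("elif y==9:"), "<frag>", "exec")  # now compiles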
namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) else: namedecls.append(name) @@ -171,8 +182,10 @@ class FunctionDecl(object): namedecls.append(name) else: default = defaults.pop(0) - namedecls.append("%s=%s" % ( - name, pyparser.ExpressionGenerator(default).value())) + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) namedecls.reverse() return namedecls @@ -187,5 +200,6 @@ class FunctionArgs(FunctionDecl): """the argument portion of a function declaration""" def __init__(self, code, **kwargs): - super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, - **kwargs) + super(FunctionArgs, self).__init__( + "def ANON(%s):pass" % code, **kwargs + ) diff --git a/server/www/packages/packages-linux/x64/mako/cache.py b/server/www/packages/packages-linux/x64/mako/cache.py index 1af17dd..26aa93e 100644 --- a/server/www/packages/packages-linux/x64/mako/cache.py +++ b/server/www/packages/packages-linux/x64/mako/cache.py @@ -1,10 +1,11 @@ # mako/cache.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from mako import compat, util +from mako import compat +from mako import util _cache_plugins = util.PluginLoader("mako.cache") @@ -90,9 +91,8 @@ class Cache(object): return creation_function() return self.impl.get_or_create( - key, - creation_function, - **self._get_cache_kw(kw, context)) + key, creation_function, **self._get_cache_kw(kw, context) + ) def set(self, key, value, **kw): r"""Place a value in the cache. @@ -141,7 +141,7 @@ class Cache(object): template. """ - self.invalidate('render_body', __M_defname='render_body') + self.invalidate("render_body", __M_defname="render_body") def invalidate_def(self, name): """Invalidate the cached content of a particular ``<%def>`` within this @@ -149,7 +149,7 @@ class Cache(object): """ - self.invalidate('render_%s' % name, __M_defname='render_%s' % name) + self.invalidate("render_%s" % name, __M_defname="render_%s" % name) def invalidate_closure(self, name): """Invalidate a nested ``<%def>`` within this template. 
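# ---- editor's note, not part of the vendored diff ----
# Behaviour is unchanged by the quoting cleanup above: invalidate_body()
# and invalidate_def() still route through invalidate() with a __M_defname
# hint so _get_cache_kw() can locate the per-def cache arguments.
# Hypothetical usage against a template compiled with caching enabled:
#
#     template.cache.invalidate_body()          # drop cached render_body
#     template.cache.invalidate_def("sidebar")  # drop cached render_sidebar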
@@ -165,7 +165,7 @@ class Cache(object): self.invalidate(name, __M_defname=name) def _get_cache_kw(self, kw, context): - defname = kw.pop('__M_defname', None) + defname = kw.pop("__M_defname", None) if not defname: tmpl_kw = self.template.cache_args.copy() tmpl_kw.update(kw) @@ -177,7 +177,7 @@ class Cache(object): self._def_regions[defname] = tmpl_kw if context and self.impl.pass_context: tmpl_kw = tmpl_kw.copy() - tmpl_kw.setdefault('context', context) + tmpl_kw.setdefault("context", context) return tmpl_kw diff --git a/server/www/packages/packages-linux/x64/mako/cmd.py b/server/www/packages/packages-linux/x64/mako/cmd.py index 8db1346..c0f2c75 100644 --- a/server/www/packages/packages-linux/x64/mako/cmd.py +++ b/server/www/packages/packages-linux/x64/mako/cmd.py @@ -1,14 +1,17 @@ # mako/cmd.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from argparse import ArgumentParser -from os.path import isfile, dirname +import io +from os.path import dirname +from os.path import isfile import sys -from mako.template import Template -from mako.lookup import TemplateLookup + from mako import exceptions +from mako.lookup import TemplateLookup +from mako.template import Template def varsplit(var): @@ -24,25 +27,47 @@ def _exit(): def cmdline(argv=None): - parser = ArgumentParser("usage: %prog [FILENAME]") + parser = ArgumentParser() parser.add_argument( - "--var", default=[], action="append", - help="variable (can be used multiple times, use name=value)") + "--var", + default=[], + action="append", + help="variable (can be used multiple times, use name=value)", + ) parser.add_argument( - "--template-dir", default=[], action="append", + "--template-dir", + default=[], + action="append", help="Directory to use for template lookup (multiple " "directories may be provided). 
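# ---- editor's note, not part of the vendored diff ----
# The rewritten cmdline() above adds --output-encoding and --output-file;
# rendering happens first and the file is written only on success, so a
# failing render can no longer truncate an existing output file.
# Hypothetical invocation of this vendored copy:
#
#     python -m mako.cmd --var name=world --output-file out.html hello.mako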
If not given then if the " "template is read from stdin, the value defaults to be " "the current directory, otherwise it defaults to be the " - "parent directory of the file provided.") - parser.add_argument('input', nargs='?', default='-') + "parent directory of the file provided.", + ) + parser.add_argument( + "--output-encoding", default=None, help="force output encoding" + ) + parser.add_argument( + "--output-file", + default=None, + help="Write to file upon successful render instead of stdout", + ) + parser.add_argument("input", nargs="?", default="-") options = parser.parse_args(argv) - if options.input == '-': + + output_encoding = options.output_encoding + output_file = options.output_file + + if options.input == "-": lookup_dirs = options.template_dir or ["."] lookup = TemplateLookup(lookup_dirs) try: - template = Template(sys.stdin.read(), lookup=lookup) + template = Template( + sys.stdin.read(), + lookup=lookup, + output_encoding=output_encoding, + ) except: _exit() else: @@ -52,15 +77,26 @@ def cmdline(argv=None): lookup_dirs = options.template_dir or [dirname(filename)] lookup = TemplateLookup(lookup_dirs) try: - template = Template(filename=filename, lookup=lookup) + template = Template( + filename=filename, + lookup=lookup, + output_encoding=output_encoding, + ) except: _exit() kw = dict([varsplit(var) for var in options.var]) try: - sys.stdout.write(template.render(**kw)) + rendered = template.render(**kw) except: _exit() + else: + if output_file: + io.open(output_file, "wt", encoding=output_encoding).write( + rendered + ) + else: + sys.stdout.write(rendered) if __name__ == "__main__": diff --git a/server/www/packages/packages-linux/x64/mako/codegen.py b/server/www/packages/packages-linux/x64/mako/codegen.py index d4ecbe8..a9ae55b 100644 --- a/server/www/packages/packages-linux/x64/mako/codegen.py +++ b/server/www/packages/packages-linux/x64/mako/codegen.py @@ -1,5 +1,5 @@ # mako/codegen.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,11 +7,17 @@ """provides functionality for rendering a parsetree constructing into module source code.""" -import time +import json import re -from mako.pygen import PythonPrinter -from mako import util, ast, parsetree, filters, exceptions +import time + +from mako import ast from mako import compat +from mako import exceptions +from mako import filters +from mako import parsetree +from mako import util +from mako.pygen import PythonPrinter MAGIC_NUMBER = 10 @@ -20,22 +26,24 @@ MAGIC_NUMBER = 10 # template and are not accessed via the # context itself TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"]) -RESERVED_NAMES = set(['context', 'loop']).union(TOPLEVEL_DECLARED) +RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED) -def compile(node, - uri, - filename=None, - default_filters=None, - buffer_filters=None, - imports=None, - future_imports=None, - source_encoding=None, - generate_magic_comment=True, - disable_unicode=False, - strict_undefined=False, - enable_loop=True, - reserved_names=frozenset()): +def compile( # noqa + node, + uri, + filename=None, + default_filters=None, + buffer_filters=None, + imports=None, + future_imports=None, + source_encoding=None, + generate_magic_comment=True, + disable_unicode=False, + strict_undefined=False, + enable_loop=True, + reserved_names=frozenset(), +): """Generate module source code 
given a parsetree node, uri, and optional source filename""" @@ -49,38 +57,43 @@ def compile(node, buf = util.FastEncodingBuffer() printer = PythonPrinter(buf) - _GenerateRenderMethod(printer, - _CompileContext(uri, - filename, - default_filters, - buffer_filters, - imports, - future_imports, - source_encoding, - generate_magic_comment, - disable_unicode, - strict_undefined, - enable_loop, - reserved_names), - node) + _GenerateRenderMethod( + printer, + _CompileContext( + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names, + ), + node, + ) return buf.getvalue() class _CompileContext(object): - - def __init__(self, - uri, - filename, - default_filters, - buffer_filters, - imports, - future_imports, - source_encoding, - generate_magic_comment, - disable_unicode, - strict_undefined, - enable_loop, - reserved_names): + def __init__( + self, + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names, + ): self.uri = uri self.filename = filename self.default_filters = default_filters @@ -113,12 +126,12 @@ class _GenerateRenderMethod(object): name = "render_%s" % node.funcname args = node.get_argument_expressions() filtered = len(node.filter_args.args) > 0 - buffered = eval(node.attributes.get('buffered', 'False')) - cached = eval(node.attributes.get('cached', 'False')) + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) defs = None pagetag = None if node.is_block and not node.is_anonymous: - args += ['**pageargs'] + args += ["**pageargs"] else: defs = self.write_toplevel() pagetag = self.compiler.pagetag @@ -126,25 +139,23 @@ class _GenerateRenderMethod(object): if pagetag is not None: args = pagetag.body_decl.get_argument_expressions() if not pagetag.body_decl.kwargs: - args += ['**pageargs'] - cached = eval(pagetag.attributes.get('cached', 'False')) + args += ["**pageargs"] + cached = eval(pagetag.attributes.get("cached", "False")) self.compiler.enable_loop = self.compiler.enable_loop or eval( - pagetag.attributes.get( - 'enable_loop', 'False') + pagetag.attributes.get("enable_loop", "False") ) else: - args = ['**pageargs'] + args = ["**pageargs"] cached = False buffered = filtered = False if args is None: - args = ['context'] + args = ["context"] else: - args = [a for a in ['context'] + args] + args = [a for a in ["context"] + args] self.write_render_callable( - pagetag or node, - name, args, - buffered, filtered, cached) + pagetag or node, name, args, buffered, filtered, cached + ) if defs is not None: for node in defs: @@ -154,8 +165,9 @@ class _GenerateRenderMethod(object): self.write_metadata_struct() def write_metadata_struct(self): - self.printer.source_map[self.printer.lineno] = \ - max(self.printer.source_map) + self.printer.source_map[self.printer.lineno] = max( + self.printer.source_map + ) struct = { "filename": self.compiler.filename, "uri": self.compiler.uri, @@ -164,10 +176,9 @@ class _GenerateRenderMethod(object): } self.printer.writelines( '"""', - '__M_BEGIN_METADATA', - compat.json.dumps(struct), - '__M_END_METADATA\n' - '"""' + "__M_BEGIN_METADATA", + json.dumps(struct), + "__M_END_METADATA\n" '"""', ) @property @@ -186,7 +197,6 @@ class _GenerateRenderMethod(object): self.compiler.pagetag = None class FindTopLevel(object): - 
def visitInheritTag(s, node): inherit.append(node) @@ -214,14 +224,19 @@ class _GenerateRenderMethod(object): module_identifiers.declared = module_ident # module-level names, python code - if self.compiler.generate_magic_comment and \ - self.compiler.source_encoding: - self.printer.writeline("# -*- coding:%s -*-" % - self.compiler.source_encoding) + if ( + self.compiler.generate_magic_comment + and self.compiler.source_encoding + ): + self.printer.writeline( + "# -*- coding:%s -*-" % self.compiler.source_encoding + ) if self.compiler.future_imports: - self.printer.writeline("from __future__ import %s" % - (", ".join(self.compiler.future_imports),)) + self.printer.writeline( + "from __future__ import %s" + % (", ".join(self.compiler.future_imports),) + ) self.printer.writeline("from mako import runtime, filters, cache") self.printer.writeline("UNDEFINED = runtime.UNDEFINED") self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING") @@ -231,36 +246,41 @@ class _GenerateRenderMethod(object): self.printer.writeline("_modified_time = %r" % time.time()) self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop) self.printer.writeline( - "_template_filename = %r" % self.compiler.filename) + "_template_filename = %r" % self.compiler.filename + ) self.printer.writeline("_template_uri = %r" % self.compiler.uri) self.printer.writeline( - "_source_encoding = %r" % self.compiler.source_encoding) + "_source_encoding = %r" % self.compiler.source_encoding + ) if self.compiler.imports: - buf = '' + buf = "" for imp in self.compiler.imports: buf += imp + "\n" self.printer.writeline(imp) impcode = ast.PythonCode( buf, - source='', lineno=0, + source="", + lineno=0, pos=0, - filename='template defined imports') + filename="template defined imports", + ) else: impcode = None main_identifiers = module_identifiers.branch(self.node) - module_identifiers.topleveldefs = \ - module_identifiers.topleveldefs.\ - union(main_identifiers.topleveldefs) + mit = module_identifiers.topleveldefs + module_identifiers.topleveldefs = mit.union( + main_identifiers.topleveldefs + ) module_identifiers.declared.update(TOPLEVEL_DECLARED) if impcode: module_identifiers.declared.update(impcode.declared_identifiers) self.compiler.identifiers = module_identifiers - self.printer.writeline("_exports = %r" % - [n.name for n in - main_identifiers.topleveldefs.values()] - ) + self.printer.writeline( + "_exports = %r" + % [n.name for n in main_identifiers.topleveldefs.values()] + ) self.printer.write_blanks(2) if len(module_code): @@ -274,8 +294,9 @@ class _GenerateRenderMethod(object): return list(main_identifiers.topleveldefs.values()) - def write_render_callable(self, node, name, args, buffered, filtered, - cached): + def write_render_callable( + self, node, name, args, buffered, filtered, cached + ): """write a top-level render callable. 
this could be the main render() method or that of a top-level def.""" @@ -284,32 +305,38 @@ class _GenerateRenderMethod(object): decorator = node.decorator if decorator: self.printer.writeline( - "@runtime._decorate_toplevel(%s)" % decorator) + "@runtime._decorate_toplevel(%s)" % decorator + ) self.printer.start_source(node.lineno) self.printer.writelines( - "def %s(%s):" % (name, ','.join(args)), + "def %s(%s):" % (name, ",".join(args)), # push new frame, assign current frame to __M_caller "__M_caller = context.caller_stack._push_frame()", - "try:" + "try:", ) if buffered or filtered or cached: self.printer.writeline("context._push_buffer()") self.identifier_stack.append( - self.compiler.identifiers.branch(self.node)) - if (not self.in_def or self.node.is_block) and '**pageargs' in args: - self.identifier_stack[-1].argument_declared.add('pageargs') + self.compiler.identifiers.branch(self.node) + ) + if (not self.in_def or self.node.is_block) and "**pageargs" in args: + self.identifier_stack[-1].argument_declared.add("pageargs") if not self.in_def and ( - len(self.identifiers.locally_assigned) > 0 or - len(self.identifiers.argument_declared) > 0 + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 ): - self.printer.writeline("__M_locals = __M_dict_builtin(%s)" % - ','.join([ - "%s=%s" % (x, x) for x in - self.identifiers.argument_declared - ])) + self.printer.writeline( + "__M_locals = __M_dict_builtin(%s)" + % ",".join( + [ + "%s=%s" % (x, x) + for x in self.identifiers.argument_declared + ] + ) + ) self.write_variable_declares(self.identifiers, toplevel=True) @@ -321,16 +348,14 @@ class _GenerateRenderMethod(object): self.printer.write_blanks(2) if cached: self.write_cache_decorator( - node, name, - args, buffered, - self.identifiers, toplevel=True) + node, name, args, buffered, self.identifiers, toplevel=True + ) def write_module_code(self, module_code): """write module-level template code, i.e. that which is enclosed in <%! 
%> tags in the template.""" for n in module_code: - self.printer.start_source(n.lineno) - self.printer.write_indented_block(n.text) + self.printer.write_indented_block(n.text, starting_lineno=n.lineno) def write_inherit(self, node): """write the module-level inheritance-determination callable.""" @@ -338,9 +363,9 @@ class _GenerateRenderMethod(object): self.printer.writelines( "def _mako_inherit(template, context):", "_mako_generate_namespaces(context)", - "return runtime._inherit_from(context, %s, _template_uri)" % - (node.parsed_attributes['file']), - None + "return runtime._inherit_from(context, %s, _template_uri)" + % (node.parsed_attributes["file"]), + None, ) def write_namespaces(self, namespaces): @@ -352,12 +377,13 @@ class _GenerateRenderMethod(object): "except KeyError:", "_mako_generate_namespaces(context)", "return context.namespaces[(__name__, name)]", - None, None + None, + None, ) self.printer.writeline("def _mako_generate_namespaces(context):") for node in namespaces.values(): - if 'import' in node.attributes: + if "import" in node.attributes: self.compiler.has_ns_imports = True self.printer.start_source(node.lineno) if len(node.nodes): @@ -367,7 +393,6 @@ class _GenerateRenderMethod(object): self.in_def = True class NSDefVisitor(object): - def visitDefTag(s, node): s.visitDefOrBase(node) @@ -383,56 +408,54 @@ class _GenerateRenderMethod(object): ) self.write_inline_def(node, identifiers, nested=False) export.append(node.funcname) + vis = NSDefVisitor() for n in node.nodes: n.accept_visitor(vis) - self.printer.writeline("return [%s]" % (','.join(export))) + self.printer.writeline("return [%s]" % (",".join(export))) self.printer.writeline(None) self.in_def = False callable_name = "make_namespace()" else: callable_name = "None" - if 'file' in node.parsed_attributes: + if "file" in node.parsed_attributes: self.printer.writeline( "ns = runtime.TemplateNamespace(%r," " context._clean_inheritance_tokens()," " templateuri=%s, callables=%s, " - " calling_uri=_template_uri)" % - ( + " calling_uri=_template_uri)" + % ( node.name, - node.parsed_attributes.get('file', 'None'), + node.parsed_attributes.get("file", "None"), callable_name, ) ) - elif 'module' in node.parsed_attributes: + elif "module" in node.parsed_attributes: self.printer.writeline( "ns = runtime.ModuleNamespace(%r," " context._clean_inheritance_tokens()," " callables=%s, calling_uri=_template_uri," - " module=%s)" % - ( + " module=%s)" + % ( node.name, callable_name, - node.parsed_attributes.get( - 'module', 'None') + node.parsed_attributes.get("module", "None"), ) ) else: self.printer.writeline( "ns = runtime.Namespace(%r," " context._clean_inheritance_tokens()," - " callables=%s, calling_uri=_template_uri)" % - ( - node.name, - callable_name, - ) + " callables=%s, calling_uri=_template_uri)" + % (node.name, callable_name) ) - if eval(node.attributes.get('inheritable', "False")): + if eval(node.attributes.get("inheritable", "False")): self.printer.writeline("context['self'].%s = ns" % (node.name)) self.printer.writeline( - "context.namespaces[(__name__, %s)] = ns" % repr(node.name)) + "context.namespaces[(__name__, %s)] = ns" % repr(node.name) + ) self.printer.write_blanks(1) if not len(namespaces): self.printer.writeline("pass") @@ -468,7 +491,8 @@ class _GenerateRenderMethod(object): # write closure functions for closures that we define # right here to_write = to_write.union( - [c.funcname for c in identifiers.closuredefs.values()]) + [c.funcname for c in identifiers.closuredefs.values()] + ) # remove identifiers 
that are declared in the argument # signature of the callable @@ -492,23 +516,22 @@ class _GenerateRenderMethod(object): if limit is not None: to_write = to_write.intersection(limit) - if toplevel and getattr(self.compiler, 'has_ns_imports', False): + if toplevel and getattr(self.compiler, "has_ns_imports", False): self.printer.writeline("_import_ns = {}") self.compiler.has_imports = True for ident, ns in self.compiler.namespaces.items(): - if 'import' in ns.attributes: + if "import" in ns.attributes: self.printer.writeline( "_mako_get_namespace(context, %r)." - "_populate(_import_ns, %r)" % - ( + "_populate(_import_ns, %r)" + % ( ident, - re.split(r'\s*,\s*', ns.attributes['import']) - )) + re.split(r"\s*,\s*", ns.attributes["import"]), + ) + ) if has_loop: - self.printer.writeline( - 'loop = __M_loop = runtime.LoopStack()' - ) + self.printer.writeline("loop = __M_loop = runtime.LoopStack()") for ident in to_write: if ident in comp_idents: @@ -526,37 +549,36 @@ class _GenerateRenderMethod(object): elif ident in self.compiler.namespaces: self.printer.writeline( - "%s = _mako_get_namespace(context, %r)" % - (ident, ident) + "%s = _mako_get_namespace(context, %r)" % (ident, ident) ) else: - if getattr(self.compiler, 'has_ns_imports', False): + if getattr(self.compiler, "has_ns_imports", False): if self.compiler.strict_undefined: self.printer.writelines( - "%s = _import_ns.get(%r, UNDEFINED)" % - (ident, ident), + "%s = _import_ns.get(%r, UNDEFINED)" + % (ident, ident), "if %s is UNDEFINED:" % ident, "try:", "%s = context[%r]" % (ident, ident), "except KeyError:", - "raise NameError(\"'%s' is not defined\")" % - ident, - None, None + "raise NameError(\"'%s' is not defined\")" % ident, + None, + None, ) else: self.printer.writeline( "%s = _import_ns.get" - "(%r, context.get(%r, UNDEFINED))" % - (ident, ident, ident)) + "(%r, context.get(%r, UNDEFINED))" + % (ident, ident, ident) + ) else: if self.compiler.strict_undefined: self.printer.writelines( "try:", "%s = context[%r]" % (ident, ident), "except KeyError:", - "raise NameError(\"'%s' is not defined\")" % - ident, - None + "raise NameError(\"'%s' is not defined\")" % ident, + None, ) else: self.printer.writeline( @@ -572,14 +594,16 @@ class _GenerateRenderMethod(object): nameargs = node.get_argument_expressions(as_call=True) if not self.in_def and ( - len(self.identifiers.locally_assigned) > 0 or - len(self.identifiers.argument_declared) > 0): - nameargs.insert(0, 'context._locals(__M_locals)') + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + nameargs.insert(0, "context._locals(__M_locals)") else: - nameargs.insert(0, 'context') + nameargs.insert(0, "context") self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls))) self.printer.writeline( - "return render_%s(%s)" % (funcname, ",".join(nameargs))) + "return render_%s(%s)" % (funcname, ",".join(nameargs)) + ) self.printer.writeline(None) def write_inline_def(self, node, identifiers, nested): @@ -590,21 +614,21 @@ class _GenerateRenderMethod(object): decorator = node.decorator if decorator: self.printer.writeline( - "@runtime._decorate_inline(context, %s)" % decorator) + "@runtime._decorate_inline(context, %s)" % decorator + ) self.printer.writeline( - "def %s(%s):" % (node.funcname, ",".join(namedecls))) + "def %s(%s):" % (node.funcname, ",".join(namedecls)) + ) filtered = len(node.filter_args.args) > 0 - buffered = eval(node.attributes.get('buffered', 'False')) - cached = eval(node.attributes.get('cached', 'False')) + 
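# ---- editor's note, not part of the vendored diff ----
# For reference, the two lookup styles emitted by write_variable_declares
# above: under strict_undefined a missing name raises immediately,
#
#     try:
#         x = context['x']
#     except KeyError:
#         raise NameError("'x' is not defined")
#
# while the default path degrades to context.get('x', UNDEFINED).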
buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) self.printer.writelines( # push new frame, assign current frame to __M_caller "__M_caller = context.caller_stack._push_frame()", - "try:" + "try:", ) if buffered or filtered or cached: - self.printer.writelines( - "context._push_buffer()", - ) + self.printer.writelines("context._push_buffer()") identifiers = identifiers.branch(node, nested=nested) @@ -618,12 +642,19 @@ class _GenerateRenderMethod(object): self.write_def_finish(node, buffered, filtered, cached) self.printer.writeline(None) if cached: - self.write_cache_decorator(node, node.funcname, - namedecls, False, identifiers, - inline=True, toplevel=False) + self.write_cache_decorator( + node, + node.funcname, + namedecls, + False, + identifiers, + inline=True, + toplevel=False, + ) - def write_def_finish(self, node, buffered, filtered, cached, - callstack=True): + def write_def_finish( + self, node, buffered, filtered, cached, callstack=True + ): """write the end section of a rendering function, either outermost or inline. @@ -636,9 +667,7 @@ class _GenerateRenderMethod(object): self.printer.writeline("return ''") if callstack: self.printer.writelines( - "finally:", - "context.caller_stack._pop_frame()", - None + "finally:", "context.caller_stack._pop_frame()", None ) if buffered or filtered or cached: @@ -648,13 +677,12 @@ class _GenerateRenderMethod(object): # implemenation might be using a context with no # extra buffers self.printer.writelines( - "finally:", - "__M_buf = context._pop_buffer()" + "finally:", "__M_buf = context._pop_buffer()" ) else: self.printer.writelines( "finally:", - "__M_buf, __M_writer = context._pop_buffer_and_writer()" + "__M_buf, __M_writer = context._pop_buffer_and_writer()", ) if callstack: @@ -662,89 +690,100 @@ class _GenerateRenderMethod(object): s = "__M_buf.getvalue()" if filtered: - s = self.create_filter_callable(node.filter_args.args, s, - False) + s = self.create_filter_callable( + node.filter_args.args, s, False + ) self.printer.writeline(None) if buffered and not cached: - s = self.create_filter_callable(self.compiler.buffer_filters, - s, False) + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) if buffered or cached: self.printer.writeline("return %s" % s) else: - self.printer.writelines( - "__M_writer(%s)" % s, - "return ''" - ) + self.printer.writelines("__M_writer(%s)" % s, "return ''") - def write_cache_decorator(self, node_or_pagetag, name, - args, buffered, identifiers, - inline=False, toplevel=False): + def write_cache_decorator( + self, + node_or_pagetag, + name, + args, + buffered, + identifiers, + inline=False, + toplevel=False, + ): """write a post-function decorator to replace a rendering callable with a cached version of itself.""" self.printer.writeline("__M_%s = %s" % (name, name)) - cachekey = node_or_pagetag.parsed_attributes.get('cache_key', - repr(name)) + cachekey = node_or_pagetag.parsed_attributes.get( + "cache_key", repr(name) + ) cache_args = {} if self.compiler.pagetag is not None: cache_args.update( - ( - pa[6:], - self.compiler.pagetag.parsed_attributes[pa] - ) + (pa[6:], self.compiler.pagetag.parsed_attributes[pa]) for pa in self.compiler.pagetag.parsed_attributes - if pa.startswith('cache_') and pa != 'cache_key' + if pa.startswith("cache_") and pa != "cache_key" ) cache_args.update( - ( - pa[6:], - node_or_pagetag.parsed_attributes[pa] - ) for pa in node_or_pagetag.parsed_attributes - if pa.startswith('cache_') and pa != 
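# ---- editor's note, not part of the vendored diff ----
# pass_args above turns the decorated signature into a forwarding call:
#
#     args      = ["context", "x", "y=None"]
#     pass_args = ["context", "x", "y=y"]     # "y=None" -> "y=y"
#
# so the cache wrapper can invoke lambda:__M_<name>(context, x, y=y).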
'cache_key' + (pa[6:], node_or_pagetag.parsed_attributes[pa]) + for pa in node_or_pagetag.parsed_attributes + if pa.startswith("cache_") and pa != "cache_key" ) - if 'timeout' in cache_args: - cache_args['timeout'] = int(eval(cache_args['timeout'])) + if "timeout" in cache_args: + cache_args["timeout"] = int(eval(cache_args["timeout"])) - self.printer.writeline("def %s(%s):" % (name, ','.join(args))) + self.printer.writeline("def %s(%s):" % (name, ",".join(args))) # form "arg1, arg2, arg3=arg3, arg4=arg4", etc. pass_args = [ - "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a - for a in args + "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args ] self.write_variable_declares( identifiers, toplevel=toplevel, - limit=node_or_pagetag.undeclared_identifiers() + limit=node_or_pagetag.undeclared_identifiers(), ) if buffered: - s = "context.get('local')."\ - "cache._ctx_get_or_create("\ - "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" % ( - cachekey, name, ','.join(pass_args), - ''.join(["%s=%s, " % (k, v) - for k, v in cache_args.items()]), - name + s = ( + "context.get('local')." + "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), + name, ) + ) # apply buffer_filters - s = self.create_filter_callable(self.compiler.buffer_filters, s, - False) + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) self.printer.writelines("return " + s, None) else: self.printer.writelines( "__M_writer(context.get('local')." "cache._ctx_get_or_create(" - "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" % - ( - cachekey, name, ','.join(pass_args), - ''.join(["%s=%s, " % (k, v) - for k, v in cache_args.items()]), + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), name, ), "return ''", - None + None, ) def create_filter_callable(self, args, target, is_expression): @@ -753,24 +792,24 @@ class _GenerateRenderMethod(object): 'default' filter aliases as needed.""" def locate_encode(name): - if re.match(r'decode\..+', name): + if re.match(r"decode\..+", name): return "filters." 
+ name elif self.compiler.disable_unicode: return filters.NON_UNICODE_ESCAPES.get(name, name) else: return filters.DEFAULT_ESCAPES.get(name, name) - if 'n' not in args: + if "n" not in args: if is_expression: if self.compiler.pagetag: args = self.compiler.pagetag.filter_args.args + args - if self.compiler.default_filters: + if self.compiler.default_filters and "n" not in args: args = self.compiler.default_filters + args for e in args: # if filter given as a function, get just the identifier portion - if e == 'n': + if e == "n": continue - m = re.match(r'(.+?)(\(.*\))', e) + m = re.match(r"(.+?)(\(.*\))", e) if m: ident, fargs = m.group(1, 2) f = locate_encode(ident) @@ -783,15 +822,18 @@ class _GenerateRenderMethod(object): def visitExpression(self, node): self.printer.start_source(node.lineno) - if len(node.escapes) or \ - ( - self.compiler.pagetag is not None and - len(self.compiler.pagetag.filter_args.args) - ) or \ - len(self.compiler.default_filters): + if ( + len(node.escapes) + or ( + self.compiler.pagetag is not None + and len(self.compiler.pagetag.filter_args.args) + ) + or len(self.compiler.default_filters) + ): - s = self.create_filter_callable(node.escapes_code.args, - "%s" % node.text, True) + s = self.create_filter_callable( + node.escapes_code.args, "%s" % node.text, True + ) self.printer.writeline("__M_writer(%s)" % s) else: self.printer.writeline("__M_writer(%s)" % node.text) @@ -800,12 +842,12 @@ class _GenerateRenderMethod(object): if node.isend: self.printer.writeline(None) if node.has_loop_context: - self.printer.writeline('finally:') + self.printer.writeline("finally:") self.printer.writeline("loop = __M_loop._exit()") self.printer.writeline(None) else: self.printer.start_source(node.lineno) - if self.compiler.enable_loop and node.keyword == 'for': + if self.compiler.enable_loop and node.keyword == "for": text = mangle_mako_loop(node, self.printer) else: text = node.text @@ -817,12 +859,16 @@ class _GenerateRenderMethod(object): # and end control lines, and # 3) any control line with no content other than comments if not children or ( - compat.all(isinstance(c, (parsetree.Comment, - parsetree.ControlLine)) - for c in children) and - compat.all((node.is_ternary(c.keyword) or c.isend) - for c in children - if isinstance(c, parsetree.ControlLine))): + compat.all( + isinstance(c, (parsetree.Comment, parsetree.ControlLine)) + for c in children + ) + and compat.all( + (node.is_ternary(c.keyword) or c.isend) + for c in children + if isinstance(c, parsetree.ControlLine) + ) + ): self.printer.writeline("pass") def visitText(self, node): @@ -833,8 +879,7 @@ class _GenerateRenderMethod(object): filtered = len(node.filter_args.args) > 0 if filtered: self.printer.writelines( - "__M_writer = context._push_writer()", - "try:", + "__M_writer = context._push_writer()", "try:" ) for n in node.nodes: n.accept_visitor(self) @@ -842,18 +887,18 @@ class _GenerateRenderMethod(object): self.printer.writelines( "finally:", "__M_buf, __M_writer = context._pop_buffer_and_writer()", - "__M_writer(%s)" % - self.create_filter_callable( - node.filter_args.args, - "__M_buf.getvalue()", - False), - None + "__M_writer(%s)" + % self.create_filter_callable( + node.filter_args.args, "__M_buf.getvalue()", False + ), + None, ) def visitCode(self, node): if not node.ismodule: - self.printer.start_source(node.lineno) - self.printer.write_indented_block(node.text) + self.printer.write_indented_block( + node.text, starting_lineno=node.lineno + ) if not self.in_def and len(self.identifiers.locally_assigned) > 
0: # if we are the "template" def, fudge locally @@ -861,24 +906,28 @@ class _GenerateRenderMethod(object): # which is used for def calls within the same template, # to simulate "enclosing scope" self.printer.writeline( - '__M_locals_builtin_stored = __M_locals_builtin()') + "__M_locals_builtin_stored = __M_locals_builtin()" + ) self.printer.writeline( - '__M_locals.update(__M_dict_builtin([(__M_key,' - ' __M_locals_builtin_stored[__M_key]) for __M_key in' - ' [%s] if __M_key in __M_locals_builtin_stored]))' % - ','.join([repr(x) for x in node.declared_identifiers()])) + "__M_locals.update(__M_dict_builtin([(__M_key," + " __M_locals_builtin_stored[__M_key]) for __M_key in" + " [%s] if __M_key in __M_locals_builtin_stored]))" + % ",".join([repr(x) for x in node.declared_identifiers()]) + ) def visitIncludeTag(self, node): self.printer.start_source(node.lineno) - args = node.attributes.get('args') + args = node.attributes.get("args") if args: self.printer.writeline( - "runtime._include_file(context, %s, _template_uri, %s)" % - (node.parsed_attributes['file'], args)) + "runtime._include_file(context, %s, _template_uri, %s)" + % (node.parsed_attributes["file"], args) + ) else: self.printer.writeline( - "runtime._include_file(context, %s, _template_uri)" % - (node.parsed_attributes['file'])) + "runtime._include_file(context, %s, _template_uri)" + % (node.parsed_attributes["file"]) + ) def visitNamespaceTag(self, node): pass @@ -891,13 +940,14 @@ class _GenerateRenderMethod(object): self.printer.writeline("%s()" % node.funcname) else: nameargs = node.get_argument_expressions(as_call=True) - nameargs += ['**pageargs'] + nameargs += ["**pageargs"] self.printer.writeline( "if 'parent' not in context._data or " - "not hasattr(context._data['parent'], '%s'):" - % node.funcname) + "not hasattr(context._data['parent'], '%s'):" % node.funcname + ) self.printer.writeline( - "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))) + "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)) + ) self.printer.writeline("\n") def visitCallNamespaceTag(self, node): @@ -908,19 +958,18 @@ class _GenerateRenderMethod(object): def visitCallTag(self, node): self.printer.writeline("def ccall(caller):") - export = ['body'] + export = ["body"] callable_identifiers = self.identifiers.branch(node, nested=True) body_identifiers = callable_identifiers.branch(node, nested=False) # we want the 'caller' passed to ccall to be used # for the body() function, but for other non-body() # <%def>s within <%call> we want the current caller # off the call stack (if any) - body_identifiers.add_declared('caller') + body_identifiers.add_declared("caller") self.identifier_stack.append(body_identifiers) class DefVisitor(object): - def visitDefTag(s, node): s.visitDefOrBase(node) @@ -942,16 +991,13 @@ class _GenerateRenderMethod(object): self.identifier_stack.pop() bodyargs = node.body_decl.get_argument_expressions() - self.printer.writeline("def body(%s):" % ','.join(bodyargs)) + self.printer.writeline("def body(%s):" % ",".join(bodyargs)) # TODO: figure out best way to specify # buffering/nonbuffering (at call time would be better) buffered = False if buffered: - self.printer.writelines( - "context._push_buffer()", - "try:" - ) + self.printer.writelines("context._push_buffer()", "try:") self.write_variable_declares(body_identifiers) self.identifier_stack.append(body_identifiers) @@ -960,25 +1006,22 @@ class _GenerateRenderMethod(object): self.identifier_stack.pop() self.write_def_finish(node, buffered, False, False, 
callstack=False) - self.printer.writelines( - None, - "return [%s]" % (','.join(export)), - None - ) + self.printer.writelines(None, "return [%s]" % (",".join(export)), None) self.printer.writelines( # push on caller for nested call "context.caller_stack.nextcaller = " "runtime.Namespace('caller', context, " "callables=ccall(__M_caller))", - "try:") + "try:", + ) self.printer.start_source(node.lineno) self.printer.writelines( - "__M_writer(%s)" % self.create_filter_callable( - [], node.expression, True), + "__M_writer(%s)" + % self.create_filter_callable([], node.expression, True), "finally:", "context.caller_stack.nextcaller = None", - None + None, ) @@ -996,10 +1039,12 @@ class _Identifiers(object): else: # things that have already been declared # in an enclosing namespace (i.e. names we can just use) - self.declared = set(parent.declared).\ - union([c.name for c in parent.closuredefs.values()]).\ - union(parent.locally_declared).\ - union(parent.argument_declared) + self.declared = ( + set(parent.declared) + .union([c.name for c in parent.closuredefs.values()]) + .union(parent.locally_declared) + .union(parent.argument_declared) + ) # if these identifiers correspond to a "nested" # scope, it means whatever the parent identifiers @@ -1043,11 +1088,13 @@ class _Identifiers(object): node.accept_visitor(self) illegal_names = self.compiler.reserved_names.intersection( - self.locally_declared) + self.locally_declared + ) if illegal_names: raise exceptions.NameConflictError( - "Reserved words declared in template: %s" % - ", ".join(illegal_names)) + "Reserved words declared in template: %s" + % ", ".join(illegal_names) + ) def branch(self, node, **kwargs): """create a new Identifiers for a new Node, with @@ -1060,24 +1107,28 @@ class _Identifiers(object): return set(self.topleveldefs.union(self.closuredefs).values()) def __repr__(self): - return "Identifiers(declared=%r, locally_declared=%r, "\ - "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\ - "argumentdeclared=%r)" %\ - ( + return ( + "Identifiers(declared=%r, locally_declared=%r, " + "undeclared=%r, topleveldefs=%r, closuredefs=%r, " + "argumentdeclared=%r)" + % ( list(self.declared), list(self.locally_declared), list(self.undeclared), [c.name for c in self.topleveldefs.values()], [c.name for c in self.closuredefs.values()], - self.argument_declared) + self.argument_declared, + ) + ) def check_declared(self, node): """update the state of this Identifiers with the undeclared and declared identifiers of the given node.""" for ident in node.undeclared_identifiers(): - if ident != 'context' and\ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) for ident in node.declared_identifiers(): self.locally_declared.add(ident) @@ -1097,7 +1148,8 @@ class _Identifiers(object): if not node.ismodule: self.check_declared(node) self.locally_assigned = self.locally_assigned.union( - node.declared_identifiers()) + node.declared_identifiers() + ) def visitNamespaceTag(self, node): # only traverse into the sub-elements of a @@ -1110,13 +1162,16 @@ class _Identifiers(object): def _check_name_exists(self, collection, node): existing = collection.get(node.funcname) collection[node.funcname] = node - if existing is not None and \ - existing is not node and \ - (node.is_block or existing.is_block): + if ( + existing is not None + and existing is not node + and (node.is_block or existing.is_block) + ): raise exceptions.CompileException( 
"%%def or %%block named '%s' already " - "exists in this template." % - node.funcname, **node.exception_kwargs) + "exists in this template." % node.funcname, + **node.exception_kwargs + ) def visitDefTag(self, node): if node.is_root() and not node.is_anonymous: @@ -1125,8 +1180,9 @@ class _Identifiers(object): self._check_name_exists(self.closuredefs, node) for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) # visit defs only one level deep @@ -1143,16 +1199,22 @@ class _Identifiers(object): if isinstance(self.node, parsetree.DefTag): raise exceptions.CompileException( "Named block '%s' not allowed inside of def '%s'" - % (node.name, self.node.name), **node.exception_kwargs) - elif isinstance(self.node, - (parsetree.CallTag, parsetree.CallNamespaceTag)): + % (node.name, self.node.name), + **node.exception_kwargs + ) + elif isinstance( + self.node, (parsetree.CallTag, parsetree.CallNamespaceTag) + ): raise exceptions.CompileException( "Named block '%s' not allowed inside of <%%call> tag" - % (node.name, ), **node.exception_kwargs) + % (node.name,), + **node.exception_kwargs + ) for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) if not node.is_anonymous: @@ -1167,8 +1229,9 @@ class _Identifiers(object): def visitTextTag(self, node): for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) def visitIncludeTag(self, node): @@ -1185,9 +1248,9 @@ class _Identifiers(object): def visitCallTag(self, node): if node is self.node: for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union( - self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) for ident in node.declared_identifiers(): self.argument_declared.add(ident) @@ -1195,15 +1258,15 @@ class _Identifiers(object): n.accept_visitor(self) else: for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union( - self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) _FOR_LOOP = re.compile( - r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*' - r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):' + r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):" ) @@ -1218,11 +1281,11 @@ def mangle_mako_loop(node, printer): match = _FOR_LOOP.match(node.text) if match: printer.writelines( - 'loop = __M_loop._enter(%s)' % match.group(2), - 'try:' + "loop = __M_loop._enter(%s)" % match.group(2), + "try:" # 'with __M_loop(%s) as loop:' % match.group(2) ) - text = 'for %s in loop:' % match.group(1) + text = "for %s in loop:" % match.group(1) else: raise SyntaxError("Couldn't apply loop context: %s" % node.text) else: @@ -1239,7 +1302,7 @@ class LoopVariable(object): self.detected = False def _loop_reference_detected(self, node): - if 'loop' in 
node.undeclared_identifiers(): + if "loop" in node.undeclared_identifiers(): self.detected = True else: for n in node.get_children(): diff --git a/server/www/packages/packages-linux/x64/mako/compat.py b/server/www/packages/packages-linux/x64/mako/compat.py index a2ab243..9aac98c 100644 --- a/server/www/packages/packages-linux/x64/mako/compat.py +++ b/server/www/packages/packages-linux/x64/mako/compat.py @@ -1,34 +1,52 @@ +# mako/compat.py +# Copyright 2006-2020 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import collections +import inspect import sys -import time py3k = sys.version_info >= (3, 0) -py33 = sys.version_info >= (3, 3) py2k = sys.version_info < (3,) -py26 = sys.version_info >= (2, 6) py27 = sys.version_info >= (2, 7) -jython = sys.platform.startswith('java') -win32 = sys.platform.startswith('win') -pypy = hasattr(sys, 'pypy_version_info') +jython = sys.platform.startswith("java") +win32 = sys.platform.startswith("win") +pypy = hasattr(sys, "pypy_version_info") -if py3k: - # create a "getargspec" from getfullargspec(), which is not deprecated - # in Py3K; getargspec() has started to emit warnings as of Py3.5. - # As of Py3.4, now they are trying to move from getfullargspec() - # to "signature()", but getfullargspec() is not deprecated, so stick - # with that for now. +ArgSpec = collections.namedtuple( + "ArgSpec", ["args", "varargs", "keywords", "defaults"] +) - import collections - ArgSpec = collections.namedtuple( - "ArgSpec", - ["args", "varargs", "keywords", "defaults"]) - from inspect import getfullargspec as inspect_getfullargspec - def inspect_getargspec(func): - return ArgSpec( - *inspect_getfullargspec(func)[0:4] - ) -else: - from inspect import getargspec as inspect_getargspec # noqa +def inspect_getargspec(func): + """getargspec based on fully vendored getfullargspec from Python 3.3.""" + + if inspect.ismethod(func): + func = func.__func__ + if not inspect.isfunction(func): + raise TypeError("{!r} is not a Python function".format(func)) + + co = func.__code__ + if not inspect.iscode(co): + raise TypeError("{!r} is not a code object".format(co)) + + nargs = co.co_argcount + names = co.co_varnames + nkwargs = co.co_kwonlyargcount if py3k else 0 + args = list(names[:nargs]) + + nargs += nkwargs + varargs = None + if co.co_flags & inspect.CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & inspect.CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + + return ArgSpec(args, varargs, varkw, func.__defaults__) if py3k: @@ -36,7 +54,8 @@ if py3k: import builtins as compat_builtins from urllib.parse import quote_plus, unquote_plus from html.entities import codepoint2name, name2codepoint - string_types = str, + + string_types = (str,) binary_type = bytes text_type = str @@ -51,8 +70,10 @@ if py3k: def octal(lit): return eval("0o" + lit) + else: import __builtin__ as compat_builtins # noqa + try: from cStringIO import StringIO except: @@ -62,7 +83,8 @@ else: from urllib import quote_plus, unquote_plus # noqa from htmlentitydefs import codepoint2name, name2codepoint # noqa - string_types = basestring, # noqa + + string_types = (basestring,) # noqa binary_type = str text_type = unicode # noqa @@ -76,16 +98,18 @@ else: return eval("0" + lit) -if py33: +if py3k: from importlib import machinery def load_module(module_id, path): return machinery.SourceFileLoader(module_id, path).load_module() + + else: import imp def 
load_module(module_id, path): - fp = open(path, 'rb') + fp = open(path, "rb") try: return imp.load_source(module_id, path, fp) finally: @@ -93,93 +117,32 @@ else: if py3k: + def reraise(tp, value, tb=None, cause=None): if cause is not None: value.__cause__ = cause if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value + + else: - exec("def reraise(tp, value, tb=None, cause=None):\n" - " raise tp, value, tb\n") + exec( + "def reraise(tp, value, tb=None, cause=None):\n" + " raise tp, value, tb\n" + ) def exception_as(): return sys.exc_info()[1] -try: - import threading - if py3k: - import _thread as thread - else: - import thread -except ImportError: - import dummy_threading as threading # noqa - if py3k: - import _dummy_thread as thread - else: - import dummy_thread as thread # noqa -if win32 or jython: - time_func = time.clock -else: - time_func = time.time - -try: - from functools import partial -except: - def partial(func, *args, **keywords): - def newfunc(*fargs, **fkeywords): - newkeywords = keywords.copy() - newkeywords.update(fkeywords) - return func(*(args + fargs), **newkeywords) - return newfunc - - -all = all -import json # noqa +all = all # noqa def exception_name(exc): return exc.__class__.__name__ -try: - from inspect import CO_VARKEYWORDS, CO_VARARGS - - def inspect_func_args(fn): - if py3k: - co = fn.__code__ - else: - co = fn.func_code - - nargs = co.co_argcount - names = co.co_varnames - args = list(names[:nargs]) - - varargs = None - if co.co_flags & CO_VARARGS: - varargs = co.co_varnames[nargs] - nargs = nargs + 1 - varkw = None - if co.co_flags & CO_VARKEYWORDS: - varkw = co.co_varnames[nargs] - - if py3k: - return args, varargs, varkw, fn.__defaults__ - else: - return args, varargs, varkw, fn.func_defaults -except ImportError: - import inspect - - def inspect_func_args(fn): - return inspect.getargspec(fn) - -if py3k: - def callable(fn): - return hasattr(fn, '__call__') -else: - callable = callable - ################################################ # cross-compatible metaclass implementation @@ -187,6 +150,8 @@ else: def with_metaclass(meta, base=object): """Create a base class with a metaclass.""" return meta("%sBase" % meta.__name__, (base,), {}) + + ################################################ @@ -195,7 +160,7 @@ def arg_stringname(func_arg): In Python3.4 a function's args are of _ast.arg type not _ast.name """ - if hasattr(func_arg, 'arg'): + if hasattr(func_arg, "arg"): return func_arg.arg else: return str(func_arg) diff --git a/server/www/packages/packages-linux/x64/mako/exceptions.py b/server/www/packages/packages-linux/x64/mako/exceptions.py index cb6fb3f..ea7b20d 100644 --- a/server/www/packages/packages-linux/x64/mako/exceptions.py +++ b/server/www/packages/packages-linux/x64/mako/exceptions.py @@ -1,14 +1,16 @@ # mako/exceptions.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """exception classes""" -import traceback import sys -from mako import util, compat +import traceback + +from mako import compat +from mako import util class MakoException(Exception): @@ -27,11 +29,10 @@ def _format_filepos(lineno, pos, filename): class CompileException(MakoException): - def __init__(self, message, source, lineno, pos, filename): MakoException.__init__( - self, - message + _format_filepos(lineno, pos, filename)) + self, message + 
_format_filepos(lineno, pos, filename) + ) self.lineno = lineno self.pos = pos self.filename = filename @@ -39,11 +40,10 @@ class CompileException(MakoException): class SyntaxException(MakoException): - def __init__(self, message, source, lineno, pos, filename): MakoException.__init__( - self, - message + _format_filepos(lineno, pos, filename)) + self, message + _format_filepos(lineno, pos, filename) + ) self.lineno = lineno self.pos = pos self.filename = filename @@ -115,7 +115,7 @@ class RichTraceback(object): # str(Exception(u'\xe6')) work in Python < 2.6 self.message = self.error.args[0] if not isinstance(self.message, compat.text_type): - self.message = compat.text_type(self.message, 'ascii', 'replace') + self.message = compat.text_type(self.message, "ascii", "replace") def _get_reformatted_records(self, records): for rec in records: @@ -151,25 +151,28 @@ class RichTraceback(object): source, and code line from that line number of the template.""" import mako.template + mods = {} rawrecords = traceback.extract_tb(trcback) new_trcback = [] for filename, lineno, function, line in rawrecords: if not line: - line = '' + line = "" try: - (line_map, template_lines) = mods[filename] + (line_map, template_lines, template_filename) = mods[filename] except KeyError: try: info = mako.template._get_module_info(filename) module_source = info.code template_source = info.source - template_filename = info.template_filename or filename + template_filename = ( + info.template_filename or info.template_uri or filename + ) except KeyError: # A normal .py file (not a Template) if not compat.py3k: try: - fp = open(filename, 'rb') + fp = open(filename, "rb") encoding = util.parse_encoding(fp) fp.close() except IOError: @@ -177,21 +180,33 @@ class RichTraceback(object): if encoding: line = line.decode(encoding) else: - line = line.decode('ascii', 'replace') - new_trcback.append((filename, lineno, function, line, - None, None, None, None)) + line = line.decode("ascii", "replace") + new_trcback.append( + ( + filename, + lineno, + function, + line, + None, + None, + None, + None, + ) + ) continue template_ln = 1 - source_map = mako.template.ModuleInfo.\ - get_module_source_metadata( - module_source, full_line_map=True) - line_map = source_map['full_line_map'] + mtm = mako.template.ModuleInfo + source_map = mtm.get_module_source_metadata( + module_source, full_line_map=True + ) + line_map = source_map["full_line_map"] - template_lines = [line_ for line_ in - template_source.split("\n")] - mods[filename] = (line_map, template_lines) + template_lines = [ + line_ for line_ in template_source.split("\n") + ] + mods[filename] = (line_map, template_lines, template_filename) template_ln = line_map[lineno - 1] @@ -199,9 +214,18 @@ class RichTraceback(object): template_line = template_lines[template_ln - 1] else: template_line = None - new_trcback.append((filename, lineno, function, - line, template_filename, template_ln, - template_line, template_source)) + new_trcback.append( + ( + filename, + lineno, + function, + line, + template_filename, + template_ln, + template_line, + template_source, + ) + ) if not self.source: for l in range(len(new_trcback) - 1, 0, -1): if new_trcback[l][5]: @@ -212,15 +236,17 @@ class RichTraceback(object): if new_trcback: try: # A normal .py file (not a Template) - fp = open(new_trcback[-1][0], 'rb') + fp = open(new_trcback[-1][0], "rb") encoding = util.parse_encoding(fp) + if compat.py3k and not encoding: + encoding = "utf-8" fp.seek(0) self.source = fp.read() fp.close() if encoding: 
self.source = self.source.decode(encoding) except IOError: - self.source = '' + self.source = "" self.lineno = new_trcback[-1][1] return new_trcback @@ -233,7 +259,9 @@ def text_error_template(lookup=None): """ import mako.template - return mako.template.Template(r""" + + return mako.template.Template( + r""" <%page args="error=None, traceback=None"/> <%! from mako.exceptions import RichTraceback @@ -247,7 +275,8 @@ Traceback (most recent call last): ${line | trim} % endfor ${tback.errorname}: ${tback.message} -""") +""" + ) def _install_pygments(): @@ -259,9 +288,10 @@ def _install_pygments(): def _install_fallback(): global syntax_highlight, pygments_html_formatter from mako.filters import html_escape + pygments_html_formatter = None - def syntax_highlight(filename='', language=None): + def syntax_highlight(filename="", language=None): return html_escape @@ -270,6 +300,8 @@ def _install_highlighting(): _install_pygments() except ImportError: _install_fallback() + + _install_highlighting() @@ -287,7 +319,9 @@ def html_error_template(): """ import mako.template - return mako.template.Template(r""" + + return mako.template.Template( + r""" <%! from mako.exceptions import RichTraceback, syntax_highlight,\ pygments_html_formatter @@ -390,5 +424,7 @@ def html_error_template(): % endif -""", output_encoding=sys.getdefaultencoding(), - encoding_errors='htmlentityreplace') +""", + output_encoding=sys.getdefaultencoding(), + encoding_errors="htmlentityreplace", + ) diff --git a/server/www/packages/packages-linux/x64/mako/ext/autohandler.py b/server/www/packages/packages-linux/x64/mako/ext/autohandler.py index 9d1c911..8b1324e 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/autohandler.py +++ b/server/www/packages/packages-linux/x64/mako/ext/autohandler.py @@ -1,5 +1,5 @@ # ext/autohandler.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,29 +8,29 @@ requires that the TemplateLookup class is used with templates. -usage: +usage:: -<%! - from mako.ext.autohandler import autohandler -%> -<%inherit file="${autohandler(template, context)}"/> + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context)}"/> -or with custom autohandler filename: +or with custom autohandler filename:: -<%! - from mako.ext.autohandler import autohandler -%> -<%inherit file="${autohandler(template, context, name='somefilename')}"/> + <%! 
+ from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context, name='somefilename')}"/> """ -import posixpath import os +import posixpath import re -def autohandler(template, context, name='autohandler'): +def autohandler(template, context, name="autohandler"): lookup = context.lookup _template_uri = template.module._template_uri if not lookup.filesystem_checks: @@ -39,13 +39,14 @@ def autohandler(template, context, name='autohandler'): except KeyError: pass - tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name] + tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name] while len(tokens): - path = '/' + '/'.join(tokens) + path = "/" + "/".join(tokens) if path != _template_uri and _file_exists(lookup, path): if not lookup.filesystem_checks: return lookup._uri_cache.setdefault( - (autohandler, _template_uri, name), path) + (autohandler, _template_uri, name), path + ) else: return path if len(tokens) == 1: @@ -54,15 +55,16 @@ def autohandler(template, context, name='autohandler'): if not lookup.filesystem_checks: return lookup._uri_cache.setdefault( - (autohandler, _template_uri, name), None) + (autohandler, _template_uri, name), None + ) else: return None def _file_exists(lookup, path): - psub = re.sub(r'^/', '', path) + psub = re.sub(r"^/", "", path) for d in lookup.directories: - if os.path.exists(d + '/' + psub): + if os.path.exists(d + "/" + psub): return True else: return False diff --git a/server/www/packages/packages-linux/x64/mako/ext/babelplugin.py b/server/www/packages/packages-linux/x64/mako/ext/babelplugin.py index 0b5e84f..76bbc5b 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/babelplugin.py +++ b/server/www/packages/packages-linux/x64/mako/ext/babelplugin.py @@ -1,23 +1,24 @@ # ext/babelplugin.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """gettext message extraction via Babel: http://babel.edgewall.org/""" from babel.messages.extract import extract_python + from mako.ext.extract import MessageExtractor class BabelMakoExtractor(MessageExtractor): - def __init__(self, keywords, comment_tags, options): self.keywords = keywords self.options = options self.config = { - 'comment-tags': u' '.join(comment_tags), - 'encoding': options.get('input_encoding', - options.get('encoding', None)), + "comment-tags": u" ".join(comment_tags), + "encoding": options.get( + "input_encoding", options.get("encoding", None) + ), } super(BabelMakoExtractor, self).__init__() @@ -25,12 +26,19 @@ class BabelMakoExtractor(MessageExtractor): return self.process_file(fileobj) def process_python(self, code, code_lineno, translator_strings): - comment_tags = self.config['comment-tags'] - for lineno, funcname, messages, python_translator_comments \ - in extract_python(code, - self.keywords, comment_tags, self.options): - yield (code_lineno + (lineno - 1), funcname, messages, - translator_strings + python_translator_comments) + comment_tags = self.config["comment-tags"] + for ( + lineno, + funcname, + messages, + python_translator_comments, + ) in extract_python(code, self.keywords, comment_tags, self.options): + yield ( + code_lineno + (lineno - 1), + funcname, + messages, + translator_strings + python_translator_comments, + ) def extract(fileobj, keywords, comment_tags, options): diff --git 
a/server/www/packages/packages-linux/x64/mako/ext/beaker_cache.py b/server/www/packages/packages-linux/x64/mako/ext/beaker_cache.py index c7c260d..f65ce43 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/beaker_cache.py +++ b/server/www/packages/packages-linux/x64/mako/ext/beaker_cache.py @@ -1,7 +1,12 @@ +# ext/beaker_cache.py +# Copyright 2006-2020 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + """Provide a :class:`.CacheImpl` for the Beaker caching system.""" from mako import exceptions - from mako.cache import CacheImpl try: @@ -27,36 +32,37 @@ class BeakerCacheImpl(CacheImpl): def __init__(self, cache): if not has_beaker: raise exceptions.RuntimeException( - "Can't initialize Beaker plugin; Beaker is not installed.") + "Can't initialize Beaker plugin; Beaker is not installed." + ) global _beaker_cache if _beaker_cache is None: - if 'manager' in cache.template.cache_args: - _beaker_cache = cache.template.cache_args['manager'] + if "manager" in cache.template.cache_args: + _beaker_cache = cache.template.cache_args["manager"] else: _beaker_cache = beaker_cache.CacheManager() super(BeakerCacheImpl, self).__init__(cache) def _get_cache(self, **kw): - expiretime = kw.pop('timeout', None) - if 'dir' in kw: - kw['data_dir'] = kw.pop('dir') + expiretime = kw.pop("timeout", None) + if "dir" in kw: + kw["data_dir"] = kw.pop("dir") elif self.cache.template.module_directory: - kw['data_dir'] = self.cache.template.module_directory + kw["data_dir"] = self.cache.template.module_directory - if 'manager' in kw: - kw.pop('manager') + if "manager" in kw: + kw.pop("manager") - if kw.get('type') == 'memcached': - kw['type'] = 'ext:memcached' + if kw.get("type") == "memcached": + kw["type"] = "ext:memcached" - if 'region' in kw: - region = kw.pop('region') + if "region" in kw: + region = kw.pop("region") cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) else: cache = _beaker_cache.get_cache(self.cache.id, **kw) - cache_args = {'starttime': self.cache.starttime} + cache_args = {"starttime": self.cache.starttime} if expiretime: - cache_args['expiretime'] = expiretime + cache_args["expiretime"] = expiretime return cache, cache_args def get_or_create(self, key, creation_function, **kw): diff --git a/server/www/packages/packages-linux/x64/mako/ext/extract.py b/server/www/packages/packages-linux/x64/mako/ext/extract.py index d777ea8..ad2348a 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/extract.py +++ b/server/www/packages/packages-linux/x64/mako/ext/extract.py @@ -1,30 +1,39 @@ +# ext/extract.py +# Copyright 2006-2020 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + import re + from mako import compat from mako import lexer from mako import parsetree class MessageExtractor(object): - def process_file(self, fileobj): template_node = lexer.Lexer( - fileobj.read(), - input_encoding=self.config['encoding']).parse() + fileobj.read(), input_encoding=self.config["encoding"] + ).parse() for extracted in self.extract_nodes(template_node.get_children()): yield extracted def extract_nodes(self, nodes): translator_comments = [] in_translator_comments = False - input_encoding = self.config['encoding'] or 'ascii' + input_encoding = self.config["encoding"] or "ascii" comment_tags = list( - filter(None, re.split(r'\s+', self.config['comment-tags']))) + 
filter(None, re.split(r"\s+", self.config["comment-tags"])) + ) for node in nodes: child_nodes = None - if in_translator_comments and \ - isinstance(node, parsetree.Text) and \ - not node.content.strip(): + if ( + in_translator_comments + and isinstance(node, parsetree.Text) + and not node.content.strip() + ): # Ignore whitespace within translator comments continue @@ -32,13 +41,15 @@ class MessageExtractor(object): value = node.text.strip() if in_translator_comments: translator_comments.extend( - self._split_comment(node.lineno, value)) + self._split_comment(node.lineno, value) + ) continue for comment_tag in comment_tags: if value.startswith(comment_tag): in_translator_comments = True translator_comments.extend( - self._split_comment(node.lineno, value)) + self._split_comment(node.lineno, value) + ) continue if isinstance(node, parsetree.DefTag): @@ -69,15 +80,18 @@ class MessageExtractor(object): continue # Comments don't apply unless they immediately precede the message - if translator_comments and \ - translator_comments[-1][0] < node.lineno - 1: + if ( + translator_comments + and translator_comments[-1][0] < node.lineno - 1 + ): translator_comments = [] translator_strings = [ - comment[1] for comment in translator_comments] + comment[1] for comment in translator_comments + ] if isinstance(code, compat.text_type): - code = code.encode(input_encoding, 'backslashreplace') + code = code.encode(input_encoding, "backslashreplace") used_translator_comments = False # We add extra newline to work around a pybabel bug @@ -85,10 +99,11 @@ class MessageExtractor(object): # input string of the input is non-ascii) # Also, because we added it, we have to subtract one from # node.lineno - code = compat.byte_buffer(compat.b('\n') + code) + code = compat.byte_buffer(compat.b("\n") + code) for message in self.process_python( - code, node.lineno - 1, translator_strings): + code, node.lineno - 1, translator_strings + ): yield message used_translator_comments = True @@ -104,5 +119,7 @@ class MessageExtractor(object): def _split_comment(lineno, comment): """Return the multiline comment at lineno split into a list of comment line numbers and the accompanying comment line""" - return [(lineno + index, line) for index, line in - enumerate(comment.splitlines())] + return [ + (lineno + index, line) + for index, line in enumerate(comment.splitlines()) + ] diff --git a/server/www/packages/packages-linux/x64/mako/ext/linguaplugin.py b/server/www/packages/packages-linux/x64/mako/ext/linguaplugin.py index 46b0d6a..0f6d165 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/linguaplugin.py +++ b/server/www/packages/packages-linux/x64/mako/ext/linguaplugin.py @@ -1,43 +1,57 @@ +# ext/linguaplugin.py +# Copyright 2006-2020 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + import io + from lingua.extractors import Extractor -from lingua.extractors import Message from lingua.extractors import get_extractor -from mako.ext.extract import MessageExtractor +from lingua.extractors import Message + from mako import compat +from mako.ext.extract import MessageExtractor class LinguaMakoExtractor(Extractor, MessageExtractor): - '''Mako templates''' - extensions = ['.mako'] - default_config = { - 'encoding': 'utf-8', - 'comment-tags': '', - } + """Mako templates""" + + extensions = [".mako"] + default_config = {"encoding": "utf-8", "comment-tags": ""} def __call__(self, filename, options, fileobj=None): 
self.options = options self.filename = filename - self.python_extractor = get_extractor('x.py') + self.python_extractor = get_extractor("x.py") if fileobj is None: - fileobj = open(filename, 'rb') + fileobj = open(filename, "rb") return self.process_file(fileobj) def process_python(self, code, code_lineno, translator_strings): source = code.getvalue().strip() - if source.endswith(compat.b(':')): - if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')): - source = compat.b('') # Ignore try/except and else - elif source.startswith(compat.b('elif')): - source = source[2:] # Replace "elif" with "if" - source += compat.b('pass') + if source.endswith(compat.b(":")): + if source in ( + compat.b("try:"), + compat.b("else:"), + ) or source.startswith(compat.b("except")): + source = compat.b("") # Ignore try/except and else + elif source.startswith(compat.b("elif")): + source = source[2:] # Replace "elif" with "if" + source += compat.b("pass") code = io.BytesIO(source) for msg in self.python_extractor( - self.filename, self.options, code, code_lineno -1): + self.filename, self.options, code, code_lineno - 1 + ): if translator_strings: - msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural, - msg.flags, - compat.u(' ').join( - translator_strings + [msg.comment]), - msg.tcomment, msg.location) + msg = Message( + msg.msgctxt, + msg.msgid, + msg.msgid_plural, + msg.flags, + compat.u(" ").join(translator_strings + [msg.comment]), + msg.tcomment, + msg.location, + ) yield msg diff --git a/server/www/packages/packages-linux/x64/mako/ext/preprocessors.py b/server/www/packages/packages-linux/x64/mako/ext/preprocessors.py index 9b700d1..9cc0621 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/preprocessors.py +++ b/server/www/packages/packages-linux/x64/mako/ext/preprocessors.py @@ -1,5 +1,5 @@ # ext/preprocessors.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,4 +17,4 @@ def convert_comments(text): from mako.ext.preprocessors import convert_comments t = Template(..., preprocessor=convert_comments)""" - return re.sub(r'(?<=\n)\s*#[^#]', "##", text) + return re.sub(r"(?<=\n)\s*#[^#]", "##", text) diff --git a/server/www/packages/packages-linux/x64/mako/ext/pygmentplugin.py b/server/www/packages/packages-linux/x64/mako/ext/pygmentplugin.py index 4057caa..943a67a 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/pygmentplugin.py +++ b/server/www/packages/packages-linux/x64/mako/ext/pygmentplugin.py @@ -1,45 +1,73 @@ # ext/pygmentplugin.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from pygments.lexers.web import \ - HtmlLexer, XmlLexer, JavascriptLexer, CssLexer -from pygments.lexers.agile import PythonLexer, Python3Lexer -from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \ - include, using -from pygments.token import \ - Text, Comment, Operator, Keyword, Name, String, Other -from pygments.formatters.html import HtmlFormatter from pygments import highlight +from pygments.formatters.html import HtmlFormatter +from pygments.lexer import bygroups +from pygments.lexer import DelegatingLexer +from pygments.lexer import include +from 
pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
 from mako import compat
 
 
 class MakoLexer(RegexLexer):
-    name = 'Mako'
-    aliases = ['mako']
-    filenames = ['*.mao']
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
 
     tokens = {
-        'root': [
-            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Keyword, Other)),
-            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
-            (r'(\s*)(##[^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Other)),
-            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
-            (r'(<%)([\w\.\:]+)',
-             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
-             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
-            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(<%(?:!?))(.*?)(%>)(?s)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'''(?sx)
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(?s)(<%(?:!?))(.*?)(%>)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"""(?sx)
                (.+?)
# anything, followed by: (?: (?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line @@ -52,76 +80,78 @@ class MakoLexer(RegexLexer): (\\\n) | # an escaped newline \Z # end of string ) - ''', bygroups(Other, Operator)), - (r'\s+', Text), + """, + bygroups(Other, Operator), + ), + (r"\s+", Text), ], - 'ondeftags': [ - (r'<%', Comment.Preproc), - (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin), - include('tag'), + "ondeftags": [ + (r"<%", Comment.Preproc), + (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin), + include("tag"), ], - 'tag': [ - (r'((?:\w+)\s*=)\s*(".*?")', - bygroups(Name.Attribute, String)), - (r'/?\s*>', Comment.Preproc, '#pop'), - (r'\s+', Text), + "tag": [ + (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)), + (r"/?\s*>", Comment.Preproc, "#pop"), + (r"\s+", Text), ], - 'attr': [ - ('".*?"', String, '#pop'), - ("'.*?'", String, '#pop'), - (r'[^\s>]+', String, '#pop'), + "attr": [ + ('".*?"', String, "#pop"), + ("'.*?'", String, "#pop"), + (r"[^\s>]+", String, "#pop"), ], } class MakoHtmlLexer(DelegatingLexer): - name = 'HTML+Mako' - aliases = ['html+mako'] + name = "HTML+Mako" + aliases = ["html+mako"] def __init__(self, **options): - super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, - **options) + super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options) class MakoXmlLexer(DelegatingLexer): - name = 'XML+Mako' - aliases = ['xml+mako'] + name = "XML+Mako" + aliases = ["xml+mako"] def __init__(self, **options): - super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, - **options) + super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options) class MakoJavascriptLexer(DelegatingLexer): - name = 'JavaScript+Mako' - aliases = ['js+mako', 'javascript+mako'] + name = "JavaScript+Mako" + aliases = ["js+mako", "javascript+mako"] def __init__(self, **options): - super(MakoJavascriptLexer, self).__init__(JavascriptLexer, - MakoLexer, **options) + super(MakoJavascriptLexer, self).__init__( + JavascriptLexer, MakoLexer, **options + ) class MakoCssLexer(DelegatingLexer): - name = 'CSS+Mako' - aliases = ['css+mako'] + name = "CSS+Mako" + aliases = ["css+mako"] def __init__(self, **options): - super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, - **options) + super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options) -pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted', - linenos=True) +pygments_html_formatter = HtmlFormatter( + cssclass="syntax-highlighted", linenos=True +) -def syntax_highlight(filename='', language=None): +def syntax_highlight(filename="", language=None): mako_lexer = MakoLexer() if compat.py3k: python_lexer = Python3Lexer() else: python_lexer = PythonLexer() - if filename.startswith('memory:') or language == 'mako': - return lambda string: highlight(string, mako_lexer, - pygments_html_formatter) - return lambda string: highlight(string, python_lexer, - pygments_html_formatter) + if filename.startswith("memory:") or language == "mako": + return lambda string: highlight( + string, mako_lexer, pygments_html_formatter + ) + return lambda string: highlight( + string, python_lexer, pygments_html_formatter + ) diff --git a/server/www/packages/packages-linux/x64/mako/ext/turbogears.py b/server/www/packages/packages-linux/x64/mako/ext/turbogears.py index eaa2d78..722a6b4 100644 --- a/server/www/packages/packages-linux/x64/mako/ext/turbogears.py +++ b/server/www/packages/packages-linux/x64/mako/ext/turbogears.py @@ -1,5 +1,5 @@ # ext/turbogears.py -# Copyright (C) 2006-2016 the Mako authors and 
contributors
+# Copyright 2006-2020 the Mako authors and contributors
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,7 +13,7 @@ class TGPlugin(object):
 
     """TurboGears compatible Template Plugin."""
 
-    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
+    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
         self.extra_vars_func = extra_vars_func
         self.extension = extension
         if not options:
@@ -22,9 +22,9 @@ class TGPlugin(object):
         # Pull the options out and initialize the lookup
         lookup_options = {}
         for k, v in options.items():
-            if k.startswith('mako.'):
+            if k.startswith("mako."):
                 lookup_options[k[5:]] = v
-            elif k in ['directories', 'filesystem_checks', 'module_directory']:
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
                 lookup_options[k] = v
         self.lookup = TemplateLookup(**lookup_options)
 
@@ -40,14 +40,17 @@ class TGPlugin(object):
         if template_string is not None:
             return Template(template_string, **self.tmpl_options)
         # Translate TG dot notation to normal / template path
-        if '/' not in templatename:
-            templatename = '/' + templatename.replace('.', '/') + '.' +\
-                self.extension
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
 
         # Lookup template
         return self.lookup.get_template(templatename)
 
-    def render(self, info, format="html", fragment=False, template=None):
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
         if isinstance(template, compat.string_types):
             template = self.load_template(template)
diff --git a/server/www/packages/packages-linux/x64/mako/filters.py b/server/www/packages/packages-linux/x64/mako/filters.py
index c082690..0ae33ff 100644
--- a/server/www/packages/packages-linux/x64/mako/filters.py
+++ b/server/www/packages/packages-linux/x64/mako/filters.py
@@ -1,24 +1,25 @@
 # mako/filters.py
-# Copyright (C) 2006-2016 the Mako authors and contributors
+# Copyright 2006-2020 the Mako authors and contributors
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-import re
 import codecs
-
-from mako.compat import quote_plus, unquote_plus, codepoint2name, \
-    name2codepoint
+import re
 
 from mako import compat
+from mako.compat import codepoint2name
+from mako.compat import name2codepoint
+from mako.compat import quote_plus
+from mako.compat import unquote_plus
 
 
 xml_escapes = {
-    '&': '&amp;',
-    '>': '&gt;',
-    '<': '&lt;',
-    '"': '&#34;',   # also &quot; in html-only
-    "'": '&#39;'    # also &apos; in html-only
+    "&": "&amp;",
+    ">": "&gt;",
+    "<": "&lt;",
+    '"': "&#34;",  # also &quot; in html-only
+    "'": "&#39;",  # also &apos; in html-only
 }
 
 # XXX: &quot; is valid in HTML and XML
@@ -37,6 +38,7 @@ def legacy_html_escape(s):
 
 try:
     import markupsafe
+
     html_escape = markupsafe.escape
 except ImportError:
     html_escape = legacy_html_escape
@@ -69,7 +71,6 @@ def trim(string):
 
 
 class Decode(object):
-
     def __getattr__(self, key):
         def decode(x):
             if isinstance(x, compat.text_type):
@@ -78,24 +79,31 @@ def decode...
                 return decode(str(x))
             else:
                 return compat.text_type(x, encoding=key)
+
         return decode
+
+
 decode = Decode()
 
 
-_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
+_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
 
 
 def is_ascii_str(text):
     return isinstance(text, str) and _ASCII_re.match(text)
 
+
 ################################################################
 
 
 class XMLEntityEscaper(object):
-
     def __init__(self, codepoint2name, name2codepoint):
-
        self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
-                                      for c, n in codepoint2name.items()])
+        self.codepoint2entity = dict(
+            [
+                (c, compat.text_type("&%s;" % n))
+                for c, n in codepoint2name.items()
+            ]
+        )
         self.name2codepoint = name2codepoint
 
     def escape_entities(self, text):
@@ -110,7 +118,7 @@ class XMLEntityEscaper(object):
         try:
             return self.codepoint2entity[codepoint]
         except (KeyError, IndexError):
-            return '&#x%X;' % codepoint
+            return "&#x%X;" % codepoint
 
     __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
 
@@ -123,19 +131,22 @@ class XMLEntityEscaper(object):
 
         The return value is guaranteed to be ASCII.
         """
-        return self.__escapable.sub(self.__escape, compat.text_type(text)
-                                    ).encode('ascii')
+        return self.__escapable.sub(
+            self.__escape, compat.text_type(text)
+        ).encode("ascii")
 
     # XXX: This regexp will not match all valid XML entity names__.
     # (It punts on details involving involving CombiningChars and Extenders.)
     #
     # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
-    __characterrefs = re.compile(r'''& (?:
+    __characterrefs = re.compile(
+        r"""& (?:
           \#(\d+)
         | \#x([\da-f]+)
         | (   (?!\d) [:\w] [-.:\w]+ )
-        ) ;''',
-        re.X | re.UNICODE)
+        ) ;""",
+        re.X | re.UNICODE,
+    )
 
     def __unescape(self, m):
         dval, hval, name = m.groups()
@@ -144,7 +155,7 @@ class XMLEntityEscaper(object):
         elif hval:
             codepoint = int(hval, 16)
         else:
-            codepoint = self.name2codepoint.get(name, 0xfffd)
+            codepoint = self.name2codepoint.get(name, 0xFFFD)
             # U+FFFD = "REPLACEMENT CHARACTER"
         if codepoint < 128:
             return chr(codepoint)
@@ -168,42 +179,41 @@ html_entities_unescape = _html_entities_escaper.unescape
 
 
 def htmlentityreplace_errors(ex):
     """An encoding error handler.
 
-    This python `codecs`_ error handler replaces unencodable
+    This python codecs error handler replaces unencodable
     characters with HTML entities, or, if no HTML entity exists for
-    the character, XML character references.
+    the character, XML character references::
 
-    >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
-    'The cost was &euro;12.'
+        >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+        'The cost was &euro;12.'
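
The handler documented in the doctest above is registered with Python's codecs machinery as a side effect of importing mako.filters, so any encode() call can select it by name. A minimal round trip of that behavior (a sketch, assuming only the standard library plus the vendored mako package on sys.path):

    import mako.filters  # noqa: F401 -- importing registers "htmlentityreplace"

    # U+20AC has no latin-1 encoding, so the handler substitutes &euro;
    encoded = u"The cost was \u20ac12.".encode("latin-1", "htmlentityreplace")
    assert encoded == b"The cost was &euro;12."
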
""" if isinstance(ex, UnicodeEncodeError): # Handle encoding errors - bad_text = ex.object[ex.start:ex.end] + bad_text = ex.object[ex.start : ex.end] text = _html_entities_escaper.escape(bad_text) return (compat.text_type(text), ex.end) raise ex -codecs.register_error('htmlentityreplace', htmlentityreplace_errors) + +codecs.register_error("htmlentityreplace", htmlentityreplace_errors) # TODO: options to make this dynamic per-compilation will be added in a later # release DEFAULT_ESCAPES = { - 'x': 'filters.xml_escape', - 'h': 'filters.html_escape', - 'u': 'filters.url_escape', - 'trim': 'filters.trim', - 'entity': 'filters.html_entities_escape', - 'unicode': 'unicode', - 'decode': 'decode', - 'str': 'str', - 'n': 'n' + "x": "filters.xml_escape", + "h": "filters.html_escape", + "u": "filters.url_escape", + "trim": "filters.trim", + "entity": "filters.html_entities_escape", + "unicode": "unicode", + "decode": "decode", + "str": "str", + "n": "n", } if compat.py3k: - DEFAULT_ESCAPES.update({ - 'unicode': 'str' - }) + DEFAULT_ESCAPES.update({"unicode": "str"}) NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy() -NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape' -NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape' +NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape" +NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape" diff --git a/server/www/packages/packages-linux/x64/mako/lexer.py b/server/www/packages/packages-linux/x64/mako/lexer.py index cf4187f..6226e26 100644 --- a/server/www/packages/packages-linux/x64/mako/lexer.py +++ b/server/www/packages/packages-linux/x64/mako/lexer.py @@ -1,24 +1,31 @@ # mako/lexer.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """provides the Lexer class for parsing template strings into parse trees.""" -import re import codecs -from mako import parsetree, exceptions, compat +import re + +from mako import compat +from mako import exceptions +from mako import parsetree from mako.pygen import adjust_whitespace _regexp_cache = {} class Lexer(object): - - def __init__(self, text, filename=None, - disable_unicode=False, - input_encoding=None, preprocessor=None): + def __init__( + self, + text, + filename=None, + disable_unicode=False, + input_encoding=None, + preprocessor=None, + ): self.text = text self.filename = filename self.template = parsetree.TemplateNode(self.filename) @@ -34,22 +41,24 @@ class Lexer(object): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) if preprocessor is None: self.preprocessor = [] - elif not hasattr(preprocessor, '__iter__'): + elif not hasattr(preprocessor, "__iter__"): self.preprocessor = [preprocessor] else: self.preprocessor = preprocessor @property def exception_kwargs(self): - return {'source': self.text, - 'lineno': self.matched_lineno, - 'pos': self.matched_charpos, - 'filename': self.filename} + return { + "source": self.text, + "lineno": self.matched_lineno, + "pos": self.matched_charpos, + "filename": self.filename, + } def match(self, regexp, flags=None): """compile the given regexp, cache the reg, and call match_reg().""" @@ -83,9 +92,9 @@ class Lexer(object): else: self.match_position = end self.matched_lineno = self.lineno - lines = re.findall(r"\n", 
self.text[mp:self.match_position]) + lines = re.findall(r"\n", self.text[mp : self.match_position]) cp = mp - 1 - while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'): + while cp >= 0 and cp < self.textlength and self.text[cp] != "\n": cp -= 1 self.matched_charpos = mp - cp self.lineno += len(lines) @@ -97,46 +106,49 @@ class Lexer(object): def parse_until_text(self, watch_nesting, *text): startpos = self.match_position - text_re = r'|'.join(text) + text_re = r"|".join(text) brace_level = 0 paren_level = 0 bracket_level = 0 while True: - match = self.match(r'#.*\n') + match = self.match(r"#.*\n") if match: continue - match = self.match(r'(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1', - re.S) + match = self.match( + r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S + ) if match: continue - match = self.match(r'(%s)' % text_re) - if match and not (watch_nesting - and (brace_level > 0 or paren_level > 0 - or bracket_level > 0)): - return \ - self.text[startpos: - self.match_position - len(match.group(1))],\ - match.group(1) + match = self.match(r"(%s)" % text_re) + if match and not ( + watch_nesting + and (brace_level > 0 or paren_level > 0 or bracket_level > 0) + ): + return ( + self.text[ + startpos : self.match_position - len(match.group(1)) + ], + match.group(1), + ) elif not match: match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) if match: - brace_level += match.group(1).count('{') - brace_level -= match.group(1).count('}') - paren_level += match.group(1).count('(') - paren_level -= match.group(1).count(')') - bracket_level += match.group(1).count('[') - bracket_level -= match.group(1).count(']') + brace_level += match.group(1).count("{") + brace_level -= match.group(1).count("}") + paren_level += match.group(1).count("(") + paren_level -= match.group(1).count(")") + bracket_level += match.group(1).count("[") + bracket_level -= match.group(1).count("]") continue raise exceptions.SyntaxException( - "Expected: %s" % - ','.join(text), - **self.exception_kwargs) + "Expected: %s" % ",".join(text), **self.exception_kwargs + ) def append_node(self, nodecls, *args, **kwargs): - kwargs.setdefault('source', self.text) - kwargs.setdefault('lineno', self.matched_lineno) - kwargs.setdefault('pos', self.matched_charpos) - kwargs['filename'] = self.filename + kwargs.setdefault("source", self.text) + kwargs.setdefault("lineno", self.matched_lineno) + kwargs.setdefault("pos", self.matched_charpos) + kwargs["filename"] = self.filename node = nodecls(*args, **kwargs) if len(self.tag): self.tag[-1].nodes.append(node) @@ -149,8 +161,10 @@ class Lexer(object): if self.control_line: control_frame = self.control_line[-1] control_frame.nodes.append(node) - if not (isinstance(node, parsetree.ControlLine) and - control_frame.is_ternary(node.keyword)): + if not ( + isinstance(node, parsetree.ControlLine) + and control_frame.is_ternary(node.keyword) + ): if self.ternary_stack and self.ternary_stack[-1]: self.ternary_stack[-1][-1].nodes.append(node) if isinstance(node, parsetree.Tag): @@ -164,17 +178,20 @@ class Lexer(object): elif node.is_primary: self.control_line.append(node) self.ternary_stack.append([]) - elif self.control_line and \ - self.control_line[-1].is_ternary(node.keyword): + elif self.control_line and self.control_line[-1].is_ternary( + node.keyword + ): self.ternary_stack[-1].append(node) - elif self.control_line and \ - not self.control_line[-1].is_ternary(node.keyword): + elif self.control_line and not self.control_line[-1].is_ternary( + node.keyword + ): raise 
exceptions.SyntaxException( - "Keyword '%s' not a legal ternary for keyword '%s'" % - (node.keyword, self.control_line[-1].keyword), - **self.exception_kwargs) + "Keyword '%s' not a legal ternary for keyword '%s'" + % (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs + ) - _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n') + _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n") def decode_raw_stream(self, text, decode_raw, known_encoding, filename): """given string/unicode or bytes/string, determine encoding @@ -184,44 +201,48 @@ class Lexer(object): """ if isinstance(text, compat.text_type): m = self._coding_re.match(text) - encoding = m and m.group(1) or known_encoding or 'ascii' + encoding = m and m.group(1) or known_encoding or "utf-8" return encoding, text if text.startswith(codecs.BOM_UTF8): - text = text[len(codecs.BOM_UTF8):] - parsed_encoding = 'utf-8' - m = self._coding_re.match(text.decode('utf-8', 'ignore')) - if m is not None and m.group(1) != 'utf-8': + text = text[len(codecs.BOM_UTF8) :] + parsed_encoding = "utf-8" + m = self._coding_re.match(text.decode("utf-8", "ignore")) + if m is not None and m.group(1) != "utf-8": raise exceptions.CompileException( "Found utf-8 BOM in file, with conflicting " "magic encoding comment of '%s'" % m.group(1), - text.decode('utf-8', 'ignore'), - 0, 0, filename) + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) else: - m = self._coding_re.match(text.decode('utf-8', 'ignore')) + m = self._coding_re.match(text.decode("utf-8", "ignore")) if m: parsed_encoding = m.group(1) else: - parsed_encoding = known_encoding or 'ascii' + parsed_encoding = known_encoding or "utf-8" if decode_raw: try: text = text.decode(parsed_encoding) except UnicodeDecodeError: raise exceptions.CompileException( - "Unicode decode operation of encoding '%s' failed" % - parsed_encoding, - text.decode('utf-8', 'ignore'), - 0, 0, filename) + "Unicode decode operation of encoding '%s' failed" + % parsed_encoding, + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) return parsed_encoding, text def parse(self): self.encoding, self.text = self.decode_raw_stream( - self.text, - not self.disable_unicode, - self.encoding, - self.filename) + self.text, not self.disable_unicode, self.encoding, self.filename + ) for preproc in self.preprocessor: self.text = preproc(self.text) @@ -232,7 +253,7 @@ class Lexer(object): self.textlength = len(self.text) - while (True): + while True: if self.match_position > self.textlength: break @@ -258,20 +279,24 @@ class Lexer(object): raise exceptions.CompileException("assertion failed") if len(self.tag): - raise exceptions.SyntaxException("Unclosed tag: <%%%s>" % - self.tag[-1].keyword, - **self.exception_kwargs) + raise exceptions.SyntaxException( + "Unclosed tag: <%%%s>" % self.tag[-1].keyword, + **self.exception_kwargs + ) if len(self.control_line): raise exceptions.SyntaxException( - "Unterminated control keyword: '%s'" % - self.control_line[-1].keyword, + "Unterminated control keyword: '%s'" + % self.control_line[-1].keyword, self.text, self.control_line[-1].lineno, - self.control_line[-1].pos, self.filename) + self.control_line[-1].pos, + self.filename, + ) return self.template def match_tag_start(self): - match = self.match(r''' + match = self.match( + r""" \<% # opening tag ([\w\.\:]+) # keyword @@ -283,9 +308,9 @@ class Lexer(object): (/)?> # closing - ''', - - re.I | re.S | re.X) + """, + re.I | re.S | re.X, + ) if match: keyword, attr, isend = match.groups() @@ -293,22 +318,23 @@ class 
Lexer(object):
         attributes = {}
         if attr:
             for att in re.findall(
-                    r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
+                r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+            ):
                 key, val1, val2 = att
                 text = val1 or val2
-                text = text.replace('\r\n', '\n')
+                text = text.replace("\r\n", "\n")
                 attributes[key] = text
         self.append_node(parsetree.Tag, keyword, attributes)
         if isend:
             self.tag.pop()
         else:
-            if keyword == 'text':
-                match = self.match(r'(.*?)(?=\</%text>)', re.S)
+            if keyword == "text":
+                match = self.match(r"(.*?)(?=\</%text>)", re.S)
                 if not match:
                     raise exceptions.SyntaxException(
-                        "Unclosed tag: <%%%s>" %
-                        self.tag[-1].keyword,
-                        **self.exception_kwargs)
+                        "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                        **self.exception_kwargs
+                    )
                 self.append_node(parsetree.Text, match.group(1))
                 return self.match_tag_end()
             return True
@@ -316,25 +342,27 @@ class Lexer(object):
             return False
 
     def match_tag_end(self):
-        match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
+        match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
         if match:
             if not len(self.tag):
                 raise exceptions.SyntaxException(
-                    "Closing tag without opening tag: </%%%s>" %
-                    match.group(1),
-                    **self.exception_kwargs)
+                    "Closing tag without opening tag: </%%%s>"
+                    % match.group(1),
+                    **self.exception_kwargs
+                )
             elif self.tag[-1].keyword != match.group(1):
                 raise exceptions.SyntaxException(
-                    "Closing tag </%%%s> does not match tag: <%%%s>" %
-                    (match.group(1), self.tag[-1].keyword),
-                    **self.exception_kwargs)
+                    "Closing tag </%%%s> does not match tag: <%%%s>"
+                    % (match.group(1), self.tag[-1].keyword),
+                    **self.exception_kwargs
+                )
             self.tag.pop()
             return True
         else:
             return False
 
     def match_end(self):
-        match = self.match(r'\Z', re.S)
+        match = self.match(r"\Z", re.S)
         if match:
             string = match.group()
             if string:
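
For reference, the public entry point into the matcher methods reformatted here is Lexer(text).parse(), which drives match_tag_start/match_tag_end/match_text and returns a mako.parsetree.TemplateNode. A minimal sketch (assuming only that the vendored mako package is importable):

    from mako.lexer import Lexer

    # parse a template string into a tree of mako.parsetree.* nodes
    tree = Lexer("hello, ${name}!").parse()
    print(type(tree))  # mako.parsetree.TemplateNode

@@ -345,7 +373,8 @@
             return False
 
     def match_text(self):
-        match = self.match(r"""
+        match = self.match(
+            r"""
                 (.*?)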
# anything, followed by: ( (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based @@ -360,7 +389,9 @@ class Lexer(object): (\\\r?\n) # an escaped newline - throw away | \Z # end of string - )""", re.X | re.S) + )""", + re.X | re.S, + ) if match: text = match.group(1) @@ -374,14 +405,17 @@ class Lexer(object): match = self.match(r"<%(!)?") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(False, r'%>') + text, end = self.parse_until_text(False, r"%>") # the trailing newline helps # compiler.parse() not complain about indentation text = adjust_whitespace(text) + "\n" self.append_node( parsetree.Code, text, - match.group(1) == '!', lineno=line, pos=pos) + match.group(1) == "!", + lineno=line, + pos=pos, + ) return True else: return False @@ -390,16 +424,19 @@ class Lexer(object): match = self.match(r"\${") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(True, r'\|', r'}') - if end == '|': - escapes, end = self.parse_until_text(True, r'}') + text, end = self.parse_until_text(True, r"\|", r"}") + if end == "|": + escapes, end = self.parse_until_text(True, r"}") else: escapes = "" - text = text.replace('\r\n', '\n') + text = text.replace("\r\n", "\n") self.append_node( parsetree.Expression, - text, escapes.strip(), - lineno=line, pos=pos) + text, + escapes.strip(), + lineno=line, + pos=pos, + ) return True else: return False @@ -407,31 +444,35 @@ class Lexer(object): def match_control_line(self): match = self.match( r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)" - r"(?:\r?\n|\Z)", re.M) + r"(?:\r?\n|\Z)", + re.M, + ) if match: operator = match.group(1) text = match.group(2) - if operator == '%': - m2 = re.match(r'(end)?(\w+)\s*(.*)', text) + if operator == "%": + m2 = re.match(r"(end)?(\w+)\s*(.*)", text) if not m2: raise exceptions.SyntaxException( - "Invalid control line: '%s'" % - text, - **self.exception_kwargs) + "Invalid control line: '%s'" % text, + **self.exception_kwargs + ) isend, keyword = m2.group(1, 2) - isend = (isend is not None) + isend = isend is not None if isend: if not len(self.control_line): raise exceptions.SyntaxException( - "No starting keyword '%s' for '%s'" % - (keyword, text), - **self.exception_kwargs) + "No starting keyword '%s' for '%s'" + % (keyword, text), + **self.exception_kwargs + ) elif self.control_line[-1].keyword != keyword: raise exceptions.SyntaxException( - "Keyword '%s' doesn't match keyword '%s'" % - (text, self.control_line[-1].keyword), - **self.exception_kwargs) + "Keyword '%s' doesn't match keyword '%s'" + % (text, self.control_line[-1].keyword), + **self.exception_kwargs + ) self.append_node(parsetree.ControlLine, keyword, isend, text) else: self.append_node(parsetree.Comment, text) diff --git a/server/www/packages/packages-linux/x64/mako/lookup.py b/server/www/packages/packages-linux/x64/mako/lookup.py index 0d3f304..476326d 100644 --- a/server/www/packages/packages-linux/x64/mako/lookup.py +++ b/server/www/packages/packages-linux/x64/mako/lookup.py @@ -1,14 +1,16 @@ # mako/lookup.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import os -import stat import posixpath import re -from mako import exceptions, util +import stat + +from mako import exceptions +from mako import util from mako.template import Template try: @@ -151,41 +153,41 @@ class 
TemplateLookup(TemplateCollection): """ - def __init__(self, - directories=None, - module_directory=None, - filesystem_checks=True, - collection_size=-1, - format_exceptions=False, - error_handler=None, - disable_unicode=False, - bytestring_passthrough=False, - output_encoding=None, - encoding_errors='strict', + def __init__( + self, + directories=None, + module_directory=None, + filesystem_checks=True, + collection_size=-1, + format_exceptions=False, + error_handler=None, + disable_unicode=False, + bytestring_passthrough=False, + output_encoding=None, + encoding_errors="strict", + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + modulename_callable=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + input_encoding=None, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): - cache_args=None, - cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - - modulename_callable=None, - module_writer=None, - default_filters=None, - buffer_filters=(), - strict_undefined=False, - imports=None, - future_imports=None, - enable_loop=True, - input_encoding=None, - preprocessor=None, - lexer_cls=None, - include_error_handler=None): - - self.directories = [posixpath.normpath(d) for d in - util.to_list(directories, ()) - ] + self.directories = [ + posixpath.normpath(d) for d in util.to_list(directories, ()) + ] self.module_directory = module_directory self.modulename_callable = modulename_callable self.filesystem_checks = filesystem_checks @@ -195,34 +197,34 @@ class TemplateLookup(TemplateCollection): cache_args = {} # transfer deprecated cache_* args if cache_dir: - cache_args.setdefault('dir', cache_dir) + cache_args.setdefault("dir", cache_dir) if cache_url: - cache_args.setdefault('url', cache_url) + cache_args.setdefault("url", cache_url) if cache_type: - cache_args.setdefault('type', cache_type) + cache_args.setdefault("type", cache_type) self.template_args = { - 'format_exceptions': format_exceptions, - 'error_handler': error_handler, - 'include_error_handler': include_error_handler, - 'disable_unicode': disable_unicode, - 'bytestring_passthrough': bytestring_passthrough, - 'output_encoding': output_encoding, - 'cache_impl': cache_impl, - 'encoding_errors': encoding_errors, - 'input_encoding': input_encoding, - 'module_directory': module_directory, - 'module_writer': module_writer, - 'cache_args': cache_args, - 'cache_enabled': cache_enabled, - 'default_filters': default_filters, - 'buffer_filters': buffer_filters, - 'strict_undefined': strict_undefined, - 'imports': imports, - 'future_imports': future_imports, - 'enable_loop': enable_loop, - 'preprocessor': preprocessor, - 'lexer_cls': lexer_cls + "format_exceptions": format_exceptions, + "error_handler": error_handler, + "include_error_handler": include_error_handler, + "disable_unicode": disable_unicode, + "bytestring_passthrough": bytestring_passthrough, + "output_encoding": output_encoding, + "cache_impl": cache_impl, + "encoding_errors": encoding_errors, + "input_encoding": input_encoding, + "module_directory": module_directory, + "module_writer": module_writer, + "cache_args": cache_args, + "cache_enabled": cache_enabled, + "default_filters": default_filters, + "buffer_filters": buffer_filters, + "strict_undefined": strict_undefined, + "imports": imports, + "future_imports": future_imports, + 
"enable_loop": enable_loop, + "preprocessor": preprocessor, + "lexer_cls": lexer_cls, } if collection_size == -1: @@ -248,17 +250,18 @@ class TemplateLookup(TemplateCollection): else: return self._collection[uri] except KeyError: - u = re.sub(r'^\/+', '', uri) - for dir in self.directories: + u = re.sub(r"^\/+", "", uri) + for dir_ in self.directories: # make sure the path seperators are posix - os.altsep is empty # on POSIX and cannot be used. - dir = dir.replace(os.path.sep, posixpath.sep) - srcfile = posixpath.normpath(posixpath.join(dir, u)) + dir_ = dir_.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir_, u)) if os.path.isfile(srcfile): return self._load(srcfile, uri) else: raise exceptions.TopLevelLookupException( - "Cant locate template for uri %r" % uri) + "Cant locate template for uri %r" % uri + ) def adjust_uri(self, uri, relativeto): """Adjust the given ``uri`` based on the given relative URI.""" @@ -267,12 +270,13 @@ class TemplateLookup(TemplateCollection): if key in self._uri_cache: return self._uri_cache[key] - if uri[0] != '/': + if uri[0] != "/": if relativeto is not None: v = self._uri_cache[key] = posixpath.join( - posixpath.dirname(relativeto), uri) + posixpath.dirname(relativeto), uri + ) else: - v = self._uri_cache[key] = '/' + uri + v = self._uri_cache[key] = "/" + uri else: v = self._uri_cache[key] = uri return v @@ -295,9 +299,9 @@ class TemplateLookup(TemplateCollection): """ filename = posixpath.normpath(filename) - for dir in self.directories: - if filename[0:len(dir)] == dir: - return filename[len(dir):] + for dir_ in self.directories: + if filename[0 : len(dir_)] == dir_: + return filename[len(dir_) :] else: return None @@ -320,7 +324,8 @@ class TemplateLookup(TemplateCollection): filename=posixpath.normpath(filename), lookup=self, module_filename=module_filename, - **self.template_args) + **self.template_args + ) return template except: # if compilation fails etc, ensure @@ -337,8 +342,7 @@ class TemplateLookup(TemplateCollection): try: template_stat = os.stat(template.filename) - if template.module._modified_time < \ - template_stat[stat.ST_MTIME]: + if template.module._modified_time < template_stat[stat.ST_MTIME]: self._collection.pop(uri, None) return self._load(template.filename, uri) else: @@ -346,7 +350,8 @@ class TemplateLookup(TemplateCollection): except OSError: self._collection.pop(uri, None) raise exceptions.TemplateLookupException( - "Cant locate template for uri %r" % uri) + "Cant locate template for uri %r" % uri + ) def put_string(self, uri, text): """Place a new :class:`.Template` object into this @@ -355,10 +360,8 @@ class TemplateLookup(TemplateCollection): """ self._collection[uri] = Template( - text, - lookup=self, - uri=uri, - **self.template_args) + text, lookup=self, uri=uri, **self.template_args + ) def put_template(self, uri, template): """Place a new :class:`.Template` object into this diff --git a/server/www/packages/packages-linux/x64/mako/parsetree.py b/server/www/packages/packages-linux/x64/mako/parsetree.py index e129916..801e48a 100644 --- a/server/www/packages/packages-linux/x64/mako/parsetree.py +++ b/server/www/packages/packages-linux/x64/mako/parsetree.py @@ -1,14 +1,19 @@ # mako/parsetree.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """defines the parse tree components for Mako 
templates.""" -from mako import exceptions, ast, util, filters, compat import re +from mako import ast +from mako import compat +from mako import exceptions +from mako import filters +from mako import util + class Node(object): @@ -22,8 +27,12 @@ class Node(object): @property def exception_kwargs(self): - return {'source': self.source, 'lineno': self.lineno, - 'pos': self.pos, 'filename': self.filename} + return { + "source": self.source, + "lineno": self.lineno, + "pos": self.pos, + "filename": self.filename, + } def get_children(self): return [] @@ -42,7 +51,7 @@ class TemplateNode(Node): """a 'container' node that stores the overall collection of nodes.""" def __init__(self, filename): - super(TemplateNode, self).__init__('', 0, 0, filename) + super(TemplateNode, self).__init__("", 0, 0, filename) self.nodes = [] self.page_attributes = {} @@ -52,7 +61,8 @@ class TemplateNode(Node): def __repr__(self): return "TemplateNode(%s, %r)" % ( util.sorted_dict_repr(self.page_attributes), - self.nodes) + self.nodes, + ) class ControlLine(Node): @@ -74,7 +84,7 @@ class ControlLine(Node): self.text = text self.keyword = keyword self.isend = isend - self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with'] + self.is_primary = keyword in ["for", "if", "while", "try", "with"] self.nodes = [] if self.isend: self._declared_identifiers = [] @@ -98,9 +108,9 @@ class ControlLine(Node): for this ControlLine""" return keyword in { - 'if': set(['else', 'elif']), - 'try': set(['except', 'finally']), - 'for': set(['else']) + "if": set(["else", "elif"]), + "try": set(["except", "finally"]), + "for": set(["else"]), }.get(self.keyword, []) def __repr__(self): @@ -108,7 +118,7 @@ class ControlLine(Node): self.keyword, self.text, self.isend, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -158,7 +168,7 @@ class Code(Node): return "Code(%r, %r, %r)" % ( self.text, self.ismodule, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -208,7 +218,7 @@ class Expression(Node): return "Expression(%r, %r, %r)" % ( self.text, self.escapes_code.args, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -219,45 +229,55 @@ class _TagMeta(type): _classmap = {} - def __init__(cls, clsname, bases, dict): - if getattr(cls, '__keyword__', None) is not None: + def __init__(cls, clsname, bases, dict_): + if getattr(cls, "__keyword__", None) is not None: cls._classmap[cls.__keyword__] = cls - super(_TagMeta, cls).__init__(clsname, bases, dict) + super(_TagMeta, cls).__init__(clsname, bases, dict_) def __call__(cls, keyword, attributes, **kwargs): if ":" in keyword: - ns, defname = keyword.split(':') - return type.__call__(CallNamespaceTag, ns, defname, - attributes, **kwargs) + ns, defname = keyword.split(":") + return type.__call__( + CallNamespaceTag, ns, defname, attributes, **kwargs + ) try: cls = _TagMeta._classmap[keyword] except KeyError: raise exceptions.CompileException( "No such tag: '%s'" % keyword, - source=kwargs['source'], - lineno=kwargs['lineno'], - pos=kwargs['pos'], - filename=kwargs['filename'] + source=kwargs["source"], + lineno=kwargs["lineno"], + pos=kwargs["pos"], + filename=kwargs["filename"], ) return type.__call__(cls, keyword, attributes, **kwargs) class Tag(compat.with_metaclass(_TagMeta, Node)): - """abstract base class for tags. 
- <%sometag/> + e.g.:: - <%someothertag> - stuff - + <%sometag/> + + <%someothertag> + stuff + """ + __keyword__ = None - def __init__(self, keyword, attributes, expressions, - nonexpressions, required, **kwargs): + def __init__( + self, + keyword, + attributes, + expressions, + nonexpressions, + required, + **kwargs + ): r"""construct a new Tag instance. this constructor not called directly, and is only called @@ -284,9 +304,10 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): missing = [r for r in required if r not in self.parsed_attributes] if len(missing): raise exceptions.CompileException( - "Missing attribute(s): %s" % - ",".join([repr(m) for m in missing]), - **self.exception_kwargs) + "Missing attribute(s): %s" + % ",".join([repr(m) for m in missing]), + **self.exception_kwargs + ) self.parent = None self.nodes = [] @@ -302,36 +323,40 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): for key in self.attributes: if key in expressions: expr = [] - for x in re.compile(r'(\${.+?})', - re.S).split(self.attributes[key]): - m = re.compile(r'^\${(.+?)}$', re.S).match(x) + for x in re.compile(r"(\${.+?})", re.S).split( + self.attributes[key] + ): + m = re.compile(r"^\${(.+?)}$", re.S).match(x) if m: - code = ast.PythonCode(m.group(1).rstrip(), - **self.exception_kwargs) + code = ast.PythonCode( + m.group(1).rstrip(), **self.exception_kwargs + ) # we aren't discarding "declared_identifiers" here, # which we do so that list comprehension-declared # variables aren't counted. As yet can't find a # condition that requires it here. - undeclared_identifiers = \ - undeclared_identifiers.union( - code.undeclared_identifiers) - expr.append('(%s)' % m.group(1)) + undeclared_identifiers = undeclared_identifiers.union( + code.undeclared_identifiers + ) + expr.append("(%s)" % m.group(1)) else: if x: expr.append(repr(x)) - self.parsed_attributes[key] = " + ".join(expr) or repr('') + self.parsed_attributes[key] = " + ".join(expr) or repr("") elif key in nonexpressions: - if re.search(r'\${.+?}', self.attributes[key]): + if re.search(r"\${.+?}", self.attributes[key]): raise exceptions.CompileException( "Attibute '%s' in tag '%s' does not allow embedded " "expressions" % (key, self.keyword), - **self.exception_kwargs) + **self.exception_kwargs + ) self.parsed_attributes[key] = repr(self.attributes[key]) else: raise exceptions.CompileException( - "Invalid attribute for tag '%s': '%s'" % - (self.keyword, key), - **self.exception_kwargs) + "Invalid attribute for tag '%s': '%s'" + % (self.keyword, key), + **self.exception_kwargs + ) self.expression_undeclared_identifiers = undeclared_identifiers def declared_identifiers(self): @@ -341,56 +366,64 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): return self.expression_undeclared_identifiers def __repr__(self): - return "%s(%r, %s, %r, %r)" % (self.__class__.__name__, - self.keyword, - util.sorted_dict_repr(self.attributes), - (self.lineno, self.pos), - self.nodes - ) + return "%s(%r, %s, %r, %r)" % ( + self.__class__.__name__, + self.keyword, + util.sorted_dict_repr(self.attributes), + (self.lineno, self.pos), + self.nodes, + ) class IncludeTag(Tag): - __keyword__ = 'include' + __keyword__ = "include" def __init__(self, keyword, attributes, **kwargs): super(IncludeTag, self).__init__( keyword, attributes, - ('file', 'import', 'args'), - (), ('file',), **kwargs) + ("file", "import", "args"), + (), + ("file",), + **kwargs + ) self.page_args = ast.PythonCode( - "__DUMMY(%s)" % attributes.get('args', ''), - **self.exception_kwargs) + "__DUMMY(%s)" % 
attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return [] def undeclared_identifiers(self): - identifiers = self.page_args.undeclared_identifiers.\ - difference(set(["__DUMMY"])).\ - difference(self.page_args.declared_identifiers) - return identifiers.union(super(IncludeTag, self). - undeclared_identifiers()) + identifiers = self.page_args.undeclared_identifiers.difference( + set(["__DUMMY"]) + ).difference(self.page_args.declared_identifiers) + return identifiers.union( + super(IncludeTag, self).undeclared_identifiers() + ) class NamespaceTag(Tag): - __keyword__ = 'namespace' + __keyword__ = "namespace" def __init__(self, keyword, attributes, **kwargs): super(NamespaceTag, self).__init__( - keyword, attributes, - ('file',), - ('name', 'inheritable', - 'import', 'module'), - (), **kwargs) + keyword, + attributes, + ("file",), + ("name", "inheritable", "import", "module"), + (), + **kwargs + ) - self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self)))) - if 'name' not in attributes and 'import' not in attributes: + self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self)))) + if "name" not in attributes and "import" not in attributes: raise exceptions.CompileException( "'name' and/or 'import' attributes are required " "for <%namespace>", - **self.exception_kwargs) - if 'file' in attributes and 'module' in attributes: + **self.exception_kwargs + ) + if "file" in attributes and "module" in attributes: raise exceptions.CompileException( "<%namespace> may only have one of 'file' or 'module'", **self.exception_kwargs @@ -401,51 +434,51 @@ class NamespaceTag(Tag): class TextTag(Tag): - __keyword__ = 'text' + __keyword__ = "text" def __init__(self, keyword, attributes, **kwargs): super(TextTag, self).__init__( - keyword, - attributes, (), - ('filter'), (), **kwargs) + keyword, attributes, (), ("filter"), (), **kwargs + ) self.filter_args = ast.ArgumentList( - attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) def undeclared_identifiers(self): - return self.filter_args.\ - undeclared_identifiers.\ - difference(filters.DEFAULT_ESCAPES.keys()).union( - self.expression_undeclared_identifiers - ) + return self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ).union(self.expression_undeclared_identifiers) class DefTag(Tag): - __keyword__ = 'def' + __keyword__ = "def" def __init__(self, keyword, attributes, **kwargs): - expressions = ['buffered', 'cached'] + [ - c for c in attributes if c.startswith('cache_')] + expressions = ["buffered", "cached"] + [ + c for c in attributes if c.startswith("cache_") + ] super(DefTag, self).__init__( keyword, attributes, expressions, - ('name', 'filter', 'decorator'), - ('name',), - **kwargs) - name = attributes['name'] - if re.match(r'^[\w_]+$', name): + ("name", "filter", "decorator"), + ("name",), + **kwargs + ) + name = attributes["name"] + if re.match(r"^[\w_]+$", name): raise exceptions.CompileException( - "Missing parenthesis in %def", - **self.exception_kwargs) - self.function_decl = ast.FunctionDecl("def " + name + ":pass", - **self.exception_kwargs) + "Missing parenthesis in %def", **self.exception_kwargs + ) + self.function_decl = ast.FunctionDecl( + "def " + name + ":pass", **self.exception_kwargs + ) self.name = self.function_decl.funcname - self.decorator = attributes.get('decorator', '') + self.decorator = attributes.get("decorator", "") self.filter_args = ast.ArgumentList( - 
attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) is_anonymous = False is_block = False @@ -463,51 +496,58 @@ class DefTag(Tag): def undeclared_identifiers(self): res = [] for c in self.function_decl.defaults: - res += list(ast.PythonCode(c, **self.exception_kwargs). - undeclared_identifiers) - return set(res).union( - self.filter_args. - undeclared_identifiers. - difference(filters.DEFAULT_ESCAPES.keys()) - ).union( - self.expression_undeclared_identifiers - ).difference( - self.function_decl.allargnames + res += list( + ast.PythonCode( + c, **self.exception_kwargs + ).undeclared_identifiers + ) + return ( + set(res) + .union( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ) + .union(self.expression_undeclared_identifiers) + .difference(self.function_decl.allargnames) ) class BlockTag(Tag): - __keyword__ = 'block' + __keyword__ = "block" def __init__(self, keyword, attributes, **kwargs): - expressions = ['buffered', 'cached', 'args'] + [ - c for c in attributes if c.startswith('cache_')] + expressions = ["buffered", "cached", "args"] + [ + c for c in attributes if c.startswith("cache_") + ] super(BlockTag, self).__init__( keyword, attributes, expressions, - ('name', 'filter', 'decorator'), + ("name", "filter", "decorator"), (), - **kwargs) - name = attributes.get('name') - if name and not re.match(r'^[\w_]+$', name): + **kwargs + ) + name = attributes.get("name") + if name and not re.match(r"^[\w_]+$", name): raise exceptions.CompileException( "%block may not specify an argument signature", - **self.exception_kwargs) - if not name and attributes.get('args', None): - raise exceptions.CompileException( - "Only named %blocks may specify args", **self.exception_kwargs ) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + if not name and attributes.get("args", None): + raise exceptions.CompileException( + "Only named %blocks may specify args", **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) self.name = name - self.decorator = attributes.get('decorator', '') + self.decorator = attributes.get("decorator", "") self.filter_args = ast.ArgumentList( - attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) is_block = True @@ -517,7 +557,7 @@ class BlockTag(Tag): @property def funcname(self): - return self.name or "__M_anon_%d" % (self.lineno, ) + return self.name or "__M_anon_%d" % (self.lineno,) def get_argument_expressions(self, **kw): return self.body_decl.get_argument_expressions(**kw) @@ -526,91 +566,100 @@ class BlockTag(Tag): return self.body_decl.allargnames def undeclared_identifiers(self): - return (self.filter_args. - undeclared_identifiers. 
- difference(filters.DEFAULT_ESCAPES.keys()) - ).union(self.expression_undeclared_identifiers) + return ( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ).union(self.expression_undeclared_identifiers) class CallTag(Tag): - __keyword__ = 'call' + __keyword__ = "call" def __init__(self, keyword, attributes, **kwargs): - super(CallTag, self).__init__(keyword, attributes, - ('args'), ('expr',), ('expr',), **kwargs) - self.expression = attributes['expr'] + super(CallTag, self).__init__( + keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs + ) + self.expression = attributes["expr"] self.code = ast.PythonCode(self.expression, **self.exception_kwargs) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.code.declared_identifiers.union(self.body_decl.allargnames) def undeclared_identifiers(self): - return self.code.undeclared_identifiers.\ - difference(self.code.declared_identifiers) + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) class CallNamespaceTag(Tag): - def __init__(self, namespace, defname, attributes, **kwargs): super(CallNamespaceTag, self).__init__( namespace + ":" + defname, attributes, - tuple(attributes.keys()) + ('args', ), + tuple(attributes.keys()) + ("args",), (), (), - **kwargs) + **kwargs + ) self.expression = "%s.%s(%s)" % ( namespace, defname, - ",".join(["%s=%s" % (k, v) for k, v in - self.parsed_attributes.items() - if k != 'args']) + ",".join( + [ + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" + ] + ), ) self.code = ast.PythonCode(self.expression, **self.exception_kwargs) self.body_decl = ast.FunctionArgs( - attributes.get('args', ''), - **self.exception_kwargs) + attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.code.declared_identifiers.union(self.body_decl.allargnames) def undeclared_identifiers(self): - return self.code.undeclared_identifiers.\ - difference(self.code.declared_identifiers) + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) class InheritTag(Tag): - __keyword__ = 'inherit' + __keyword__ = "inherit" def __init__(self, keyword, attributes, **kwargs): super(InheritTag, self).__init__( - keyword, attributes, - ('file',), (), ('file',), **kwargs) + keyword, attributes, ("file",), (), ("file",), **kwargs + ) class PageTag(Tag): - __keyword__ = 'page' + __keyword__ = "page" def __init__(self, keyword, attributes, **kwargs): - expressions = \ - ['cached', 'args', 'expression_filter', 'enable_loop'] + \ - [c for c in attributes if c.startswith('cache_')] + expressions = [ + "cached", + "args", + "expression_filter", + "enable_loop", + ] + [c for c in attributes if c.startswith("cache_")] super(PageTag, self).__init__( - keyword, - attributes, - expressions, - (), - (), - **kwargs) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + keyword, attributes, expressions, (), (), **kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) self.filter_args = ast.ArgumentList( - attributes.get('expression_filter', ''), - **self.exception_kwargs) + attributes.get("expression_filter", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.body_decl.allargnames diff --git 
a/server/www/packages/packages-linux/x64/mako/pygen.py b/server/www/packages/packages-linux/x64/mako/pygen.py index 8514e02..947721f 100644 --- a/server/www/packages/packages-linux/x64/mako/pygen.py +++ b/server/www/packages/packages-linux/x64/mako/pygen.py @@ -1,5 +1,5 @@ # mako/pygen.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,11 +7,11 @@ """utilities for generating and formatting literal Python code.""" import re + from mako import exceptions class PythonPrinter(object): - def __init__(self, stream): # indentation counter self.indent = 0 @@ -54,14 +54,16 @@ class PythonPrinter(object): self.stream.write("\n" * num) self._update_lineno(num) - def write_indented_block(self, block): + def write_indented_block(self, block, starting_lineno=None): """print a line or lines of python which already contain indentation. The indentation of the total block of lines will be adjusted to that of the current indent level.""" self.in_indent_lines = False - for l in re.split(r'\r?\n', block): + for i, l in enumerate(re.split(r"\r?\n", block)): self.line_buffer.append(l) + if starting_lineno is not None: + self.start_source(starting_lineno + i) self._update_lineno(1) def writelines(self, *lines): @@ -83,21 +85,18 @@ class PythonPrinter(object): self.in_indent_lines = True if ( - line is None or - re.match(r"^\s*#", line) or - re.match(r"^\s*$", line) + line is None + or re.match(r"^\s*#", line) + or re.match(r"^\s*$", line) ): hastext = False else: hastext = True - is_comment = line and len(line) and line[0] == '#' + is_comment = line and len(line) and line[0] == "#" # see if this line should decrease the indentation level - if ( - not is_comment and - (not hastext or self._is_unindentor(line)) - ): + if not is_comment and (not hastext or self._is_unindentor(line)): if self.indent > 0: self.indent -= 1 @@ -106,7 +105,8 @@ class PythonPrinter(object): # module wont compile. if len(self.indent_detail) == 0: raise exceptions.SyntaxException( - "Too many whitespace closures") + "Too many whitespace closures" + ) self.indent_detail.pop() if line is None: @@ -136,8 +136,9 @@ class PythonPrinter(object): # its not a "compound" keyword. but lets also # test for valid Python keywords that might be indenting us, # else assume its a non-indenting line - m2 = re.match(r"^\s*(def|class|else|elif|except|finally)", - line) + m2 = re.match( + r"^\s*(def|class|else|elif|except|finally)", line + ) if m2: self.indent += 1 self.indent_detail.append(indentor) @@ -189,14 +190,15 @@ class PythonPrinter(object): # return False - def _indent_line(self, line, stripspace=''): + def _indent_line(self, line, stripspace=""): """indent the given line according to the current indent level. 
stripspace is a string of space that will be truncated from the start of the line before indenting.""" - return re.sub(r"^%s" % stripspace, self.indentstring - * self.indent, line) + return re.sub( + r"^%s" % stripspace, self.indentstring * self.indent, line + ) def _reset_multi_line_flags(self): """reset the flags which would indicate we are in a backslashed @@ -214,7 +216,7 @@ class PythonPrinter(object): # a literal multiline string with unfortunately placed # whitespace - current_state = (self.backslashed or self.triplequoted) + current_state = self.backslashed or self.triplequoted if re.search(r"\\$", line): self.backslashed = True @@ -251,7 +253,7 @@ def adjust_whitespace(text): (backslashed, triplequoted) = (0, 1) def in_multi_line(line): - start_state = (state[backslashed] or state[triplequoted]) + start_state = state[backslashed] or state[triplequoted] if re.search(r"\\$", line): state[backslashed] = True @@ -261,7 +263,7 @@ def adjust_whitespace(text): def match(reg, t): m = re.match(reg, t) if m: - return m, t[len(m.group(0)):] + return m, t[len(m.group(0)) :] else: return None, t @@ -273,7 +275,7 @@ def adjust_whitespace(text): else: m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) else: - m, line = match(r'#', line) + m, line = match(r"#", line) if m: return start_state @@ -286,13 +288,13 @@ def adjust_whitespace(text): return start_state - def _indent_line(line, stripspace=''): - return re.sub(r"^%s" % stripspace, '', line) + def _indent_line(line, stripspace=""): + return re.sub(r"^%s" % stripspace, "", line) lines = [] stripspace = None - for line in re.split(r'\r?\n', text): + for line in re.split(r"\r?\n", text): if in_multi_line(line): lines.append(line) else: diff --git a/server/www/packages/packages-linux/x64/mako/pyparser.py b/server/www/packages/packages-linux/x64/mako/pyparser.py index 15d0da6..b16672d 100644 --- a/server/www/packages/packages-linux/x64/mako/pyparser.py +++ b/server/www/packages/packages-linux/x64/mako/pyparser.py @@ -1,5 +1,5 @@ # mako/pyparser.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,46 +10,52 @@ Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler module is used. 
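Review note: the identifier analysis in this module (`FindIdentifiers`) boils down to walking a Python AST and separating names that are assigned from names that are only read. A rough sketch of that idea using just the stdlib `ast` module (not Mako's actual visitor, which additionally tracks function arguments, imports, lambdas, and tuple unpacking):

import ast

def undeclared_names(code):
    """Approximate the names read in `code` but never assigned in it."""
    declared, loaded = set(), set()
    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.Name):
            if isinstance(node.ctx, ast.Store):
                declared.add(node.id)
            elif isinstance(node.ctx, ast.Load):
                loaded.add(node.id)
    return loaded - declared

print(undeclared_names("x = y + 1"))  # prints {'y'}
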
""" -from mako import exceptions, util, compat -from mako.compat import arg_stringname import operator +import _ast + +from mako import _ast_util +from mako import compat +from mako import exceptions +from mako import util +from mako.compat import arg_stringname + if compat.py3k: # words that cannot be assigned to (notably # smaller than the total keys in __builtins__) - reserved = set(['True', 'False', 'None', 'print']) + reserved = set(["True", "False", "None", "print"]) # the "id" attribute on a function node - arg_id = operator.attrgetter('arg') + arg_id = operator.attrgetter("arg") else: # words that cannot be assigned to (notably # smaller than the total keys in __builtins__) - reserved = set(['True', 'False', 'None']) + reserved = set(["True", "False", "None"]) # the "id" attribute on a function node - arg_id = operator.attrgetter('id') + arg_id = operator.attrgetter("id") -import _ast util.restore__ast(_ast) -from mako import _ast_util -def parse(code, mode='exec', **exception_kwargs): +def parse(code, mode="exec", **exception_kwargs): """Parse an expression into AST""" try: - return _ast_util.parse(code, '', mode) + return _ast_util.parse(code, "", mode) except Exception: raise exceptions.SyntaxException( - "(%s) %s (%r)" % ( + "(%s) %s (%r)" + % ( compat.exception_as().__class__.__name__, compat.exception_as(), - code[0:50] - ), **exception_kwargs) + code[0:50], + ), + **exception_kwargs + ) class FindIdentifiers(_ast_util.NodeVisitor): - def __init__(self, listener, **exception_kwargs): self.in_function = False self.in_assign_targets = False @@ -119,9 +125,9 @@ class FindIdentifiers(_ast_util.NodeVisitor): self.in_function = True local_ident_stack = self.local_ident_stack - self.local_ident_stack = local_ident_stack.union([ - arg_id(arg) for arg in self._expand_tuples(node.args.args) - ]) + self.local_ident_stack = local_ident_stack.union( + [arg_id(arg) for arg in self._expand_tuples(node.args.args)] + ) if islambda: self.visit(node.body) else: @@ -146,9 +152,11 @@ class FindIdentifiers(_ast_util.NodeVisitor): # this is eqiuvalent to visit_AssName in # compiler self._add_declared(node.id) - elif node.id not in reserved and node.id \ - not in self.listener.declared_identifiers and node.id \ - not in self.local_ident_stack: + elif ( + node.id not in reserved + and node.id not in self.listener.declared_identifiers + and node.id not in self.local_ident_stack + ): self.listener.undeclared_identifiers.add(node.id) def visit_Import(self, node): @@ -156,24 +164,25 @@ class FindIdentifiers(_ast_util.NodeVisitor): if name.asname is not None: self._add_declared(name.asname) else: - self._add_declared(name.name.split('.')[0]) + self._add_declared(name.name.split(".")[0]) def visit_ImportFrom(self, node): for name in node.names: if name.asname is not None: self._add_declared(name.asname) else: - if name.name == '*': + if name.name == "*": raise exceptions.CompileException( "'import *' is not supported, since all identifier " "names must be explicitly declared. Please use the " "form 'from import , , " - "...' instead.", **self.exception_kwargs) + "...' 
instead.", + **self.exception_kwargs + ) self._add_declared(name.name) class FindTuple(_ast_util.NodeVisitor): - def __init__(self, listener, code_factory, **exception_kwargs): self.listener = listener self.exception_kwargs = exception_kwargs @@ -184,16 +193,17 @@ class FindTuple(_ast_util.NodeVisitor): p = self.code_factory(n, **self.exception_kwargs) self.listener.codeargs.append(p) self.listener.args.append(ExpressionGenerator(n).value()) - self.listener.declared_identifiers = \ - self.listener.declared_identifiers.union( - p.declared_identifiers) - self.listener.undeclared_identifiers = \ - self.listener.undeclared_identifiers.union( - p.undeclared_identifiers) + ldi = self.listener.declared_identifiers + self.listener.declared_identifiers = ldi.union( + p.declared_identifiers + ) + lui = self.listener.undeclared_identifiers + self.listener.undeclared_identifiers = lui.union( + p.undeclared_identifiers + ) class ParseFunc(_ast_util.NodeVisitor): - def __init__(self, listener, **exception_kwargs): self.listener = listener self.exception_kwargs = exception_kwargs @@ -224,10 +234,9 @@ class ParseFunc(_ast_util.NodeVisitor): class ExpressionGenerator(object): - def __init__(self, astnode): - self.generator = _ast_util.SourceGenerator(' ' * 4) + self.generator = _ast_util.SourceGenerator(" " * 4) self.generator.visit(astnode) def value(self): - return ''.join(self.generator.result) + return "".join(self.generator.result) diff --git a/server/www/packages/packages-linux/x64/mako/runtime.py b/server/www/packages/packages-linux/x64/mako/runtime.py index 769541c..465908e 100644 --- a/server/www/packages/packages-linux/x64/mako/runtime.py +++ b/server/www/packages/packages-linux/x64/mako/runtime.py @@ -1,5 +1,5 @@ # mako/runtime.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,10 +7,14 @@ """provides runtime services for templates, including Context, Namespace, and various helper functions.""" -from mako import exceptions, util, compat -from mako.compat import compat_builtins +import functools import sys +from mako import compat +from mako import exceptions +from mako import util +from mako.compat import compat_builtins + class Context(object): @@ -34,18 +38,19 @@ class Context(object): # "capture" function which proxies to the # generic "capture" function - self._data['capture'] = compat.partial(capture, self) + self._data["capture"] = functools.partial(capture, self) # "caller" stack used by def calls with content - self.caller_stack = self._data['caller'] = CallerStack() + self.caller_stack = self._data["caller"] = CallerStack() def _set_with_template(self, t): self._with_template = t illegal_names = t.reserved_names.intersection(self._data) if illegal_names: raise exceptions.NameConflictError( - "Reserved words passed to render(): %s" % - ", ".join(illegal_names)) + "Reserved words passed to render(): %s" + % ", ".join(illegal_names) + ) @property def lookup(self): @@ -177,14 +182,13 @@ class Context(object): c = self._copy() x = c._data - x.pop('self', None) - x.pop('parent', None) - x.pop('next', None) + x.pop("self", None) + x.pop("parent", None) + x.pop("next", None) return c class CallerStack(list): - def __init__(self): self.nextcaller = None @@ -231,6 +235,7 @@ class Undefined(object): def __bool__(self): return False + UNDEFINED = Undefined() STOP_RENDERING = "" @@ -342,7 +347,6 @@ 
class LoopContext(object): class _NSAttr(object): - def __init__(self, parent): self.__parent = parent @@ -373,9 +377,15 @@ class Namespace(object): """ - def __init__(self, name, context, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + self, + name, + context, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ -473,9 +483,12 @@ class Namespace(object): if key in self.context.namespaces: return self.context.namespaces[key] else: - ns = TemplateNamespace(uri, self.context._copy(), - templateuri=uri, - calling_uri=self._templateuri) + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) self.context.namespaces[key] = ns return ns @@ -518,7 +531,7 @@ class Namespace(object): def _populate(self, d, l): for ident in l: - if ident == '*': + if ident == "*": for (k, v) in self._get_star(): d[k] = v else: @@ -536,8 +549,8 @@ class Namespace(object): val = getattr(self.inherits, key) else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -546,9 +559,17 @@ class TemplateNamespace(Namespace): """A :class:`.Namespace` specific to a :class:`.Template` instance.""" - def __init__(self, name, context, template=None, templateuri=None, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + self, + name, + context, + template=None, + templateuri=None, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ -556,8 +577,7 @@ class TemplateNamespace(Namespace): self.callables = dict([(c.__name__, c) for c in callables]) if templateuri is not None: - self.template = _lookup_template(context, templateuri, - calling_uri) + self.template = _lookup_template(context, templateuri, calling_uri) self._templateuri = self.template.module._template_uri elif template is not None: self.template = template @@ -566,9 +586,9 @@ class TemplateNamespace(Namespace): raise TypeError("'template' argument is required.") if populate_self: - lclcallable, lclcontext = \ - _populate_self_namespace(context, self.template, - self_ns=self) + lclcallable, lclcontext = _populate_self_namespace( + context, self.template, self_ns=self + ) @property def module(self): @@ -606,7 +626,8 @@ class TemplateNamespace(Namespace): def get(key): callable_ = self.template._get_def_callable(key) - return compat.partial(callable_, self.context) + return functools.partial(callable_, self.context) + for k in self.template.module._exports: yield (k, get(k)) @@ -615,14 +636,14 @@ class TemplateNamespace(Namespace): val = self.callables[key] elif self.template.has_def(key): callable_ = self.template._get_def_callable(key) - val = compat.partial(callable_, self.context) + val = functools.partial(callable_, self.context) elif self.inherits: val = getattr(self.inherits, key) else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -631,9 +652,16 @@ class ModuleNamespace(Namespace): """A :class:`.Namespace` specific to a Python module instance.""" - def __init__(self, name, context, module, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + 
self, + name, + context, + module, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ -641,7 +669,7 @@ class ModuleNamespace(Namespace): self.callables = dict([(c.__name__, c) for c in callables]) mod = __import__(module) - for token in module.split('.')[1:]: + for token in module.split(".")[1:]: mod = getattr(mod, token) self.module = mod @@ -657,23 +685,23 @@ class ModuleNamespace(Namespace): for key in self.callables: yield (key, self.callables[key]) for key in dir(self.module): - if key[0] != '_': + if key[0] != "_": callable_ = getattr(self.module, key) - if compat.callable(callable_): - yield key, compat.partial(callable_, self.context) + if callable(callable_): + yield key, functools.partial(callable_, self.context) def __getattr__(self, key): if key in self.callables: val = self.callables[key] elif hasattr(self.module, key): callable_ = getattr(self.module, key) - val = compat.partial(callable_, self.context) + val = functools.partial(callable_, self.context) elif self.inherits: val = getattr(self.inherits, key) else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -692,6 +720,7 @@ def supports_caller(func): return func(context, *args, **kwargs) finally: context.caller_stack._pop_frame() + return wrap_stackframe @@ -703,7 +732,7 @@ def capture(context, callable_, *args, **kwargs): """ - if not compat.callable(callable_): + if not callable(callable_): raise exceptions.RuntimeException( "capture() function expects a callable as " "its argument (i.e. capture(func, *args, **kwargs))" @@ -721,13 +750,16 @@ def _decorate_toplevel(fn): def go(context, *args, **kw): def y(*args, **kw): return render_fn(context, *args, **kw) + try: y.__name__ = render_fn.__name__[7:] except TypeError: # < Python 2.4 pass return fn(y)(context, *args, **kw) + return go + return decorate_render @@ -737,7 +769,9 @@ def _decorate_inline(context, fn): def go(*args, **kw): return dec(context, *args, **kw) + return go + return decorate_render @@ -747,8 +781,8 @@ def _include_file(context, uri, calling_uri, **kwargs): template = _lookup_template(context, uri, calling_uri) (callable_, ctx) = _populate_self_namespace( - context._clean_inheritance_tokens(), - template) + context._clean_inheritance_tokens(), template + ) kwargs = _kwargs_for_include(callable_, context._data, **kwargs) if template.include_error_handler: try: @@ -769,23 +803,25 @@ def _inherit_from(context, uri, calling_uri): if uri is None: return None template = _lookup_template(context, uri, calling_uri) - self_ns = context['self'] + self_ns = context["self"] ih = self_ns while ih.inherits is not None: ih = ih.inherits - lclcontext = context._locals({'next': ih}) - ih.inherits = TemplateNamespace("self:%s" % template.uri, - lclcontext, - template=template, - populate_self=False) - context._data['parent'] = lclcontext._data['local'] = ih.inherits - callable_ = getattr(template.module, '_mako_inherit', None) + lclcontext = context._locals({"next": ih}) + ih.inherits = TemplateNamespace( + "self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False, + ) + context._data["parent"] = lclcontext._data["local"] = ih.inherits + callable_ = getattr(template.module, "_mako_inherit", None) if callable_ is not None: ret = callable_(template, lclcontext) if ret: return ret - gen_ns = getattr(template.module, 
'_mako_generate_namespaces', None) + gen_ns = getattr(template.module, "_mako_generate_namespaces", None) if gen_ns is not None: gen_ns(context) return (template.callable_, lclcontext) @@ -795,8 +831,9 @@ def _lookup_template(context, uri, relativeto): lookup = context._with_template.lookup if lookup is None: raise exceptions.TemplateLookupException( - "Template '%s' has no TemplateLookup associated" % - context._with_template.uri) + "Template '%s' has no TemplateLookup associated" + % context._with_template.uri + ) uri = lookup.adjust_uri(uri, relativeto) try: return lookup.get_template(uri) @@ -806,11 +843,14 @@ def _lookup_template(context, uri, relativeto): def _populate_self_namespace(context, template, self_ns=None): if self_ns is None: - self_ns = TemplateNamespace('self:%s' % template.uri, - context, template=template, - populate_self=False) - context._data['self'] = context._data['local'] = self_ns - if hasattr(template.module, '_mako_inherit'): + self_ns = TemplateNamespace( + "self:%s" % template.uri, + context, + template=template, + populate_self=False, + ) + context._data["self"] = context._data["local"] = self_ns + if hasattr(template.module, "_mako_inherit"): ret = template.module._mako_inherit(template, context) if ret: return ret @@ -829,18 +869,24 @@ def _render(template, callable_, args, data, as_unicode=False): buf = util.FastEncodingBuffer( as_unicode=as_unicode, encoding=template.output_encoding, - errors=template.encoding_errors) + errors=template.encoding_errors, + ) context = Context(buf, **data) context._outputting_as_unicode = as_unicode context._set_with_template(template) - _render_context(template, callable_, context, *args, - **_kwargs_for_callable(callable_, data)) + _render_context( + template, + callable_, + context, + *args, + **_kwargs_for_callable(callable_, data) + ) return context._pop_buffer().getvalue() def _kwargs_for_callable(callable_, data): - argspec = compat.inspect_func_args(callable_) + argspec = compat.inspect_getargspec(callable_) # for normal pages, **pageargs is usually present if argspec[2]: return data @@ -849,22 +895,23 @@ def _kwargs_for_callable(callable_, data): namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] kwargs = {} for arg in namedargs: - if arg != 'context' and arg in data and arg not in kwargs: + if arg != "context" and arg in data and arg not in kwargs: kwargs[arg] = data[arg] return kwargs def _kwargs_for_include(callable_, data, **kwargs): - argspec = compat.inspect_func_args(callable_) + argspec = compat.inspect_getargspec(callable_) namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] for arg in namedargs: - if arg != 'context' and arg in data and arg not in kwargs: + if arg != "context" and arg in data and arg not in kwargs: kwargs[arg] = data[arg] return kwargs def _render_context(tmpl, callable_, context, *args, **kwargs): import mako.template as template + # create polymorphic 'self' namespace for this # template with possibly updated context if not isinstance(tmpl, template.DefTemplate): @@ -886,8 +933,9 @@ def _exec_template(callable_, context, args=None, kwargs=None): be interpreted here. 
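Review note: `_exec_template` below is where `format_exceptions` and `error_handler` actually take effect at render time. A minimal usage sketch through the public API (the template text is invented for illustration):

from mako.template import Template

t = Template("hello ${name}", format_exceptions=True)
print(t.render(name="world"))  # normal path: "hello world"

# Rendering without 'name' produces mako.runtime.UNDEFINED, which raises
# when written to the output; with format_exceptions=True that error is
# rendered via exceptions.html_error_template() instead of propagating.
print(t.render())
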
""" template = context._with_template - if template is not None and \ - (template.format_exceptions or template.error_handler): + if template is not None and ( + template.format_exceptions or template.error_handler + ): try: callable_(context, *args, **kwargs) except Exception: @@ -908,11 +956,15 @@ def _render_error(template, context, error): error_template = exceptions.html_error_template() if context._outputting_as_unicode: context._buffer_stack[:] = [ - util.FastEncodingBuffer(as_unicode=True)] + util.FastEncodingBuffer(as_unicode=True) + ] else: - context._buffer_stack[:] = [util.FastEncodingBuffer( - error_template.output_encoding, - error_template.encoding_errors)] + context._buffer_stack[:] = [ + util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors, + ) + ] context._set_with_template(error_template) error_template.render_context(context, error=error) diff --git a/server/www/packages/packages-linux/x64/mako/template.py b/server/www/packages/packages-linux/x64/mako/template.py index 329632c..3fd0871 100644 --- a/server/www/packages/packages-linux/x64/mako/template.py +++ b/server/www/packages/packages-linux/x64/mako/template.py @@ -1,5 +1,5 @@ # mako/template.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,8 +7,7 @@ """Provides the Template class, a facade for parsing, generating and executing template strings, as well as template runtime operations.""" -from mako.lexer import Lexer -from mako import runtime, util, exceptions, codegen, cache, compat +import json import os import re import shutil @@ -18,6 +17,14 @@ import tempfile import types import weakref +from mako import cache +from mako import codegen +from mako import compat +from mako import exceptions +from mako import runtime +from mako import util +from mako.lexer import Lexer + class Template(object): @@ -230,41 +237,43 @@ class Template(object): lexer_cls = Lexer - def __init__(self, - text=None, - filename=None, - uri=None, - format_exceptions=False, - error_handler=None, - lookup=None, - output_encoding=None, - encoding_errors='strict', - module_directory=None, - cache_args=None, - cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - module_filename=None, - input_encoding=None, - disable_unicode=False, - module_writer=None, - bytestring_passthrough=False, - default_filters=None, - buffer_filters=(), - strict_undefined=False, - imports=None, - future_imports=None, - enable_loop=True, - preprocessor=None, - lexer_cls=None, - include_error_handler=None): + def __init__( + self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors="strict", + module_directory=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + disable_unicode=False, + module_writer=None, + bytestring_passthrough=False, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): if uri: - self.module_id = re.sub(r'\W', "_", uri) + self.module_id = re.sub(r"\W", "_", uri) self.uri = uri elif filename: - self.module_id = 
re.sub(r'\W', "_", filename) + self.module_id = re.sub(r"\W", "_", filename) drive, path = os.path.splitdrive(filename) path = os.path.normpath(path).replace(os.path.sep, "/") self.uri = path @@ -278,9 +287,10 @@ class Template(object): u_norm = os.path.normpath(u_norm) if u_norm.startswith(".."): raise exceptions.TemplateLookupException( - "Template uri \"%s\" is invalid - " + 'Template uri "%s" is invalid - ' "it cannot be relative outside " - "of the root path." % self.uri) + "of the root path." % self.uri + ) self.input_encoding = input_encoding self.output_encoding = output_encoding @@ -293,17 +303,18 @@ class Template(object): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) elif output_encoding and disable_unicode: raise exceptions.UnsupportedError( "output_encoding must be set to " - "None when disable_unicode is used.") + "None when disable_unicode is used." + ) if default_filters is None: if compat.py3k or self.disable_unicode: - self.default_filters = ['str'] + self.default_filters = ["str"] else: - self.default_filters = ['unicode'] + self.default_filters = ["unicode"] else: self.default_filters = default_filters self.buffer_filters = buffer_filters @@ -320,7 +331,7 @@ class Template(object): (code, module) = _compile_text(self, text, filename) self._code = code self._source = text - ModuleInfo(module, None, self, filename, code, text) + ModuleInfo(module, None, self, filename, code, text, uri) elif filename is not None: # if template filename and a module directory, load # a filesystem-based module file, generating if needed @@ -329,8 +340,7 @@ class Template(object): elif module_directory is not None: path = os.path.abspath( os.path.join( - os.path.normpath(module_directory), - u_norm + ".py" + os.path.normpath(module_directory), u_norm + ".py" ) ) else: @@ -338,7 +348,8 @@ class Template(object): module = self._compile_from_file(path, filename) else: raise exceptions.RuntimeException( - "Template requires text or filename") + "Template requires text or filename" + ) self.module = module self.filename = filename @@ -351,8 +362,12 @@ class Template(object): self.module_directory = module_directory self._setup_cache_args( - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, ) @util.memoized_property @@ -360,11 +375,17 @@ class Template(object): if self.enable_loop: return codegen.RESERVED_NAMES else: - return codegen.RESERVED_NAMES.difference(['loop']) + return codegen.RESERVED_NAMES.difference(["loop"]) - def _setup_cache_args(self, - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url): + def _setup_cache_args( + self, + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ): self.cache_impl = cache_impl self.cache_enabled = cache_enabled if cache_args: @@ -374,49 +395,42 @@ class Template(object): # transfer deprecated cache_* args if cache_type: - self.cache_args['type'] = cache_type + self.cache_args["type"] = cache_type if cache_dir: - self.cache_args['dir'] = cache_dir + self.cache_args["dir"] = cache_dir if cache_url: - self.cache_args['url'] = cache_url + self.cache_args["url"] = cache_url def _compile_from_file(self, path, filename): if path is not None: util.verify_directory(os.path.dirname(path)) filemtime = os.stat(filename)[stat.ST_MTIME] - if not 
os.path.exists(path) or \ - os.stat(path)[stat.ST_MTIME] < filemtime: + if ( + not os.path.exists(path) + or os.stat(path)[stat.ST_MTIME] < filemtime + ): data = util.read_file(filename) _compile_module_file( - self, - data, - filename, - path, - self.module_writer) + self, data, filename, path, self.module_writer + ) module = compat.load_module(self.module_id, path) del sys.modules[self.module_id] if module._magic_number != codegen.MAGIC_NUMBER: data = util.read_file(filename) _compile_module_file( - self, - data, - filename, - path, - self.module_writer) + self, data, filename, path, self.module_writer + ) module = compat.load_module(self.module_id, path) del sys.modules[self.module_id] - ModuleInfo(module, path, self, filename, None, None) + ModuleInfo(module, path, self, filename, None, None, None) else: # template filename and no module directory, compile code # in memory data = util.read_file(filename) - code, module = _compile_text( - self, - data, - filename) + code, module = _compile_text(self, data, filename) self._source = None self._code = code - ModuleInfo(module, None, self, filename, code, None) + ModuleInfo(module, None, self, filename, code, None, None) return module @property @@ -437,15 +451,15 @@ class Template(object): @property def cache_dir(self): - return self.cache_args['dir'] + return self.cache_args["dir"] @property def cache_url(self): - return self.cache_args['url'] + return self.cache_args["url"] @property def cache_type(self): - return self.cache_args['type'] + return self.cache_args["type"] def render(self, *args, **data): """Render the output of this template as a string. @@ -464,11 +478,9 @@ class Template(object): def render_unicode(self, *args, **data): """Render the output of this template as a unicode object.""" - return runtime._render(self, - self.callable_, - args, - data, - as_unicode=True) + return runtime._render( + self, self.callable_, args, data, as_unicode=True + ) def render_context(self, context, *args, **kwargs): """Render this :class:`.Template` with the given context. @@ -476,13 +488,9 @@ class Template(object): The data is written to the context's buffer. """ - if getattr(context, '_with_template', None) is None: + if getattr(context, "_with_template", None) is None: context._set_with_template(self) - runtime._render_context(self, - self.callable_, - context, - *args, - **kwargs) + runtime._render_context(self, self.callable_, context, *args, **kwargs) def has_def(self, name): return hasattr(self.module, "render_%s" % name) @@ -498,7 +506,7 @@ class Template(object): .. versionadded:: 1.0.4 """ - return [i[7:] for i in dir(self.module) if i[:7] == 'render_'] + return [i[7:] for i in dir(self.module) if i[:7] == "render_"] def _get_def_callable(self, name): return getattr(self.module, "render_%s" % name) @@ -512,42 +520,44 @@ class ModuleTemplate(Template): """A Template which is constructed given an existing Python module. 
- e.g.:: + e.g.:: - t = Template("this is a template") - f = file("mymodule.py", "w") - f.write(t.code) - f.close() + t = Template("this is a template") + f = file("mymodule.py", "w") + f.write(t.code) + f.close() - import mymodule + import mymodule - t = ModuleTemplate(mymodule) - print t.render() + t = ModuleTemplate(mymodule) + print(t.render()) """ - def __init__(self, module, - module_filename=None, - template=None, - template_filename=None, - module_source=None, - template_source=None, - output_encoding=None, - encoding_errors='strict', - disable_unicode=False, - bytestring_passthrough=False, - format_exceptions=False, - error_handler=None, - lookup=None, - cache_args=None, - cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - include_error_handler=None, - ): - self.module_id = re.sub(r'\W', "_", module._template_uri) + def __init__( + self, + module, + module_filename=None, + template=None, + template_filename=None, + module_source=None, + template_source=None, + output_encoding=None, + encoding_errors="strict", + disable_unicode=False, + bytestring_passthrough=False, + format_exceptions=False, + error_handler=None, + lookup=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + include_error_handler=None, + ): + self.module_id = re.sub(r"\W", "_", module._template_uri) self.uri = module._template_uri self.input_encoding = module._source_encoding self.output_encoding = output_encoding @@ -558,21 +568,25 @@ class ModuleTemplate(Template): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) elif output_encoding and disable_unicode: raise exceptions.UnsupportedError( "output_encoding must be set to " - "None when disable_unicode is used.") + "None when disable_unicode is used." + ) self.module = module self.filename = template_filename - ModuleInfo(module, - module_filename, - self, - template_filename, - module_source, - template_source) + ModuleInfo( + module, + module_filename, + self, + template_filename, + module_source, + template_source, + module._template_uri, + ) self.callable_ = self.module.render_body self.format_exceptions = format_exceptions @@ -580,8 +594,12 @@ class ModuleTemplate(Template): self.include_error_handler = include_error_handler self.lookup = lookup self._setup_cache_args( - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, ) @@ -614,20 +632,25 @@ class ModuleInfo(object): source code based on a module's identifier. 
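Review note: the new `template_uri` argument threaded through the `ModuleInfo` call sites in this file pairs with the `_compile_from_file` rework above. Day to day this machinery is driven by `module_directory` caching; a sketch with illustrative paths:

from mako.lookup import TemplateLookup

# Generated template modules are written under module_directory and reused
# on later runs until the source template becomes newer than the compiled
# .py file (see the os.stat mtime comparison in _compile_from_file).
lookup = TemplateLookup(
    directories=["/path/to/templates"],
    module_directory="/tmp/mako_modules",
    filesystem_checks=True,
)
print(lookup.get_template("/index.html").render())
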
""" + _modules = weakref.WeakValueDictionary() - def __init__(self, - module, - module_filename, - template, - template_filename, - module_source, - template_source): + def __init__( + self, + module, + module_filename, + template, + template_filename, + module_source, + template_source, + template_uri, + ): self.module = module self.module_filename = module_filename self.template_filename = template_filename self.module_source = module_source self.template_source = template_source + self.template_uri = template_uri self._modules[module.__name__] = template._mmarker = self if module_filename: self._modules[module_filename] = self @@ -635,15 +658,15 @@ class ModuleInfo(object): @classmethod def get_module_source_metadata(cls, module_source, full_line_map=False): source_map = re.search( - r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", - module_source, re.S).group(1) - source_map = compat.json.loads(source_map) - source_map['line_map'] = dict( - (int(k), int(v)) - for k, v in source_map['line_map'].items()) + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S + ).group(1) + source_map = json.loads(source_map) + source_map["line_map"] = dict( + (int(k), int(v)) for k, v in source_map["line_map"].items() + ) if full_line_map: - f_line_map = source_map['full_line_map'] = [] - line_map = source_map['line_map'] + f_line_map = source_map["full_line_map"] = [] + line_map = source_map["line_map"] curr_templ_line = 1 for mod_line in range(1, max(line_map)): @@ -662,10 +685,12 @@ class ModuleInfo(object): @property def source(self): if self.template_source is not None: - if self.module._source_encoding and \ - not isinstance(self.template_source, compat.text_type): + if self.module._source_encoding and not isinstance( + self.template_source, compat.text_type + ): return self.template_source.decode( - self.module._source_encoding) + self.module._source_encoding + ) else: return self.template_source else: @@ -677,38 +702,46 @@ class ModuleInfo(object): def _compile(template, text, filename, generate_magic_comment): - lexer = template.lexer_cls(text, - filename, - disable_unicode=template.disable_unicode, - input_encoding=template.input_encoding, - preprocessor=template.preprocessor) + lexer = template.lexer_cls( + text, + filename, + disable_unicode=template.disable_unicode, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor, + ) node = lexer.parse() - source = codegen.compile(node, - template.uri, - filename, - default_filters=template.default_filters, - buffer_filters=template.buffer_filters, - imports=template.imports, - future_imports=template.future_imports, - source_encoding=lexer.encoding, - generate_magic_comment=generate_magic_comment, - disable_unicode=template.disable_unicode, - strict_undefined=template.strict_undefined, - enable_loop=template.enable_loop, - reserved_names=template.reserved_names) + source = codegen.compile( + node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + disable_unicode=template.disable_unicode, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names, + ) return source, lexer def _compile_text(template, text, filename): identifier = template.module_id - source, lexer = _compile(template, text, filename, - 
generate_magic_comment=template.disable_unicode) + source, lexer = _compile( + template, + text, + filename, + generate_magic_comment=template.disable_unicode, + ) cid = identifier if not compat.py3k and isinstance(cid, compat.text_type): cid = cid.encode() module = types.ModuleType(cid) - code = compile(source, cid, 'exec') + code = compile(source, cid, "exec") # this exec() works for 2.4->3.3. exec(code, module.__dict__, module.__dict__) @@ -716,11 +749,12 @@ def _compile_text(template, text, filename): def _compile_module_file(template, text, filename, outputpath, module_writer): - source, lexer = _compile(template, text, filename, - generate_magic_comment=True) + source, lexer = _compile( + template, text, filename, generate_magic_comment=True + ) if isinstance(source, compat.text_type): - source = source.encode(lexer.encoding or 'ascii') + source = source.encode(lexer.encoding or "ascii") if module_writer: module_writer(source, outputpath) @@ -737,9 +771,9 @@ def _compile_module_file(template, text, filename, outputpath, module_writer): def _get_module_info_from_callable(callable_): if compat.py3k: - return _get_module_info(callable_.__globals__['__name__']) + return _get_module_info(callable_.__globals__["__name__"]) else: - return _get_module_info(callable_.func_globals['__name__']) + return _get_module_info(callable_.func_globals["__name__"]) def _get_module_info(filename): diff --git a/server/www/packages/packages-linux/x64/mako/util.py b/server/www/packages/packages-linux/x64/mako/util.py index 2f089ff..16e3c72 100644 --- a/server/www/packages/packages-linux/x64/mako/util.py +++ b/server/www/packages/packages-linux/x64/mako/util.py @@ -1,15 +1,19 @@ # mako/util.py -# Copyright (C) 2006-2016 the Mako authors and contributors +# Copyright 2006-2020 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php +from __future__ import absolute_import -import re -import collections +from ast import parse import codecs -import os -from mako import compat +import collections import operator +import os +import re +import timeit + +from mako import compat def update_wrapper(decorated, fn): @@ -19,7 +23,6 @@ def update_wrapper(decorated, fn): class PluginLoader(object): - def __init__(self, group): self.group = group self.impls = {} @@ -29,16 +32,16 @@ class PluginLoader(object): return self.impls[name]() else: import pkg_resources - for impl in pkg_resources.iter_entry_points( - self.group, - name): + + for impl in pkg_resources.iter_entry_points(self.group, name): self.impls[name] = impl.load return impl.load() else: from mako import exceptions + raise exceptions.RuntimeException( - "Can't load plugin %s %s" % - (self.group, name)) + "Can't load plugin %s %s" % (self.group, name) + ) def register(self, name, modulepath, objname): def load(): @@ -46,18 +49,19 @@ class PluginLoader(object): for token in modulepath.split(".")[1:]: mod = getattr(mod, token) return getattr(mod, objname) + self.impls[name] = load -def verify_directory(dir): +def verify_directory(dir_): """create and/or verify a filesystem directory.""" tries = 0 - while not os.path.exists(dir): + while not os.path.exists(dir_): try: tries += 1 - os.makedirs(dir, compat.octal("0775")) + os.makedirs(dir_, compat.octal("0775")) except: if tries > 5: raise @@ -109,11 +113,15 @@ class memoized_instancemethod(object): def oneshot(*args, **kw): result = self.fget(obj, *args, **kw) - memo = lambda *a, **kw: result + + def memo(*a, **kw): + 
return result + memo.__name__ = self.__name__ memo.__doc__ = self.__doc__ obj.__dict__[self.__name__] = memo return result + oneshot.__name__ = self.__name__ oneshot.__doc__ = self.__doc__ return oneshot @@ -137,13 +145,13 @@ class FastEncodingBuffer(object): """a very rudimentary buffer that is faster than StringIO, but doesn't crash on unicode data like cStringIO.""" - def __init__(self, encoding=None, errors='strict', as_unicode=False): + def __init__(self, encoding=None, errors="strict", as_unicode=False): self.data = collections.deque() self.encoding = encoding if as_unicode: - self.delim = compat.u('') + self.delim = compat.u("") else: - self.delim = '' + self.delim = "" self.as_unicode = as_unicode self.errors = errors self.write = self.data.append @@ -154,8 +162,9 @@ class FastEncodingBuffer(object): def getvalue(self): if self.encoding: - return self.delim.join(self.data).encode(self.encoding, - self.errors) + return self.delim.join(self.data).encode( + self.encoding, self.errors + ) else: return self.delim.join(self.data) @@ -171,22 +180,21 @@ class LRUCache(dict): """ class _Item(object): - def __init__(self, key, value): self.key = key self.value = value - self.timestamp = compat.time_func() + self.timestamp = timeit.default_timer() def __repr__(self): return repr(self.value) - def __init__(self, capacity, threshold=.5): + def __init__(self, capacity, threshold=0.5): self.capacity = capacity self.threshold = threshold def __getitem__(self, key): item = dict.__getitem__(self, key) - item.timestamp = compat.time_func() + item.timestamp = timeit.default_timer() return item.value def values(self): @@ -210,9 +218,12 @@ class LRUCache(dict): def _manage_size(self): while len(self) > self.capacity + self.capacity * self.threshold: - bytime = sorted(dict.values(self), - key=operator.attrgetter('timestamp'), reverse=True) - for item in bytime[self.capacity:]: + bytime = sorted( + dict.values(self), + key=operator.attrgetter("timestamp"), + reverse=True, + ) + for item in bytime[self.capacity :]: try: del self[item.key] except KeyError: @@ -220,10 +231,11 @@ class LRUCache(dict): # broke in on us. 
loop around and try again break + # Regexp to match python magic encoding line _PYTHON_MAGIC_COMMENT_re = re.compile( - r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)', - re.VERBOSE) + r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE +) def parse_encoding(fp): @@ -242,13 +254,12 @@ def parse_encoding(fp): line1 = fp.readline() has_bom = line1.startswith(codecs.BOM_UTF8) if has_bom: - line1 = line1[len(codecs.BOM_UTF8):] + line1 = line1[len(codecs.BOM_UTF8) :] - m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore')) + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore")) if not m: try: - import parser - parser.suite(line1.decode('ascii', 'ignore')) + parse(line1.decode("ascii", "ignore")) except (ImportError, SyntaxError): # Either it's a real syntax error, in which case the source # is not valid python source, or line2 is a continuation of @@ -258,14 +269,16 @@ def parse_encoding(fp): else: line2 = fp.readline() m = _PYTHON_MAGIC_COMMENT_re.match( - line2.decode('ascii', 'ignore')) + line2.decode("ascii", "ignore") + ) if has_bom: if m: raise SyntaxError( "python refuses to compile code with both a UTF8" - " byte-order-mark and a magic encoding comment") - return 'utf_8' + " byte-order-mark and a magic encoding comment" + ) + return "utf_8" elif m: return m.group(1) else: @@ -289,10 +302,11 @@ def restore__ast(_ast): """Attempt to restore the required classes to the _ast module if it appears to be missing them """ - if hasattr(_ast, 'AST'): + if hasattr(_ast, "AST"): return _ast.PyCF_ONLY_AST = 2 << 9 - m = compile("""\ + m = compile( + """\ def foo(): pass class Bar(object): pass if False: pass @@ -305,13 +319,17 @@ baz = 'mako' baz and 'foo' or 'bar' (mako is baz == baz) is not baz != mako mako > baz < mako >= baz <= mako -mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST) +mako in baz not in mako""", + "", + "exec", + _ast.PyCF_ONLY_AST, + ) _ast.Module = type(m) for cls in _ast.Module.__mro__: - if cls.__name__ == 'mod': + if cls.__name__ == "mod": _ast.mod = cls - elif cls.__name__ == 'AST': + elif cls.__name__ == "AST": _ast.AST = cls _ast.FunctionDef = type(m.body[0]) @@ -361,7 +379,7 @@ mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST) _ast.NotIn = type(m.body[12].value.ops[1]) -def read_file(path, mode='rb'): +def read_file(path, mode="rb"): fp = open(path, mode) try: data = fp.read() diff --git a/server/www/packages/packages-linux/x64/psutil/__init__.py b/server/www/packages/packages-linux/x64/psutil/__init__.py index c2a83fb..22bb46f 100644 --- a/server/www/packages/packages-linux/x64/psutil/__init__.py +++ b/server/www/packages/packages-linux/x64/psutil/__init__.py @@ -17,7 +17,7 @@ sensors) in Python. Supported platforms: - Sun Solaris - AIX -Works with Python versions from 2.6 to 3.X. +Works with Python versions from 2.6 to 3.4+. """ from __future__ import division @@ -25,12 +25,12 @@ from __future__ import division import collections import contextlib import datetime -import errno import functools import os import signal import subprocess import sys +import threading import time try: import pwd @@ -38,11 +38,18 @@ except ImportError: pwd = None from . 
import _common +from ._common import AccessDenied from ._common import deprecated_method +from ._common import Error from ._common import memoize from ._common import memoize_when_activated +from ._common import NoSuchProcess +from ._common import TimeoutExpired from ._common import wrap_numbers as _wrap_numbers +from ._common import ZombieProcess from ._compat import long +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import PY3 as _PY3 from ._common import STATUS_DEAD @@ -86,12 +93,6 @@ from ._common import POSIX # NOQA from ._common import SUNOS from ._common import WINDOWS -from ._exceptions import AccessDenied -from ._exceptions import Error -from ._exceptions import NoSuchProcess -from ._exceptions import TimeoutExpired -from ._exceptions import ZombieProcess - if LINUX: # This is public API and it will be retrieved from _pslinux.py # via sys.modules. @@ -151,6 +152,10 @@ elif WINDOWS: from ._psutil_windows import NORMAL_PRIORITY_CLASS # NOQA from ._psutil_windows import REALTIME_PRIORITY_CLASS # NOQA from ._pswindows import CONN_DELETE_TCB # NOQA + from ._pswindows import IOPRIO_VERYLOW # NOQA + from ._pswindows import IOPRIO_LOW # NOQA + from ._pswindows import IOPRIO_NORMAL # NOQA + from ._pswindows import IOPRIO_HIGH # NOQA elif MACOS: from . import _psosx as _psplatform @@ -211,23 +216,26 @@ __all__ = [ "pid_exists", "pids", "process_iter", "wait_procs", # proc "virtual_memory", "swap_memory", # memory "cpu_times", "cpu_percent", "cpu_times_percent", "cpu_count", # cpu - "cpu_stats", # "cpu_freq", + "cpu_stats", # "cpu_freq", "getloadavg" "net_io_counters", "net_connections", "net_if_addrs", # network "net_if_stats", "disk_io_counters", "disk_partitions", "disk_usage", # disk # "sensors_temperatures", "sensors_battery", "sensors_fans" # sensors "users", "boot_time", # others ] + + __all__.extend(_psplatform.__extra__all__) __author__ = "Giampaolo Rodola'" -__version__ = "5.4.8" +__version__ = "5.7.0" version_info = tuple([int(num) for num in __version__.split('.')]) + +_timer = getattr(time, 'monotonic', time.time) AF_LINK = _psplatform.AF_LINK POWER_TIME_UNLIMITED = _common.POWER_TIME_UNLIMITED POWER_TIME_UNKNOWN = _common.POWER_TIME_UNKNOWN _TOTAL_PHYMEM = None -_timer = getattr(time, 'monotonic', time.time) - +_LOWEST_PID = None # Sanity check in case the user messed up with psutil installation # or did something weird with sys.path. In this case we might end @@ -352,7 +360,7 @@ class Process(object): self._create_time = None self._gone = False self._hash = None - self._oneshot_inctx = False + self._lock = threading.RLock() # used for caching on Windows only (on POSIX ppid may change) self._ppid = None # platform-specific modules define an _psplatform.Process @@ -456,40 +464,45 @@ class Process(object): ... >>> """ - if self._oneshot_inctx: - # NOOP: this covers the use case where the user enters the - # context twice. Since as_dict() internally uses oneshot() - # I expect that the code below will be a pretty common - # "mistake" that the user will make, so let's guard - # against that: - # - # >>> with p.oneshot(): - # ... p.as_dict() - # ... 
- yield - else: - self._oneshot_inctx = True - try: - # cached in case cpu_percent() is used - self.cpu_times.cache_activate() - # cached in case memory_percent() is used - self.memory_info.cache_activate() - # cached in case parent() is used - self.ppid.cache_activate() - # cached in case username() is used - if POSIX: - self.uids.cache_activate() - # specific implementation cache - self._proc.oneshot_enter() + with self._lock: + if hasattr(self, "_cache"): + # NOOP: this covers the use case where the user enters the + # context twice: + # + # >>> with p.oneshot(): + # ... with p.oneshot(): + # ... + # + # Also, since as_dict() internally uses oneshot() + # I expect that the code below will be a pretty common + # "mistake" that the user will make, so let's guard + # against that: + # + # >>> with p.oneshot(): + # ... p.as_dict() + # ... yield - finally: - self.cpu_times.cache_deactivate() - self.memory_info.cache_deactivate() - self.ppid.cache_deactivate() - if POSIX: - self.uids.cache_deactivate() - self._proc.oneshot_exit() - self._oneshot_inctx = False + else: + try: + # cached in case cpu_percent() is used + self.cpu_times.cache_activate(self) + # cached in case memory_percent() is used + self.memory_info.cache_activate(self) + # cached in case parent() is used + self.ppid.cache_activate(self) + # cached in case username() is used + if POSIX: + self.uids.cache_activate(self) + # specific implementation cache + self._proc.oneshot_enter() + yield + finally: + self.cpu_times.cache_deactivate(self) + self.memory_info.cache_deactivate(self) + self.ppid.cache_deactivate(self) + if POSIX: + self.uids.cache_deactivate(self) + self._proc.oneshot_exit() def as_dict(self, attrs=None, ad_value=None): """Utility method returning process information as a @@ -540,6 +553,9 @@ class Process(object): checking whether PID has been reused. If no parent is known return None. """ + lowest_pid = _LOWEST_PID if _LOWEST_PID is not None else pids()[0] + if self.pid == lowest_pid: + return None ppid = self.ppid() if ppid is not None: ctime = self.create_time() @@ -551,6 +567,17 @@ class Process(object): except NoSuchProcess: pass + def parents(self): + """Return the parents of this process as a list of Process + instances. If no parents are known return an empty list. + """ + parents = [] + proc = self.parent() + while proc is not None: + parents.append(proc) + proc = proc.parent() + return parents + def is_running(self): """Return whether this process is running. It also checks if PID has been reused by another process in @@ -748,7 +775,7 @@ class Process(object): """ return self._proc.io_counters() - # Linux and Windows >= Vista only + # Linux and Windows if hasattr(_psplatform.Process, "ionice_get"): def ionice(self, ioclass=None, value=None): @@ -799,9 +826,6 @@ class Process(object): (and set). (Windows, Linux and BSD only). """ - # Automatically remove duplicates both on get and - # set (for get it's not really necessary, it's - # just for extra safety). if cpus is None: return list(set(self._proc.cpu_affinity_get())) else: @@ -825,7 +849,7 @@ class Process(object): """ return self._proc.cpu_num() - # Linux, macOS and Windows only + # Linux, macOS, Windows, Solaris, AIX if hasattr(_psplatform.Process, "environ"): def environ(self): @@ -1095,7 +1119,6 @@ class Process(object): return (value / float(total_phymem)) * 100 if hasattr(_psplatform.Process, "memory_maps"): - # Available everywhere except OpenBSD and NetBSD. 
def memory_maps(self, grouped=True): """Return process' mapped memory regions as a list of namedtuples whose fields are variable depending on the platform. @@ -1167,18 +1190,16 @@ class Process(object): "calling process (os.getpid()) instead of PID 0") try: os.kill(self.pid, sig) - except OSError as err: - if err.errno == errno.ESRCH: - if OPENBSD and pid_exists(self.pid): - # We do this because os.kill() lies in case of - # zombie processes. - raise ZombieProcess(self.pid, self._name, self._ppid) - else: - self._gone = True - raise NoSuchProcess(self.pid, self._name) - if err.errno in (errno.EPERM, errno.EACCES): - raise AccessDenied(self.pid, self._name) - raise + except ProcessLookupError: + if OPENBSD and pid_exists(self.pid): + # We do this because os.kill() lies in case of + # zombie processes. + raise ZombieProcess(self.pid, self._name, self._ppid) + else: + self._gone = True + raise NoSuchProcess(self.pid, self._name) + except PermissionError: + raise AccessDenied(self.pid, self._name) @_assert_pid_not_reused def send_signal(self, sig): @@ -1190,16 +1211,7 @@ class Process(object): if POSIX: self._send_signal(sig) else: # pragma: no cover - if sig == signal.SIGTERM: - self._proc.kill() - # py >= 2.7 - elif sig in (getattr(signal, "CTRL_C_EVENT", object()), - getattr(signal, "CTRL_BREAK_EVENT", object())): - self._proc.send_signal(sig) - else: - raise ValueError( - "only SIGTERM, CTRL_C_EVENT and CTRL_BREAK_EVENT signals " - "are supported on Windows") + self._proc.send_signal(sig) @_assert_pid_not_reused def suspend(self): @@ -1247,6 +1259,8 @@ class Process(object): def wait(self, timeout=None): """Wait for process to terminate and, if process is a children of os.getpid(), also return its exit code, else None. + On Windows there's no such limitation (exit code is always + returned). If the process is already terminated immediately return None instead of raising NoSuchProcess. @@ -1298,7 +1312,7 @@ class Popen(Process): http://bugs.python.org/issue6973. 
For a complete documentation refer to: - http://docs.python.org/library/subprocess.html + http://docs.python.org/3/library/subprocess.html """ def __init__(self, *args, **kwargs): @@ -1354,7 +1368,7 @@ class Popen(Process): _as_dict_attrnames = set( [x for x in dir(Process) if not x.startswith('_') and x not in ['send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait', - 'is_running', 'as_dict', 'parent', 'children', 'rlimit', + 'is_running', 'as_dict', 'parent', 'parents', 'children', 'rlimit', 'memory_info_ex', 'oneshot']]) @@ -1365,7 +1379,10 @@ _as_dict_attrnames = set( def pids(): """Return a list of current running PIDs.""" - return _psplatform.pids() + global _LOWEST_PID + ret = sorted(_psplatform.pids()) + _LOWEST_PID = ret[0] + return ret def pid_exists(pid): @@ -1387,6 +1404,7 @@ def pid_exists(pid): _pmap = {} +_lock = threading.Lock() def process_iter(attrs=None, ad_value=None): @@ -1414,21 +1432,26 @@ def process_iter(attrs=None, ad_value=None): proc = Process(pid) if attrs is not None: proc.info = proc.as_dict(attrs=attrs, ad_value=ad_value) - _pmap[proc.pid] = proc + with _lock: + _pmap[proc.pid] = proc return proc def remove(pid): - _pmap.pop(pid, None) + with _lock: + _pmap.pop(pid, None) a = set(pids()) b = set(_pmap.keys()) new_pids = a - b gone_pids = b - a - for pid in gone_pids: remove(pid) - for pid, proc in sorted(list(_pmap.items()) + - list(dict.fromkeys(new_pids).items())): + + with _lock: + ls = sorted(list(_pmap.items()) + + list(dict.fromkeys(new_pids).items())) + + for pid, proc in ls: try: if proc is None: # new process yield add(pid) @@ -1502,6 +1525,7 @@ def wait_procs(procs, timeout=None, callback=None): pass else: if returncode is not None or not proc.is_running(): + # Set new Process instance attribute. proc.returncode = returncode gone.add(proc) if callback is not None: @@ -1885,6 +1909,17 @@ if hasattr(_psplatform, "cpu_freq"): __all__.append("cpu_freq") +if hasattr(os, "getloadavg") or hasattr(_psplatform, "getloadavg"): + # Perform this hasattr check once on import time to either use the + # platform based code or proxy straight from the os module. + if hasattr(os, "getloadavg"): + getloadavg = os.getloadavg + else: + getloadavg = _psplatform.getloadavg + + __all__.append("getloadavg") + + # ===================================================================== # --- system memory related functions # ===================================================================== @@ -1910,7 +1945,7 @@ def virtual_memory(): - used: memory used, calculated differently depending on the platform and designed for informational purposes only: - macOS: active + inactive + wired + macOS: active + wired BSD: active + wired + cached Linux: total - free @@ -2306,19 +2341,16 @@ if WINDOWS: def test(): # pragma: no cover - """List info of all currently running processes emulating ps aux - output. 
- """ + from ._common import bytes2human + from ._compat import get_terminal_size + today_day = datetime.date.today() - templ = "%-10s %5s %4s %7s %7s %-13s %5s %7s %s" - attrs = ['pid', 'memory_percent', 'name', 'cpu_times', 'create_time', - 'memory_info'] - if POSIX: - attrs.append('uids') - attrs.append('terminal') - print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "TTY", "START", "TIME", - "COMMAND")) - for p in process_iter(attrs=attrs, ad_value=''): + templ = "%-10s %5s %5s %7s %7s %5s %6s %6s %6s %s" + attrs = ['pid', 'memory_percent', 'name', 'cmdline', 'cpu_times', + 'create_time', 'memory_info', 'status', 'nice', 'username'] + print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "NICE", + "STATUS", "START", "TIME", "CMDLINE")) + for p in process_iter(attrs, ad_value=None): if p.info['create_time']: ctime = datetime.datetime.fromtimestamp(p.info['create_time']) if ctime.date() == today_day: @@ -2327,30 +2359,46 @@ def test(): # pragma: no cover ctime = ctime.strftime("%b%d") else: ctime = '' - cputime = time.strftime("%M:%S", - time.localtime(sum(p.info['cpu_times']))) - try: - user = p.username() - except Error: - user = '' - if WINDOWS and '\\' in user: + if p.info['cpu_times']: + cputime = time.strftime("%M:%S", + time.localtime(sum(p.info['cpu_times']))) + else: + cputime = '' + + user = p.info['username'] or '' + if not user and POSIX: + try: + user = p.uids()[0] + except Error: + pass + if user and WINDOWS and '\\' in user: user = user.split('\\')[1] - vms = p.info['memory_info'] and \ - int(p.info['memory_info'].vms / 1024) or '?' - rss = p.info['memory_info'] and \ - int(p.info['memory_info'].rss / 1024) or '?' - memp = p.info['memory_percent'] and \ - round(p.info['memory_percent'], 1) or '?' - print(templ % ( + user = user[:9] + vms = bytes2human(p.info['memory_info'].vms) if \ + p.info['memory_info'] is not None else '' + rss = bytes2human(p.info['memory_info'].rss) if \ + p.info['memory_info'] is not None else '' + memp = round(p.info['memory_percent'], 1) if \ + p.info['memory_percent'] is not None else '' + nice = int(p.info['nice']) if p.info['nice'] else '' + if p.info['cmdline']: + cmdline = ' '.join(p.info['cmdline']) + else: + cmdline = p.info['name'] + status = p.info['status'][:5] if p.info['status'] else '' + + line = templ % ( user[:10], p.info['pid'], memp, vms, rss, - p.info.get('terminal', '') or '?', + nice, + status, ctime, cputime, - p.info['name'].strip() or '?')) + cmdline) + print(line[:get_terminal_size()[0]]) del memoize, memoize_when_activated, division, deprecated_method diff --git a/server/www/packages/packages-linux/x64/psutil/_common.py b/server/www/packages/packages-linux/x64/psutil/_common.py index bee9579..17b6eeb 100644 --- a/server/www/packages/packages-linux/x64/psutil/_common.py +++ b/server/www/packages/packages-linux/x64/psutil/_common.py @@ -7,7 +7,7 @@ # Note: this module is imported by setup.py so it should not import # psutil or third-party modules. 
-from __future__ import division
+from __future__ import division, print_function
 
 import contextlib
 import errno
@@ -23,6 +23,7 @@ from collections import namedtuple
 from socket import AF_INET
 from socket import SOCK_DGRAM
 from socket import SOCK_STREAM
+
 try:
     from socket import AF_INET6
 except ImportError:
@@ -37,14 +38,14 @@ if sys.version_info >= (3, 4):
 else:
     enum = None
 
+
 # can't take it from _common.py as this script is imported by setup.py
 PY3 = sys.version_info[0] == 3
 
 __all__ = [
-    # constants
+    # OS constants
    'FREEBSD', 'BSD', 'LINUX', 'NETBSD', 'OPENBSD', 'MACOS', 'OSX', 'POSIX',
    'SUNOS', 'WINDOWS',
-    'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
    # connection constants
    'CONN_CLOSE', 'CONN_CLOSE_WAIT', 'CONN_CLOSING', 'CONN_ESTABLISHED',
    'CONN_FIN_WAIT1', 'CONN_FIN_WAIT2', 'CONN_LAST_ACK', 'CONN_LISTEN',
@@ -56,6 +57,8 @@ __all__ = [
    'STATUS_RUNNING', 'STATUS_SLEEPING', 'STATUS_STOPPED', 'STATUS_SUSPENDED',
    'STATUS_TRACING_STOP', 'STATUS_WAITING', 'STATUS_WAKE_KILL',
    'STATUS_WAKING', 'STATUS_ZOMBIE', 'STATUS_PARKED',
+    # other constants
+    'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
    # named tuples
    'pconn', 'pcputimes', 'pctxsw', 'pgids', 'pio', 'pionice', 'popenfile',
    'pthread', 'puids', 'sconn', 'scpustats', 'sdiskio', 'sdiskpart',
@@ -64,6 +67,9 @@ __all__ = [
    'conn_tmap', 'deprecated_method', 'isfile_strict', 'memoize',
    'parse_environ_block', 'path_exists_strict', 'usage_percent',
    'supports_ipv6', 'sockfam_to_enum', 'socktype_to_enum', "wrap_numbers",
+    'bytes2human', 'conn_to_ntuple', 'debug',
+    # shell utils
+    'hilite', 'term_supports_colors', 'print_color',
 ]
@@ -256,7 +262,109 @@ if AF_UNIX is not None:
         "unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
     })
 
-del AF_INET, AF_UNIX, SOCK_STREAM, SOCK_DGRAM
+
+# =====================================================================
+# --- Exceptions
+# =====================================================================
+
+
+class Error(Exception):
+    """Base exception class. All other psutil exceptions inherit
+    from this one.
+    """
+    __module__ = 'psutil'
+
+    def __init__(self, msg=""):
+        Exception.__init__(self, msg)
+        self.msg = msg
+
+    def __repr__(self):
+        ret = "psutil.%s %s" % (self.__class__.__name__, self.msg)
+        return ret.strip()
+
+    __str__ = __repr__
+
+
+class NoSuchProcess(Error):
+    """Exception raised when a process with a certain PID doesn't
+    or no longer exists.
+    """
+    __module__ = 'psutil'
+
+    def __init__(self, pid, name=None, msg=None):
+        Error.__init__(self, msg)
+        self.pid = pid
+        self.name = name
+        self.msg = msg
+        if msg is None:
+            if name:
+                details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
+            else:
+                details = "(pid=%s)" % self.pid
+            self.msg = "process no longer exists " + details
+
+
+class ZombieProcess(NoSuchProcess):
+    """Exception raised when querying a zombie process. This is
+    raised on macOS, BSD and Solaris only, and not always: depending
+    on the query the OS may be able to succeed anyway.
+    On Linux all zombie processes are queryable (hence this is never
+    raised). Windows doesn't have zombie processes.
+ """ + __module__ = 'psutil' + + def __init__(self, pid, name=None, ppid=None, msg=None): + NoSuchProcess.__init__(self, msg) + self.pid = pid + self.ppid = ppid + self.name = name + self.msg = msg + if msg is None: + args = ["pid=%s" % pid] + if name: + args.append("name=%s" % repr(self.name)) + if ppid: + args.append("ppid=%s" % self.ppid) + details = "(%s)" % ", ".join(args) + self.msg = "process still exists but it's a zombie " + details + + +class AccessDenied(Error): + """Exception raised when permission to perform an action is denied.""" + __module__ = 'psutil' + + def __init__(self, pid=None, name=None, msg=None): + Error.__init__(self, msg) + self.pid = pid + self.name = name + self.msg = msg + if msg is None: + if (pid is not None) and (name is not None): + self.msg = "(pid=%s, name=%s)" % (pid, repr(name)) + elif (pid is not None): + self.msg = "(pid=%s)" % self.pid + else: + self.msg = "" + + +class TimeoutExpired(Error): + """Raised on Process.wait(timeout) if timeout expires and process + is still alive. + """ + __module__ = 'psutil' + + def __init__(self, seconds, pid=None, name=None): + Error.__init__(self, "timeout after %s seconds" % seconds) + self.seconds = seconds + self.pid = pid + self.name = name + if (pid is not None) and (name is not None): + self.msg += " (pid=%s, name=%s)" % (pid, repr(name)) + elif (pid is not None): + self.msg += " (pid=%s)" % self.pid # =================================================================== @@ -267,12 +375,12 @@ del AF_INET, AF_UNIX, SOCK_STREAM, SOCK_DGRAM def usage_percent(used, total, round_=None): """Calculate percentage usage of 'used' against 'total'.""" try: - ret = (used / total) * 100 + ret = (float(used) / total) * 100 except ZeroDivisionError: - ret = 0.0 if isinstance(used, float) or isinstance(total, float) else 0 - if round_ is not None: - return round(ret, round_) + return 0.0 else: + if round_ is not None: + ret = round(ret, round_) return ret @@ -327,7 +435,7 @@ def memoize_when_activated(fun): 1 >>> >>> # activated - >>> foo.cache_activate() + >>> foo.cache_activate(self) >>> foo() 1 >>> foo() @@ -336,26 +444,30 @@ def memoize_when_activated(fun): """ @functools.wraps(fun) def wrapper(self): - if not wrapper.cache_activated: + try: + # case 1: we previously entered oneshot() ctx + ret = self._cache[fun] + except AttributeError: + # case 2: we never entered oneshot() ctx return fun(self) - else: - try: - ret = cache[fun] - except KeyError: - ret = cache[fun] = fun(self) - return ret + except KeyError: + # case 3: we entered oneshot() ctx but there's no cache + # for this entry yet + ret = self._cache[fun] = fun(self) + return ret - def cache_activate(): - """Activate cache.""" - wrapper.cache_activated = True + def cache_activate(proc): + """Activate cache. Expects a Process instance. 
Cache will be + stored as a "_cache" instance attribute.""" + proc._cache = {} - def cache_deactivate(): + def cache_deactivate(proc): """Deactivate and clear cache.""" - wrapper.cache_activated = False - cache.clear() + try: + del proc._cache + except AttributeError: + pass - cache = {} - wrapper.cache_activated = False wrapper.cache_activate = cache_activate wrapper.cache_deactivate = cache_deactivate return wrapper @@ -442,7 +554,7 @@ def sockfam_to_enum(num): else: # pragma: no cover try: return socket.AddressFamily(num) - except (ValueError, AttributeError): + except ValueError: return num @@ -454,11 +566,30 @@ def socktype_to_enum(num): return num else: # pragma: no cover try: - return socket.AddressType(num) - except (ValueError, AttributeError): + return socket.SocketKind(num) + except ValueError: return num +def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map, pid=None): + """Convert a raw connection tuple to a proper ntuple.""" + if fam in (socket.AF_INET, AF_INET6): + if laddr: + laddr = addr(*laddr) + if raddr: + raddr = addr(*raddr) + if type_ == socket.SOCK_STREAM and fam in (AF_INET, AF_INET6): + status = status_map.get(status, CONN_NONE) + else: + status = CONN_NONE # ignore whatever C returned to us + fam = sockfam_to_enum(fam) + type_ = socktype_to_enum(type_) + if pid is None: + return pconn(fd, fam, type_, laddr, raddr, status) + else: + return sconn(fd, fam, type_, laddr, raddr, status, pid) + + def deprecated_method(replacement): """A decorator which can be used to mark a method as deprecated 'replcement' is the method name which will be called instead. @@ -471,7 +602,7 @@ def deprecated_method(replacement): @functools.wraps(fun) def inner(self, *args, **kwargs): - warnings.warn(msg, category=FutureWarning, stacklevel=2) + warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return getattr(self, replacement)(*args, **kwargs) return inner return outer @@ -594,3 +725,122 @@ def open_text(fname, **kwargs): kwargs.setdefault('encoding', ENCODING) kwargs.setdefault('errors', ENCODING_ERRS) return open(fname, "rt", **kwargs) + + +def bytes2human(n, format="%(value).1f%(symbol)s"): + """Used by various scripts. 
See:
+    http://goo.gl/zeJZl
+
+    >>> bytes2human(10000)
+    '9.8K'
+    >>> bytes2human(100001221)
+    '95.4M'
+    """
+    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+    prefix = {}
+    for i, s in enumerate(symbols[1:]):
+        prefix[s] = 1 << (i + 1) * 10
+    for symbol in reversed(symbols[1:]):
+        if n >= prefix[symbol]:
+            value = float(n) / prefix[symbol]
+            return format % locals()
+    return format % dict(symbol=symbols[0], value=n)
+
+
+def get_procfs_path():
+    """Return updated psutil.PROCFS_PATH constant."""
+    return sys.modules['psutil'].PROCFS_PATH
+
+
+if PY3:
+    def decode(s):
+        return s.decode(encoding=ENCODING, errors=ENCODING_ERRS)
+else:
+    def decode(s):
+        return s
+
+
+# =====================================================================
+# --- shell utils
+# =====================================================================
+
+
+@memoize
+def term_supports_colors(file=sys.stdout):
+    if os.name == 'nt':
+        return True
+    try:
+        import curses
+        assert file.isatty()
+        curses.setupterm()
+        assert curses.tigetnum("colors") > 0
+    except Exception:
+        return False
+    else:
+        return True
+
+
+def hilite(s, color="green", bold=False):
+    """Return a highlighted version of 'string'."""
+    if not term_supports_colors():
+        return s
+    attr = []
+    colors = dict(green='32', red='91', brown='33')
+    colors[None] = '29'
+    try:
+        color = colors[color]
+    except KeyError:
+        raise ValueError("invalid color %r; choose between %s" % (
+            color, list(colors.keys())))
+    attr.append(color)
+    if bold:
+        attr.append('1')
+    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
+
+
+def print_color(s, color="green", bold=False, file=sys.stdout):
+    """Print a colorized version of string."""
+    if not term_supports_colors():
+        print(s, file=file)
+    elif POSIX:
+        print(hilite(s, color, bold), file=file)
+    else:
+        import ctypes
+
+        DEFAULT_COLOR = 7
+        GetStdHandle = ctypes.windll.Kernel32.GetStdHandle
+        SetConsoleTextAttribute = \
+            ctypes.windll.Kernel32.SetConsoleTextAttribute
+
+        colors = dict(green=2, red=4, brown=6)
+        colors[None] = DEFAULT_COLOR
+        try:
+            color = colors[color]
+        except KeyError:
+            raise ValueError("invalid color %r; choose between %r" % (
+                color, list(colors.keys())))
+        if bold and color <= 7:
+            color += 8
+
+        handle_id = -12 if file is sys.stderr else -11
+        GetStdHandle.restype = ctypes.c_ulong
+        handle = GetStdHandle(handle_id)
+        SetConsoleTextAttribute(handle, color)
+        try:
+            print(s, file=file)
+        finally:
+            SetConsoleTextAttribute(handle, DEFAULT_COLOR)
+
+
+if bool(os.getenv('PSUTIL_DEBUG', 0)):
+    import inspect
+
+    def debug(msg):
+        """If PSUTIL_DEBUG env var is set, print a debug message to stderr."""
+        fname, lineno, func_name, lines, index = inspect.getframeinfo(
+            inspect.currentframe().f_back)
+        print("psutil-debug [%s:%s]> %s" % (fname, lineno, msg),
+              file=sys.stderr)
+else:
+    def debug(msg):
+        pass
diff --git a/server/www/packages/packages-linux/x64/psutil/_compat.py b/server/www/packages/packages-linux/x64/psutil/_compat.py
index 08aefe4..a937138 100644
--- a/server/www/packages/packages-linux/x64/psutil/_compat.py
+++ b/server/www/packages/packages-linux/x64/psutil/_compat.py
@@ -5,12 +5,15 @@
 """Module which provides compatibility with older Python versions."""
 
 import collections
+import errno
 import functools
 import os
 import sys
 
 __all__ = ["PY3", "long", "xrange", "unicode", "basestring", "u", "b",
-           "lru_cache", "which"]
+           "lru_cache", "which", "get_terminal_size",
+           "FileNotFoundError", "PermissionError", "ProcessLookupError",
+           "InterruptedError", "ChildProcessError", "FileExistsError"]
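The shell helpers added to _common.py above (term_supports_colors(), hilite(), print_color(), debug()) exist for psutil's own test suite and scripts rather than for the public API; a small illustrative sketch, importing from the private module:

    import sys
    from psutil._common import hilite, print_color  # private helpers, not public API

    # hilite() wraps the text in ANSI SGR escapes when the terminal
    # supports color and returns it unchanged otherwise.
    print(hilite("3 tests failed", color="red", bold=True))

    # print_color() prints plainly on dumb terminals and, on Windows,
    # uses SetConsoleTextAttribute instead of ANSI escapes.
    print_color("all tests passed", color="green", file=sys.stdout)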
PY3 = sys.version_info[0] == 3 @@ -38,6 +41,86 @@ else: return s +# --- exceptions + + +if PY3: + FileNotFoundError = FileNotFoundError # NOQA + PermissionError = PermissionError # NOQA + ProcessLookupError = ProcessLookupError # NOQA + InterruptedError = InterruptedError # NOQA + ChildProcessError = ChildProcessError # NOQA + FileExistsError = FileExistsError # NOQA +else: + # https://github.com/PythonCharmers/python-future/blob/exceptions/ + # src/future/types/exceptions/pep3151.py + import platform + + _singleton = object() + + def instance_checking_exception(base_exception=Exception): + def wrapped(instance_checker): + class TemporaryClass(base_exception): + + def __init__(self, *args, **kwargs): + if len(args) == 1 and isinstance(args[0], TemporaryClass): + unwrap_me = args[0] + for attr in dir(unwrap_me): + if not attr.startswith('__'): + setattr(self, attr, getattr(unwrap_me, attr)) + else: + super(TemporaryClass, self).__init__(*args, **kwargs) + + class __metaclass__(type): + def __instancecheck__(cls, inst): + return instance_checker(inst) + + def __subclasscheck__(cls, classinfo): + value = sys.exc_info()[1] + return isinstance(value, cls) + + TemporaryClass.__name__ = instance_checker.__name__ + TemporaryClass.__doc__ = instance_checker.__doc__ + return TemporaryClass + + return wrapped + + @instance_checking_exception(EnvironmentError) + def FileNotFoundError(inst): + return getattr(inst, 'errno', _singleton) == errno.ENOENT + + @instance_checking_exception(EnvironmentError) + def ProcessLookupError(inst): + return getattr(inst, 'errno', _singleton) == errno.ESRCH + + @instance_checking_exception(EnvironmentError) + def PermissionError(inst): + return getattr(inst, 'errno', _singleton) in ( + errno.EACCES, errno.EPERM) + + @instance_checking_exception(EnvironmentError) + def InterruptedError(inst): + return getattr(inst, 'errno', _singleton) == errno.EINTR + + @instance_checking_exception(EnvironmentError) + def ChildProcessError(inst): + return getattr(inst, 'errno', _singleton) == errno.ECHILD + + @instance_checking_exception(EnvironmentError) + def FileExistsError(inst): + return getattr(inst, 'errno', _singleton) == errno.EEXIST + + if platform.python_implementation() != "CPython": + try: + raise OSError(errno.EEXIST, "perm") + except FileExistsError: + pass + except OSError: + raise RuntimeError( + "broken / incompatible Python implementation, see: " + "https://github.com/giampaolo/psutil/issues/1659") + + # --- stdlib additions @@ -239,3 +322,24 @@ except ImportError: if _access_check(name, mode): return name return None + + +# python 3.3 +try: + from shutil import get_terminal_size +except ImportError: + def get_terminal_size(fallback=(80, 24)): + try: + import fcntl + import termios + import struct + except ImportError: + return fallback + else: + try: + # This should work on Linux. + res = struct.unpack( + 'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '1234')) + return (res[1], res[0]) + except Exception: + return fallback diff --git a/server/www/packages/packages-linux/x64/psutil/_exceptions.py b/server/www/packages/packages-linux/x64/psutil/_exceptions.py deleted file mode 100644 index 6dbbd28..0000000 --- a/server/www/packages/packages-linux/x64/psutil/_exceptions.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -class Error(Exception): - """Base exception class. All other psutil exceptions inherit - from this one. 
- """ - - def __init__(self, msg=""): - Exception.__init__(self, msg) - self.msg = msg - - def __repr__(self): - ret = "psutil.%s %s" % (self.__class__.__name__, self.msg) - return ret.strip() - - __str__ = __repr__ - - -class NoSuchProcess(Error): - """Exception raised when a process with a certain PID doesn't - or no longer exists. - """ - - def __init__(self, pid, name=None, msg=None): - Error.__init__(self, msg) - self.pid = pid - self.name = name - self.msg = msg - if msg is None: - if name: - details = "(pid=%s, name=%s)" % (self.pid, repr(self.name)) - else: - details = "(pid=%s)" % self.pid - self.msg = "process no longer exists " + details - - -class ZombieProcess(NoSuchProcess): - """Exception raised when querying a zombie process. This is - raised on macOS, BSD and Solaris only, and not always: depending - on the query the OS may be able to succeed anyway. - On Linux all zombie processes are querable (hence this is never - raised). Windows doesn't have zombie processes. - """ - - def __init__(self, pid, name=None, ppid=None, msg=None): - NoSuchProcess.__init__(self, msg) - self.pid = pid - self.ppid = ppid - self.name = name - self.msg = msg - if msg is None: - args = ["pid=%s" % pid] - if name: - args.append("name=%s" % repr(self.name)) - if ppid: - args.append("ppid=%s" % self.ppid) - details = "(%s)" % ", ".join(args) - self.msg = "process still exists but it's a zombie " + details - - -class AccessDenied(Error): - """Exception raised when permission to perform an action is denied.""" - - def __init__(self, pid=None, name=None, msg=None): - Error.__init__(self, msg) - self.pid = pid - self.name = name - self.msg = msg - if msg is None: - if (pid is not None) and (name is not None): - self.msg = "(pid=%s, name=%s)" % (pid, repr(name)) - elif (pid is not None): - self.msg = "(pid=%s)" % self.pid - else: - self.msg = "" - - -class TimeoutExpired(Error): - """Raised on Process.wait(timeout) if timeout expires and process - is still alive. - """ - - def __init__(self, seconds, pid=None, name=None): - Error.__init__(self, "timeout after %s seconds" % seconds) - self.seconds = seconds - self.pid = pid - self.name = name - if (pid is not None) and (name is not None): - self.msg += " (pid=%s, name=%s)" % (pid, repr(name)) - elif (pid is not None): - self.msg += " (pid=%s)" % self.pid diff --git a/server/www/packages/packages-linux/x64/psutil/_psaix.py b/server/www/packages/packages-linux/x64/psutil/_psaix.py index 7ba212d..994366a 100644 --- a/server/www/packages/packages-linux/x64/psutil/_psaix.py +++ b/server/www/packages/packages-linux/x64/psutil/_psaix.py @@ -6,31 +6,32 @@ """AIX platform implementation.""" -import errno +import functools import glob import os import re import subprocess import sys from collections import namedtuple -from socket import AF_INET from . import _common from . import _psposix from . import _psutil_aix as cext from . 
import _psutil_posix as cext_posix -from ._common import AF_INET6 +from ._common import AccessDenied +from ._common import conn_to_ntuple +from ._common import get_procfs_path from ._common import memoize_when_activated from ._common import NIC_DUPLEX_FULL from ._common import NIC_DUPLEX_HALF from ._common import NIC_DUPLEX_UNKNOWN -from ._common import sockfam_to_enum -from ._common import socktype_to_enum +from ._common import NoSuchProcess from ._common import usage_percent +from ._common import ZombieProcess +from ._compat import FileNotFoundError +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = ["PROCFS_PATH"] @@ -42,6 +43,8 @@ __extra__all__ = ["PROCFS_PATH"] HAS_THREADS = hasattr(cext, "proc_threads") +HAS_NET_IO_COUNTERS = hasattr(cext, "net_io_counters") +HAS_PROC_IO_COUNTERS = hasattr(cext, "proc_io_counters") PAGE_SIZE = os.sysconf('SC_PAGE_SIZE') AF_LINK = cext_posix.AF_LINK @@ -93,21 +96,6 @@ pfullmem = pmem scputimes = namedtuple('scputimes', ['user', 'system', 'idle', 'iowait']) # psutil.virtual_memory() svmem = namedtuple('svmem', ['total', 'available', 'percent', 'used', 'free']) -# psutil.Process.memory_maps(grouped=True) -pmmap_grouped = namedtuple('pmmap_grouped', ['path', 'rss', 'anon', 'locked']) -# psutil.Process.memory_maps(grouped=False) -pmmap_ext = namedtuple( - 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) - - -# ===================================================================== -# --- utils -# ===================================================================== - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH # ===================================================================== @@ -212,7 +200,9 @@ def disk_partitions(all=False): net_if_addrs = cext_posix.net_if_addrs -net_io_counters = cext.net_io_counters + +if HAS_NET_IO_COUNTERS: + net_io_counters = cext.net_io_counters def net_connections(kind, _pid=-1): @@ -225,27 +215,17 @@ def net_connections(kind, _pid=-1): % (kind, ', '.join([repr(x) for x in cmap]))) families, types = _common.conn_tmap[kind] rawlist = cext.net_connections(_pid) - ret = set() + ret = [] for item in rawlist: fd, fam, type_, laddr, raddr, status, pid = item if fam not in families: continue if type_ not in types: continue - status = TCP_STATUSES[status] - if fam in (AF_INET, AF_INET6): - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - fam = sockfam_to_enum(fam) - type_ = socktype_to_enum(type_) - if _pid == -1: - nt = _common.sconn(fd, fam, type_, laddr, raddr, status, pid) - else: - nt = _common.pconn(fd, fam, type_, laddr, raddr, status) - ret.add(nt) - return list(ret) + nt = conn_to_ntuple(fd, fam, type_, laddr, raddr, status, + TCP_STATUSES, pid=pid if _pid == -1 else None) + ret.append(nt) + return ret def net_if_stats(): @@ -328,33 +308,27 @@ def wrap_exceptions(fun): """Call callable into a try/except clause and translate ENOENT, EACCES and EPERM in NoSuchProcess or AccessDenied exceptions. 
""" - + @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) - except EnvironmentError as err: - # support for private module import - if (NoSuchProcess is None or AccessDenied is None or - ZombieProcess is None): - raise + except (FileNotFoundError, ProcessLookupError): # ENOENT (no such file or directory) gets raised on open(). # ESRCH (no such process) can get raised on read() if # process is gone in meantime. - if err.errno in (errno.ENOENT, errno.ESRCH): - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - else: - raise ZombieProcess(self.pid, self._name, self._ppid) - if err.errno in (errno.EPERM, errno.EACCES): - raise AccessDenied(self.pid, self._name) - raise + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + else: + raise ZombieProcess(self.pid, self._name, self._ppid) + except PermissionError: + raise AccessDenied(self.pid, self._name) return wrapper class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid", "_procfs_path"] + __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"] def __init__(self, pid): self.pid = pid @@ -363,23 +337,19 @@ class Process(object): self._procfs_path = get_procfs_path() def oneshot_enter(self): - self._proc_name_and_args.cache_activate() - self._proc_basic_info.cache_activate() - self._proc_cred.cache_activate() + self._proc_basic_info.cache_activate(self) + self._proc_cred.cache_activate(self) def oneshot_exit(self): - self._proc_name_and_args.cache_deactivate() - self._proc_basic_info.cache_deactivate() - self._proc_cred.cache_deactivate() - - @memoize_when_activated - def _proc_name_and_args(self): - return cext.proc_name_and_args(self.pid, self._procfs_path) + self._proc_basic_info.cache_deactivate(self) + self._proc_cred.cache_deactivate(self) + @wrap_exceptions @memoize_when_activated def _proc_basic_info(self): return cext.proc_basic_info(self.pid, self._procfs_path) + @wrap_exceptions @memoize_when_activated def _proc_cred(self): return cext.proc_cred(self.pid, self._procfs_path) @@ -388,22 +358,25 @@ class Process(object): def name(self): if self.pid == 0: return "swapper" - # note: this is limited to 15 characters - return self._proc_name_and_args()[0].rstrip("\x00") + # note: max 16 characters + return cext.proc_name(self.pid, self._procfs_path).rstrip("\x00") @wrap_exceptions def exe(self): # there is no way to get executable path in AIX other than to guess, # and guessing is more complex than what's in the wrapping class - exe = self.cmdline()[0] + cmdline = self.cmdline() + if not cmdline: + return '' + exe = cmdline[0] if os.path.sep in exe: # relative or absolute path if not os.path.isabs(exe): # if cwd has changed, we're out of luck - this may be wrong! 
exe = os.path.abspath(os.path.join(self.cwd(), exe)) if (os.path.isabs(exe) and - os.path.isfile(exe) and - os.access(exe, os.X_OK)): + os.path.isfile(exe) and + os.access(exe, os.X_OK)): return exe # not found, move to search in PATH using basename only exe = os.path.basename(exe) @@ -411,13 +384,17 @@ class Process(object): for path in os.environ["PATH"].split(":"): possible_exe = os.path.abspath(os.path.join(path, exe)) if (os.path.isfile(possible_exe) and - os.access(possible_exe, os.X_OK)): + os.access(possible_exe, os.X_OK)): return possible_exe return '' @wrap_exceptions def cmdline(self): - return self._proc_name_and_args()[1].split(' ') + return cext.proc_args(self.pid) + + @wrap_exceptions + def environ(self): + return cext.proc_environ(self.pid) @wrap_exceptions def create_time(self): @@ -503,11 +480,9 @@ class Process(object): try: result = os.readlink("%s/%s/cwd" % (procfs_path, self.pid)) return result.rstrip('/') - except OSError as err: - if err.errno == errno.ENOENT: - os.stat("%s/%s" % (procfs_path, self.pid)) # raise NSP or AD - return None - raise + except FileNotFoundError: + os.stat("%s/%s" % (procfs_path, self.pid)) # raise NSP or AD + return None @wrap_exceptions def memory_info(self): @@ -561,14 +536,15 @@ class Process(object): def wait(self, timeout=None): return _psposix.wait_pid(self.pid, timeout, self._name) - @wrap_exceptions - def io_counters(self): - try: - rc, wc, rb, wb = cext.proc_io_counters(self.pid) - except OSError: - # if process is terminated, proc_io_counters returns OSError - # instead of NSP - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - raise - return _common.pio(rc, wc, rb, wb) + if HAS_PROC_IO_COUNTERS: + @wrap_exceptions + def io_counters(self): + try: + rc, wc, rb, wb = cext.proc_io_counters(self.pid) + except OSError: + # if process is terminated, proc_io_counters returns OSError + # instead of NSP + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + raise + return _common.pio(rc, wc, rb, wb) diff --git a/server/www/packages/packages-linux/x64/psutil/_psbsd.py b/server/www/packages/packages-linux/x64/psutil/_psbsd.py index c2896cb..49ad1e9 100644 --- a/server/www/packages/packages-linux/x64/psutil/_psbsd.py +++ b/server/www/packages/packages-linux/x64/psutil/_psbsd.py @@ -10,26 +10,28 @@ import functools import os import xml.etree.ElementTree as ET from collections import namedtuple -from socket import AF_INET +from collections import defaultdict from . import _common from . import _psposix from . import _psutil_bsd as cext from . 
import _psutil_posix as cext_posix -from ._common import AF_INET6 +from ._common import AccessDenied from ._common import conn_tmap +from ._common import conn_to_ntuple from ._common import FREEBSD from ._common import memoize from ._common import memoize_when_activated from ._common import NETBSD +from ._common import NoSuchProcess from ._common import OPENBSD -from ._common import sockfam_to_enum -from ._common import socktype_to_enum from ._common import usage_percent +from ._common import ZombieProcess +from ._compat import FileNotFoundError +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import which -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess + __extra__all__ = [] @@ -394,22 +396,8 @@ def net_connections(kind): fd, fam, type, laddr, raddr, status, pid = item # TODO: apply filter at C level if fam in families and type in types: - try: - status = TCP_STATUSES[status] - except KeyError: - # XXX: Not sure why this happens. I saw this occurring - # with IPv6 sockets opened by 'vim'. Those sockets - # have a very short lifetime so maybe the kernel - # can't initialize their status? - status = TCP_STATUSES[cext.PSUTIL_CONN_NONE] - if fam in (AF_INET, AF_INET6): - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - fam = sockfam_to_enum(fam) - type = socktype_to_enum(type) - nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid) + nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, + TCP_STATUSES, pid) ret.add(nt) return list(ret) @@ -437,6 +425,47 @@ if FREEBSD: secsleft = minsleft * 60 return _common.sbattery(percent, secsleft, power_plugged) + def sensors_temperatures(): + "Return CPU cores temperatures if available, else an empty dict." + ret = defaultdict(list) + num_cpus = cpu_count_logical() + for cpu in range(num_cpus): + try: + current, high = cext.sensors_cpu_temperature(cpu) + if high <= 0: + high = None + name = "Core %s" % cpu + ret["coretemp"].append( + _common.shwtemp(name, current, high, high)) + except NotImplementedError: + pass + + return ret + + def cpu_freq(): + """Return frequency metrics for CPUs. As of Dec 2018 only + CPU 0 appears to be supported by FreeBSD and all other cores + match the frequency of CPU 0. + """ + ret = [] + num_cpus = cpu_count_logical() + for cpu in range(num_cpus): + try: + current, available_freq = cext.cpu_frequency(cpu) + except NotImplementedError: + continue + if available_freq: + try: + min_freq = int(available_freq.split(" ")[-1].split("/")[0]) + except(IndexError, ValueError): + min_freq = None + try: + max_freq = int(available_freq.split(" ")[0].split("/")[0]) + except(IndexError, ValueError): + max_freq = None + ret.append(_common.scpufreq(current, min_freq, max_freq)) + return ret + # ===================================================================== # --- other system functions @@ -505,6 +534,14 @@ else: pid_exists = _psposix.pid_exists +def is_zombie(pid): + try: + st = cext.proc_oneshot_info(pid)[kinfo_proc_map['status']] + return st == cext.SZOMB + except Exception: + return False + + def wrap_exceptions(fun): """Decorator which translates bare OSError exceptions into NoSuchProcess and AccessDenied. 
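The hunk below rewrites the BSD wrap_exceptions the same way the AIX version above was rewritten: instead of inspecting err.errno, the code catches the PEP 3151 exception classes (real built-ins on Python 3, the _compat.py backports on Python 2) and maps them onto psutil's exceptions. A condensed sketch of that translation; probe() is a hypothetical helper, not part of psutil:

    import os
    from psutil._compat import PermissionError, ProcessLookupError  # PEP 3151 names

    def probe(pid):
        # Same mapping the wrap_exceptions decorators perform.
        try:
            os.kill(pid, 0)          # signal 0: existence/permission check only
        except ProcessLookupError:   # previously: err.errno == errno.ESRCH
            return "gone"
        except PermissionError:      # previously: err.errno in (EPERM, EACCES)
            return "denied"
        return "running"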
@@ -513,19 +550,19 @@ def wrap_exceptions(fun): def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) - except OSError as err: + except ProcessLookupError: + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + else: + raise ZombieProcess(self.pid, self._name, self._ppid) + except PermissionError: + raise AccessDenied(self.pid, self._name) + except OSError: if self.pid == 0: if 0 in pids(): raise AccessDenied(self.pid, self._name) else: raise - if err.errno == errno.ESRCH: - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - else: - raise ZombieProcess(self.pid, self._name, self._ppid) - if err.errno in (errno.EPERM, errno.EACCES): - raise AccessDenied(self.pid, self._name) raise return wrapper @@ -535,30 +572,35 @@ def wrap_exceptions_procfs(inst): """Same as above, for routines relying on reading /proc fs.""" try: yield - except EnvironmentError as err: + except (ProcessLookupError, FileNotFoundError): # ENOENT (no such file or directory) gets raised on open(). # ESRCH (no such process) can get raised on read() if # process is gone in meantime. - if err.errno in (errno.ENOENT, errno.ESRCH): - if not pid_exists(inst.pid): - raise NoSuchProcess(inst.pid, inst._name) - else: - raise ZombieProcess(inst.pid, inst._name, inst._ppid) - if err.errno in (errno.EPERM, errno.EACCES): - raise AccessDenied(inst.pid, inst._name) - raise + if not pid_exists(inst.pid): + raise NoSuchProcess(inst.pid, inst._name) + else: + raise ZombieProcess(inst.pid, inst._name, inst._ppid) + except PermissionError: + raise AccessDenied(inst.pid, inst._name) class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid self._name = None self._ppid = None + def _assert_alive(self): + """Raise NSP if the process disappeared on us.""" + # For those C function who do not raise NSP, possibly returning + # incorrect or incomplete result. + cext.proc_name(self.pid) + + @wrap_exceptions @memoize_when_activated def oneshot(self): """Retrieves multiple process info in one shot as a raw tuple.""" @@ -567,10 +609,10 @@ class Process(object): return ret def oneshot_enter(self): - self.oneshot.cache_activate() + self.oneshot.cache_activate(self) def oneshot_exit(self): - self.oneshot.cache_deactivate() + self.oneshot.cache_deactivate(self) @wrap_exceptions def name(self): @@ -580,6 +622,8 @@ class Process(object): @wrap_exceptions def exe(self): if FREEBSD: + if self.pid == 0: + return '' # else NSP return cext.proc_exe(self.pid) elif NETBSD: if self.pid == 0: @@ -595,7 +639,7 @@ class Process(object): # cmdline arg (may return None). cmdline = self.cmdline() if cmdline: - return which(cmdline[0]) + return which(cmdline[0]) or "" else: return "" @@ -612,10 +656,14 @@ class Process(object): return cext.proc_cmdline(self.pid) except OSError as err: if err.errno == errno.EINVAL: - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - else: + if is_zombie(self.pid): raise ZombieProcess(self.pid, self._name, self._ppid) + elif not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name, self._ppid) + else: + # XXX: this happens with unicode tests. It means the C + # routine is unable to decode invalid unicode chars. 
+ return [] else: raise else: @@ -705,10 +753,7 @@ class Process(object): ntuple = _common.pthread(thread_id, utime, stime) retlist.append(ntuple) if OPENBSD: - # On OpenBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). - self.name() # raise NSP if the process disappeared on us + self._assert_alive() return retlist @wrap_exceptions @@ -719,29 +764,16 @@ class Process(object): if NETBSD: families, types = conn_tmap[kind] - ret = set() + ret = [] rawlist = cext.net_connections(self.pid) for item in rawlist: fd, fam, type, laddr, raddr, status, pid = item assert pid == self.pid if fam in families and type in types: - try: - status = TCP_STATUSES[status] - except KeyError: - status = TCP_STATUSES[cext.PSUTIL_CONN_NONE] - if fam in (AF_INET, AF_INET6): - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - fam = sockfam_to_enum(fam) - type = socktype_to_enum(type) - nt = _common.pconn(fd, fam, type, laddr, raddr, status) - ret.add(nt) - # On NetBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). - self.name() # raise NSP if the process disappeared on us + nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, + TCP_STATUSES) + ret.append(nt) + self._assert_alive() return list(ret) families, types = conn_tmap[kind] @@ -749,21 +781,13 @@ class Process(object): ret = [] for item in rawlist: fd, fam, type, laddr, raddr, status = item - if fam in (AF_INET, AF_INET6): - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - fam = sockfam_to_enum(fam) - type = socktype_to_enum(type) - status = TCP_STATUSES[status] - nt = _common.pconn(fd, fam, type, laddr, raddr, status) + nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, + TCP_STATUSES) ret.append(nt) + if OPENBSD: - # On OpenBSD the underlying C function does not raise NSP - # in case the process is gone (and the returned list may - # incomplete). - self.name() # raise NSP if the process disappeared on us + self._assert_alive() + return ret @wrap_exceptions @@ -800,10 +824,7 @@ class Process(object): # it into None if OPENBSD and self.pid == 0: return None # ...else it would raise EINVAL - elif NETBSD: - with wrap_exceptions_procfs(self): - return os.readlink("/proc/%s/cwd" % self.pid) - elif HAS_PROC_OPEN_FILES: + elif NETBSD or HAS_PROC_OPEN_FILES: # FreeBSD < 8 does not support functions based on # kinfo_getfile() and kinfo_getvmmap() return cext.proc_cwd(self.pid) or None @@ -839,9 +860,7 @@ class Process(object): """Return the number of file descriptors opened by this process.""" ret = cext.proc_num_fds(self.pid) if NETBSD: - # On NetBSD the underlying C function does not raise NSP - # in case the process is gone. - self.name() # raise NSP if the process disappeared on us + self._assert_alive() return ret else: num_fds = _not_implemented diff --git a/server/www/packages/packages-linux/x64/psutil/_pslinux.py b/server/www/packages/packages-linux/x64/psutil/_pslinux.py index ecc4c70..9e32f25 100644 --- a/server/www/packages/packages-linux/x64/psutil/_pslinux.py +++ b/server/www/packages/packages-linux/x64/psutil/_pslinux.py @@ -25,27 +25,30 @@ from . import _common from . import _psposix from . import _psutil_linux as cext from . 
import _psutil_posix as cext_posix -from ._common import ENCODING -from ._common import ENCODING_ERRS +from ._common import AccessDenied +from ._common import debug +from ._common import decode +from ._common import get_procfs_path from ._common import isfile_strict from ._common import memoize from ._common import memoize_when_activated from ._common import NIC_DUPLEX_FULL from ._common import NIC_DUPLEX_HALF from ._common import NIC_DUPLEX_UNKNOWN +from ._common import NoSuchProcess from ._common import open_binary from ._common import open_text from ._common import parse_environ_block from ._common import path_exists_strict from ._common import supports_ipv6 from ._common import usage_percent +from ._common import ZombieProcess from ._compat import b from ._compat import basestring -from ._compat import long +from ._compat import FileNotFoundError +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess if sys.version_info >= (3, 4): import enum @@ -74,6 +77,7 @@ POWER_SUPPLY_PATH = "/sys/class/power_supply" HAS_SMAPS = os.path.exists('/proc/%s/smaps' % os.getpid()) HAS_PRLIMIT = hasattr(cext, "linux_prlimit") HAS_PROC_IO_PRIORITY = hasattr(cext, "proc_ioprio_get") +HAS_CPU_AFFINITY = hasattr(cext, "proc_cpu_affinity_get") _DEFAULT = object() # RLIMIT_* constants, not guaranteed to be present on all kernels @@ -197,6 +201,10 @@ pmmap_ext = namedtuple( pio = namedtuple('pio', ['read_count', 'write_count', 'read_bytes', 'write_bytes', 'read_chars', 'write_chars']) +# psutil.Process.cpu_times() +pcputimes = namedtuple('pcputimes', + ['user', 'system', 'children_user', 'children_system', + 'iowait']) # ===================================================================== @@ -204,19 +212,6 @@ pio = namedtuple('pio', ['read_count', 'write_count', # ===================================================================== -if PY3: - def decode(s): - return s.decode(encoding=ENCODING, errors=ENCODING_ERRS) -else: - def decode(s): - return s - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH - - def readlink(path): """Wrapper around os.readlink().""" assert isinstance(path, basestring), path @@ -623,6 +618,17 @@ def cpu_count_logical(): def cpu_count_physical(): """Return the number of physical cores in the system.""" + # Method #1 + core_ids = set() + for path in glob.glob( + "/sys/devices/system/cpu/cpu[0-9]*/topology/core_id"): + with open_binary(path) as f: + core_ids.add(int(f.read())) + result = len(core_ids) + if result != 0: + return result + + # Method #2 mapping = {} current_info = {} with open_binary('%s/cpuinfo' % get_procfs_path()) as f: @@ -642,8 +648,8 @@ def cpu_count_physical(): key, value = line.split(b'\t:', 1) current_info[key] = int(value) - # mimic os.cpu_count() - return sum(mapping.values()) or None + result = sum(mapping.values()) + return result or None # mimic os.cpu_count() def cpu_stats(): @@ -667,30 +673,26 @@ def cpu_stats(): ctx_switches, interrupts, soft_interrupts, syscalls) -if os.path.exists("/sys/devices/system/cpu/cpufreq") or \ +if os.path.exists("/sys/devices/system/cpu/cpufreq/policy0") or \ os.path.exists("/sys/devices/system/cpu/cpu0/cpufreq"): def cpu_freq(): """Return frequency metrics for all CPUs. Contrarily to other OSes, Linux updates these values in real-time. 
""" - # scaling_* files seem preferable to cpuinfo_*, see: - # http://unix.stackexchange.com/a/87537/168884 - ret = [] - ls = glob.glob("/sys/devices/system/cpu/cpufreq/policy*") - if ls: - # Sort the list so that '10' comes after '2'. This should - # ensure the CPU order is consistent with other CPU functions - # having a 'percpu' argument and returning results for multiple - # CPUs (cpu_times(), cpu_percent(), cpu_times_percent()). - ls.sort(key=lambda x: int(os.path.basename(x)[6:])) - else: - # https://github.com/giampaolo/psutil/issues/981 - ls = glob.glob("/sys/devices/system/cpu/cpu[0-9]*/cpufreq") - ls.sort(key=lambda x: int(re.search('[0-9]+', x).group(0))) + def get_path(num): + for p in ("/sys/devices/system/cpu/cpufreq/policy%s" % num, + "/sys/devices/system/cpu/cpu%s/cpufreq" % num): + if os.path.exists(p): + return p - pjoin = os.path.join - for path in ls: + ret = [] + for n in range(cpu_count_logical()): + path = get_path(n) + if not path: + continue + + pjoin = os.path.join curr = cat(pjoin(path, "scaling_cur_freq"), fallback=None) if curr is None: # Likely an old RedHat, see: @@ -715,9 +717,15 @@ elif os.path.exists("/proc/cpuinfo"): for line in f: if line.lower().startswith(b'cpu mhz'): key, value = line.split(b'\t:', 1) - ret.append(_common.scpufreq(float(value), None, None)) + ret.append(_common.scpufreq(float(value), 0., 0.)) return ret +else: + def cpu_freq(): + """Dummy implementation when none of the above files are present. + """ + return [] + # ===================================================================== # --- network @@ -744,6 +752,8 @@ class Connections: """ def __init__(self): + # The string represents the basename of the corresponding + # /proc/net/{proto_name} file. tcp4 = ("tcp", socket.AF_INET, socket.SOCK_STREAM) tcp6 = ("tcp6", socket.AF_INET6, socket.SOCK_STREAM) udp4 = ("udp", socket.AF_INET, socket.SOCK_DGRAM) @@ -769,17 +779,16 @@ class Connections: for fd in os.listdir("%s/%s/fd" % (self._procfs_path, pid)): try: inode = readlink("%s/%s/fd/%s" % (self._procfs_path, pid, fd)) - except OSError as err: + except (FileNotFoundError, ProcessLookupError): # ENOENT == file which is gone in the meantime; # os.stat('/proc/%s' % self.pid) will be done later # to force NSP (if it's the case) - if err.errno in (errno.ENOENT, errno.ESRCH): - continue - elif err.errno == errno.EINVAL: + continue + except OSError as err: + if err.errno == errno.EINVAL: # not a link continue - else: - raise + raise else: if inode.startswith('socket:['): # the process is using a socket @@ -792,7 +801,7 @@ class Connections: for pid in pids(): try: inodes.update(self.get_proc_inodes(pid)) - except OSError as err: + except (FileNotFoundError, ProcessLookupError, PermissionError): # os.listdir() is gonna raise a lot of access denied # exceptions in case of unprivileged user; that's fine # as we'll just end up returning a connection with PID @@ -800,9 +809,7 @@ class Connections: # Both netstat -an and lsof does the same so it's # unlikely we can do any better. # ENOENT just means a PID disappeared on us. 
- if err.errno not in ( - errno.ENOENT, errno.ESRCH, errno.EPERM, errno.EACCES): - raise + continue return inodes @staticmethod @@ -930,7 +937,7 @@ class Connections: path = tokens[-1] else: path = "" - type_ = int(type_) + type_ = _common.socktype_to_enum(int(type_)) # XXX: determining the remote endpoint of a # UNIX socket on Linux is not possible, see: # https://serverfault.com/questions/252723/ @@ -951,15 +958,14 @@ class Connections: else: inodes = self.get_all_inodes() ret = set() - for f, family, type_ in self.tmap[kind]: + for proto_name, family, type_ in self.tmap[kind]: + path = "%s/net/%s" % (self._procfs_path, proto_name) if family in (socket.AF_INET, socket.AF_INET6): ls = self.process_inet( - "%s/net/%s" % (self._procfs_path, f), - family, type_, inodes, filter_pid=pid) + path, family, type_, inodes, filter_pid=pid) else: ls = self.process_unix( - "%s/net/%s" % (self._procfs_path, f), - family, inodes, filter_pid=pid) + path, family, inodes, filter_pid=pid) for fd, family, type_, laddr, raddr, status, bound_pid in ls: if pid: conn = _common.pconn(fd, family, type_, laddr, raddr, @@ -1060,6 +1066,9 @@ def disk_io_counters(perdisk=False): # ...unless (Linux 2.6) the line refers to a partition instead # of a disk, in which case the line has less fields (7): # "3 1 hda1 8 8 8 8" + # 4.18+ has 4 fields added: + # "3 0 hda 8 8 8 8 8 8 8 8 8 8 8 0 0 0 0" + # 5.5 has 2 more fields. # See: # https://www.kernel.org/doc/Documentation/iostats.txt # https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats @@ -1074,7 +1083,7 @@ def disk_io_counters(perdisk=False): reads = int(fields[2]) (reads_merged, rbytes, rtime, writes, writes_merged, wbytes, wtime, _, busy_time, _) = map(int, fields[4:14]) - elif flen == 14: + elif flen == 14 or flen >= 18: # Linux 2.6+, line referring to a disk name = fields[2] (reads, reads_merged, rbytes, rtime, writes, writes_merged, @@ -1098,7 +1107,7 @@ def disk_io_counters(perdisk=False): fields = f.read().strip().split() name = os.path.basename(root) (reads, reads_merged, rbytes, rtime, writes, writes_merged, - wbytes, wtime, _, busy_time, _) = map(int, fields) + wbytes, wtime, _, busy_time) = map(int, fields[:10]) yield (name, reads, writes, rbytes, wbytes, rtime, wtime, reads_merged, writes_merged, busy_time) @@ -1153,13 +1162,13 @@ def disk_partitions(all=False): fstypes.add("zfs") # See: https://github.com/giampaolo/psutil/issues/1307 - if procfs_path == "/proc": - mtab_path = os.path.realpath("/etc/mtab") + if procfs_path == "/proc" and os.path.isfile('/etc/mtab'): + mounts_path = os.path.realpath("/etc/mtab") else: - mtab_path = os.path.realpath("%s/self/mounts" % procfs_path) + mounts_path = os.path.realpath("%s/self/mounts" % procfs_path) retlist = [] - partitions = cext.disk_partitions(mtab_path) + partitions = cext.disk_partitions(mounts_path) for partition in partitions: device, mountpoint, fstype, opts = partition if device == 'none': @@ -1169,6 +1178,7 @@ def disk_partitions(all=False): continue ntuple = _common.sdiskpart(device, mountpoint, fstype, opts) retlist.append(ntuple) + return retlist @@ -1196,6 +1206,8 @@ def sensors_temperatures(): # https://github.com/giampaolo/psutil/issues/971 # https://github.com/nicolargo/glances/issues/1060 basenames.extend(glob.glob('/sys/class/hwmon/hwmon*/device/temp*_*')) + basenames.extend(glob.glob( + '/sys/devices/platform/coretemp.*/hwmon/hwmon*/temp*_*')) basenames = sorted(set([x.split('_')[0] for x in basenames])) for base in basenames: @@ -1204,7 +1216,7 @@ def sensors_temperatures(): 
                current = float(cat(path)) / 1000.0
                path = os.path.join(os.path.dirname(base), 'name')
                unit_name = cat(path, binary=False)
-            except (IOError, OSError, ValueError) as err:
+            except (IOError, OSError, ValueError):
                 # A lot of things can go wrong here, so let's just skip the
                 # whole entry. Sure thing is Linux's /sys/class/hwmon really
                 # is a stinky broken mess.
@@ -1213,8 +1225,6 @@
                 # https://github.com/giampaolo/psutil/issues/1129
                 # https://github.com/giampaolo/psutil/issues/1245
                 # https://github.com/giampaolo/psutil/issues/1323
-                warnings.warn("ignoring %r for file %r" % (err, path),
-                              RuntimeWarning)
                 continue

             high = cat(base + '_max', fallback=None)
@@ -1246,8 +1256,7 @@
             path = os.path.join(base, 'type')
             unit_name = cat(path, binary=False)
         except (IOError, OSError, ValueError) as err:
-            warnings.warn("ignoring %r for file %r" % (err, path),
-                          RuntimeWarning)
+            debug("ignoring %r for file %r" % (err, path))
             continue

         trip_paths = glob.glob(base + '/trip_point*')
@@ -1485,11 +1494,10 @@ def ppid_map():
         try:
             with open_binary("%s/%s/stat" % (procfs_path, pid)) as f:
                 data = f.read()
-        except EnvironmentError as err:
+        except (FileNotFoundError, ProcessLookupError):
             # Note: we should be able to access /stat for all processes
             # aka it's unlikely we'll bump into EPERM, which is good.
-            if err.errno not in (errno.ENOENT, errno.ESRCH):
-                raise
+            pass
         else:
             rpar = data.rfind(b')')
             dset = data[rpar + 2:].split()
@@ -1506,16 +1514,12 @@ def wrap_exceptions(fun):
     def wrapper(self, *args, **kwargs):
         try:
             return fun(self, *args, **kwargs)
-        except EnvironmentError as err:
-            if err.errno in (errno.EPERM, errno.EACCES):
-                raise AccessDenied(self.pid, self._name)
-            # ESRCH (no such process) can be raised on read() if
-            # process is gone in the meantime.
-            if err.errno == errno.ESRCH:
-                raise NoSuchProcess(self.pid, self._name)
-            # ENOENT (no such file or directory) can be raised on open().
-            if err.errno == errno.ENOENT and not os.path.exists("%s/%s" % (
-                    self._procfs_path, self.pid)):
+        except PermissionError:
+            raise AccessDenied(self.pid, self._name)
+        except ProcessLookupError:
+            raise NoSuchProcess(self.pid, self._name)
+        except FileNotFoundError:
+            if not os.path.exists("%s/%s" % (self._procfs_path, self.pid)):
                 raise NoSuchProcess(self.pid, self._name)
             # Note: zombies will keep existing under /proc until they're
             # gone so there's no way to distinguish them in here.
@@ -1526,7 +1530,7 @@ class Process(object):
     """Linux process implementation."""

-    __slots__ = ["pid", "_name", "_ppid", "_procfs_path"]
+    __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"]

     def __init__(self, pid):
         self.pid = pid
@@ -1534,13 +1538,20 @@
         self._ppid = None
         self._procfs_path = get_procfs_path()

+    def _assert_alive(self):
+        """Raise NSP if the process disappeared on us."""
+        # For those C functions which do not raise NSP, possibly
+        # returning an incorrect or incomplete result.
+        os.stat('%s/%s' % (self._procfs_path, self.pid))
+
+    @wrap_exceptions
     @memoize_when_activated
     def _parse_stat_file(self):
-        """Parse /proc/{pid}/stat file. Return a list of fields where
-        process name is in position 0.
+        """Parse /proc/{pid}/stat file and return a dict with various
+        process info.
         Using "man proc" as a reference: where "man proc" refers to
-        position N, always substract 2 (e.g starttime pos 22 in
-        'man proc' == pos 20 in the list returned here).
+        position N, always subtract 3 (e.g. ppid, position 4 in
+        'man proc' == position 1 in here).
         The return value is cached in case oneshot() ctx manager is
         in use.
         """
@@ -1551,9 +1562,24 @@
         # the first occurrence of "(" and the last occurence of ")".
         rpar = data.rfind(b')')
         name = data[data.find(b'(') + 1:rpar]
-        others = data[rpar + 2:].split()
-        return [name] + others
+        fields = data[rpar + 2:].split()
+        ret = {}
+        ret['name'] = name
+        ret['status'] = fields[0]
+        ret['ppid'] = fields[1]
+        ret['ttynr'] = fields[4]
+        ret['utime'] = fields[11]
+        ret['stime'] = fields[12]
+        ret['children_utime'] = fields[13]
+        ret['children_stime'] = fields[14]
+        ret['create_time'] = fields[19]
+        ret['cpu_num'] = fields[36]
+        ret['blkio_ticks'] = fields[39]  # aka 'delayacct_blkio_ticks'
+
+        return ret
+
+    @wrap_exceptions
     @memoize_when_activated
     def _read_status_file(self):
         """Read /proc/{pid}/stat file and return its content.
@@ -1563,6 +1589,7 @@
         with open_binary("%s/%s/status" % (self._procfs_path, self.pid)) as f:
             return f.read()

+    @wrap_exceptions
     @memoize_when_activated
     def _read_smaps_file(self):
         with open_binary("%s/%s/smaps" % (self._procfs_path, self.pid),
@@ -1570,18 +1597,18 @@
             return f.read().strip()

     def oneshot_enter(self):
-        self._parse_stat_file.cache_activate()
-        self._read_status_file.cache_activate()
-        self._read_smaps_file.cache_activate()
+        self._parse_stat_file.cache_activate(self)
+        self._read_status_file.cache_activate(self)
+        self._read_smaps_file.cache_activate(self)

     def oneshot_exit(self):
-        self._parse_stat_file.cache_deactivate()
-        self._read_status_file.cache_deactivate()
-        self._read_smaps_file.cache_deactivate()
+        self._parse_stat_file.cache_deactivate(self)
+        self._read_status_file.cache_deactivate(self)
+        self._read_smaps_file.cache_deactivate(self)

     @wrap_exceptions
     def name(self):
-        name = self._parse_stat_file()[0]
+        name = self._parse_stat_file()['name']
         if PY3:
             name = decode(name)
         # XXX - gets changed later and probably needs refactoring
@@ -1590,21 +1617,19 @@
     def exe(self):
         try:
             return readlink("%s/%s/exe" % (self._procfs_path, self.pid))
-        except OSError as err:
-            if err.errno in (errno.ENOENT, errno.ESRCH):
-                # no such file error; might be raised also if the
-                # path actually exists for system processes with
-                # low pids (about 0-20)
-                if os.path.lexists("%s/%s" % (self._procfs_path, self.pid)):
-                    return ""
+        except (FileNotFoundError, ProcessLookupError):
+            # no such file error; might be raised also if the
+            # path actually exists for system processes with
+            # low pids (about 0-20)
+            if os.path.lexists("%s/%s" % (self._procfs_path, self.pid)):
+                return ""
+            else:
+                if not pid_exists(self.pid):
+                    raise NoSuchProcess(self.pid, self._name)
                 else:
-                    if not pid_exists(self.pid):
-                        raise NoSuchProcess(self.pid, self._name)
-                    else:
-                        raise ZombieProcess(self.pid, self._name, self._ppid)
-            if err.errno in (errno.EPERM, errno.EACCES):
-                raise AccessDenied(self.pid, self._name)
-            raise
+                    raise ZombieProcess(self.pid, self._name, self._ppid)
+        except PermissionError:
+            raise AccessDenied(self.pid, self._name)

     @wrap_exceptions
     def cmdline(self):
@@ -1623,7 +1648,13 @@
             sep = '\x00' if data.endswith('\x00') else ' '
             if data.endswith(sep):
                 data = data[:-1]
-            return [x for x in data.split(sep)]
+            cmdline = data.split(sep)
+            # Sometimes last char is a null byte '\0' but the args are
+            # separated by spaces, see: https://github.com/giampaolo/psutil/
+            #
issues/1179#issuecomment-552984549 + if sep == '\x00' and len(cmdline) == 1 and ' ' in data: + cmdline = data.split(' ') + return cmdline @wrap_exceptions def environ(self): @@ -1633,13 +1664,14 @@ class Process(object): @wrap_exceptions def terminal(self): - tty_nr = int(self._parse_stat_file()[5]) + tty_nr = int(self._parse_stat_file()['ttynr']) tmap = _psposix.get_terminal_map() try: return tmap[tty_nr] except KeyError: return None + # May not be available on old kernels. if os.path.exists('/proc/%s/io' % os.getpid()): @wrap_exceptions def io_counters(self): @@ -1671,24 +1703,21 @@ class Process(object): except KeyError as err: raise ValueError("%r field was not found in %s; found fields " "are %r" % (err[0], fname, fields)) - else: - def io_counters(self): - raise NotImplementedError("couldn't find /proc/%s/io (kernel " - "too old?)" % self.pid) @wrap_exceptions def cpu_times(self): values = self._parse_stat_file() - utime = float(values[12]) / CLOCK_TICKS - stime = float(values[13]) / CLOCK_TICKS - children_utime = float(values[14]) / CLOCK_TICKS - children_stime = float(values[15]) / CLOCK_TICKS - return _common.pcputimes(utime, stime, children_utime, children_stime) + utime = float(values['utime']) / CLOCK_TICKS + stime = float(values['stime']) / CLOCK_TICKS + children_utime = float(values['children_utime']) / CLOCK_TICKS + children_stime = float(values['children_stime']) / CLOCK_TICKS + iowait = float(values['blkio_ticks']) / CLOCK_TICKS + return pcputimes(utime, stime, children_utime, children_stime, iowait) @wrap_exceptions def cpu_num(self): """What CPU the process is on.""" - return int(self._parse_stat_file()[37]) + return int(self._parse_stat_file()['cpu_num']) @wrap_exceptions def wait(self, timeout=None): @@ -1696,14 +1725,14 @@ class Process(object): @wrap_exceptions def create_time(self): - values = self._parse_stat_file() + ctime = float(self._parse_stat_file()['create_time']) # According to documentation, starttime is in field 21 and the # unit is jiffies (clock ticks). # We first divide it for clock ticks and then add uptime returning # seconds since the epoch, in UTC. # Also use cached value if available. bt = BOOT_TIME or boot_time() - return (float(values[20]) / CLOCK_TICKS) + bt + return (ctime / CLOCK_TICKS) + bt @wrap_exceptions def memory_info(self): @@ -1765,6 +1794,9 @@ class Process(object): """Return process's mapped memory regions as a list of named tuples. Fields are explained in 'man proc'; here is an updated (Apr 2012) version: http://goo.gl/fmebo + + /proc/{PID}/smaps does not exist on kernels < 2.6.14 or if + CONFIG_MMU kernel configuration option is not enabled. """ def get_blocks(lines, current_block): data = {} @@ -1812,7 +1844,7 @@ class Process(object): path = path[:-10] ls.append(( decode(addr), decode(perms), path, - data[b'Rss:'], + data.get(b'Rss:', 0), data.get(b'Size:', 0), data.get(b'Pss:', 0), data.get(b'Shared_Clean:', 0), @@ -1825,25 +1857,16 @@ class Process(object): )) return ls - else: # pragma: no cover - def memory_maps(self): - raise NotImplementedError( - "/proc/%s/smaps does not exist on kernels < 2.6.14 or " - "if CONFIG_MMU kernel configuration option is not " - "enabled." 
% self.pid) - @wrap_exceptions def cwd(self): try: return readlink("%s/%s/cwd" % (self._procfs_path, self.pid)) - except OSError as err: + except (FileNotFoundError, ProcessLookupError): # https://github.com/giampaolo/psutil/issues/986 - if err.errno in (errno.ENOENT, errno.ESRCH): - if not pid_exists(self.pid): - raise NoSuchProcess(self.pid, self._name) - else: - raise ZombieProcess(self.pid, self._name, self._ppid) - raise + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + else: + raise ZombieProcess(self.pid, self._name, self._ppid) @wrap_exceptions def num_ctx_switches(self, @@ -1879,13 +1902,11 @@ class Process(object): try: with open_binary(fname) as f: st = f.read().strip() - except IOError as err: - if err.errno == errno.ENOENT: - # no such file or directory; it means thread - # disappeared on us - hit_enoent = True - continue - raise + except FileNotFoundError: + # no such file or directory; it means thread + # disappeared on us + hit_enoent = True + continue # ignore the first two values ("pid (exe)") st = st[st.find(b')') + 2:] values = st.split(b' ') @@ -1894,8 +1915,7 @@ class Process(object): ntuple = _common.pthread(int(thread_id), utime, stime) retlist.append(ntuple) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions @@ -1911,38 +1931,41 @@ class Process(object): def nice_set(self, value): return cext_posix.setpriority(self.pid, value) - @wrap_exceptions - def cpu_affinity_get(self): - return cext.proc_cpu_affinity_get(self.pid) + # starting from CentOS 6. + if HAS_CPU_AFFINITY: - def _get_eligible_cpus( - self, _re=re.compile(br"Cpus_allowed_list:\t(\d+)-(\d+)")): - # See: https://github.com/giampaolo/psutil/issues/956 - data = self._read_status_file() - match = _re.findall(data) - if match: - return list(range(int(match[0][0]), int(match[0][1]) + 1)) - else: - return list(range(len(per_cpu_times()))) + @wrap_exceptions + def cpu_affinity_get(self): + return cext.proc_cpu_affinity_get(self.pid) - @wrap_exceptions - def cpu_affinity_set(self, cpus): - try: - cext.proc_cpu_affinity_set(self.pid, cpus) - except (OSError, ValueError) as err: - if isinstance(err, ValueError) or err.errno == errno.EINVAL: - eligible_cpus = self._get_eligible_cpus() - all_cpus = tuple(range(len(per_cpu_times()))) - for cpu in cpus: - if cpu not in all_cpus: - raise ValueError( - "invalid CPU number %r; choose between %s" % ( - cpu, eligible_cpus)) - if cpu not in eligible_cpus: - raise ValueError( - "CPU number %r is not eligible; choose " - "between %s" % (cpu, eligible_cpus)) - raise + def _get_eligible_cpus( + self, _re=re.compile(br"Cpus_allowed_list:\t(\d+)-(\d+)")): + # See: https://github.com/giampaolo/psutil/issues/956 + data = self._read_status_file() + match = _re.findall(data) + if match: + return list(range(int(match[0][0]), int(match[0][1]) + 1)) + else: + return list(range(len(per_cpu_times()))) + + @wrap_exceptions + def cpu_affinity_set(self, cpus): + try: + cext.proc_cpu_affinity_set(self.pid, cpus) + except (OSError, ValueError) as err: + if isinstance(err, ValueError) or err.errno == errno.EINVAL: + eligible_cpus = self._get_eligible_cpus() + all_cpus = tuple(range(len(per_cpu_times()))) + for cpu in cpus: + if cpu not in all_cpus: + raise ValueError( + "invalid CPU number %r; choose between %s" % ( + cpu, eligible_cpus)) + if cpu not in eligible_cpus: + raise ValueError( + "CPU number %r is not eligible; choose " + "between %s" % (cpu, 
eligible_cpus)) + raise # only starting from kernel 2.6.13 if HAS_PROC_IO_PRIORITY: @@ -1956,35 +1979,12 @@ class Process(object): @wrap_exceptions def ionice_set(self, ioclass, value): - if value is not None: - if not PY3 and not isinstance(value, (int, long)): - msg = "value argument is not an integer (gor %r)" % value - raise TypeError(msg) - if not 0 <= value <= 7: - raise ValueError( - "value argument range expected is between 0 and 7") - - if ioclass in (IOPRIO_CLASS_NONE, None): - if value: - msg = "can't specify value with IOPRIO_CLASS_NONE " \ - "(got %r)" % value - raise ValueError(msg) - ioclass = IOPRIO_CLASS_NONE + if value is None: value = 0 - elif ioclass == IOPRIO_CLASS_IDLE: - if value: - msg = "can't specify value with IOPRIO_CLASS_IDLE " \ - "(got %r)" % value - raise ValueError(msg) - value = 0 - elif ioclass in (IOPRIO_CLASS_RT, IOPRIO_CLASS_BE): - if value is None: - # TODO: add comment explaining why this is 4 (?) - value = 4 - else: - # otherwise we would get OSError(EVINAL) - raise ValueError("invalid ioclass argument %r" % ioclass) - + if value and ioclass in (IOPRIO_CLASS_IDLE, IOPRIO_CLASS_NONE): + raise ValueError("%r ioclass accepts no value" % ioclass) + if value < 0 or value > 7: + raise ValueError("value not in 0-7 range") return cext.proc_ioprio_set(self.pid, ioclass, value) if HAS_PRLIMIT: @@ -2018,7 +2018,7 @@ class Process(object): @wrap_exceptions def status(self): - letter = self._parse_stat_file()[1] + letter = self._parse_stat_file()['status'] if PY3: letter = letter.decode() # XXX is '?' legit? (we're not supposed to return it anyway) @@ -2033,16 +2033,15 @@ class Process(object): file = "%s/%s/fd/%s" % (self._procfs_path, self.pid, fd) try: path = readlink(file) - except OSError as err: + except (FileNotFoundError, ProcessLookupError): # ENOENT == file which is gone in the meantime - if err.errno in (errno.ENOENT, errno.ESRCH): - hit_enoent = True - continue - elif err.errno == errno.EINVAL: + hit_enoent = True + continue + except OSError as err: + if err.errno == errno.EINVAL: # not a link continue - else: - raise + raise else: # If path is not an absolute there's no way to tell # whether it's a regular file or not, so we skip it. @@ -2056,29 +2055,23 @@ class Process(object): with open_binary(file) as f: pos = int(f.readline().split()[1]) flags = int(f.readline().split()[1], 8) - except IOError as err: - if err.errno == errno.ENOENT: - # fd gone in the meantime; does not - # necessarily mean the process disappeared - # on us. 
- hit_enoent = True - else: - raise + except FileNotFoundError: + # fd gone in the meantime; process may + # still be alive + hit_enoent = True else: mode = file_flags_to_mode(flags) ntuple = popenfile( path, int(fd), int(pos), mode, flags) retlist.append(ntuple) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions def connections(self, kind='inet'): ret = _connections.retrieve(kind, self.pid) - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (self._procfs_path, self.pid)) + self._assert_alive() return ret @wrap_exceptions @@ -2087,7 +2080,7 @@ class Process(object): @wrap_exceptions def ppid(self): - return int(self._parse_stat_file()[2]) + return int(self._parse_stat_file()['ppid']) @wrap_exceptions def uids(self, _uids_re=re.compile(br'Uid:\t(\d+)\t(\d+)\t(\d+)')): diff --git a/server/www/packages/packages-linux/x64/psutil/_psosx.py b/server/www/packages/packages-linux/x64/psutil/_psosx.py index 94e22bc..e429649 100644 --- a/server/www/packages/packages-linux/x64/psutil/_psosx.py +++ b/server/www/packages/packages-linux/x64/psutil/_psosx.py @@ -8,24 +8,23 @@ import contextlib import errno import functools import os -from socket import AF_INET from collections import namedtuple from . import _common from . import _psposix from . import _psutil_osx as cext from . import _psutil_posix as cext_posix -from ._common import AF_INET6 +from ._common import AccessDenied from ._common import conn_tmap +from ._common import conn_to_ntuple from ._common import isfile_strict from ._common import memoize_when_activated +from ._common import NoSuchProcess from ._common import parse_environ_block -from ._common import sockfam_to_enum -from ._common import socktype_to_enum from ._common import usage_percent -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess +from ._common import ZombieProcess +from ._compat import PermissionError +from ._compat import ProcessLookupError __extra__all__ = [] @@ -103,13 +102,6 @@ svmem = namedtuple( pmem = namedtuple('pmem', ['rss', 'vms', 'pfaults', 'pageins']) # psutil.Process.memory_full_info() pfullmem = namedtuple('pfullmem', pmem._fields + ('uss', )) -# psutil.Process.memory_maps(grouped=True) -pmmap_grouped = namedtuple( - 'pmmap_grouped', - 'path rss private swapped dirtied ref_count shadow_depth') -# psutil.Process.memory_maps(grouped=False) -pmmap_ext = namedtuple( - 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) # ===================================================================== @@ -340,12 +332,10 @@ def wrap_exceptions(fun): def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) - except OSError as err: - if err.errno == errno.ESRCH: - raise NoSuchProcess(self.pid, self._name) - if err.errno in (errno.EPERM, errno.EACCES): - raise AccessDenied(self.pid, self._name) - raise + except ProcessLookupError: + raise NoSuchProcess(self.pid, self._name) + except PermissionError: + raise AccessDenied(self.pid, self._name) except cext.ZombieProcessError: raise ZombieProcess(self.pid, self._name, self._ppid) return wrapper @@ -380,13 +370,14 @@ def catch_zombie(proc): class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid self._name = None self._ppid = None + @wrap_exceptions 
@memoize_when_activated def _get_kinfo_proc(self): # Note: should work with all PIDs without permission issues. @@ -394,6 +385,7 @@ class Process(object): assert len(ret) == len(kinfo_proc_map) return ret + @wrap_exceptions @memoize_when_activated def _get_pidtaskinfo(self): # Note: should work for PIDs owned by user only. @@ -403,12 +395,12 @@ class Process(object): return ret def oneshot_enter(self): - self._get_kinfo_proc.cache_activate() - self._get_pidtaskinfo.cache_activate() + self._get_kinfo_proc.cache_activate(self) + self._get_pidtaskinfo.cache_activate(self) def oneshot_exit(self): - self._get_kinfo_proc.cache_deactivate() - self._get_pidtaskinfo.cache_deactivate() + self._get_kinfo_proc.cache_deactivate(self) + self._get_pidtaskinfo.cache_deactivate(self) @wrap_exceptions def name(self): @@ -530,15 +522,8 @@ class Process(object): ret = [] for item in rawlist: fd, fam, type, laddr, raddr, status = item - status = TCP_STATUSES[status] - fam = sockfam_to_enum(fam) - type = socktype_to_enum(type) - if fam in (AF_INET, AF_INET6): - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - nt = _common.pconn(fd, fam, type, laddr, raddr, status) + nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, + TCP_STATUSES) ret.append(nt) return ret @@ -577,7 +562,3 @@ class Process(object): ntuple = _common.pthread(thread_id, utime, stime) retlist.append(ntuple) return retlist - - @wrap_exceptions - def memory_maps(self): - return cext.proc_memory_maps(self.pid) diff --git a/server/www/packages/packages-linux/x64/psutil/_psposix.py b/server/www/packages/packages-linux/x64/psutil/_psposix.py index 9c3fac2..88213ef 100644 --- a/server/www/packages/packages-linux/x64/psutil/_psposix.py +++ b/server/www/packages/packages-linux/x64/psutil/_psposix.py @@ -4,7 +4,6 @@ """Routines common to all posix systems.""" -import errno import glob import os import sys @@ -12,10 +11,15 @@ import time from ._common import memoize from ._common import sdiskusage +from ._common import TimeoutExpired from ._common import usage_percent +from ._compat import ChildProcessError +from ._compat import FileNotFoundError +from ._compat import InterruptedError +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import PY3 from ._compat import unicode -from ._exceptions import TimeoutExpired __all__ = ['pid_exists', 'wait_pid', 'disk_usage', 'get_terminal_map'] @@ -32,19 +36,13 @@ def pid_exists(pid): return True try: os.kill(pid, 0) - except OSError as err: - if err.errno == errno.ESRCH: - # ESRCH == No such process - return False - elif err.errno == errno.EPERM: - # EPERM clearly means there's a process to deny access to - return True - else: - # According to "man 2 kill" possible error values are - # (EINVAL, EPERM, ESRCH) therefore we should never get - # here. If we do let's be explicit in considering this - # an error. 
- raise err + except ProcessLookupError: + return False + except PermissionError: + # EPERM clearly means there's a process to deny access to + return True + # According to "man 2 kill" possible error values are + # (EINVAL, EPERM, ESRCH) else: return True @@ -80,24 +78,20 @@ def wait_pid(pid, timeout=None, proc_name=None): while True: try: retpid, status = waitcall() - except OSError as err: - if err.errno == errno.EINTR: - delay = check_timeout(delay) - continue - elif err.errno == errno.ECHILD: - # This has two meanings: - # - pid is not a child of os.getpid() in which case - # we keep polling until it's gone - # - pid never existed in the first place - # In both cases we'll eventually return None as we - # can't determine its exit status code. - while True: - if pid_exists(pid): - delay = check_timeout(delay) - else: - return - else: - raise + except InterruptedError: + delay = check_timeout(delay) + except ChildProcessError: + # This has two meanings: + # - pid is not a child of os.getpid() in which case + # we keep polling until it's gone + # - pid never existed in the first place + # In both cases we'll eventually return None as we + # can't determine its exit status code. + while True: + if pid_exists(pid): + delay = check_timeout(delay) + else: + return else: if retpid == 0: # WNOHANG was used, pid is still running @@ -176,7 +170,6 @@ def get_terminal_map(): assert name not in ret, name try: ret[os.stat(name).st_rdev] = name - except OSError as err: - if err.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass return ret diff --git a/server/www/packages/packages-linux/x64/psutil/_pssunos.py b/server/www/packages/packages-linux/x64/psutil/_pssunos.py index e2f33a3..62362b8 100644 --- a/server/www/packages/packages-linux/x64/psutil/_pssunos.py +++ b/server/www/packages/packages-linux/x64/psutil/_pssunos.py @@ -5,6 +5,7 @@ """Sun OS Solaris platform implementation.""" import errno +import functools import os import socket import subprocess @@ -16,17 +17,22 @@ from . import _common from . import _psposix from . import _psutil_posix as cext_posix from . 
import _psutil_sunos as cext +from ._common import AccessDenied from ._common import AF_INET6 +from ._common import debug +from ._common import get_procfs_path from ._common import isfile_strict from ._common import memoize_when_activated +from ._common import NoSuchProcess from ._common import sockfam_to_enum from ._common import socktype_to_enum from ._common import usage_percent +from ._common import ZombieProcess from ._compat import b +from ._compat import FileNotFoundError +from ._compat import PermissionError +from ._compat import ProcessLookupError from ._compat import PY3 -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import ZombieProcess __extra__all__ = ["CONN_IDLE", "CONN_BOUND", "PROCFS_PATH"] @@ -109,16 +115,6 @@ pmmap_ext = namedtuple( 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) -# ===================================================================== -# --- utils -# ===================================================================== - - -def get_procfs_path(): - """Return updated psutil.PROCFS_PATH constant.""" - return sys.modules['psutil'].PROCFS_PATH - - # ===================================================================== # --- memory # ===================================================================== @@ -230,7 +226,12 @@ def disk_partitions(all=False): # Differently from, say, Linux, we don't have a list of # common fs types so the best we can do, AFAIK, is to # filter by filesystem having a total size > 0. - if not disk_usage(mountpoint).total: + try: + if not disk_usage(mountpoint).total: + continue + except OSError as err: + # https://github.com/giampaolo/psutil/issues/1674 + debug("skipping %r: %r" % (mountpoint, err)) continue ntuple = _common.sdiskpart(device, mountpoint, fstype, opts) retlist.append(ntuple) @@ -266,6 +267,7 @@ def net_connections(kind, _pid=-1): continue if type_ not in types: continue + # TODO: refactor and use _common.conn_to_ntuple. if fam in (AF_INET, AF_INET6): if laddr: laddr = _common.addr(*laddr) @@ -341,26 +343,26 @@ def wrap_exceptions(fun): """Call callable into a try/except clause and translate ENOENT, EACCES and EPERM in NoSuchProcess or AccessDenied exceptions. """ - + @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) - except EnvironmentError as err: + except (FileNotFoundError, ProcessLookupError): + # ENOENT (no such file or directory) gets raised on open(). + # ESRCH (no such process) can get raised on read() if + # process is gone in meantime. + if not pid_exists(self.pid): + raise NoSuchProcess(self.pid, self._name) + else: + raise ZombieProcess(self.pid, self._name, self._ppid) + except PermissionError: + raise AccessDenied(self.pid, self._name) + except OSError: if self.pid == 0: if 0 in pids(): raise AccessDenied(self.pid, self._name) else: raise - # ENOENT (no such file or directory) gets raised on open(). - # ESRCH (no such process) can get raised on read() if - # process is gone in meantime. 
-        if err.errno in (errno.ENOENT, errno.ESRCH):
-            if not pid_exists(self.pid):
-                raise NoSuchProcess(self.pid, self._name)
-            else:
-                raise ZombieProcess(self.pid, self._name, self._ppid)
-        if err.errno in (errno.EPERM, errno.EACCES):
-            raise AccessDenied(self.pid, self._name)
             raise
     return wrapper

@@ -368,7 +370,7 @@
 class Process(object):
     """Wrapper class around underlying C implementation."""

-    __slots__ = ["pid", "_name", "_ppid", "_procfs_path"]
+    __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"]

     def __init__(self, pid):
         self.pid = pid
@@ -376,32 +378,41 @@
         self._ppid = None
         self._procfs_path = get_procfs_path()

+    def _assert_alive(self):
+        """Raise NSP if the process disappeared on us."""
+        # For those C functions which do not raise NSP, possibly
+        # returning an incorrect or incomplete result.
+        os.stat('%s/%s' % (self._procfs_path, self.pid))
+
     def oneshot_enter(self):
-        self._proc_name_and_args.cache_activate()
-        self._proc_basic_info.cache_activate()
-        self._proc_cred.cache_activate()
+        self._proc_name_and_args.cache_activate(self)
+        self._proc_basic_info.cache_activate(self)
+        self._proc_cred.cache_activate(self)

     def oneshot_exit(self):
-        self._proc_name_and_args.cache_deactivate()
-        self._proc_basic_info.cache_deactivate()
-        self._proc_cred.cache_deactivate()
+        self._proc_name_and_args.cache_deactivate(self)
+        self._proc_basic_info.cache_deactivate(self)
+        self._proc_cred.cache_deactivate(self)

+    @wrap_exceptions
     @memoize_when_activated
     def _proc_name_and_args(self):
         return cext.proc_name_and_args(self.pid, self._procfs_path)

+    @wrap_exceptions
     @memoize_when_activated
     def _proc_basic_info(self):
+        if self.pid == 0 and not \
+                os.path.exists('%s/%s/psinfo' % (self._procfs_path, self.pid)):
+            raise AccessDenied(self.pid)
         ret = cext.proc_basic_info(self.pid, self._procfs_path)
         assert len(ret) == len(proc_info_map)
         return ret

+    @wrap_exceptions
     @memoize_when_activated
     def _proc_cred(self):
-        @wrap_exceptions
-        def proc_cred(self):
-            return cext.proc_cred(self.pid, self._procfs_path)
-        return proc_cred(self)
+        return cext.proc_cred(self.pid, self._procfs_path)

     @wrap_exceptions
     def name(self):
@@ -512,14 +523,11 @@ class Process(object):
             try:
                 return os.readlink(
                     '%s/%d/path/%d' % (procfs_path, self.pid, x))
-            except OSError as err:
-                if err.errno == errno.ENOENT:
-                    hit_enoent = True
-                    continue
-                raise
+            except FileNotFoundError:
+                hit_enoent = True
+                continue
         if hit_enoent:
-            # raise NSP if the process disappeared on us
-            os.stat('%s/%s' % (procfs_path, self.pid))
+            self._assert_alive()

     @wrap_exceptions
     def cwd(self):
@@ -530,11 +538,9 @@
         procfs_path = self._procfs_path
         try:
             return os.readlink("%s/%s/path/cwd" % (procfs_path, self.pid))
-        except OSError as err:
-            if err.errno == errno.ENOENT:
-                os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
-                return None
-            raise
+        except FileNotFoundError:
+            os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
+            return None

     @wrap_exceptions
     def memory_info(self):
@@ -581,8 +587,7 @@ class Process(object):
             nt = _common.pthread(tid, utime, stime)
             ret.append(nt)
         if hit_enoent:
-            # raise NSP if the process disappeared on us
-            os.stat('%s/%s' % (procfs_path, self.pid))
+            self._assert_alive()
         return ret

     @wrap_exceptions
@@ -596,18 +601,14 @@ class Process(object):
             if os.path.islink(path):
                 try:
                     file = os.readlink(path)
-                except OSError as err:
-                    # ENOENT == file which is gone in the meantime
-                    if err.errno == errno.ENOENT:
-                        hit_enoent = True
-                        continue
-
raise + except FileNotFoundError: + hit_enoent = True + continue else: if isfile_strict(file): retlist.append(_common.popenfile(file, int(fd))) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() return retlist def _get_unix_sockets(self, pid): @@ -707,8 +708,7 @@ class Process(object): raise retlist.append((addr, perm, name, rss, anon, locked)) if hit_enoent: - # raise NSP if the process disappeared on us - os.stat('%s/%s' % (procfs_path, self.pid)) + self._assert_alive() return retlist @wrap_exceptions diff --git a/server/www/packages/packages-linux/x64/psutil/_psutil_linux.cpython-37m-x86_64-linux-gnu.so b/server/www/packages/packages-linux/x64/psutil/_psutil_linux.cpython-37m-x86_64-linux-gnu.so index 26278e2..61bc4a0 100755 Binary files a/server/www/packages/packages-linux/x64/psutil/_psutil_linux.cpython-37m-x86_64-linux-gnu.so and b/server/www/packages/packages-linux/x64/psutil/_psutil_linux.cpython-37m-x86_64-linux-gnu.so differ diff --git a/server/www/packages/packages-linux/x64/psutil/_psutil_posix.cpython-37m-x86_64-linux-gnu.so b/server/www/packages/packages-linux/x64/psutil/_psutil_posix.cpython-37m-x86_64-linux-gnu.so index 8916dd8..ed97a25 100755 Binary files a/server/www/packages/packages-linux/x64/psutil/_psutil_posix.cpython-37m-x86_64-linux-gnu.so and b/server/www/packages/packages-linux/x64/psutil/_psutil_posix.cpython-37m-x86_64-linux-gnu.so differ diff --git a/server/www/packages/packages-linux/x64/psutil/_pswindows.py b/server/www/packages/packages-linux/x64/psutil/_pswindows.py index b938d42..99d5d71 100644 --- a/server/www/packages/packages-linux/x64/psutil/_pswindows.py +++ b/server/www/packages/packages-linux/x64/psutil/_pswindows.py @@ -8,11 +8,37 @@ import contextlib import errno import functools import os +import signal import sys import time from collections import namedtuple from . import _common +from ._common import AccessDenied +from ._common import conn_tmap +from ._common import conn_to_ntuple +from ._common import debug +from ._common import ENCODING +from ._common import ENCODING_ERRS +from ._common import isfile_strict +from ._common import memoize +from ._common import memoize_when_activated +from ._common import NoSuchProcess +from ._common import parse_environ_block +from ._common import TimeoutExpired +from ._common import usage_percent +from ._compat import long +from ._compat import lru_cache +from ._compat import PY3 +from ._compat import unicode +from ._compat import xrange +from ._psutil_windows import ABOVE_NORMAL_PRIORITY_CLASS +from ._psutil_windows import BELOW_NORMAL_PRIORITY_CLASS +from ._psutil_windows import HIGH_PRIORITY_CLASS +from ._psutil_windows import IDLE_PRIORITY_CLASS +from ._psutil_windows import NORMAL_PRIORITY_CLASS +from ._psutil_windows import REALTIME_PRIORITY_CLASS + try: from . import _psutil_windows as cext except ImportError as err: @@ -22,41 +48,13 @@ except ImportError as err: # 1) we are on an old Windows version # 2) psutil was installed via pip + wheel # See: https://github.com/giampaolo/psutil/issues/811 - # It must be noted that psutil can still (kind of) work - # on outdated systems if compiled / installed from sources, - # but if we get here it means this this was a wheel (or exe). 
msg = "this Windows version is too old (< Windows Vista); " msg += "psutil 3.4.2 is the latest version which supports Windows " - msg += "2000, XP and 2003 server; it may be possible that psutil " - msg += "will work if compiled from sources though" + msg += "2000, XP and 2003 server" raise RuntimeError(msg) else: raise -from ._common import conn_tmap -from ._common import ENCODING -from ._common import ENCODING_ERRS -from ._common import isfile_strict -from ._common import memoize_when_activated -from ._common import parse_environ_block -from ._common import sockfam_to_enum -from ._common import socktype_to_enum -from ._common import usage_percent -from ._compat import long -from ._compat import lru_cache -from ._compat import PY3 -from ._compat import unicode -from ._compat import xrange -from ._exceptions import AccessDenied -from ._exceptions import NoSuchProcess -from ._exceptions import TimeoutExpired -from ._psutil_windows import ABOVE_NORMAL_PRIORITY_CLASS -from ._psutil_windows import BELOW_NORMAL_PRIORITY_CLASS -from ._psutil_windows import HIGH_PRIORITY_CLASS -from ._psutil_windows import IDLE_PRIORITY_CLASS -from ._psutil_windows import NORMAL_PRIORITY_CLASS -from ._psutil_windows import REALTIME_PRIORITY_CLASS - if sys.version_info >= (3, 4): import enum else: @@ -66,11 +64,14 @@ else: # http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx __extra__all__ = [ "win_service_iter", "win_service_get", + # Process priority "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS", - "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", - "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS", - "CONN_DELETE_TCB", - "AF_LINK", + "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", "NORMAL_PRIORITY_CLASS", + "REALTIME_PRIORITY_CLASS", + # IO priority + "IOPRIO_VERYLOW", "IOPRIO_LOW", "IOPRIO_NORMAL", "IOPRIO_HIGH", + # others + "CONN_DELETE_TCB", "AF_LINK", ] @@ -79,12 +80,8 @@ __extra__all__ = [ # ===================================================================== CONN_DELETE_TCB = "DELETE_TCB" -ACCESS_DENIED_ERRSET = frozenset([errno.EPERM, errno.EACCES, - cext.ERROR_ACCESS_DENIED]) -NO_SUCH_SERVICE_ERRSET = frozenset([cext.ERROR_INVALID_NAME, - cext.ERROR_SERVICE_DOES_NOT_EXIST]) -HAS_PROC_IO_PRIORITY = hasattr(cext, "proc_io_priority_get") - +ERROR_PARTIAL_COPY = 299 +PYPY = '__pypy__' in sys.builtin_module_names if enum is None: AF_LINK = -1 @@ -119,6 +116,19 @@ if enum is not None: globals().update(Priority.__members__) +if enum is None: + IOPRIO_VERYLOW = 0 + IOPRIO_LOW = 1 + IOPRIO_NORMAL = 2 + IOPRIO_HIGH = 3 +else: + class IOPriority(enum.IntEnum): + IOPRIO_VERYLOW = 0 + IOPRIO_LOW = 1 + IOPRIO_NORMAL = 2 + IOPRIO_HIGH = 3 + globals().update(IOPriority.__members__) + pinfo_map = dict( num_handles=0, ctx_switches=1, @@ -188,7 +198,8 @@ def convert_dos_path(s): """ rawdrive = '\\'.join(s.split('\\')[:3]) driveletter = cext.win32_QueryDosDevice(rawdrive) - return os.path.join(driveletter, s[len(rawdrive):]) + remainder = s[len(rawdrive):] + return os.path.join(driveletter, remainder) def py2_strencode(s): @@ -204,6 +215,11 @@ def py2_strencode(s): return s.encode(ENCODING, ENCODING_ERRS) +@memoize +def getpagesize(): + return cext.getpagesize() + + # ===================================================================== # --- memory # ===================================================================== @@ -310,6 +326,23 @@ def cpu_freq(): return [_common.scpufreq(float(curr), min_, float(max_))] +_loadavg_inititialized = False + + +def getloadavg(): + """Return the number of 
processes in the system run queue averaged + over the last 1, 5, and 15 minutes respectively as a tuple""" + global _loadavg_inititialized + + if not _loadavg_inititialized: + cext.init_loadavg_counter() + _loadavg_inititialized = True + + # Drop to 2 decimal points which is what Linux does + raw_loads = cext.getloadavg() + return tuple([round(load, 2) for load in raw_loads]) + + # ===================================================================== # --- network # ===================================================================== @@ -327,17 +360,8 @@ def net_connections(kind, _pid=-1): ret = set() for item in rawlist: fd, fam, type, laddr, raddr, status, pid = item - if laddr: - laddr = _common.addr(*laddr) - if raddr: - raddr = _common.addr(*raddr) - status = TCP_STATUSES[status] - fam = sockfam_to_enum(fam) - type = socktype_to_enum(type) - if _pid == -1: - nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid) - else: - nt = _common.pconn(fd, fam, type, laddr, raddr, status) + nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, TCP_STATUSES, + pid=pid if _pid == -1 else None) ret.add(nt) return list(ret) @@ -502,14 +526,14 @@ class WindowsService(object): """ try: yield - except WindowsError as err: - if err.errno in ACCESS_DENIED_ERRSET: + except OSError as err: + if is_permission_err(err): raise AccessDenied( pid=None, name=self._name, msg="service %r is not querable (not enough privileges)" % self._name) - elif err.errno in NO_SUCH_SERVICE_ERRSET or \ - err.winerror in NO_SUCH_SERVICE_ERRSET: + elif err.winerror in (cext.ERROR_INVALID_NAME, + cext.ERROR_SERVICE_DOES_NOT_EXIST): raise NoSuchProcess( pid=None, name=self._name, msg="service %r does not exist)" % self._name) @@ -626,27 +650,68 @@ pid_exists = cext.pid_exists ppid_map = cext.ppid_map # used internally by Process.children() +def is_permission_err(exc): + """Return True if this is a permission error.""" + assert isinstance(exc, OSError), exc + # On Python 2 OSError doesn't always have 'winerror'. Sometimes + # it does, in which case the original exception was WindowsError + # (which is a subclass of OSError). + return exc.errno in (errno.EPERM, errno.EACCES) or \ + getattr(exc, "winerror", -1) in (cext.ERROR_ACCESS_DENIED, + cext.ERROR_PRIVILEGE_NOT_HELD) + + +def convert_oserror(exc, pid=None, name=None): + """Convert OSError into NoSuchProcess or AccessDenied.""" + assert isinstance(exc, OSError), exc + if is_permission_err(exc): + return AccessDenied(pid=pid, name=name) + if exc.errno == errno.ESRCH: + return NoSuchProcess(pid=pid, name=name) + raise exc + + def wrap_exceptions(fun): - """Decorator which translates bare OSError and WindowsError - exceptions into NoSuchProcess and AccessDenied. - """ + """Decorator which converts OSError into NoSuchProcess or AccessDenied.""" @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - raise AccessDenied(self.pid, self._name) - if err.errno == errno.ESRCH: - raise NoSuchProcess(self.pid, self._name) - raise + raise convert_oserror(err, pid=self.pid, name=self._name) + return wrapper + + +def retry_error_partial_copy(fun): + """Workaround for https://github.com/giampaolo/psutil/issues/875. 
+ See: https://stackoverflow.com/questions/4457745#4457745 + """ + @functools.wraps(fun) + def wrapper(self, *args, **kwargs): + delay = 0.0001 + times = 33 + for x in range(times): # retries for roughly 1 second + try: + return fun(self, *args, **kwargs) + except WindowsError as _: + err = _ + if err.winerror == ERROR_PARTIAL_COPY: + time.sleep(delay) + delay = min(delay * 2, 0.04) + continue + else: + raise + else: + msg = "%s retried %s times, converted to AccessDenied as it's " \ + "still returning %r" % (fun, times, err) + raise AccessDenied(pid=self.pid, name=self._name, msg=msg) return wrapper class Process(object): """Wrapper class around underlying C implementation.""" - __slots__ = ["pid", "_name", "_ppid"] + __slots__ = ["pid", "_name", "_ppid", "_cache"] def __init__(self, pid): self.pid = pid @@ -656,13 +721,15 @@ class Process(object): # --- oneshot() stuff def oneshot_enter(self): - self.oneshot_info.cache_activate() + self._proc_info.cache_activate(self) + self.exe.cache_activate(self) def oneshot_exit(self): - self.oneshot_info.cache_deactivate() + self._proc_info.cache_deactivate(self) + self.exe.cache_deactivate(self) @memoize_when_activated - def oneshot_info(self): + def _proc_info(self): """Return multiple information about this process as a raw tuple. """ @@ -670,7 +737,6 @@ class Process(object): assert len(ret) == len(pinfo_map) return ret - @wrap_exceptions def name(self): """Return process name, which on Windows is always the final part of the executable. @@ -679,37 +745,53 @@ class Process(object): # and process-hacker. if self.pid == 0: return "System Idle Process" - elif self.pid == 4: + if self.pid == 4: return "System" - else: - try: - # Note: this will fail with AD for most PIDs owned - # by another user but it's faster. - return py2_strencode(os.path.basename(self.exe())) - except AccessDenied: - return py2_strencode(cext.proc_name(self.pid)) + return os.path.basename(self.exe()) @wrap_exceptions + @memoize_when_activated def exe(self): - # Note: os.path.exists(path) may return False even if the file - # is there, see: - # http://stackoverflow.com/questions/3112546/os-path-exists-lies - - # see https://github.com/giampaolo/psutil/issues/414 - # see https://github.com/giampaolo/psutil/issues/528 - if self.pid in (0, 4): - raise AccessDenied(self.pid, self._name) - return py2_strencode(convert_dos_path(cext.proc_exe(self.pid))) + if PYPY: + try: + exe = cext.proc_exe(self.pid) + except WindowsError as err: + # 24 = ERROR_TOO_MANY_OPEN_FILES. Not sure why this happens + # (perhaps PyPy's JIT delaying garbage collection of files?). + if err.errno == 24: + debug("%r forced into AccessDenied" % err) + raise AccessDenied(self.pid, self._name) + raise + else: + exe = cext.proc_exe(self.pid) + if not PY3: + exe = py2_strencode(exe) + if exe.startswith('\\'): + return convert_dos_path(exe) + return exe # May be "Registry", "MemCompression", ... 
@wrap_exceptions + @retry_error_partial_copy def cmdline(self): - ret = cext.proc_cmdline(self.pid) + if cext.WINVER >= cext.WINDOWS_8_1: + # PEB method detects cmdline changes but requires more + # privileges: https://github.com/giampaolo/psutil/pull/1398 + try: + ret = cext.proc_cmdline(self.pid, use_peb=True) + except OSError as err: + if is_permission_err(err): + ret = cext.proc_cmdline(self.pid, use_peb=False) + else: + raise + else: + ret = cext.proc_cmdline(self.pid, use_peb=True) if PY3: return ret else: return [py2_strencode(s) for s in ret] @wrap_exceptions + @retry_error_partial_copy def environ(self): ustr = cext.proc_environ(self.pid) if ustr and not PY3: @@ -726,10 +808,10 @@ class Process(object): try: return cext.proc_memory_info(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: + if is_permission_err(err): # TODO: the C ext can probably be refactored in order # to get this from cext.proc_info() - info = self.oneshot_info() + info = self._proc_info() return ( info[pinfo_map['num_page_faults']], info[pinfo_map['peak_wset']], @@ -758,6 +840,7 @@ class Process(object): def memory_full_info(self): basic_mem = self.memory_info() uss = cext.proc_memory_uss(self.pid) + uss *= getpagesize() return pfullmem(*basic_mem + (uss, )) def memory_maps(self): @@ -766,16 +849,11 @@ class Process(object): except OSError as err: # XXX - can't use wrap_exceptions decorator as we're # returning a generator; probably needs refactoring. - if err.errno in ACCESS_DENIED_ERRSET: - raise AccessDenied(self.pid, self._name) - if err.errno == errno.ESRCH: - raise NoSuchProcess(self.pid, self._name) - raise + raise convert_oserror(err, self.pid, self._name) else: for addr, perm, path, rss in raw: path = convert_dos_path(path) if not PY3: - assert isinstance(path, unicode), type(path) path = py2_strencode(path) addr = hex(addr) yield (addr, perm, path, rss) @@ -786,7 +864,16 @@ class Process(object): @wrap_exceptions def send_signal(self, sig): - os.kill(self.pid, sig) + if sig == signal.SIGTERM: + cext.proc_kill(self.pid) + # py >= 2.7 + elif sig in (getattr(signal, "CTRL_C_EVENT", object()), + getattr(signal, "CTRL_BREAK_EVENT", object())): + os.kill(self.pid, sig) + else: + raise ValueError( + "only SIGTERM, CTRL_C_EVENT and CTRL_BREAK_EVENT signals " + "are supported on Windows") @wrap_exceptions def wait(self, timeout=None): @@ -840,19 +927,19 @@ class Process(object): @wrap_exceptions def create_time(self): - # special case for kernel process PIDs; return system boot time - if self.pid in (0, 4): - return boot_time() + # Note: proc_times() not put under oneshot() 'cause create_time() + # is already cached by the main Process class. 
try: - return cext.proc_create_time(self.pid) + user, system, created = cext.proc_times(self.pid) + return created except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - return self.oneshot_info()[pinfo_map['create_time']] + if is_permission_err(err): + return self._proc_info()[pinfo_map['create_time']] raise @wrap_exceptions def num_threads(self): - return self.oneshot_info()[pinfo_map['num_threads']] + return self._proc_info()[pinfo_map['num_threads']] @wrap_exceptions def threads(self): @@ -866,26 +953,26 @@ class Process(object): @wrap_exceptions def cpu_times(self): try: - user, system = cext.proc_cpu_times(self.pid) + user, system, created = cext.proc_times(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - info = self.oneshot_info() - user = info[pinfo_map['user_time']] - system = info[pinfo_map['kernel_time']] - else: + if not is_permission_err(err): raise + info = self._proc_info() + user = info[pinfo_map['user_time']] + system = info[pinfo_map['kernel_time']] # Children user/system times are not retrievable (set to 0). return _common.pcputimes(user, system, 0.0, 0.0) @wrap_exceptions def suspend(self): - return cext.proc_suspend(self.pid) + cext.proc_suspend_or_resume(self.pid, True) @wrap_exceptions def resume(self): - return cext.proc_resume(self.pid) + cext.proc_suspend_or_resume(self.pid, False) @wrap_exceptions + @retry_error_partial_copy def cwd(self): if self.pid in (0, 4): raise AccessDenied(self.pid, self._name) @@ -928,39 +1015,38 @@ class Process(object): def nice_set(self, value): return cext.proc_priority_set(self.pid, value) - # available on Windows >= Vista - if HAS_PROC_IO_PRIORITY: - @wrap_exceptions - def ionice_get(self): - return cext.proc_io_priority_get(self.pid) + @wrap_exceptions + def ionice_get(self): + ret = cext.proc_io_priority_get(self.pid) + if enum is not None: + ret = IOPriority(ret) + return ret - @wrap_exceptions - def ionice_set(self, value, _): - if _: - raise TypeError("set_proc_ionice() on Windows takes only " - "1 argument (2 given)") - if value not in (2, 1, 0): - raise ValueError("value must be 2 (normal), 1 (low) or 0 " - "(very low); got %r" % value) - return cext.proc_io_priority_set(self.pid, value) + @wrap_exceptions + def ionice_set(self, ioclass, value): + if value: + raise TypeError("value argument not accepted on Windows") + if ioclass not in (IOPRIO_VERYLOW, IOPRIO_LOW, IOPRIO_NORMAL, + IOPRIO_HIGH): + raise ValueError("%s is not a valid priority" % ioclass) + cext.proc_io_priority_set(self.pid, ioclass) @wrap_exceptions def io_counters(self): try: ret = cext.proc_io_counters(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - info = self.oneshot_info() - ret = ( - info[pinfo_map['io_rcount']], - info[pinfo_map['io_wcount']], - info[pinfo_map['io_rbytes']], - info[pinfo_map['io_wbytes']], - info[pinfo_map['io_count_others']], - info[pinfo_map['io_bytes_others']], - ) - else: + if not is_permission_err(err): raise + info = self._proc_info() + ret = ( + info[pinfo_map['io_rcount']], + info[pinfo_map['io_wcount']], + info[pinfo_map['io_rbytes']], + info[pinfo_map['io_wbytes']], + info[pinfo_map['io_count_others']], + info[pinfo_map['io_bytes_others']], + ) return pio(*ret) @wrap_exceptions @@ -1008,12 +1094,12 @@ class Process(object): try: return cext.proc_num_handles(self.pid) except OSError as err: - if err.errno in ACCESS_DENIED_ERRSET: - return self.oneshot_info()[pinfo_map['num_handles']] + if is_permission_err(err): + return self._proc_info()[pinfo_map['num_handles']] 
raise @wrap_exceptions def num_ctx_switches(self): - ctx_switches = self.oneshot_info()[pinfo_map['ctx_switches']] + ctx_switches = self._proc_info()[pinfo_map['ctx_switches']] # only voluntary ctx switches are supported return _common.pctxsw(ctx_switches, 0) diff --git a/server/www/packages/packages-linux/x64/pyasn1/__init__.py b/server/www/packages/packages-linux/x64/pyasn1/__init__.py index e2e4c5c..5a56a70 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/__init__.py +++ b/server/www/packages/packages-linux/x64/pyasn1/__init__.py @@ -1,7 +1,7 @@ import sys # https://www.python.org/dev/peps/pep-0396/ -__version__ = '0.4.4' +__version__ = '0.4.8' if sys.version_info[:2] < (2, 4): raise RuntimeError('PyASN1 requires Python 2.4 or later') diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/decoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/decoder.py index a27b3e0..5ff485f 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/decoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/decoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import debug @@ -18,6 +18,8 @@ from pyasn1.type import useful __all__ = ['decode'] +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER) + noValue = base.noValue @@ -70,6 +72,10 @@ class ExplicitTagDecoder(AbstractSimpleDecoder): value, _ = decodeFun(head, asn1Spec, tagSet, length, **options) + if LOG: + LOG('explicit tag container carries %d octets of trailing payload ' + '(will be lost!): %s' % (len(_), debug.hexdump(_))) + return value, tail def indefLenValueDecoder(self, substrate, asn1Spec, @@ -120,7 +126,8 @@ class BooleanDecoder(IntegerDecoder): protoComponent = univ.Boolean(0) def _createComponent(self, asn1Spec, tagSet, value, **options): - return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0, **options) + return IntegerDecoder._createComponent( + self, asn1Spec, tagSet, value and 1 or 0, **options) class BitStringDecoder(AbstractSimpleDecoder): @@ -134,8 +141,8 @@ class BitStringDecoder(AbstractSimpleDecoder): head, tail = substrate[:length], substrate[length:] if substrateFun: - return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), - substrate, length) + return substrateFun(self._createComponent( + asn1Spec, tagSet, noValue, **options), substrate, length) if not head: raise error.PyAsn1Error('Empty BIT STRING substrate') @@ -148,12 +155,17 @@ class BitStringDecoder(AbstractSimpleDecoder): 'Trailing bits overflow %s' % trailingBits ) - value = self.protoComponent.fromOctetString(head[1:], internalFormat=True, padding=trailingBits) + value = self.protoComponent.fromOctetString( + head[1:], internalFormat=True, padding=trailingBits) return self._createComponent(asn1Spec, tagSet, value, **options), tail if not self.supportConstructedForm: - raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) + raise error.PyAsn1Error('Constructed encoding form prohibited ' + 'at %s' % self.__class__.__name__) + + if LOG: + LOG('assembling constructed serialization') # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector @@ -234,6 +246,9 @@ class OctetStringDecoder(AbstractSimpleDecoder): if not self.supportConstructedForm: raise error.PyAsn1Error('Constructed encoding form 
prohibited at %s' % self.__class__.__name__) + if LOG: + LOG('assembling constructed serialization') + # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector @@ -267,7 +282,9 @@ class OctetStringDecoder(AbstractSimpleDecoder): allowEoo=True, **options) if component is eoo.endOfOctets: break + header += component + else: raise error.SubstrateUnderrunError( 'No EOO seen before substrate ends' @@ -374,59 +391,90 @@ class RealDecoder(AbstractSimpleDecoder): if fo & 0x80: # binary encoding if not head: raise error.PyAsn1Error("Incomplete floating-point value") + + if LOG: + LOG('decoding binary encoded REAL') + n = (fo & 0x03) + 1 + if n == 4: n = oct2int(head[0]) head = head[1:] + eo, head = head[:n], head[n:] + if not eo or not head: raise error.PyAsn1Error('Real exponent screwed') + e = oct2int(eo[0]) & 0x80 and -1 or 0 + while eo: # exponent e <<= 8 e |= oct2int(eo[0]) eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 e *= 3 + elif b == 2: # encbase = 16 e *= 4 p = 0 + while head: # value p <<= 8 p |= oct2int(head[0]) head = head[1:] + if fo & 0x40: # sign bit p = -p + sf = fo >> 2 & 0x03 # scale bits p *= 2 ** sf value = (p, 2, e) + elif fo & 0x40: # infinite value + if LOG: + LOG('decoding infinite REAL') + value = fo & 0x01 and '-inf' or 'inf' + elif fo & 0xc0 == 0: # character encoding if not head: raise error.PyAsn1Error("Incomplete floating-point value") + + if LOG: + LOG('decoding character encoded REAL') + try: if fo & 0x3 == 0x1: # NR1 value = (int(head), 10, 0) + elif fo & 0x3 == 0x2: # NR2 value = float(head) + elif fo & 0x3 == 0x3: # NR3 value = float(head) + else: raise error.SubstrateUnderrunError( 'Unknown NR (tag %s)' % fo ) + except ValueError: raise error.SubstrateUnderrunError( 'Bad character Real syntax' ) + else: raise error.SubstrateUnderrunError( 'Unknown encoding (tag %s)' % fo ) + return self._createComponent(asn1Spec, tagSet, value, **options), tail @@ -447,10 +495,12 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): def _decodeComponents(self, substrate, tagSet=None, decodeFun=None, **options): components = [] componentTypes = set() + while substrate: component, substrate = decodeFun(substrate, **options) if component is eoo.endOfOctets: break + components.append(component) componentTypes.add(component.tagSet) @@ -460,6 +510,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): # * otherwise -> likely SEQUENCE OF/SET OF if len(componentTypes) > 1: protoComponent = self.protoRecordComponent + else: protoComponent = self.protoSequenceComponent @@ -469,6 +520,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags) ) + if LOG: + LOG('guessed %r container type (pass `asn1Spec` to guide the ' + 'decoder)' % asn1Object) + for idx, component in enumerate(components): asn1Object.setComponentByPosition( idx, component, @@ -490,8 +545,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): if substrateFun is not None: if asn1Spec is not None: asn1Object = asn1Spec.clone() + elif self.protoComponent is not None: asn1Object = self.protoComponent.clone(tagSet=tagSet) + else: asn1Object = self.protoRecordComponent, self.protoSequenceComponent @@ -501,11 +558,16 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): asn1Object, trailing = self._decodeComponents( head, tagSet=tagSet, 
decodeFun=decodeFun, **options ) + if trailing: - raise error.PyAsn1Error('Unused trailing %d octets encountered' % len(trailing)) + if LOG: + LOG('Unused trailing %d octets encountered: %s' % ( + len(trailing), debug.hexdump(trailing))) + return asn1Object, tail asn1Object = asn1Spec.clone() + asn1Object.clear() if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId): @@ -514,21 +576,31 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): isSetType = asn1Spec.typeId == univ.Set.typeId isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault + if LOG: + LOG('decoding %sdeterministic %s type %r chosen by type ID' % ( + not isDeterministic and 'non-' or '', isSetType and 'SET' or '', + asn1Spec)) + seenIndices = set() idx = 0 while head: if not namedTypes: componentType = None + elif isSetType: componentType = namedTypes.tagMapUnique + else: try: if isDeterministic: componentType = namedTypes[idx].asn1Object + elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted: componentType = namedTypes.getTagMapNearPosition(idx) + else: componentType = namedTypes[idx].asn1Object + except IndexError: raise error.PyAsn1Error( 'Excessive components decoded at %r' % (asn1Spec,) @@ -539,6 +611,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): if not isDeterministic and namedTypes: if isSetType: idx = namedTypes.getPositionByType(component.effectiveTagSet) + elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted: idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx) @@ -551,14 +624,25 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): seenIndices.add(idx) idx += 1 + if LOG: + LOG('seen component indices %s' % seenIndices) + if namedTypes: if not namedTypes.requiredComponents.issubset(seenIndices): - raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) + raise error.PyAsn1Error( + 'ASN.1 object %s has uninitialized ' + 'components' % asn1Object.__class__.__name__) if namedTypes.hasOpenTypes: openTypes = options.get('openTypes', {}) + if LOG: + LOG('user-specified open types map:') + + for k, v in openTypes.items(): + LOG('%s -> %r' % (k, v)) + if openTypes or options.get('decodeOpenTypes', False): for idx, namedType in enumerate(namedTypes.namedTypes): @@ -577,27 +661,67 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): except KeyError: + if LOG: + LOG('default open types map of component ' + '"%s.%s" governed by component "%s.%s"' + ':' % (asn1Object.__class__.__name__, + namedType.name, + asn1Object.__class__.__name__, + namedType.openType.name)) + + for k, v in namedType.openType.items(): + LOG('%s -> %r' % (k, v)) + try: openType = namedType.openType[governingValue] except KeyError: + if LOG: + LOG('failed to resolve open type by governing ' + 'value %r' % (governingValue,)) continue - component, rest = decodeFun( - asn1Object.getComponentByPosition(idx).asOctets(), - asn1Spec=openType - ) + if LOG: + LOG('resolved open type %r by governing ' + 'value %r' % (openType, governingValue)) - asn1Object.setComponentByPosition(idx, component) + containerValue = asn1Object.getComponentByPosition(idx) + + if containerValue.typeId in ( + univ.SetOf.typeId, univ.SequenceOf.typeId): + + for pos, containerElement in enumerate( + containerValue): + + component, rest = decodeFun( + containerValue[pos].asOctets(), + asn1Spec=openType, **options + ) + + containerValue[pos] = component + + else: + component, rest = decodeFun( + 
asn1Object.getComponentByPosition(idx).asOctets(), + asn1Spec=openType, **options + ) + + asn1Object.setComponentByPosition(idx, component) else: - asn1Object.verifySizeSpec() + inconsistency = asn1Object.isInconsistent + if inconsistency: + raise inconsistency else: asn1Object = asn1Spec.clone() + asn1Object.clear() componentType = asn1Spec.componentType + if LOG: + LOG('decoding type %r chosen by given `asn1Spec`' % componentType) + idx = 0 while head: @@ -607,6 +731,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): verifyConstraints=False, matchTags=False, matchConstraints=False ) + idx += 1 return asn1Object, tail @@ -621,8 +746,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): if substrateFun is not None: if asn1Spec is not None: asn1Object = asn1Spec.clone() + elif self.protoComponent is not None: asn1Object = self.protoComponent.clone(tagSet=tagSet) + else: asn1Object = self.protoRecordComponent, self.protoSequenceComponent @@ -630,10 +757,12 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): if asn1Spec is None: return self._decodeComponents( - substrate, tagSet=tagSet, decodeFun=decodeFun, allowEoo=True, **options + substrate, tagSet=tagSet, decodeFun=decodeFun, + **dict(options, allowEoo=True) ) asn1Object = asn1Spec.clone() + asn1Object.clear() if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId): @@ -642,21 +771,31 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): isSetType = asn1Object.typeId == univ.Set.typeId isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault + if LOG: + LOG('decoding %sdeterministic %s type %r chosen by type ID' % ( + not isDeterministic and 'non-' or '', isSetType and 'SET' or '', + asn1Spec)) + seenIndices = set() idx = 0 while substrate: if len(namedTypes) <= idx: asn1Spec = None + elif isSetType: asn1Spec = namedTypes.tagMapUnique + else: try: if isDeterministic: asn1Spec = namedTypes[idx].asn1Object + elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted: asn1Spec = namedTypes.getTagMapNearPosition(idx) + else: asn1Spec = namedTypes[idx].asn1Object + except IndexError: raise error.PyAsn1Error( 'Excessive components decoded at %r' % (asn1Object,) @@ -686,13 +825,22 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): 'No EOO seen before substrate ends' ) + if LOG: + LOG('seen component indices %s' % seenIndices) + if namedTypes: if not namedTypes.requiredComponents.issubset(seenIndices): raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) - if namedTypes.hasOpenTypes: + if namedTypes.hasOpenTypes: - openTypes = options.get('openTypes', None) + openTypes = options.get('openTypes', {}) + + if LOG: + LOG('user-specified open types map:') + + for k, v in openTypes.items(): + LOG('%s -> %r' % (k, v)) if openTypes or options.get('decodeOpenTypes', False): @@ -712,28 +860,68 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): except KeyError: + if LOG: + LOG('default open types map of component ' + '"%s.%s" governed by component "%s.%s"' + ':' % (asn1Object.__class__.__name__, + namedType.name, + asn1Object.__class__.__name__, + namedType.openType.name)) + + for k, v in namedType.openType.items(): + LOG('%s -> %r' % (k, v)) + try: openType = namedType.openType[governingValue] except KeyError: + if LOG: + LOG('failed to resolve open type by governing ' + 'value %r' % (governingValue,)) continue - component, rest = decodeFun( - 
asn1Object.getComponentByPosition(idx).asOctets(), - asn1Spec=openType, allowEoo=True - ) + if LOG: + LOG('resolved open type %r by governing ' + 'value %r' % (openType, governingValue)) - if component is not eoo.endOfOctets: - asn1Object.setComponentByPosition(idx, component) + containerValue = asn1Object.getComponentByPosition(idx) + + if containerValue.typeId in ( + univ.SetOf.typeId, univ.SequenceOf.typeId): + + for pos, containerElement in enumerate( + containerValue): + + component, rest = decodeFun( + containerValue[pos].asOctets(), + asn1Spec=openType, **dict(options, allowEoo=True) + ) + + containerValue[pos] = component + + else: + component, rest = decodeFun( + asn1Object.getComponentByPosition(idx).asOctets(), + asn1Spec=openType, **dict(options, allowEoo=True) + ) + + if component is not eoo.endOfOctets: + asn1Object.setComponentByPosition(idx, component) else: - asn1Object.verifySizeSpec() + inconsistency = asn1Object.isInconsistent + if inconsistency: + raise inconsistency else: asn1Object = asn1Spec.clone() + asn1Object.clear() componentType = asn1Spec.componentType + if LOG: + LOG('decoding type %r chosen by given `asn1Spec`' % componentType) + idx = 0 while substrate: @@ -747,7 +935,9 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): verifyConstraints=False, matchTags=False, matchConstraints=False ) + idx += 1 + else: raise error.SubstrateUnderrunError( 'No EOO seen before substrate ends' @@ -794,18 +984,25 @@ class ChoiceDecoder(AbstractConstructedDecoder): if asn1Spec is None: asn1Object = self.protoComponent.clone(tagSet=tagSet) + else: asn1Object = asn1Spec.clone() if substrateFun: return substrateFun(asn1Object, substrate, length) - if asn1Object.tagSet == tagSet: # explicitly tagged Choice + if asn1Object.tagSet == tagSet: + if LOG: + LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,)) + component, head = decodeFun( head, asn1Object.componentTagMap, **options ) else: + if LOG: + LOG('decoding %s as untagged CHOICE' % (tagSet,)) + component, head = decodeFun( head, asn1Object.componentTagMap, tagSet, length, state, **options @@ -813,6 +1010,9 @@ class ChoiceDecoder(AbstractConstructedDecoder): effectiveTagSet = component.effectiveTagSet + if LOG: + LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet)) + asn1Object.setComponentByType( effectiveTagSet, component, verifyConstraints=False, @@ -834,18 +1034,26 @@ class ChoiceDecoder(AbstractConstructedDecoder): if substrateFun: return substrateFun(asn1Object, substrate, length) - if asn1Object.tagSet == tagSet: # explicitly tagged Choice + if asn1Object.tagSet == tagSet: + if LOG: + LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,)) + component, substrate = decodeFun( substrate, asn1Object.componentType.tagMapUnique, **options ) + # eat up EOO marker eooMarker, substrate = decodeFun( substrate, allowEoo=True, **options ) + if eooMarker is not eoo.endOfOctets: raise error.PyAsn1Error('No EOO seen before substrate ends') else: + if LOG: + LOG('decoding %s as untagged CHOICE' % (tagSet,)) + component, substrate = decodeFun( substrate, asn1Object.componentType.tagMapUnique, tagSet, length, state, **options @@ -853,6 +1061,9 @@ class ChoiceDecoder(AbstractConstructedDecoder): effectiveTagSet = component.effectiveTagSet + if LOG: + LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet)) + asn1Object.setComponentByType( effectiveTagSet, component, verifyConstraints=False, @@ -870,13 +1081,25 @@ class AnyDecoder(AbstractSimpleDecoder): 
tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): - if asn1Spec is None or asn1Spec is not None and tagSet != asn1Spec.tagSet: + if asn1Spec is None: + isUntagged = True + + elif asn1Spec.__class__ is tagmap.TagMap: + isUntagged = tagSet not in asn1Spec.tagMap + + else: + isUntagged = tagSet != asn1Spec.tagSet + + if isUntagged: fullSubstrate = options['fullSubstrate'] # untagged Any container, recover inner header substrate length += len(fullSubstrate) - len(substrate) substrate = fullSubstrate + if LOG: + LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate)) + if substrateFun: return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), substrate, length) @@ -889,15 +1112,31 @@ class AnyDecoder(AbstractSimpleDecoder): tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): - if asn1Spec is not None and tagSet == asn1Spec.tagSet: + if asn1Spec is None: + isTagged = False + + elif asn1Spec.__class__ is tagmap.TagMap: + isTagged = tagSet in asn1Spec.tagMap + + else: + isTagged = tagSet == asn1Spec.tagSet + + if isTagged: # tagged Any type -- consume header substrate header = null + + if LOG: + LOG('decoding as tagged ANY') + else: fullSubstrate = options['fullSubstrate'] # untagged Any, recover header substrate header = fullSubstrate[:-len(substrate)] + if LOG: + LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header)) + # Any components do not inherit initial tag asn1Spec = self.protoComponent @@ -905,6 +1144,9 @@ class AnyDecoder(AbstractSimpleDecoder): asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options) return substrateFun(asn1Object, header + substrate, length + len(header)) + if LOG: + LOG('assembling constructed serialization') + # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector @@ -914,13 +1156,17 @@ class AnyDecoder(AbstractSimpleDecoder): allowEoo=True, **options) if component is eoo.endOfOctets: break + header += component + else: raise error.SubstrateUnderrunError( 'No EOO seen before substrate ends' ) + if substrateFun: return header, substrate + else: return self._createComponent(asn1Spec, tagSet, header, **options), substrate @@ -1045,7 +1291,7 @@ for typeDecoder in tagMap.values(): class Decoder(object): defaultErrorState = stErrorCondition - # defaultErrorState = stDumpRawValue + #defaultErrorState = stDumpRawValue defaultRawDecoder = AnyDecoder() supportIndefLength = True @@ -1063,21 +1309,16 @@ class Decoder(object): decodeFun=None, substrateFun=None, **options): - if debug.logger & debug.flagDecoder: - logger = debug.logger - else: - logger = None - - if logger: - logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + if LOG: + LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) allowEoo = options.pop('allowEoo', False) # Look for end-of-octets sentinel if allowEoo and self.supportIndefLength: if substrate[:2] == self.__eooSentinel: - if logger: - logger('end-of-octets sentinel found') + if LOG: + LOG('end-of-octets sentinel found') return eoo.endOfOctets, substrate[2:] value = noValue @@ -1090,26 +1331,32 @@ class Decoder(object): fullSubstrate = substrate while state is not stStop: + if state is stDecodeTag: if not substrate: raise 
error.SubstrateUnderrunError( 'Short octet stream on tag decoding' ) + # Decode tag isShortTag = True firstOctet = substrate[0] substrate = substrate[1:] + try: lastTag = tagCache[firstOctet] + except KeyError: integerTag = oct2int(firstOctet) tagClass = integerTag & 0xC0 tagFormat = integerTag & 0x20 tagId = integerTag & 0x1F + if tagId == 0x1F: isShortTag = False lengthOctetIdx = 0 tagId = 0 + try: while True: integerTag = oct2int(substrate[lengthOctetIdx]) @@ -1118,42 +1365,55 @@ class Decoder(object): tagId |= (integerTag & 0x7F) if not integerTag & 0x80: break + substrate = substrate[lengthOctetIdx:] + except IndexError: raise error.SubstrateUnderrunError( 'Short octet stream on long tag decoding' ) + lastTag = tag.Tag( tagClass=tagClass, tagFormat=tagFormat, tagId=tagId ) + if isShortTag: # cache short tags tagCache[firstOctet] = lastTag + if tagSet is None: if isShortTag: try: tagSet = tagSetCache[firstOctet] + except KeyError: # base tag not recovered tagSet = tag.TagSet((), lastTag) tagSetCache[firstOctet] = tagSet else: tagSet = tag.TagSet((), lastTag) + else: tagSet = lastTag + tagSet + state = stDecodeLength - if logger: - logger('tag decoded into %s, decoding length' % tagSet) + + if LOG: + LOG('tag decoded into %s, decoding length' % tagSet) + if state is stDecodeLength: # Decode length if not substrate: raise error.SubstrateUnderrunError( 'Short octet stream on length decoding' ) + firstOctet = oct2int(substrate[0]) + if firstOctet < 128: size = 1 length = firstOctet + elif firstOctet > 128: size = firstOctet & 0x7F # encoded in size bytes @@ -1164,28 +1424,36 @@ class Decoder(object): raise error.SubstrateUnderrunError( '%s<%s at %s' % (size, len(encodedLength), tagSet) ) + length = 0 for lengthOctet in encodedLength: length <<= 8 length |= lengthOctet size += 1 + else: size = 1 length = -1 substrate = substrate[size:] + if length == -1: if not self.supportIndefLength: raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + else: if len(substrate) < length: raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate))) + state = stGetValueDecoder - if logger: - logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) + + if LOG: + LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) + if state is stGetValueDecoder: if asn1Spec is None: state = stGetValueDecoderByTag + else: state = stGetValueDecoderByAsn1Spec # @@ -1207,41 +1475,55 @@ class Decoder(object): if state is stGetValueDecoderByTag: try: concreteDecoder = tagMap[tagSet] + except KeyError: concreteDecoder = None + if concreteDecoder: state = stDecodeValue + else: try: concreteDecoder = tagMap[tagSet[:1]] + except KeyError: concreteDecoder = None + if concreteDecoder: state = stDecodeValue else: state = stTryAsExplicitTag - if logger: - logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag')) + + if LOG: + LOG('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag')) debug.scope.push(concreteDecoder is None and '?' 
or concreteDecoder.protoComponent.__class__.__name__) + if state is stGetValueDecoderByAsn1Spec: + if asn1Spec.__class__ is tagmap.TagMap: try: chosenSpec = asn1Spec[tagSet] + except KeyError: chosenSpec = None - if logger: - logger('candidate ASN.1 spec is a map of:') + + if LOG: + LOG('candidate ASN.1 spec is a map of:') + for firstOctet, v in asn1Spec.presentTypes.items(): - logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) + LOG(' %s -> %s' % (firstOctet, v.__class__.__name__)) + if asn1Spec.skipTypes: - logger('but neither of: ') + LOG('but neither of: ') for firstOctet, v in asn1Spec.skipTypes.items(): - logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) - logger('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet)) + LOG(' %s -> %s' % (firstOctet, v.__class__.__name__)) + LOG('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet)) + elif tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap: chosenSpec = asn1Spec - if logger: - logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) + if LOG: + LOG('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) + else: chosenSpec = None @@ -1249,29 +1531,38 @@ class Decoder(object): try: # ambiguous type or just faster codec lookup concreteDecoder = typeMap[chosenSpec.typeId] - if logger: - logger('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,)) + + if LOG: + LOG('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,)) + except KeyError: # use base type for codec lookup to recover untagged types baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag) try: # base type or tagged subtype concreteDecoder = tagMap[baseTagSet] - if logger: - logger('value decoder chosen by base %s' % (baseTagSet,)) + + if LOG: + LOG('value decoder chosen by base %s' % (baseTagSet,)) + except KeyError: concreteDecoder = None + if concreteDecoder: asn1Spec = chosenSpec state = stDecodeValue + else: state = stTryAsExplicitTag + else: concreteDecoder = None state = stTryAsExplicitTag - if logger: - logger('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag')) + + if LOG: + LOG('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag')) debug.scope.push(chosenSpec is None and '?' 
or chosenSpec.__class__.__name__) + if state is stDecodeValue: if not options.get('recursiveFlag', True) and not substrateFun: # deprecate this substrateFun = lambda a, b, c: (a, b[:c]) @@ -1285,6 +1576,7 @@ class Decoder(object): self, substrateFun, **options ) + else: value, substrate = concreteDecoder.valueDecoder( substrate, asn1Spec, @@ -1293,33 +1585,44 @@ class Decoder(object): **options ) - if logger: - logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '')) + if LOG: + LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '')) state = stStop break + if state is stTryAsExplicitTag: - if tagSet and tagSet[0].tagFormat == tag.tagFormatConstructed and tagSet[0].tagClass != tag.tagClassUniversal: + if (tagSet and + tagSet[0].tagFormat == tag.tagFormatConstructed and + tagSet[0].tagClass != tag.tagClassUniversal): # Assume explicit tagging concreteDecoder = explicitTagDecoder state = stDecodeValue + else: concreteDecoder = None state = self.defaultErrorState - if logger: - logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as failure')) + + if LOG: + LOG('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as failure')) + if state is stDumpRawValue: concreteDecoder = self.defaultRawDecoder - if logger: - logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + + if LOG: + LOG('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + state = stDecodeValue + if state is stErrorCondition: raise error.PyAsn1Error( '%s not in asn1Spec: %r' % (tagSet, asn1Spec) ) - if logger: + + if LOG: debug.scope.pop() - logger('decoder left scope %s, call completed' % debug.scope) + LOG('decoder left scope %s, call completed' % debug.scope) + return value, substrate @@ -1349,7 +1652,7 @@ class Decoder(object): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError #: On decoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/encoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/encoder.py index 0094b22..778aa86 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/encoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/encoder.py @@ -1,9 +1,11 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # +import sys + from pyasn1 import debug from pyasn1 import error from pyasn1.codec.ber import eoo @@ -17,6 +19,8 @@ from pyasn1.type import useful __all__ = ['encode'] +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER) + class AbstractItemEncoder(object): supportIndefLenMode = True @@ -31,29 +35,39 @@ class AbstractItemEncoder(object): encodedTag = tagClass | tagFormat if isConstructed: encodedTag |= tag.tagFormatConstructed + if tagId < 31: return encodedTag | tagId, + else: substrate = tagId & 0x7f, + tagId >>= 7 + while tagId: substrate = (0x80 | (tagId & 0x7f),) + substrate tagId >>= 7 + return (encodedTag | 0x1F,) + substrate def encodeLength(self, length, defMode): if not defMode and self.supportIndefLenMode: return (0x80,) + if length < 0x80: return length, + else: substrate = () while length: substrate = (length & 0xff,) + substrate length >>= 8 + substrateLen = len(substrate) + if substrateLen > 126: raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen) + return (0x80 | substrateLen,) + substrate def encodeValue(self, value, asn1Spec, encodeFun, **options): @@ -75,26 +89,51 @@ class AbstractItemEncoder(object): defMode = options.get('defMode', True) + substrate = null + for idx, singleTag in enumerate(tagSet.superTags): defModeOverride = defMode # base tag? if not idx: - substrate, isConstructed, isOctets = self.encodeValue( - value, asn1Spec, encodeFun, **options - ) + try: + substrate, isConstructed, isOctets = self.encodeValue( + value, asn1Spec, encodeFun, **options + ) + + except error.PyAsn1Error: + exc = sys.exc_info() + raise error.PyAsn1Error( + 'Error encoding %r: %s' % (value, exc[1])) + + if LOG: + LOG('encoded %svalue %s into %s' % ( + isConstructed and 'constructed ' or '', value, substrate + )) if not substrate and isConstructed and options.get('ifNotEmpty', False): return substrate - # primitive form implies definite mode if not isConstructed: defModeOverride = True + if LOG: + LOG('overridden encoding mode into definitive for primitive type') + header = self.encodeTag(singleTag, isConstructed) + + if LOG: + LOG('encoded %stag %s into %s' % ( + isConstructed and 'constructed ' or '', + singleTag, debug.hexdump(ints2octs(header)))) + header += self.encodeLength(len(substrate), defModeOverride) + if LOG: + LOG('encoded %s octets (tag + payload) into %s' % ( + len(substrate), debug.hexdump(ints2octs(header)))) + if isOctets: substrate = ints2octs(header) + substrate @@ -131,6 +170,11 @@ class IntegerEncoder(AbstractItemEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): if value == 0: + if LOG: + LOG('encoding %spayload for zero INTEGER' % ( + self.supportCompactZero and 'no ' or '' + )) + # de-facto way to encode zero if self.supportCompactZero: return (), False, False @@ -157,11 +201,15 @@ class BitStringEncoder(AbstractItemEncoder): substrate = alignedValue.asOctets() return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True + if LOG: + LOG('encoding into up to %s-octet chunks' % maxChunkSize) + baseTag = value.tagSet.baseTag # strip off explicit tags if baseTag: tagSet = tag.TagSet(baseTag, baseTag) + else: tagSet = tag.TagSet() @@ -195,44 +243,47 @@ class OctetStringEncoder(AbstractItemEncoder): if not maxChunkSize or len(substrate) <= maxChunkSize: return substrate, False, True - else: + if LOG: + LOG('encoding into up to %s-octet chunks' % maxChunkSize) - # strip 
off explicit tags for inner chunks + # strip off explicit tags for inner chunks - if asn1Spec is None: - baseTag = value.tagSet.baseTag + if asn1Spec is None: + baseTag = value.tagSet.baseTag - # strip off explicit tags - if baseTag: - tagSet = tag.TagSet(baseTag, baseTag) - else: - tagSet = tag.TagSet() + # strip off explicit tags + if baseTag: + tagSet = tag.TagSet(baseTag, baseTag) - asn1Spec = value.clone(tagSet=tagSet) + else: + tagSet = tag.TagSet() - elif not isOctetsType(value): - baseTag = asn1Spec.tagSet.baseTag + asn1Spec = value.clone(tagSet=tagSet) - # strip off explicit tags - if baseTag: - tagSet = tag.TagSet(baseTag, baseTag) - else: - tagSet = tag.TagSet() + elif not isOctetsType(value): + baseTag = asn1Spec.tagSet.baseTag - asn1Spec = asn1Spec.clone(tagSet=tagSet) + # strip off explicit tags + if baseTag: + tagSet = tag.TagSet(baseTag, baseTag) - pos = 0 - substrate = null + else: + tagSet = tag.TagSet() - while True: - chunk = value[pos:pos + maxChunkSize] - if not chunk: - break + asn1Spec = asn1Spec.clone(tagSet=tagSet) - substrate += encodeFun(chunk, asn1Spec, **options) - pos += maxChunkSize + pos = 0 + substrate = null - return substrate, True, True + while True: + chunk = value[pos:pos + maxChunkSize] + if not chunk: + break + + substrate += encodeFun(chunk, asn1Spec, **options) + pos += maxChunkSize + + return substrate, True, True class NullEncoder(AbstractItemEncoder): @@ -268,8 +319,10 @@ class ObjectIdentifierEncoder(AbstractItemEncoder): oid = (second + 80,) + oid[2:] else: raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) + elif first == 2: oid = (second + 80,) + oid[2:] + else: raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) @@ -280,15 +333,19 @@ class ObjectIdentifierEncoder(AbstractItemEncoder): if 0 <= subOid <= 127: # Optimize for the common case octets += (subOid,) + elif subOid > 127: # Pack large Sub-Object IDs res = (subOid & 0x7f,) subOid >>= 7 + while subOid: res = (0x80 | (subOid & 0x7f),) + res subOid >>= 7 + # Add packed Sub-Object ID to resulted Object ID octets += res + else: raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value)) @@ -304,12 +361,16 @@ class RealEncoder(AbstractItemEncoder): ms, es = 1, 1 if m < 0: ms = -1 # mantissa sign + if e < 0: - es = -1 # exponenta sign + es = -1 # exponent sign + m *= ms + if encbase == 8: m *= 2 ** (abs(e) % 3 * es) e = abs(e) // 3 * es + elif encbase == 16: m *= 2 ** (abs(e) % 4 * es) e = abs(e) // 4 * es @@ -320,6 +381,7 @@ class RealEncoder(AbstractItemEncoder): e -= 1 continue break + return ms, int(m), encbase, e def _chooseEncBase(self, value): @@ -327,23 +389,32 @@ class RealEncoder(AbstractItemEncoder): encBase = [2, 8, 16] if value.binEncBase in encBase: return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in encBase: return self._dropFloatingPoint(m, self.binEncBase, e) - # auto choosing base 2/8/16 + + # auto choosing base 2/8/16 mantissa = [m, m, m] - exponenta = [e, e, e] + exponent = [e, e, e] sign = 1 encbase = 2 e = float('inf') + for i in range(3): (sign, mantissa[i], encBase[i], - exponenta[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponenta[i]) - if abs(exponenta[i]) < abs(e) or (abs(exponenta[i]) == abs(e) and mantissa[i] < m): - e = exponenta[i] + exponent[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponent[i]) + + if abs(exponent[i]) < abs(e) or (abs(exponent[i]) == abs(e) and mantissa[i] < m): + e = exponent[i] m = int(mantissa[i]) encbase = encBase[i] + + if LOG: + 
LOG('automatically chosen REAL encoding base %s, sign %s, mantissa %s, ' + 'exponent %s' % (encbase, sign, m, e)) + return sign, m, encbase, e def encodeValue(self, value, asn1Spec, encodeFun, **options): @@ -352,69 +423,98 @@ class RealEncoder(AbstractItemEncoder): if value.isPlusInf: return (0x40,), False, False + if value.isMinusInf: return (0x41,), False, False + m, b, e = value + if not m: return null, False, True + if b == 10: + if LOG: + LOG('encoding REAL into character form') + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True + elif b == 2: fo = 0x80 # binary encoding ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign fo |= 0x40 # sign bit - # exponenta & mantissa normalization + + # exponent & mantissa normalization if encbase == 2: while m & 0x1 == 0: m >>= 1 e += 1 + elif encbase == 8: while m & 0x7 == 0: m >>= 3 e += 1 fo |= 0x10 + else: # encbase = 16 while m & 0xf == 0: m >>= 4 e += 1 fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: m >>= 1 sf += 1 + if sf > 3: raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 eo = null if e == 0 or e == -1: eo = int2oct(e & 0xff) + else: while e not in (0, -1): eo = int2oct(e & 0xff) + eo e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): eo = int2oct(0xff) + eo + n = len(eo) if n > 0xff: raise error.PyAsn1Error('Real exponent overflow') + if n == 1: pass + elif n == 2: fo |= 1 + elif n == 3: fo |= 2 + else: fo |= 3 eo = int2oct(n & 0xff) + eo + po = null + while m: po = int2oct(m & 0xff) + po m >>= 8 + substrate = int2oct(fo) + eo + po + return substrate, False, True + else: raise error.PyAsn1Error('Prohibited Real base %s' % b) @@ -428,9 +528,18 @@ class SequenceEncoder(AbstractItemEncoder): substrate = null + omitEmptyOptionals = options.get( + 'omitEmptyOptionals', self.omitEmptyOptionals) + + if LOG: + LOG('%sencoding empty OPTIONAL components' % ( + omitEmptyOptionals and 'not ' or '')) + if asn1Spec is None: # instance of ASN.1 schema - value.verifySizeSpec() + inconsistency = value.isInconsistent + if inconsistency: + raise inconsistency namedTypes = value.componentType @@ -439,23 +548,44 @@ class SequenceEncoder(AbstractItemEncoder): namedType = namedTypes[idx] if namedType.isOptional and not component.isValue: - continue + if LOG: + LOG('not encoding OPTIONAL component %r' % (namedType,)) + continue if namedType.isDefaulted and component == namedType.asn1Object: - continue + if LOG: + LOG('not encoding DEFAULT component %r' % (namedType,)) + continue - if self.omitEmptyOptionals: + if omitEmptyOptionals: options.update(ifNotEmpty=namedType.isOptional) - chunk = encodeFun(component, asn1Spec, **options) - # wrap open type blob if needed if namedTypes and namedType.openType: - wrapType = namedType.asn1Object - if wrapType.tagSet and not wrapType.isSameTypeWith(component): - chunk = encodeFun(chunk, wrapType, **options) - substrate += chunk + wrapType = namedType.asn1Object + + if wrapType.typeId in ( + univ.SetOf.typeId, univ.SequenceOf.typeId): + + substrate += encodeFun( + component, asn1Spec, + **dict(options, wrapType=wrapType.componentType)) + + else: + chunk = encodeFun(component, asn1Spec, **options) + + if wrapType.isSameTypeWith(component): + substrate += chunk + + else: + substrate += encodeFun(chunk, wrapType, **options) + + if LOG: + LOG('wrapped with wrap type %r' % (wrapType,)) + + else: + substrate += encodeFun(component, asn1Spec, **options) else: # 
bare Python value + ASN.1 schema @@ -465,43 +595,87 @@ class SequenceEncoder(AbstractItemEncoder): component = value[namedType.name] except KeyError: - raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value)) + raise error.PyAsn1Error('Component name "%s" not found in %r' % ( + namedType.name, value)) if namedType.isOptional and namedType.name not in value: + if LOG: + LOG('not encoding OPTIONAL component %r' % (namedType,)) continue if namedType.isDefaulted and component == namedType.asn1Object: + if LOG: + LOG('not encoding DEFAULT component %r' % (namedType,)) continue - if self.omitEmptyOptionals: + if omitEmptyOptionals: options.update(ifNotEmpty=namedType.isOptional) - chunk = encodeFun(component, asn1Spec[idx], **options) + componentSpec = namedType.asn1Object # wrap open type blob if needed if namedType.openType: - wrapType = namedType.asn1Object - if wrapType.tagSet and not wrapType.isSameTypeWith(component): - chunk = encodeFun(chunk, wrapType, **options) - substrate += chunk + if componentSpec.typeId in ( + univ.SetOf.typeId, univ.SequenceOf.typeId): + + substrate += encodeFun( + component, componentSpec, + **dict(options, wrapType=componentSpec.componentType)) + + else: + chunk = encodeFun(component, componentSpec, **options) + + if componentSpec.isSameTypeWith(component): + substrate += chunk + + else: + substrate += encodeFun(chunk, componentSpec, **options) + + if LOG: + LOG('wrapped with wrap type %r' % (componentSpec,)) + + else: + substrate += encodeFun(component, componentSpec, **options) return substrate, True, True class SequenceOfEncoder(AbstractItemEncoder): - def encodeValue(self, value, asn1Spec, encodeFun, **options): + def _encodeComponents(self, value, asn1Spec, encodeFun, **options): + if asn1Spec is None: - value.verifySizeSpec() + inconsistency = value.isInconsistent + if inconsistency: + raise inconsistency + else: asn1Spec = asn1Spec.componentType - substrate = null + chunks = [] + + wrapType = options.pop('wrapType', None) for idx, component in enumerate(value): - substrate += encodeFun(value[idx], asn1Spec, **options) + chunk = encodeFun(component, asn1Spec, **options) - return substrate, True, True + if (wrapType is not None and + not wrapType.isSameTypeWith(component)): + # wrap encoded value with wrapper container (e.g. 
ANY) + chunk = encodeFun(chunk, wrapType, **options) + + if LOG: + LOG('wrapped with wrap type %r' % (wrapType,)) + + chunks.append(chunk) + + return chunks + + def encodeValue(self, value, asn1Spec, encodeFun, **options): + chunks = self._encodeComponents( + value, asn1Spec, encodeFun, **options) + + return null.join(chunks), True, True class ChoiceEncoder(AbstractItemEncoder): @@ -620,13 +794,8 @@ class Encoder(object): raise error.PyAsn1Error('Value %r is not ASN.1 type instance ' 'and "asn1Spec" not given' % (value,)) - if debug.logger & debug.flagEncoder: - logger = debug.logger - else: - logger = None - - if logger: - logger('encoder called in %sdef mode, chunk size %s for ' + if LOG: + LOG('encoder called in %sdef mode, chunk size %s for ' 'type %s, value:\n%s' % (not options.get('defMode', True) and 'in' or '', options.get('maxChunkSize', 0), asn1Spec is None and value.prettyPrintType() or asn1Spec.prettyPrintType(), value)) if self.fixedDefLengthMode is not None: @@ -639,8 +808,8 @@ class Encoder(object): try: concreteEncoder = self.__typeMap[typeId] - if logger: - logger('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId)) + if LOG: + LOG('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId)) except KeyError: if asn1Spec is None: @@ -657,13 +826,13 @@ class Encoder(object): except KeyError: raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet)) - if logger: - logger('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet)) + if LOG: + LOG('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet)) substrate = concreteEncoder.encode(value, asn1Spec, self, **options) - if logger: - logger('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate))) + if LOG: + LOG('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate))) return substrate @@ -684,7 +853,7 @@ class Encoder(object): #: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative #: #: defMode: :py:class:`bool` -#: If `False`, produces indefinite length encoding +#: If :obj:`False`, produces indefinite length encoding #: #: maxChunkSize: :py:class:`int` #: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) @@ -696,7 +865,7 @@ class Encoder(object): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error #: On encoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/eoo.py b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/eoo.py index d4cd827..48eb859 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/ber/eoo.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/ber/eoo.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. 
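One small idiom worth calling out from the hunks above: wherever a nested encode/decode call needs a one-off option (for example allowEoo=True or wrapType=...), the new code passes **dict(options, key=value) instead of mutating the shared options dict. The override applies to that single call and cannot leak back to the caller, as this self-contained illustration shows (decode_fun is a stand-in, not a pyasn1 API):

    def decode_fun(substrate, **options):
        # Stand-in for a nested decoder call that consumes keyword options.
        return options

    options = {'defMode': True, 'maxChunkSize': 0}

    # Shallow-copy the dict with one key overridden for this call only.
    result = decode_fun(b'', **dict(options, allowEoo=True))

    assert result['allowEoo'] is True
    assert 'allowEoo' not in options   # the caller's dict is untouched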
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1.type import base @@ -10,7 +10,7 @@ from pyasn1.type import tag __all__ = ['endOfOctets'] -class EndOfOctets(base.AbstractSimpleAsn1Item): +class EndOfOctets(base.SimpleAsn1Type): defaultValue = 0 tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00) diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/cer/decoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/cer/decoder.py index 66572ec..3e86fd0 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/cer/decoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/cer/decoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -87,7 +87,7 @@ class Decoder(decoder.Decoder): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError #: On decoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/cer/encoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/cer/encoder.py index 768d3c1..935b696 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/cer/encoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/cer/encoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -31,17 +31,20 @@ class RealEncoder(encoder.RealEncoder): # specialized GeneralStringEncoder here class TimeEncoderMixIn(object): - zchar, = str2octs('Z') - pluschar, = str2octs('+') - minuschar, = str2octs('-') - commachar, = str2octs(',') - minLength = 12 - maxLength = 19 + Z_CHAR = ord('Z') + PLUS_CHAR = ord('+') + MINUS_CHAR = ord('-') + COMMA_CHAR = ord(',') + DOT_CHAR = ord('.') + ZERO_CHAR = ord('0') + + MIN_LENGTH = 12 + MAX_LENGTH = 19 def encodeValue(self, value, asn1Spec, encodeFun, **options): - # Encoding constraints: + # CER encoding constraints: # - minutes are mandatory, seconds are optional - # - subseconds must NOT be zero + # - sub-seconds must NOT be zero / no meaningless zeros # - no hanging fraction dot # - time in UTC (Z) # - only dot is allowed for fractions @@ -49,20 +52,46 @@ class TimeEncoderMixIn(object): if asn1Spec is not None: value = asn1Spec.clone(value) - octets = value.asOctets() + numbers = value.asNumbers() - if not self.minLength < len(octets) < self.maxLength: - raise error.PyAsn1Error('Length constraint violated: %r' % value) + if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers: + raise error.PyAsn1Error('Must be UTC time: %r' % value) - if self.pluschar in octets or self.minuschar in octets: - raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if numbers[-1] != self.Z_CHAR: + raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value) - if octets[-1] != self.zchar: - raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % octets) - - if self.commachar in octets: + if self.COMMA_CHAR in numbers: raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value) + if self.DOT_CHAR in numbers: + + isModified = False + + numbers = list(numbers) + + searchIndex = min(numbers.index(self.DOT_CHAR) + 4, 
len(numbers) - 1) + + while numbers[searchIndex] != self.DOT_CHAR: + if numbers[searchIndex] == self.ZERO_CHAR: + del numbers[searchIndex] + isModified = True + + searchIndex -= 1 + + searchIndex += 1 + + if searchIndex < len(numbers): + if numbers[searchIndex] == self.Z_CHAR: + # drop hanging comma + del numbers[searchIndex - 1] + isModified = True + + if isModified: + value = value.clone(numbers) + + if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH: + raise error.PyAsn1Error('Length constraint violated: %r' % value) + options.update(maxChunkSize=1000) return encoder.OctetStringEncoder.encodeValue( @@ -71,13 +100,44 @@ class TimeEncoderMixIn(object): class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder): - minLength = 12 - maxLength = 19 + MIN_LENGTH = 12 + MAX_LENGTH = 20 class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder): - minLength = 10 - maxLength = 14 + MIN_LENGTH = 10 + MAX_LENGTH = 14 + + +class SetOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): + chunks = self._encodeComponents( + value, asn1Spec, encodeFun, **options) + + # sort by serialised and padded components + if len(chunks) > 1: + zero = str2octs('\x00') + maxLen = max(map(len, chunks)) + paddedChunks = [ + (x.ljust(maxLen, zero), x) for x in chunks + ] + paddedChunks.sort(key=lambda x: x[0]) + + chunks = [x[1] for x in paddedChunks] + + return null.join(chunks), True, True + + +class SequenceOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): + + if options.get('ifNotEmpty', False) and not len(value): + return null, True, True + + chunks = self._encodeComponents( + value, asn1Spec, encodeFun, **options) + + return null.join(chunks), True, True class SetEncoder(encoder.SequenceEncoder): @@ -109,7 +169,9 @@ class SetEncoder(encoder.SequenceEncoder): if asn1Spec is None: # instance of ASN.1 schema - value.verifySizeSpec() + inconsistency = value.isInconsistent + if inconsistency: + raise inconsistency namedTypes = value.componentType @@ -168,55 +230,10 @@ class SetEncoder(encoder.SequenceEncoder): return substrate, True, True -class SetOfEncoder(encoder.SequenceOfEncoder): - def encodeValue(self, value, asn1Spec, encodeFun, **options): - if asn1Spec is None: - value.verifySizeSpec() - else: - asn1Spec = asn1Spec.componentType - - components = [encodeFun(x, asn1Spec, **options) - for x in value] - - # sort by serialised and padded components - if len(components) > 1: - zero = str2octs('\x00') - maxLen = max(map(len, components)) - paddedComponents = [ - (x.ljust(maxLen, zero), x) for x in components - ] - paddedComponents.sort(key=lambda x: x[0]) - - components = [x[1] for x in paddedComponents] - - substrate = null.join(components) - - return substrate, True, True - - class SequenceEncoder(encoder.SequenceEncoder): omitEmptyOptionals = True -class SequenceOfEncoder(encoder.SequenceOfEncoder): - def encodeValue(self, value, asn1Spec, encodeFun, **options): - - if options.get('ifNotEmpty', False) and not len(value): - return null, True, True - - if asn1Spec is None: - value.verifySizeSpec() - else: - asn1Spec = asn1Spec.componentType - - substrate = null - - for idx, component in enumerate(value): - substrate += encodeFun(value[idx], asn1Spec, **options) - - return substrate, True, True - - tagMap = encoder.tagMap.copy() tagMap.update({ univ.Boolean.tagSet: BooleanEncoder(), @@ -269,7 +286,7 @@ class Encoder(encoder.Encoder): #: #: Raises #: ------ -#: 
:py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error #: On encoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/der/decoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/der/decoder.py index f67d025..1a13fdb 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/der/decoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/der/decoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1.codec.cer import decoder @@ -67,7 +67,7 @@ class Decoder(decoder.Decoder): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError #: On decoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/der/encoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/der/encoder.py index 756d9fe..90e982d 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/der/encoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/der/encoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -82,7 +82,7 @@ class Encoder(encoder.Encoder): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error #: On encoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/native/decoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/native/decoder.py index 78fcda6..104b92e 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/native/decoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/native/decoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. 
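Stepping back to the new SetOfEncoder in the CER encoder diff above: X.690 requires SET OF components in canonical order, which the patch implements by right-padding every serialized component with zero octets to a common length, sorting on the padded form, and then joining the original (unpadded) encodings. A self-contained sketch of that ordering step, under the assumption that each chunk is one component's complete serialization:

    def sort_set_of_chunks(chunks):
        # chunks: list of bytes, one serialized SET OF component each.
        if len(chunks) > 1:
            max_len = max(map(len, chunks))
            padded = [(chunk.ljust(max_len, b'\x00'), chunk) for chunk in chunks]
            padded.sort(key=lambda pair: pair[0])   # order by padded form
            chunks = [chunk for _, chunk in padded]
        return b''.join(chunks)                     # emit unpadded encodings

    # BOOLEAN FALSE (tag 0x01) sorts before INTEGER 5 (tag 0x02):
    assert sort_set_of_chunks([b'\x02\x01\x05', b'\x01\x01\x00']) == \
        b'\x01\x01\x00\x02\x01\x05'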
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import debug @@ -14,6 +14,8 @@ from pyasn1.type import useful __all__ = ['decode'] +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER) + class AbstractScalarDecoder(object): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): @@ -136,13 +138,10 @@ class Decoder(object): self.__typeMap = typeMap def __call__(self, pyObject, asn1Spec, **options): - if debug.logger & debug.flagDecoder: - logger = debug.logger - else: - logger = None - if logger: + + if LOG: debug.scope.push(type(pyObject).__name__) - logger('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__)) + LOG('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__)) if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item): raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__) @@ -159,13 +158,13 @@ class Decoder(object): except KeyError: raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet) - if logger: - logger('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject))) + if LOG: + LOG('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject))) value = valueDecoder(pyObject, asn1Spec, self, **options) - if logger: - logger('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value))) + if LOG: + LOG('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value))) debug.scope.pop() return value @@ -196,7 +195,7 @@ class Decoder(object): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error #: On decoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/codec/native/encoder.py b/server/www/packages/packages-linux/x64/pyasn1/codec/native/encoder.py index 0956191..4318abd 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/codec/native/encoder.py +++ b/server/www/packages/packages-linux/x64/pyasn1/codec/native/encoder.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. 
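The native decoder being reworked here maps plain Python objects onto a supplied ASN.1 schema, e.g.:

.. code-block:: python

    from pyasn1.type import univ
    from pyasn1.codec.native import decoder

    # A plain Python int becomes an ASN.1 Integer per the given spec.
    value = decoder.decode(42, asn1Spec=univ.Integer())
    assert value == 42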
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # try: @@ -20,6 +20,8 @@ from pyasn1.type import useful __all__ = ['encode'] +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER) + class AbstractItemEncoder(object): def encode(self, value, encodeFun, **options): @@ -70,7 +72,9 @@ class SetEncoder(AbstractItemEncoder): protoDict = dict def encode(self, value, encodeFun, **options): - value.verifySizeSpec() + inconsistency = value.isInconsistent + if inconsistency: + raise inconsistency namedTypes = value.componentType substrate = self.protoDict() @@ -88,7 +92,9 @@ class SequenceEncoder(SetEncoder): class SequenceOfEncoder(AbstractItemEncoder): def encode(self, value, encodeFun, **options): - value.verifySizeSpec() + inconsistency = value.isInconsistent + if inconsistency: + raise inconsistency return [encodeFun(x, **options) for x in value] @@ -180,14 +186,9 @@ class Encoder(object): if not isinstance(value, base.Asn1Item): raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)') - if debug.logger & debug.flagEncoder: - logger = debug.logger - else: - logger = None - - if logger: + if LOG: debug.scope.push(type(value).__name__) - logger('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint())) + LOG('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint())) tagSet = value.tagSet @@ -204,13 +205,13 @@ class Encoder(object): except KeyError: raise error.PyAsn1Error('No encoder for %s' % (value,)) - if logger: - logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) + if LOG: + LOG('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) pyObject = concreteEncoder.encode(value, self, **options) - if logger: - logger('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject))) + if LOG: + LOG('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject))) debug.scope.pop() return pyObject @@ -238,7 +239,7 @@ class Encoder(object): #: #: Raises #: ------ -#: :py:class:`~pyasn1.error.PyAsn1Error` +#: ~pyasn1.error.PyAsn1Error #: On encoding errors #: #: Examples diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/binary.py b/server/www/packages/packages-linux/x64/pyasn1/compat/binary.py index c38a650..addbdc9 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/binary.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/binary.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from sys import version_info diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/calling.py b/server/www/packages/packages-linux/x64/pyasn1/compat/calling.py index c60b50d..778a3d1 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/calling.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/calling.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. 
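Both native encoders now consult the isInconsistent property instead of calling verifySizeSpec(). The property returns a ready-to-raise exception object, or False, so the caller decides what to do with it. A sketch with a hypothetical TwoInts type; the exact behaviour assumes the pyasn1 0.4.7-era implementation:

.. code-block:: python

    from pyasn1.type import constraint, univ

    class TwoInts(univ.SequenceOf):
        componentType = univ.Integer()
        subtypeSpec = constraint.ValueSizeConstraint(2, 2)

    seq = TwoInts()
    seq.append(1)   # only one of the two required components

    problem = seq.isInconsistent   # exception instance or False
    if problem:
        raise problem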
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from sys import version_info diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/dateandtime.py b/server/www/packages/packages-linux/x64/pyasn1/compat/dateandtime.py index 27526ad..5e471bf 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/dateandtime.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/dateandtime.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import time diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/integer.py b/server/www/packages/packages-linux/x64/pyasn1/compat/integer.py index bb3d099..4b31791 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/integer.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/integer.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import sys diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/octets.py b/server/www/packages/packages-linux/x64/pyasn1/compat/octets.py index a06db5d..99d23bb 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/octets.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/octets.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from sys import version_info diff --git a/server/www/packages/packages-linux/x64/pyasn1/compat/string.py b/server/www/packages/packages-linux/x64/pyasn1/compat/string.py index 4d8a045..b9bc8c3 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/compat/string.py +++ b/server/www/packages/packages-linux/x64/pyasn1/compat/string.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from sys import version_info diff --git a/server/www/packages/packages-linux/x64/pyasn1/debug.py b/server/www/packages/packages-linux/x64/pyasn1/debug.py index ab72fa8..8707aa8 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/debug.py +++ b/server/www/packages/packages-linux/x64/pyasn1/debug.py @@ -1,10 +1,11 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import logging +import sys from pyasn1 import __version__ from pyasn1 import error @@ -12,18 +13,20 @@ from pyasn1.compat.octets import octs2ints __all__ = ['Debug', 'setLogger', 'hexdump'] -flagNone = 0x0000 -flagEncoder = 0x0001 -flagDecoder = 0x0002 -flagAll = 0xffff +DEBUG_NONE = 0x0000 +DEBUG_ENCODER = 0x0001 +DEBUG_DECODER = 0x0002 +DEBUG_ALL = 0xffff -flagMap = { - 'none': flagNone, - 'encoder': flagEncoder, - 'decoder': flagDecoder, - 'all': flagAll +FLAG_MAP = { + 'none': DEBUG_NONE, + 'encoder': DEBUG_ENCODER, + 'decoder': DEBUG_DECODER, + 'all': DEBUG_ALL } +LOGGEE_MAP = {} + class Printer(object): # noinspection PyShadowingNames @@ -66,7 +69,7 @@ class Debug(object): defaultPrinter = Printer() def __init__(self, *flags, **options): - self._flags = flagNone + self._flags = DEBUG_NONE if 'loggerName' in options: # route our logs to parent logger @@ -89,9 +92,9 @@ class Debug(object): flag = flag[1:] try: if inverse: - self._flags &= ~flagMap[flag] + self._flags &= ~FLAG_MAP[flag] else: - self._flags |= flagMap[flag] + self._flags |= FLAG_MAP[flag] except KeyError: raise error.PyAsn1Error('bad debug flag %s' % flag) @@ -109,17 +112,26 @@ class Debug(object): def __rand__(self, flag): return flag & self._flags - -logger = 0 +_LOG = DEBUG_NONE def setLogger(userLogger): - global logger + global _LOG if userLogger: - logger = userLogger + _LOG = userLogger else: - logger = 0 + _LOG = DEBUG_NONE + + # Update registered logging clients + for module, (name, flags) in LOGGEE_MAP.items(): + setattr(module, name, _LOG & flags and _LOG or DEBUG_NONE) + + +def registerLoggee(module, name='LOG', flags=DEBUG_NONE): + LOGGEE_MAP[sys.modules[module]] = name, flags + setLogger(_LOG) + return _LOG def hexdump(octets): diff --git a/server/www/packages/packages-linux/x64/pyasn1/error.py b/server/www/packages/packages-linux/x64/pyasn1/error.py index c05e65c..4f48db2 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/error.py +++ b/server/www/packages/packages-linux/x64/pyasn1/error.py @@ -1,29 +1,75 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # class PyAsn1Error(Exception): - """Create pyasn1 exception object + """Base pyasn1 exception - The `PyAsn1Error` exception represents generic, usually fatal, error. + `PyAsn1Error` is the base exception class (based on + :class:`Exception`) that represents all possible ASN.1 related + errors. """ class ValueConstraintError(PyAsn1Error): - """Create pyasn1 exception object + """ASN.1 type constraints violation exception The `ValueConstraintError` exception indicates an ASN.1 value constraint violation. + + It might happen on value object instantiation (for scalar types) or on + serialization (for constructed types). """ class SubstrateUnderrunError(PyAsn1Error): - """Create pyasn1 exception object + """ASN.1 data structure deserialization error The `SubstrateUnderrunError` exception indicates insufficient serialised - data on input of a deserialisation routine. + data on input of a de-serialization codec. """ + + +class PyAsn1UnicodeError(PyAsn1Error, UnicodeError): + """Unicode text processing error + + The `PyAsn1UnicodeError` exception is a base class for errors relating to + unicode text de/serialization. 
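With the upper-case flag constants and the registerLoggee() plumbing above, per-module LOG objects are switched on and off centrally through setLogger():

.. code-block:: python

    from pyasn1 import debug

    # Flag names mirror the FLAG_MAP keys ('encoder', 'decoder', 'all').
    debug.setLogger(debug.Debug('decoder'))

    # ... exercise the codecs ...

    debug.setLogger(None)   # switch diagnostics off again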
+ + Apart from inheriting from :class:`PyAsn1Error`, it also inherits from + :class:`UnicodeError` to help the caller catching unicode-related errors. + """ + def __init__(self, message, unicode_error=None): + if isinstance(unicode_error, UnicodeError): + UnicodeError.__init__(self, *unicode_error.args) + PyAsn1Error.__init__(self, message) + + +class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError): + """Unicode text decoding error + + The `PyAsn1UnicodeDecodeError` exception represents a failure to + deserialize unicode text. + + Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits + from :class:`UnicodeDecodeError` to help the caller catching unicode-related + errors. + """ + + +class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError): + """Unicode text encoding error + + The `PyAsn1UnicodeEncodeError` exception represents a failure to + serialize unicode text. + + Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits + from :class:`UnicodeEncodeError` to help the caller catching + unicode-related errors. + """ + + diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/base.py b/server/www/packages/packages-linux/x64/pyasn1/type/base.py index adaab22..994f1c9 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/base.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/base.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import sys @@ -12,7 +12,8 @@ from pyasn1.type import constraint from pyasn1.type import tag from pyasn1.type import tagmap -__all__ = ['Asn1Item', 'Asn1ItemBase', 'AbstractSimpleAsn1Item', 'AbstractConstructedAsn1Item'] +__all__ = ['Asn1Item', 'Asn1Type', 'SimpleAsn1Type', + 'ConstructedAsn1Type'] class Asn1Item(object): @@ -25,7 +26,17 @@ class Asn1Item(object): return Asn1Item._typeCounter -class Asn1ItemBase(Asn1Item): +class Asn1Type(Asn1Item): + """Base class for all classes representing ASN.1 types. + + In the user code, |ASN.1| class is normally used only for telling + ASN.1 objects from others. + + Note + ---- + For as long as ASN.1 is concerned, a way to compare ASN.1 types + is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods. + """ #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing #: ASN.1 tag(s) associated with |ASN.1| type. tagSet = tag.TagSet() @@ -91,8 +102,8 @@ class Asn1ItemBase(Asn1Item): Returns ------- : :class:`bool` - :class:`True` if *other* is |ASN.1| type, - :class:`False` otherwise. + :obj:`True` if *other* is |ASN.1| type, + :obj:`False` otherwise. """ return (self is other or (not matchTags or self.tagSet == other.tagSet) and @@ -115,8 +126,8 @@ class Asn1ItemBase(Asn1Item): Returns ------- : :class:`bool` - :class:`True` if *other* is a subtype of |ASN.1| type, - :class:`False` otherwise. + :obj:`True` if *other* is a subtype of |ASN.1| type, + :obj:`False` otherwise. """ return (not matchTags or (self.tagSet.isSuperTagSetOf(other.tagSet)) and @@ -146,9 +157,13 @@ class Asn1ItemBase(Asn1Item): def getSubtypeSpec(self): return self.subtypeSpec + # backward compatibility def hasValue(self): return self.isValue +# Backward compatibility +Asn1ItemBase = Asn1Type + class NoValue(object): """Create a singleton instance of NoValue class. 
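Because the new exception classes double-inherit from the built-in UnicodeError family, text-codec failures can be caught either pyasn1-style or plain-Python style. A small sketch:

.. code-block:: python

    from pyasn1 import error
    from pyasn1.type import char

    try:
        char.PrintableString(u'\u044d').asOctets()   # not ASCII-encodable
    except error.PyAsn1Error:
        pass   # pyasn1-style handling

    try:
        char.PrintableString(u'\u044d').asOctets()
    except UnicodeError:
        pass   # the very same exception is also a UnicodeError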
@@ -221,19 +236,31 @@ class NoValue(object): raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % attr) def __repr__(self): - return '<%s object at 0x%x>' % (self.__class__.__name__, id(self)) + return '<%s object>' % self.__class__.__name__ noValue = NoValue() -# Base class for "simple" ASN.1 objects. These are immutable. -class AbstractSimpleAsn1Item(Asn1ItemBase): +class SimpleAsn1Type(Asn1Type): + """Base class for all simple classes representing ASN.1 types. + + ASN.1 distinguishes types by their ability to hold other objects. + Scalar types are known as *simple* in ASN.1. + + In the user code, |ASN.1| class is normally used only for telling + ASN.1 objects from others. + + Note + ---- + For as long as ASN.1 is concerned, a way to compare ASN.1 types + is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods. + """ #: Default payload value defaultValue = noValue def __init__(self, value=noValue, **kwargs): - Asn1ItemBase.__init__(self, **kwargs) + Asn1Type.__init__(self, **kwargs) if value is noValue: value = self.defaultValue else: @@ -248,19 +275,18 @@ class AbstractSimpleAsn1Item(Asn1ItemBase): self._value = value def __repr__(self): - representation = '%s %s object at 0x%x' % ( - self.__class__.__name__, self.isValue and 'value' or 'schema', id(self) - ) + representation = '%s %s object' % ( + self.__class__.__name__, self.isValue and 'value' or 'schema') for attr, value in self.readOnly.items(): if value: - representation += ' %s %s' % (attr, value) + representation += ', %s %s' % (attr, value) if self.isValue: value = self.prettyPrint() if len(value) > 32: value = value[:16] + '...' + value[-16:] - representation += ' payload [%s]' % value + representation += ', payload [%s]' % value return '<%s>' % representation @@ -296,17 +322,18 @@ class AbstractSimpleAsn1Item(Asn1ItemBase): def isValue(self): """Indicate that |ASN.1| object represents ASN.1 value. - If *isValue* is `False` then this object represents just ASN.1 schema. + If *isValue* is :obj:`False` then this object represents just + ASN.1 schema. - If *isValue* is `True` then, in addition to its ASN.1 schema features, - this object can also be used like a Python built-in object (e.g. `int`, - `str`, `dict` etc.). + If *isValue* is :obj:`True` then, in addition to its ASN.1 schema + features, this object can also be used like a Python built-in object + (e.g. :class:`int`, :class:`str`, :class:`dict` etc.). Returns ------- : :class:`bool` - :class:`False` if object represents just ASN.1 schema. - :class:`True` if object represents ASN.1 schema and can be used as a normal value. + :obj:`False` if object represents just ASN.1 schema. + :obj:`True` if object represents ASN.1 schema and can be used as a normal value. Note ---- @@ -343,10 +370,10 @@ class AbstractSimpleAsn1Item(Asn1ItemBase): value = self._value - initilaizers = self.readOnly.copy() - initilaizers.update(kwargs) + initializers = self.readOnly.copy() + initializers.update(kwargs) - return self.__class__(value, **initilaizers) + return self.__class__(value, **initializers) def subtype(self, value=noValue, **kwargs): """Create a specialization of |ASN.1| schema or value object. 
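The renamed base class keeps its old name as an alias, and the isValue distinction reads as follows in practice:

.. code-block:: python

    from pyasn1.type import base, univ

    schema = univ.Integer()    # schema object, no payload yet
    value = univ.Integer(12)   # value object

    assert not schema.isValue
    assert value.isValue

    # The legacy name remains importable.
    assert base.AbstractSimpleAsn1Item is base.SimpleAsn1Type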
@@ -425,10 +452,12 @@ class AbstractSimpleAsn1Item(Asn1ItemBase): def prettyPrint(self, scope=0): return self.prettyOut(self._value) - # noinspection PyUnusedLocal def prettyPrintType(self, scope=0): return '%s -> %s' % (self.tagSet, self.__class__.__name__) +# Backward compatibility +AbstractSimpleAsn1Item = SimpleAsn1Type + # # Constructed types: # * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice @@ -449,67 +478,102 @@ class AbstractSimpleAsn1Item(Asn1ItemBase): # -class AbstractConstructedAsn1Item(Asn1ItemBase): +class ConstructedAsn1Type(Asn1Type): + """Base class for all constructed classes representing ASN.1 types. - #: If `True`, requires exact component type matching, + ASN.1 distinguishes types by their ability to hold other objects. + Those "nesting" types are known as *constructed* in ASN.1. + + In the user code, |ASN.1| class is normally used only for telling + ASN.1 objects from others. + + Note + ---- + For as long as ASN.1 is concerned, a way to compare ASN.1 types + is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods. + """ + + #: If :obj:`True`, requires exact component type matching, #: otherwise subtype relation is only enforced strictConstraints = False componentType = None - sizeSpec = None + + # backward compatibility, unused + sizeSpec = constraint.ConstraintsIntersection() def __init__(self, **kwargs): readOnly = { 'componentType': self.componentType, + # backward compatibility, unused 'sizeSpec': self.sizeSpec } + + # backward compatibility: preserve legacy sizeSpec support + kwargs = self._moveSizeSpec(**kwargs) + readOnly.update(kwargs) - Asn1ItemBase.__init__(self, **readOnly) + Asn1Type.__init__(self, **readOnly) - self._componentValues = [] + def _moveSizeSpec(self, **kwargs): + # backward compatibility, unused + sizeSpec = kwargs.pop('sizeSpec', self.sizeSpec) + if sizeSpec: + subtypeSpec = kwargs.pop('subtypeSpec', self.subtypeSpec) + if subtypeSpec: + subtypeSpec = sizeSpec + + else: + subtypeSpec += sizeSpec + + kwargs['subtypeSpec'] = subtypeSpec + + return kwargs def __repr__(self): - representation = '%s %s object at 0x%x' % ( - self.__class__.__name__, self.isValue and 'value' or 'schema', id(self) + representation = '%s %s object' % ( + self.__class__.__name__, self.isValue and 'value' or 'schema' ) for attr, value in self.readOnly.items(): if value is not noValue: - representation += ' %s=%r' % (attr, value) + representation += ', %s=%r' % (attr, value) - if self.isValue and self._componentValues: - representation += ' payload [%s]' % ', '.join([repr(x) for x in self._componentValues]) + if self.isValue and self.components: + representation += ', payload [%s]' % ', '.join( + [repr(x) for x in self.components]) return '<%s>' % representation def __eq__(self, other): - return self is other and True or self._componentValues == other + return self is other or self.components == other def __ne__(self, other): - return self._componentValues != other + return self.components != other def __lt__(self, other): - return self._componentValues < other + return self.components < other def __le__(self, other): - return self._componentValues <= other + return self.components <= other def __gt__(self, other): - return self._componentValues > other + return self.components > other def __ge__(self, other): - return self._componentValues >= other + return self.components >= other if sys.version_info[0] <= 2: def __nonzero__(self): - return self._componentValues and True or False + return bool(self.components) else: def __bool__(self): 
- return self._componentValues and True or False + return bool(self.components) - def __len__(self): - return len(self._componentValues) + @property + def components(self): + raise error.PyAsn1Error('Method not implemented') def _cloneComponentValues(self, myClone, cloneValueFlag): pass @@ -535,15 +599,14 @@ class AbstractConstructedAsn1Item(Asn1ItemBase): Note ---- Due to the mutable nature of the |ASN.1| object, even if no arguments - are supplied, new |ASN.1| object will always be created as a shallow - copy of `self`. + are supplied, a new |ASN.1| object will be created and returned. """ cloneValueFlag = kwargs.pop('cloneValueFlag', False) - initilaizers = self.readOnly.copy() - initilaizers.update(kwargs) + initializers = self.readOnly.copy() + initializers.update(kwargs) - clone = self.__class__(**initilaizers) + clone = self.__class__(**initializers) if cloneValueFlag: self._cloneComponentValues(clone, cloneValueFlag) @@ -588,9 +651,8 @@ class AbstractConstructedAsn1Item(Asn1ItemBase): Note ---- - Due to the immutable nature of the |ASN.1| object, if no arguments - are supplied, no new |ASN.1| object will be created and `self` will - be returned instead. + Due to the mutable nature of the |ASN.1| object, even if no arguments + are supplied, a new |ASN.1| object will be created and returned. """ initializers = self.readOnly.copy() @@ -615,9 +677,6 @@ class AbstractConstructedAsn1Item(Asn1ItemBase): return clone - def verifySizeSpec(self): - self.sizeSpec(self) - def getComponentByPosition(self, idx): raise error.PyAsn1Error('Method not implemented') @@ -631,9 +690,6 @@ class AbstractConstructedAsn1Item(Asn1ItemBase): self[k] = kwargs[k] return self - def clear(self): - self._componentValues = [] - # backward compatibility def setDefaultComponents(self): @@ -641,3 +697,11 @@ class AbstractConstructedAsn1Item(Asn1ItemBase): def getComponentType(self): return self.componentType + + # backward compatibility, unused + def verifySizeSpec(self): + self.subtypeSpec(self) + + + # Backward compatibility +AbstractConstructedAsn1Item = ConstructedAsn1Type diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/char.py b/server/www/packages/packages-linux/x64/pyasn1/type/char.py index 493badb..06074da 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/char.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/char.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import sys @@ -21,21 +21,27 @@ noValue = univ.noValue class AbstractCharacterString(univ.OctetString): """Creates |ASN.1| schema or value object. - |ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`. - When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding. + |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, + its objects are immutable and duck-type Python 2 :class:`str` or Python 3 + :class:`bytes`. When used in octet-stream context, |ASN.1| type assumes + "|encoding|" encoding. Keyword Args ------------ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object - unicode object (Python 2) or string (Python 3), alternatively string - (Python 2) or bytes (Python 3) representing octet-stream of serialised - unicode string (note `encoding` parameter) or |ASN.1| class instance. 
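As the amended notes say, clone() and subtype() on the mutable constructed types always hand back a fresh object. For instance:

.. code-block:: python

    from pyasn1.type import tag, univ

    seq = univ.SequenceOf(componentType=univ.Integer())
    seq.append(1)

    copy = seq.clone(cloneValueFlag=True)   # also copies components
    assert copy is not seq

    # subtype() derives a re-tagged flavour of the same type
    tagged = seq.subtype(implicitTag=tag.Tag(
        tag.tagClassContext, tag.tagFormatConstructed, 0))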
+ :class:`unicode` object (Python 2) or :class:`str` (Python 3), + alternatively :class:`str` (Python 2) or :class:`bytes` (Python 3) + representing octet-stream of serialised unicode string + (note `encoding` parameter) or |ASN.1| class instance. + If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s) + Object representing non-default ASN.1 subtype constraint(s). Constraints + verification for |ASN.1| type occurs automatically on object + instantiation. encoding: :py:class:`str` Unicode codec ID to encode/decode :class:`unicode` (Python 2) or @@ -44,7 +50,7 @@ class AbstractCharacterString(univ.OctetString): Raises ------ - :py:class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. """ @@ -55,8 +61,10 @@ class AbstractCharacterString(univ.OctetString): return self._value.encode(self.encoding) except UnicodeEncodeError: - raise error.PyAsn1Error( - "Can't encode string '%s' with codec %s" % (self._value, self.encoding) + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with codec " + "%s" % (self._value, self.encoding), exc ) def __unicode__(self): @@ -76,8 +84,10 @@ class AbstractCharacterString(univ.OctetString): return unicode(value) except (UnicodeDecodeError, LookupError): - raise error.PyAsn1Error( - "Can't decode string '%s' with codec %s" % (value, self.encoding) + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with codec " + "%s" % (value, self.encoding), exc ) def asOctets(self, padding=True): @@ -95,8 +105,10 @@ class AbstractCharacterString(univ.OctetString): try: return self._value.encode(self.encoding) except UnicodeEncodeError: - raise error.PyAsn1Error( - "Can't encode string '%s' with codec %s" % (self._value, self.encoding) + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with codec " + "%s" % (self._value, self.encoding), exc ) def prettyIn(self, value): @@ -113,8 +125,10 @@ class AbstractCharacterString(univ.OctetString): return str(value) except (UnicodeDecodeError, LookupError): - raise error.PyAsn1Error( - "Can't decode string '%s' with codec %s" % (value, self.encoding) + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with codec " + "%s" % (value, self.encoding), exc ) def asOctets(self, padding=True): diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/constraint.py b/server/www/packages/packages-linux/x64/pyasn1/type/constraint.py index a704331..8f152e9 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/constraint.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/constraint.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # # Original concept and code by Mike C. Fletcher. 
@@ -37,10 +37,11 @@ class AbstractConstraint(object): ) def __repr__(self): - representation = '%s object at 0x%x' % (self.__class__.__name__, id(self)) + representation = '%s object' % (self.__class__.__name__) if self._values: - representation += ' consts %s' % ', '.join([repr(x) for x in self._values]) + representation += ', consts %s' % ', '.join( + [repr(x) for x in self._values]) return '<%s>' % representation @@ -102,12 +103,17 @@ class SingleValueConstraint(AbstractConstraint): The SingleValueConstraint satisfies any value that is present in the set of permitted values. + Objects of this type are iterable (emitting constraint values) and + can act as operands for some arithmetic operations e.g. addition + and subtraction. The latter can be used for combining multiple + SingleValueConstraint objects into one. + The SingleValueConstraint object can be applied to any ASN.1 type. Parameters ---------- - \*values: :class:`int` + *values: :class:`int` Full set of values permitted by this constraint object. Examples @@ -136,6 +142,23 @@ class SingleValueConstraint(AbstractConstraint): if value not in self._set: raise error.ValueConstraintError(value) + # Constrains can be merged or reduced + + def __contains__(self, item): + return item in self._set + + def __iter__(self): + return iter(self._set) + + def __sub__(self, constraint): + return self.__class__(*(self._set.difference(constraint))) + + def __add__(self, constraint): + return self.__class__(*(self._set.union(constraint))) + + def __sub__(self, constraint): + return self.__class__(*(self._set.difference(constraint))) + class ContainedSubtypeConstraint(AbstractConstraint): """Create a ContainedSubtypeConstraint object. @@ -149,7 +172,7 @@ class ContainedSubtypeConstraint(AbstractConstraint): Parameters ---------- - \*values: + *values: Full set of values and constraint objects permitted by this constraint object. @@ -304,17 +327,21 @@ class PermittedAlphabetConstraint(SingleValueConstraint): string for as long as all its characters are present in the set of permitted characters. + Objects of this type are iterable (emitting constraint values) and + can act as operands for some arithmetic operations e.g. addition + and subtraction. + The PermittedAlphabetConstraint object can only be applied to the :ref:`character ASN.1 types ` such as :class:`~pyasn1.type.char.IA5String`. Parameters ---------- - \*alphabet: :class:`str` + *alphabet: :class:`str` Full set of characters permitted by this constraint object. - Examples - -------- + Example + ------- .. code-block:: python class BooleanValue(IA5String): @@ -331,6 +358,42 @@ class PermittedAlphabetConstraint(SingleValueConstraint): # this will raise ValueConstraintError garbage = BooleanValue('TAF') + + ASN.1 `FROM ... EXCEPT ...` clause can be modelled by combining multiple + PermittedAlphabetConstraint objects into one: + + Example + ------- + .. code-block:: python + + class Lipogramme(IA5String): + ''' + ASN.1 specification: + + Lipogramme ::= + IA5String (FROM (ALL EXCEPT ("e"|"E"))) + ''' + subtypeSpec = ( + PermittedAlphabetConstraint(*string.printable) - + PermittedAlphabetConstraint('e', 'E') + ) + + # this will succeed + lipogramme = Lipogramme('A work of fiction?') + + # this will raise ValueConstraintError + lipogramme = Lipogramme('Eel') + + Note + ---- + Although `ConstraintsExclusion` object could seemingly be used for this + purpose, practically, for it to work, it needs to represent its operand + constraints as sets and intersect one with the other. 
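The new arithmetic on SingleValueConstraint (inherited by PermittedAlphabetConstraint) allows set-style composition; the names below are made up for illustration:

.. code-block:: python

    from pyasn1 import error
    from pyasn1.type import constraint

    days = constraint.SingleValueConstraint(1, 2, 3, 4, 5, 6, 7)
    weekend = constraint.SingleValueConstraint(6, 7)

    workdays = days - weekend   # SingleValueConstraint(1, 2, 3, 4, 5)

    workdays(3)                 # accepted
    try:
        workdays(6)             # rejected
    except error.ValueConstraintError:
        pass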
That would require + the insight into the constraint values (and their types) that are otherwise + hidden inside the constraint object. + + Therefore it's more practical to model `EXCEPT` clause at + `PermittedAlphabetConstraint` level instead. """ def _setValues(self, values): self._values = values @@ -341,6 +404,151 @@ class PermittedAlphabetConstraint(SingleValueConstraint): raise error.ValueConstraintError(value) +class ComponentPresentConstraint(AbstractConstraint): + """Create a ComponentPresentConstraint object. + + The ComponentPresentConstraint is only satisfied when the value + is not `None`. + + The ComponentPresentConstraint object is typically used with + `WithComponentsConstraint`. + + Examples + -------- + .. code-block:: python + + present = ComponentPresentConstraint() + + # this will succeed + present('whatever') + + # this will raise ValueConstraintError + present(None) + """ + def _setValues(self, values): + self._values = ('',) + + if values: + raise error.PyAsn1Error('No arguments expected') + + def _testValue(self, value, idx): + if value is None: + raise error.ValueConstraintError( + 'Component is not present:') + + +class ComponentAbsentConstraint(AbstractConstraint): + """Create a ComponentAbsentConstraint object. + + The ComponentAbsentConstraint is only satisfied when the value + is `None`. + + The ComponentAbsentConstraint object is typically used with + `WithComponentsConstraint`. + + Examples + -------- + .. code-block:: python + + absent = ComponentAbsentConstraint() + + # this will succeed + absent(None) + + # this will raise ValueConstraintError + absent('whatever') + """ + def _setValues(self, values): + self._values = ('',) + + if values: + raise error.PyAsn1Error('No arguments expected') + + def _testValue(self, value, idx): + if value is not None: + raise error.ValueConstraintError( + 'Component is not absent: %r' % value) + + +class WithComponentsConstraint(AbstractConstraint): + """Create a WithComponentsConstraint object. + + The `WithComponentsConstraint` satisfies any mapping object that has + constrained fields present or absent, what is indicated by + `ComponentPresentConstraint` and `ComponentAbsentConstraint` + objects respectively. + + The `WithComponentsConstraint` object is typically applied + to :class:`~pyasn1.type.univ.Set` or + :class:`~pyasn1.type.univ.Sequence` types. + + Parameters + ---------- + *fields: :class:`tuple` + Zero or more tuples of (`field`, `constraint`) indicating constrained + fields. + + Notes + ----- + On top of the primary use of `WithComponentsConstraint` (ensuring presence + or absence of particular components of a :class:`~pyasn1.type.univ.Set` or + :class:`~pyasn1.type.univ.Sequence`), it is also possible to pass any other + constraint objects or their combinations. In case of scalar fields, these + constraints will be verified in addition to the constraints belonging to + scalar components themselves. However, formally, these additional + constraints do not change the type of these ASN.1 objects. + + Examples + -------- + + .. 
code-block:: python + + class Item(Sequence): # Set is similar + ''' + ASN.1 specification: + + Item ::= SEQUENCE { + id INTEGER OPTIONAL, + name OCTET STRING OPTIONAL + } WITH COMPONENTS id PRESENT, name ABSENT | id ABSENT, name PRESENT + ''' + componentType = NamedTypes( + OptionalNamedType('id', Integer()), + OptionalNamedType('name', OctetString()) + ) + withComponents = ConstraintsUnion( + WithComponentsConstraint( + ('id', ComponentPresentConstraint()), + ('name', ComponentAbsentConstraint()) + ), + WithComponentsConstraint( + ('id', ComponentAbsentConstraint()), + ('name', ComponentPresentConstraint()) + ) + ) + + item = Item() + + # This will succeed + item['id'] = 1 + + # This will succeed + item.reset() + item['name'] = 'John' + + # This will fail (on encoding) + item.reset() + descr['id'] = 1 + descr['name'] = 'John' + """ + def _testValue(self, value, idx): + for field, constraint in self._values: + constraint(value.get(field)) + + def _setValues(self, values): + AbstractConstraint._setValues(self, values) + + # This is a bit kludgy, meaning two op modes within a single constraint class InnerTypeConstraint(AbstractConstraint): """Value must satisfy the type and presence constraints""" @@ -352,7 +560,7 @@ class InnerTypeConstraint(AbstractConstraint): if idx not in self.__multipleTypeConstraint: raise error.ValueConstraintError(value) constraint, status = self.__multipleTypeConstraint[idx] - if status == 'ABSENT': # XXX presense is not checked! + if status == 'ABSENT': # XXX presence is not checked! raise error.ValueConstraintError(value) constraint(value) @@ -380,49 +588,41 @@ class ConstraintsExclusion(AbstractConstraint): Parameters ---------- - constraint: - Constraint or logic operator object. + *constraints: + Constraint or logic operator objects. Examples -------- .. code-block:: python - class Lipogramme(IA5STRING): - ''' - ASN.1 specification: - - Lipogramme ::= - IA5String (FROM (ALL EXCEPT ("e"|"E"))) - ''' + class LuckyNumber(Integer): subtypeSpec = ConstraintsExclusion( - PermittedAlphabetConstraint('e', 'E') + SingleValueConstraint(13) ) # this will succeed - lipogramme = Lipogramme('A work of fiction?') + luckyNumber = LuckyNumber(12) # this will raise ValueConstraintError - lipogramme = Lipogramme('Eel') + luckyNumber = LuckyNumber(13) - Warning - ------- - The above example involving PermittedAlphabetConstraint might - not work due to the way how PermittedAlphabetConstraint works. - The other constraints might work with ConstraintsExclusion - though. + Note + ---- + The `FROM ... EXCEPT ...` ASN.1 clause should be modeled by combining + constraint objects into one. See `PermittedAlphabetConstraint` for more + information. """ def _testValue(self, value, idx): - try: - self._values[0](value, idx) - except error.ValueConstraintError: - return - else: + for constraint in self._values: + try: + constraint(value, idx) + + except error.ValueConstraintError: + continue + raise error.ValueConstraintError(value) def _setValues(self, values): - if len(values) != 1: - raise error.PyAsn1Error('Single constraint expected') - AbstractConstraint._setValues(self, values) @@ -467,7 +667,7 @@ class ConstraintsIntersection(AbstractConstraintSet): Parameters ---------- - \*constraints: + *constraints: Constraint or logic operator objects. Examples @@ -500,8 +700,8 @@ class ConstraintsIntersection(AbstractConstraintSet): class ConstraintsUnion(AbstractConstraintSet): """Create a ConstraintsUnion logic operator object. 
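The reworked ConstraintsExclusion now iterates over any number of operands and rejects a value as soon as one of them matches it. A sketch with a hypothetical PortNumber type:

.. code-block:: python

    from pyasn1.type import constraint, univ

    class PortNumber(univ.Integer):
        # exclude port 0 and the ephemeral range
        subtypeSpec = constraint.ConstraintsExclusion(
            constraint.SingleValueConstraint(0),
            constraint.ValueRangeConstraint(49152, 65535)
        )

    PortNumber(443)       # accepted
    # PortNumber(50000)   # would raise ValueConstraintError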
- The ConstraintsUnion logic operator only succeeds if - *at least a single* operand succeeds. + The ConstraintsUnion logic operator succeeds if + *at least* a single operand succeeds. The ConstraintsUnion object can be applied to any constraint and logic operator objects. @@ -511,7 +711,7 @@ class ConstraintsUnion(AbstractConstraintSet): Parameters ---------- - \*constraints: + *constraints: Constraint or logic operator objects. Examples @@ -525,7 +725,7 @@ class ConstraintsUnion(AbstractConstraintSet): CapitalOrSmall ::= IA5String (FROM ("A".."Z") | FROM ("a".."z")) ''' - subtypeSpec = ConstraintsIntersection( + subtypeSpec = ConstraintsUnion( PermittedAlphabetConstraint('A', 'Z'), PermittedAlphabetConstraint('a', 'z') ) diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/error.py b/server/www/packages/packages-linux/x64/pyasn1/type/error.py index b2056bd..80fcf3b 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/error.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/error.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1.error import PyAsn1Error diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/namedtype.py b/server/www/packages/packages-linux/x64/pyasn1/type/namedtype.py index f162d19..cbc1429 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/namedtype.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/namedtype.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import sys @@ -49,9 +49,10 @@ class NamedType(object): representation = '%s=%r' % (self.name, self.asn1Object) if self.openType: - representation += ' openType: %r' % self.openType + representation += ', open type %r' % self.openType - return '<%s object at 0x%x type %s>' % (self.__class__.__name__, id(self), representation) + return '<%s object, type %s>' % ( + self.__class__.__name__, representation) def __eq__(self, other): return self.__nameAndType == other @@ -173,7 +174,8 @@ class NamedTypes(object): def __repr__(self): representation = ', '.join(['%r' % x for x in self.__namedTypes]) - return '<%s object at 0x%x types %s>' % (self.__class__.__name__, id(self), representation) + return '<%s object, types %s>' % ( + self.__class__.__name__, representation) def __eq__(self, other): return self.__namedTypes == other @@ -265,18 +267,18 @@ class NamedTypes(object): return nameToPosMap def __computeAmbiguousTypes(self): - ambigiousTypes = {} - partialAmbigiousTypes = () + ambiguousTypes = {} + partialAmbiguousTypes = () for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))): if namedType.isOptional or namedType.isDefaulted: - partialAmbigiousTypes = (namedType,) + partialAmbigiousTypes + partialAmbiguousTypes = (namedType,) + partialAmbiguousTypes else: - partialAmbigiousTypes = (namedType,) - if len(partialAmbigiousTypes) == len(self.__namedTypes): - ambigiousTypes[idx] = self + partialAmbiguousTypes = (namedType,) + if len(partialAmbiguousTypes) == len(self.__namedTypes): + ambiguousTypes[idx] = self else: - ambigiousTypes[idx] = NamedTypes(*partialAmbigiousTypes, **dict(terminal=True)) - return ambigiousTypes + ambiguousTypes[idx] = NamedTypes(*partialAmbiguousTypes, **dict(terminal=True)) + return ambiguousTypes def 
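NamedTypes, whose repr also changes above, is what gives Sequence/Set fields their names; the renamed ambiguousTypes bookkeeping supports decoding of optional and defaulted fields. Basic usage, for context:

.. code-block:: python

    from pyasn1.type import namedtype, univ

    class LogEntry(univ.Sequence):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', univ.Integer()),
            namedtype.OptionalNamedType('comment', univ.OctetString())
        )

    entry = LogEntry()
    entry['id'] = 1   # fields are addressed by name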
getTypeByPosition(self, idx): """Return ASN.1 type object by its position in fields set. @@ -293,7 +295,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If given position is out of fields range """ try: @@ -317,7 +319,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes* """ try: @@ -341,7 +343,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If given field name is not present in callee *NamedTypes* """ try: @@ -365,7 +367,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If *name* is not present or not unique within callee *NamedTypes* """ try: @@ -394,7 +396,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If given position is out of fields range """ try: @@ -426,7 +428,7 @@ class NamedTypes(object): Raises ------ - : :class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.PyAsn1Error If *tagSet* is not present or not unique within callee *NamedTypes* or *idx* is out of fields range """ diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/namedval.py b/server/www/packages/packages-linux/x64/pyasn1/type/namedval.py index 59257e4..4247597 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/namedval.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/namedval.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # # ASN.1 named integers @@ -23,7 +23,7 @@ class NamedValues(object): Parameters ---------- - \*args: variable number of two-element :py:class:`tuple` + *args: variable number of two-element :py:class:`tuple` name: :py:class:`str` Value label @@ -109,7 +109,8 @@ class NamedValues(object): if len(representation) > 64: representation = representation[:32] + '...' + representation[-32:] - return '<%s object 0x%x enums %s>' % (self.__class__.__name__, id(self), representation) + return '<%s object, enums %s>' % ( + self.__class__.__name__, representation) def __eq__(self, other): return dict(self) == other diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/opentype.py b/server/www/packages/packages-linux/x64/pyasn1/type/opentype.py index d14ab34..29645f0 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/opentype.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/opentype.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # @@ -11,11 +11,22 @@ __all__ = ['OpenType'] class OpenType(object): """Create ASN.1 type map indexed by a value - The *DefinedBy* object models the ASN.1 *DEFINED BY* clause which maps - values to ASN.1 types in the context of the ASN.1 SEQUENCE/SET type. + The *OpenType* object models an untyped field of a constructed ASN.1 + type. In ASN.1 syntax it is usually represented by the + `ANY DEFINED BY` for scalars or `SET OF ANY DEFINED BY`, + `SEQUENCE OF ANY DEFINED BY` for container types clauses. Typically + used together with :class:`~pyasn1.type.univ.Any` object. 
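NamedValues binds symbolic labels to numbers, typically on Integer subtypes; ErrorStatus below is illustrative:

.. code-block:: python

    from pyasn1.type import namedval, univ

    class ErrorStatus(univ.Integer):
        namedValues = namedval.NamedValues(
            ('noError', 0), ('tooBig', 1))

    # Named initializers resolve through the declared values.
    assert ErrorStatus('tooBig') == 1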
- OpenType objects are duck-type a read-only Python :class:`dict` objects, - however the passed `typeMap` is stored by reference. + OpenType objects duck-type a read-only Python :class:`dict` objects, + however the passed `typeMap` is not copied, but stored by reference. + That means the user can manipulate `typeMap` at run time having this + reflected on *OpenType* object behavior. + + The |OpenType| class models an untyped field of a constructed ASN.1 + type. In ASN.1 syntax it is usually represented by the + `ANY DEFINED BY` for scalars or `SET OF ANY DEFINED BY`, + `SEQUENCE OF ANY DEFINED BY` for container types clauses. Typically + used with :class:`~pyasn1.type.univ.Any` type. Parameters ---------- @@ -28,12 +39,14 @@ class OpenType(object): Examples -------- + + For untyped scalars: + .. code-block:: python openType = OpenType( - 'id', - {1: Integer(), - 2: OctetString()} + 'id', {1: Integer(), + 2: OctetString()} ) Sequence( componentType=NamedTypes( @@ -41,6 +54,22 @@ class OpenType(object): NamedType('blob', Any(), openType=openType) ) ) + + For untyped `SET OF` or `SEQUENCE OF` vectors: + + .. code-block:: python + + openType = OpenType( + 'id', {1: Integer(), + 2: OctetString()} + ) + Sequence( + componentType=NamedTypes( + NamedType('id', Integer()), + NamedType('blob', SetOf(componentType=Any()), + openType=openType) + ) + ) """ def __init__(self, name, typeMap=None): diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/tag.py b/server/www/packages/packages-linux/x64/pyasn1/type/tag.py index 95c226f..b88a734 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/tag.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/tag.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -64,8 +64,10 @@ class Tag(object): self.__hash = hash(self.__tagClassId) def __repr__(self): - representation = '[%s:%s:%s]' % (self.__tagClass, self.__tagFormat, self.__tagId) - return '<%s object at 0x%x tag %s>' % (self.__class__.__name__, id(self), representation) + representation = '[%s:%s:%s]' % ( + self.__tagClass, self.__tagFormat, self.__tagId) + return '<%s object, tag %s>' % ( + self.__class__.__name__, representation) def __eq__(self, other): return self.__tagClassId == other @@ -199,7 +201,7 @@ class TagSet(object): else: representation = 'untagged' - return '<%s object at 0x%x %s>' % (self.__class__.__name__, id(self), representation) + return '<%s object, %s>' % (self.__class__.__name__, representation) def __add__(self, superTag): return self.__class__(self.__baseTag, *self.__superTags + (superTag,)) @@ -318,7 +320,7 @@ class TagSet(object): Returns ------- : :py:class:`bool` - `True` if callee is a supertype of *tagSet* + :obj:`True` if callee is a supertype of *tagSet* """ if len(tagSet) < self.__lenOfSuperTags: return False diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/tagmap.py b/server/www/packages/packages-linux/x64/pyasn1/type/tagmap.py index a9d237f..6f5163b 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/tagmap.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/tagmap.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. 
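A runnable round-trip for the scalar case documented above: the BER/DER decoders resolve the open-typed field when asked via decodeOpenTypes=True (type names here are illustrative):

.. code-block:: python

    from pyasn1.codec.der import decoder, encoder
    from pyasn1.type import namedtype, opentype, univ

    openType = opentype.OpenType(
        'id', {1: univ.Integer(), 2: univ.OctetString()})

    class Blob(univ.Sequence):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', univ.Integer()),
            namedtype.NamedType('body', univ.Any(), openType=openType))

    blob = Blob()
    blob['id'] = 1
    blob['body'] = encoder.encode(univ.Integer(12))

    decoded, _ = decoder.decode(
        encoder.encode(blob), asn1Spec=Blob(), decodeOpenTypes=True)

    assert decoded['body'] == 12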
# -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -56,16 +56,16 @@ class TagMap(object): return iter(self.__presentTypes) def __repr__(self): - representation = '%s object at 0x%x' % (self.__class__.__name__, id(self)) + representation = '%s object' % self.__class__.__name__ if self.__presentTypes: - representation += ' present %s' % repr(self.__presentTypes) + representation += ', present %s' % repr(self.__presentTypes) if self.__skipTypes: - representation += ' skip %s' % repr(self.__skipTypes) + representation += ', skip %s' % repr(self.__skipTypes) if self.__defaultType is not None: - representation += ' default %s' % repr(self.__defaultType) + representation += ', default %s' % repr(self.__defaultType) return '<%s>' % representation diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/univ.py b/server/www/packages/packages-linux/x64/pyasn1/type/univ.py index 898cf25..aa688b2 100644 --- a/server/www/packages/packages-linux/x64/pyasn1/type/univ.py +++ b/server/www/packages/packages-linux/x64/pyasn1/type/univ.py @@ -1,7 +1,7 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2018, Ilya Etingof +# Copyright (c) 2005-2019, Ilya Etingof # License: http://snmplabs.com/pyasn1/license.html # import math @@ -31,28 +31,32 @@ __all__ = ['Integer', 'Boolean', 'BitString', 'OctetString', 'Null', # "Simple" ASN.1 types (yet incomplete) -class Integer(base.AbstractSimpleAsn1Item): - """Create |ASN.1| type or object. +class Integer(base.SimpleAsn1Type): + """Create |ASN.1| schema or value object. - |ASN.1| objects are immutable and duck-type Python :class:`int` objects. + |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its + objects are immutable and duck-type Python :class:`int` objects. Keyword Args ------------ value: :class:`int`, :class:`str` or |ASN.1| object - Python integer or string literal or |ASN.1| class instance. + Python :class:`int` or :class:`str` literal or |ASN.1| class + instance. If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s) + Object representing non-default ASN.1 subtype constraint(s). Constraints + verification for |ASN.1| type occurs automatically on object + instantiation. namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` Object representing non-default symbolic aliases for numbers Raises ------ - :py:class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. 
Examples @@ -94,13 +98,13 @@ class Integer(base.AbstractSimpleAsn1Item): namedValues = namedval.NamedValues() # Optimization for faster codec lookup - typeId = base.AbstractSimpleAsn1Item.getTypeId() + typeId = base.SimpleAsn1Type.getTypeId() def __init__(self, value=noValue, **kwargs): if 'namedValues' not in kwargs: kwargs['namedValues'] = self.namedValues - base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs) + base.SimpleAsn1Type.__init__(self, value, **kwargs) def __and__(self, value): return self.clone(self._value & value) @@ -187,7 +191,7 @@ class Integer(base.AbstractSimpleAsn1Item): def __rdivmod__(self, value): return self.clone(divmod(value, self._value)) - __hash__ = base.AbstractSimpleAsn1Item.__hash__ + __hash__ = base.SimpleAsn1Type.__hash__ def __int__(self): return int(self._value) @@ -276,27 +280,31 @@ class Integer(base.AbstractSimpleAsn1Item): class Boolean(Integer): - """Create |ASN.1| type or object. + """Create |ASN.1| schema or value object. - |ASN.1| objects are immutable and duck-type Python :class:`int` objects. + |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its + objects are immutable and duck-type Python :class:`int` objects. Keyword Args ------------ value: :class:`int`, :class:`str` or |ASN.1| object - Python integer or boolean or string literal or |ASN.1| class instance. + Python :class:`int` or :class:`str` literal or |ASN.1| class + instance. If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s) + Object representing non-default ASN.1 subtype constraint(s).Constraints + verification for |ASN.1| type occurs automatically on object + instantiation. namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` Object representing non-default symbolic aliases for numbers Raises ------ - :py:class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. Examples @@ -355,23 +363,27 @@ class SizedInteger(SizedIntegerBase): return self.bitLength -class BitString(base.AbstractSimpleAsn1Item): +class BitString(base.SimpleAsn1Type): """Create |ASN.1| schema or value object. - |ASN.1| objects are immutable and duck-type both Python :class:`tuple` (as a tuple + |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its + objects are immutable and duck-type both Python :class:`tuple` (as a tuple of bits) and :class:`int` objects. Keyword Args ------------ value: :class:`int`, :class:`str` or |ASN.1| object - Python integer or string literal representing binary or hexadecimal - number or sequence of integer bits or |ASN.1| object. + Python :class:`int` or :class:`str` literal representing binary + or hexadecimal number or sequence of integer bits or |ASN.1| object. + If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s) + Object representing non-default ASN.1 subtype constraint(s). Constraints + verification for |ASN.1| type occurs automatically on object + instantiation. 
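As the amended docstrings note, constraints on the scalar types fire at instantiation time; Percentage below is a made-up example:

.. code-block:: python

    from pyasn1 import error
    from pyasn1.type import constraint, univ

    class Percentage(univ.Integer):
        subtypeSpec = constraint.ValueRangeConstraint(0, 100)

    Percentage(50)         # fine
    try:
        Percentage(200)    # rejected at instantiation
    except error.ValueConstraintError:
        pass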
namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` Object representing non-default symbolic aliases for numbers @@ -386,7 +398,7 @@ class BitString(base.AbstractSimpleAsn1Item): Raises ------ - :py:class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. Examples @@ -432,7 +444,7 @@ class BitString(base.AbstractSimpleAsn1Item): namedValues = namedval.NamedValues() # Optimization for faster codec lookup - typeId = base.AbstractSimpleAsn1Item.getTypeId() + typeId = base.SimpleAsn1Type.getTypeId() defaultBinValue = defaultHexValue = noValue @@ -461,7 +473,7 @@ class BitString(base.AbstractSimpleAsn1Item): if 'namedValues' not in kwargs: kwargs['namedValues'] = self.namedValues - base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs) + base.SimpleAsn1Type.__init__(self, value, **kwargs) def __str__(self): return self.asBinary() @@ -720,24 +732,30 @@ except NameError: # Python 2.4 return True -class OctetString(base.AbstractSimpleAsn1Item): +class OctetString(base.SimpleAsn1Type): """Create |ASN.1| schema or value object. - |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`. - When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation. + |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its + objects are immutable and duck-type Python 2 :class:`str` or + Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type + assumes "|encoding|" serialisation. Keyword Args ------------ - value: :class:`str`, :class:`bytes` or |ASN.1| object - string (Python 2) or bytes (Python 3), alternatively unicode object - (Python 2) or string (Python 3) representing character string to be - serialised into octets (note `encoding` parameter) or |ASN.1| object. + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively + class:`unicode` object (Python 2) or :class:`str` (Python 3) + representing character string to be serialised into octets + (note `encoding` parameter) or |ASN.1| object. + If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s) + Object representing non-default ASN.1 subtype constraint(s). Constraints + verification for |ASN.1| type occurs automatically on object + instantiation. encoding: :py:class:`str` Unicode codec ID to encode/decode :class:`unicode` (Python 2) or @@ -754,7 +772,7 @@ class OctetString(base.AbstractSimpleAsn1Item): Raises ------ - :py:class:`~pyasn1.error.PyAsn1Error` + ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. 
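For the BitString initialisers described above, a few equivalent spellings (values chosen arbitrarily):

.. code-block:: python

    from pyasn1.type import univ

    bits = univ.BitString("'10100'B")        # ASN.1 binary literal
    same = univ.BitString(binValue='10100')  # explicit keyword form

    assert bits == same
    assert bits[0] == 1 and len(bits) == 5   # duck-types a tuple of bits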
@@ -720,24 +732,30 @@ except NameError:  # Python 2.4
             return True

-class OctetString(base.AbstractSimpleAsn1Item):
+class OctetString(base.SimpleAsn1Type):
     """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`.
-    When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+    objects are immutable and duck-type Python 2 :class:`str` or
+    Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type
+    assumes "|encoding|" serialisation.

     Keyword Args
     ------------
-    value: :class:`str`, :class:`bytes` or |ASN.1| object
-        string (Python 2) or bytes (Python 3), alternatively unicode object
-        (Python 2) or string (Python 3) representing character string to be
-        serialised into octets (note `encoding` parameter) or |ASN.1| object.
+    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
+        class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
+        class:`unicode` object (Python 2) or :class:`str` (Python 3)
+        representing character string to be serialised into octets
+        (note `encoding` parameter) or |ASN.1| object.
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type occurs automatically on object
+        instantiation.

     encoding: :py:class:`str`
         Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
@@ -754,7 +772,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
@@ -786,7 +804,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
     subtypeSpec = constraint.ConstraintsIntersection()

     # Optimization for faster codec lookup
-    typeId = base.AbstractSimpleAsn1Item.getTypeId()
+    typeId = base.SimpleAsn1Type.getTypeId()

     defaultBinValue = defaultHexValue = noValue
     encoding = 'iso-8859-1'
@@ -816,26 +834,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
         if 'encoding' not in kwargs:
             kwargs['encoding'] = self.encoding

-        base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs)
+        base.SimpleAsn1Type.__init__(self, value, **kwargs)

     if sys.version_info[0] <= 2:
         def prettyIn(self, value):
             if isinstance(value, str):
                 return value
+
             elif isinstance(value, unicode):
                 try:
                     return value.encode(self.encoding)
+
                 except (LookupError, UnicodeEncodeError):
-                    raise error.PyAsn1Error(
-                        "Can't encode string '%s' with codec %s" % (value, self.encoding)
+                    exc = sys.exc_info()[1]
+                    raise error.PyAsn1UnicodeEncodeError(
+                        "Can't encode string '%s' with codec "
+                        "%s" % (value, self.encoding), exc
                     )
+
             elif isinstance(value, (tuple, list)):
                 try:
                     return ''.join([chr(x) for x in value])
+
                 except ValueError:
                     raise error.PyAsn1Error(
                         "Bad %s initializer '%s'" % (self.__class__.__name__, value)
                     )
+
             else:
                 return str(value)
@@ -847,8 +872,10 @@ class OctetString(base.AbstractSimpleAsn1Item):
                 return self._value.decode(self.encoding)

             except UnicodeDecodeError:
-                raise error.PyAsn1Error(
-                    "Can't decode string '%s' with codec %s" % (self._value, self.encoding)
+                exc = sys.exc_info()[1]
+                raise error.PyAsn1UnicodeDecodeError(
+                    "Can't decode string '%s' with codec "
+                    "%s" % (self._value, self.encoding), exc
                 )

         def asOctets(self):
@@ -861,19 +888,26 @@ class OctetString(base.AbstractSimpleAsn1Item):
         def prettyIn(self, value):
             if isinstance(value, bytes):
                 return value
+
             elif isinstance(value, str):
                 try:
                     return value.encode(self.encoding)
+
                 except UnicodeEncodeError:
-                    raise error.PyAsn1Error(
-                        "Can't encode string '%s' with '%s' codec" % (value, self.encoding)
+                    exc = sys.exc_info()[1]
+                    raise error.PyAsn1UnicodeEncodeError(
+                        "Can't encode string '%s' with '%s' "
+                        "codec" % (value, self.encoding), exc
                     )
             elif isinstance(value, OctetString):  # a shortcut, bytes() would work the same way
                 return value.asOctets()
-            elif isinstance(value, base.AbstractSimpleAsn1Item):  # this mostly targets Integer objects
+
+            elif isinstance(value, base.SimpleAsn1Type):  # this mostly targets Integer objects
                 return self.prettyIn(str(value))
+
             elif isinstance(value, (tuple, list)):
                 return self.prettyIn(bytes(value))
+
             else:
                 return bytes(value)
@@ -882,8 +916,11 @@ class OctetString(base.AbstractSimpleAsn1Item):
                 return self._value.decode(self.encoding)

             except UnicodeDecodeError:
-                raise error.PyAsn1Error(
-                    "Can't decode string '%s' with '%s' codec at '%s'" % (self._value, self.encoding, self.__class__.__name__)
+                exc = sys.exc_info()[1]
+                raise error.PyAsn1UnicodeDecodeError(
+                    "Can't decode string '%s' with '%s' codec at "
+                    "'%s'" % (self._value, self.encoding,
+                              self.__class__.__name__), exc
                 )

     def __bytes__(self):
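
The codec failures above are narrowed from a generic PyAsn1Error into PyAsn1UnicodeEncodeError/PyAsn1UnicodeDecodeError with the original exception attached; both still derive from PyAsn1Error, so existing handlers keep working. A sketch, assuming upstream pyasn1 0.4.x:

    from pyasn1 import error
    from pyasn1.type import univ

    try:
        univ.OctetString('\u20ac', encoding='ascii')   # euro sign is not ASCII
    except error.PyAsn1Error as exc:                   # also catches the Unicode subclass
        print(type(exc).__name__)
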
@@ -1028,19 +1065,22 @@ class OctetString(base.AbstractSimpleAsn1Item):
 class Null(OctetString):
     """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python :class:`str` objects (always empty).
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+    objects are immutable and duck-type Python :class:`str` objects
+    (always empty).

     Keyword Args
     ------------
-    value: :class:`str` or :py:class:`~pyasn1.type.univ.Null` object
-        Python empty string literal or any object that evaluates to `False`
+    value: :class:`str` or |ASN.1| object
+        Python empty :class:`str` literal or any object that evaluates to :obj:`False`
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
@@ -1081,25 +1121,30 @@ else:
     numericTypes = intTypes + (float,)

-class ObjectIdentifier(base.AbstractSimpleAsn1Item):
+class ObjectIdentifier(base.SimpleAsn1Type):
     """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python :class:`tuple` objects (tuple of non-negative integers).
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+    objects are immutable and duck-type Python :class:`tuple` objects
+    (tuple of non-negative integers).

     Keyword Args
     ------------
     value: :class:`tuple`, :class:`str` or |ASN.1| object
-        Python sequence of :class:`int` or string literal or |ASN.1| object.
+        Python sequence of :class:`int` or :class:`str` literal or |ASN.1| object.
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type occurs automatically on object
+        instantiation.

     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
@@ -1131,7 +1176,7 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
     subtypeSpec = constraint.ConstraintsIntersection()

     # Optimization for faster codec lookup
-    typeId = base.AbstractSimpleAsn1Item.getTypeId()
+    typeId = base.SimpleAsn1Type.getTypeId()

     def __add__(self, other):
         return self.clone(self._value + other)
@@ -1173,8 +1218,8 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
         Returns
         -------
         : :class:`bool`
-            :class:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object
-            or :class:`False` otherwise.
+            :obj:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object
+            or :obj:`False` otherwise.
         """
         l = len(self)
         if l <= len(other):
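
isPrefixOf() now documents plain :obj:`True`/:obj:`False` returns; the call itself is unchanged (sketch against upstream pyasn1 0.4.x):

    from pyasn1.type import univ

    oid = univ.ObjectIdentifier((1, 3, 6, 1, 4, 1))
    assert oid.isPrefixOf(univ.ObjectIdentifier('1.3.6.1.4.1.42'))
    assert not oid.isPrefixOf(univ.ObjectIdentifier('1.3.6'))
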
@@ -1214,10 +1259,11 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
         return '.'.join([str(x) for x in value])

-class Real(base.AbstractSimpleAsn1Item):
+class Real(base.SimpleAsn1Type):
     """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python :class:`float` objects.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+    objects are immutable and duck-type Python :class:`float` objects.
     Additionally, |ASN.1| objects behave like a :class:`tuple` in which case its
     elements are mantissa, base and exponent.
@@ -1225,17 +1271,20 @@
     ------------
     value: :class:`tuple`, :class:`float` or |ASN.1| object
         Python sequence of :class:`int` (representing mantissa, base and
-        exponent) or float instance or *Real* class instance.
+        exponent) or :class:`float` instance or |ASN.1| object.
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type occurs automatically on object
+        instantiation.

     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
@@ -1278,7 +1327,7 @@ class Real(base.AbstractSimpleAsn1Item):
     subtypeSpec = constraint.ConstraintsIntersection()

     # Optimization for faster codec lookup
-    typeId = base.AbstractSimpleAsn1Item.getTypeId()
+    typeId = base.SimpleAsn1Type.getTypeId()

     @staticmethod
     def __normalizeBase10(value):
@@ -1342,8 +1391,8 @@ class Real(base.AbstractSimpleAsn1Item):
         Returns
         -------
         : :class:`bool`
-            :class:`True` if calling object represents plus infinity
-            or :class:`False` otherwise.
+            :obj:`True` if calling object represents plus infinity
+            or :obj:`False` otherwise.
         """
         return self._value == self._plusInf
@@ -1355,8 +1404,8 @@ class Real(base.AbstractSimpleAsn1Item):
         Returns
         -------
         : :class:`bool`
-            :class:`True` if calling object represents minus infinity
-            or :class:`False` otherwise.
+            :obj:`True` if calling object represents minus infinity
+            or :obj:`False` otherwise.
         """
         return self._value == self._minusInf
@@ -1479,7 +1528,7 @@ class Real(base.AbstractSimpleAsn1Item):
     def __bool__(self):
         return bool(float(self))

-    __hash__ = base.AbstractSimpleAsn1Item.__hash__
+    __hash__ = base.SimpleAsn1Type.__hash__

     def __getitem__(self, idx):
         if self._value in self._inf:
@@ -1500,27 +1549,31 @@ class Real(base.AbstractSimpleAsn1Item):
 class Enumerated(Integer):
-    """Create |ASN.1| type or object.
+    """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python :class:`int` objects.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+    objects are immutable and duck-type Python :class:`int` objects.

     Keyword Args
     ------------
     value: :class:`int`, :class:`str` or |ASN.1| object
-        Python integer or string literal or |ASN.1| class instance.
+        Python :class:`int` or :class:`str` literal or |ASN.1| object.
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type occurs automatically on object
+        instantiation.

     namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
         Object representing non-default symbolic aliases for numbers

     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
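
Real keeps both its float duck-typing and the (mantissa, base, exponent) tuple behaviour noted in the docstring. A sketch (the normalized mantissa/exponent printed are indicative, assuming upstream pyasn1 0.4.x):

    from pyasn1.type import univ

    pi = univ.Real(3.14)
    mantissa, base, exponent = pi          # behaves like a 3-tuple
    print(mantissa, base, exponent)        # e.g. 314 10 -2 after normalization
    assert not pi.isPlusInf and not pi.isMinusInf
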
@@ -1566,10 +1619,11 @@ class Enumerated(Integer):
 # "Structured" ASN.1 types

-class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
-    """Create |ASN.1| type.
+class SequenceOfAndSetOfBase(base.ConstructedAsn1Type):
+    """Create |ASN.1| schema or value object.

-    |ASN.1| objects are mutable and duck-type Python :class:`list` objects.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+    its objects are mutable and duck-type Python :class:`list` objects.

     Keyword Args
     ------------
@@ -1580,10 +1634,9 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
-
-    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing collection size constraint
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type can only occur on explicit
+        `.isInconsistent` call.

     Examples
     --------
@@ -1605,12 +1658,14 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
         # support positional params for backward compatibility
         if args:
             for key, value in zip(('componentType', 'tagSet',
-                                   'subtypeSpec', 'sizeSpec'), args):
+                                   'subtypeSpec'), args):
                 if key in kwargs:
                     raise error.PyAsn1Error('Conflicting positional and keyword params!')
                 kwargs['componentType'] = value

-        base.AbstractConstructedAsn1Item.__init__(self, **kwargs)
+        self._componentValues = noValue
+
+        base.ConstructedAsn1Type.__init__(self, **kwargs)

     # Python list protocol
@@ -1628,24 +1683,36 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
         except error.PyAsn1Error:
             raise IndexError(sys.exc_info()[1])

-    def clear(self):
-        self._componentValues = []
-
     def append(self, value):
-        self[len(self)] = value
+        if self._componentValues is noValue:
+            pos = 0
+
+        else:
+            pos = len(self._componentValues)
+
+        self[pos] = value

     def count(self, value):
-        return self._componentValues.count(value)
+        return list(self._componentValues.values()).count(value)

     def extend(self, values):
         for value in values:
             self.append(value)

+        if self._componentValues is noValue:
+            self._componentValues = {}
+
     def index(self, value, start=0, stop=None):
         if stop is None:
             stop = len(self)
+
+        indices, values = zip(*self._componentValues.items())
+
+        # TODO: remove when Py2.5 support is gone
+        values = list(values)
+
         try:
-            return self._componentValues.index(value, start, stop)
+            return indices[values.index(value, start, stop)]

         except error.PyAsn1Error:
             raise ValueError(sys.exc_info()[1])
@@ -1654,15 +1721,24 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
         self._componentValues.reverse()

     def sort(self, key=None, reverse=False):
-        self._componentValues.sort(key=key, reverse=reverse)
+        self._componentValues = dict(
+            enumerate(sorted(self._componentValues.values(),
+                             key=key, reverse=reverse)))
+
+    def __len__(self):
+        if self._componentValues is noValue or not self._componentValues:
+            return 0
+
+        return max(self._componentValues) + 1

     def __iter__(self):
-        return iter(self._componentValues)
+        for idx in range(0, len(self)):
+            yield self.getComponentByPosition(idx)

     def _cloneComponentValues(self, myClone, cloneValueFlag):
-        for idx, componentValue in enumerate(self._componentValues):
+        for idx, componentValue in self._componentValues.items():
             if componentValue is not noValue:
-                if isinstance(componentValue, base.AbstractConstructedAsn1Item):
+                if isinstance(componentValue, base.ConstructedAsn1Type):
                     myClone.setComponentByPosition(
                         idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
                     )
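
Internally _componentValues becomes a sparse dict here, but the Python list protocol shown above is preserved. A usage sketch (upstream pyasn1 0.4.x):

    from pyasn1.type import univ

    seq = univ.SequenceOf(componentType=univ.Integer())
    seq.extend([1, 2, 3])
    seq.append(4)
    assert len(seq) == 4 and seq.index(3) == 2
    seq.sort(reverse=True)
    print([int(x) for x in seq])    # [4, 3, 2, 1]
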
@@ -1689,8 +1765,8 @@
             object instead of the requested component.

         instantiate: :class:`bool`
-            If `True` (default), inner component will be automatically instantiated.
-            If 'False' either existing component or the `noValue` object will be
+            If :obj:`True` (default), inner component will be automatically instantiated.
+            If :obj:`False` either existing component or the :class:`NoValue` object will be
             returned.

         Returns
@@ -1735,10 +1811,21 @@
             # returns noValue
             s.getComponentByPosition(0, instantiate=False)
         """
+        if isinstance(idx, slice):
+            indices = tuple(range(len(self)))
+            return [self.getComponentByPosition(subidx, default, instantiate)
+                    for subidx in indices[idx]]
+
+        if idx < 0:
+            idx = len(self) + idx
+            if idx < 0:
+                raise error.PyAsn1Error(
+                    'SequenceOf/SetOf index is out of range')
+
         try:
             componentValue = self._componentValues[idx]

-        except IndexError:
+        except (KeyError, error.PyAsn1Error):
             if not instantiate:
                 return default
@@ -1773,15 +1860,16 @@
         value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
             A Python value to initialize |ASN.1| component with (if *componentType* is set)
             or ASN.1 value object to assign to |ASN.1| component.
+            If `value` is not given, schema object will be set as a component.

         verifyConstraints: :class:`bool`
-            If `False`, skip constraints validation
+            If :obj:`False`, skip constraints validation

         matchTags: :class:`bool`
-            If `False`, skip component tags matching
+            If :obj:`False`, skip component tags matching

         matchConstraints: :class:`bool`
-            If `False`, skip component constraints matching
+            If :obj:`False`, skip component constraints matching

         Returns
         -------
@@ -1789,51 +1877,75 @@
         Raises
         ------
-        IndexError:
+        ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+            On constraint violation or bad initializer
+        IndexError
             When idx > len(self)
         """
+        if isinstance(idx, slice):
+            indices = tuple(range(len(self)))
+            startIdx = indices and indices[idx][0] or 0
+            for subIdx, subValue in enumerate(value):
+                self.setComponentByPosition(
+                    startIdx + subIdx, subValue, verifyConstraints,
+                    matchTags, matchConstraints)
+            return self
+
+        if idx < 0:
+            idx = len(self) + idx
+            if idx < 0:
+                raise error.PyAsn1Error(
+                    'SequenceOf/SetOf index is out of range')
+
         componentType = self.componentType

-        try:
-            currentValue = self._componentValues[idx]
-        except IndexError:
-            currentValue = noValue
+        if self._componentValues is noValue:
+            componentValues = {}

-            if len(self._componentValues) < idx:
-                raise error.PyAsn1Error('Component index out of range')
+        else:
+            componentValues = self._componentValues
+
+        currentValue = componentValues.get(idx, noValue)

         if value is noValue:
             if componentType is not None:
                 value = componentType.clone()
+
             elif currentValue is noValue:
                 raise error.PyAsn1Error('Component type not defined')
+
         elif not isinstance(value, base.Asn1Item):
-            if componentType is not None and isinstance(componentType, base.AbstractSimpleAsn1Item):
+            if (componentType is not None and
+                    isinstance(componentType, base.SimpleAsn1Type)):
                 value = componentType.clone(value=value)
-            elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
+
+            elif (currentValue is not noValue and
+                    isinstance(currentValue, base.SimpleAsn1Type)):
                 value = currentValue.clone(value=value)
+
             else:
-                raise error.PyAsn1Error('Non-ASN.1 value %r and undefined component type at %r' % (value, self))
-        elif componentType is not None:
-            if self.strictConstraints:
-                if not componentType.isSameTypeWith(value, matchTags, matchConstraints):
-                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
-            else:
-                if not componentType.isSuperTypeOf(value, matchTags, matchConstraints):
-                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
+                raise error.PyAsn1Error(
+                    'Non-ASN.1 value %r and undefined component'
+                    ' type at %r' % (value, self))

-        if verifyConstraints and value.isValue:
-            try:
-                self.subtypeSpec(value, idx)
+        elif componentType is not None and (matchTags or matchConstraints):
+            subtypeChecker = (
+                    self.strictConstraints and
+                    componentType.isSameTypeWith or
+                    componentType.isSuperTypeOf)

-            except error.PyAsn1Error:
-                exType, exValue, exTb = sys.exc_info()
-                raise exType('%s at %s' % (exValue, self.__class__.__name__))
+            if not subtypeChecker(value, verifyConstraints and matchTags,
+                                  verifyConstraints and matchConstraints):
+                # TODO: we should wrap componentType with UnnamedType to carry
+                # additional properties associated with componentType
+                if componentType.typeId != Any.typeId:
+                    raise error.PyAsn1Error(
+                        'Component value is tag-incompatible: %r vs '
+                        '%r' % (value, componentType))

-        if currentValue is noValue:
-            self._componentValues.append(value)
-        else:
-            self._componentValues[idx] = value
+        componentValues[idx] = value
+
+        self._componentValues = componentValues

         return self
@@ -1842,16 +1954,44 @@
         if self.componentType is not None:
             return self.componentType.tagMap

+    @property
+    def components(self):
+        return [self._componentValues[idx]
+                for idx in sorted(self._componentValues)]
+
+    def clear(self):
+        """Remove all components and become an empty |ASN.1| value object.
+
+        Has the same effect on |ASN.1| object as it does on :class:`list`
+        built-in.
+        """
+        self._componentValues = {}
+        return self
+
+    def reset(self):
+        """Remove all components and become a |ASN.1| schema object.
+
+        See :meth:`isValue` property for more information on the
+        distinction between value and schema objects.
+        """
+        self._componentValues = noValue
+        return self
+
     def prettyPrint(self, scope=0):
         scope += 1
         representation = self.__class__.__name__ + ':\n'
-        for idx, componentValue in enumerate(self._componentValues):
+
+        if not self.isValue:
+            return representation
+
+        for idx, componentValue in enumerate(self):
             representation += ' ' * scope
             if (componentValue is noValue and
                     self.componentType is not None):
                 representation += ''
             else:
                 representation += componentValue.prettyPrint(scope)
+
         return representation

     def prettyPrintType(self, scope=0):
@@ -1867,17 +2007,17 @@
     def isValue(self):
         """Indicate that |ASN.1| object represents ASN.1 value.

-        If *isValue* is `False` then this object represents just ASN.1 schema.
+        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.

-        If *isValue* is `True` then, in addition to its ASN.1 schema features,
-        this object can also be used like a Python built-in object (e.g. `int`,
-        `str`, `dict` etc.).
+        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+        this object can also be used like a Python built-in object
+        (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).

         Returns
         -------
         : :class:`bool`
-            :class:`False` if object represents just ASN.1 schema.
-            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
+            :obj:`False` if object represents just ASN.1 schema.
+            :obj:`True` if object represents ASN.1 schema and can be used as a normal value.

         Note
         ----
@@ -1890,12 +2030,53 @@
         The PyASN1 value objects can **additionally** participate in many operations
         involving regular Python objects (e.g. arithmetic, comprehension etc).
         """
-        for componentValue in self._componentValues:
+        if self._componentValues is noValue:
+            return False
+
+        if len(self._componentValues) != len(self):
+            return False
+
+        for componentValue in self._componentValues.values():
             if componentValue is noValue or not componentValue.isValue:
                 return False

         return True

+    @property
+    def isInconsistent(self):
+        """Run necessary checks to ensure |ASN.1| object consistency.
+
+        Default action is to verify |ASN.1| object against constraints imposed
+        by `subtypeSpec`.
+
+        Raises
+        ------
+        :py:class:`~pyasn1.error.PyAsn1Error` on any inconsistencies found
+        """
+        if self.componentType is noValue or not self.subtypeSpec:
+            return False
+
+        if self._componentValues is noValue:
+            return True
+
+        mapping = {}
+
+        for idx, value in self._componentValues.items():
+            # Absent fields are not in the mapping
+            if value is noValue:
+                continue
+
+            mapping[idx] = value
+
+        try:
+            # Represent SequenceOf/SetOf as a bare dict to constraints chain
+            self.subtypeSpec(mapping)
+
+        except error.PyAsn1Error:
+            exc = sys.exc_info()[1]
+            return exc
+
+        return False

 class SequenceOf(SequenceOfAndSetOfBase):
     __doc__ = SequenceOfAndSetOfBase.__doc__
@@ -1916,10 +2097,6 @@ class SequenceOf(SequenceOfAndSetOfBase):
     #: imposing constraints on |ASN.1| type initialization values.
     subtypeSpec = constraint.ConstraintsIntersection()

-    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-    #: object imposing size constraint on |ASN.1| objects
-    sizeSpec = constraint.ConstraintsIntersection()
-
     # Disambiguation ASN.1 types identification
     typeId = SequenceOfAndSetOfBase.getTypeId()
@@ -1943,18 +2120,15 @@ class SetOf(SequenceOfAndSetOfBase):
     #: imposing constraints on |ASN.1| type initialization values.
     subtypeSpec = constraint.ConstraintsIntersection()

-    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-    #: object imposing size constraint on |ASN.1| objects
-    sizeSpec = constraint.ConstraintsIntersection()
-
     # Disambiguation ASN.1 types identification
     typeId = SequenceOfAndSetOfBase.getTypeId()
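
With sizeSpec folded into subtypeSpec, collection size limits are now checked lazily through the new isInconsistent property, and reset() returns the object to schema state. A sketch (the Pair class is illustrative, not from the diff; upstream pyasn1 0.4.x assumed):

    from pyasn1.type import constraint, univ

    class Pair(univ.SequenceOf):
        componentType = univ.Integer()
        subtypeSpec = constraint.ConstraintsIntersection(
            constraint.ValueSizeConstraint(1, 2))

    pair = Pair()
    pair.extend([1, 2, 3])               # one element too many
    print(bool(pair.isInconsistent))     # True: the constraint violation is returned
    pair.reset()                         # back to a schema object
    assert not pair.isValue
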

-class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
-    """Create |ASN.1| type.
+class SequenceAndSetBase(base.ConstructedAsn1Type):
+    """Create |ASN.1| schema or value object.

-    |ASN.1| objects are mutable and duck-type Python :class:`dict` objects.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+    its objects are mutable and duck-type Python :class:`dict` objects.

     Keyword Args
     ------------
@@ -1965,10 +2139,9 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
-
-    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing collection size constraint
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type can only occur on explicit
+        `.isInconsistent` call.

     Examples
     --------
@@ -2042,8 +2215,12 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
     def __init__(self, **kwargs):
-        base.AbstractConstructedAsn1Item.__init__(self, **kwargs)
+        base.ConstructedAsn1Type.__init__(self, **kwargs)
         self._componentTypeLen = len(self.componentType)
+        if self._componentTypeLen:
+            self._componentValues = []
+        else:
+            self._componentValues = noValue
         self._dynamicNames = self._componentTypeLen or self.DynamicNames()

     def __getitem__(self, idx):
@@ -2086,6 +2263,9 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         else:
             return key in self._dynamicNames

+    def __len__(self):
+        return len(self._componentValues)
+
     def __iter__(self):
         return iter(self.componentType or self._dynamicNames)

@@ -2112,13 +2292,36 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             self[k] = mappingValue[k]

     def clear(self):
+        """Remove all components and become an empty |ASN.1| value object.
+
+        Has the same effect on |ASN.1| object as it does on :class:`dict`
+        built-in.
+        """
         self._componentValues = []
         self._dynamicNames = self.DynamicNames()
+        return self
+
+    def reset(self):
+        """Remove all components and become a |ASN.1| schema object.
+
+        See :meth:`isValue` property for more information on the
+        distinction between value and schema objects.
+        """
+        self._componentValues = noValue
+        self._dynamicNames = self.DynamicNames()
+        return self
+
+    @property
+    def components(self):
+        return self._componentValues

     def _cloneComponentValues(self, myClone, cloneValueFlag):
+        if self._componentValues is noValue:
+            return
+
         for idx, componentValue in enumerate(self._componentValues):
             if componentValue is not noValue:
-                if isinstance(componentValue, base.AbstractConstructedAsn1Item):
+                if isinstance(componentValue, base.ConstructedAsn1Type):
                     myClone.setComponentByPosition(
                         idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
                     )
@@ -2142,14 +2345,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             object instead of the requested component.

         instantiate: :class:`bool`
-            If `True` (default), inner component will be automatically instantiated.
-            If 'False' either existing component or the `noValue` object will be
-            returned.
+            If :obj:`True` (default), inner component will be automatically
+            instantiated.
+            If :obj:`False` either existing component or the :class:`NoValue`
+            object will be returned.

         Returns
         -------
         : :py:class:`~pyasn1.type.base.PyAsn1Item`
-            Instantiate |ASN.1| component type or return existing component value
+            Instantiate |ASN.1| component type or return existing
+            component value
         """
         if self._componentTypeLen:
             idx = self.componentType.getPositionByName(name)
@@ -2180,15 +2385,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
             A Python value to initialize |ASN.1| component with (if *componentType* is set)
             or ASN.1 value object to assign to |ASN.1| component.
+            If `value` is not given, schema object will be set as a component.

         verifyConstraints: :class:`bool`
-            If `False`, skip constraints validation
+            If :obj:`False`, skip constraints validation

         matchTags: :class:`bool`
-            If `False`, skip component tags matching
+            If :obj:`False`, skip component tags matching

         matchConstraints: :class:`bool`
-            If `False`, skip component constraints matching
+            If :obj:`False`, skip component constraints matching

         Returns
         -------
@@ -2226,9 +2432,10 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             object instead of the requested component.

         instantiate: :class:`bool`
-            If `True` (default), inner component will be automatically instantiated.
-            If 'False' either existing component or the `noValue` object will be
-            returned.
+            If :obj:`True` (default), inner component will be automatically
+            instantiated.
+            If :obj:`False` either existing component or the :class:`NoValue`
+            object will be returned.

         Returns
         -------
@@ -2275,7 +2482,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             s.getComponentByPosition(0, instantiate=False)
         """
         try:
-            componentValue = self._componentValues[idx]
+            if self._componentValues is noValue:
+                componentValue = noValue
+
+            else:
+                componentValue = self._componentValues[idx]

         except IndexError:
             componentValue = noValue
@@ -2317,15 +2528,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
             A Python value to initialize |ASN.1| component with (if *componentType* is set)
             or ASN.1 value object to assign to |ASN.1| component.
+            If `value` is not given, schema object will be set as a component.

         verifyConstraints : :class:`bool`
-            If `False`, skip constraints validation
+            If :obj:`False`, skip constraints validation

         matchTags: :class:`bool`
-            If `False`, skip component tags matching
+            If :obj:`False`, skip component tags matching

         matchConstraints: :class:`bool`
-            If `False`, skip component constraints matching
+            If :obj:`False`, skip component constraints matching

         Returns
         -------
@@ -2334,8 +2546,14 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         componentType = self.componentType
         componentTypeLen = self._componentTypeLen

+        if self._componentValues is noValue:
+            componentValues = []
+
+        else:
+            componentValues = self._componentValues
+
         try:
-            currentValue = self._componentValues[idx]
+            currentValue = componentValues[idx]

         except IndexError:
             currentValue = noValue
@@ -2343,11 +2561,13 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             if componentTypeLen < idx:
                 raise error.PyAsn1Error('component index out of range')

-            self._componentValues = [noValue] * componentTypeLen
+            componentValues = [noValue] * componentTypeLen

         if value is noValue:
             if componentTypeLen:
-                value = componentType.getTypeByPosition(idx).clone()
+                value = componentType.getTypeByPosition(idx)
+                if isinstance(value, base.ConstructedAsn1Type):
+                    value = value.clone(cloneValueFlag=componentType[idx].isDefaulted)

             elif currentValue is noValue:
                 raise error.PyAsn1Error('Component type not defined')
@@ -2355,64 +2575,61 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         elif not isinstance(value, base.Asn1Item):
             if componentTypeLen:
                 subComponentType = componentType.getTypeByPosition(idx)
-                if isinstance(subComponentType, base.AbstractSimpleAsn1Item):
+                if isinstance(subComponentType, base.SimpleAsn1Type):
                     value = subComponentType.clone(value=value)

                 else:
                     raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__)

-            elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
+            elif currentValue is not noValue and isinstance(currentValue, base.SimpleAsn1Type):
                 value = currentValue.clone(value=value)

             else:
                 raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__)

-        elif (matchTags or matchConstraints) and componentTypeLen:
+        elif ((verifyConstraints or matchTags or matchConstraints) and
+                componentTypeLen):
             subComponentType = componentType.getTypeByPosition(idx)

             if subComponentType is not noValue:
                 subtypeChecker = (self.strictConstraints and
                                   subComponentType.isSameTypeWith or
                                   subComponentType.isSuperTypeOf)

-                if not subtypeChecker(value, matchTags, matchConstraints):
+                if not subtypeChecker(value, verifyConstraints and matchTags,
+                                      verifyConstraints and matchConstraints):
                     if not componentType[idx].openType:
                         raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))

-        if verifyConstraints and value.isValue:
-            try:
-                self.subtypeSpec(value, idx)
-
-            except error.PyAsn1Error:
-                exType, exValue, exTb = sys.exc_info()
-                raise exType('%s at %s' % (exValue, self.__class__.__name__))
-
         if componentTypeLen or idx in self._dynamicNames:
-            self._componentValues[idx] = value
+            componentValues[idx] = value

-        elif len(self._componentValues) == idx:
-            self._componentValues.append(value)
+        elif len(componentValues) == idx:
+            componentValues.append(value)
             self._dynamicNames.addField(idx)

         else:
             raise error.PyAsn1Error('Component index out of range')

+        self._componentValues = componentValues
+
         return self

     @property
     def isValue(self):
         """Indicate that |ASN.1| object represents ASN.1 value.

-        If *isValue* is `False` then this object represents just ASN.1 schema.
+        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.

-        If *isValue* is `True` then, in addition to its ASN.1 schema features,
-        this object can also be used like a Python built-in object (e.g. `int`,
-        `str`, `dict` etc.).
+        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+        this object can also be used like a Python built-in object (e.g.
+        :class:`int`, :class:`str`, :class:`dict` etc.).

         Returns
         -------
         : :class:`bool`
-            :class:`False` if object represents just ASN.1 schema.
-            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
+            :obj:`False` if object represents just ASN.1 schema.
+            :obj:`True` if object represents ASN.1 schema and can be used as a
+            normal value.

         Note
         ----
@@ -2424,7 +2641,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         The PyASN1 value objects can **additionally** participate in many operations
         involving regular Python objects (e.g. arithmetic, comprehension etc).
+
+        It is sufficient for |ASN.1| objects to have all non-optional and non-defaulted
+        components being value objects to be considered as a value objects as a whole.
+        In other words, even having one or more optional components not turned into
+        value objects, |ASN.1| object is still considered as a value object. Defaulted
+        components are normally value objects by default.
         """
+        if self._componentValues is noValue:
+            return False
+
         componentType = self.componentType

         if componentType:
@@ -2446,6 +2672,44 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):

         return True
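
Under the relaxed rule spelled out in the new Note, a Sequence counts as a value object once every non-optional, non-defaulted component is set. A sketch with NamedTypes (upstream pyasn1 0.4.x):

    from pyasn1.type import namedtype, univ

    class Record(univ.Sequence):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', univ.Integer()),
            namedtype.OptionalNamedType('note', univ.OctetString()))

    rec = Record()
    assert not rec.isValue
    rec.setComponentByName('id', 7)   # optional 'note' may stay unset
    assert rec.isValue
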
+    @property
+    def isInconsistent(self):
+        """Run necessary checks to ensure |ASN.1| object consistency.
+
+        Default action is to verify |ASN.1| object against constraints imposed
+        by `subtypeSpec`.
+
+        Raises
+        ------
+        :py:class:`~pyasn1.error.PyAsn1Error` on any inconsistencies found
+        """
+        if self.componentType is noValue or not self.subtypeSpec:
+            return False
+
+        if self._componentValues is noValue:
+            return True
+
+        mapping = {}
+
+        for idx, value in enumerate(self._componentValues):
+            # Absent fields are not in the mapping
+            if value is noValue:
+                continue
+
+            name = self.componentType.getNameByPosition(idx)
+
+            mapping[name] = value
+
+        try:
+            # Represent Sequence/Set as a bare dict to constraints chain
+            self.subtypeSpec(mapping)
+
+        except error.PyAsn1Error:
+            exc = sys.exc_info()[1]
+            return exc
+
+        return False
+
     def prettyPrint(self, scope=0):
         """Return an object representation string.
@@ -2495,7 +2759,6 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
         if self._componentTypeLen:
             return self.componentType[idx].name

-
 class Sequence(SequenceAndSetBase):
     __doc__ = SequenceAndSetBase.__doc__

@@ -2511,10 +2774,6 @@ class Sequence(SequenceAndSetBase):
     #: imposing constraints on |ASN.1| type initialization values.
     subtypeSpec = constraint.ConstraintsIntersection()

-    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-    #: object imposing constraints on |ASN.1| objects
-    sizeSpec = constraint.ConstraintsIntersection()
-
     #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`)
     #: object imposing size constraint on |ASN.1| objects
     componentType = namedtype.NamedTypes()
@@ -2554,10 +2813,6 @@ class Set(SequenceAndSetBase):
     #: imposing constraints on |ASN.1| type initialization values.
     subtypeSpec = constraint.ConstraintsIntersection()

-    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-    #: object imposing constraints on |ASN.1| objects
-    sizeSpec = constraint.ConstraintsIntersection()
-
     # Disambiguation ASN.1 types identification
     typeId = SequenceAndSetBase.getTypeId()
@@ -2581,9 +2836,10 @@ class Set(SequenceAndSetBase):
             object instead of the requested component.

         instantiate: :class:`bool`
-            If `True` (default), inner component will be automatically instantiated.
-            If 'False' either existing component or the `noValue` object will be
-            returned.
+            If :obj:`True` (default), inner component will be automatically
+            instantiated.
+            If :obj:`False` either existing component or the :class:`noValue`
+            object will be returned.

         Returns
         -------
@@ -2619,18 +2875,19 @@ class Set(SequenceAndSetBase):
         value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
             A Python value to initialize |ASN.1| component with (if *componentType* is set)
             or ASN.1 value object to assign to |ASN.1| component.
+            If `value` is not given, schema object will be set as a component.

         verifyConstraints : :class:`bool`
-            If `False`, skip constraints validation
+            If :obj:`False`, skip constraints validation

         matchTags: :class:`bool`
-            If `False`, skip component tags matching
+            If :obj:`False`, skip component tags matching

         matchConstraints: :class:`bool`
-            If `False`, skip component constraints matching
+            If :obj:`False`, skip component constraints matching

         innerFlag: :class:`bool`
-            If `True`, search for matching *tagSet* recursively.
+            If :obj:`True`, search for matching *tagSet* recursively.

         Returns
         -------
@@ -2662,9 +2919,10 @@ class Set(SequenceAndSetBase):

 class Choice(Set):
-    """Create |ASN.1| type.
+    """Create |ASN.1| schema or value object.

-    |ASN.1| objects are mutable and duck-type Python :class:`dict` objects.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+    its objects are mutable and duck-type Python :class:`list` objects.

     Keyword Args
     ------------
@@ -2675,10 +2933,9 @@ class Choice(Set):
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
-
-    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing collection size constraint
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type can only occur on explicit
+        `.isInconsistent` call.

     Examples
     --------
@@ -2718,11 +2975,7 @@ class Choice(Set):
     #: Set (on class, not on instance) or return a
     #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
     #: imposing constraints on |ASN.1| type initialization values.
-    subtypeSpec = constraint.ConstraintsIntersection()
-
-    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-    #: object imposing size constraint on |ASN.1| objects
-    sizeSpec = constraint.ConstraintsIntersection(
+    subtypeSpec = constraint.ConstraintsIntersection(
         constraint.ValueSizeConstraint(1, 1)
     )

@@ -2795,7 +3048,7 @@ class Choice(Set):
         if self._currentIdx is not None:
             yield self.componentType[self._currentIdx].getName(), self[self._currentIdx]

-    def verifySizeSpec(self):
+    def checkConsistency(self):
         if self._currentIdx is None:
             raise error.PyAsn1Error('Component not chosen')

@@ -2809,7 +3062,7 @@ class Choice(Set):
                 tagSet = component.effectiveTagSet
             else:
                 tagSet = component.tagSet
-            if isinstance(component, base.AbstractConstructedAsn1Item):
+            if isinstance(component, base.ConstructedAsn1Type):
                 myClone.setComponentByType(
                     tagSet, component.clone(cloneValueFlag=cloneValueFlag)
                 )
@@ -2847,15 +3100,16 @@ class Choice(Set):
             A Python value to initialize |ASN.1| component with (if *componentType* is set)
             or ASN.1 value object to assign to |ASN.1| component. Once a new value is
             set to *idx* component, previous value is dropped.
+            If `value` is not given, schema object will be set as a component.

         verifyConstraints : :class:`bool`
-            If `False`, skip constraints validation
+            If :obj:`False`, skip constraints validation

         matchTags: :class:`bool`
-            If `False`, skip component tags matching
+            If :obj:`False`, skip component tags matching

         matchConstraints: :class:`bool`
-            If `False`, skip component constraints matching
+            If :obj:`False`, skip component constraints matching

         Returns
         -------
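
Choice's single-alternative rule is now carried by subtypeSpec = ValueSizeConstraint(1, 1), and verifySizeSpec() is renamed to checkConsistency(); selecting an alternative is unchanged (sketch, upstream pyasn1 0.4.x):

    from pyasn1.type import namedtype, univ

    class Answer(univ.Choice):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('yes', univ.Boolean()),
            namedtype.NamedType('count', univ.Integer()))

    ans = Answer()
    ans.setComponentByName('count', 3)              # choosing one drops any other
    print(ans.getName(), int(ans.getComponent()))   # count 3
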
@@ -2925,17 +3179,18 @@
     def isValue(self):
         """Indicate that |ASN.1| object represents ASN.1 value.

-        If *isValue* is `False` then this object represents just ASN.1 schema.
+        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.

-        If *isValue* is `True` then, in addition to its ASN.1 schema features,
-        this object can also be used like a Python built-in object (e.g. `int`,
-        `str`, `dict` etc.).
+        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+        this object can also be used like a Python built-in object (e.g.
+        :class:`int`, :class:`str`, :class:`dict` etc.).

         Returns
         -------
         : :class:`bool`
-            :class:`False` if object represents just ASN.1 schema.
-            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
+            :obj:`False` if object represents just ASN.1 schema.
+            :obj:`True` if object represents ASN.1 schema and can be used as a normal
+            value.

         Note
         ----
@@ -2957,7 +3212,7 @@ class Choice(Set):
     def clear(self):
         self._currentIdx = None
-        Set.clear(self)
+        return Set.clear(self)

     # compatibility stubs
@@ -2968,22 +3223,27 @@ class Choice(Set):
 class Any(OctetString):
     """Create |ASN.1| schema or value object.

-    |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3
-    :class:`bytes`. When used in Unicode context, |ASN.1| type assumes "|encoding|"
-    serialisation.
+    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
+    its objects are immutable and duck-type Python 2 :class:`str` or Python 3
+    :class:`bytes`. When used in Unicode context, |ASN.1| type assumes
+    "|encoding|" serialisation.

     Keyword Args
     ------------
-    value: :class:`str`, :class:`bytes` or |ASN.1| object
-        string (Python 2) or bytes (Python 3), alternatively unicode object
-        (Python 2) or string (Python 3) representing character string to be
-        serialised into octets (note `encoding` parameter) or |ASN.1| object.
+    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
+        :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
+        :class:`unicode` object (Python 2) or :class:`str` (Python 3)
+        representing character string to be serialised into octets (note
+        `encoding` parameter) or |ASN.1| object.
+        If `value` is not given, schema object will be created.

     tagSet: :py:class:`~pyasn1.type.tag.TagSet`
         Object representing non-default ASN.1 tag(s)

     subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
-        Object representing non-default ASN.1 subtype constraint(s)
+        Object representing non-default ASN.1 subtype constraint(s). Constraints
+        verification for |ASN.1| type occurs automatically on object
+        instantiation.

     encoding: :py:class:`str`
         Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
@@ -3000,7 +3260,7 @@ class Any(OctetString):
     Raises
     ------
-    :py:class:`~pyasn1.error.PyAsn1Error`
+    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
         On constraint violation or bad initializer.

     Examples
diff --git a/server/www/packages/packages-linux/x64/pyasn1/type/useful.py b/server/www/packages/packages-linux/x64/pyasn1/type/useful.py
index 146916d..7536b95 100644
--- a/server/www/packages/packages-linux/x64/pyasn1/type/useful.py
+++ b/server/www/packages/packages-linux/x64/pyasn1/type/useful.py
@@ -1,7 +1,7 @@
 #
 # This file is part of pyasn1 software.
 #
-# Copyright (c) 2005-2018, Ilya Etingof
+# Copyright (c) 2005-2019, Ilya Etingof
 # License: http://snmplabs.com/pyasn1/license.html
 #
 import datetime
diff --git a/server/www/packages/packages-linux/x64/pymysql/__init__.py b/server/www/packages/packages-linux/x64/pymysql/__init__.py
index b79b4b8..0cb5006 100644
--- a/server/www/packages/packages-linux/x64/pymysql/__init__.py
+++ b/server/www/packages/packages-linux/x64/pymysql/__init__.py
@@ -35,7 +35,7 @@ from .times import (
     DateFromTicks, TimeFromTicks, TimestampFromTicks)

-VERSION = (0, 9, 2, None)
+VERSION = (0, 9, 3, None)
 if VERSION[3] is not None:
     VERSION_STRING = "%d.%d.%d_%s" % VERSION
 else:
diff --git a/server/www/packages/packages-linux/x64/pymysql/_auth.py b/server/www/packages/packages-linux/x64/pymysql/_auth.py
index bbb742d..199f36c 100644
--- a/server/www/packages/packages-linux/x64/pymysql/_auth.py
+++ b/server/www/packages/packages-linux/x64/pymysql/_auth.py
@@ -4,14 +4,22 @@ Implements auth methods
 from ._compat import text_type, PY2
 from .constants import CLIENT
 from .err import OperationalError
+from .util import byte2int, int2byte

-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives import serialization, hashes
-from cryptography.hazmat.primitives.asymmetric import padding
+
+try:
+    from cryptography.hazmat.backends import default_backend
+    from cryptography.hazmat.primitives import serialization, hashes
+    from cryptography.hazmat.primitives.asymmetric import padding
+    _have_cryptography = True
+except ImportError:
+    _have_cryptography = False

 from functools import partial
 import hashlib
+import io
 import struct
+import warnings

 DEBUG = False
@@ -69,6 +77,8 @@ class RandStruct_323(object):
 def scramble_old_password(password, message):
     """Scramble for old_password"""
+    warnings.warn("old password (for MySQL <4.1) is used. Upgrade your password with newer auth method.\n"
+                  "old password support will be removed in future PyMySQL version")
     hash_pass = _hash_password_323(password)
     hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323])
     hash_pass_n = struct.unpack(">LL", hash_pass)
@@ -128,6 +138,8 @@ def sha2_rsa_encrypt(password, salt, public_key):

     Used for sha256_password and caching_sha2_password.
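
The import hunk above makes `cryptography` an optional dependency: the module now imports cleanly without it and only fails when RSA-based auth is actually requested. The same guard pattern in isolation (a sketch; the stub function name is illustrative):

    try:
        from cryptography.hazmat.primitives import serialization  # optional dependency
        _have_cryptography = True
    except ImportError:
        _have_cryptography = False

    def rsa_encrypt_or_fail(*args):
        # Fail on use, not at import time, mirroring pymysql._auth
        if not _have_cryptography:
            raise RuntimeError("cryptography is required for sha256_password "
                               "or caching_sha2_password")
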
""" + if not _have_cryptography: + raise RuntimeError("cryptography is required for sha256_password or caching_sha2_password") message = _xor_password(password + b'\0', salt) rsa_key = serialization.load_pem_public_key(public_key, default_backend()) return rsa_key.encrypt( diff --git a/server/www/packages/packages-linux/x64/pymysql/charset.py b/server/www/packages/packages-linux/x64/pymysql/charset.py index 968376c..07d8063 100644 --- a/server/www/packages/packages-linux/x64/pymysql/charset.py +++ b/server/www/packages/packages-linux/x64/pymysql/charset.py @@ -18,7 +18,7 @@ class Charset(object): @property def encoding(self): name = self.name - if name == 'utf8mb4': + if name in ('utf8mb4', 'utf8mb3'): return 'utf8' return name @@ -30,18 +30,18 @@ class Charset(object): class Charsets: def __init__(self): self._by_id = {} + self._by_name = {} def add(self, c): self._by_id[c.id] = c + if c.is_default: + self._by_name[c.name] = c def by_id(self, id): return self._by_id[id] def by_name(self, name): - name = name.lower() - for c in self._by_id.values(): - if c.name == name and c.is_default: - return c + return self._by_name.get(name.lower()) _charsets = Charsets() """ @@ -89,7 +89,6 @@ _charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', '')) _charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes')) _charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes')) _charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', '')) -_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes')) _charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes')) _charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes')) _charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes')) @@ -108,13 +107,9 @@ _charsets.add(Charset(50, 'cp1251', 'cp1251_bin', '')) _charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes')) _charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', '')) _charsets.add(Charset(53, 'macroman', 'macroman_bin', '')) -_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes')) -_charsets.add(Charset(55, 'utf16', 'utf16_bin', '')) _charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes')) _charsets.add(Charset(58, 'cp1257', 'cp1257_bin', '')) _charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes')) -_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes')) -_charsets.add(Charset(61, 'utf32', 'utf32_bin', '')) _charsets.add(Charset(63, 'binary', 'binary', 'Yes')) _charsets.add(Charset(64, 'armscii8', 'armscii8_bin', '')) _charsets.add(Charset(65, 'ascii', 'ascii_bin', '')) @@ -128,6 +123,7 @@ _charsets.add(Charset(72, 'hp8', 'hp8_bin', '')) _charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', '')) _charsets.add(Charset(74, 'koi8r', 'koi8r_bin', '')) _charsets.add(Charset(75, 'koi8u', 'koi8u_bin', '')) +_charsets.add(Charset(76, 'utf8', 'utf8_tolower_ci', '')) _charsets.add(Charset(77, 'latin2', 'latin2_bin', '')) _charsets.add(Charset(78, 'latin5', 'latin5_bin', '')) _charsets.add(Charset(79, 'latin7', 'latin7_bin', '')) @@ -141,7 +137,6 @@ _charsets.add(Charset(86, 'gb2312', 'gb2312_bin', '')) _charsets.add(Charset(87, 'gbk', 'gbk_bin', '')) _charsets.add(Charset(88, 'sjis', 'sjis_bin', '')) _charsets.add(Charset(89, 'tis620', 'tis620_bin', '')) -_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', '')) _charsets.add(Charset(91, 'ujis', 'ujis_bin', '')) _charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes')) _charsets.add(Charset(93, 'geostd8', 'geostd8_bin', '')) @@ -151,67 +146,6 @@ _charsets.add(Charset(96, 'cp932', 'cp932_bin', '')) 
@@ -89,7 +89,6 @@
 _charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', ''))
 _charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes'))
 _charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes'))
 _charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', ''))
-_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes'))
 _charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes'))
 _charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes'))
 _charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes'))
@@ -108,13 +107,9 @@
 _charsets.add(Charset(50, 'cp1251', 'cp1251_bin', ''))
 _charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes'))
 _charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', ''))
 _charsets.add(Charset(53, 'macroman', 'macroman_bin', ''))
-_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes'))
-_charsets.add(Charset(55, 'utf16', 'utf16_bin', ''))
 _charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes'))
 _charsets.add(Charset(58, 'cp1257', 'cp1257_bin', ''))
 _charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes'))
-_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes'))
-_charsets.add(Charset(61, 'utf32', 'utf32_bin', ''))
 _charsets.add(Charset(63, 'binary', 'binary', 'Yes'))
 _charsets.add(Charset(64, 'armscii8', 'armscii8_bin', ''))
 _charsets.add(Charset(65, 'ascii', 'ascii_bin', ''))
@@ -128,6 +123,7 @@
 _charsets.add(Charset(72, 'hp8', 'hp8_bin', ''))
 _charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', ''))
 _charsets.add(Charset(74, 'koi8r', 'koi8r_bin', ''))
 _charsets.add(Charset(75, 'koi8u', 'koi8u_bin', ''))
+_charsets.add(Charset(76, 'utf8', 'utf8_tolower_ci', ''))
 _charsets.add(Charset(77, 'latin2', 'latin2_bin', ''))
 _charsets.add(Charset(78, 'latin5', 'latin5_bin', ''))
 _charsets.add(Charset(79, 'latin7', 'latin7_bin', ''))
@@ -141,7 +137,6 @@
 _charsets.add(Charset(86, 'gb2312', 'gb2312_bin', ''))
 _charsets.add(Charset(87, 'gbk', 'gbk_bin', ''))
 _charsets.add(Charset(88, 'sjis', 'sjis_bin', ''))
 _charsets.add(Charset(89, 'tis620', 'tis620_bin', ''))
-_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', ''))
 _charsets.add(Charset(91, 'ujis', 'ujis_bin', ''))
 _charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes'))
 _charsets.add(Charset(93, 'geostd8', 'geostd8_bin', ''))
@@ -151,67 +146,6 @@
 _charsets.add(Charset(96, 'cp932', 'cp932_bin', ''))
 _charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes'))
 _charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', ''))
 _charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', ''))
-_charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', ''))
-_charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', ''))
-_charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', ''))
-_charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', ''))
-_charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', ''))
-_charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', ''))
-_charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', ''))
-_charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', ''))
-_charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', ''))
-_charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', ''))
-_charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', ''))
-_charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', ''))
-_charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', ''))
-_charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', ''))
-_charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', ''))
-_charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', ''))
-_charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', ''))
-_charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', ''))
-_charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', ''))
-_charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', ''))
-_charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', ''))
-_charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', ''))
-_charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', ''))
-_charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', ''))
-_charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', ''))
-_charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', ''))
-_charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', ''))
-_charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', ''))
-_charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', ''))
-_charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', ''))
-_charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', ''))
-_charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', ''))
-_charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', ''))
-_charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', ''))
-_charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', ''))
-_charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', ''))
-_charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', ''))
-_charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', ''))
-_charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', ''))
-_charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', ''))
-_charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', ''))
-_charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', ''))
-_charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', ''))
-_charsets.add(Charset(162, 'utf32', 'utf32_latvian_ci', ''))
-_charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', ''))
-_charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', ''))
-_charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', ''))
-_charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', ''))
-_charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', ''))
-_charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', ''))
-_charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', ''))
-_charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', ''))
-_charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', ''))
-_charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', ''))
-_charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', ''))
-_charsets.add(Charset(174, 'utf32', 'utf32_spanish2_ci', ''))
-_charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', ''))
-_charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', ''))
-_charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', ''))
-_charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', ''))
-_charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', ''))
 _charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', ''))
 _charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', ''))
 _charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', ''))
@@ -232,6 +166,10 @@
 _charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', ''))
 _charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', ''))
 _charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', ''))
 _charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', ''))
+_charsets.add(Charset(212, 'utf8', 'utf8_german2_ci', ''))
+_charsets.add(Charset(213, 'utf8', 'utf8_croatian_ci', ''))
+_charsets.add(Charset(214, 'utf8', 'utf8_unicode_520_ci', ''))
+_charsets.add(Charset(215, 'utf8', 'utf8_vietnamese_ci', ''))
 _charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', ''))
 _charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', ''))
 _charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', ''))
@@ -257,14 +195,18 @@
 _charsets.add(Charset(244, 'utf8mb4', 'utf8mb4_german2_ci', ''))
 _charsets.add(Charset(245, 'utf8mb4', 'utf8mb4_croatian_ci', ''))
 _charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', ''))
 _charsets.add(Charset(247, 'utf8mb4', 'utf8mb4_vietnamese_ci', ''))
-
+_charsets.add(Charset(248, 'gb18030', 'gb18030_chinese_ci', 'Yes'))
+_charsets.add(Charset(249, 'gb18030', 'gb18030_bin', ''))
+_charsets.add(Charset(250, 'gb18030', 'gb18030_unicode_520_ci', ''))
+_charsets.add(Charset(255, 'utf8mb4', 'utf8mb4_0900_ai_ci', ''))

 charset_by_name = _charsets.by_name
 charset_by_id = _charsets.by_id

+#TODO: remove this
 def charset_to_encoding(name):
     """Convert MySQL's charset name to Python's codec name"""
-    if name == 'utf8mb4':
+    if name in ('utf8mb4', 'utf8mb3'):
         return 'utf8'
     return name
diff --git a/server/www/packages/packages-linux/x64/pymysql/connections.py b/server/www/packages/packages-linux/x64/pymysql/connections.py
index 1e580d2..2e4122b 100644
--- a/server/www/packages/packages-linux/x64/pymysql/connections.py
+++ b/server/www/packages/packages-linux/x64/pymysql/connections.py
@@ -88,7 +88,7 @@ TEXT_TYPES = {
 }

-DEFAULT_CHARSET = 'utf8mb4'  # TODO: change to utf8mb4
+DEFAULT_CHARSET = 'utf8mb4'

 MAX_PACKET_LEN = 2**24-1
@@ -152,7 +152,6 @@ class Connection(object):
         (default: 10, min: 1, max: 31536000)
     :param ssl:
         A dict of arguments similar to mysql_ssl_set()'s parameters.
-        For now the capath and cipher arguments are not supported.
     :param read_default_group: Group to read from in the configuration file.
     :param compress: Not supported
     :param named_pipe: Not supported
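
With the "capath and cipher ... not supported" caveat removed, the full mysql_ssl_set()-style dict can be passed through. A hedged connect sketch (host, credentials and file paths are placeholders):

    import pymysql

    conn = pymysql.connect(
        host='127.0.0.1', user='app', password='secret', db='appdb',
        charset='utf8mb4',
        ssl={'ca': '/etc/ssl/mysql-ca.pem',
             'capath': '/etc/ssl/certs',                 # now honoured
             'cipher': 'ECDHE-RSA-AES128-GCM-SHA256'})
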
@@ -295,15 +294,15 @@ class Connection(object):
         self._affected_rows = 0
         self.host_info = "Not connected"

-        #: specified autocommit mode. None means use server default.
+        # specified autocommit mode. None means use server default.
         self.autocommit_mode = autocommit

         if conv is None:
             conv = converters.conversions

         # Need for MySQLdb compatibility.
-        self.encoders = dict([(k, v) for (k, v) in conv.items() if type(k) is not int])
-        self.decoders = dict([(k, v) for (k, v) in conv.items() if type(k) is int])
+        self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
+        self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
         self.sql_mode = sql_mode
         self.init_command = init_command
         self.max_allowed_packet = max_allowed_packet
@@ -316,10 +315,9 @@ class Connection(object):
             '_pid': str(os.getpid()),
             '_client_version': VERSION_STRING,
         }
+
         if program_name:
             self._connect_attrs["program_name"] = program_name
-        elif sys.argv:
-            self._connect_attrs["program_name"] = sys.argv[0]

         if defer_connect:
             self._sock = None
@@ -494,6 +492,9 @@ class Connection(object):
     def __enter__(self):
         """Context manager that returns a Cursor"""
+        warnings.warn(
+            "Context manager API of Connection object is deprecated; Use conn.begin()",
+            DeprecationWarning)
         return self.cursor()

     def __exit__(self, exc, value, traceback):
@@ -696,6 +697,10 @@ class Connection(object):
                 raise err.OperationalError(
                     CR.CR_SERVER_LOST,
                     "Lost connection to MySQL server during query (%s)" % (e,))
+            except BaseException:
+                # Don't convert unknown exception to MySQLError.
+                self._force_close()
+                raise
             if len(data) < num_bytes:
                 self._force_close()
                 raise err.OperationalError(
@@ -804,7 +809,11 @@ class Connection(object):
         authresp = b''
         plugin_name = None

-        if self._auth_plugin_name in ('', 'mysql_native_password'):
+        if self._auth_plugin_name == '':
+            plugin_name = b''
+            authresp = _auth.scramble_native_password(self.password, self.salt)
+        elif self._auth_plugin_name == 'mysql_native_password':
+            plugin_name = b'mysql_native_password'
             authresp = _auth.scramble_native_password(self.password, self.salt)
         elif self._auth_plugin_name == 'caching_sha2_password':
             plugin_name = b'caching_sha2_password'
@@ -842,9 +851,9 @@ class Connection(object):
         if self.server_capabilities & CLIENT.CONNECT_ATTRS:
             connect_attrs = b''
             for k, v in self._connect_attrs.items():
-                k = k.encode('utf8')
+                k = k.encode('utf-8')
                 connect_attrs += struct.pack('B', len(k)) + k
-                v = v.encode('utf8')
+                v = v.encode('utf-8')
                 connect_attrs += struct.pack('B', len(v)) + v
             data += struct.pack('B', len(connect_attrs)) + connect_attrs
diff --git a/server/www/packages/packages-linux/x64/pymysql/converters.py b/server/www/packages/packages-linux/x64/pymysql/converters.py
index bf1db9d..ce2be06 100644
--- a/server/www/packages/packages-linux/x64/pymysql/converters.py
+++ b/server/www/packages/packages-linux/x64/pymysql/converters.py
@@ -354,21 +354,6 @@ def through(x):
 convert_bit = through

-def convert_characters(connection, field, data):
-    field_charset = charset_by_id(field.charsetnr).name
-    encoding = charset_to_encoding(field_charset)
-    if field.flags & FLAG.SET:
-        return convert_set(data.decode(encoding))
-    if field.flags & FLAG.BINARY:
-        return data
-
-    if connection.use_unicode:
-        data = data.decode(encoding)
-    elif connection.charset != field_charset:
-        data = data.decode(encoding)
-        data = data.encode(connection.encoding)
-    return data
-
 encoders = {
     bool: escape_bool,
     int: escape_int,
diff --git a/server/www/packages/packages-linux/x64/pymysql/cursors.py b/server/www/packages/packages-linux/x64/pymysql/cursors.py
index cc16998..a6d645d 100644
--- a/server/www/packages/packages-linux/x64/pymysql/cursors.py
+++ b/server/www/packages/packages-linux/x64/pymysql/cursors.py
@@ -122,9 +122,9 @@ class Cursor(object):
             return tuple(conn.literal(arg) for arg in args)
         elif isinstance(args, dict):
             if PY2:
dict((ensure_bytes(key), ensure_bytes(val)) for - (key, val) in args.items()) - return dict((key, conn.literal(val)) for (key, val) in args.items()) + args = {ensure_bytes(key): ensure_bytes(val) for + (key, val) in args.items()} + return {key: conn.literal(val) for (key, val) in args.items()} else: # If it's not a dictionary let's try escaping it anyways. # Worst case it will throw a Value error diff --git a/server/www/packages/packages-linux/x64/pymysql/util.py b/server/www/packages/packages-linux/x64/pymysql/util.py index 3e82ac7..04683f8 100644 --- a/server/www/packages/packages-linux/x64/pymysql/util.py +++ b/server/www/packages/packages-linux/x64/pymysql/util.py @@ -11,12 +11,3 @@ def byte2int(b): def int2byte(i): return struct.pack("!B", i) - -def join_bytes(bs): - if len(bs) == 0: - return "" - else: - rv = bs[0] - for b in bs[1:]: - rv += b - return rv diff --git a/server/www/packages/packages-linux/x64/qrcode/main.py b/server/www/packages/packages-linux/x64/qrcode/main.py index 1e164f1..e46a9b9 100644 --- a/server/www/packages/packages-linux/x64/qrcode/main.py +++ b/server/www/packages/packages-linux/x64/qrcode/main.py @@ -33,6 +33,7 @@ def _check_mask_pattern(mask_pattern): raise ValueError( "Mask pattern should be in range(8) (got %s)" % mask_pattern) + class QRCode: def __init__(self, version=None, diff --git a/server/www/packages/packages-linux/x64/qrcode/release.py b/server/www/packages/packages-linux/x64/qrcode/release.py index abbabb4..4cc4c19 100644 --- a/server/www/packages/packages-linux/x64/qrcode/release.py +++ b/server/www/packages/packages-linux/x64/qrcode/release.py @@ -12,7 +12,6 @@ def update_manpage(data): Update the version in the manpage document. """ if data['name'] != 'qrcode': - print('no qrcode') return base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/server/www/packages/packages-linux/x64/qrcode/util.py b/server/www/packages/packages-linux/x64/qrcode/util.py index a9652f7..231b85e 100644 --- a/server/www/packages/packages-linux/x64/qrcode/util.py +++ b/server/www/packages/packages-linux/x64/qrcode/util.py @@ -33,7 +33,7 @@ MODE_SIZE_LARGE = { } ALPHA_NUM = six.b('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:') -RE_ALPHA_NUM = re.compile(six.b('^[') + re.escape(ALPHA_NUM) + six.b(']*\Z')) +RE_ALPHA_NUM = re.compile(six.b('^[') + re.escape(ALPHA_NUM) + six.b(r']*\Z')) # The number of bits for numeric delimited data lengths. NUMBER_LENGTH = {3: 10, 2: 7, 1: 4} @@ -344,12 +344,17 @@ def optimal_data_chunks(data, minimum=4): :param minimum: The minimum number of bytes in a row to split as a chunk. 
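Back in the cursors.py hunk above, _escape_args now builds its mappings with comprehensions; every dict value still runs through conn.literal(), which is what lets execute() take pyformat-style dict parameters. A usage sketch (host, credentials, and table are illustrative, not part of the diff):

import pymysql

conn = pymysql.connect(host='127.0.0.1', user='tp', password='secret',
                       db='teleport')
with conn.cursor() as cur:
    # each value in the dict is escaped via conn.literal() before substitution
    cur.execute('SELECT id FROM account WHERE name=%(name)s AND state=%(state)s',
                {'name': 'admin', 'state': 1})
    print(cur.fetchall())
conn.close()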
""" data = to_bytestring(data) - re_repeat = ( - six.b('{') + six.text_type(minimum).encode('ascii') + six.b(',}')) - num_pattern = re.compile(six.b('\d') + re_repeat) + num_pattern = six.b(r'\d') + alpha_pattern = six.b('[') + re.escape(ALPHA_NUM) + six.b(']') + if len(data) <= minimum: + num_pattern = re.compile(six.b('^') + num_pattern + six.b('+$')) + alpha_pattern = re.compile(six.b('^') + alpha_pattern + six.b('+$')) + else: + re_repeat = ( + six.b('{') + six.text_type(minimum).encode('ascii') + six.b(',}')) + num_pattern = re.compile(num_pattern + re_repeat) + alpha_pattern = re.compile(alpha_pattern + re_repeat) num_bits = _optimal_split(data, num_pattern) - alpha_pattern = re.compile( - six.b('[') + re.escape(ALPHA_NUM) + six.b(']') + re_repeat) for is_num, chunk in num_bits: if is_num: yield QRData(chunk, mode=MODE_NUMBER, check_data=False) diff --git a/server/www/packages/packages-linux/x64/six.py b/server/www/packages/packages-linux/x64/six.py index 6bf4fd3..357e624 100644 --- a/server/www/packages/packages-linux/x64/six.py +++ b/server/www/packages/packages-linux/x64/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2017 Benjamin Peterson +# Copyright (c) 2010-2019 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.11.0" +__version__ = "1.13.0" # Useful for very coarse version differentiation. @@ -255,8 +255,10 @@ _moved_attributes = [ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), @@ -637,6 +639,7 @@ if PY3: import io StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" @@ -824,7 +827,15 @@ def with_metaclass(meta, *bases): class metaclass(type): def __new__(cls, name, this_bases, d): - return meta(name, bases, d) + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): @@ -844,10 +855,71 @@ def add_metaclass(metaclass): orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. 
+ + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + + def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. diff --git a/server/www/packages/packages-linux/x64/tornado/__init__.py b/server/www/packages/packages-linux/x64/tornado/__init__.py index b269cf7..3ac2bb8 100644 --- a/server/www/packages/packages-linux/x64/tornado/__init__.py +++ b/server/www/packages/packages-linux/x64/tornado/__init__.py @@ -1,28 +1,26 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""The Tornado web server and tools.""" - -from __future__ import absolute_import, division, print_function - -# version is a human-readable version number. - -# version_info is a four-tuple for programmatic comparison. The first -# three numbers are the components of the version number. The fourth -# is zero for an official release, positive for a development branch, -# or negative for a release candidate or beta (after the base version -# number has been incremented) -version = "5.1.1" -version_info = (5, 1, 1, 0) +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The Tornado web server and tools.""" + +# version is a human-readable version number. + +# version_info is a four-tuple for programmatic comparison. 
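The ensure_binary/ensure_str/ensure_text helpers added to six.py above normalize values across the bytes/text boundary. Expected behavior on Python 3, as a quick sketch:

import six

print(six.ensure_binary(u'caf\xe9'))    # b'caf\xc3\xa9' (utf-8 encoded)
print(six.ensure_text(b'caf\xc3\xa9'))  # u'caf\xe9' (utf-8 decoded)
print(six.ensure_str(b'abc'))           # 'abc'; str passes through unchanged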
The first +# three numbers are the components of the version number. The fourth +# is zero for an official release, positive for a development branch, +# or negative for a release candidate or beta (after the base version +# number has been incremented) +version = "6.0.4" +version_info = (6, 0, 4, 0) diff --git a/server/www/packages/packages-linux/x64/tornado/_locale_data.py b/server/www/packages/packages-linux/x64/tornado/_locale_data.py index a2c5039..02e89d4 100644 --- a/server/www/packages/packages-linux/x64/tornado/_locale_data.py +++ b/server/www/packages/packages-linux/x64/tornado/_locale_data.py @@ -1,84 +1,82 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Data used by the tornado.locale module.""" - -from __future__ import absolute_import, division, print_function - -LOCALE_NAMES = { - "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"}, - "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"}, - "ar_AR": {"name_en": u"Arabic", "name": u"العربية"}, - "bg_BG": {"name_en": u"Bulgarian", "name": u"Български"}, - "bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"}, - "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"}, - "ca_ES": {"name_en": u"Catalan", "name": u"Català"}, - "cs_CZ": {"name_en": u"Czech", "name": u"Čeština"}, - "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"}, - "da_DK": {"name_en": u"Danish", "name": u"Dansk"}, - "de_DE": {"name_en": u"German", "name": u"Deutsch"}, - "el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"}, - "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"}, - "en_US": {"name_en": u"English (US)", "name": u"English (US)"}, - "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"}, - "es_LA": {"name_en": u"Spanish", "name": u"Español"}, - "et_EE": {"name_en": u"Estonian", "name": u"Eesti"}, - "eu_ES": {"name_en": u"Basque", "name": u"Euskara"}, - "fa_IR": {"name_en": u"Persian", "name": u"فارسی"}, - "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"}, - "fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"}, - "fr_FR": {"name_en": u"French", "name": u"Français"}, - "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"}, - "gl_ES": {"name_en": u"Galician", "name": u"Galego"}, - "he_IL": {"name_en": u"Hebrew", "name": u"עברית"}, - "hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"}, - "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"}, - "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"}, - "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"}, - "is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"}, - "it_IT": {"name_en": u"Italian", "name": u"Italiano"}, - "ja_JP": {"name_en": u"Japanese", "name": u"日本語"}, - "ko_KR": {"name_en": u"Korean", "name": u"한국어"}, - "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"}, - "lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"}, - "mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"}, - "ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"}, - "ms_MY": {"name_en": u"Malay", "name": u"Bahasa 
Melayu"}, - "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"}, - "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"}, - "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"}, - "pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"}, - "pl_PL": {"name_en": u"Polish", "name": u"Polski"}, - "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"}, - "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"}, - "ro_RO": {"name_en": u"Romanian", "name": u"Română"}, - "ru_RU": {"name_en": u"Russian", "name": u"Русский"}, - "sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"}, - "sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"}, - "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"}, - "sr_RS": {"name_en": u"Serbian", "name": u"Српски"}, - "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"}, - "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"}, - "ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"}, - "te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"}, - "th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"}, - "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"}, - "tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"}, - "uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"}, - "vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"}, - "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"}, - "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"}, -} +# -*- coding: utf-8 -*- +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Data used by the tornado.locale module.""" + +LOCALE_NAMES = { + "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"}, + "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"}, + "ar_AR": {"name_en": u"Arabic", "name": u"العربية"}, + "bg_BG": {"name_en": u"Bulgarian", "name": u"Български"}, + "bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"}, + "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"}, + "ca_ES": {"name_en": u"Catalan", "name": u"Català"}, + "cs_CZ": {"name_en": u"Czech", "name": u"Čeština"}, + "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"}, + "da_DK": {"name_en": u"Danish", "name": u"Dansk"}, + "de_DE": {"name_en": u"German", "name": u"Deutsch"}, + "el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"}, + "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"}, + "en_US": {"name_en": u"English (US)", "name": u"English (US)"}, + "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"}, + "es_LA": {"name_en": u"Spanish", "name": u"Español"}, + "et_EE": {"name_en": u"Estonian", "name": u"Eesti"}, + "eu_ES": {"name_en": u"Basque", "name": u"Euskara"}, + "fa_IR": {"name_en": u"Persian", "name": u"فارسی"}, + "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"}, + "fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"}, + "fr_FR": {"name_en": u"French", "name": u"Français"}, + "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"}, + "gl_ES": {"name_en": u"Galician", "name": u"Galego"}, + "he_IL": {"name_en": u"Hebrew", "name": u"עברית"}, + "hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"}, + "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"}, + "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"}, + "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"}, + "is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"}, + "it_IT": {"name_en": u"Italian", "name": u"Italiano"}, + "ja_JP": {"name_en": u"Japanese", "name": u"日本語"}, + "ko_KR": {"name_en": u"Korean", "name": u"한국어"}, + "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"}, + "lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"}, + "mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"}, + "ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"}, + "ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"}, + "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"}, + "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"}, + "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"}, + "pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"}, + "pl_PL": {"name_en": u"Polish", "name": u"Polski"}, + "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"}, + "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"}, + "ro_RO": {"name_en": u"Romanian", "name": u"Română"}, + "ru_RU": {"name_en": u"Russian", "name": u"Русский"}, + "sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"}, + "sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"}, + "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"}, + "sr_RS": {"name_en": u"Serbian", "name": u"Српски"}, + "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"}, + "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"}, + "ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"}, + "te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"}, + "th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"}, + "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"}, + "tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"}, + "uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"}, + "vi_VN": {"name_en": 
u"Vietnamese", "name": u"Tiếng Việt"}, + "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"}, + "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"}, +} diff --git a/server/www/packages/packages-linux/x64/tornado/auth.py b/server/www/packages/packages-linux/x64/tornado/auth.py index b79ad14..01dc4c5 100644 --- a/server/www/packages/packages-linux/x64/tornado/auth.py +++ b/server/www/packages/packages-linux/x64/tornado/auth.py @@ -1,1236 +1,1182 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""This module contains implementations of various third-party -authentication schemes. - -All the classes in this file are class mixins designed to be used with -the `tornado.web.RequestHandler` class. They are used in two ways: - -* On a login handler, use methods such as ``authenticate_redirect()``, - ``authorize_redirect()``, and ``get_authenticated_user()`` to - establish the user's identity and store authentication tokens to your - database and/or cookies. -* In non-login handlers, use methods such as ``facebook_request()`` - or ``twitter_request()`` to use the authentication tokens to make - requests to the respective services. - -They all take slightly different arguments due to the fact all these -services implement authentication and authorization slightly differently. -See the individual service classes below for complete documentation. - -Example usage for Google OAuth: - -.. testcode:: - - class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, - tornado.auth.GoogleOAuth2Mixin): - async def get(self): - if self.get_argument('code', False): - user = await self.get_authenticated_user( - redirect_uri='http://your.site.com/auth/google', - code=self.get_argument('code')) - # Save the user with e.g. set_secure_cookie - else: - await self.authorize_redirect( - redirect_uri='http://your.site.com/auth/google', - client_id=self.settings['google_oauth']['key'], - scope=['profile', 'email'], - response_type='code', - extra_params={'approval_prompt': 'auto'}) - -.. testoutput:: - :hide: - - -.. versionchanged:: 4.0 - All of the callback interfaces in this module are now guaranteed - to run their callback with an argument of ``None`` on error. - Previously some functions would do this while others would simply - terminate the request on their own. This change also ensures that - errors are more consistently reported through the ``Future`` interfaces. 
-""" - -from __future__ import absolute_import, division, print_function - -import base64 -import binascii -import functools -import hashlib -import hmac -import time -import uuid -import warnings - -from tornado.concurrent import (Future, _non_deprecated_return_future, - future_set_exc_info, chain_future, - future_set_result_unless_cancelled) -from tornado import gen -from tornado import httpclient -from tornado import escape -from tornado.httputil import url_concat -from tornado.log import gen_log -from tornado.stack_context import ExceptionStackContext, wrap -from tornado.util import unicode_type, ArgReplacer, PY3 - -if PY3: - import urllib.parse as urlparse - import urllib.parse as urllib_parse - long = int -else: - import urlparse - import urllib as urllib_parse - - -class AuthError(Exception): - pass - - -def _auth_future_to_callback(callback, future): - try: - result = future.result() - except AuthError as e: - gen_log.warning(str(e)) - result = None - callback(result) - - -def _auth_return_future(f): - """Similar to tornado.concurrent.return_future, but uses the auth - module's legacy callback interface. - - Note that when using this decorator the ``callback`` parameter - inside the function will actually be a future. - - .. deprecated:: 5.1 - Will be removed in 6.0. - """ - replacer = ArgReplacer(f, 'callback') - - @functools.wraps(f) - def wrapper(*args, **kwargs): - future = Future() - callback, args, kwargs = replacer.replace(future, args, kwargs) - if callback is not None: - warnings.warn("callback arguments are deprecated, use the returned Future instead", - DeprecationWarning) - future.add_done_callback( - wrap(functools.partial(_auth_future_to_callback, callback))) - - def handle_exception(typ, value, tb): - if future.done(): - return False - else: - future_set_exc_info(future, (typ, value, tb)) - return True - with ExceptionStackContext(handle_exception, delay_warning=True): - f(*args, **kwargs) - return future - return wrapper - - -class OpenIdMixin(object): - """Abstract implementation of OpenID and Attribute Exchange. - - Class attributes: - - * ``_OPENID_ENDPOINT``: the identity provider's URI. - """ - @_non_deprecated_return_future - def authenticate_redirect(self, callback_uri=None, - ax_attrs=["name", "email", "language", "username"], - callback=None): - """Redirects to the authentication URL for this service. - - After authentication, the service will redirect back to the given - callback URI with additional parameters including ``openid.mode``. - - We request the given attributes for the authenticated user by - default (name, email, language, and username). If you don't need - all those attributes for your app, you can request fewer with - the ax_attrs keyword argument. - - .. versionchanged:: 3.1 - Returns a `.Future` and takes an optional callback. These are - not strictly necessary as this method is synchronous, - but they are supplied for consistency with - `OAuthMixin.authorize_redirect`. - - .. deprecated:: 5.1 - - The ``callback`` argument and returned awaitable will be removed - in Tornado 6.0; this will be an ordinary synchronous function. - """ - callback_uri = callback_uri or self.request.uri - args = self._openid_args(callback_uri, ax_attrs=ax_attrs) - self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args)) - callback() - - @_auth_return_future - def get_authenticated_user(self, callback, http_client=None): - """Fetches the authenticated user data upon redirect. 
- - This method should be called by the handler that receives the - redirect from the `authenticate_redirect()` method (which is - often the same as the one that calls it; in that case you would - call `get_authenticated_user` if the ``openid.mode`` parameter - is present and `authenticate_redirect` if it is not). - - The result of this method will generally be used to set a cookie. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - # Verify the OpenID response via direct request to the OP - args = dict((k, v[-1]) for k, v in self.request.arguments.items()) - args["openid.mode"] = u"check_authentication" - url = self._OPENID_ENDPOINT - if http_client is None: - http_client = self.get_auth_http_client() - fut = http_client.fetch(url, method="POST", body=urllib_parse.urlencode(args)) - fut.add_done_callback(wrap(functools.partial( - self._on_authentication_verified, callback))) - - def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None): - url = urlparse.urljoin(self.request.full_url(), callback_uri) - args = { - "openid.ns": "http://specs.openid.net/auth/2.0", - "openid.claimed_id": - "http://specs.openid.net/auth/2.0/identifier_select", - "openid.identity": - "http://specs.openid.net/auth/2.0/identifier_select", - "openid.return_to": url, - "openid.realm": urlparse.urljoin(url, '/'), - "openid.mode": "checkid_setup", - } - if ax_attrs: - args.update({ - "openid.ns.ax": "http://openid.net/srv/ax/1.0", - "openid.ax.mode": "fetch_request", - }) - ax_attrs = set(ax_attrs) - required = [] - if "name" in ax_attrs: - ax_attrs -= set(["name", "firstname", "fullname", "lastname"]) - required += ["firstname", "fullname", "lastname"] - args.update({ - "openid.ax.type.firstname": - "http://axschema.org/namePerson/first", - "openid.ax.type.fullname": - "http://axschema.org/namePerson", - "openid.ax.type.lastname": - "http://axschema.org/namePerson/last", - }) - known_attrs = { - "email": "http://axschema.org/contact/email", - "language": "http://axschema.org/pref/language", - "username": "http://axschema.org/namePerson/friendly", - } - for name in ax_attrs: - args["openid.ax.type." + name] = known_attrs[name] - required.append(name) - args["openid.ax.required"] = ",".join(required) - if oauth_scope: - args.update({ - "openid.ns.oauth": - "http://specs.openid.net/extensions/oauth/1.0", - "openid.oauth.consumer": self.request.host.split(":")[0], - "openid.oauth.scope": oauth_scope, - }) - return args - - def _on_authentication_verified(self, future, response_fut): - try: - response = response_fut.result() - except Exception as e: - future.set_exception(AuthError( - "Error response %s" % e)) - return - if b"is_valid:true" not in response.body: - future.set_exception(AuthError( - "Invalid OpenID response: %s" % response.body)) - return - - # Make sure we got back at least an email from attribute exchange - ax_ns = None - for name in self.request.arguments: - if name.startswith("openid.ns.") and \ - self.get_argument(name) == u"http://openid.net/srv/ax/1.0": - ax_ns = name[10:] - break - - def get_ax_arg(uri): - if not ax_ns: - return u"" - prefix = "openid." + ax_ns + ".type." - ax_name = None - for name in self.request.arguments.keys(): - if self.get_argument(name) == uri and name.startswith(prefix): - part = name[len(prefix):] - ax_name = "openid." + ax_ns + ".value." 
+ part - break - if not ax_name: - return u"" - return self.get_argument(ax_name, u"") - - email = get_ax_arg("http://axschema.org/contact/email") - name = get_ax_arg("http://axschema.org/namePerson") - first_name = get_ax_arg("http://axschema.org/namePerson/first") - last_name = get_ax_arg("http://axschema.org/namePerson/last") - username = get_ax_arg("http://axschema.org/namePerson/friendly") - locale = get_ax_arg("http://axschema.org/pref/language").lower() - user = dict() - name_parts = [] - if first_name: - user["first_name"] = first_name - name_parts.append(first_name) - if last_name: - user["last_name"] = last_name - name_parts.append(last_name) - if name: - user["name"] = name - elif name_parts: - user["name"] = u" ".join(name_parts) - elif email: - user["name"] = email.split("@")[0] - if email: - user["email"] = email - if locale: - user["locale"] = locale - if username: - user["username"] = username - claimed_id = self.get_argument("openid.claimed_id", None) - if claimed_id: - user["claimed_id"] = claimed_id - future_set_result_unless_cancelled(future, user) - - def get_auth_http_client(self): - """Returns the `.AsyncHTTPClient` instance to be used for auth requests. - - May be overridden by subclasses to use an HTTP client other than - the default. - """ - return httpclient.AsyncHTTPClient() - - -class OAuthMixin(object): - """Abstract implementation of OAuth 1.0 and 1.0a. - - See `TwitterMixin` below for an example implementation. - - Class attributes: - - * ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url. - * ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url. - * ``_OAUTH_VERSION``: May be either "1.0" or "1.0a". - * ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires - advance registration of callbacks. - - Subclasses must also override the `_oauth_get_user_future` and - `_oauth_consumer_token` methods. - """ - @_non_deprecated_return_future - def authorize_redirect(self, callback_uri=None, extra_params=None, - http_client=None, callback=None): - """Redirects the user to obtain OAuth authorization for this service. - - The ``callback_uri`` may be omitted if you have previously - registered a callback URI with the third-party service. For - some services, you must use a previously-registered callback - URI and cannot specify a callback via this method. - - This method sets a cookie called ``_oauth_request_token`` which is - subsequently used (and cleared) in `get_authenticated_user` for - security purposes. - - This method is asynchronous and must be called with ``await`` - or ``yield`` (This is different from other ``auth*_redirect`` - methods defined in this module). It calls - `.RequestHandler.finish` for you so you should not write any - other response after it returns. - - .. versionchanged:: 3.1 - Now returns a `.Future` and takes an optional callback, for - compatibility with `.gen.coroutine`. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. 
- - """ - if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False): - raise Exception("This service does not support oauth_callback") - if http_client is None: - http_client = self.get_auth_http_client() - if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": - fut = http_client.fetch( - self._oauth_request_token_url(callback_uri=callback_uri, - extra_params=extra_params)) - fut.add_done_callback(wrap(functools.partial( - self._on_request_token, - self._OAUTH_AUTHORIZE_URL, - callback_uri, - callback))) - else: - fut = http_client.fetch(self._oauth_request_token_url()) - fut.add_done_callback( - wrap(functools.partial( - self._on_request_token, self._OAUTH_AUTHORIZE_URL, - callback_uri, - callback))) - - @_auth_return_future - def get_authenticated_user(self, callback, http_client=None): - """Gets the OAuth authorized user and access token. - - This method should be called from the handler for your - OAuth callback URL to complete the registration process. We run the - callback with the authenticated user dictionary. This dictionary - will contain an ``access_key`` which can be used to make authorized - requests to this service on behalf of the user. The dictionary will - also contain other fields such as ``name``, depending on the service - used. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - future = callback - request_key = escape.utf8(self.get_argument("oauth_token")) - oauth_verifier = self.get_argument("oauth_verifier", None) - request_cookie = self.get_cookie("_oauth_request_token") - if not request_cookie: - future.set_exception(AuthError( - "Missing OAuth request token cookie")) - return - self.clear_cookie("_oauth_request_token") - cookie_key, cookie_secret = [ - base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")] - if cookie_key != request_key: - future.set_exception(AuthError( - "Request token does not match cookie")) - return - token = dict(key=cookie_key, secret=cookie_secret) - if oauth_verifier: - token["verifier"] = oauth_verifier - if http_client is None: - http_client = self.get_auth_http_client() - fut = http_client.fetch(self._oauth_access_token_url(token)) - fut.add_done_callback(wrap(functools.partial(self._on_access_token, callback))) - - def _oauth_request_token_url(self, callback_uri=None, extra_params=None): - consumer_token = self._oauth_consumer_token() - url = self._OAUTH_REQUEST_TOKEN_URL - args = dict( - oauth_consumer_key=escape.to_basestring(consumer_token["key"]), - oauth_signature_method="HMAC-SHA1", - oauth_timestamp=str(int(time.time())), - oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), - oauth_version="1.0", - ) - if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": - if callback_uri == "oob": - args["oauth_callback"] = "oob" - elif callback_uri: - args["oauth_callback"] = urlparse.urljoin( - self.request.full_url(), callback_uri) - if extra_params: - args.update(extra_params) - signature = _oauth10a_signature(consumer_token, "GET", url, args) - else: - signature = _oauth_signature(consumer_token, "GET", url, args) - - args["oauth_signature"] = signature - return url + "?" 
+ urllib_parse.urlencode(args) - - def _on_request_token(self, authorize_url, callback_uri, callback, - response_fut): - try: - response = response_fut.result() - except Exception as e: - raise Exception("Could not get request token: %s" % e) - request_token = _oauth_parse_response(response.body) - data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" + - base64.b64encode(escape.utf8(request_token["secret"]))) - self.set_cookie("_oauth_request_token", data) - args = dict(oauth_token=request_token["key"]) - if callback_uri == "oob": - self.finish(authorize_url + "?" + urllib_parse.urlencode(args)) - callback() - return - elif callback_uri: - args["oauth_callback"] = urlparse.urljoin( - self.request.full_url(), callback_uri) - self.redirect(authorize_url + "?" + urllib_parse.urlencode(args)) - callback() - - def _oauth_access_token_url(self, request_token): - consumer_token = self._oauth_consumer_token() - url = self._OAUTH_ACCESS_TOKEN_URL - args = dict( - oauth_consumer_key=escape.to_basestring(consumer_token["key"]), - oauth_token=escape.to_basestring(request_token["key"]), - oauth_signature_method="HMAC-SHA1", - oauth_timestamp=str(int(time.time())), - oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), - oauth_version="1.0", - ) - if "verifier" in request_token: - args["oauth_verifier"] = request_token["verifier"] - - if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": - signature = _oauth10a_signature(consumer_token, "GET", url, args, - request_token) - else: - signature = _oauth_signature(consumer_token, "GET", url, args, - request_token) - - args["oauth_signature"] = signature - return url + "?" + urllib_parse.urlencode(args) - - def _on_access_token(self, future, response_fut): - try: - response = response_fut.result() - except Exception: - future.set_exception(AuthError("Could not fetch access token")) - return - - access_token = _oauth_parse_response(response.body) - fut = self._oauth_get_user_future(access_token) - fut = gen.convert_yielded(fut) - fut.add_done_callback( - wrap(functools.partial(self._on_oauth_get_user, access_token, future))) - - def _oauth_consumer_token(self): - """Subclasses must override this to return their OAuth consumer keys. - - The return value should be a `dict` with keys ``key`` and ``secret``. - """ - raise NotImplementedError() - - @_non_deprecated_return_future - def _oauth_get_user_future(self, access_token, callback): - """Subclasses must override this to get basic information about the - user. - - Should return a `.Future` whose result is a dictionary - containing information about the user, which may have been - retrieved by using ``access_token`` to make a request to the - service. - - The access token will be added to the returned dictionary to make - the result of `get_authenticated_user`. - - For backwards compatibility, the callback-based ``_oauth_get_user`` - method is also supported. - - .. versionchanged:: 5.1 - - Subclasses may also define this method with ``async def``. - - .. deprecated:: 5.1 - - The ``_oauth_get_user`` fallback is deprecated and support for it - will be removed in 6.0. - """ - warnings.warn("_oauth_get_user is deprecated, override _oauth_get_user_future instead", - DeprecationWarning) - # By default, call the old-style _oauth_get_user, but new code - # should override this method instead. 
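Both token URLs above are signed the same way, via the _oauth10a_signature helper near the end of this removed module. A condensed standalone sketch of that HMAC-SHA1 construction (URL normalization omitted; secrets and parameters are illustrative):

import binascii
import hashlib
import hmac
from urllib.parse import quote


def oauth10a_signature(consumer_secret, token_secret, method, url, args):
    esc = lambda v: quote(str(v), safe='~')
    # base string: escaped method, URL, and sorted query string, joined by '&'
    param_str = '&'.join('%s=%s' % (k, esc(v)) for k, v in sorted(args.items()))
    base = '&'.join(esc(e) for e in (method.upper(), url, param_str))
    # signing key: percent-escaped secrets joined by '&'
    key = ('%s&%s' % (esc(consumer_secret), esc(token_secret))).encode()
    return binascii.b2a_base64(hmac.new(key, base.encode(),
                                        hashlib.sha1).digest())[:-1]


print(oauth10a_signature('consumer-secret', 'token-secret', 'GET',
                         'https://api.twitter.com/oauth/access_token',
                         {'oauth_nonce': '7d8f3e4a', 'oauth_version': '1.0'}))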
- self._oauth_get_user(access_token, callback) - - def _oauth_get_user(self, access_token, callback): - raise NotImplementedError() - - def _on_oauth_get_user(self, access_token, future, user_future): - if user_future.exception() is not None: - future.set_exception(user_future.exception()) - return - user = user_future.result() - if not user: - future.set_exception(AuthError("Error getting user")) - return - user["access_token"] = access_token - future_set_result_unless_cancelled(future, user) - - def _oauth_request_parameters(self, url, access_token, parameters={}, - method="GET"): - """Returns the OAuth parameters as a dict for the given request. - - parameters should include all POST arguments and query string arguments - that will be sent with the request. - """ - consumer_token = self._oauth_consumer_token() - base_args = dict( - oauth_consumer_key=escape.to_basestring(consumer_token["key"]), - oauth_token=escape.to_basestring(access_token["key"]), - oauth_signature_method="HMAC-SHA1", - oauth_timestamp=str(int(time.time())), - oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), - oauth_version="1.0", - ) - args = {} - args.update(base_args) - args.update(parameters) - if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": - signature = _oauth10a_signature(consumer_token, method, url, args, - access_token) - else: - signature = _oauth_signature(consumer_token, method, url, args, - access_token) - base_args["oauth_signature"] = escape.to_basestring(signature) - return base_args - - def get_auth_http_client(self): - """Returns the `.AsyncHTTPClient` instance to be used for auth requests. - - May be overridden by subclasses to use an HTTP client other than - the default. - """ - return httpclient.AsyncHTTPClient() - - -class OAuth2Mixin(object): - """Abstract implementation of OAuth 2.0. - - See `FacebookGraphMixin` or `GoogleOAuth2Mixin` below for example - implementations. - - Class attributes: - - * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url. - * ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url. - """ - @_non_deprecated_return_future - def authorize_redirect(self, redirect_uri=None, client_id=None, - client_secret=None, extra_params=None, - callback=None, scope=None, response_type="code"): - """Redirects the user to obtain OAuth authorization for this service. - - Some providers require that you register a redirect URL with - your application instead of passing one via this method. You - should call this method to log the user in, and then call - ``get_authenticated_user`` in the handler for your - redirect URL to complete the authorization process. - - .. versionchanged:: 3.1 - Returns a `.Future` and takes an optional callback. These are - not strictly necessary as this method is synchronous, - but they are supplied for consistency with - `OAuthMixin.authorize_redirect`. - - .. deprecated:: 5.1 - - The ``callback`` argument and returned awaitable will be removed - in Tornado 6.0; this will be an ordinary synchronous function. 
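For the OAuth2 variant, the authorize_redirect body that follows simply redirects to url_concat(self._OAUTH_AUTHORIZE_URL, args). A sketch of the location it produces (endpoint and client id are illustrative):

from tornado.httputil import url_concat

print(url_concat('https://accounts.google.com/o/oauth2/v2/auth', {
    'redirect_uri': 'https://example.com/auth/google',
    'client_id': 'CLIENT_ID',
    'response_type': 'code',
    'scope': 'profile email',
}))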
- """ - args = { - "redirect_uri": redirect_uri, - "client_id": client_id, - "response_type": response_type - } - if extra_params: - args.update(extra_params) - if scope: - args['scope'] = ' '.join(scope) - self.redirect( - url_concat(self._OAUTH_AUTHORIZE_URL, args)) - callback() - - def _oauth_request_token_url(self, redirect_uri=None, client_id=None, - client_secret=None, code=None, - extra_params=None): - url = self._OAUTH_ACCESS_TOKEN_URL - args = dict( - redirect_uri=redirect_uri, - code=code, - client_id=client_id, - client_secret=client_secret, - ) - if extra_params: - args.update(extra_params) - return url_concat(url, args) - - @_auth_return_future - def oauth2_request(self, url, callback, access_token=None, - post_args=None, **args): - """Fetches the given URL auth an OAuth2 access token. - - If the request is a POST, ``post_args`` should be provided. Query - string arguments should be given as keyword arguments. - - Example usage: - - ..testcode:: - - class MainHandler(tornado.web.RequestHandler, - tornado.auth.FacebookGraphMixin): - @tornado.web.authenticated - async def get(self): - new_entry = await self.oauth2_request( - "https://graph.facebook.com/me/feed", - post_args={"message": "I am posting from my Tornado application!"}, - access_token=self.current_user["access_token"]) - - if not new_entry: - # Call failed; perhaps missing permission? - await self.authorize_redirect() - return - self.finish("Posted a message!") - - .. testoutput:: - :hide: - - .. versionadded:: 4.3 - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - all_args = {} - if access_token: - all_args["access_token"] = access_token - all_args.update(args) - - if all_args: - url += "?" + urllib_parse.urlencode(all_args) - callback = wrap(functools.partial(self._on_oauth2_request, callback)) - http = self.get_auth_http_client() - if post_args is not None: - fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args)) - else: - fut = http.fetch(url) - fut.add_done_callback(callback) - - def _on_oauth2_request(self, future, response_fut): - try: - response = response_fut.result() - except Exception as e: - future.set_exception(AuthError("Error response %s" % e)) - return - - future_set_result_unless_cancelled(future, escape.json_decode(response.body)) - - def get_auth_http_client(self): - """Returns the `.AsyncHTTPClient` instance to be used for auth requests. - - May be overridden by subclasses to use an HTTP client other than - the default. - - .. versionadded:: 4.3 - """ - return httpclient.AsyncHTTPClient() - - -class TwitterMixin(OAuthMixin): - """Twitter OAuth authentication. - - To authenticate with Twitter, register your application with - Twitter at http://twitter.com/apps. Then copy your Consumer Key - and Consumer Secret to the application - `~tornado.web.Application.settings` ``twitter_consumer_key`` and - ``twitter_consumer_secret``. Use this mixin on the handler for the - URL you registered as your application's callback URL. - - When your application is set up, you can use this mixin like this - to authenticate the user with Twitter and get access to their stream: - - .. testcode:: - - class TwitterLoginHandler(tornado.web.RequestHandler, - tornado.auth.TwitterMixin): - async def get(self): - if self.get_argument("oauth_token", None): - user = await self.get_authenticated_user() - # Save the user using e.g. set_secure_cookie() - else: - await self.authorize_redirect() - - .. 
testoutput:: - :hide: - - The user object returned by `~OAuthMixin.get_authenticated_user` - includes the attributes ``username``, ``name``, ``access_token``, - and all of the custom Twitter user attributes described at - https://dev.twitter.com/docs/api/1.1/get/users/show - """ - _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token" - _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token" - _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize" - _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate" - _OAUTH_NO_CALLBACKS = False - _TWITTER_BASE_URL = "https://api.twitter.com/1.1" - - @_non_deprecated_return_future - def authenticate_redirect(self, callback_uri=None, callback=None): - """Just like `~OAuthMixin.authorize_redirect`, but - auto-redirects if authorized. - - This is generally the right interface to use if you are using - Twitter for single-sign on. - - .. versionchanged:: 3.1 - Now returns a `.Future` and takes an optional callback, for - compatibility with `.gen.coroutine`. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - http = self.get_auth_http_client() - fut = http.fetch(self._oauth_request_token_url(callback_uri=callback_uri)) - fut.add_done_callback(wrap(functools.partial( - self._on_request_token, self._OAUTH_AUTHENTICATE_URL, - None, callback))) - - @_auth_return_future - def twitter_request(self, path, callback=None, access_token=None, - post_args=None, **args): - """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor`` - - The path should not include the format or API version number. - (we automatically use JSON format and API version 1). - - If the request is a POST, ``post_args`` should be provided. Query - string arguments should be given as keyword arguments. - - All the Twitter methods are documented at http://dev.twitter.com/ - - Many methods require an OAuth access token which you can - obtain through `~OAuthMixin.authorize_redirect` and - `~OAuthMixin.get_authenticated_user`. The user returned through that - process includes an 'access_token' attribute that can be used - to make authenticated requests via this method. Example - usage: - - .. testcode:: - - class MainHandler(tornado.web.RequestHandler, - tornado.auth.TwitterMixin): - @tornado.web.authenticated - async def get(self): - new_entry = await self.twitter_request( - "/statuses/update", - post_args={"status": "Testing Tornado Web Server"}, - access_token=self.current_user["access_token"]) - if not new_entry: - # Call failed; perhaps missing permission? - yield self.authorize_redirect() - return - self.finish("Posted a message!") - - .. testoutput:: - :hide: - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - if path.startswith('http:') or path.startswith('https:'): - # Raw urls are useful for e.g. search which doesn't follow the - # usual pattern: http://search.twitter.com/search.json - url = path - else: - url = self._TWITTER_BASE_URL + path + ".json" - # Add the OAuth resource request signature if we have credentials - if access_token: - all_args = {} - all_args.update(args) - all_args.update(post_args or {}) - method = "POST" if post_args is not None else "GET" - oauth = self._oauth_request_parameters( - url, access_token, all_args, method=method) - args.update(oauth) - if args: - url += "?" 
+ urllib_parse.urlencode(args) - http = self.get_auth_http_client() - http_callback = wrap(functools.partial(self._on_twitter_request, callback, url)) - if post_args is not None: - fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args)) - else: - fut = http.fetch(url) - fut.add_done_callback(http_callback) - - def _on_twitter_request(self, future, url, response_fut): - try: - response = response_fut.result() - except Exception as e: - future.set_exception(AuthError( - "Error response %s fetching %s" % (e, url))) - return - future_set_result_unless_cancelled(future, escape.json_decode(response.body)) - - def _oauth_consumer_token(self): - self.require_setting("twitter_consumer_key", "Twitter OAuth") - self.require_setting("twitter_consumer_secret", "Twitter OAuth") - return dict( - key=self.settings["twitter_consumer_key"], - secret=self.settings["twitter_consumer_secret"]) - - @gen.coroutine - def _oauth_get_user_future(self, access_token): - user = yield self.twitter_request( - "/account/verify_credentials", - access_token=access_token) - if user: - user["username"] = user["screen_name"] - raise gen.Return(user) - - -class GoogleOAuth2Mixin(OAuth2Mixin): - """Google authentication using OAuth2. - - In order to use, register your application with Google and copy the - relevant parameters to your application settings. - - * Go to the Google Dev Console at http://console.developers.google.com - * Select a project, or create a new one. - * In the sidebar on the left, select APIs & Auth. - * In the list of APIs, find the Google+ API service and set it to ON. - * In the sidebar on the left, select Credentials. - * In the OAuth section of the page, select Create New Client ID. - * Set the Redirect URI to point to your auth handler - * Copy the "Client secret" and "Client ID" to the application settings as - {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}} - - .. versionadded:: 3.2 - """ - _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/v2/auth" - _OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token" - _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" - _OAUTH_NO_CALLBACKS = False - _OAUTH_SETTINGS_KEY = 'google_oauth' - - @_auth_return_future - def get_authenticated_user(self, redirect_uri, code, callback): - """Handles the login for the Google user, returning an access token. - - The result is a dictionary containing an ``access_token`` field - ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)). - Unlike other ``get_authenticated_user`` methods in this package, - this method does not return any additional information about the user. - The returned access token can be used with `OAuth2Mixin.oauth2_request` - to request additional information (perhaps from - ``https://www.googleapis.com/oauth2/v2/userinfo``) - - Example usage: - - .. testcode:: - - class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, - tornado.auth.GoogleOAuth2Mixin): - async def get(self): - if self.get_argument('code', False): - access = await self.get_authenticated_user( - redirect_uri='http://your.site.com/auth/google', - code=self.get_argument('code')) - user = await self.oauth2_request( - "https://www.googleapis.com/oauth2/v1/userinfo", - access_token=access["access_token"]) - # Save the user and access token with - # e.g. set_secure_cookie. 
- else: - await self.authorize_redirect( - redirect_uri='http://your.site.com/auth/google', - client_id=self.settings['google_oauth']['key'], - scope=['profile', 'email'], - response_type='code', - extra_params={'approval_prompt': 'auto'}) - - .. testoutput:: - :hide: - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ # noqa: E501 - http = self.get_auth_http_client() - body = urllib_parse.urlencode({ - "redirect_uri": redirect_uri, - "code": code, - "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'], - "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'], - "grant_type": "authorization_code", - }) - - fut = http.fetch(self._OAUTH_ACCESS_TOKEN_URL, - method="POST", - headers={'Content-Type': 'application/x-www-form-urlencoded'}, - body=body) - fut.add_done_callback(wrap(functools.partial(self._on_access_token, callback))) - - def _on_access_token(self, future, response_fut): - """Callback function for the exchange to the access token.""" - try: - response = response_fut.result() - except Exception as e: - future.set_exception(AuthError('Google auth error: %s' % str(e))) - return - - args = escape.json_decode(response.body) - future_set_result_unless_cancelled(future, args) - - -class FacebookGraphMixin(OAuth2Mixin): - """Facebook authentication using the new Graph API and OAuth2.""" - _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?" - _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?" - _OAUTH_NO_CALLBACKS = False - _FACEBOOK_BASE_URL = "https://graph.facebook.com" - - @_auth_return_future - def get_authenticated_user(self, redirect_uri, client_id, client_secret, - code, callback, extra_fields=None): - """Handles the login for the Facebook user, returning a user object. - - Example usage: - - .. testcode:: - - class FacebookGraphLoginHandler(tornado.web.RequestHandler, - tornado.auth.FacebookGraphMixin): - async def get(self): - if self.get_argument("code", False): - user = await self.get_authenticated_user( - redirect_uri='/auth/facebookgraph/', - client_id=self.settings["facebook_api_key"], - client_secret=self.settings["facebook_secret"], - code=self.get_argument("code")) - # Save the user with e.g. set_secure_cookie - else: - await self.authorize_redirect( - redirect_uri='/auth/facebookgraph/', - client_id=self.settings["facebook_api_key"], - extra_params={"scope": "read_stream,offline_access"}) - - .. testoutput:: - :hide: - - This method returns a dictionary which may contain the following fields: - - * ``access_token``, a string which may be passed to `facebook_request` - * ``session_expires``, an integer encoded as a string representing - the time until the access token expires in seconds. This field should - be used like ``int(user['session_expires'])``; in a future version of - Tornado it will change from a string to an integer. - * ``id``, ``name``, ``first_name``, ``last_name``, ``locale``, ``picture``, - ``link``, plus any fields named in the ``extra_fields`` argument. These - fields are copied from the Facebook graph API - `user object <https://developers.facebook.com/docs/graph-api/reference/user>`_ - - .. versionchanged:: 4.5 - The ``session_expires`` field was updated to support changes made to the - Facebook API in March 2017. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead.
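The _on_access_token coroutine below computes Facebook's appsecret_proof before calling /me. The same computation in isolation (key and token values are illustrative):

import hashlib
import hmac

proof = hmac.new(key=b'facebook-app-secret',
                 msg=b'user-access-token',
                 digestmod=hashlib.sha256).hexdigest()
print(proof)  # hex SHA-256 HMAC of the access token, keyed by the app secret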
- """ - http = self.get_auth_http_client() - args = { - "redirect_uri": redirect_uri, - "code": code, - "client_id": client_id, - "client_secret": client_secret, - } - - fields = set(['id', 'name', 'first_name', 'last_name', - 'locale', 'picture', 'link']) - if extra_fields: - fields.update(extra_fields) - - fut = http.fetch(self._oauth_request_token_url(**args)) - fut.add_done_callback(wrap(functools.partial(self._on_access_token, redirect_uri, client_id, - client_secret, callback, fields))) - - @gen.coroutine - def _on_access_token(self, redirect_uri, client_id, client_secret, - future, fields, response_fut): - try: - response = response_fut.result() - except Exception as e: - future.set_exception(AuthError('Facebook auth error: %s' % str(e))) - return - - args = escape.json_decode(response.body) - session = { - "access_token": args.get("access_token"), - "expires_in": args.get("expires_in") - } - - user = yield self.facebook_request( - path="/me", - access_token=session["access_token"], - appsecret_proof=hmac.new(key=client_secret.encode('utf8'), - msg=session["access_token"].encode('utf8'), - digestmod=hashlib.sha256).hexdigest(), - fields=",".join(fields) - ) - - if user is None: - future_set_result_unless_cancelled(future, None) - return - - fieldmap = {} - for field in fields: - fieldmap[field] = user.get(field) - - # session_expires is converted to str for compatibility with - # older versions in which the server used url-encoding and - # this code simply returned the string verbatim. - # This should change in Tornado 5.0. - fieldmap.update({"access_token": session["access_token"], - "session_expires": str(session.get("expires_in"))}) - future_set_result_unless_cancelled(future, fieldmap) - - @_auth_return_future - def facebook_request(self, path, callback, access_token=None, - post_args=None, **args): - """Fetches the given relative API path, e.g., "/btaylor/picture" - - If the request is a POST, ``post_args`` should be provided. Query - string arguments should be given as keyword arguments. - - An introduction to the Facebook Graph API can be found at - http://developers.facebook.com/docs/api - - Many methods require an OAuth access token which you can - obtain through `~OAuth2Mixin.authorize_redirect` and - `get_authenticated_user`. The user returned through that - process includes an ``access_token`` attribute that can be - used to make authenticated requests via this method. - - Example usage: - - .. testcode:: - - class MainHandler(tornado.web.RequestHandler, - tornado.auth.FacebookGraphMixin): - @tornado.web.authenticated - async def get(self): - new_entry = await self.facebook_request( - "/me/feed", - post_args={"message": "I am posting from my Tornado application!"}, - access_token=self.current_user["access_token"]) - - if not new_entry: - # Call failed; perhaps missing permission? - yield self.authorize_redirect() - return - self.finish("Posted a message!") - - .. testoutput:: - :hide: - - The given path is relative to ``self._FACEBOOK_BASE_URL``, - by default "https://graph.facebook.com". - - This method is a wrapper around `OAuth2Mixin.oauth2_request`; - the only difference is that this method takes a relative path, - while ``oauth2_request`` takes a complete url. - - .. versionchanged:: 3.1 - Added the ability to override ``self._FACEBOOK_BASE_URL``. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. 
- """ - url = self._FACEBOOK_BASE_URL + path - # Thanks to the _auth_return_future decorator, our "callback" - # argument is a Future, which we cannot pass as a callback to - # oauth2_request. Instead, have oauth2_request return a - # future and chain them together. - oauth_future = self.oauth2_request(url, access_token=access_token, - post_args=post_args, **args) - chain_future(oauth_future, callback) - - -def _oauth_signature(consumer_token, method, url, parameters={}, token=None): - """Calculates the HMAC-SHA1 OAuth signature for the given request. - - See http://oauth.net/core/1.0/#signing_process - """ - parts = urlparse.urlparse(url) - scheme, netloc, path = parts[:3] - normalized_url = scheme.lower() + "://" + netloc.lower() + path - - base_elems = [] - base_elems.append(method.upper()) - base_elems.append(normalized_url) - base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) - for k, v in sorted(parameters.items()))) - base_string = "&".join(_oauth_escape(e) for e in base_elems) - - key_elems = [escape.utf8(consumer_token["secret"])] - key_elems.append(escape.utf8(token["secret"] if token else "")) - key = b"&".join(key_elems) - - hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1) - return binascii.b2a_base64(hash.digest())[:-1] - - -def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None): - """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request. - - See http://oauth.net/core/1.0a/#signing_process - """ - parts = urlparse.urlparse(url) - scheme, netloc, path = parts[:3] - normalized_url = scheme.lower() + "://" + netloc.lower() + path - - base_elems = [] - base_elems.append(method.upper()) - base_elems.append(normalized_url) - base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) - for k, v in sorted(parameters.items()))) - - base_string = "&".join(_oauth_escape(e) for e in base_elems) - key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))] - key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else "")) - key = b"&".join(key_elems) - - hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1) - return binascii.b2a_base64(hash.digest())[:-1] - - -def _oauth_escape(val): - if isinstance(val, unicode_type): - val = val.encode("utf-8") - return urllib_parse.quote(val, safe="~") - - -def _oauth_parse_response(body): - # I can't find an officially-defined encoding for oauth responses and - # have never seen anyone use non-ascii. Leave the response in a byte - # string for python 2, and use utf8 on python 3. - body = escape.native_str(body) - p = urlparse.parse_qs(body, keep_blank_values=False) - token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0]) - - # Add the extra parameters the Provider included to the token - special = ("oauth_token", "oauth_token_secret") - token.update((k, p[k][0]) for k in p if k not in special) - return token +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+
+"""This module contains implementations of various third-party
+authentication schemes.
+
+All the classes in this file are class mixins designed to be used with
+the `tornado.web.RequestHandler` class. They are used in two ways:
+
+* On a login handler, use methods such as ``authenticate_redirect()``,
+  ``authorize_redirect()``, and ``get_authenticated_user()`` to
+  establish the user's identity and store authentication tokens to your
+  database and/or cookies.
+* In non-login handlers, use methods such as ``facebook_request()``
+  or ``twitter_request()`` to use the authentication tokens to make
+  requests to the respective services.
+
+They all take slightly different arguments because these services
+implement authentication and authorization slightly differently.
+See the individual service classes below for complete documentation.
+
+Example usage for Google OAuth:
+
+.. testcode::
+
+    class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
+                                   tornado.auth.GoogleOAuth2Mixin):
+        async def get(self):
+            if self.get_argument('code', False):
+                user = await self.get_authenticated_user(
+                    redirect_uri='http://your.site.com/auth/google',
+                    code=self.get_argument('code'))
+                # Save the user with e.g. set_secure_cookie
+            else:
+                await self.authorize_redirect(
+                    redirect_uri='http://your.site.com/auth/google',
+                    client_id=self.settings['google_oauth']['key'],
+                    scope=['profile', 'email'],
+                    response_type='code',
+                    extra_params={'approval_prompt': 'auto'})
+
+.. testoutput::
+   :hide:
+
+"""
+
+import base64
+import binascii
+import hashlib
+import hmac
+import time
+import urllib.parse
+import uuid
+
+from tornado import httpclient
+from tornado import escape
+from tornado.httputil import url_concat
+from tornado.util import unicode_type
+from tornado.web import RequestHandler
+
+from typing import List, Any, Dict, cast, Iterable, Union, Optional
+
+
+class AuthError(Exception):
+    pass
+
+
+class OpenIdMixin(object):
+    """Abstract implementation of OpenID and Attribute Exchange.
+
+    Class attributes:
+
+    * ``_OPENID_ENDPOINT``: the identity provider's URI.
+    """
+
+    def authenticate_redirect(
+        self,
+        callback_uri: str = None,
+        ax_attrs: List[str] = ["name", "email", "language", "username"],
+    ) -> None:
+        """Redirects to the authentication URL for this service.
+
+        After authentication, the service will redirect back to the given
+        callback URI with additional parameters including ``openid.mode``.
+
+        We request the given attributes for the authenticated user by
+        default (name, email, language, and username). If you don't need
+        all those attributes for your app, you can request fewer with
+        the ax_attrs keyword argument.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed and this method no
+           longer returns an awaitable object. It is now an ordinary
+           synchronous function.
+        """
+        handler = cast(RequestHandler, self)
+        callback_uri = callback_uri or handler.request.uri
+        assert callback_uri is not None
+        args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
+        endpoint = self._OPENID_ENDPOINT  # type: ignore
+        handler.redirect(endpoint + "?" + urllib.parse.urlencode(args))
+
+    async def get_authenticated_user(
+        self, http_client: httpclient.AsyncHTTPClient = None
+    ) -> Dict[str, Any]:
+        """Fetches the authenticated user data upon redirect.
+
+        This method should be called by the handler that receives the
+        redirect from the `authenticate_redirect()` method (which is
+        often the same as the one that calls it; in that case you would
+        call `get_authenticated_user` if the ``openid.mode`` parameter
+        is present and `authenticate_redirect` if it is not).
+
+        The result of this method will generally be used to set a cookie.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           awaitable object instead.
+        """
+        handler = cast(RequestHandler, self)
+        # Verify the OpenID response via direct request to the OP
+        args = dict(
+            (k, v[-1]) for k, v in handler.request.arguments.items()
+        )  # type: Dict[str, Union[str, bytes]]
+        args["openid.mode"] = u"check_authentication"
+        url = self._OPENID_ENDPOINT  # type: ignore
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        resp = await http_client.fetch(
+            url, method="POST", body=urllib.parse.urlencode(args)
+        )
+        return self._on_authentication_verified(resp)
+
+    def _openid_args(
+        self, callback_uri: str, ax_attrs: Iterable[str] = [], oauth_scope: str = None
+    ) -> Dict[str, str]:
+        handler = cast(RequestHandler, self)
+        url = urllib.parse.urljoin(handler.request.full_url(), callback_uri)
+        args = {
+            "openid.ns": "http://specs.openid.net/auth/2.0",
+            "openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select",
+            "openid.identity": "http://specs.openid.net/auth/2.0/identifier_select",
+            "openid.return_to": url,
+            "openid.realm": urllib.parse.urljoin(url, "/"),
+            "openid.mode": "checkid_setup",
+        }
+        if ax_attrs:
+            args.update(
+                {
+                    "openid.ns.ax": "http://openid.net/srv/ax/1.0",
+                    "openid.ax.mode": "fetch_request",
+                }
+            )
+            ax_attrs = set(ax_attrs)
+            required = []  # type: List[str]
+            if "name" in ax_attrs:
+                ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
+                required += ["firstname", "fullname", "lastname"]
+                args.update(
+                    {
+                        "openid.ax.type.firstname": "http://axschema.org/namePerson/first",
+                        "openid.ax.type.fullname": "http://axschema.org/namePerson",
+                        "openid.ax.type.lastname": "http://axschema.org/namePerson/last",
+                    }
+                )
+            known_attrs = {
+                "email": "http://axschema.org/contact/email",
+                "language": "http://axschema.org/pref/language",
+                "username": "http://axschema.org/namePerson/friendly",
+            }
+            for name in ax_attrs:
+                args["openid.ax.type." + name] = known_attrs[name]
+                required.append(name)
+            args["openid.ax.required"] = ",".join(required)
+        if oauth_scope:
+            args.update(
+                {
+                    "openid.ns.oauth": "http://specs.openid.net/extensions/oauth/1.0",
+                    "openid.oauth.consumer": handler.request.host.split(":")[0],
+                    "openid.oauth.scope": oauth_scope,
+                }
+            )
+        return args
+
+    def _on_authentication_verified(
+        self, response: httpclient.HTTPResponse
+    ) -> Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        if b"is_valid:true" not in response.body:
+            raise AuthError("Invalid OpenID response: %s" % response.body)
+
+        # Make sure we got back at least an email from attribute exchange
+        ax_ns = None
+        for key in handler.request.arguments:
+            if (
+                key.startswith("openid.ns.")
+                and handler.get_argument(key) == u"http://openid.net/srv/ax/1.0"
+            ):
+                ax_ns = key[10:]
+                break
+
+        def get_ax_arg(uri: str) -> str:
+            if not ax_ns:
+                return u""
+            prefix = "openid." + ax_ns + ".type."
+            ax_name = None
+            for name in handler.request.arguments.keys():
+                if handler.get_argument(name) == uri and name.startswith(prefix):
+                    part = name[len(prefix) :]
+                    ax_name = "openid." + ax_ns + ".value." + part
+                    break
+            if not ax_name:
+                return u""
+            return handler.get_argument(ax_name, u"")
+
+        email = get_ax_arg("http://axschema.org/contact/email")
+        name = get_ax_arg("http://axschema.org/namePerson")
+        first_name = get_ax_arg("http://axschema.org/namePerson/first")
+        last_name = get_ax_arg("http://axschema.org/namePerson/last")
+        username = get_ax_arg("http://axschema.org/namePerson/friendly")
+        locale = get_ax_arg("http://axschema.org/pref/language").lower()
+        user = dict()
+        name_parts = []
+        if first_name:
+            user["first_name"] = first_name
+            name_parts.append(first_name)
+        if last_name:
+            user["last_name"] = last_name
+            name_parts.append(last_name)
+        if name:
+            user["name"] = name
+        elif name_parts:
+            user["name"] = u" ".join(name_parts)
+        elif email:
+            user["name"] = email.split("@")[0]
+        if email:
+            user["email"] = email
+        if locale:
+            user["locale"] = locale
+        if username:
+            user["username"] = username
+        claimed_id = handler.get_argument("openid.claimed_id", None)
+        if claimed_id:
+            user["claimed_id"] = claimed_id
+        return user
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
+        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
+
+        May be overridden by subclasses to use an HTTP client other than
+        the default.
+        """
+        return httpclient.AsyncHTTPClient()
+
+
+class OAuthMixin(object):
+    """Abstract implementation of OAuth 1.0 and 1.0a.
+
+    See `TwitterMixin` below for an example implementation.
+
+    Class attributes:
+
+    * ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url.
+    * ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url.
+    * ``_OAUTH_VERSION``: May be either "1.0" or "1.0a".
+    * ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires
+      advance registration of callbacks.
+
+    Subclasses must also override the `_oauth_get_user_future` and
+    `_oauth_consumer_token` methods.
+    """
+
+    async def authorize_redirect(
+        self,
+        callback_uri: str = None,
+        extra_params: Dict[str, Any] = None,
+        http_client: httpclient.AsyncHTTPClient = None,
+    ) -> None:
+        """Redirects the user to obtain OAuth authorization for this service.
+
+        The ``callback_uri`` may be omitted if you have previously
+        registered a callback URI with the third-party service. For
+        some services, you must use a previously-registered callback
+        URI and cannot specify a callback via this method.
+
+        This method sets a cookie called ``_oauth_request_token`` which is
+        subsequently used (and cleared) in `get_authenticated_user` for
+        security purposes.
+
+        This method is asynchronous and must be called with ``await``
+        or ``yield`` (This is different from other ``auth*_redirect``
+        methods defined in this module). It calls
+        `.RequestHandler.finish` for you so you should not write any
+        other response after it returns.
+
+        .. versionchanged:: 3.1
+           Now returns a `.Future` and takes an optional callback, for
+           compatibility with `.gen.coroutine`.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           awaitable object instead.
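+
+        As a minimal sketch (assuming a concrete subclass such as
+        `TwitterMixin` with its consumer settings configured; the
+        handler name and the ``/auth/twitter/`` callback URI are
+        hypothetical and must be registered with the service):
+
+        .. testcode::
+
+            class TwitterAuthHandler(tornado.web.RequestHandler,
+                                     tornado.auth.TwitterMixin):
+                async def get(self):
+                    # authorize_redirect() finishes the response itself,
+                    # so nothing may be written after awaiting it.
+                    await self.authorize_redirect(callback_uri='/auth/twitter/')
+
+        .. testoutput::
+           :hide: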
+
+        """
+        if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
+            raise Exception("This service does not support oauth_callback")
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        assert http_client is not None
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            response = await http_client.fetch(
+                self._oauth_request_token_url(
+                    callback_uri=callback_uri, extra_params=extra_params
+                )
+            )
+        else:
+            response = await http_client.fetch(self._oauth_request_token_url())
+        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
+        self._on_request_token(url, callback_uri, response)
+
+    async def get_authenticated_user(
+        self, http_client: httpclient.AsyncHTTPClient = None
+    ) -> Dict[str, Any]:
+        """Gets the OAuth authorized user and access token.
+
+        This method should be called from the handler for your
+        OAuth callback URL to complete the registration process. The
+        result of the returned awaitable is the authenticated user
+        dictionary. This dictionary will contain an ``access_token``
+        which can be used to make authorized requests to this service
+        on behalf of the user. The dictionary will also contain other
+        fields such as ``name``, depending on the service used.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           awaitable object instead.
+        """
+        handler = cast(RequestHandler, self)
+        request_key = escape.utf8(handler.get_argument("oauth_token"))
+        oauth_verifier = handler.get_argument("oauth_verifier", None)
+        request_cookie = handler.get_cookie("_oauth_request_token")
+        if not request_cookie:
+            raise AuthError("Missing OAuth request token cookie")
+        handler.clear_cookie("_oauth_request_token")
+        cookie_key, cookie_secret = [
+            base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")
+        ]
+        if cookie_key != request_key:
+            raise AuthError("Request token does not match cookie")
+        token = dict(
+            key=cookie_key, secret=cookie_secret
+        )  # type: Dict[str, Union[str, bytes]]
+        if oauth_verifier:
+            token["verifier"] = oauth_verifier
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        assert http_client is not None
+        response = await http_client.fetch(self._oauth_access_token_url(token))
+        access_token = _oauth_parse_response(response.body)
+        user = await self._oauth_get_user_future(access_token)
+        if not user:
+            raise AuthError("Error getting user")
+        user["access_token"] = access_token
+        return user
+
+    def _oauth_request_token_url(
+        self, callback_uri: str = None, extra_params: Dict[str, Any] = None
+    ) -> str:
+        handler = cast(RequestHandler, self)
+        consumer_token = self._oauth_consumer_token()
+        url = self._OAUTH_REQUEST_TOKEN_URL  # type: ignore
+        args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            if callback_uri == "oob":
+                args["oauth_callback"] = "oob"
+            elif callback_uri:
+                args["oauth_callback"] = urllib.parse.urljoin(
+                    handler.request.full_url(), callback_uri
+                )
+            if extra_params:
+                args.update(extra_params)
+            signature = _oauth10a_signature(consumer_token, "GET", url, args)
+        else:
+            signature = _oauth_signature(consumer_token, "GET", url, args)
+
+        args["oauth_signature"] = signature
+        return url + "?" + urllib.parse.urlencode(args)
+
+    def _on_request_token(
+        self,
+        authorize_url: str,
+        callback_uri: Optional[str],
+        response: httpclient.HTTPResponse,
+    ) -> None:
+        handler = cast(RequestHandler, self)
+        request_token = _oauth_parse_response(response.body)
+        data = (
+            base64.b64encode(escape.utf8(request_token["key"]))
+            + b"|"
+            + base64.b64encode(escape.utf8(request_token["secret"]))
+        )
+        handler.set_cookie("_oauth_request_token", data)
+        args = dict(oauth_token=request_token["key"])
+        if callback_uri == "oob":
+            handler.finish(authorize_url + "?" + urllib.parse.urlencode(args))
+            return
+        elif callback_uri:
+            args["oauth_callback"] = urllib.parse.urljoin(
+                handler.request.full_url(), callback_uri
+            )
+        handler.redirect(authorize_url + "?" + urllib.parse.urlencode(args))
+
+    def _oauth_access_token_url(self, request_token: Dict[str, Any]) -> str:
+        consumer_token = self._oauth_consumer_token()
+        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
+        args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_token=escape.to_basestring(request_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        if "verifier" in request_token:
+            args["oauth_verifier"] = request_token["verifier"]
+
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            signature = _oauth10a_signature(
+                consumer_token, "GET", url, args, request_token
+            )
+        else:
+            signature = _oauth_signature(
+                consumer_token, "GET", url, args, request_token
+            )
+
+        args["oauth_signature"] = signature
+        return url + "?" + urllib.parse.urlencode(args)
+
+    def _oauth_consumer_token(self) -> Dict[str, Any]:
+        """Subclasses must override this to return their OAuth consumer keys.
+
+        The return value should be a `dict` with keys ``key`` and ``secret``.
+        """
+        raise NotImplementedError()
+
+    async def _oauth_get_user_future(
+        self, access_token: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Subclasses must override this to get basic information about the
+        user.
+
+        Should be a coroutine whose result is a dictionary
+        containing information about the user, which may have been
+        retrieved by using ``access_token`` to make a request to the
+        service.
+
+        The access token will be added to the returned dictionary to make
+        the result of `get_authenticated_user`.
+
+        .. versionchanged:: 5.1
+
+           Subclasses may also define this method with ``async def``.
+
+        .. versionchanged:: 6.0
+
+           A synchronous fallback to ``_oauth_get_user`` was removed.
+        """
+        raise NotImplementedError()
+
+    def _oauth_request_parameters(
+        self,
+        url: str,
+        access_token: Dict[str, Any],
+        parameters: Dict[str, Any] = {},
+        method: str = "GET",
+    ) -> Dict[str, Any]:
+        """Returns the OAuth parameters as a dict for the given request.
+
+        parameters should include all POST arguments and query string arguments
+        that will be sent with the request.
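+
+        A sketch of typical use from a request method, mirroring what
+        `TwitterMixin.twitter_request` does (illustrative only; ``url``,
+        ``access_token``, ``all_args`` and ``method`` are assumed to be
+        prepared by the caller)::
+
+            # Sign the combined query/POST arguments, then fold the
+            # resulting oauth_* parameters into the query string.
+            oauth = self._oauth_request_parameters(
+                url, access_token, all_args, method=method)
+            args.update(oauth)
+            url += "?" + urllib.parse.urlencode(args)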
+        """
+        consumer_token = self._oauth_consumer_token()
+        base_args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_token=escape.to_basestring(access_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        args = {}
+        args.update(base_args)
+        args.update(parameters)
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            signature = _oauth10a_signature(
+                consumer_token, method, url, args, access_token
+            )
+        else:
+            signature = _oauth_signature(
+                consumer_token, method, url, args, access_token
+            )
+        base_args["oauth_signature"] = escape.to_basestring(signature)
+        return base_args
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
+        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
+
+        May be overridden by subclasses to use an HTTP client other than
+        the default.
+        """
+        return httpclient.AsyncHTTPClient()
+
+
+class OAuth2Mixin(object):
+    """Abstract implementation of OAuth 2.0.
+
+    See `FacebookGraphMixin` or `GoogleOAuth2Mixin` below for example
+    implementations.
+
+    Class attributes:
+
+    * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url.
+    * ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url.
+    """
+
+    def authorize_redirect(
+        self,
+        redirect_uri: str = None,
+        client_id: str = None,
+        client_secret: str = None,
+        extra_params: Dict[str, Any] = None,
+        scope: str = None,
+        response_type: str = "code",
+    ) -> None:
+        """Redirects the user to obtain OAuth authorization for this service.
+
+        Some providers require that you register a redirect URL with
+        your application instead of passing one via this method. You
+        should call this method to log the user in, and then call
+        ``get_authenticated_user`` in the handler for your
+        redirect URL to complete the authorization process.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument and returned awaitable were removed;
+           this is now an ordinary synchronous function.
+        """
+        handler = cast(RequestHandler, self)
+        args = {"response_type": response_type}
+        if redirect_uri is not None:
+            args["redirect_uri"] = redirect_uri
+        if client_id is not None:
+            args["client_id"] = client_id
+        if extra_params:
+            args.update(extra_params)
+        if scope:
+            args["scope"] = " ".join(scope)
+        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
+        handler.redirect(url_concat(url, args))
+
+    def _oauth_request_token_url(
+        self,
+        redirect_uri: str = None,
+        client_id: str = None,
+        client_secret: str = None,
+        code: str = None,
+        extra_params: Dict[str, Any] = None,
+    ) -> str:
+        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
+        args = {}  # type: Dict[str, str]
+        if redirect_uri is not None:
+            args["redirect_uri"] = redirect_uri
+        if code is not None:
+            args["code"] = code
+        if client_id is not None:
+            args["client_id"] = client_id
+        if client_secret is not None:
+            args["client_secret"] = client_secret
+        if extra_params:
+            args.update(extra_params)
+        return url_concat(url, args)
+
+    async def oauth2_request(
+        self,
+        url: str,
+        access_token: str = None,
+        post_args: Dict[str, Any] = None,
+        **args: Any
+    ) -> Any:
+        """Fetches the given URL with an OAuth2 access token.
+
+        If the request is a POST, ``post_args`` should be provided. Query
+        string arguments should be given as keyword arguments.
+
+        Example usage:
+
+        .. testcode::
+
+            class MainHandler(tornado.web.RequestHandler,
+                              tornado.auth.FacebookGraphMixin):
+                @tornado.web.authenticated
+                async def get(self):
+                    new_entry = await self.oauth2_request(
+                        "https://graph.facebook.com/me/feed",
+                        post_args={"message": "I am posting from my Tornado application!"},
+                        access_token=self.current_user["access_token"])
+
+                    if not new_entry:
+                        # Call failed; perhaps missing permission?
+                        await self.authorize_redirect()
+                        return
+                    self.finish("Posted a message!")
+
+        .. testoutput::
+           :hide:
+
+        .. versionadded:: 4.3
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned awaitable object instead.
+        """
+        all_args = {}
+        if access_token:
+            all_args["access_token"] = access_token
+        all_args.update(args)
+
+        if all_args:
+            url += "?" + urllib.parse.urlencode(all_args)
+        http = self.get_auth_http_client()
+        if post_args is not None:
+            response = await http.fetch(
+                url, method="POST", body=urllib.parse.urlencode(post_args)
+            )
+        else:
+            response = await http.fetch(url)
+        return escape.json_decode(response.body)
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
+        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
+
+        May be overridden by subclasses to use an HTTP client other than
+        the default.
+
+        .. versionadded:: 4.3
+        """
+        return httpclient.AsyncHTTPClient()
+
+
+class TwitterMixin(OAuthMixin):
+    """Twitter OAuth authentication.
+
+    To authenticate with Twitter, register your application with
+    Twitter at http://twitter.com/apps. Then copy your Consumer Key
+    and Consumer Secret to the application
+    `~tornado.web.Application.settings` ``twitter_consumer_key`` and
+    ``twitter_consumer_secret``. Use this mixin on the handler for the
+    URL you registered as your application's callback URL.
+
+    When your application is set up, you can use this mixin like this
+    to authenticate the user with Twitter and get access to their stream:
+
+    .. testcode::
+
+        class TwitterLoginHandler(tornado.web.RequestHandler,
+                                  tornado.auth.TwitterMixin):
+            async def get(self):
+                if self.get_argument("oauth_token", None):
+                    user = await self.get_authenticated_user()
+                    # Save the user using e.g. set_secure_cookie()
+                else:
+                    await self.authorize_redirect()
+
+    .. testoutput::
+       :hide:
+
+    The user object returned by `~OAuthMixin.get_authenticated_user`
+    includes the attributes ``username``, ``name``, ``access_token``,
+    and all of the custom Twitter user attributes described at
+    https://dev.twitter.com/docs/api/1.1/get/users/show
+    """
+
+    _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
+    _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
+    _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize"
+    _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate"
+    _OAUTH_NO_CALLBACKS = False
+    _TWITTER_BASE_URL = "https://api.twitter.com/1.1"
+
+    async def authenticate_redirect(self, callback_uri: str = None) -> None:
+        """Just like `~OAuthMixin.authorize_redirect`, but
+        auto-redirects if authorized.
+
+        This is generally the right interface to use if you are using
+        Twitter for single sign-on.
+
+        .. versionchanged:: 3.1
+           Now returns a `.Future` and takes an optional callback, for
+           compatibility with `.gen.coroutine`.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           awaitable object instead.
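+
+        A minimal sketch of the full sign-on flow (same settings
+        assumptions as the class docstring above; the handler name is
+        hypothetical):
+
+        .. testcode::
+
+            class TwitterSSOHandler(tornado.web.RequestHandler,
+                                    tornado.auth.TwitterMixin):
+                async def get(self):
+                    if self.get_argument("oauth_token", None):
+                        # Returning from Twitter: complete the handshake.
+                        user = await self.get_authenticated_user()
+                        # Save the user using e.g. set_secure_cookie()
+                    else:
+                        # First visit: send the user through the
+                        # auto-redirecting authenticate endpoint.
+                        await self.authenticate_redirect()
+
+        .. testoutput::
+           :hide: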
+        """
+        http = self.get_auth_http_client()
+        response = await http.fetch(
+            self._oauth_request_token_url(callback_uri=callback_uri)
+        )
+        self._on_request_token(self._OAUTH_AUTHENTICATE_URL, None, response)
+
+    async def twitter_request(
+        self,
+        path: str,
+        access_token: Dict[str, Any],
+        post_args: Dict[str, Any] = None,
+        **args: Any
+    ) -> Any:
+        """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``
+
+        The path should not include the format or API version number.
+        (we automatically use JSON format and API version 1).
+
+        If the request is a POST, ``post_args`` should be provided. Query
+        string arguments should be given as keyword arguments.
+
+        All the Twitter methods are documented at http://dev.twitter.com/
+
+        Many methods require an OAuth access token which you can
+        obtain through `~OAuthMixin.authorize_redirect` and
+        `~OAuthMixin.get_authenticated_user`. The user returned through that
+        process includes an 'access_token' attribute that can be used
+        to make authenticated requests via this method. Example
+        usage:
+
+        .. testcode::
+
+            class MainHandler(tornado.web.RequestHandler,
+                              tornado.auth.TwitterMixin):
+                @tornado.web.authenticated
+                async def get(self):
+                    new_entry = await self.twitter_request(
+                        "/statuses/update",
+                        post_args={"status": "Testing Tornado Web Server"},
+                        access_token=self.current_user["access_token"])
+                    if not new_entry:
+                        # Call failed; perhaps missing permission?
+                        await self.authorize_redirect()
+                        return
+                    self.finish("Posted a message!")
+
+        .. testoutput::
+           :hide:
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           awaitable object instead.
+        """
+        if path.startswith("http:") or path.startswith("https:"):
+            # Raw urls are useful for e.g. search which doesn't follow the
+            # usual pattern: http://search.twitter.com/search.json
+            url = path
+        else:
+            url = self._TWITTER_BASE_URL + path + ".json"
+        # Add the OAuth resource request signature if we have credentials
+        if access_token:
+            all_args = {}
+            all_args.update(args)
+            all_args.update(post_args or {})
+            method = "POST" if post_args is not None else "GET"
+            oauth = self._oauth_request_parameters(
+                url, access_token, all_args, method=method
+            )
+            args.update(oauth)
+        if args:
+            url += "?" + urllib.parse.urlencode(args)
+        http = self.get_auth_http_client()
+        if post_args is not None:
+            response = await http.fetch(
+                url, method="POST", body=urllib.parse.urlencode(post_args)
+            )
+        else:
+            response = await http.fetch(url)
+        return escape.json_decode(response.body)
+
+    def _oauth_consumer_token(self) -> Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        handler.require_setting("twitter_consumer_key", "Twitter OAuth")
+        handler.require_setting("twitter_consumer_secret", "Twitter OAuth")
+        return dict(
+            key=handler.settings["twitter_consumer_key"],
+            secret=handler.settings["twitter_consumer_secret"],
+        )
+
+    async def _oauth_get_user_future(
+        self, access_token: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        user = await self.twitter_request(
+            "/account/verify_credentials", access_token=access_token
+        )
+        if user:
+            user["username"] = user["screen_name"]
+        return user
+
+
+class GoogleOAuth2Mixin(OAuth2Mixin):
+    """Google authentication using OAuth2.
+
+    To use this mixin, register your application with Google and copy the
+    relevant parameters to your application settings.
+
+    * Go to the Google Dev Console at http://console.developers.google.com
+    * Select a project, or create a new one.
+    * In the sidebar on the left, select APIs & Auth.
+    * In the list of APIs, find the Google+ API service and set it to ON.
+    * In the sidebar on the left, select Credentials.
+    * In the OAuth section of the page, select Create New Client ID.
+    * Set the Redirect URI to point to your auth handler
+    * Copy the "Client secret" and "Client ID" to the application settings as
+      ``{"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}}``
+
+    .. versionadded:: 3.2
+    """
+
+    _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/v2/auth"
+    _OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token"
+    _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
+    _OAUTH_NO_CALLBACKS = False
+    _OAUTH_SETTINGS_KEY = "google_oauth"
+
+    async def get_authenticated_user(
+        self, redirect_uri: str, code: str
+    ) -> Dict[str, Any]:
+        """Handles the login for the Google user, returning an access token.
+
+        The result is a dictionary containing an ``access_token`` field
+        (`among others <https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse>`_).
+        Unlike other ``get_authenticated_user`` methods in this package,
+        this method does not return any additional information about the user.
+        The returned access token can be used with `OAuth2Mixin.oauth2_request`
+        to request additional information (perhaps from
+        ``https://www.googleapis.com/oauth2/v2/userinfo``).
+
+        Example usage:
+
+        .. testcode::
+
+            class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
+                                           tornado.auth.GoogleOAuth2Mixin):
+                async def get(self):
+                    if self.get_argument('code', False):
+                        access = await self.get_authenticated_user(
+                            redirect_uri='http://your.site.com/auth/google',
+                            code=self.get_argument('code'))
+                        user = await self.oauth2_request(
+                            "https://www.googleapis.com/oauth2/v1/userinfo",
+                            access_token=access["access_token"])
+                        # Save the user and access token with
+                        # e.g. set_secure_cookie.
+                    else:
+                        await self.authorize_redirect(
+                            redirect_uri='http://your.site.com/auth/google',
+                            client_id=self.settings['google_oauth']['key'],
+                            scope=['profile', 'email'],
+                            response_type='code',
+                            extra_params={'approval_prompt': 'auto'})
+
+        .. testoutput::
+           :hide:
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned awaitable object instead.
+        """  # noqa: E501
+        handler = cast(RequestHandler, self)
+        http = self.get_auth_http_client()
+        body = urllib.parse.urlencode(
+            {
+                "redirect_uri": redirect_uri,
+                "code": code,
+                "client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"],
+                "client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"],
+                "grant_type": "authorization_code",
+            }
+        )
+
+        response = await http.fetch(
+            self._OAUTH_ACCESS_TOKEN_URL,
+            method="POST",
+            headers={"Content-Type": "application/x-www-form-urlencoded"},
+            body=body,
+        )
+        return escape.json_decode(response.body)
+
+
+class FacebookGraphMixin(OAuth2Mixin):
+    """Facebook authentication using the new Graph API and OAuth2."""
+
+    _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
+    _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?"
+    _OAUTH_NO_CALLBACKS = False
+    _FACEBOOK_BASE_URL = "https://graph.facebook.com"
+
+    async def get_authenticated_user(
+        self,
+        redirect_uri: str,
+        client_id: str,
+        client_secret: str,
+        code: str,
+        extra_fields: Dict[str, Any] = None,
+    ) -> Optional[Dict[str, Any]]:
+        """Handles the login for the Facebook user, returning a user object.
+
+        Example usage:
+
+        .. testcode::
+
+            class FacebookGraphLoginHandler(tornado.web.RequestHandler,
+                                            tornado.auth.FacebookGraphMixin):
+                async def get(self):
+                    if self.get_argument("code", False):
+                        user = await self.get_authenticated_user(
+                            redirect_uri='/auth/facebookgraph/',
+                            client_id=self.settings["facebook_api_key"],
+                            client_secret=self.settings["facebook_secret"],
+                            code=self.get_argument("code"))
+                        # Save the user with e.g. set_secure_cookie
+                    else:
+                        await self.authorize_redirect(
+                            redirect_uri='/auth/facebookgraph/',
+                            client_id=self.settings["facebook_api_key"],
+                            extra_params={"scope": "read_stream,offline_access"})
+
+        .. testoutput::
+           :hide:
+
+        This method returns a dictionary which may contain the following fields:
+
+        * ``access_token``, a string which may be passed to `facebook_request`
+        * ``session_expires``, an integer encoded as a string representing
+          the time until the access token expires in seconds. This field should
+          be used like ``int(user['session_expires'])``; in a future version of
+          Tornado it will change from a string to an integer.
+        * ``id``, ``name``, ``first_name``, ``last_name``, ``locale``, ``picture``,
+          ``link``, plus any fields named in the ``extra_fields`` argument. These
+          fields are copied from the Facebook graph API
+          `user object <https://developers.facebook.com/docs/graph-api/reference/user>`_
+
+        .. versionchanged:: 4.5
+           The ``session_expires`` field was updated to support changes made to the
+           Facebook API in March 2017.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned awaitable object instead.
+        """
+        http = self.get_auth_http_client()
+        args = {
+            "redirect_uri": redirect_uri,
+            "code": code,
+            "client_id": client_id,
+            "client_secret": client_secret,
+        }
+
+        fields = set(
+            ["id", "name", "first_name", "last_name", "locale", "picture", "link"]
+        )
+        if extra_fields:
+            fields.update(extra_fields)
+
+        response = await http.fetch(
+            self._oauth_request_token_url(**args)  # type: ignore
+        )
+        args = escape.json_decode(response.body)
+        session = {
+            "access_token": args.get("access_token"),
+            "expires_in": args.get("expires_in"),
+        }
+        assert session["access_token"] is not None
+
+        user = await self.facebook_request(
+            path="/me",
+            access_token=session["access_token"],
+            appsecret_proof=hmac.new(
+                key=client_secret.encode("utf8"),
+                msg=session["access_token"].encode("utf8"),
+                digestmod=hashlib.sha256,
+            ).hexdigest(),
+            fields=",".join(fields),
+        )
+
+        if user is None:
+            return None
+
+        fieldmap = {}
+        for field in fields:
+            fieldmap[field] = user.get(field)
+
+        # session_expires is converted to str for compatibility with
+        # older versions in which the server used url-encoding and
+        # this code simply returned the string verbatim.
+        # This should change in Tornado 5.0.
+        fieldmap.update(
+            {
+                "access_token": session["access_token"],
+                "session_expires": str(session.get("expires_in")),
+            }
+        )
+        return fieldmap
+
+    async def facebook_request(
+        self,
+        path: str,
+        access_token: str = None,
+        post_args: Dict[str, Any] = None,
+        **args: Any
+    ) -> Any:
+        """Fetches the given relative API path, e.g., "/btaylor/picture"
+
+        If the request is a POST, ``post_args`` should be provided. Query
+        string arguments should be given as keyword arguments.
+
+        An introduction to the Facebook Graph API can be found at
+        http://developers.facebook.com/docs/api
+
+        Many methods require an OAuth access token which you can
+        obtain through `~OAuth2Mixin.authorize_redirect` and
+        `get_authenticated_user`. The user returned through that
+        process includes an ``access_token`` attribute that can be
+        used to make authenticated requests via this method.
+
+        Example usage:
+
+        .. testcode::
+
+            class MainHandler(tornado.web.RequestHandler,
+                              tornado.auth.FacebookGraphMixin):
+                @tornado.web.authenticated
+                async def get(self):
+                    new_entry = await self.facebook_request(
+                        "/me/feed",
+                        post_args={"message": "I am posting from my Tornado application!"},
+                        access_token=self.current_user["access_token"])
+
+                    if not new_entry:
+                        # Call failed; perhaps missing permission?
+                        await self.authorize_redirect()
+                        return
+                    self.finish("Posted a message!")
+
+        .. testoutput::
+           :hide:
+
+        The given path is relative to ``self._FACEBOOK_BASE_URL``,
+        by default "https://graph.facebook.com".
+
+        This method is a wrapper around `OAuth2Mixin.oauth2_request`;
+        the only difference is that this method takes a relative path,
+        while ``oauth2_request`` takes a complete url.
+
+        .. versionchanged:: 3.1
+           Added the ability to override ``self._FACEBOOK_BASE_URL``.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned awaitable object instead.
+        """
+        url = self._FACEBOOK_BASE_URL + path
+        return await self.oauth2_request(
+            url, access_token=access_token, post_args=post_args, **args
+        )
+
+
+def _oauth_signature(
+    consumer_token: Dict[str, Any],
+    method: str,
+    url: str,
+    parameters: Dict[str, Any] = {},
+    token: Dict[str, Any] = None,
+) -> bytes:
+    """Calculates the HMAC-SHA1 OAuth signature for the given request.
+
+    See http://oauth.net/core/1.0/#signing_process
+    """
+    parts = urllib.parse.urlparse(url)
+    scheme, netloc, path = parts[:3]
+    normalized_url = scheme.lower() + "://" + netloc.lower() + path
+
+    base_elems = []
+    base_elems.append(method.upper())
+    base_elems.append(normalized_url)
+    base_elems.append(
+        "&".join(
+            "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items())
+        )
+    )
+    base_string = "&".join(_oauth_escape(e) for e in base_elems)
+
+    key_elems = [escape.utf8(consumer_token["secret"])]
+    key_elems.append(escape.utf8(token["secret"] if token else ""))
+    key = b"&".join(key_elems)
+
+    hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
+    return binascii.b2a_base64(hash.digest())[:-1]
+
+
+def _oauth10a_signature(
+    consumer_token: Dict[str, Any],
+    method: str,
+    url: str,
+    parameters: Dict[str, Any] = {},
+    token: Dict[str, Any] = None,
+) -> bytes:
+    """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
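+
+    Unlike `_oauth_signature`, the consumer and token secrets are
+    percent-encoded (with ``safe="~"``) before being joined into the
+    HMAC key, as the OAuth 1.0a revision of the signing process requires.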
+ + See http://oauth.net/core/1.0a/#signing_process + """ + parts = urllib.parse.urlparse(url) + scheme, netloc, path = parts[:3] + normalized_url = scheme.lower() + "://" + netloc.lower() + path + + base_elems = [] + base_elems.append(method.upper()) + base_elems.append(normalized_url) + base_elems.append( + "&".join( + "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items()) + ) + ) + + base_string = "&".join(_oauth_escape(e) for e in base_elems) + key_elems = [escape.utf8(urllib.parse.quote(consumer_token["secret"], safe="~"))] + key_elems.append( + escape.utf8(urllib.parse.quote(token["secret"], safe="~") if token else "") + ) + key = b"&".join(key_elems) + + hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1) + return binascii.b2a_base64(hash.digest())[:-1] + + +def _oauth_escape(val: Union[str, bytes]) -> str: + if isinstance(val, unicode_type): + val = val.encode("utf-8") + return urllib.parse.quote(val, safe="~") + + +def _oauth_parse_response(body: bytes) -> Dict[str, Any]: + # I can't find an officially-defined encoding for oauth responses and + # have never seen anyone use non-ascii. Leave the response in a byte + # string for python 2, and use utf8 on python 3. + body_str = escape.native_str(body) + p = urllib.parse.parse_qs(body_str, keep_blank_values=False) + token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0]) + + # Add the extra parameters the Provider included to the token + special = ("oauth_token", "oauth_token_secret") + token.update((k, p[k][0]) for k in p if k not in special) + return token diff --git a/server/www/packages/packages-linux/x64/tornado/autoreload.py b/server/www/packages/packages-linux/x64/tornado/autoreload.py index 7d69474..eddb033 100644 --- a/server/www/packages/packages-linux/x64/tornado/autoreload.py +++ b/server/www/packages/packages-linux/x64/tornado/autoreload.py @@ -1,356 +1,364 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Automatically restart the server when a source file is modified. - -Most applications should not access this module directly. Instead, -pass the keyword argument ``autoreload=True`` to the -`tornado.web.Application` constructor (or ``debug=True``, which -enables this setting and several others). This will enable autoreload -mode as well as checking for changes to templates and static -resources. Note that restarting is a destructive operation and any -requests in progress will be aborted when the process restarts. (If -you want to disable autoreload while using other debug-mode features, -pass both ``debug=True`` and ``autoreload=False``). - -This module can also be used as a command-line wrapper around scripts -such as unit test runners. See the `main` method for details. - -The command-line wrapper and Application debug modes can be used together. -This combination is encouraged as the wrapper catches syntax errors and -other import-time failures, while debug mode catches changes once -the server has started. 
- -This module depends on `.IOLoop`, so it will not work in WSGI applications -and Google App Engine. It also will not work correctly when `.HTTPServer`'s -multi-process mode is used. - -Reloading loses any Python interpreter command-line arguments (e.g. ``-u``) -because it re-executes Python using ``sys.executable`` and ``sys.argv``. -Additionally, modifying these variables will cause reloading to behave -incorrectly. - -""" - -from __future__ import absolute_import, division, print_function - -import os -import sys - -# sys.path handling -# ----------------- -# -# If a module is run with "python -m", the current directory (i.e. "") -# is automatically prepended to sys.path, but not if it is run as -# "path/to/file.py". The processing for "-m" rewrites the former to -# the latter, so subsequent executions won't have the same path as the -# original. -# -# Conversely, when run as path/to/file.py, the directory containing -# file.py gets added to the path, which can cause confusion as imports -# may become relative in spite of the future import. -# -# We address the former problem by reconstructing the original command -# line (Python >= 3.4) or by setting the $PYTHONPATH environment -# variable (Python < 3.4) before re-execution so the new process will -# see the correct path. We attempt to address the latter problem when -# tornado.autoreload is run as __main__. - -if __name__ == "__main__": - # This sys.path manipulation must come before our imports (as much - # as possible - if we introduced a tornado.sys or tornado.os - # module we'd be in trouble), or else our imports would become - # relative again despite the future import. - # - # There is a separate __main__ block at the end of the file to call main(). - if sys.path[0] == os.path.dirname(__file__): - del sys.path[0] - -import functools -import logging -import os -import pkgutil # type: ignore -import sys -import traceback -import types -import subprocess -import weakref - -from tornado import ioloop -from tornado.log import gen_log -from tornado import process -from tornado.util import exec_in - -try: - import signal -except ImportError: - signal = None - -# os.execv is broken on Windows and can't properly parse command line -# arguments and executable name if they contain whitespaces. subprocess -# fixes that behavior. -_has_execv = sys.platform != 'win32' - -_watched_files = set() -_reload_hooks = [] -_reload_attempted = False -_io_loops = weakref.WeakKeyDictionary() # type: ignore -_autoreload_is_main = False -_original_argv = None -_original_spec = None - - -def start(check_time=500): - """Begins watching source files for changes. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - """ - io_loop = ioloop.IOLoop.current() - if io_loop in _io_loops: - return - _io_loops[io_loop] = True - if len(_io_loops) > 1: - gen_log.warning("tornado.autoreload started more than once in the same process") - modify_times = {} - callback = functools.partial(_reload_on_update, modify_times) - scheduler = ioloop.PeriodicCallback(callback, check_time) - scheduler.start() - - -def wait(): - """Wait for a watched file to change, then restart the process. - - Intended to be used at the end of scripts like unit test runners, - to run the tests again after any source file changes (but see also - the command-line interface in `main`) - """ - io_loop = ioloop.IOLoop() - io_loop.add_callback(start) - io_loop.start() - - -def watch(filename): - """Add a file to the watch list. 
- - All imported modules are watched by default. - """ - _watched_files.add(filename) - - -def add_reload_hook(fn): - """Add a function to be called before reloading the process. - - Note that for open file and socket handles it is generally - preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or - ``tornado.platform.auto.set_close_exec``) instead - of using a reload hook to close them. - """ - _reload_hooks.append(fn) - - -def _reload_on_update(modify_times): - if _reload_attempted: - # We already tried to reload and it didn't work, so don't try again. - return - if process.task_id() is not None: - # We're in a child process created by fork_processes. If child - # processes restarted themselves, they'd all restart and then - # all call fork_processes again. - return - for module in list(sys.modules.values()): - # Some modules play games with sys.modules (e.g. email/__init__.py - # in the standard library), and occasionally this can cause strange - # failures in getattr. Just ignore anything that's not an ordinary - # module. - if not isinstance(module, types.ModuleType): - continue - path = getattr(module, "__file__", None) - if not path: - continue - if path.endswith(".pyc") or path.endswith(".pyo"): - path = path[:-1] - _check_file(modify_times, path) - for path in _watched_files: - _check_file(modify_times, path) - - -def _check_file(modify_times, path): - try: - modified = os.stat(path).st_mtime - except Exception: - return - if path not in modify_times: - modify_times[path] = modified - return - if modify_times[path] != modified: - gen_log.info("%s modified; restarting server", path) - _reload() - - -def _reload(): - global _reload_attempted - _reload_attempted = True - for fn in _reload_hooks: - fn() - if hasattr(signal, "setitimer"): - # Clear the alarm signal set by - # ioloop.set_blocking_log_threshold so it doesn't fire - # after the exec. - signal.setitimer(signal.ITIMER_REAL, 0, 0) - # sys.path fixes: see comments at top of file. If __main__.__spec__ - # exists, we were invoked with -m and the effective path is about to - # change on re-exec. Reconstruct the original command line to - # ensure that the new process sees the same path we did. If - # __spec__ is not available (Python < 3.4), check instead if - # sys.path[0] is an empty string and add the current directory to - # $PYTHONPATH. - if _autoreload_is_main: - spec = _original_spec - argv = _original_argv - else: - spec = getattr(sys.modules['__main__'], '__spec__', None) - argv = sys.argv - if spec: - argv = ['-m', spec.name] + argv[1:] - else: - path_prefix = '.' + os.pathsep - if (sys.path[0] == '' and - not os.environ.get("PYTHONPATH", "").startswith(path_prefix)): - os.environ["PYTHONPATH"] = (path_prefix + - os.environ.get("PYTHONPATH", "")) - if not _has_execv: - subprocess.Popen([sys.executable] + argv) - os._exit(0) - else: - try: - os.execv(sys.executable, [sys.executable] + argv) - except OSError: - # Mac OS X versions prior to 10.6 do not support execv in - # a process that contains multiple threads. Instead of - # re-executing in the current process, start a new one - # and cause the current process to exit. This isn't - # ideal since the new process is detached from the parent - # terminal and thus cannot easily be killed with ctrl-C, - # but it's better than not being able to autoreload at - # all. - # Unfortunately the errno returned in this case does not - # appear to be consistent, so we can't easily check for - # this error specifically. 
- os.spawnv(os.P_NOWAIT, sys.executable, [sys.executable] + argv) - # At this point the IOLoop has been closed and finally - # blocks will experience errors if we allow the stack to - # unwind, so just exit uncleanly. - os._exit(0) - - -_USAGE = """\ -Usage: - python -m tornado.autoreload -m module.to.run [args...] - python -m tornado.autoreload path/to/script.py [args...] -""" - - -def main(): - """Command-line wrapper to re-run a script whenever its source changes. - - Scripts may be specified by filename or module name:: - - python -m tornado.autoreload -m tornado.test.runtests - python -m tornado.autoreload tornado/test/runtests.py - - Running a script with this wrapper is similar to calling - `tornado.autoreload.wait` at the end of the script, but this wrapper - can catch import-time problems like syntax errors that would otherwise - prevent the script from reaching its call to `wait`. - """ - # Remember that we were launched with autoreload as main. - # The main module can be tricky; set the variables both in our globals - # (which may be __main__) and the real importable version. - import tornado.autoreload - global _autoreload_is_main - global _original_argv, _original_spec - tornado.autoreload._autoreload_is_main = _autoreload_is_main = True - original_argv = sys.argv - tornado.autoreload._original_argv = _original_argv = original_argv - original_spec = getattr(sys.modules['__main__'], '__spec__', None) - tornado.autoreload._original_spec = _original_spec = original_spec - sys.argv = sys.argv[:] - if len(sys.argv) >= 3 and sys.argv[1] == "-m": - mode = "module" - module = sys.argv[2] - del sys.argv[1:3] - elif len(sys.argv) >= 2: - mode = "script" - script = sys.argv[1] - sys.argv = sys.argv[1:] - else: - print(_USAGE, file=sys.stderr) - sys.exit(1) - - try: - if mode == "module": - import runpy - runpy.run_module(module, run_name="__main__", alter_sys=True) - elif mode == "script": - with open(script) as f: - # Execute the script in our namespace instead of creating - # a new one so that something that tries to import __main__ - # (e.g. the unittest module) will see names defined in the - # script instead of just those defined in this module. - global __file__ - __file__ = script - # If __package__ is defined, imports may be incorrectly - # interpreted as relative to this module. - global __package__ - del __package__ - exec_in(f.read(), globals(), globals()) - except SystemExit as e: - logging.basicConfig() - gen_log.info("Script exited with status %s", e.code) - except Exception as e: - logging.basicConfig() - gen_log.warning("Script exited with uncaught exception", exc_info=True) - # If an exception occurred at import time, the file with the error - # never made it into sys.modules and so we won't know to watch it. - # Just to make sure we've covered everything, walk the stack trace - # from the exception and watch every file. - for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]): - watch(filename) - if isinstance(e, SyntaxError): - # SyntaxErrors are special: their innermost stack frame is fake - # so extract_tb won't see it and we have to get the filename - # from the exception object. - watch(e.filename) - else: - logging.basicConfig() - gen_log.info("Script exited normally") - # restore sys.argv so subsequent executions will include autoreload - sys.argv = original_argv - - if mode == 'module': - # runpy did a fake import of the module as __main__, but now it's - # no longer in sys.modules. Figure out where it is and watch it. 
- loader = pkgutil.get_loader(module) - if loader is not None: - watch(loader.get_filename()) - - wait() - - -if __name__ == "__main__": - # See also the other __main__ block at the top of the file, which modifies - # sys.path before our imports - main() +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Automatically restart the server when a source file is modified. + +Most applications should not access this module directly. Instead, +pass the keyword argument ``autoreload=True`` to the +`tornado.web.Application` constructor (or ``debug=True``, which +enables this setting and several others). This will enable autoreload +mode as well as checking for changes to templates and static +resources. Note that restarting is a destructive operation and any +requests in progress will be aborted when the process restarts. (If +you want to disable autoreload while using other debug-mode features, +pass both ``debug=True`` and ``autoreload=False``). + +This module can also be used as a command-line wrapper around scripts +such as unit test runners. See the `main` method for details. + +The command-line wrapper and Application debug modes can be used together. +This combination is encouraged as the wrapper catches syntax errors and +other import-time failures, while debug mode catches changes once +the server has started. + +This module will not work correctly when `.HTTPServer`'s multi-process +mode is used. + +Reloading loses any Python interpreter command-line arguments (e.g. ``-u``) +because it re-executes Python using ``sys.executable`` and ``sys.argv``. +Additionally, modifying these variables will cause reloading to behave +incorrectly. + +""" + +import os +import sys + +# sys.path handling +# ----------------- +# +# If a module is run with "python -m", the current directory (i.e. "") +# is automatically prepended to sys.path, but not if it is run as +# "path/to/file.py". The processing for "-m" rewrites the former to +# the latter, so subsequent executions won't have the same path as the +# original. +# +# Conversely, when run as path/to/file.py, the directory containing +# file.py gets added to the path, which can cause confusion as imports +# may become relative in spite of the future import. +# +# We address the former problem by reconstructing the original command +# line (Python >= 3.4) or by setting the $PYTHONPATH environment +# variable (Python < 3.4) before re-execution so the new process will +# see the correct path. We attempt to address the latter problem when +# tornado.autoreload is run as __main__. + +if __name__ == "__main__": + # This sys.path manipulation must come before our imports (as much + # as possible - if we introduced a tornado.sys or tornado.os + # module we'd be in trouble), or else our imports would become + # relative again despite the future import. + # + # There is a separate __main__ block at the end of the file to call main(). 
+ if sys.path[0] == os.path.dirname(__file__): + del sys.path[0] + +import functools +import logging +import os +import pkgutil # type: ignore +import sys +import traceback +import types +import subprocess +import weakref + +from tornado import ioloop +from tornado.log import gen_log +from tornado import process +from tornado.util import exec_in + +try: + import signal +except ImportError: + signal = None # type: ignore + +import typing +from typing import Callable, Dict + +if typing.TYPE_CHECKING: + from typing import List, Optional, Union # noqa: F401 + +# os.execv is broken on Windows and can't properly parse command line +# arguments and executable name if they contain whitespaces. subprocess +# fixes that behavior. +_has_execv = sys.platform != "win32" + +_watched_files = set() +_reload_hooks = [] +_reload_attempted = False +_io_loops = weakref.WeakKeyDictionary() # type: ignore +_autoreload_is_main = False +_original_argv = None # type: Optional[List[str]] +_original_spec = None + + +def start(check_time: int = 500) -> None: + """Begins watching source files for changes. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + """ + io_loop = ioloop.IOLoop.current() + if io_loop in _io_loops: + return + _io_loops[io_loop] = True + if len(_io_loops) > 1: + gen_log.warning("tornado.autoreload started more than once in the same process") + modify_times = {} # type: Dict[str, float] + callback = functools.partial(_reload_on_update, modify_times) + scheduler = ioloop.PeriodicCallback(callback, check_time) + scheduler.start() + + +def wait() -> None: + """Wait for a watched file to change, then restart the process. + + Intended to be used at the end of scripts like unit test runners, + to run the tests again after any source file changes (but see also + the command-line interface in `main`) + """ + io_loop = ioloop.IOLoop() + io_loop.add_callback(start) + io_loop.start() + + +def watch(filename: str) -> None: + """Add a file to the watch list. + + All imported modules are watched by default. + """ + _watched_files.add(filename) + + +def add_reload_hook(fn: Callable[[], None]) -> None: + """Add a function to be called before reloading the process. + + Note that for open file and socket handles it is generally + preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or + ``tornado.platform.auto.set_close_exec``) instead + of using a reload hook to close them. + """ + _reload_hooks.append(fn) + + +def _reload_on_update(modify_times: Dict[str, float]) -> None: + if _reload_attempted: + # We already tried to reload and it didn't work, so don't try again. + return + if process.task_id() is not None: + # We're in a child process created by fork_processes. If child + # processes restarted themselves, they'd all restart and then + # all call fork_processes again. + return + for module in list(sys.modules.values()): + # Some modules play games with sys.modules (e.g. email/__init__.py + # in the standard library), and occasionally this can cause strange + # failures in getattr. Just ignore anything that's not an ordinary + # module. 
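The module-level API defined above (`watch`, `add_reload_hook`, `start`) can also be driven directly; a minimal sketch, assuming an invented config filename and an invented hook:

```python
# Hypothetical usage of watch()/add_reload_hook()/start() from above:
# watch one extra non-Python file and log just before the re-exec.
import tornado.autoreload
import tornado.ioloop

tornado.autoreload.watch("app.conf")  # invented filename; stat failures are ignored
tornado.autoreload.add_reload_hook(lambda: print("about to restart"))

tornado.autoreload.start()  # polls watched files every 500 ms by default
tornado.ioloop.IOLoop.current().start()
```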
+ if not isinstance(module, types.ModuleType): + continue + path = getattr(module, "__file__", None) + if not path: + continue + if path.endswith(".pyc") or path.endswith(".pyo"): + path = path[:-1] + _check_file(modify_times, path) + for path in _watched_files: + _check_file(modify_times, path) + + +def _check_file(modify_times: Dict[str, float], path: str) -> None: + try: + modified = os.stat(path).st_mtime + except Exception: + return + if path not in modify_times: + modify_times[path] = modified + return + if modify_times[path] != modified: + gen_log.info("%s modified; restarting server", path) + _reload() + + +def _reload() -> None: + global _reload_attempted + _reload_attempted = True + for fn in _reload_hooks: + fn() + if hasattr(signal, "setitimer"): + # Clear the alarm signal set by + # ioloop.set_blocking_log_threshold so it doesn't fire + # after the exec. + signal.setitimer(signal.ITIMER_REAL, 0, 0) + # sys.path fixes: see comments at top of file. If __main__.__spec__ + # exists, we were invoked with -m and the effective path is about to + # change on re-exec. Reconstruct the original command line to + # ensure that the new process sees the same path we did. If + # __spec__ is not available (Python < 3.4), check instead if + # sys.path[0] is an empty string and add the current directory to + # $PYTHONPATH. + if _autoreload_is_main: + assert _original_argv is not None + spec = _original_spec + argv = _original_argv + else: + spec = getattr(sys.modules["__main__"], "__spec__", None) + argv = sys.argv + if spec: + argv = ["-m", spec.name] + argv[1:] + else: + path_prefix = "." + os.pathsep + if sys.path[0] == "" and not os.environ.get("PYTHONPATH", "").startswith( + path_prefix + ): + os.environ["PYTHONPATH"] = path_prefix + os.environ.get("PYTHONPATH", "") + if not _has_execv: + subprocess.Popen([sys.executable] + argv) + os._exit(0) + else: + try: + os.execv(sys.executable, [sys.executable] + argv) + except OSError: + # Mac OS X versions prior to 10.6 do not support execv in + # a process that contains multiple threads. Instead of + # re-executing in the current process, start a new one + # and cause the current process to exit. This isn't + # ideal since the new process is detached from the parent + # terminal and thus cannot easily be killed with ctrl-C, + # but it's better than not being able to autoreload at + # all. + # Unfortunately the errno returned in this case does not + # appear to be consistent, so we can't easily check for + # this error specifically. + os.spawnv( # type: ignore + os.P_NOWAIT, sys.executable, [sys.executable] + argv + ) + # At this point the IOLoop has been closed and finally + # blocks will experience errors if we allow the stack to + # unwind, so just exit uncleanly. + os._exit(0) + + +_USAGE = """\ +Usage: + python -m tornado.autoreload -m module.to.run [args...] + python -m tornado.autoreload path/to/script.py [args...] +""" + + +def main() -> None: + """Command-line wrapper to re-run a script whenever its source changes. + + Scripts may be specified by filename or module name:: + + python -m tornado.autoreload -m tornado.test.runtests + python -m tornado.autoreload tornado/test/runtests.py + + Running a script with this wrapper is similar to calling + `tornado.autoreload.wait` at the end of the script, but this wrapper + can catch import-time problems like syntax errors that would otherwise + prevent the script from reaching its call to `wait`. + """ + # Remember that we were launched with autoreload as main. 
+ # The main module can be tricky; set the variables both in our globals + # (which may be __main__) and the real importable version. + import tornado.autoreload + + global _autoreload_is_main + global _original_argv, _original_spec + tornado.autoreload._autoreload_is_main = _autoreload_is_main = True + original_argv = sys.argv + tornado.autoreload._original_argv = _original_argv = original_argv + original_spec = getattr(sys.modules["__main__"], "__spec__", None) + tornado.autoreload._original_spec = _original_spec = original_spec + sys.argv = sys.argv[:] + if len(sys.argv) >= 3 and sys.argv[1] == "-m": + mode = "module" + module = sys.argv[2] + del sys.argv[1:3] + elif len(sys.argv) >= 2: + mode = "script" + script = sys.argv[1] + sys.argv = sys.argv[1:] + else: + print(_USAGE, file=sys.stderr) + sys.exit(1) + + try: + if mode == "module": + import runpy + + runpy.run_module(module, run_name="__main__", alter_sys=True) + elif mode == "script": + with open(script) as f: + # Execute the script in our namespace instead of creating + # a new one so that something that tries to import __main__ + # (e.g. the unittest module) will see names defined in the + # script instead of just those defined in this module. + global __file__ + __file__ = script + # If __package__ is defined, imports may be incorrectly + # interpreted as relative to this module. + global __package__ + del __package__ + exec_in(f.read(), globals(), globals()) + except SystemExit as e: + logging.basicConfig() + gen_log.info("Script exited with status %s", e.code) + except Exception as e: + logging.basicConfig() + gen_log.warning("Script exited with uncaught exception", exc_info=True) + # If an exception occurred at import time, the file with the error + # never made it into sys.modules and so we won't know to watch it. + # Just to make sure we've covered everything, walk the stack trace + # from the exception and watch every file. + for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]): + watch(filename) + if isinstance(e, SyntaxError): + # SyntaxErrors are special: their innermost stack frame is fake + # so extract_tb won't see it and we have to get the filename + # from the exception object. + watch(e.filename) + else: + logging.basicConfig() + gen_log.info("Script exited normally") + # restore sys.argv so subsequent executions will include autoreload + sys.argv = original_argv + + if mode == "module": + # runpy did a fake import of the module as __main__, but now it's + # no longer in sys.modules. Figure out where it is and watch it. + loader = pkgutil.get_loader(module) + if loader is not None: + watch(loader.get_filename()) # type: ignore + + wait() + + +if __name__ == "__main__": + # See also the other __main__ block at the top of the file, which modifies + # sys.path before our imports + main() diff --git a/server/www/packages/packages-linux/x64/tornado/concurrent.py b/server/www/packages/packages-linux/x64/tornado/concurrent.py index f7e6bcc..63c2e77 100644 --- a/server/www/packages/packages-linux/x64/tornado/concurrent.py +++ b/server/www/packages/packages-linux/x64/tornado/concurrent.py @@ -1,660 +1,264 @@ -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Utilities for working with ``Future`` objects. - -``Futures`` are a pattern for concurrent programming introduced in -Python 3.2 in the `concurrent.futures` package, and also adopted (in a -slightly different form) in Python 3.4's `asyncio` package. This -package defines a ``Future`` class that is an alias for `asyncio.Future` -when available, and a compatible implementation for older versions of -Python. It also includes some utility functions for interacting with -``Future`` objects. - -While this package is an important part of Tornado's internal -implementation, applications rarely need to interact with it -directly. -""" -from __future__ import absolute_import, division, print_function - -import functools -import platform -import textwrap -import traceback -import sys -import warnings - -from tornado.log import app_log -from tornado.stack_context import ExceptionStackContext, wrap -from tornado.util import raise_exc_info, ArgReplacer, is_finalizing - -try: - from concurrent import futures -except ImportError: - futures = None - -try: - import asyncio -except ImportError: - asyncio = None - -try: - import typing -except ImportError: - typing = None - - -# Can the garbage collector handle cycles that include __del__ methods? -# This is true in cpython beginning with version 3.4 (PEP 442). -_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and - sys.version_info >= (3, 4)) - - -class ReturnValueIgnoredError(Exception): - pass - -# This class and associated code in the future object is derived -# from the Trollius project, a backport of asyncio to Python 2.x - 3.x - - -class _TracebackLogger(object): - """Helper to log a traceback upon destruction if not cleared. - - This solves a nasty problem with Futures and Tasks that have an - exception set: if nobody asks for the exception, the exception is - never logged. This violates the Zen of Python: 'Errors should - never pass silently. Unless explicitly silenced.' - - However, we don't want to log the exception as soon as - set_exception() is called: if the calling code is written - properly, it will get the exception and handle it properly. But - we *do* want to log it if result() or exception() was never called - -- otherwise developers waste a lot of time wondering why their - buggy code fails silently. - - An earlier attempt added a __del__() method to the Future class - itself, but this backfired because the presence of __del__() - prevents garbage collection from breaking cycles. A way out of - this catch-22 is to avoid having a __del__() method on the Future - class itself, but instead to have a reference to a helper object - with a __del__() method that logs the traceback, where we ensure - that the helper object doesn't participate in cycles, and only the - Future has a reference to it. - - The helper object is added when set_exception() is called. When - the Future is collected, and the helper is present, the helper - object is also collected, and its __del__() method will log the - traceback. 
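The `_TracebackLogger` docstring above describes a reusable pattern: keep `__del__` off the cycle-prone object and put it on a small helper to which the owner holds the only reference. A stripped-down illustration of just that pattern, with invented names (this is not Tornado's actual class):

```python
# Invented names; demonstrates only the helper-with-__del__ pattern.
# The owner holds the sole reference to the helper, so collecting the
# owner collects the helper, whose __del__ reports anything never cleared.
class _LeakReporter:
    def __init__(self, message):
        self.message = message

    def clear(self):
        self.message = None

    def __del__(self):
        if self.message is not None:
            print("never retrieved:", self.message)

class Result:
    def __init__(self, error):
        self._reporter = _LeakReporter(error)

    def retrieve(self):
        self._reporter.clear()  # observed; suppresses the report

r = Result("boom")
del r  # on CPython this prints "never retrieved: boom" (retrieve() was never called)
```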
When the Future's result() or exception() method is - called (and a helper object is present), it removes the the helper - object, after calling its clear() method to prevent it from - logging. - - One downside is that we do a fair amount of work to extract the - traceback from the exception, even when it is never logged. It - would seem cheaper to just store the exception object, but that - references the traceback, which references stack frames, which may - reference the Future, which references the _TracebackLogger, and - then the _TracebackLogger would be included in a cycle, which is - what we're trying to avoid! As an optimization, we don't - immediately format the exception; we only do the work when - activate() is called, which call is delayed until after all the - Future's callbacks have run. Since usually a Future has at least - one callback (typically set by 'yield From') and usually that - callback extracts the callback, thereby removing the need to - format the exception. - - PS. I don't claim credit for this solution. I first heard of it - in a discussion about closing files when they are collected. - """ - - __slots__ = ('exc_info', 'formatted_tb') - - def __init__(self, exc_info): - self.exc_info = exc_info - self.formatted_tb = None - - def activate(self): - exc_info = self.exc_info - if exc_info is not None: - self.exc_info = None - self.formatted_tb = traceback.format_exception(*exc_info) - - def clear(self): - self.exc_info = None - self.formatted_tb = None - - def __del__(self, is_finalizing=is_finalizing): - if not is_finalizing() and self.formatted_tb: - app_log.error('Future exception was never retrieved: %s', - ''.join(self.formatted_tb).rstrip()) - - -class Future(object): - """Placeholder for an asynchronous result. - - A ``Future`` encapsulates the result of an asynchronous - operation. In synchronous applications ``Futures`` are used - to wait for the result from a thread or process pool; in - Tornado they are normally used with `.IOLoop.add_future` or by - yielding them in a `.gen.coroutine`. - - `tornado.concurrent.Future` is an alias for `asyncio.Future` when - that package is available (Python 3.4+). Unlike - `concurrent.futures.Future`, the ``Futures`` used by Tornado and - `asyncio` are not thread-safe (and therefore faster for use with - single-threaded event loops). - - In addition to ``exception`` and ``set_exception``, Tornado's - ``Future`` implementation supports storing an ``exc_info`` triple - to support better tracebacks on Python 2. To set an ``exc_info`` - triple, use `future_set_exc_info`, and to retrieve one, call - `result()` (which will raise it). - - .. versionchanged:: 4.0 - `tornado.concurrent.Future` is always a thread-unsafe ``Future`` - with support for the ``exc_info`` methods. Previously it would - be an alias for the thread-safe `concurrent.futures.Future` - if that package was available and fall back to the thread-unsafe - implementation if it was not. - - .. versionchanged:: 4.1 - If a `.Future` contains an error but that error is never observed - (by calling ``result()``, ``exception()``, or ``exc_info()``), - a stack trace will be logged when the `.Future` is garbage collected. - This normally indicates an error in the application, but in cases - where it results in undesired logging it may be necessary to - suppress the logging by ensuring that the exception is observed: - ``f.add_done_callback(lambda f: f.exception())``. - - .. versionchanged:: 5.0 - - This class was previoiusly available under the name - ``TracebackFuture``. 
This name, which was deprecated since - version 4.0, has been removed. When `asyncio` is available - ``tornado.concurrent.Future`` is now an alias for - `asyncio.Future`. Like `asyncio.Future`, callbacks are now - always scheduled on the `.IOLoop` and are never run - synchronously. - - """ - def __init__(self): - self._done = False - self._result = None - self._exc_info = None - - self._log_traceback = False # Used for Python >= 3.4 - self._tb_logger = None # Used for Python <= 3.3 - - self._callbacks = [] - - # Implement the Python 3.5 Awaitable protocol if possible - # (we can't use return and yield together until py33). - if sys.version_info >= (3, 3): - exec(textwrap.dedent(""" - def __await__(self): - return (yield self) - """)) - else: - # Py2-compatible version for use with cython. - def __await__(self): - result = yield self - # StopIteration doesn't take args before py33, - # but Cython recognizes the args tuple. - e = StopIteration() - e.args = (result,) - raise e - - def cancel(self): - """Cancel the operation, if possible. - - Tornado ``Futures`` do not support cancellation, so this method always - returns False. - """ - return False - - def cancelled(self): - """Returns True if the operation has been cancelled. - - Tornado ``Futures`` do not support cancellation, so this method - always returns False. - """ - return False - - def running(self): - """Returns True if this operation is currently running.""" - return not self._done - - def done(self): - """Returns True if the future has finished running.""" - return self._done - - def _clear_tb_log(self): - self._log_traceback = False - if self._tb_logger is not None: - self._tb_logger.clear() - self._tb_logger = None - - def result(self, timeout=None): - """If the operation succeeded, return its result. If it failed, - re-raise its exception. - - This method takes a ``timeout`` argument for compatibility with - `concurrent.futures.Future` but it is an error to call it - before the `Future` is done, so the ``timeout`` is never used. - """ - self._clear_tb_log() - if self._result is not None: - return self._result - if self._exc_info is not None: - try: - raise_exc_info(self._exc_info) - finally: - self = None - self._check_done() - return self._result - - def exception(self, timeout=None): - """If the operation raised an exception, return the `Exception` - object. Otherwise returns None. - - This method takes a ``timeout`` argument for compatibility with - `concurrent.futures.Future` but it is an error to call it - before the `Future` is done, so the ``timeout`` is never used. - """ - self._clear_tb_log() - if self._exc_info is not None: - return self._exc_info[1] - else: - self._check_done() - return None - - def add_done_callback(self, fn): - """Attaches the given callback to the `Future`. - - It will be invoked with the `Future` as its argument when the Future - has finished running and its result is available. In Tornado - consider using `.IOLoop.add_future` instead of calling - `add_done_callback` directly. - """ - if self._done: - from tornado.ioloop import IOLoop - IOLoop.current().add_callback(fn, self) - else: - self._callbacks.append(fn) - - def set_result(self, result): - """Sets the result of a ``Future``. - - It is undefined to call any of the ``set`` methods more than once - on the same object. 
- """ - self._result = result - self._set_done() - - def set_exception(self, exception): - """Sets the exception of a ``Future.``""" - self.set_exc_info( - (exception.__class__, - exception, - getattr(exception, '__traceback__', None))) - - def exc_info(self): - """Returns a tuple in the same format as `sys.exc_info` or None. - - .. versionadded:: 4.0 - """ - self._clear_tb_log() - return self._exc_info - - def set_exc_info(self, exc_info): - """Sets the exception information of a ``Future.`` - - Preserves tracebacks on Python 2. - - .. versionadded:: 4.0 - """ - self._exc_info = exc_info - self._log_traceback = True - if not _GC_CYCLE_FINALIZERS: - self._tb_logger = _TracebackLogger(exc_info) - - try: - self._set_done() - finally: - # Activate the logger after all callbacks have had a - # chance to call result() or exception(). - if self._log_traceback and self._tb_logger is not None: - self._tb_logger.activate() - self._exc_info = exc_info - - def _check_done(self): - if not self._done: - raise Exception("DummyFuture does not support blocking for results") - - def _set_done(self): - self._done = True - if self._callbacks: - from tornado.ioloop import IOLoop - loop = IOLoop.current() - for cb in self._callbacks: - loop.add_callback(cb, self) - self._callbacks = None - - # On Python 3.3 or older, objects with a destructor part of a reference - # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to - # the PEP 442. - if _GC_CYCLE_FINALIZERS: - def __del__(self, is_finalizing=is_finalizing): - if is_finalizing() or not self._log_traceback: - # set_exception() was not called, or result() or exception() - # has consumed the exception - return - - tb = traceback.format_exception(*self._exc_info) - - app_log.error('Future %r exception was never retrieved: %s', - self, ''.join(tb).rstrip()) - - -if asyncio is not None: - Future = asyncio.Future # noqa - -if futures is None: - FUTURES = Future # type: typing.Union[type, typing.Tuple[type, ...]] -else: - FUTURES = (futures.Future, Future) - - -def is_future(x): - return isinstance(x, FUTURES) - - -class DummyExecutor(object): - def submit(self, fn, *args, **kwargs): - future = Future() - try: - future_set_result_unless_cancelled(future, fn(*args, **kwargs)) - except Exception: - future_set_exc_info(future, sys.exc_info()) - return future - - def shutdown(self, wait=True): - pass - - -dummy_executor = DummyExecutor() - - -def run_on_executor(*args, **kwargs): - """Decorator to run a synchronous method asynchronously on an executor. - - The decorated method may be called with a ``callback`` keyword - argument and returns a future. - - The executor to be used is determined by the ``executor`` - attributes of ``self``. To use a different attribute name, pass a - keyword argument to the decorator:: - - @run_on_executor(executor='_thread_pool') - def foo(self): - pass - - This decorator should not be confused with the similarly-named - `.IOLoop.run_in_executor`. In general, using ``run_in_executor`` - when *calling* a blocking method is recommended instead of using - this decorator when *defining* a method. If compatibility with older - versions of Tornado is required, consider defining an executor - and using ``executor.submit()`` at the call site. - - .. versionchanged:: 4.2 - Added keyword arguments to use alternative attributes. - - .. versionchanged:: 5.0 - Always uses the current IOLoop instead of ``self.io_loop``. - - .. 
versionchanged:: 5.1 - Returns a `.Future` compatible with ``await`` instead of a - `concurrent.futures.Future`. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in - 6.0. The decorator itself is discouraged in new code but will - not be removed in 6.0. - """ - def run_on_executor_decorator(fn): - executor = kwargs.get("executor", "executor") - - @functools.wraps(fn) - def wrapper(self, *args, **kwargs): - callback = kwargs.pop("callback", None) - async_future = Future() - conc_future = getattr(self, executor).submit(fn, self, *args, **kwargs) - chain_future(conc_future, async_future) - if callback: - warnings.warn("callback arguments are deprecated, use the returned Future instead", - DeprecationWarning) - from tornado.ioloop import IOLoop - IOLoop.current().add_future( - async_future, lambda future: callback(future.result())) - return async_future - return wrapper - if args and kwargs: - raise ValueError("cannot combine positional and keyword args") - if len(args) == 1: - return run_on_executor_decorator(args[0]) - elif len(args) != 0: - raise ValueError("expected 1 argument, got %d", len(args)) - return run_on_executor_decorator - - -_NO_RESULT = object() - - -def return_future(f): - """Decorator to make a function that returns via callback return a - `Future`. - - This decorator was provided to ease the transition from - callback-oriented code to coroutines. It is not recommended for - new code. - - The wrapped function should take a ``callback`` keyword argument - and invoke it with one argument when it has finished. To signal failure, - the function can simply raise an exception (which will be - captured by the `.StackContext` and passed along to the ``Future``). - - From the caller's perspective, the callback argument is optional. - If one is given, it will be invoked when the function is complete - with ``Future.result()`` as an argument. If the function fails, the - callback will not be run and an exception will be raised into the - surrounding `.StackContext`. - - If no callback is given, the caller should use the ``Future`` to - wait for the function to complete (perhaps by yielding it in a - coroutine, or passing it to `.IOLoop.add_future`). - - Usage: - - .. testcode:: - - @return_future - def future_func(arg1, arg2, callback): - # Do stuff (possibly asynchronous) - callback(result) - - async def caller(): - await future_func(arg1, arg2) - - .. - - Note that ``@return_future`` and ``@gen.engine`` can be applied to the - same function, provided ``@return_future`` appears first. However, - consider using ``@gen.coroutine`` instead of this combination. - - .. versionchanged:: 5.1 - - Now raises a `.DeprecationWarning` if a callback argument is passed to - the decorated function and deprecation warnings are enabled. - - .. deprecated:: 5.1 - - This decorator will be removed in Tornado 6.0. New code should - use coroutines directly instead of wrapping callback-based code - with this decorator. Interactions with non-Tornado - callback-based code should be managed explicitly to avoid - relying on the `.ExceptionStackContext` built into this - decorator. - """ - warnings.warn("@return_future is deprecated, use coroutines instead", - DeprecationWarning) - return _non_deprecated_return_future(f, warn=True) - - -def _non_deprecated_return_future(f, warn=False): - # Allow auth.py to use this decorator without triggering - # deprecation warnings. This will go away once auth.py has removed - # its legacy interfaces in 6.0. 
- replacer = ArgReplacer(f, 'callback') - - @functools.wraps(f) - def wrapper(*args, **kwargs): - future = Future() - callback, args, kwargs = replacer.replace( - lambda value=_NO_RESULT: future_set_result_unless_cancelled(future, value), - args, kwargs) - - def handle_error(typ, value, tb): - future_set_exc_info(future, (typ, value, tb)) - return True - exc_info = None - esc = ExceptionStackContext(handle_error, delay_warning=True) - with esc: - if not warn: - # HACK: In non-deprecated mode (only used in auth.py), - # suppress the warning entirely. Since this is added - # in a 5.1 patch release and already removed in 6.0 - # I'm prioritizing a minimial change instead of a - # clean solution. - esc.delay_warning = False - try: - result = f(*args, **kwargs) - if result is not None: - raise ReturnValueIgnoredError( - "@return_future should not be used with functions " - "that return values") - except: - exc_info = sys.exc_info() - raise - if exc_info is not None: - # If the initial synchronous part of f() raised an exception, - # go ahead and raise it to the caller directly without waiting - # for them to inspect the Future. - future.result() - - # If the caller passed in a callback, schedule it to be called - # when the future resolves. It is important that this happens - # just before we return the future, or else we risk confusing - # stack contexts with multiple exceptions (one here with the - # immediate exception, and again when the future resolves and - # the callback triggers its exception by calling future.result()). - if callback is not None: - warnings.warn("callback arguments are deprecated, use the returned Future instead", - DeprecationWarning) - - def run_callback(future): - result = future.result() - if result is _NO_RESULT: - callback() - else: - callback(future.result()) - future_add_done_callback(future, wrap(run_callback)) - return future - return wrapper - - -def chain_future(a, b): - """Chain two futures together so that when one completes, so does the other. - - The result (success or failure) of ``a`` will be copied to ``b``, unless - ``b`` has already been completed or cancelled by the time ``a`` finishes. - - .. versionchanged:: 5.0 - - Now accepts both Tornado/asyncio `Future` objects and - `concurrent.futures.Future`. - - """ - def copy(future): - assert future is a - if b.done(): - return - if (hasattr(a, 'exc_info') and - a.exc_info() is not None): - future_set_exc_info(b, a.exc_info()) - elif a.exception() is not None: - b.set_exception(a.exception()) - else: - b.set_result(a.result()) - if isinstance(a, Future): - future_add_done_callback(a, copy) - else: - # concurrent.futures.Future - from tornado.ioloop import IOLoop - IOLoop.current().add_future(a, copy) - - -def future_set_result_unless_cancelled(future, value): - """Set the given ``value`` as the `Future`'s result, if not cancelled. - - Avoids asyncio.InvalidStateError when calling set_result() on - a cancelled `asyncio.Future`. - - .. versionadded:: 5.0 - """ - if not future.cancelled(): - future.set_result(value) - - -def future_set_exc_info(future, exc_info): - """Set the given ``exc_info`` as the `Future`'s exception. - - Understands both `asyncio.Future` and Tornado's extensions to - enable better tracebacks on Python 2. - - .. 
versionadded:: 5.0 - """ - if hasattr(future, 'set_exc_info'): - # Tornado's Future - future.set_exc_info(exc_info) - else: - # asyncio.Future - future.set_exception(exc_info[1]) - - -def future_add_done_callback(future, callback): - """Arrange to call ``callback`` when ``future`` is complete. - - ``callback`` is invoked with one argument, the ``future``. - - If ``future`` is already done, ``callback`` is invoked immediately. - This may differ from the behavior of ``Future.add_done_callback``, - which makes no such guarantee. - - .. versionadded:: 5.0 - """ - if future.done(): - callback(future) - else: - future.add_done_callback(callback) +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Utilities for working with ``Future`` objects. + +Tornado previously provided its own ``Future`` class, but now uses +`asyncio.Future`. This module contains utility functions for working +with `asyncio.Future` in a way that is backwards-compatible with +Tornado's old ``Future`` implementation. + +While this module is an important part of Tornado's internal +implementation, applications rarely need to interact with it +directly. + +""" + +import asyncio +from concurrent import futures +import functools +import sys +import types + +from tornado.log import app_log + +import typing +from typing import Any, Callable, Optional, Tuple, Union + +_T = typing.TypeVar("_T") + + +class ReturnValueIgnoredError(Exception): + # No longer used; was previously used by @return_future + pass + + +Future = asyncio.Future + +FUTURES = (futures.Future, Future) + + +def is_future(x: Any) -> bool: + return isinstance(x, FUTURES) + + +class DummyExecutor(futures.Executor): + def submit( + self, fn: Callable[..., _T], *args: Any, **kwargs: Any + ) -> "futures.Future[_T]": + future = futures.Future() # type: futures.Future[_T] + try: + future_set_result_unless_cancelled(future, fn(*args, **kwargs)) + except Exception: + future_set_exc_info(future, sys.exc_info()) + return future + + def shutdown(self, wait: bool = True) -> None: + pass + + +dummy_executor = DummyExecutor() + + +def run_on_executor(*args: Any, **kwargs: Any) -> Callable: + """Decorator to run a synchronous method asynchronously on an executor. + + The decorated method may be called with a ``callback`` keyword + argument and returns a future. + + The executor to be used is determined by the ``executor`` + attributes of ``self``. To use a different attribute name, pass a + keyword argument to the decorator:: + + @run_on_executor(executor='_thread_pool') + def foo(self): + pass + + This decorator should not be confused with the similarly-named + `.IOLoop.run_in_executor`. In general, using ``run_in_executor`` + when *calling* a blocking method is recommended instead of using + this decorator when *defining* a method. If compatibility with older + versions of Tornado is required, consider defining an executor + and using ``executor.submit()`` at the call site. + + .. 
versionchanged:: 4.2 + Added keyword arguments to use alternative attributes. + + .. versionchanged:: 5.0 + Always uses the current IOLoop instead of ``self.io_loop``. + + .. versionchanged:: 5.1 + Returns a `.Future` compatible with ``await`` instead of a + `concurrent.futures.Future`. + + .. deprecated:: 5.1 + + The ``callback`` argument is deprecated and will be removed in + 6.0. The decorator itself is discouraged in new code but will + not be removed in 6.0. + + .. versionchanged:: 6.0 + + The ``callback`` argument was removed. + """ + # Fully type-checking decorators is tricky, and this one is + # discouraged anyway so it doesn't have all the generic magic. + def run_on_executor_decorator(fn: Callable) -> Callable[..., Future]: + executor = kwargs.get("executor", "executor") + + @functools.wraps(fn) + def wrapper(self: Any, *args: Any, **kwargs: Any) -> Future: + async_future = Future() # type: Future + conc_future = getattr(self, executor).submit(fn, self, *args, **kwargs) + chain_future(conc_future, async_future) + return async_future + + return wrapper + + if args and kwargs: + raise ValueError("cannot combine positional and keyword args") + if len(args) == 1: + return run_on_executor_decorator(args[0]) + elif len(args) != 0: + raise ValueError("expected 1 argument, got %d", len(args)) + return run_on_executor_decorator + + +_NO_RESULT = object() + + +def chain_future(a: "Future[_T]", b: "Future[_T]") -> None: + """Chain two futures together so that when one completes, so does the other. + + The result (success or failure) of ``a`` will be copied to ``b``, unless + ``b`` has already been completed or cancelled by the time ``a`` finishes. + + .. versionchanged:: 5.0 + + Now accepts both Tornado/asyncio `Future` objects and + `concurrent.futures.Future`. + + """ + + def copy(future: "Future[_T]") -> None: + assert future is a + if b.done(): + return + if hasattr(a, "exc_info") and a.exc_info() is not None: # type: ignore + future_set_exc_info(b, a.exc_info()) # type: ignore + elif a.exception() is not None: + b.set_exception(a.exception()) + else: + b.set_result(a.result()) + + if isinstance(a, Future): + future_add_done_callback(a, copy) + else: + # concurrent.futures.Future + from tornado.ioloop import IOLoop + + IOLoop.current().add_future(a, copy) + + +def future_set_result_unless_cancelled( + future: "Union[futures.Future[_T], Future[_T]]", value: _T +) -> None: + """Set the given ``value`` as the `Future`'s result, if not cancelled. + + Avoids ``asyncio.InvalidStateError`` when calling ``set_result()`` on + a cancelled `asyncio.Future`. + + .. versionadded:: 5.0 + """ + if not future.cancelled(): + future.set_result(value) + + +def future_set_exception_unless_cancelled( + future: "Union[futures.Future[_T], Future[_T]]", exc: BaseException +) -> None: + """Set the given ``exc`` as the `Future`'s exception. + + If the Future is already canceled, logs the exception instead. If + this logging is not desired, the caller should explicitly check + the state of the Future and call ``Future.set_exception`` instead of + this wrapper. + + Avoids ``asyncio.InvalidStateError`` when calling ``set_exception()`` on + a cancelled `asyncio.Future`. + + .. 
versionadded:: 6.0 + + """ + if not future.cancelled(): + future.set_exception(exc) + else: + app_log.error("Exception after Future was cancelled", exc_info=exc) + + +def future_set_exc_info( + future: "Union[futures.Future[_T], Future[_T]]", + exc_info: Tuple[ + Optional[type], Optional[BaseException], Optional[types.TracebackType] + ], +) -> None: + """Set the given ``exc_info`` as the `Future`'s exception. + + Understands both `asyncio.Future` and the extensions in older + versions of Tornado to enable better tracebacks on Python 2. + + .. versionadded:: 5.0 + + .. versionchanged:: 6.0 + + If the future is already cancelled, this function is a no-op. + (previously ``asyncio.InvalidStateError`` would be raised) + + """ + if exc_info[1] is None: + raise Exception("future_set_exc_info called with no exception") + future_set_exception_unless_cancelled(future, exc_info[1]) + + +@typing.overload +def future_add_done_callback( + future: "futures.Future[_T]", callback: Callable[["futures.Future[_T]"], None] +) -> None: + pass + + +@typing.overload # noqa: F811 +def future_add_done_callback( + future: "Future[_T]", callback: Callable[["Future[_T]"], None] +) -> None: + pass + + +def future_add_done_callback( # noqa: F811 + future: "Union[futures.Future[_T], Future[_T]]", callback: Callable[..., None] +) -> None: + """Arrange to call ``callback`` when ``future`` is complete. + + ``callback`` is invoked with one argument, the ``future``. + + If ``future`` is already done, ``callback`` is invoked immediately. + This may differ from the behavior of ``Future.add_done_callback``, + which makes no such guarantee. + + .. versionadded:: 5.0 + """ + if future.done(): + callback(future) + else: + future.add_done_callback(callback) diff --git a/server/www/packages/packages-linux/x64/tornado/curl_httpclient.py b/server/www/packages/packages-linux/x64/tornado/curl_httpclient.py index 7f5cb10..b81316c 100644 --- a/server/www/packages/packages-linux/x64/tornado/curl_httpclient.py +++ b/server/www/packages/packages-linux/x64/tornado/curl_httpclient.py @@ -1,514 +1,572 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
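Before the `curl_httpclient` hunk below, a usage sketch for the `tornado.concurrent` helpers added above: the `run_on_executor` decorator bridging a thread-pool result into an awaitable Future. The class, method, and values are invented:

```python
# Invented example of run_on_executor (shown above): the decorated method
# runs on self.executor, and the caller awaits the chained asyncio Future.
import asyncio
from concurrent.futures import ThreadPoolExecutor

from tornado.concurrent import run_on_executor

class Worker:
    def __init__(self):
        self.executor = ThreadPoolExecutor(max_workers=2)

    @run_on_executor
    def slow_add(self, a, b):
        return a + b  # blocking work happens on a pool thread

async def main():
    print(await Worker().slow_add(1, 2))  # prints 3

asyncio.run(main())
```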
- -"""Non-blocking HTTP client implementation using pycurl.""" - -from __future__ import absolute_import, division, print_function - -import collections -import functools -import logging -import pycurl # type: ignore -import threading -import time -from io import BytesIO - -from tornado import httputil -from tornado import ioloop -from tornado import stack_context - -from tornado.escape import utf8, native_str -from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main - -curl_log = logging.getLogger('tornado.curl_httpclient') - - -class CurlAsyncHTTPClient(AsyncHTTPClient): - def initialize(self, max_clients=10, defaults=None): - super(CurlAsyncHTTPClient, self).initialize(defaults=defaults) - self._multi = pycurl.CurlMulti() - self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout) - self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket) - self._curls = [self._curl_create() for i in range(max_clients)] - self._free_list = self._curls[:] - self._requests = collections.deque() - self._fds = {} - self._timeout = None - - # libcurl has bugs that sometimes cause it to not report all - # relevant file descriptors and timeouts to TIMERFUNCTION/ - # SOCKETFUNCTION. Mitigate the effects of such bugs by - # forcing a periodic scan of all active requests. - self._force_timeout_callback = ioloop.PeriodicCallback( - self._handle_force_timeout, 1000) - self._force_timeout_callback.start() - - # Work around a bug in libcurl 7.29.0: Some fields in the curl - # multi object are initialized lazily, and its destructor will - # segfault if it is destroyed without having been used. Add - # and remove a dummy handle to make sure everything is - # initialized. - dummy_curl_handle = pycurl.Curl() - self._multi.add_handle(dummy_curl_handle) - self._multi.remove_handle(dummy_curl_handle) - - def close(self): - self._force_timeout_callback.stop() - if self._timeout is not None: - self.io_loop.remove_timeout(self._timeout) - for curl in self._curls: - curl.close() - self._multi.close() - super(CurlAsyncHTTPClient, self).close() - - # Set below properties to None to reduce the reference count of current - # instance, because those properties hold some methods of current - # instance that will case circular reference. - self._force_timeout_callback = None - self._multi = None - - def fetch_impl(self, request, callback): - self._requests.append((request, callback, self.io_loop.time())) - self._process_queue() - self._set_timeout(0) - - def _handle_socket(self, event, fd, multi, data): - """Called by libcurl when it wants to change the file descriptors - it cares about. - """ - event_map = { - pycurl.POLL_NONE: ioloop.IOLoop.NONE, - pycurl.POLL_IN: ioloop.IOLoop.READ, - pycurl.POLL_OUT: ioloop.IOLoop.WRITE, - pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE - } - if event == pycurl.POLL_REMOVE: - if fd in self._fds: - self.io_loop.remove_handler(fd) - del self._fds[fd] - else: - ioloop_event = event_map[event] - # libcurl sometimes closes a socket and then opens a new - # one using the same FD without giving us a POLL_NONE in - # between. This is a problem with the epoll IOLoop, - # because the kernel can tell when a socket is closed and - # removes it from the epoll automatically, causing future - # update_handler calls to fail. Since we can't tell when - # this has happened, always use remove and re-add - # instead of update. 
- if fd in self._fds: - self.io_loop.remove_handler(fd) - self.io_loop.add_handler(fd, self._handle_events, - ioloop_event) - self._fds[fd] = ioloop_event - - def _set_timeout(self, msecs): - """Called by libcurl to schedule a timeout.""" - if self._timeout is not None: - self.io_loop.remove_timeout(self._timeout) - self._timeout = self.io_loop.add_timeout( - self.io_loop.time() + msecs / 1000.0, self._handle_timeout) - - def _handle_events(self, fd, events): - """Called by IOLoop when there is activity on one of our - file descriptors. - """ - action = 0 - if events & ioloop.IOLoop.READ: - action |= pycurl.CSELECT_IN - if events & ioloop.IOLoop.WRITE: - action |= pycurl.CSELECT_OUT - while True: - try: - ret, num_handles = self._multi.socket_action(fd, action) - except pycurl.error as e: - ret = e.args[0] - if ret != pycurl.E_CALL_MULTI_PERFORM: - break - self._finish_pending_requests() - - def _handle_timeout(self): - """Called by IOLoop when the requested timeout has passed.""" - with stack_context.NullContext(): - self._timeout = None - while True: - try: - ret, num_handles = self._multi.socket_action( - pycurl.SOCKET_TIMEOUT, 0) - except pycurl.error as e: - ret = e.args[0] - if ret != pycurl.E_CALL_MULTI_PERFORM: - break - self._finish_pending_requests() - - # In theory, we shouldn't have to do this because curl will - # call _set_timeout whenever the timeout changes. However, - # sometimes after _handle_timeout we will need to reschedule - # immediately even though nothing has changed from curl's - # perspective. This is because when socket_action is - # called with SOCKET_TIMEOUT, libcurl decides internally which - # timeouts need to be processed by using a monotonic clock - # (where available) while tornado uses python's time.time() - # to decide when timeouts have occurred. When those clocks - # disagree on elapsed time (as they will whenever there is an - # NTP adjustment), tornado might call _handle_timeout before - # libcurl is ready. After each timeout, resync the scheduled - # timeout with libcurl's current state. - new_timeout = self._multi.timeout() - if new_timeout >= 0: - self._set_timeout(new_timeout) - - def _handle_force_timeout(self): - """Called by IOLoop periodically to ask libcurl to process any - events it may have forgotten about. - """ - with stack_context.NullContext(): - while True: - try: - ret, num_handles = self._multi.socket_all() - except pycurl.error as e: - ret = e.args[0] - if ret != pycurl.E_CALL_MULTI_PERFORM: - break - self._finish_pending_requests() - - def _finish_pending_requests(self): - """Process any requests that were completed by the last - call to multi.socket_action. 
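For contrast with the `socket_action`-driven design above, the simpler blocking way to drive a `CurlMulti` uses `perform()` and `select()`; a sketch under the assumption that pycurl is installed (the URL is a placeholder):

```python
# Canonical blocking CurlMulti loop; CurlAsyncHTTPClient does the same
# work, but event-driven through the IOLoop via socket_action().
from io import BytesIO

import pycurl

buf = BytesIO()
easy = pycurl.Curl()
easy.setopt(pycurl.URL, "http://example.com/")  # placeholder URL
easy.setopt(pycurl.WRITEFUNCTION, buf.write)

multi = pycurl.CurlMulti()
multi.add_handle(easy)
while True:
    ret, num_handles = multi.perform()
    if ret != pycurl.E_CALL_MULTI_PERFORM:
        break
while num_handles:
    if multi.select(1.0) == -1:
        continue  # select() may return -1 spuriously; retry
    while True:
        ret, num_handles = multi.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break

print(easy.getinfo(pycurl.HTTP_CODE), len(buf.getvalue()))
```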
- """ - while True: - num_q, ok_list, err_list = self._multi.info_read() - for curl in ok_list: - self._finish(curl) - for curl, errnum, errmsg in err_list: - self._finish(curl, errnum, errmsg) - if num_q == 0: - break - self._process_queue() - - def _process_queue(self): - with stack_context.NullContext(): - while True: - started = 0 - while self._free_list and self._requests: - started += 1 - curl = self._free_list.pop() - (request, callback, queue_start_time) = self._requests.popleft() - curl.info = { - "headers": httputil.HTTPHeaders(), - "buffer": BytesIO(), - "request": request, - "callback": callback, - "queue_start_time": queue_start_time, - "curl_start_time": time.time(), - "curl_start_ioloop_time": self.io_loop.current().time(), - } - try: - self._curl_setup_request( - curl, request, curl.info["buffer"], - curl.info["headers"]) - except Exception as e: - # If there was an error in setup, pass it on - # to the callback. Note that allowing the - # error to escape here will appear to work - # most of the time since we are still in the - # caller's original stack frame, but when - # _process_queue() is called from - # _finish_pending_requests the exceptions have - # nowhere to go. - self._free_list.append(curl) - callback(HTTPResponse( - request=request, - code=599, - error=e)) - else: - self._multi.add_handle(curl) - - if not started: - break - - def _finish(self, curl, curl_error=None, curl_message=None): - info = curl.info - curl.info = None - self._multi.remove_handle(curl) - self._free_list.append(curl) - buffer = info["buffer"] - if curl_error: - error = CurlError(curl_error, curl_message) - code = error.code - effective_url = None - buffer.close() - buffer = None - else: - error = None - code = curl.getinfo(pycurl.HTTP_CODE) - effective_url = curl.getinfo(pycurl.EFFECTIVE_URL) - buffer.seek(0) - # the various curl timings are documented at - # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html - time_info = dict( - queue=info["curl_start_ioloop_time"] - info["queue_start_time"], - namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME), - connect=curl.getinfo(pycurl.CONNECT_TIME), - appconnect=curl.getinfo(pycurl.APPCONNECT_TIME), - pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME), - starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME), - total=curl.getinfo(pycurl.TOTAL_TIME), - redirect=curl.getinfo(pycurl.REDIRECT_TIME), - ) - try: - info["callback"](HTTPResponse( - request=info["request"], code=code, headers=info["headers"], - buffer=buffer, effective_url=effective_url, error=error, - reason=info['headers'].get("X-Http-Reason", None), - request_time=self.io_loop.time() - info["curl_start_ioloop_time"], - start_time=info["curl_start_time"], - time_info=time_info)) - except Exception: - self.handle_callback_exception(info["callback"]) - - def handle_callback_exception(self, callback): - self.io_loop.handle_callback_exception(callback) - - def _curl_create(self): - curl = pycurl.Curl() - if curl_log.isEnabledFor(logging.DEBUG): - curl.setopt(pycurl.VERBOSE, 1) - curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) - if hasattr(pycurl, 'PROTOCOLS'): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12) - curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) - curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) - return curl - - def _curl_setup_request(self, curl, request, buffer, headers): - curl.setopt(pycurl.URL, native_str(request.url)) - - # libcurl's magic "Expect: 100-continue" behavior causes delays - # with servers that don't 
support it (which include, among others, - # Google's OpenID endpoint). Additionally, this behavior has - # a bug in conjunction with the curl_multi_socket_action API - # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976), - # which increases the delays. It's more trouble than it's worth, - # so just turn off the feature (yes, setting Expect: to an empty - # value is the official way to disable this) - if "Expect" not in request.headers: - request.headers["Expect"] = "" - - # libcurl adds Pragma: no-cache by default; disable that too - if "Pragma" not in request.headers: - request.headers["Pragma"] = "" - - curl.setopt(pycurl.HTTPHEADER, - ["%s: %s" % (native_str(k), native_str(v)) - for k, v in request.headers.get_all()]) - - curl.setopt(pycurl.HEADERFUNCTION, - functools.partial(self._curl_header_callback, - headers, request.header_callback)) - if request.streaming_callback: - def write_function(chunk): - self.io_loop.add_callback(request.streaming_callback, chunk) - else: - write_function = buffer.write - curl.setopt(pycurl.WRITEFUNCTION, write_function) - curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects) - curl.setopt(pycurl.MAXREDIRS, request.max_redirects) - curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout)) - curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout)) - if request.user_agent: - curl.setopt(pycurl.USERAGENT, native_str(request.user_agent)) - else: - curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)") - if request.network_interface: - curl.setopt(pycurl.INTERFACE, request.network_interface) - if request.decompress_response: - curl.setopt(pycurl.ENCODING, "gzip,deflate") - else: - curl.setopt(pycurl.ENCODING, "none") - if request.proxy_host and request.proxy_port: - curl.setopt(pycurl.PROXY, request.proxy_host) - curl.setopt(pycurl.PROXYPORT, request.proxy_port) - if request.proxy_username: - credentials = httputil.encode_username_password(request.proxy_username, - request.proxy_password) - curl.setopt(pycurl.PROXYUSERPWD, credentials) - - if (request.proxy_auth_mode is None or - request.proxy_auth_mode == "basic"): - curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC) - elif request.proxy_auth_mode == "digest": - curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST) - else: - raise ValueError( - "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode) - else: - curl.setopt(pycurl.PROXY, '') - curl.unsetopt(pycurl.PROXYUSERPWD) - if request.validate_cert: - curl.setopt(pycurl.SSL_VERIFYPEER, 1) - curl.setopt(pycurl.SSL_VERIFYHOST, 2) - else: - curl.setopt(pycurl.SSL_VERIFYPEER, 0) - curl.setopt(pycurl.SSL_VERIFYHOST, 0) - if request.ca_certs is not None: - curl.setopt(pycurl.CAINFO, request.ca_certs) - else: - # There is no way to restore pycurl.CAINFO to its default value - # (Using unsetopt makes it reject all certificates). - # I don't see any way to read the default value from python so it - # can be restored later. We'll have to just leave CAINFO untouched - # if no ca_certs file was specified, and require that if any - # request uses a custom ca_certs file, they all must. - pass - - if request.allow_ipv6 is False: - # Curl behaves reasonably when DNS resolution gives an ipv6 address - # that we can't reach, so allow ipv6 unless the user asks to disable. 
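Most of the `setopt` translations above are fed from `HTTPRequest` keyword arguments; a sketch of a request exercising several of them (all values invented):

```python
# Invented values; each keyword maps onto one pycurl.setopt() call in
# _curl_setup_request above.
from tornado.httpclient import HTTPRequest

req = HTTPRequest(
    "https://example.com/",
    connect_timeout=5.0,      # CONNECTTIMEOUT_MS
    request_timeout=30.0,     # TIMEOUT_MS
    follow_redirects=True,    # FOLLOWLOCATION
    max_redirects=3,          # MAXREDIRS
    validate_cert=True,       # SSL_VERIFYPEER / SSL_VERIFYHOST
    proxy_host="127.0.0.1",   # PROXY (invented)
    proxy_port=3128,          # PROXYPORT (invented)
)
```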
- curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4) - else: - curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER) - - # Set the request method through curl's irritating interface which makes - # up names for almost every single method - curl_options = { - "GET": pycurl.HTTPGET, - "POST": pycurl.POST, - "PUT": pycurl.UPLOAD, - "HEAD": pycurl.NOBODY, - } - custom_methods = set(["DELETE", "OPTIONS", "PATCH"]) - for o in curl_options.values(): - curl.setopt(o, False) - if request.method in curl_options: - curl.unsetopt(pycurl.CUSTOMREQUEST) - curl.setopt(curl_options[request.method], True) - elif request.allow_nonstandard_methods or request.method in custom_methods: - curl.setopt(pycurl.CUSTOMREQUEST, request.method) - else: - raise KeyError('unknown method ' + request.method) - - body_expected = request.method in ("POST", "PATCH", "PUT") - body_present = request.body is not None - if not request.allow_nonstandard_methods: - # Some HTTP methods nearly always have bodies while others - # almost never do. Fail in this case unless the user has - # opted out of sanity checks with allow_nonstandard_methods. - if ((body_expected and not body_present) or - (body_present and not body_expected)): - raise ValueError( - 'Body must %sbe None for method %s (unless ' - 'allow_nonstandard_methods is true)' % - ('not ' if body_expected else '', request.method)) - - if body_expected or body_present: - if request.method == "GET": - # Even with `allow_nonstandard_methods` we disallow - # GET with a body (because libcurl doesn't allow it - # unless we use CUSTOMREQUEST). While the spec doesn't - # forbid clients from sending a body, it arguably - # disallows the server from doing anything with them. - raise ValueError('Body must be None for GET request') - request_buffer = BytesIO(utf8(request.body or '')) - - def ioctl(cmd): - if cmd == curl.IOCMD_RESTARTREAD: - request_buffer.seek(0) - curl.setopt(pycurl.READFUNCTION, request_buffer.read) - curl.setopt(pycurl.IOCTLFUNCTION, ioctl) - if request.method == "POST": - curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or '')) - else: - curl.setopt(pycurl.UPLOAD, True) - curl.setopt(pycurl.INFILESIZE, len(request.body or '')) - - if request.auth_username is not None: - if request.auth_mode is None or request.auth_mode == "basic": - curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) - elif request.auth_mode == "digest": - curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) - else: - raise ValueError("Unsupported auth_mode %s" % request.auth_mode) - - userpwd = httputil.encode_username_password(request.auth_username, - request.auth_password) - curl.setopt(pycurl.USERPWD, userpwd) - curl_log.debug("%s %s (username: %r)", request.method, request.url, - request.auth_username) - else: - curl.unsetopt(pycurl.USERPWD) - curl_log.debug("%s %s", request.method, request.url) - - if request.client_cert is not None: - curl.setopt(pycurl.SSLCERT, request.client_cert) - - if request.client_key is not None: - curl.setopt(pycurl.SSLKEY, request.client_key) - - if request.ssl_options is not None: - raise ValueError("ssl_options not supported in curl_httpclient") - - if threading.activeCount() > 1: - # libcurl/pycurl is not thread-safe by default. When multiple threads - # are used, signals should be disabled. This has the side effect - # of disabling DNS timeouts in some environments (when libcurl is - # not linked against ares), so we don't do it when there is only one - # thread. 
Applications that use many short-lived threads may need - # to set NOSIGNAL manually in a prepare_curl_callback since - # there may not be any other threads running at the time we call - # threading.activeCount. - curl.setopt(pycurl.NOSIGNAL, 1) - if request.prepare_curl_callback is not None: - request.prepare_curl_callback(curl) - - def _curl_header_callback(self, headers, header_callback, header_line): - header_line = native_str(header_line.decode('latin1')) - if header_callback is not None: - self.io_loop.add_callback(header_callback, header_line) - # header_line as returned by curl includes the end-of-line characters. - # whitespace at the start should be preserved to allow multi-line headers - header_line = header_line.rstrip() - if header_line.startswith("HTTP/"): - headers.clear() - try: - (__, __, reason) = httputil.parse_response_start_line(header_line) - header_line = "X-Http-Reason: %s" % reason - except httputil.HTTPInputError: - return - if not header_line: - return - headers.parse_line(header_line) - - def _curl_debug(self, debug_type, debug_msg): - debug_types = ('I', '<', '>', '<', '>') - if debug_type == 0: - debug_msg = native_str(debug_msg) - curl_log.debug('%s', debug_msg.strip()) - elif debug_type in (1, 2): - debug_msg = native_str(debug_msg) - for line in debug_msg.splitlines(): - curl_log.debug('%s %s', debug_types[debug_type], line) - elif debug_type == 4: - curl_log.debug('%s %r', debug_types[debug_type], debug_msg) - - -class CurlError(HTTPError): - def __init__(self, errno, message): - HTTPError.__init__(self, 599, message) - self.errno = errno - - -if __name__ == "__main__": - AsyncHTTPClient.configure(CurlAsyncHTTPClient) - main() +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
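A sketch of opting into the client defined below: the module path passed to `configure` is real, while the URL is a placeholder; pycurl must be installed.

```python
# Select the pycurl-backed client process-wide, then fetch once.
import asyncio

from tornado.httpclient import AsyncHTTPClient

AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

async def main():
    resp = await AsyncHTTPClient().fetch("http://example.com/")  # placeholder URL
    print(resp.code, len(resp.body))

asyncio.run(main())
```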
+ +"""Non-blocking HTTP client implementation using pycurl.""" + +import collections +import functools +import logging +import pycurl # type: ignore +import threading +import time +from io import BytesIO + +from tornado import httputil +from tornado import ioloop + +from tornado.escape import utf8, native_str +from tornado.httpclient import ( + HTTPRequest, + HTTPResponse, + HTTPError, + AsyncHTTPClient, + main, +) +from tornado.log import app_log + +from typing import Dict, Any, Callable, Union +import typing + +if typing.TYPE_CHECKING: + from typing import Deque, Tuple, Optional # noqa: F401 + +curl_log = logging.getLogger("tornado.curl_httpclient") + + +class CurlAsyncHTTPClient(AsyncHTTPClient): + def initialize( # type: ignore + self, max_clients: int = 10, defaults: Dict[str, Any] = None + ) -> None: + super(CurlAsyncHTTPClient, self).initialize(defaults=defaults) + self._multi = pycurl.CurlMulti() + self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout) + self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket) + self._curls = [self._curl_create() for i in range(max_clients)] + self._free_list = self._curls[:] + self._requests = ( + collections.deque() + ) # type: Deque[Tuple[HTTPRequest, Callable[[HTTPResponse], None], float]] + self._fds = {} # type: Dict[int, int] + self._timeout = None # type: Optional[object] + + # libcurl has bugs that sometimes cause it to not report all + # relevant file descriptors and timeouts to TIMERFUNCTION/ + # SOCKETFUNCTION. Mitigate the effects of such bugs by + # forcing a periodic scan of all active requests. + self._force_timeout_callback = ioloop.PeriodicCallback( + self._handle_force_timeout, 1000 + ) + self._force_timeout_callback.start() + + # Work around a bug in libcurl 7.29.0: Some fields in the curl + # multi object are initialized lazily, and its destructor will + # segfault if it is destroyed without having been used. Add + # and remove a dummy handle to make sure everything is + # initialized. + dummy_curl_handle = pycurl.Curl() + self._multi.add_handle(dummy_curl_handle) + self._multi.remove_handle(dummy_curl_handle) + + def close(self) -> None: + self._force_timeout_callback.stop() + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + for curl in self._curls: + curl.close() + self._multi.close() + super(CurlAsyncHTTPClient, self).close() + + # Set below properties to None to reduce the reference count of current + # instance, because those properties hold some methods of current + # instance that will case circular reference. + self._force_timeout_callback = None # type: ignore + self._multi = None + + def fetch_impl( + self, request: HTTPRequest, callback: Callable[[HTTPResponse], None] + ) -> None: + self._requests.append((request, callback, self.io_loop.time())) + self._process_queue() + self._set_timeout(0) + + def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None: + """Called by libcurl when it wants to change the file descriptors + it cares about. + """ + event_map = { + pycurl.POLL_NONE: ioloop.IOLoop.NONE, + pycurl.POLL_IN: ioloop.IOLoop.READ, + pycurl.POLL_OUT: ioloop.IOLoop.WRITE, + pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE, + } + if event == pycurl.POLL_REMOVE: + if fd in self._fds: + self.io_loop.remove_handler(fd) + del self._fds[fd] + else: + ioloop_event = event_map[event] + # libcurl sometimes closes a socket and then opens a new + # one using the same FD without giving us a POLL_NONE in + # between. 
This is a problem with the epoll IOLoop, + # because the kernel can tell when a socket is closed and + # removes it from the epoll automatically, causing future + # update_handler calls to fail. Since we can't tell when + # this has happened, always use remove and re-add + # instead of update. + if fd in self._fds: + self.io_loop.remove_handler(fd) + self.io_loop.add_handler(fd, self._handle_events, ioloop_event) + self._fds[fd] = ioloop_event + + def _set_timeout(self, msecs: int) -> None: + """Called by libcurl to schedule a timeout.""" + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = self.io_loop.add_timeout( + self.io_loop.time() + msecs / 1000.0, self._handle_timeout + ) + + def _handle_events(self, fd: int, events: int) -> None: + """Called by IOLoop when there is activity on one of our + file descriptors. + """ + action = 0 + if events & ioloop.IOLoop.READ: + action |= pycurl.CSELECT_IN + if events & ioloop.IOLoop.WRITE: + action |= pycurl.CSELECT_OUT + while True: + try: + ret, num_handles = self._multi.socket_action(fd, action) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _handle_timeout(self) -> None: + """Called by IOLoop when the requested timeout has passed.""" + self._timeout = None + while True: + try: + ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + # In theory, we shouldn't have to do this because curl will + # call _set_timeout whenever the timeout changes. However, + # sometimes after _handle_timeout we will need to reschedule + # immediately even though nothing has changed from curl's + # perspective. This is because when socket_action is + # called with SOCKET_TIMEOUT, libcurl decides internally which + # timeouts need to be processed by using a monotonic clock + # (where available) while tornado uses python's time.time() + # to decide when timeouts have occurred. When those clocks + # disagree on elapsed time (as they will whenever there is an + # NTP adjustment), tornado might call _handle_timeout before + # libcurl is ready. After each timeout, resync the scheduled + # timeout with libcurl's current state. + new_timeout = self._multi.timeout() + if new_timeout >= 0: + self._set_timeout(new_timeout) + + def _handle_force_timeout(self) -> None: + """Called by IOLoop periodically to ask libcurl to process any + events it may have forgotten about. + """ + while True: + try: + ret, num_handles = self._multi.socket_all() + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _finish_pending_requests(self) -> None: + """Process any requests that were completed by the last + call to multi.socket_action. 
+ """ + while True: + num_q, ok_list, err_list = self._multi.info_read() + for curl in ok_list: + self._finish(curl) + for curl, errnum, errmsg in err_list: + self._finish(curl, errnum, errmsg) + if num_q == 0: + break + self._process_queue() + + def _process_queue(self) -> None: + while True: + started = 0 + while self._free_list and self._requests: + started += 1 + curl = self._free_list.pop() + (request, callback, queue_start_time) = self._requests.popleft() + curl.info = { + "headers": httputil.HTTPHeaders(), + "buffer": BytesIO(), + "request": request, + "callback": callback, + "queue_start_time": queue_start_time, + "curl_start_time": time.time(), + "curl_start_ioloop_time": self.io_loop.current().time(), + } + try: + self._curl_setup_request( + curl, request, curl.info["buffer"], curl.info["headers"] + ) + except Exception as e: + # If there was an error in setup, pass it on + # to the callback. Note that allowing the + # error to escape here will appear to work + # most of the time since we are still in the + # caller's original stack frame, but when + # _process_queue() is called from + # _finish_pending_requests the exceptions have + # nowhere to go. + self._free_list.append(curl) + callback(HTTPResponse(request=request, code=599, error=e)) + else: + self._multi.add_handle(curl) + + if not started: + break + + def _finish( + self, curl: pycurl.Curl, curl_error: int = None, curl_message: str = None + ) -> None: + info = curl.info + curl.info = None + self._multi.remove_handle(curl) + self._free_list.append(curl) + buffer = info["buffer"] + if curl_error: + assert curl_message is not None + error = CurlError(curl_error, curl_message) # type: Optional[CurlError] + assert error is not None + code = error.code + effective_url = None + buffer.close() + buffer = None + else: + error = None + code = curl.getinfo(pycurl.HTTP_CODE) + effective_url = curl.getinfo(pycurl.EFFECTIVE_URL) + buffer.seek(0) + # the various curl timings are documented at + # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html + time_info = dict( + queue=info["curl_start_ioloop_time"] - info["queue_start_time"], + namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME), + connect=curl.getinfo(pycurl.CONNECT_TIME), + appconnect=curl.getinfo(pycurl.APPCONNECT_TIME), + pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME), + starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME), + total=curl.getinfo(pycurl.TOTAL_TIME), + redirect=curl.getinfo(pycurl.REDIRECT_TIME), + ) + try: + info["callback"]( + HTTPResponse( + request=info["request"], + code=code, + headers=info["headers"], + buffer=buffer, + effective_url=effective_url, + error=error, + reason=info["headers"].get("X-Http-Reason", None), + request_time=self.io_loop.time() - info["curl_start_ioloop_time"], + start_time=info["curl_start_time"], + time_info=time_info, + ) + ) + except Exception: + self.handle_callback_exception(info["callback"]) + + def handle_callback_exception(self, callback: Any) -> None: + app_log.error("Exception in callback %r", callback, exc_info=True) + + def _curl_create(self) -> pycurl.Curl: + curl = pycurl.Curl() + if curl_log.isEnabledFor(logging.DEBUG): + curl.setopt(pycurl.VERBOSE, 1) + curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) + if hasattr( + pycurl, "PROTOCOLS" + ): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12) + curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) + curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) + return curl + + def _curl_setup_request( + self, + curl: 
pycurl.Curl, + request: HTTPRequest, + buffer: BytesIO, + headers: httputil.HTTPHeaders, + ) -> None: + curl.setopt(pycurl.URL, native_str(request.url)) + + # libcurl's magic "Expect: 100-continue" behavior causes delays + # with servers that don't support it (which include, among others, + # Google's OpenID endpoint). Additionally, this behavior has + # a bug in conjunction with the curl_multi_socket_action API + # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976), + # which increases the delays. It's more trouble than it's worth, + # so just turn off the feature (yes, setting Expect: to an empty + # value is the official way to disable this) + if "Expect" not in request.headers: + request.headers["Expect"] = "" + + # libcurl adds Pragma: no-cache by default; disable that too + if "Pragma" not in request.headers: + request.headers["Pragma"] = "" + + curl.setopt( + pycurl.HTTPHEADER, + [ + "%s: %s" % (native_str(k), native_str(v)) + for k, v in request.headers.get_all() + ], + ) + + curl.setopt( + pycurl.HEADERFUNCTION, + functools.partial( + self._curl_header_callback, headers, request.header_callback + ), + ) + if request.streaming_callback: + + def write_function(b: Union[bytes, bytearray]) -> int: + assert request.streaming_callback is not None + self.io_loop.add_callback(request.streaming_callback, b) + return len(b) + + else: + write_function = buffer.write + curl.setopt(pycurl.WRITEFUNCTION, write_function) + curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects) + curl.setopt(pycurl.MAXREDIRS, request.max_redirects) + assert request.connect_timeout is not None + curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout)) + assert request.request_timeout is not None + curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout)) + if request.user_agent: + curl.setopt(pycurl.USERAGENT, native_str(request.user_agent)) + else: + curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)") + if request.network_interface: + curl.setopt(pycurl.INTERFACE, request.network_interface) + if request.decompress_response: + curl.setopt(pycurl.ENCODING, "gzip,deflate") + else: + curl.setopt(pycurl.ENCODING, "none") + if request.proxy_host and request.proxy_port: + curl.setopt(pycurl.PROXY, request.proxy_host) + curl.setopt(pycurl.PROXYPORT, request.proxy_port) + if request.proxy_username: + assert request.proxy_password is not None + credentials = httputil.encode_username_password( + request.proxy_username, request.proxy_password + ) + curl.setopt(pycurl.PROXYUSERPWD, credentials) + + if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic": + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC) + elif request.proxy_auth_mode == "digest": + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError( + "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode + ) + else: + curl.setopt(pycurl.PROXY, "") + curl.unsetopt(pycurl.PROXYUSERPWD) + if request.validate_cert: + curl.setopt(pycurl.SSL_VERIFYPEER, 1) + curl.setopt(pycurl.SSL_VERIFYHOST, 2) + else: + curl.setopt(pycurl.SSL_VERIFYPEER, 0) + curl.setopt(pycurl.SSL_VERIFYHOST, 0) + if request.ca_certs is not None: + curl.setopt(pycurl.CAINFO, request.ca_certs) + else: + # There is no way to restore pycurl.CAINFO to its default value + # (Using unsetopt makes it reject all certificates). + # I don't see any way to read the default value from python so it + # can be restored later. 
We'll have to just leave CAINFO untouched + # if no ca_certs file was specified, and require that if any + # request uses a custom ca_certs file, they all must. + pass + + if request.allow_ipv6 is False: + # Curl behaves reasonably when DNS resolution gives an ipv6 address + # that we can't reach, so allow ipv6 unless the user asks to disable. + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4) + else: + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER) + + # Set the request method through curl's irritating interface which makes + # up names for almost every single method + curl_options = { + "GET": pycurl.HTTPGET, + "POST": pycurl.POST, + "PUT": pycurl.UPLOAD, + "HEAD": pycurl.NOBODY, + } + custom_methods = set(["DELETE", "OPTIONS", "PATCH"]) + for o in curl_options.values(): + curl.setopt(o, False) + if request.method in curl_options: + curl.unsetopt(pycurl.CUSTOMREQUEST) + curl.setopt(curl_options[request.method], True) + elif request.allow_nonstandard_methods or request.method in custom_methods: + curl.setopt(pycurl.CUSTOMREQUEST, request.method) + else: + raise KeyError("unknown method " + request.method) + + body_expected = request.method in ("POST", "PATCH", "PUT") + body_present = request.body is not None + if not request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. + if (body_expected and not body_present) or ( + body_present and not body_expected + ): + raise ValueError( + "Body must %sbe None for method %s (unless " + "allow_nonstandard_methods is true)" + % ("not " if body_expected else "", request.method) + ) + + if body_expected or body_present: + if request.method == "GET": + # Even with `allow_nonstandard_methods` we disallow + # GET with a body (because libcurl doesn't allow it + # unless we use CUSTOMREQUEST). While the spec doesn't + # forbid clients from sending a body, it arguably + # disallows the server from doing anything with them. 
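Together, the checks above and the ValueError that immediately follows enforce the client's body rules: POST, PATCH, and PUT must carry a body, other methods must not, and GET may never carry one even with allow_nonstandard_methods. A sketch of the explicit opt-out for the non-GET case (endpoint URL hypothetical):

    from tornado.httpclient import HTTPRequest

    # DELETE normally carries no body; without allow_nonstandard_methods=True
    # the curl client would raise ValueError at fetch time.
    req = HTTPRequest(
        "http://example.com/items/1",
        method="DELETE",
        body='{"purge": true}',
        allow_nonstandard_methods=True,
    )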
+ raise ValueError("Body must be None for GET request") + request_buffer = BytesIO(utf8(request.body or "")) + + def ioctl(cmd: int) -> None: + if cmd == curl.IOCMD_RESTARTREAD: + request_buffer.seek(0) + + curl.setopt(pycurl.READFUNCTION, request_buffer.read) + curl.setopt(pycurl.IOCTLFUNCTION, ioctl) + if request.method == "POST": + curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or "")) + else: + curl.setopt(pycurl.UPLOAD, True) + curl.setopt(pycurl.INFILESIZE, len(request.body or "")) + + if request.auth_username is not None: + assert request.auth_password is not None + if request.auth_mode is None or request.auth_mode == "basic": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) + elif request.auth_mode == "digest": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError("Unsupported auth_mode %s" % request.auth_mode) + + userpwd = httputil.encode_username_password( + request.auth_username, request.auth_password + ) + curl.setopt(pycurl.USERPWD, userpwd) + curl_log.debug( + "%s %s (username: %r)", + request.method, + request.url, + request.auth_username, + ) + else: + curl.unsetopt(pycurl.USERPWD) + curl_log.debug("%s %s", request.method, request.url) + + if request.client_cert is not None: + curl.setopt(pycurl.SSLCERT, request.client_cert) + + if request.client_key is not None: + curl.setopt(pycurl.SSLKEY, request.client_key) + + if request.ssl_options is not None: + raise ValueError("ssl_options not supported in curl_httpclient") + + if threading.active_count() > 1: + # libcurl/pycurl is not thread-safe by default. When multiple threads + # are used, signals should be disabled. This has the side effect + # of disabling DNS timeouts in some environments (when libcurl is + # not linked against ares), so we don't do it when there is only one + # thread. Applications that use many short-lived threads may need + # to set NOSIGNAL manually in a prepare_curl_callback since + # there may not be any other threads running at the time we call + # threading.activeCount. + curl.setopt(pycurl.NOSIGNAL, 1) + if request.prepare_curl_callback is not None: + request.prepare_curl_callback(curl) + + def _curl_header_callback( + self, + headers: httputil.HTTPHeaders, + header_callback: Callable[[str], None], + header_line_bytes: bytes, + ) -> None: + header_line = native_str(header_line_bytes.decode("latin1")) + if header_callback is not None: + self.io_loop.add_callback(header_callback, header_line) + # header_line as returned by curl includes the end-of-line characters. 
+ # whitespace at the start should be preserved to allow multi-line headers + header_line = header_line.rstrip() + if header_line.startswith("HTTP/"): + headers.clear() + try: + (__, __, reason) = httputil.parse_response_start_line(header_line) + header_line = "X-Http-Reason: %s" % reason + except httputil.HTTPInputError: + return + if not header_line: + return + headers.parse_line(header_line) + + def _curl_debug(self, debug_type: int, debug_msg: str) -> None: + debug_types = ("I", "<", ">", "<", ">") + if debug_type == 0: + debug_msg = native_str(debug_msg) + curl_log.debug("%s", debug_msg.strip()) + elif debug_type in (1, 2): + debug_msg = native_str(debug_msg) + for line in debug_msg.splitlines(): + curl_log.debug("%s %s", debug_types[debug_type], line) + elif debug_type == 4: + curl_log.debug("%s %r", debug_types[debug_type], debug_msg) + + +class CurlError(HTTPError): + def __init__(self, errno: int, message: str) -> None: + HTTPError.__init__(self, 599, message) + self.errno = errno + + +if __name__ == "__main__": + AsyncHTTPClient.configure(CurlAsyncHTTPClient) + main() diff --git a/server/www/packages/packages-linux/x64/tornado/escape.py b/server/www/packages/packages-linux/x64/tornado/escape.py index a79ece6..03e1201 100644 --- a/server/www/packages/packages-linux/x64/tornado/escape.py +++ b/server/www/packages/packages-linux/x64/tornado/escape.py @@ -1,399 +1,400 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Escaping/unescaping methods for HTML, JSON, URLs, and others. - -Also includes a few other miscellaneous string manipulation functions that -have crept in over time. -""" - -from __future__ import absolute_import, division, print_function - -import json -import re - -from tornado.util import PY3, unicode_type, basestring_type - -if PY3: - from urllib.parse import parse_qs as _parse_qs - import html.entities as htmlentitydefs - import urllib.parse as urllib_parse - unichr = chr -else: - from urlparse import parse_qs as _parse_qs - import htmlentitydefs - import urllib as urllib_parse - -try: - import typing # noqa -except ImportError: - pass - - -_XHTML_ESCAPE_RE = re.compile('[&<>"\']') -_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', - '\'': '&#39;'} - - -def xhtml_escape(value): - """Escapes a string so it is valid within HTML or XML. - - Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``. - When used in attribute values the escaped strings must be enclosed - in quotes. - - .. versionchanged:: 3.2 - - Added the single quote to the list of escaped characters. - """ - return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], - to_basestring(value)) - - -def xhtml_unescape(value): - """Un-escapes an XML-escaped string.""" - return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value)) - - -# The fact that json_encode wraps json.dumps is an implementation detail. -# Please see https://github.com/tornadoweb/tornado/pull/706 -# before sending a pull request that adds **kwargs to this function.
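The slash-escaping described in json_encode's comments (in both the removed code below and its typed replacement later in this hunk) is easy to observe; a quick sketch:

    from tornado.escape import json_encode

    # "</" becomes "<\/" so encoded JSON can be embedded in a <script> tag
    # without "</script>" terminating it early.
    print(json_encode({"html": "</script>"}))  # {"html": "<\/script>"}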
-def json_encode(value): - """JSON-encodes the given Python object.""" - # JSON permits but does not require forward slashes to be escaped. - # This is useful when json data is emitted in a <script> tag - # in HTML, as it prevents </script> tags from prematurely terminating - # the javascript. Some json libraries do this escaping by default, - # although python's standard library does not, so we do it here. - # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped - return json.dumps(value).replace("</", "<\\/") ... -_UTF8_TYPES = (bytes, type(None)) - - -def utf8(value): - # type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None] - """Converts a string argument to a byte string. - - If the argument is already a byte string or None, it is returned unchanged. - Otherwise it must be a unicode string and is encoded as utf8. - """ - if isinstance(value, _UTF8_TYPES): - return value - if not isinstance(value, unicode_type): - raise TypeError( - "Expected bytes, unicode, or None; got %r" % type(value) - ) - return value.encode("utf-8") - - -_TO_UNICODE_TYPES = (unicode_type, type(None)) - - -def to_unicode(value): - """Converts a string argument to a unicode string. - - If the argument is already a unicode string or None, it is returned - unchanged. Otherwise it must be a byte string and is decoded as utf8. - """ - if isinstance(value, _TO_UNICODE_TYPES): - return value - if not isinstance(value, bytes): - raise TypeError( - "Expected bytes, unicode, or None; got %r" % type(value) - ) - return value.decode("utf-8") - - -# to_unicode was previously named _unicode not because it was private, -# but to avoid conflicts with the built-in unicode() function/type -_unicode = to_unicode - -# When dealing with the standard library across python 2 and 3 it is -# sometimes useful to have a direct conversion to the native string type -if str is unicode_type: - native_str = to_unicode -else: - native_str = utf8 - -_BASESTRING_TYPES = (basestring_type, type(None)) - - -def to_basestring(value): - """Converts a string argument to a subclass of basestring. - - In python2, byte and unicode strings are mostly interchangeable, - so functions that deal with a user-supplied argument in combination - with ascii string constants can use either and should return the type - the user supplied. In python3, the two types are not interchangeable, - so this method is needed to convert byte strings to unicode. - """ - if isinstance(value, _BASESTRING_TYPES): - return value - if not isinstance(value, bytes): - raise TypeError( - "Expected bytes, unicode, or None; got %r" % type(value) - ) - return value.decode("utf-8") - - -def recursive_unicode(obj): - """Walks a simple data structure, converting byte strings to unicode. - - Supports lists, tuples, and dictionaries. - """ - if isinstance(obj, dict): - return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items()) - elif isinstance(obj, list): - return list(recursive_unicode(i) for i in obj) - elif isinstance(obj, tuple): - return tuple(recursive_unicode(i) for i in obj) - elif isinstance(obj, bytes): - return to_unicode(obj) - else: - return obj - - -# I originally used the regex from -# http://daringfireball.net/2010/07/improved_regex_for_matching_urls -# but it gets all exponential on certain patterns (such as too many trailing -# dots), causing the regex matcher to never return. -# This regex should avoid those problems. -# Use to_unicode instead of tornado.util.u - we don't want backslashes getting -# processed as escapes.
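The byte/unicode helpers removed above keep a symmetric contract that the typed rewrite later in this hunk preserves: both pass None through unchanged and raise TypeError for non-string types. A quick sketch:

    from tornado.escape import utf8, to_unicode

    assert utf8("héllo") == b"h\xc3\xa9llo"  # str -> UTF-8 bytes
    assert to_unicode(b"h\xc3\xa9llo") == "héllo"  # bytes -> str
    assert utf8(None) is None and to_unicode(None) is None  # None passes through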
-_URL_RE = re.compile(to_unicode( - r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""" # noqa: E501 -)) - - -def linkify(text, shorten=False, extra_params="", - require_protocol=False, permitted_protocols=["http", "https"]): - """Converts plain text into HTML with links. - - For example: ``linkify("Hello http://tornadoweb.org!")`` would return - ``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!`` - - Parameters: - - * ``shorten``: Long urls will be shortened for display. - - * ``extra_params``: Extra text to include in the link tag, or a callable - taking the link as an argument and returning the extra text - e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``, - or:: - - def extra_params_cb(url): - if url.startswith("http://example.com"): - return 'class="internal"' - else: - return 'class="external" rel="nofollow"' - linkify(text, extra_params=extra_params_cb) - - * ``require_protocol``: Only linkify urls which include a protocol. If - this is False, urls such as www.facebook.com will also be linkified. - - * ``permitted_protocols``: List (or set) of protocols which should be - linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp", - "mailto"])``. It is very unsafe to include protocols such as - ``javascript``. - """ - if extra_params and not callable(extra_params): - extra_params = " " + extra_params.strip() - - def make_link(m): - url = m.group(1) - proto = m.group(2) - if require_protocol and not proto: - return url # no protocol, no linkify - - if proto and proto not in permitted_protocols: - return url # bad protocol, no linkify - - href = m.group(1) - if not proto: - href = "http://" + href # no proto specified, use http - - if callable(extra_params): - params = " " + extra_params(href).strip() - else: - params = extra_params - - # clip long urls. max_len is just an approximation - max_len = 30 - if shorten and len(url) > max_len: - before_clip = url - if proto: - proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for : - else: - proto_len = 0 - - parts = url[proto_len:].split("/") - if len(parts) > 1: - # Grab the whole host part plus the first bit of the path - # The path is usually not that interesting once shortened - # (no more slug, etc), so it really just provides a little - # extra indication of shortening. - url = url[:proto_len] + parts[0] + "/" + \ - parts[1][:8].split('?')[0].split('.')[0] - - if len(url) > max_len * 1.5: # still too long - url = url[:max_len] - - if url != before_clip: - amp = url.rfind('&') - # avoid splitting html char entities - if amp > max_len - 5: - url = url[:amp] - url += "..." - - if len(url) >= len(before_clip): - url = before_clip - else: - # full url is visible on mouse-over (for those who don't - # have a status bar, such as Safari by default) - params += ' title="%s"' % href - - return u'<a href="%s"%s>%s</a>' % (href, params, url) - - # First HTML-escape so that our strings are all safe. - # The regex is modified to avoid character entities other than &amp; so - # that we won't pick up &quot;, etc.
- text = _unicode(xhtml_escape(text)) - return _URL_RE.sub(make_link, text) - - -def _convert_entity(m): - if m.group(1) == "#": - try: - if m.group(2)[:1].lower() == 'x': - return unichr(int(m.group(2)[1:], 16)) - else: - return unichr(int(m.group(2))) - except ValueError: - return "&#%s;" % m.group(2) - try: - return _HTML_UNICODE_MAP[m.group(2)] - except KeyError: - return "&%s;" % m.group(2) - - -def _build_unicode_map(): - unicode_map = {} - for name, value in htmlentitydefs.name2codepoint.items(): - unicode_map[name] = unichr(value) - return unicode_map - - -_HTML_UNICODE_MAP = _build_unicode_map() +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Escaping/unescaping methods for HTML, JSON, URLs, and others. + +Also includes a few other miscellaneous string manipulation functions that +have crept in over time. +""" + +import html.entities +import json +import re +import urllib.parse + +from tornado.util import unicode_type + +import typing +from typing import Union, Any, Optional, Dict, List, Callable + + +_XHTML_ESCAPE_RE = re.compile("[&<>\"']") +_XHTML_ESCAPE_DICT = { + "&": "&amp;", + "<": "&lt;", + ">": "&gt;", + '"': "&quot;", + "'": "&#39;", +} + + +def xhtml_escape(value: Union[str, bytes]) -> str: + """Escapes a string so it is valid within HTML or XML. + + Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``. + When used in attribute values the escaped strings must be enclosed + in quotes. + + .. versionchanged:: 3.2 + + Added the single quote to the list of escaped characters. + """ + return _XHTML_ESCAPE_RE.sub( + lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value) + ) + + +def xhtml_unescape(value: Union[str, bytes]) -> str: + """Un-escapes an XML-escaped string.""" + return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value)) + + +# The fact that json_encode wraps json.dumps is an implementation detail. +# Please see https://github.com/tornadoweb/tornado/pull/706 +# before sending a pull request that adds **kwargs to this function. +def json_encode(value: Any) -> str: + """JSON-encodes the given Python object.""" + # JSON permits but does not require forward slashes to be escaped. + # This is useful when json data is emitted in a <script> tag + # in HTML, as it prevents </script> tags from prematurely terminating + # the javascript. Some json libraries do this escaping by default, + # although python's standard library does not, so we do it here. + # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped + return json.dumps(value).replace("</", "<\\/") + + +def json_decode(value: Union[str, bytes]) -> Any: + """Returns Python objects for the given JSON string. + + Supports both `str` and `bytes` inputs. + """ + return json.loads(to_basestring(value)) + + +def squeeze(value: str) -> str: + """Replace all sequences of whitespace chars with a single space.""" + return re.sub(r"[\x00-\x20]+", " ", value).strip() + + +def url_escape(value: Union[str, bytes], plus: bool = True) -> str: + """Returns a URL-encoded version of the given value.
+ + If ``plus`` is true (the default), spaces will be represented + as "+" instead of "%20". This is appropriate for query strings + but not for the path component of a URL. Note that this default + is the reverse of Python's urllib module. + + .. versionadded:: 3.1 + The ``plus`` argument + """ + quote = urllib.parse.quote_plus if plus else urllib.parse.quote + return quote(utf8(value)) + + +@typing.overload +def url_unescape(value: Union[str, bytes], encoding: None, plus: bool = True) -> bytes: + pass + + +@typing.overload # noqa: F811 +def url_unescape( + value: Union[str, bytes], encoding: str = "utf-8", plus: bool = True +) -> str: + pass + + +def url_unescape( # noqa: F811 + value: Union[str, bytes], encoding: Optional[str] = "utf-8", plus: bool = True +) -> Union[str, bytes]: + """Decodes the given value from a URL. + + The argument may be either a byte or unicode string. + + If encoding is None, the result will be a byte string. Otherwise, + the result is a unicode string in the specified encoding. + + If ``plus`` is true (the default), plus signs will be interpreted + as spaces (literal plus signs must be represented as "%2B"). This + is appropriate for query strings and form-encoded values but not + for the path component of a URL. Note that this default is the + reverse of Python's urllib module. + + .. versionadded:: 3.1 + The ``plus`` argument + """ + if encoding is None: + if plus: + # unquote_to_bytes doesn't have a _plus variant + value = to_basestring(value).replace("+", " ") + return urllib.parse.unquote_to_bytes(value) + else: + unquote = urllib.parse.unquote_plus if plus else urllib.parse.unquote + return unquote(to_basestring(value), encoding=encoding) + + +def parse_qs_bytes( + qs: str, keep_blank_values: bool = False, strict_parsing: bool = False +) -> Dict[str, List[bytes]]: + """Parses a query string like urlparse.parse_qs, but returns the + values as byte strings. + + Keys still become type str (interpreted as latin1 in python3!) + because it's too painful to keep them as byte strings in + python3 and in practice they're nearly always ascii anyway. + """ + # This is gross, but python3 doesn't give us another way. + # Latin1 is the universal donor of character encodings. + result = urllib.parse.parse_qs( + qs, keep_blank_values, strict_parsing, encoding="latin1", errors="strict" + ) + encoded = {} + for k, v in result.items(): + encoded[k] = [i.encode("latin1") for i in v] + return encoded + + +_UTF8_TYPES = (bytes, type(None)) + + +@typing.overload +def utf8(value: bytes) -> bytes: + pass + + +@typing.overload # noqa: F811 +def utf8(value: str) -> bytes: + pass + + +@typing.overload # noqa: F811 +def utf8(value: None) -> None: + pass + + +def utf8(value: Union[None, str, bytes]) -> Optional[bytes]: # noqa: F811 + """Converts a string argument to a byte string. + + If the argument is already a byte string or None, it is returned unchanged. + Otherwise it must be a unicode string and is encoded as utf8. 
+ """ + if isinstance(value, _UTF8_TYPES): + return value + if not isinstance(value, unicode_type): + raise TypeError("Expected bytes, unicode, or None; got %r" % type(value)) + return value.encode("utf-8") + + +_TO_UNICODE_TYPES = (unicode_type, type(None)) + + +@typing.overload +def to_unicode(value: str) -> str: + pass + + +@typing.overload # noqa: F811 +def to_unicode(value: bytes) -> str: + pass + + +@typing.overload # noqa: F811 +def to_unicode(value: None) -> None: + pass + + +def to_unicode(value: Union[None, str, bytes]) -> Optional[str]: # noqa: F811 + """Converts a string argument to a unicode string. + + If the argument is already a unicode string or None, it is returned + unchanged. Otherwise it must be a byte string and is decoded as utf8. + """ + if isinstance(value, _TO_UNICODE_TYPES): + return value + if not isinstance(value, bytes): + raise TypeError("Expected bytes, unicode, or None; got %r" % type(value)) + return value.decode("utf-8") + + +# to_unicode was previously named _unicode not because it was private, +# but to avoid conflicts with the built-in unicode() function/type +_unicode = to_unicode + +# When dealing with the standard library across python 2 and 3 it is +# sometimes useful to have a direct conversion to the native string type +native_str = to_unicode +to_basestring = to_unicode + + +def recursive_unicode(obj: Any) -> Any: + """Walks a simple data structure, converting byte strings to unicode. + + Supports lists, tuples, and dictionaries. + """ + if isinstance(obj, dict): + return dict( + (recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items() + ) + elif isinstance(obj, list): + return list(recursive_unicode(i) for i in obj) + elif isinstance(obj, tuple): + return tuple(recursive_unicode(i) for i in obj) + elif isinstance(obj, bytes): + return to_unicode(obj) + else: + return obj + + +# I originally used the regex from +# http://daringfireball.net/2010/07/improved_regex_for_matching_urls +# but it gets all exponential on certain patterns (such as too many trailing +# dots), causing the regex matcher to never return. +# This regex should avoid those problems. +# Use to_unicode instead of tornado.util.u - we don't want backslashes getting +# processed as escapes. +_URL_RE = re.compile( + to_unicode( + r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&|")*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&|")*\)))+)""" # noqa: E501 + ) +) + + +def linkify( + text: Union[str, bytes], + shorten: bool = False, + extra_params: Union[str, Callable[[str], str]] = "", + require_protocol: bool = False, + permitted_protocols: List[str] = ["http", "https"], +) -> str: + """Converts plain text into HTML with links. + + For example: ``linkify("Hello http://tornadoweb.org!")`` would return + ``Hello http://tornadoweb.org!`` + + Parameters: + + * ``shorten``: Long urls will be shortened for display. + + * ``extra_params``: Extra text to include in the link tag, or a callable + taking the link as an argument and returning the extra text + e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``, + or:: + + def extra_params_cb(url): + if url.startswith("http://example.com"): + return 'class="internal"' + else: + return 'class="external" rel="nofollow"' + linkify(text, extra_params=extra_params_cb) + + * ``require_protocol``: Only linkify urls which include a protocol. If + this is False, urls such as www.facebook.com will also be linkified. + + * ``permitted_protocols``: List (or set) of protocols which should be + linkified, e.g. 
``linkify(text, permitted_protocols=["http", "ftp", + "mailto"])``. It is very unsafe to include protocols such as + ``javascript``. + """ + if extra_params and not callable(extra_params): + extra_params = " " + extra_params.strip() + + def make_link(m: typing.Match) -> str: + url = m.group(1) + proto = m.group(2) + if require_protocol and not proto: + return url # no protocol, no linkify + + if proto and proto not in permitted_protocols: + return url # bad protocol, no linkify + + href = m.group(1) + if not proto: + href = "http://" + href # no proto specified, use http + + if callable(extra_params): + params = " " + extra_params(href).strip() + else: + params = extra_params + + # clip long urls. max_len is just an approximation + max_len = 30 + if shorten and len(url) > max_len: + before_clip = url + if proto: + proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for : + else: + proto_len = 0 + + parts = url[proto_len:].split("/") + if len(parts) > 1: + # Grab the whole host part plus the first bit of the path + # The path is usually not that interesting once shortened + # (no more slug, etc), so it really just provides a little + # extra indication of shortening. + url = ( + url[:proto_len] + + parts[0] + + "/" + + parts[1][:8].split("?")[0].split(".")[0] + ) + + if len(url) > max_len * 1.5: # still too long + url = url[:max_len] + + if url != before_clip: + amp = url.rfind("&") + # avoid splitting html char entities + if amp > max_len - 5: + url = url[:amp] + url += "..." + + if len(url) >= len(before_clip): + url = before_clip + else: + # full url is visible on mouse-over (for those who don't + # have a status bar, such as Safari by default) + params += ' title="%s"' % href + + return u'<a href="%s"%s>%s</a>' % (href, params, url) + + # First HTML-escape so that our strings are all safe. + # The regex is modified to avoid character entities other than &amp; so + # that we won't pick up &quot;, etc. + text = _unicode(xhtml_escape(text)) + return _URL_RE.sub(make_link, text) + + +def _convert_entity(m: typing.Match) -> str: + if m.group(1) == "#": + try: + if m.group(2)[:1].lower() == "x": + return chr(int(m.group(2)[1:], 16)) + else: + return chr(int(m.group(2))) + except ValueError: + return "&#%s;" % m.group(2) + try: + return _HTML_UNICODE_MAP[m.group(2)] + except KeyError: + return "&%s;" % m.group(2) + + +def _build_unicode_map() -> Dict[str, str]: + unicode_map = {} + for name, value in html.entities.name2codepoint.items(): + unicode_map[name] = chr(value) + return unicode_map + + +_HTML_UNICODE_MAP = _build_unicode_map() diff --git a/server/www/packages/packages-linux/x64/tornado/gen.py b/server/www/packages/packages-linux/x64/tornado/gen.py index 3556374..6c1e394 100644 --- a/server/www/packages/packages-linux/x64/tornado/gen.py +++ b/server/www/packages/packages-linux/x64/tornado/gen.py @@ -1,1367 +1,845 @@ -"""``tornado.gen`` implements generator-based coroutines. - -.. note:: - - The "decorator and generator" approach in this module is a - precursor to native coroutines (using ``async def`` and ``await``) - which were introduced in Python 3.5. Applications that do not - require compatibility with older versions of Python should use - native coroutines instead. Some parts of this module are still - useful with native coroutines, notably `multi`, `sleep`, - `WaitIterator`, and `with_timeout`. Some of these functions have - counterparts in the `asyncio` module which may be used as well, - although the two may not necessarily be 100% compatible.
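The note above explains why so much of this file can be deleted: on Python 3.5+ the decorator is unnecessary. A hedged sketch of the callback example that follows, rewritten as a native coroutine (do_something_with_response is the docstring's own placeholder):

    from tornado.httpclient import AsyncHTTPClient
    from tornado.web import RequestHandler

    class NativeAsyncHandler(RequestHandler):
        async def get(self):
            http_client = AsyncHTTPClient()
            response = await http_client.fetch("http://example.com")
            do_something_with_response(response)  # placeholder helper
            self.render("template.html")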
- -Coroutines provide an easier way to work in an asynchronous -environment than chaining callbacks. Code using coroutines is -technically asynchronous, but it is written as a single generator -instead of a collection of separate functions. - -For example, the following callback-based asynchronous handler: - -.. testcode:: - - class AsyncHandler(RequestHandler): - @asynchronous - def get(self): - http_client = AsyncHTTPClient() - http_client.fetch("http://example.com", - callback=self.on_fetch) - - def on_fetch(self, response): - do_something_with_response(response) - self.render("template.html") - -.. testoutput:: - :hide: - -could be written with ``gen`` as: - -.. testcode:: - - class GenAsyncHandler(RequestHandler): - @gen.coroutine - def get(self): - http_client = AsyncHTTPClient() - response = yield http_client.fetch("http://example.com") - do_something_with_response(response) - self.render("template.html") - -.. testoutput:: - :hide: - -Most asynchronous functions in Tornado return a `.Future`; -yielding this object returns its ``Future.result``. - -You can also yield a list or dict of ``Futures``, which will be -started at the same time and run in parallel; a list or dict of results will -be returned when they are all finished: - -.. testcode:: - - @gen.coroutine - def get(self): - http_client = AsyncHTTPClient() - response1, response2 = yield [http_client.fetch(url1), - http_client.fetch(url2)] - response_dict = yield dict(response3=http_client.fetch(url3), - response4=http_client.fetch(url4)) - response3 = response_dict['response3'] - response4 = response_dict['response4'] - -.. testoutput:: - :hide: - -If the `~functools.singledispatch` library is available (standard in -Python 3.4, available via the `singledispatch -<https://pypi.python.org/pypi/singledispatch>`_ package on older -versions), additional types of objects may be yielded. Tornado includes -support for ``asyncio.Future`` and Twisted's ``Deferred`` class when -``tornado.platform.asyncio`` and ``tornado.platform.twisted`` are imported. -See the `convert_yielded` function to extend this mechanism. - -.. versionchanged:: 3.2 - Dict support added. - -.. versionchanged:: 4.1 - Support added for yielding ``asyncio`` Futures and Twisted Deferreds - via ``singledispatch``. - -""" -from __future__ import absolute_import, division, print_function - -import collections -import functools -import itertools -import os -import sys -import types -import warnings - -from tornado.concurrent import (Future, is_future, chain_future, future_set_exc_info, - future_add_done_callback, future_set_result_unless_cancelled) -from tornado.ioloop import IOLoop -from tornado.log import app_log -from tornado import stack_context -from tornado.util import PY3, raise_exc_info, TimeoutError - -try: - try: - # py34+ - from functools import singledispatch # type: ignore - except ImportError: - from singledispatch import singledispatch # backport -except ImportError: - # In most cases, singledispatch is required (to avoid - # difficult-to-diagnose problems in which the functionality - # available differs depending on which invisible packages are - # installed). However, in Google App Engine third-party - # dependencies are more trouble so we allow this module to be - # imported without it.
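Earlier in the removed docstring, lists and dicts of futures are shown being yielded to run in parallel; the same behavior remains available to native coroutines through gen.multi, since a bare list cannot be awaited directly. A sketch (URLs are placeholders):

    from tornado import gen
    from tornado.httpclient import AsyncHTTPClient

    async def fetch_both():
        client = AsyncHTTPClient()
        r1, r2 = await gen.multi(
            [client.fetch("http://example.com/a"), client.fetch("http://example.com/b")]
        )
        return r1.code, r2.code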
- if 'APPENGINE_RUNTIME' not in os.environ: - raise - singledispatch = None - -try: - try: - # py35+ - from collections.abc import Generator as GeneratorType # type: ignore - except ImportError: - from backports_abc import Generator as GeneratorType # type: ignore - - try: - # py35+ - from inspect import isawaitable # type: ignore - except ImportError: - from backports_abc import isawaitable -except ImportError: - if 'APPENGINE_RUNTIME' not in os.environ: - raise - from types import GeneratorType - - def isawaitable(x): # type: ignore - return False - -if PY3: - import builtins -else: - import __builtin__ as builtins - - -class KeyReuseError(Exception): - pass - - -class UnknownKeyError(Exception): - pass - - -class LeakedCallbackError(Exception): - pass - - -class BadYieldError(Exception): - pass - - -class ReturnValueIgnoredError(Exception): - pass - - -def _value_from_stopiteration(e): - try: - # StopIteration has a value attribute beginning in py33. - # So does our Return class. - return e.value - except AttributeError: - pass - try: - # Cython backports coroutine functionality by putting the value in - # e.args[0]. - return e.args[0] - except (AttributeError, IndexError): - return None - - -def _create_future(): - future = Future() - # Fixup asyncio debug info by removing extraneous stack entries - source_traceback = getattr(future, "_source_traceback", ()) - while source_traceback: - # Each traceback entry is equivalent to a - # (filename, self.lineno, self.name, self.line) tuple - filename = source_traceback[-1][0] - if filename == __file__: - del source_traceback[-1] - else: - break - return future - - -def engine(func): - """Callback-oriented decorator for asynchronous generators. - - This is an older interface; for new code that does not need to be - compatible with versions of Tornado older than 3.0 the - `coroutine` decorator is recommended instead. - - This decorator is similar to `coroutine`, except it does not - return a `.Future` and the ``callback`` argument is not treated - specially. - - In most cases, functions decorated with `engine` should take - a ``callback`` argument and invoke it with their result when - they are finished. One notable exception is the - `~tornado.web.RequestHandler` :ref:`HTTP verb methods <verbs>`, - which use ``self.finish()`` in place of a callback argument. - - .. deprecated:: 5.1 - - This decorator will be removed in 6.0. Use `coroutine` or - ``async def`` instead. - """ - warnings.warn("gen.engine is deprecated, use gen.coroutine or async def instead", - DeprecationWarning) - func = _make_coroutine_wrapper(func, replace_callback=False) - - @functools.wraps(func) - def wrapper(*args, **kwargs): - future = func(*args, **kwargs) - - def final_callback(future): - if future.result() is not None: - raise ReturnValueIgnoredError( - "@gen.engine functions cannot return values: %r" % - (future.result(),)) - # The engine interface doesn't give us any way to return - # errors but to raise them into the stack context. - # Save the stack context here to use when the Future has resolved. - future_add_done_callback(future, stack_context.wrap(final_callback)) - return wrapper - - -def coroutine(func): - """Decorator for asynchronous generators. - - Any generator that yields objects from this module must be wrapped - in either this decorator or `engine`. - - Coroutines may "return" by raising the special exception - `Return(value) <Return>`.
In Python 3.3+, it is also possible for - the function to simply use the ``return value`` statement (prior to - Python 3.3 generators were not allowed to also return values). - In all versions of Python a coroutine that simply wishes to exit - early may use the ``return`` statement without a value. - - Functions with this decorator return a `.Future`. Additionally, - they may be called with a ``callback`` keyword argument, which - will be invoked with the future's result when it resolves. If the - coroutine fails, the callback will not be run and an exception - will be raised into the surrounding `.StackContext`. The - ``callback`` argument is not visible inside the decorated - function; it is handled by the decorator itself. - - .. warning:: - - When exceptions occur inside a coroutine, the exception - information will be stored in the `.Future` object. You must - examine the result of the `.Future` object, or the exception - may go unnoticed by your code. This means yielding the function - if called from another coroutine, using something like - `.IOLoop.run_sync` for top-level calls, or passing the `.Future` - to `.IOLoop.add_future`. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - return _make_coroutine_wrapper(func, replace_callback=True) - - -def _make_coroutine_wrapper(func, replace_callback): - """The inner workings of ``@gen.coroutine`` and ``@gen.engine``. - - The two decorators differ in their treatment of the ``callback`` - argument, so we cannot simply implement ``@engine`` in terms of - ``@coroutine``. - """ - # On Python 3.5, set the coroutine flag on our generator, to allow it - # to be used with 'await'. - wrapped = func - if hasattr(types, 'coroutine'): - func = types.coroutine(func) - - @functools.wraps(wrapped) - def wrapper(*args, **kwargs): - future = _create_future() - - if replace_callback and 'callback' in kwargs: - warnings.warn("callback arguments are deprecated, use the returned Future instead", - DeprecationWarning, stacklevel=2) - callback = kwargs.pop('callback') - IOLoop.current().add_future( - future, lambda future: callback(future.result())) - - try: - result = func(*args, **kwargs) - except (Return, StopIteration) as e: - result = _value_from_stopiteration(e) - except Exception: - future_set_exc_info(future, sys.exc_info()) - try: - return future - finally: - # Avoid circular references - future = None - else: - if isinstance(result, GeneratorType): - # Inline the first iteration of Runner.run. This lets us - # avoid the cost of creating a Runner when the coroutine - # never actually yields, which in turn allows us to - # use "optional" coroutines in critical path code without - # performance penalty for the synchronous case. - try: - orig_stack_contexts = stack_context._state.contexts - yielded = next(result) - if stack_context._state.contexts is not orig_stack_contexts: - yielded = _create_future() - yielded.set_exception( - stack_context.StackContextInconsistentError( - 'stack_context inconsistency (probably caused ' - 'by yield within a "with StackContext" block)')) - except (StopIteration, Return) as e: - future_set_result_unless_cancelled(future, _value_from_stopiteration(e)) - except Exception: - future_set_exc_info(future, sys.exc_info()) - else: - # Provide strong references to Runner objects as long - # as their result future objects also have strong - # references (typically from the parent coroutine's - # Runner). 
This keeps the coroutine's Runner alive. - # We do this by exploiting the public API - # add_done_callback() instead of putting a private - # attribute on the Future. - # (Github issues #1769, #2229). - runner = Runner(result, future, yielded) - future.add_done_callback(lambda _: runner) - yielded = None - try: - return future - finally: - # Subtle memory optimization: if next() raised an exception, - # the future's exc_info contains a traceback which - # includes this stack frame. This creates a cycle, - # which will be collected at the next full GC but has - # been shown to greatly increase memory usage of - # benchmarks (relative to the refcount-based scheme - # used in the absence of cycles). We can avoid the - # cycle by clearing the local variable after we return it. - future = None - future_set_result_unless_cancelled(future, result) - return future - - wrapper.__wrapped__ = wrapped - wrapper.__tornado_coroutine__ = True - return wrapper - - -def is_coroutine_function(func): - """Return whether *func* is a coroutine function, i.e. a function - wrapped with `~.gen.coroutine`. - - .. versionadded:: 4.5 - """ - return getattr(func, '__tornado_coroutine__', False) - - -class Return(Exception): - """Special exception to return a value from a `coroutine`. - - If this exception is raised, its value argument is used as the - result of the coroutine:: - - @gen.coroutine - def fetch_json(url): - response = yield AsyncHTTPClient().fetch(url) - raise gen.Return(json_decode(response.body)) - - In Python 3.3, this exception is no longer necessary: the ``return`` - statement can be used directly to return a value (previously - ``yield`` and ``return`` with a value could not be combined in the - same function). - - By analogy with the return statement, the value argument is optional, - but it is never necessary to ``raise gen.Return()``. The ``return`` - statement can be used with no arguments instead. - """ - def __init__(self, value=None): - super(Return, self).__init__() - self.value = value - # Cython recognizes subclasses of StopIteration with a .args tuple. - self.args = (value,) - - -class WaitIterator(object): - """Provides an iterator to yield the results of futures as they finish. - - Yielding a set of futures like this: - - ``results = yield [future1, future2]`` - - pauses the coroutine until both ``future1`` and ``future2`` - return, and then restarts the coroutine with the results of both - futures. If either future is an exception, the expression will - raise that exception and all the results will be lost. - - If you need to get the result of each future as soon as possible, - or if you need the result of some futures even if others produce - errors, you can use ``WaitIterator``:: - - wait_iterator = gen.WaitIterator(future1, future2) - while not wait_iterator.done(): - try: - result = yield wait_iterator.next() - except Exception as e: - print("Error {} from {}".format(e, wait_iterator.current_future)) - else: - print("Result {} received from {} at {}".format( - result, wait_iterator.current_future, - wait_iterator.current_index)) - - Because results are returned as soon as they are available the - output from the iterator *will not be in the same order as the - input arguments*. If you need to know which future produced the - current result, you can use the attributes - ``WaitIterator.current_future``, or ``WaitIterator.current_index`` - to get the index of the future from the input list. 
(if keyword - arguments were used in the construction of the `WaitIterator`, - ``current_index`` will use the corresponding keyword). - - On Python 3.5, `WaitIterator` implements the async iterator - protocol, so it can be used with the ``async for`` statement (note - that in this version the entire iteration is aborted if any value - raises an exception, while the previous example can continue past - individual errors):: - - async for result in gen.WaitIterator(future1, future2): - print("Result {} received from {} at {}".format( - result, wait_iterator.current_future, - wait_iterator.current_index)) - - .. versionadded:: 4.1 - - .. versionchanged:: 4.3 - Added ``async for`` support in Python 3.5. - - """ - def __init__(self, *args, **kwargs): - if args and kwargs: - raise ValueError( - "You must provide args or kwargs, not both") - - if kwargs: - self._unfinished = dict((f, k) for (k, f) in kwargs.items()) - futures = list(kwargs.values()) - else: - self._unfinished = dict((f, i) for (i, f) in enumerate(args)) - futures = args - - self._finished = collections.deque() - self.current_index = self.current_future = None - self._running_future = None - - for future in futures: - future_add_done_callback(future, self._done_callback) - - def done(self): - """Returns True if this iterator has no more results.""" - if self._finished or self._unfinished: - return False - # Clear the 'current' values when iteration is done. - self.current_index = self.current_future = None - return True - - def next(self): - """Returns a `.Future` that will yield the next available result. - - Note that this `.Future` will not be the same object as any of - the inputs. - """ - self._running_future = Future() - - if self._finished: - self._return_result(self._finished.popleft()) - - return self._running_future - - def _done_callback(self, done): - if self._running_future and not self._running_future.done(): - self._return_result(done) - else: - self._finished.append(done) - - def _return_result(self, done): - """Called to set the returned future's state to that of the future - we yielded, and to set the current future for the iterator. - """ - chain_future(done, self._running_future) - - self.current_future = done - self.current_index = self._unfinished.pop(done) - - def __aiter__(self): - return self - - def __anext__(self): - if self.done(): - # Lookup by name to silence pyflakes on older versions. - raise getattr(builtins, 'StopAsyncIteration')() - return self.next() - - -class YieldPoint(object): - """Base class for objects that may be yielded from the generator. - - .. deprecated:: 4.0 - Use `Futures <.Future>` instead. This class and all its subclasses - will be removed in 6.0 - """ - def __init__(self): - warnings.warn("YieldPoint is deprecated, use Futures instead", - DeprecationWarning) - - def start(self, runner): - """Called by the runner after the generator has yielded. - - No other methods will be called on this object before ``start``. - """ - raise NotImplementedError() - - def is_ready(self): - """Called by the runner to determine whether to resume the generator. - - Returns a boolean; may be called more than once. - """ - raise NotImplementedError() - - def get_result(self): - """Returns the value to use as the result of the yield expression. - - This method will only be called once, and only after `is_ready` - has returned true. - """ - raise NotImplementedError() - - -class Callback(YieldPoint): - """Returns a callable object that will allow a matching `Wait` to proceed.
- - The key may be any value suitable for use as a dictionary key, and is - used to match ``Callbacks`` to their corresponding ``Waits``. The key - must be unique among outstanding callbacks within a single run of the - generator function, but may be reused across different runs of the same - function (so constants generally work fine). - - The callback may be called with zero or one arguments; if an argument - is given it will be returned by `Wait`. - - .. deprecated:: 4.0 - Use `Futures <.Future>` instead. This class will be removed in 6.0. - """ - def __init__(self, key): - warnings.warn("gen.Callback is deprecated, use Futures instead", - DeprecationWarning) - self.key = key - - def start(self, runner): - self.runner = runner - runner.register_callback(self.key) - - def is_ready(self): - return True - - def get_result(self): - return self.runner.result_callback(self.key) - - -class Wait(YieldPoint): - """Returns the argument passed to the result of a previous `Callback`. - - .. deprecated:: 4.0 - Use `Futures <.Future>` instead. This class will be removed in 6.0. - """ - def __init__(self, key): - warnings.warn("gen.Wait is deprecated, use Futures instead", - DeprecationWarning) - self.key = key - - def start(self, runner): - self.runner = runner - - def is_ready(self): - return self.runner.is_ready(self.key) - - def get_result(self): - return self.runner.pop_result(self.key) - - -class WaitAll(YieldPoint): - """Returns the results of multiple previous `Callbacks `. - - The argument is a sequence of `Callback` keys, and the result is - a list of results in the same order. - - `WaitAll` is equivalent to yielding a list of `Wait` objects. - - .. deprecated:: 4.0 - Use `Futures <.Future>` instead. This class will be removed in 6.0. - """ - def __init__(self, keys): - warnings.warn("gen.WaitAll is deprecated, use gen.multi instead", - DeprecationWarning) - self.keys = keys - - def start(self, runner): - self.runner = runner - - def is_ready(self): - return all(self.runner.is_ready(key) for key in self.keys) - - def get_result(self): - return [self.runner.pop_result(key) for key in self.keys] - - -def Task(func, *args, **kwargs): - """Adapts a callback-based asynchronous function for use in coroutines. - - Takes a function (and optional additional arguments) and runs it with - those arguments plus a ``callback`` keyword argument. The argument passed - to the callback is returned as the result of the yield expression. - - .. versionchanged:: 4.0 - ``gen.Task`` is now a function that returns a `.Future`, instead of - a subclass of `YieldPoint`. It still behaves the same way when - yielded. - - .. deprecated:: 5.1 - This function is deprecated and will be removed in 6.0. - """ - warnings.warn("gen.Task is deprecated, use Futures instead", - DeprecationWarning) - future = _create_future() - - def handle_exception(typ, value, tb): - if future.done(): - return False - future_set_exc_info(future, (typ, value, tb)) - return True - - def set_result(result): - if future.done(): - return - future_set_result_unless_cancelled(future, result) - with stack_context.ExceptionStackContext(handle_exception): - func(*args, callback=_argument_adapter(set_result), **kwargs) - return future - - -class YieldFuture(YieldPoint): - def __init__(self, future): - """Adapts a `.Future` to the `YieldPoint` interface. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. deprecated:: 5.1 - This class will be removed in 6.0. 
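For context, the `Callback`/`Wait` pair and `gen.Task` removed here supported the pre-`Future` callback style. A rough sketch of that idiom, reconstructed from the docstrings above (the handler and URL are placeholders, and `gen.engine` is the old companion decorator, which does not appear in this hunk):

    @gen.engine
    def get(self):
        http_client = AsyncHTTPClient()
        # Register a callback under a key, then suspend until it fires.
        http_client.fetch("http://example.com",
                          callback=(yield gen.Callback("fetch")))
        response = yield gen.Wait("fetch")
        # gen.Task collapses the Callback/Wait pair into a single yield.
        response2 = yield gen.Task(http_client.fetch, "http://example.com")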
- """ - warnings.warn("YieldFuture is deprecated, use Futures instead", - DeprecationWarning) - self.future = future - self.io_loop = IOLoop.current() - - def start(self, runner): - if not self.future.done(): - self.runner = runner - self.key = object() - runner.register_callback(self.key) - self.io_loop.add_future(self.future, runner.result_callback(self.key)) - else: - self.runner = None - self.result_fn = self.future.result - - def is_ready(self): - if self.runner is not None: - return self.runner.is_ready(self.key) - else: - return True - - def get_result(self): - if self.runner is not None: - return self.runner.pop_result(self.key).result() - else: - return self.result_fn() - - -def _contains_yieldpoint(children): - """Returns True if ``children`` contains any YieldPoints. - - ``children`` may be a dict or a list, as used by `MultiYieldPoint` - and `multi_future`. - """ - if isinstance(children, dict): - return any(isinstance(i, YieldPoint) for i in children.values()) - if isinstance(children, list): - return any(isinstance(i, YieldPoint) for i in children) - return False - - -def multi(children, quiet_exceptions=()): - """Runs multiple asynchronous operations in parallel. - - ``children`` may either be a list or a dict whose values are - yieldable objects. ``multi()`` returns a new yieldable - object that resolves to a parallel structure containing their - results. If ``children`` is a list, the result is a list of - results in the same order; if it is a dict, the result is a dict - with the same keys. - - That is, ``results = yield multi(list_of_futures)`` is equivalent - to:: - - results = [] - for future in list_of_futures: - results.append(yield future) - - If any children raise exceptions, ``multi()`` will raise the first - one. All others will be logged, unless they are of types - contained in the ``quiet_exceptions`` argument. - - If any of the inputs are `YieldPoints `, the returned - yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`. - This means that the result of `multi` can be used in a native - coroutine if and only if all of its children can be. - - In a ``yield``-based coroutine, it is not normally necessary to - call this function directly, since the coroutine runner will - do it automatically when a list or dict is yielded. However, - it is necessary in ``await``-based coroutines, or to pass - the ``quiet_exceptions`` argument. - - This function is available under the names ``multi()`` and ``Multi()`` - for historical reasons. - - Cancelling a `.Future` returned by ``multi()`` does not cancel its - children. `asyncio.gather` is similar to ``multi()``, but it does - cancel its children. - - .. versionchanged:: 4.2 - If multiple yieldables fail, any exceptions after the first - (which is raised) will be logged. Added the ``quiet_exceptions`` - argument to suppress this logging for selected exception types. - - .. versionchanged:: 4.3 - Replaced the class ``Multi`` and the function ``multi_future`` - with a unified function ``multi``. Added support for yieldables - other than `YieldPoint` and `.Future`. - - """ - if _contains_yieldpoint(children): - return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions) - else: - return multi_future(children, quiet_exceptions=quiet_exceptions) - - -Multi = multi - - -class MultiYieldPoint(YieldPoint): - """Runs multiple asynchronous operations in parallel. - - This class is similar to `multi`, but it always creates a stack - context even when no children require it. 
It is not compatible with - native coroutines. - - .. versionchanged:: 4.2 - If multiple ``YieldPoints`` fail, any exceptions after the first - (which is raised) will be logged. Added the ``quiet_exceptions`` - argument to suppress this logging for selected exception types. - - .. versionchanged:: 4.3 - Renamed from ``Multi`` to ``MultiYieldPoint``. The name ``Multi`` - remains as an alias for the equivalent `multi` function. - - .. deprecated:: 4.3 - Use `multi` instead. This class will be removed in 6.0. - """ - def __init__(self, children, quiet_exceptions=()): - warnings.warn("MultiYieldPoint is deprecated, use Futures instead", - DeprecationWarning) - self.keys = None - if isinstance(children, dict): - self.keys = list(children.keys()) - children = children.values() - self.children = [] - for i in children: - if not isinstance(i, YieldPoint): - i = convert_yielded(i) - if is_future(i): - i = YieldFuture(i) - self.children.append(i) - assert all(isinstance(i, YieldPoint) for i in self.children) - self.unfinished_children = set(self.children) - self.quiet_exceptions = quiet_exceptions - - def start(self, runner): - for i in self.children: - i.start(runner) - - def is_ready(self): - finished = list(itertools.takewhile( - lambda i: i.is_ready(), self.unfinished_children)) - self.unfinished_children.difference_update(finished) - return not self.unfinished_children - - def get_result(self): - result_list = [] - exc_info = None - for f in self.children: - try: - result_list.append(f.get_result()) - except Exception as e: - if exc_info is None: - exc_info = sys.exc_info() - else: - if not isinstance(e, self.quiet_exceptions): - app_log.error("Multiple exceptions in yield list", - exc_info=True) - if exc_info is not None: - raise_exc_info(exc_info) - if self.keys is not None: - return dict(zip(self.keys, result_list)) - else: - return list(result_list) - - -def multi_future(children, quiet_exceptions=()): - """Wait for multiple asynchronous futures in parallel. - - This function is similar to `multi`, but does not support - `YieldPoints `. - - .. versionadded:: 4.0 - - .. versionchanged:: 4.2 - If multiple ``Futures`` fail, any exceptions after the first (which is - raised) will be logged. Added the ``quiet_exceptions`` - argument to suppress this logging for selected exception types. - - .. deprecated:: 4.3 - Use `multi` instead. - """ - if isinstance(children, dict): - keys = list(children.keys()) - children = children.values() - else: - keys = None - children = list(map(convert_yielded, children)) - assert all(is_future(i) or isinstance(i, _NullFuture) for i in children) - unfinished_children = set(children) - - future = _create_future() - if not children: - future_set_result_unless_cancelled(future, - {} if keys is not None else []) - - def callback(f): - unfinished_children.remove(f) - if not unfinished_children: - result_list = [] - for f in children: - try: - result_list.append(f.result()) - except Exception as e: - if future.done(): - if not isinstance(e, quiet_exceptions): - app_log.error("Multiple exceptions in yield list", - exc_info=True) - else: - future_set_exc_info(future, sys.exc_info()) - if not future.done(): - if keys is not None: - future_set_result_unless_cancelled(future, - dict(zip(keys, result_list))) - else: - future_set_result_unless_cancelled(future, result_list) - - listening = set() - for f in children: - if f not in listening: - listening.add(f) - future_add_done_callback(f, callback) - return future - - -def maybe_future(x): - """Converts ``x`` into a `.Future`. 
- - If ``x`` is already a `.Future`, it is simply returned; otherwise - it is wrapped in a new `.Future`. This is suitable for use as - ``result = yield gen.maybe_future(f())`` when you don't know whether - ``f()`` returns a `.Future` or not. - - .. deprecated:: 4.3 - This function only handles ``Futures``, not other yieldable objects. - Instead of `maybe_future`, check for the non-future result types - you expect (often just ``None``), and ``yield`` anything unknown. - """ - if is_future(x): - return x - else: - fut = _create_future() - fut.set_result(x) - return fut - - -def with_timeout(timeout, future, quiet_exceptions=()): - """Wraps a `.Future` (or other yieldable object) in a timeout. - - Raises `tornado.util.TimeoutError` if the input future does not - complete before ``timeout``, which may be specified in any form - allowed by `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or - an absolute time relative to `.IOLoop.time`) - - If the wrapped `.Future` fails after it has timed out, the exception - will be logged unless it is of a type contained in ``quiet_exceptions`` - (which may be an exception type or a sequence of types). - - Does not support `YieldPoint` subclasses. - - The wrapped `.Future` is not canceled when the timeout expires, - permitting it to be reused. `asyncio.wait_for` is similar to this - function but it does cancel the wrapped `.Future` on timeout. - - .. versionadded:: 4.0 - - .. versionchanged:: 4.1 - Added the ``quiet_exceptions`` argument and the logging of unhandled - exceptions. - - .. versionchanged:: 4.4 - Added support for yieldable objects other than `.Future`. - - """ - # TODO: allow YieldPoints in addition to other yieldables? - # Tricky to do with stack_context semantics. - # - # It's tempting to optimize this by cancelling the input future on timeout - # instead of creating a new one, but A) we can't know if we are the only - # one waiting on the input future, so cancelling it might disrupt other - # callers and B) concurrent futures can only be cancelled while they are - # in the queue, so cancellation cannot reliably bound our waiting time. - future = convert_yielded(future) - result = _create_future() - chain_future(future, result) - io_loop = IOLoop.current() - - def error_callback(future): - try: - future.result() - except Exception as e: - if not isinstance(e, quiet_exceptions): - app_log.error("Exception in Future %r after timeout", - future, exc_info=True) - - def timeout_callback(): - if not result.done(): - result.set_exception(TimeoutError("Timeout")) - # In case the wrapped future goes on to fail, log it. - future_add_done_callback(future, error_callback) - timeout_handle = io_loop.add_timeout( - timeout, timeout_callback) - if isinstance(future, Future): - # We know this future will resolve on the IOLoop, so we don't - # need the extra thread-safety of IOLoop.add_future (and we also - # don't care about StackContext here. - future_add_done_callback( - future, lambda future: io_loop.remove_timeout(timeout_handle)) - else: - # concurrent.futures.Futures may resolve on any thread, so we - # need to route them back to the IOLoop. - io_loop.add_future( - future, lambda future: io_loop.remove_timeout(timeout_handle)) - return result - - -def sleep(duration): - """Return a `.Future` that resolves after the given number of seconds. 
- - When used with ``yield`` in a coroutine, this is a non-blocking - analogue to `time.sleep` (which should not be used in coroutines - because it is blocking):: - - yield gen.sleep(0.5) - - Note that calling this function on its own does nothing; you must - wait on the `.Future` it returns (usually by yielding it). - - .. versionadded:: 4.1 - """ - f = _create_future() - IOLoop.current().call_later(duration, - lambda: future_set_result_unless_cancelled(f, None)) - return f - - -class _NullFuture(object): - """_NullFuture resembles a Future that finished with a result of None. - - It's not actually a `Future` to avoid depending on a particular event loop. - Handled as a special case in the coroutine runner. - """ - def result(self): - return None - - def done(self): - return True - - -# _null_future is used as a dummy value in the coroutine runner. It differs -# from moment in that moment always adds a delay of one IOLoop iteration -# while _null_future is processed as soon as possible. -_null_future = _NullFuture() - -moment = _NullFuture() -moment.__doc__ = \ - """A special object which may be yielded to allow the IOLoop to run for -one iteration. - -This is not needed in normal use but it can be helpful in long-running -coroutines that are likely to yield Futures that are ready instantly. - -Usage: ``yield gen.moment`` - -.. versionadded:: 4.0 - -.. deprecated:: 4.5 - ``yield None`` (or ``yield`` with no argument) is now equivalent to - ``yield gen.moment``. -""" - - -class Runner(object): - """Internal implementation of `tornado.gen.engine`. - - Maintains information about pending callbacks and their results. - - The results of the generator are stored in ``result_future`` (a - `.Future`) - """ - def __init__(self, gen, result_future, first_yielded): - self.gen = gen - self.result_future = result_future - self.future = _null_future - self.yield_point = None - self.pending_callbacks = None - self.results = None - self.running = False - self.finished = False - self.had_exception = False - self.io_loop = IOLoop.current() - # For efficiency, we do not create a stack context until we - # reach a YieldPoint (stack contexts are required for the historical - # semantics of YieldPoints, but not for Futures). When we have - # done so, this field will be set and must be called at the end - # of the coroutine. - self.stack_context_deactivate = None - if self.handle_yield(first_yielded): - gen = result_future = first_yielded = None - self.run() - - def register_callback(self, key): - """Adds ``key`` to the list of callbacks.""" - if self.pending_callbacks is None: - # Lazily initialize the old-style YieldPoint data structures. 
- self.pending_callbacks = set() - self.results = {} - if key in self.pending_callbacks: - raise KeyReuseError("key %r is already pending" % (key,)) - self.pending_callbacks.add(key) - - def is_ready(self, key): - """Returns true if a result is available for ``key``.""" - if self.pending_callbacks is None or key not in self.pending_callbacks: - raise UnknownKeyError("key %r is not pending" % (key,)) - return key in self.results - - def set_result(self, key, result): - """Sets the result for ``key`` and attempts to resume the generator.""" - self.results[key] = result - if self.yield_point is not None and self.yield_point.is_ready(): - try: - future_set_result_unless_cancelled(self.future, - self.yield_point.get_result()) - except: - future_set_exc_info(self.future, sys.exc_info()) - self.yield_point = None - self.run() - - def pop_result(self, key): - """Returns the result for ``key`` and unregisters it.""" - self.pending_callbacks.remove(key) - return self.results.pop(key) - - def run(self): - """Starts or resumes the generator, running until it reaches a - yield point that is not ready. - """ - if self.running or self.finished: - return - try: - self.running = True - while True: - future = self.future - if not future.done(): - return - self.future = None - try: - orig_stack_contexts = stack_context._state.contexts - exc_info = None - - try: - value = future.result() - except Exception: - self.had_exception = True - exc_info = sys.exc_info() - future = None - - if exc_info is not None: - try: - yielded = self.gen.throw(*exc_info) - finally: - # Break up a reference to itself - # for faster GC on CPython. - exc_info = None - else: - yielded = self.gen.send(value) - - if stack_context._state.contexts is not orig_stack_contexts: - self.gen.throw( - stack_context.StackContextInconsistentError( - 'stack_context inconsistency (probably caused ' - 'by yield within a "with StackContext" block)')) - except (StopIteration, Return) as e: - self.finished = True - self.future = _null_future - if self.pending_callbacks and not self.had_exception: - # If we ran cleanly without waiting on all callbacks - # raise an error (really more of a warning). If we - # had an exception then some callbacks may have been - # orphaned, so skip the check in that case. - raise LeakedCallbackError( - "finished without waiting for callbacks %r" % - self.pending_callbacks) - future_set_result_unless_cancelled(self.result_future, - _value_from_stopiteration(e)) - self.result_future = None - self._deactivate_stack_context() - return - except Exception: - self.finished = True - self.future = _null_future - future_set_exc_info(self.result_future, sys.exc_info()) - self.result_future = None - self._deactivate_stack_context() - return - if not self.handle_yield(yielded): - return - yielded = None - finally: - self.running = False - - def handle_yield(self, yielded): - # Lists containing YieldPoints require stack contexts; - # other lists are handled in convert_yielded. - if _contains_yieldpoint(yielded): - yielded = multi(yielded) - - if isinstance(yielded, YieldPoint): - # YieldPoints are too closely coupled to the Runner to go - # through the generic convert_yielded mechanism. 
- self.future = Future() - - def start_yield_point(): - try: - yielded.start(self) - if yielded.is_ready(): - future_set_result_unless_cancelled(self.future, yielded.get_result()) - else: - self.yield_point = yielded - except Exception: - self.future = Future() - future_set_exc_info(self.future, sys.exc_info()) - - if self.stack_context_deactivate is None: - # Start a stack context if this is the first - # YieldPoint we've seen. - with stack_context.ExceptionStackContext( - self.handle_exception) as deactivate: - self.stack_context_deactivate = deactivate - - def cb(): - start_yield_point() - self.run() - self.io_loop.add_callback(cb) - return False - else: - start_yield_point() - else: - try: - self.future = convert_yielded(yielded) - except BadYieldError: - self.future = Future() - future_set_exc_info(self.future, sys.exc_info()) - - if self.future is moment: - self.io_loop.add_callback(self.run) - return False - elif not self.future.done(): - def inner(f): - # Break a reference cycle to speed GC. - f = None # noqa - self.run() - self.io_loop.add_future( - self.future, inner) - return False - return True - - def result_callback(self, key): - return stack_context.wrap(_argument_adapter( - functools.partial(self.set_result, key))) - - def handle_exception(self, typ, value, tb): - if not self.running and not self.finished: - self.future = Future() - future_set_exc_info(self.future, (typ, value, tb)) - self.run() - return True - else: - return False - - def _deactivate_stack_context(self): - if self.stack_context_deactivate is not None: - self.stack_context_deactivate() - self.stack_context_deactivate = None - - -Arguments = collections.namedtuple('Arguments', ['args', 'kwargs']) - - -def _argument_adapter(callback): - """Returns a function that when invoked runs ``callback`` with one arg. - - If the function returned by this function is called with exactly - one argument, that argument is passed to ``callback``. Otherwise - the args tuple and kwargs dict are wrapped in an `Arguments` object. - """ - def wrapper(*args, **kwargs): - if kwargs or len(args) > 1: - callback(Arguments(args, kwargs)) - elif args: - callback(args[0]) - else: - callback(None) - return wrapper - - -# Convert Awaitables into Futures. -try: - import asyncio -except ImportError: - # Py2-compatible version for use with Cython. - # Copied from PEP 380. - @coroutine - def _wrap_awaitable(x): - if hasattr(x, '__await__'): - _i = x.__await__() - else: - _i = iter(x) - try: - _y = next(_i) - except StopIteration as _e: - _r = _value_from_stopiteration(_e) - else: - while 1: - try: - _s = yield _y - except GeneratorExit as _e: - try: - _m = _i.close - except AttributeError: - pass - else: - _m() - raise _e - except BaseException as _e: - _x = sys.exc_info() - try: - _m = _i.throw - except AttributeError: - raise _e - else: - try: - _y = _m(*_x) - except StopIteration as _e: - _r = _value_from_stopiteration(_e) - break - else: - try: - if _s is None: - _y = next(_i) - else: - _y = _i.send(_s) - except StopIteration as _e: - _r = _value_from_stopiteration(_e) - break - raise Return(_r) -else: - try: - _wrap_awaitable = asyncio.ensure_future - except AttributeError: - # asyncio.ensure_future was introduced in Python 3.4.4, but - # Debian jessie still ships with 3.4.2 so try the old name. - _wrap_awaitable = getattr(asyncio, 'async') - - -def convert_yielded(yielded): - """Convert a yielded object into a `.Future`. - - The default implementation accepts lists, dictionaries, and Futures. 
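The removed `_argument_adapter` above packs anything other than a single positional argument into the `Arguments` namedtuple; a small illustration using `print` as the wrapped callback:

    adapted = _argument_adapter(print)
    adapted(1)       # callback receives 1
    adapted()        # callback receives None
    adapted(1, 2)    # callback receives Arguments(args=(1, 2), kwargs={})
    adapted(x=3)     # callback receives Arguments(args=(), kwargs={'x': 3})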
- - If the `~functools.singledispatch` library is available, this function - may be extended to support additional types. For example:: - - @convert_yielded.register(asyncio.Future) - def _(asyncio_future): - return tornado.platform.asyncio.to_tornado_future(asyncio_future) - - .. versionadded:: 4.1 - """ - # Lists and dicts containing YieldPoints were handled earlier. - if yielded is None or yielded is moment: - return moment - elif yielded is _null_future: - return _null_future - elif isinstance(yielded, (list, dict)): - return multi(yielded) - elif is_future(yielded): - return yielded - elif isawaitable(yielded): - return _wrap_awaitable(yielded) - else: - raise BadYieldError("yielded unknown object %r" % (yielded,)) - - -if singledispatch is not None: - convert_yielded = singledispatch(convert_yielded) +"""``tornado.gen`` implements generator-based coroutines. + +.. note:: + + The "decorator and generator" approach in this module is a + precursor to native coroutines (using ``async def`` and ``await``) + which were introduced in Python 3.5. Applications that do not + require compatibility with older versions of Python should use + native coroutines instead. Some parts of this module are still + useful with native coroutines, notably `multi`, `sleep`, + `WaitIterator`, and `with_timeout`. Some of these functions have + counterparts in the `asyncio` module which may be used as well, + although the two may not necessarily be 100% compatible. + +Coroutines provide an easier way to work in an asynchronous +environment than chaining callbacks. Code using coroutines is +technically asynchronous, but it is written as a single generator +instead of a collection of separate functions. + +For example, here's a coroutine-based handler: + +.. testcode:: + + class GenAsyncHandler(RequestHandler): + @gen.coroutine + def get(self): + http_client = AsyncHTTPClient() + response = yield http_client.fetch("http://example.com") + do_something_with_response(response) + self.render("template.html") + +.. testoutput:: + :hide: + +Asynchronous functions in Tornado return an ``Awaitable`` or `.Future`; +yielding this object returns its result. + +You can also yield a list or dict of other yieldable objects, which +will be started at the same time and run in parallel; a list or dict +of results will be returned when they are all finished: + +.. testcode:: + + @gen.coroutine + def get(self): + http_client = AsyncHTTPClient() + response1, response2 = yield [http_client.fetch(url1), + http_client.fetch(url2)] + response_dict = yield dict(response3=http_client.fetch(url3), + response4=http_client.fetch(url4)) + response3 = response_dict['response3'] + response4 = response_dict['response4'] + +.. testoutput:: + :hide: + +If ``tornado.platform.twisted`` is imported, it is also possible to +yield Twisted's ``Deferred`` objects. See the `convert_yielded` +function to extend this mechanism. + +.. versionchanged:: 3.2 + Dict support added. + +.. versionchanged:: 4.1 + Support added for yielding ``asyncio`` Futures and Twisted Deferreds + via ``singledispatch``. 
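Since the new module docstring steers applications toward native coroutines, the handler from its first example looks like this in native form (a sketch; `do_something_with_response` is the docstring's own placeholder):

    class GenAsyncHandler(RequestHandler):
        async def get(self):
            http_client = AsyncHTTPClient()
            response = await http_client.fetch("http://example.com")
            do_something_with_response(response)
            self.render("template.html")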
+
+"""
+import asyncio
+import builtins
+import collections
+from collections.abc import Generator
+import concurrent.futures
+import datetime
+import functools
+from functools import singledispatch
+from inspect import isawaitable
+import sys
+import types
+
+from tornado.concurrent import (
+    Future,
+    is_future,
+    chain_future,
+    future_set_exc_info,
+    future_add_done_callback,
+    future_set_result_unless_cancelled,
+)
+from tornado.ioloop import IOLoop
+from tornado.log import app_log
+from tornado.util import TimeoutError
+
+import typing
+from typing import Union, Any, Callable, List, Type, Tuple, Awaitable, Dict
+
+if typing.TYPE_CHECKING:
+    from typing import Sequence, Deque, Optional, Set, Iterable  # noqa: F401
+
+_T = typing.TypeVar("_T")
+
+_Yieldable = Union[
+    None, Awaitable, List[Awaitable], Dict[Any, Awaitable], concurrent.futures.Future
+]
+
+
+class KeyReuseError(Exception):
+    pass
+
+
+class UnknownKeyError(Exception):
+    pass
+
+
+class LeakedCallbackError(Exception):
+    pass
+
+
+class BadYieldError(Exception):
+    pass
+
+
+class ReturnValueIgnoredError(Exception):
+    pass
+
+
+def _value_from_stopiteration(e: Union[StopIteration, "Return"]) -> Any:
+    try:
+        # StopIteration has a value attribute beginning in py33.
+        # So does our Return class.
+        return e.value
+    except AttributeError:
+        pass
+    try:
+        # Cython backports coroutine functionality by putting the value in
+        # e.args[0].
+        return e.args[0]
+    except (AttributeError, IndexError):
+        return None
+
+
+def _create_future() -> Future:
+    future = Future()  # type: Future
+    # Fixup asyncio debug info by removing extraneous stack entries
+    source_traceback = getattr(future, "_source_traceback", ())
+    while source_traceback:
+        # Each traceback entry is equivalent to a
+        # (filename, self.lineno, self.name, self.line) tuple
+        filename = source_traceback[-1][0]
+        if filename == __file__:
+            del source_traceback[-1]
+        else:
+            break
+    return future
+
+
+def coroutine(
+    func: Callable[..., "Generator[Any, Any, _T]"]
+) -> Callable[..., "Future[_T]"]:
+    """Decorator for asynchronous generators.
+
+    For compatibility with older versions of Python, coroutines may
+    also "return" by raising the special exception `Return(value)
+    <Return>`.
+
+    Functions with this decorator return a `.Future`.
+
+    .. warning::
+
+       When exceptions occur inside a coroutine, the exception
+       information will be stored in the `.Future` object. You must
+       examine the result of the `.Future` object, or the exception
+       may go unnoticed by your code. This means yielding the function
+       if called from another coroutine, using something like
+       `.IOLoop.run_sync` for top-level calls, or passing the `.Future`
+       to `.IOLoop.add_future`.
+
+    .. versionchanged:: 6.0
+
+       The ``callback`` argument was removed. Use the returned
+       awaitable object instead.
+
+    """
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        # type: (*Any, **Any) -> Future[_T]
+        # This function is type-annotated with a comment to work around
+        # https://bitbucket.org/pypy/pypy/issues/2868/segfault-with-args-type-annotation-in
+        future = _create_future()
+        try:
+            result = func(*args, **kwargs)
+        except (Return, StopIteration) as e:
+            result = _value_from_stopiteration(e)
+        except Exception:
+            future_set_exc_info(future, sys.exc_info())
+            try:
+                return future
+            finally:
+                # Avoid circular references
+                future = None  # type: ignore
+        else:
+            if isinstance(result, Generator):
+                # Inline the first iteration of Runner.run.
This lets us + # avoid the cost of creating a Runner when the coroutine + # never actually yields, which in turn allows us to + # use "optional" coroutines in critical path code without + # performance penalty for the synchronous case. + try: + yielded = next(result) + except (StopIteration, Return) as e: + future_set_result_unless_cancelled( + future, _value_from_stopiteration(e) + ) + except Exception: + future_set_exc_info(future, sys.exc_info()) + else: + # Provide strong references to Runner objects as long + # as their result future objects also have strong + # references (typically from the parent coroutine's + # Runner). This keeps the coroutine's Runner alive. + # We do this by exploiting the public API + # add_done_callback() instead of putting a private + # attribute on the Future. + # (Github issues #1769, #2229). + runner = Runner(result, future, yielded) + future.add_done_callback(lambda _: runner) + yielded = None + try: + return future + finally: + # Subtle memory optimization: if next() raised an exception, + # the future's exc_info contains a traceback which + # includes this stack frame. This creates a cycle, + # which will be collected at the next full GC but has + # been shown to greatly increase memory usage of + # benchmarks (relative to the refcount-based scheme + # used in the absence of cycles). We can avoid the + # cycle by clearing the local variable after we return it. + future = None # type: ignore + future_set_result_unless_cancelled(future, result) + return future + + wrapper.__wrapped__ = func # type: ignore + wrapper.__tornado_coroutine__ = True # type: ignore + return wrapper + + +def is_coroutine_function(func: Any) -> bool: + """Return whether *func* is a coroutine function, i.e. a function + wrapped with `~.gen.coroutine`. + + .. versionadded:: 4.5 + """ + return getattr(func, "__tornado_coroutine__", False) + + +class Return(Exception): + """Special exception to return a value from a `coroutine`. + + If this exception is raised, its value argument is used as the + result of the coroutine:: + + @gen.coroutine + def fetch_json(url): + response = yield AsyncHTTPClient().fetch(url) + raise gen.Return(json_decode(response.body)) + + In Python 3.3, this exception is no longer necessary: the ``return`` + statement can be used directly to return a value (previously + ``yield`` and ``return`` with a value could not be combined in the + same function). + + By analogy with the return statement, the value argument is optional, + but it is never necessary to ``raise gen.Return()``. The ``return`` + statement can be used with no arguments instead. + """ + + def __init__(self, value: Any = None) -> None: + super(Return, self).__init__() + self.value = value + # Cython recognizes subclasses of StopIteration with a .args tuple. + self.args = (value,) + + +class WaitIterator(object): + """Provides an iterator to yield the results of awaitables as they finish. + + Yielding a set of awaitables like this: + + ``results = yield [awaitable1, awaitable2]`` + + pauses the coroutine until both ``awaitable1`` and ``awaitable2`` + return, and then restarts the coroutine with the results of both + awaitables. If either awaitable raises an exception, the + expression will raise that exception and all the results will be + lost. 
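A concrete illustration of the all-or-nothing behavior just described, before the `WaitIterator` alternative below (`ok()` and `boom()` are hypothetical coroutines, the latter raising):

    @gen.coroutine
    def example():
        try:
            results = yield [ok(), boom()]  # boom()'s exception propagates
        except Exception:
            results = None                  # ok()'s result is lost here
        raise gen.Return(results)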
+
+    If you need to get the result of each awaitable as soon as possible,
+    or if you need the result of some awaitables even if others produce
+    errors, you can use ``WaitIterator``::
+
+      wait_iterator = gen.WaitIterator(awaitable1, awaitable2)
+      while not wait_iterator.done():
+          try:
+              result = yield wait_iterator.next()
+          except Exception as e:
+              print("Error {} from {}".format(e, wait_iterator.current_future))
+          else:
+              print("Result {} received from {} at {}".format(
+                  result, wait_iterator.current_future,
+                  wait_iterator.current_index))
+
+    Because results are returned as soon as they are available, the
+    output from the iterator *will not be in the same order as the
+    input arguments*. If you need to know which future produced the
+    current result, you can use the attributes
+    ``WaitIterator.current_future``, or ``WaitIterator.current_index``
+    to get the index of the awaitable from the input list. (If keyword
+    arguments were used in the construction of the `WaitIterator`,
+    ``current_index`` will use the corresponding keyword).
+
+    On Python 3.5, `WaitIterator` implements the async iterator
+    protocol, so it can be used with the ``async for`` statement (note
+    that in this version the entire iteration is aborted if any value
+    raises an exception, while the previous example can continue past
+    individual errors)::
+
+      async for result in gen.WaitIterator(future1, future2):
+          print("Result {} received from {} at {}".format(
+              result, wait_iterator.current_future,
+              wait_iterator.current_index))
+
+    .. versionadded:: 4.1
+
+    .. versionchanged:: 4.3
+       Added ``async for`` support in Python 3.5.
+
+    """
+
+    _unfinished = {}  # type: Dict[Future, Union[int, str]]
+
+    def __init__(self, *args: Future, **kwargs: Future) -> None:
+        if args and kwargs:
+            raise ValueError("You must provide args or kwargs, not both")
+
+        if kwargs:
+            self._unfinished = dict((f, k) for (k, f) in kwargs.items())
+            futures = list(kwargs.values())  # type: Sequence[Future]
+        else:
+            self._unfinished = dict((f, i) for (i, f) in enumerate(args))
+            futures = args
+
+        self._finished = collections.deque()  # type: Deque[Future]
+        self.current_index = None  # type: Optional[Union[str, int]]
+        self.current_future = None  # type: Optional[Future]
+        self._running_future = None  # type: Optional[Future]
+
+        for future in futures:
+            future_add_done_callback(future, self._done_callback)
+
+    def done(self) -> bool:
+        """Returns True if this iterator has no more results."""
+        if self._finished or self._unfinished:
+            return False
+        # Clear the 'current' values when iteration is done.
+        self.current_index = self.current_future = None
+        return True
+
+    def next(self) -> Future:
+        """Returns a `.Future` that will yield the next available result.
+
+        Note that this `.Future` will not be the same object as any of
+        the inputs.
+        """
+        self._running_future = Future()
+
+        if self._finished:
+            self._return_result(self._finished.popleft())
+
+        return self._running_future
+
+    def _done_callback(self, done: Future) -> None:
+        if self._running_future and not self._running_future.done():
+            self._return_result(done)
+        else:
+            self._finished.append(done)
+
+    def _return_result(self, done: Future) -> None:
+        """Sets the returned future's state to that of the future
+        we yielded, and sets the current future for the iterator.
+ """ + if self._running_future is None: + raise Exception("no future is running") + chain_future(done, self._running_future) + + self.current_future = done + self.current_index = self._unfinished.pop(done) + + def __aiter__(self) -> typing.AsyncIterator: + return self + + def __anext__(self) -> Future: + if self.done(): + # Lookup by name to silence pyflakes on older versions. + raise getattr(builtins, "StopAsyncIteration")() + return self.next() + + +def multi( + children: Union[List[_Yieldable], Dict[Any, _Yieldable]], + quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (), +) -> "Union[Future[List], Future[Dict]]": + """Runs multiple asynchronous operations in parallel. + + ``children`` may either be a list or a dict whose values are + yieldable objects. ``multi()`` returns a new yieldable + object that resolves to a parallel structure containing their + results. If ``children`` is a list, the result is a list of + results in the same order; if it is a dict, the result is a dict + with the same keys. + + That is, ``results = yield multi(list_of_futures)`` is equivalent + to:: + + results = [] + for future in list_of_futures: + results.append(yield future) + + If any children raise exceptions, ``multi()`` will raise the first + one. All others will be logged, unless they are of types + contained in the ``quiet_exceptions`` argument. + + In a ``yield``-based coroutine, it is not normally necessary to + call this function directly, since the coroutine runner will + do it automatically when a list or dict is yielded. However, + it is necessary in ``await``-based coroutines, or to pass + the ``quiet_exceptions`` argument. + + This function is available under the names ``multi()`` and ``Multi()`` + for historical reasons. + + Cancelling a `.Future` returned by ``multi()`` does not cancel its + children. `asyncio.gather` is similar to ``multi()``, but it does + cancel its children. + + .. versionchanged:: 4.2 + If multiple yieldables fail, any exceptions after the first + (which is raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. versionchanged:: 4.3 + Replaced the class ``Multi`` and the function ``multi_future`` + with a unified function ``multi``. Added support for yieldables + other than ``YieldPoint`` and `.Future`. + + """ + return multi_future(children, quiet_exceptions=quiet_exceptions) + + +Multi = multi + + +def multi_future( + children: Union[List[_Yieldable], Dict[Any, _Yieldable]], + quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (), +) -> "Union[Future[List], Future[Dict]]": + """Wait for multiple asynchronous futures in parallel. + + Since Tornado 6.0, this function is exactly the same as `multi`. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.2 + If multiple ``Futures`` fail, any exceptions after the first (which is + raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. deprecated:: 4.3 + Use `multi` instead. 
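As the `multi` docstring above notes, a dict of yieldables resolves to a dict of results under the same keys, and native coroutines must call `multi` explicitly; a short sketch with hypothetical coroutines `fetch_a` and `fetch_b`:

    async def fetch_pair():
        results = await gen.multi({"a": fetch_a(), "b": fetch_b()})
        return results["a"], results["b"]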
+ """ + if isinstance(children, dict): + keys = list(children.keys()) # type: Optional[List] + children_seq = children.values() # type: Iterable + else: + keys = None + children_seq = children + children_futs = list(map(convert_yielded, children_seq)) + assert all(is_future(i) or isinstance(i, _NullFuture) for i in children_futs) + unfinished_children = set(children_futs) + + future = _create_future() + if not children_futs: + future_set_result_unless_cancelled(future, {} if keys is not None else []) + + def callback(fut: Future) -> None: + unfinished_children.remove(fut) + if not unfinished_children: + result_list = [] + for f in children_futs: + try: + result_list.append(f.result()) + except Exception as e: + if future.done(): + if not isinstance(e, quiet_exceptions): + app_log.error( + "Multiple exceptions in yield list", exc_info=True + ) + else: + future_set_exc_info(future, sys.exc_info()) + if not future.done(): + if keys is not None: + future_set_result_unless_cancelled( + future, dict(zip(keys, result_list)) + ) + else: + future_set_result_unless_cancelled(future, result_list) + + listening = set() # type: Set[Future] + for f in children_futs: + if f not in listening: + listening.add(f) + future_add_done_callback(f, callback) + return future + + +def maybe_future(x: Any) -> Future: + """Converts ``x`` into a `.Future`. + + If ``x`` is already a `.Future`, it is simply returned; otherwise + it is wrapped in a new `.Future`. This is suitable for use as + ``result = yield gen.maybe_future(f())`` when you don't know whether + ``f()`` returns a `.Future` or not. + + .. deprecated:: 4.3 + This function only handles ``Futures``, not other yieldable objects. + Instead of `maybe_future`, check for the non-future result types + you expect (often just ``None``), and ``yield`` anything unknown. + """ + if is_future(x): + return x + else: + fut = _create_future() + fut.set_result(x) + return fut + + +def with_timeout( + timeout: Union[float, datetime.timedelta], + future: _Yieldable, + quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (), +) -> Future: + """Wraps a `.Future` (or other yieldable object) in a timeout. + + Raises `tornado.util.TimeoutError` if the input future does not + complete before ``timeout``, which may be specified in any form + allowed by `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or + an absolute time relative to `.IOLoop.time`) + + If the wrapped `.Future` fails after it has timed out, the exception + will be logged unless it is either of a type contained in + ``quiet_exceptions`` (which may be an exception type or a sequence of + types), or an ``asyncio.CancelledError``. + + The wrapped `.Future` is not canceled when the timeout expires, + permitting it to be reused. `asyncio.wait_for` is similar to this + function but it does cancel the wrapped `.Future` on timeout. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.1 + Added the ``quiet_exceptions`` argument and the logging of unhandled + exceptions. + + .. versionchanged:: 4.4 + Added support for yieldable objects other than `.Future`. + + .. versionchanged:: 6.0.3 + ``asyncio.CancelledError`` is now always considered "quiet". 
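A typical use of `with_timeout` as documented above, distinguishing a timeout from other failures (the URL is a placeholder; `TimeoutError` here is `tornado.util.TimeoutError`, imported at the top of this module):

    @gen.coroutine
    def fetch_with_deadline(url):
        try:
            response = yield gen.with_timeout(
                datetime.timedelta(seconds=5), AsyncHTTPClient().fetch(url))
        except TimeoutError:
            response = None  # deadline passed; the fetch itself keeps running
        raise gen.Return(response)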
+
+    """
+    # It's tempting to optimize this by cancelling the input future on timeout
+    # instead of creating a new one, but A) we can't know if we are the only
+    # one waiting on the input future, so cancelling it might disrupt other
+    # callers and B) concurrent futures can only be cancelled while they are
+    # in the queue, so cancellation cannot reliably bound our waiting time.
+    future_converted = convert_yielded(future)
+    result = _create_future()
+    chain_future(future_converted, result)
+    io_loop = IOLoop.current()
+
+    def error_callback(future: Future) -> None:
+        try:
+            future.result()
+        except asyncio.CancelledError:
+            pass
+        except Exception as e:
+            if not isinstance(e, quiet_exceptions):
+                app_log.error(
+                    "Exception in Future %r after timeout", future, exc_info=True
+                )
+
+    def timeout_callback() -> None:
+        if not result.done():
+            result.set_exception(TimeoutError("Timeout"))
+        # In case the wrapped future goes on to fail, log it.
+        future_add_done_callback(future_converted, error_callback)
+
+    timeout_handle = io_loop.add_timeout(timeout, timeout_callback)
+    if isinstance(future_converted, Future):
+        # We know this future will resolve on the IOLoop, so we don't
+        # need the extra thread-safety of IOLoop.add_future (and we also
+        # don't care about StackContext here).
+        future_add_done_callback(
+            future_converted, lambda future: io_loop.remove_timeout(timeout_handle)
+        )
+    else:
+        # concurrent.futures.Futures may resolve on any thread, so we
+        # need to route them back to the IOLoop.
+        io_loop.add_future(
+            future_converted, lambda future: io_loop.remove_timeout(timeout_handle)
+        )
+    return result
+
+
+def sleep(duration: float) -> "Future[None]":
+    """Return a `.Future` that resolves after the given number of seconds.
+
+    When used with ``yield`` in a coroutine, this is a non-blocking
+    analogue to `time.sleep` (which should not be used in coroutines
+    because it is blocking)::
+
+        yield gen.sleep(0.5)
+
+    Note that calling this function on its own does nothing; you must
+    wait on the `.Future` it returns (usually by yielding it).
+
+    .. versionadded:: 4.1
+    """
+    f = _create_future()
+    IOLoop.current().call_later(
+        duration, lambda: future_set_result_unless_cancelled(f, None)
+    )
+    return f
+
+
+class _NullFuture(object):
+    """_NullFuture resembles a Future that finished with a result of None.
+
+    It's not actually a `Future` to avoid depending on a particular event loop.
+    Handled as a special case in the coroutine runner.
+
+    We lie and tell the type checker that a _NullFuture is a Future so
+    we don't have to leak _NullFuture into lots of public APIs. But
+    this means that the type checker can't warn us when we're passing
+    a _NullFuture into a code path that doesn't understand what to do
+    with it.
+    """
+
+    def result(self) -> None:
+        return None
+
+    def done(self) -> bool:
+        return True
+
+
+# _null_future is used as a dummy value in the coroutine runner. It differs
+# from moment in that moment always adds a delay of one IOLoop iteration
+# while _null_future is processed as soon as possible.
+_null_future = typing.cast(Future, _NullFuture())
+
+moment = typing.cast(Future, _NullFuture())
+moment.__doc__ = """A special object which may be yielded to allow the IOLoop to run for
+one iteration.
+
+This is not needed in normal use but it can be helpful in long-running
+coroutines that are likely to yield Futures that are ready instantly.
+
+Usage: ``yield gen.moment``
+
+In native coroutines, the equivalent of ``yield gen.moment`` is
+``await asyncio.sleep(0)``.
+
+..
versionadded:: 4.0 + +.. deprecated:: 4.5 + ``yield None`` (or ``yield`` with no argument) is now equivalent to + ``yield gen.moment``. +""" + + +class Runner(object): + """Internal implementation of `tornado.gen.coroutine`. + + Maintains information about pending callbacks and their results. + + The results of the generator are stored in ``result_future`` (a + `.Future`) + """ + + def __init__( + self, + gen: "Generator[_Yieldable, Any, _T]", + result_future: "Future[_T]", + first_yielded: _Yieldable, + ) -> None: + self.gen = gen + self.result_future = result_future + self.future = _null_future # type: Union[None, Future] + self.running = False + self.finished = False + self.io_loop = IOLoop.current() + if self.handle_yield(first_yielded): + gen = result_future = first_yielded = None # type: ignore + self.run() + + def run(self) -> None: + """Starts or resumes the generator, running until it reaches a + yield point that is not ready. + """ + if self.running or self.finished: + return + try: + self.running = True + while True: + future = self.future + if future is None: + raise Exception("No pending future") + if not future.done(): + return + self.future = None + try: + exc_info = None + + try: + value = future.result() + except Exception: + exc_info = sys.exc_info() + future = None + + if exc_info is not None: + try: + yielded = self.gen.throw(*exc_info) # type: ignore + finally: + # Break up a reference to itself + # for faster GC on CPython. + exc_info = None + else: + yielded = self.gen.send(value) + + except (StopIteration, Return) as e: + self.finished = True + self.future = _null_future + future_set_result_unless_cancelled( + self.result_future, _value_from_stopiteration(e) + ) + self.result_future = None # type: ignore + return + except Exception: + self.finished = True + self.future = _null_future + future_set_exc_info(self.result_future, sys.exc_info()) + self.result_future = None # type: ignore + return + if not self.handle_yield(yielded): + return + yielded = None + finally: + self.running = False + + def handle_yield(self, yielded: _Yieldable) -> bool: + try: + self.future = convert_yielded(yielded) + except BadYieldError: + self.future = Future() + future_set_exc_info(self.future, sys.exc_info()) + + if self.future is moment: + self.io_loop.add_callback(self.run) + return False + elif self.future is None: + raise Exception("no pending future") + elif not self.future.done(): + + def inner(f: Any) -> None: + # Break a reference cycle to speed GC. + f = None # noqa: F841 + self.run() + + self.io_loop.add_future(self.future, inner) + return False + return True + + def handle_exception( + self, typ: Type[Exception], value: Exception, tb: types.TracebackType + ) -> bool: + if not self.running and not self.finished: + self.future = Future() + future_set_exc_info(self.future, (typ, value, tb)) + self.run() + return True + else: + return False + + +# Convert Awaitables into Futures. +try: + _wrap_awaitable = asyncio.ensure_future +except AttributeError: + # asyncio.ensure_future was introduced in Python 3.4.4, but + # Debian jessie still ships with 3.4.2 so try the old name. + _wrap_awaitable = getattr(asyncio, "async") + + +def convert_yielded(yielded: _Yieldable) -> Future: + """Convert a yielded object into a `.Future`. + + The default implementation accepts lists, dictionaries, and + Futures. This has the side effect of starting any coroutines that + did not start themselves, similar to `asyncio.ensure_future`. 
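Because awaitables are handed to `asyncio.ensure_future` via `_wrap_awaitable` above, a decorated generator can yield a native coroutine directly, and the conversion also starts it; a minimal sketch:

    async def compute():
        return 42

    @gen.coroutine
    def legacy():
        value = yield compute()  # converted (and started) by convert_yielded
        raise gen.Return(value)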
+ + If the `~functools.singledispatch` library is available, this function + may be extended to support additional types. For example:: + + @convert_yielded.register(asyncio.Future) + def _(asyncio_future): + return tornado.platform.asyncio.to_tornado_future(asyncio_future) + + .. versionadded:: 4.1 + + """ + if yielded is None or yielded is moment: + return moment + elif yielded is _null_future: + return _null_future + elif isinstance(yielded, (list, dict)): + return multi(yielded) # type: ignore + elif is_future(yielded): + return typing.cast(Future, yielded) + elif isawaitable(yielded): + return _wrap_awaitable(yielded) # type: ignore + else: + raise BadYieldError("yielded unknown object %r" % (yielded,)) + + +convert_yielded = singledispatch(convert_yielded) diff --git a/server/www/packages/packages-linux/x64/tornado/http1connection.py b/server/www/packages/packages-linux/x64/tornado/http1connection.py index 6cc4071..6ea1d36 100644 --- a/server/www/packages/packages-linux/x64/tornado/http1connection.py +++ b/server/www/packages/packages-linux/x64/tornado/http1connection.py @@ -1,751 +1,836 @@ -# -# Copyright 2014 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Client and server implementations of HTTP/1.x. - -.. versionadded:: 4.0 -""" - -from __future__ import absolute_import, division, print_function - -import re -import warnings - -from tornado.concurrent import (Future, future_add_done_callback, - future_set_result_unless_cancelled) -from tornado.escape import native_str, utf8 -from tornado import gen -from tornado import httputil -from tornado import iostream -from tornado.log import gen_log, app_log -from tornado import stack_context -from tornado.util import GzipDecompressor, PY3 - - -class _QuietException(Exception): - def __init__(self): - pass - - -class _ExceptionLoggingContext(object): - """Used with the ``with`` statement when calling delegate methods to - log any exceptions with the given logger. Any exceptions caught are - converted to _QuietException - """ - def __init__(self, logger): - self.logger = logger - - def __enter__(self): - pass - - def __exit__(self, typ, value, tb): - if value is not None: - self.logger.error("Uncaught exception", exc_info=(typ, value, tb)) - raise _QuietException - - -class HTTP1ConnectionParameters(object): - """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`. - """ - def __init__(self, no_keep_alive=False, chunk_size=None, - max_header_size=None, header_timeout=None, max_body_size=None, - body_timeout=None, decompress=False): - """ - :arg bool no_keep_alive: If true, always close the connection after - one request. 
- :arg int chunk_size: how much data to read into memory at once - :arg int max_header_size: maximum amount of data for HTTP headers - :arg float header_timeout: how long to wait for all headers (seconds) - :arg int max_body_size: maximum amount of data for body - :arg float body_timeout: how long to wait while reading body (seconds) - :arg bool decompress: if true, decode incoming - ``Content-Encoding: gzip`` - """ - self.no_keep_alive = no_keep_alive - self.chunk_size = chunk_size or 65536 - self.max_header_size = max_header_size or 65536 - self.header_timeout = header_timeout - self.max_body_size = max_body_size - self.body_timeout = body_timeout - self.decompress = decompress - - -class HTTP1Connection(httputil.HTTPConnection): - """Implements the HTTP/1.x protocol. - - This class can be on its own for clients, or via `HTTP1ServerConnection` - for servers. - """ - def __init__(self, stream, is_client, params=None, context=None): - """ - :arg stream: an `.IOStream` - :arg bool is_client: client or server - :arg params: a `.HTTP1ConnectionParameters` instance or ``None`` - :arg context: an opaque application-defined object that can be accessed - as ``connection.context``. - """ - self.is_client = is_client - self.stream = stream - if params is None: - params = HTTP1ConnectionParameters() - self.params = params - self.context = context - self.no_keep_alive = params.no_keep_alive - # The body limits can be altered by the delegate, so save them - # here instead of just referencing self.params later. - self._max_body_size = (self.params.max_body_size or - self.stream.max_buffer_size) - self._body_timeout = self.params.body_timeout - # _write_finished is set to True when finish() has been called, - # i.e. there will be no more data sent. Data may still be in the - # stream's write buffer. - self._write_finished = False - # True when we have read the entire incoming body. - self._read_finished = False - # _finish_future resolves when all data has been written and flushed - # to the IOStream. - self._finish_future = Future() - # If true, the connection should be closed after this request - # (after the response has been written in the server side, - # and after it has been read in the client) - self._disconnect_on_finish = False - self._clear_callbacks() - # Save the start lines after we read or write them; they - # affect later processing (e.g. 304 responses and HEAD methods - # have content-length but no bodies) - self._request_start_line = None - self._response_start_line = None - self._request_headers = None - # True if we are writing output with chunked encoding. - self._chunking_output = None - # While reading a body with a content-length, this is the - # amount left to read. - self._expected_content_remaining = None - # A Future for our outgoing writes, returned by IOStream.write. - self._pending_write = None - - def read_response(self, delegate): - """Read a single HTTP response. - - Typical client-mode usage is to write a request using `write_headers`, - `write`, and `finish`, and then call ``read_response``. - - :arg delegate: a `.HTTPMessageDelegate` - - Returns a `.Future` that resolves to None after the full response has - been read. 
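The `read_response` docstring above summarizes the client-mode sequence; spelled out as a sketch (stream and delegate setup elided; `finish` is defined further down in this class):

    conn = HTTP1Connection(stream, is_client=True)
    conn.write_headers(
        httputil.RequestStartLine("GET", "/", "HTTP/1.1"),
        httputil.HTTPHeaders({"Host": "example.com"}))
    conn.finish()
    yield conn.read_response(delegate)  # delegate is an HTTPMessageDelegate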
- """ - if self.params.decompress: - delegate = _GzipMessageDelegate(delegate, self.params.chunk_size) - return self._read_message(delegate) - - @gen.coroutine - def _read_message(self, delegate): - need_delegate_close = False - try: - header_future = self.stream.read_until_regex( - b"\r?\n\r?\n", - max_bytes=self.params.max_header_size) - if self.params.header_timeout is None: - header_data = yield header_future - else: - try: - header_data = yield gen.with_timeout( - self.stream.io_loop.time() + self.params.header_timeout, - header_future, - quiet_exceptions=iostream.StreamClosedError) - except gen.TimeoutError: - self.close() - raise gen.Return(False) - start_line, headers = self._parse_headers(header_data) - if self.is_client: - start_line = httputil.parse_response_start_line(start_line) - self._response_start_line = start_line - else: - start_line = httputil.parse_request_start_line(start_line) - self._request_start_line = start_line - self._request_headers = headers - - self._disconnect_on_finish = not self._can_keep_alive( - start_line, headers) - need_delegate_close = True - with _ExceptionLoggingContext(app_log): - header_future = delegate.headers_received(start_line, headers) - if header_future is not None: - yield header_future - if self.stream is None: - # We've been detached. - need_delegate_close = False - raise gen.Return(False) - skip_body = False - if self.is_client: - if (self._request_start_line is not None and - self._request_start_line.method == 'HEAD'): - skip_body = True - code = start_line.code - if code == 304: - # 304 responses may include the content-length header - # but do not actually have a body. - # http://tools.ietf.org/html/rfc7230#section-3.3 - skip_body = True - if code >= 100 and code < 200: - # 1xx responses should never indicate the presence of - # a body. - if ('Content-Length' in headers or - 'Transfer-Encoding' in headers): - raise httputil.HTTPInputError( - "Response code %d cannot have body" % code) - # TODO: client delegates will get headers_received twice - # in the case of a 100-continue. Document or change? 
- yield self._read_message(delegate) - else: - if (headers.get("Expect") == "100-continue" and - not self._write_finished): - self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n") - if not skip_body: - body_future = self._read_body( - start_line.code if self.is_client else 0, headers, delegate) - if body_future is not None: - if self._body_timeout is None: - yield body_future - else: - try: - yield gen.with_timeout( - self.stream.io_loop.time() + self._body_timeout, - body_future, - quiet_exceptions=iostream.StreamClosedError) - except gen.TimeoutError: - gen_log.info("Timeout reading body from %s", - self.context) - self.stream.close() - raise gen.Return(False) - self._read_finished = True - if not self._write_finished or self.is_client: - need_delegate_close = False - with _ExceptionLoggingContext(app_log): - delegate.finish() - # If we're waiting for the application to produce an asynchronous - # response, and we're not detached, register a close callback - # on the stream (we didn't need one while we were reading) - if (not self._finish_future.done() and - self.stream is not None and - not self.stream.closed()): - self.stream.set_close_callback(self._on_connection_close) - yield self._finish_future - if self.is_client and self._disconnect_on_finish: - self.close() - if self.stream is None: - raise gen.Return(False) - except httputil.HTTPInputError as e: - gen_log.info("Malformed HTTP message from %s: %s", - self.context, e) - if not self.is_client: - yield self.stream.write(b'HTTP/1.1 400 Bad Request\r\n\r\n') - self.close() - raise gen.Return(False) - finally: - if need_delegate_close: - with _ExceptionLoggingContext(app_log): - delegate.on_connection_close() - header_future = None - self._clear_callbacks() - raise gen.Return(True) - - def _clear_callbacks(self): - """Clears the callback attributes. - - This allows the request handler to be garbage collected more - quickly in CPython by breaking up reference cycles. - """ - self._write_callback = None - self._write_future = None - self._close_callback = None - if self.stream is not None: - self.stream.set_close_callback(None) - - def set_close_callback(self, callback): - """Sets a callback that will be run when the connection is closed. - - Note that this callback is slightly different from - `.HTTPMessageDelegate.on_connection_close`: The - `.HTTPMessageDelegate` method is called when the connection is - closed while recieving a message. This callback is used when - there is not an active delegate (for example, on the server - side this callback is used if the client closes the connection - after sending its request but before receiving all the - response. - """ - self._close_callback = stack_context.wrap(callback) - - def _on_connection_close(self): - # Note that this callback is only registered on the IOStream - # when we have finished reading the request and are waiting for - # the application to produce its response. - if self._close_callback is not None: - callback = self._close_callback - self._close_callback = None - callback() - if not self._finish_future.done(): - future_set_result_unless_cancelled(self._finish_future, None) - self._clear_callbacks() - - def close(self): - if self.stream is not None: - self.stream.close() - self._clear_callbacks() - if not self._finish_future.done(): - future_set_result_unless_cancelled(self._finish_future, None) - - def detach(self): - """Take control of the underlying stream. - - Returns the underlying `.IOStream` object and stops all further - HTTP processing. 
May only be called during - `.HTTPMessageDelegate.headers_received`. Intended for implementing - protocols like websockets that tunnel over an HTTP handshake. - """ - self._clear_callbacks() - stream = self.stream - self.stream = None - if not self._finish_future.done(): - future_set_result_unless_cancelled(self._finish_future, None) - return stream - - def set_body_timeout(self, timeout): - """Sets the body timeout for a single request. - - Overrides the value from `.HTTP1ConnectionParameters`. - """ - self._body_timeout = timeout - - def set_max_body_size(self, max_body_size): - """Sets the body size limit for a single request. - - Overrides the value from `.HTTP1ConnectionParameters`. - """ - self._max_body_size = max_body_size - - def write_headers(self, start_line, headers, chunk=None, callback=None): - """Implements `.HTTPConnection.write_headers`.""" - lines = [] - if self.is_client: - self._request_start_line = start_line - lines.append(utf8('%s %s HTTP/1.1' % (start_line[0], start_line[1]))) - # Client requests with a non-empty body must have either a - # Content-Length or a Transfer-Encoding. - self._chunking_output = ( - start_line.method in ('POST', 'PUT', 'PATCH') and - 'Content-Length' not in headers and - 'Transfer-Encoding' not in headers) - else: - self._response_start_line = start_line - lines.append(utf8('HTTP/1.1 %d %s' % (start_line[1], start_line[2]))) - self._chunking_output = ( - # TODO: should this use - # self._request_start_line.version or - # start_line.version? - self._request_start_line.version == 'HTTP/1.1' and - # 1xx, 204 and 304 responses have no body (not even a zero-length - # body), and so should not have either Content-Length or - # Transfer-Encoding headers. - start_line.code not in (204, 304) and - (start_line.code < 100 or start_line.code >= 200) and - # No need to chunk the output if a Content-Length is specified. - 'Content-Length' not in headers and - # Applications are discouraged from touching Transfer-Encoding, - # but if they do, leave it alone. - 'Transfer-Encoding' not in headers) - # If connection to a 1.1 client will be closed, inform client - if (self._request_start_line.version == 'HTTP/1.1' and self._disconnect_on_finish): - headers['Connection'] = 'close' - # If a 1.0 client asked for keep-alive, add the header. - if (self._request_start_line.version == 'HTTP/1.0' and - self._request_headers.get('Connection', '').lower() == 'keep-alive'): - headers['Connection'] = 'Keep-Alive' - if self._chunking_output: - headers['Transfer-Encoding'] = 'chunked' - if (not self.is_client and - (self._request_start_line.method == 'HEAD' or - start_line.code == 304)): - self._expected_content_remaining = 0 - elif 'Content-Length' in headers: - self._expected_content_remaining = int(headers['Content-Length']) - else: - self._expected_content_remaining = None - # TODO: headers are supposed to be of type str, but we still have some - # cases that let bytes slip through. Remove these native_str calls when those - # are fixed. 
- header_lines = (native_str(n) + ": " + native_str(v) for n, v in headers.get_all()) - if PY3: - lines.extend(l.encode('latin1') for l in header_lines) - else: - lines.extend(header_lines) - for line in lines: - if b'\n' in line: - raise ValueError('Newline in header: ' + repr(line)) - future = None - if self.stream.closed(): - future = self._write_future = Future() - future.set_exception(iostream.StreamClosedError()) - future.exception() - else: - if callback is not None: - warnings.warn("callback argument is deprecated, use returned Future instead", - DeprecationWarning) - self._write_callback = stack_context.wrap(callback) - else: - future = self._write_future = Future() - data = b"\r\n".join(lines) + b"\r\n\r\n" - if chunk: - data += self._format_chunk(chunk) - self._pending_write = self.stream.write(data) - future_add_done_callback(self._pending_write, self._on_write_complete) - return future - - def _format_chunk(self, chunk): - if self._expected_content_remaining is not None: - self._expected_content_remaining -= len(chunk) - if self._expected_content_remaining < 0: - # Close the stream now to stop further framing errors. - self.stream.close() - raise httputil.HTTPOutputError( - "Tried to write more data than Content-Length") - if self._chunking_output and chunk: - # Don't write out empty chunks because that means END-OF-STREAM - # with chunked encoding - return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n" - else: - return chunk - - def write(self, chunk, callback=None): - """Implements `.HTTPConnection.write`. - - For backwards compatibility it is allowed but deprecated to - skip `write_headers` and instead call `write()` with a - pre-encoded header block. - """ - future = None - if self.stream.closed(): - future = self._write_future = Future() - self._write_future.set_exception(iostream.StreamClosedError()) - self._write_future.exception() - else: - if callback is not None: - warnings.warn("callback argument is deprecated, use returned Future instead", - DeprecationWarning) - self._write_callback = stack_context.wrap(callback) - else: - future = self._write_future = Future() - self._pending_write = self.stream.write(self._format_chunk(chunk)) - self._pending_write.add_done_callback(self._on_write_complete) - return future - - def finish(self): - """Implements `.HTTPConnection.finish`.""" - if (self._expected_content_remaining is not None and - self._expected_content_remaining != 0 and - not self.stream.closed()): - self.stream.close() - raise httputil.HTTPOutputError( - "Tried to write %d bytes less than Content-Length" % - self._expected_content_remaining) - if self._chunking_output: - if not self.stream.closed(): - self._pending_write = self.stream.write(b"0\r\n\r\n") - self._pending_write.add_done_callback(self._on_write_complete) - self._write_finished = True - # If the app finished the request while we're still reading, - # divert any remaining data away from the delegate and - # close the connection when we're done sending our response. - # Closing the connection is the only way to avoid reading the - # whole input body. - if not self._read_finished: - self._disconnect_on_finish = True - # No more data is coming, so instruct TCP to send any remaining - # data immediately instead of waiting for a full packet or ack. 
- self.stream.set_nodelay(True) - if self._pending_write is None: - self._finish_request(None) - else: - future_add_done_callback(self._pending_write, self._finish_request) - - def _on_write_complete(self, future): - exc = future.exception() - if exc is not None and not isinstance(exc, iostream.StreamClosedError): - future.result() - if self._write_callback is not None: - callback = self._write_callback - self._write_callback = None - self.stream.io_loop.add_callback(callback) - if self._write_future is not None: - future = self._write_future - self._write_future = None - future_set_result_unless_cancelled(future, None) - - def _can_keep_alive(self, start_line, headers): - if self.params.no_keep_alive: - return False - connection_header = headers.get("Connection") - if connection_header is not None: - connection_header = connection_header.lower() - if start_line.version == "HTTP/1.1": - return connection_header != "close" - elif ("Content-Length" in headers or - headers.get("Transfer-Encoding", "").lower() == "chunked" or - getattr(start_line, 'method', None) in ("HEAD", "GET")): - # start_line may be a request or response start line; only - # the former has a method attribute. - return connection_header == "keep-alive" - return False - - def _finish_request(self, future): - self._clear_callbacks() - if not self.is_client and self._disconnect_on_finish: - self.close() - return - # Turn Nagle's algorithm back on, leaving the stream in its - # default state for the next request. - self.stream.set_nodelay(False) - if not self._finish_future.done(): - future_set_result_unless_cancelled(self._finish_future, None) - - def _parse_headers(self, data): - # The lstrip removes newlines that some implementations sometimes - # insert between messages of a reused connection. Per RFC 7230, - # we SHOULD ignore at least one empty line before the request. - # http://tools.ietf.org/html/rfc7230#section-3.5 - data = native_str(data.decode('latin1')).lstrip("\r\n") - # RFC 7230 section allows for both CRLF and bare LF. - eol = data.find("\n") - start_line = data[:eol].rstrip("\r") - headers = httputil.HTTPHeaders.parse(data[eol:]) - return start_line, headers - - def _read_body(self, code, headers, delegate): - if "Content-Length" in headers: - if "Transfer-Encoding" in headers: - # Response cannot contain both Content-Length and - # Transfer-Encoding headers. - # http://tools.ietf.org/html/rfc7230#section-3.3.3 - raise httputil.HTTPInputError( - "Response with both Transfer-Encoding and Content-Length") - if "," in headers["Content-Length"]: - # Proxies sometimes cause Content-Length headers to get - # duplicated. If all the values are identical then we can - # use them but if they differ it's an error. - pieces = re.split(r',\s*', headers["Content-Length"]) - if any(i != pieces[0] for i in pieces): - raise httputil.HTTPInputError( - "Multiple unequal Content-Lengths: %r" % - headers["Content-Length"]) - headers["Content-Length"] = pieces[0] - - try: - content_length = int(headers["Content-Length"]) - except ValueError: - # Handles non-integer Content-Length value. - raise httputil.HTTPInputError( - "Only integer Content-Length is allowed: %s" % headers["Content-Length"]) - - if content_length > self._max_body_size: - raise httputil.HTTPInputError("Content-Length too long") - else: - content_length = None - - if code == 204: - # This response code is not allowed to have a non-empty body, - # and has an implicit length of zero instead of read-until-close. 
- # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 - if ("Transfer-Encoding" in headers or - content_length not in (None, 0)): - raise httputil.HTTPInputError( - "Response with code %d should not have body" % code) - content_length = 0 - - if content_length is not None: - return self._read_fixed_body(content_length, delegate) - if headers.get("Transfer-Encoding", "").lower() == "chunked": - return self._read_chunked_body(delegate) - if self.is_client: - return self._read_body_until_close(delegate) - return None - - @gen.coroutine - def _read_fixed_body(self, content_length, delegate): - while content_length > 0: - body = yield self.stream.read_bytes( - min(self.params.chunk_size, content_length), partial=True) - content_length -= len(body) - if not self._write_finished or self.is_client: - with _ExceptionLoggingContext(app_log): - ret = delegate.data_received(body) - if ret is not None: - yield ret - - @gen.coroutine - def _read_chunked_body(self, delegate): - # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1 - total_size = 0 - while True: - chunk_len = yield self.stream.read_until(b"\r\n", max_bytes=64) - chunk_len = int(chunk_len.strip(), 16) - if chunk_len == 0: - crlf = yield self.stream.read_bytes(2) - if crlf != b'\r\n': - raise httputil.HTTPInputError("improperly terminated chunked request") - return - total_size += chunk_len - if total_size > self._max_body_size: - raise httputil.HTTPInputError("chunked body too large") - bytes_to_read = chunk_len - while bytes_to_read: - chunk = yield self.stream.read_bytes( - min(bytes_to_read, self.params.chunk_size), partial=True) - bytes_to_read -= len(chunk) - if not self._write_finished or self.is_client: - with _ExceptionLoggingContext(app_log): - ret = delegate.data_received(chunk) - if ret is not None: - yield ret - # chunk ends with \r\n - crlf = yield self.stream.read_bytes(2) - assert crlf == b"\r\n" - - @gen.coroutine - def _read_body_until_close(self, delegate): - body = yield self.stream.read_until_close() - if not self._write_finished or self.is_client: - with _ExceptionLoggingContext(app_log): - delegate.data_received(body) - - -class _GzipMessageDelegate(httputil.HTTPMessageDelegate): - """Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``. - """ - def __init__(self, delegate, chunk_size): - self._delegate = delegate - self._chunk_size = chunk_size - self._decompressor = None - - def headers_received(self, start_line, headers): - if headers.get("Content-Encoding") == "gzip": - self._decompressor = GzipDecompressor() - # Downstream delegates will only see uncompressed data, - # so rename the content-encoding header. - # (but note that curl_httpclient doesn't do this). - headers.add("X-Consumed-Content-Encoding", - headers["Content-Encoding"]) - del headers["Content-Encoding"] - return self._delegate.headers_received(start_line, headers) - - @gen.coroutine - def data_received(self, chunk): - if self._decompressor: - compressed_data = chunk - while compressed_data: - decompressed = self._decompressor.decompress( - compressed_data, self._chunk_size) - if decompressed: - ret = self._delegate.data_received(decompressed) - if ret is not None: - yield ret - compressed_data = self._decompressor.unconsumed_tail - else: - ret = self._delegate.data_received(chunk) - if ret is not None: - yield ret - - def finish(self): - if self._decompressor is not None: - tail = self._decompressor.flush() - if tail: - # I believe the tail will always be empty (i.e. 
- # decompress will return all it can). The purpose - # of the flush call is to detect errors such - # as truncated input. But in case it ever returns - # anything, treat it as an extra chunk - self._delegate.data_received(tail) - return self._delegate.finish() - - def on_connection_close(self): - return self._delegate.on_connection_close() - - -class HTTP1ServerConnection(object): - """An HTTP/1.x server.""" - def __init__(self, stream, params=None, context=None): - """ - :arg stream: an `.IOStream` - :arg params: a `.HTTP1ConnectionParameters` or None - :arg context: an opaque application-defined object that is accessible - as ``connection.context`` - """ - self.stream = stream - if params is None: - params = HTTP1ConnectionParameters() - self.params = params - self.context = context - self._serving_future = None - - @gen.coroutine - def close(self): - """Closes the connection. - - Returns a `.Future` that resolves after the serving loop has exited. - """ - self.stream.close() - # Block until the serving loop is done, but ignore any exceptions - # (start_serving is already responsible for logging them). - try: - yield self._serving_future - except Exception: - pass - - def start_serving(self, delegate): - """Starts serving requests on this connection. - - :arg delegate: a `.HTTPServerConnectionDelegate` - """ - assert isinstance(delegate, httputil.HTTPServerConnectionDelegate) - self._serving_future = self._server_request_loop(delegate) - # Register the future on the IOLoop so its errors get logged. - self.stream.io_loop.add_future(self._serving_future, - lambda f: f.result()) - - @gen.coroutine - def _server_request_loop(self, delegate): - try: - while True: - conn = HTTP1Connection(self.stream, False, - self.params, self.context) - request_delegate = delegate.start_request(self, conn) - try: - ret = yield conn.read_response(request_delegate) - except (iostream.StreamClosedError, - iostream.UnsatisfiableReadError): - return - except _QuietException: - # This exception was already logged. - conn.close() - return - except Exception: - gen_log.error("Uncaught exception", exc_info=True) - conn.close() - return - if not ret: - return - yield gen.moment - finally: - delegate.on_close(self) +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Client and server implementations of HTTP/1.x. + +.. 
versionadded:: 4.0 +""" + +import asyncio +import logging +import re +import types + +from tornado.concurrent import ( + Future, + future_add_done_callback, + future_set_result_unless_cancelled, +) +from tornado.escape import native_str, utf8 +from tornado import gen +from tornado import httputil +from tornado import iostream +from tornado.log import gen_log, app_log +from tornado.util import GzipDecompressor + + +from typing import cast, Optional, Type, Awaitable, Callable, Union, Tuple + + +class _QuietException(Exception): + def __init__(self) -> None: + pass + + +class _ExceptionLoggingContext(object): + """Used with the ``with`` statement when calling delegate methods to + log any exceptions with the given logger. Any exceptions caught are + converted to _QuietException + """ + + def __init__(self, logger: logging.Logger) -> None: + self.logger = logger + + def __enter__(self) -> None: + pass + + def __exit__( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + tb: types.TracebackType, + ) -> None: + if value is not None: + assert typ is not None + self.logger.error("Uncaught exception", exc_info=(typ, value, tb)) + raise _QuietException + + +class HTTP1ConnectionParameters(object): + """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`. + """ + + def __init__( + self, + no_keep_alive: bool = False, + chunk_size: int = None, + max_header_size: int = None, + header_timeout: float = None, + max_body_size: int = None, + body_timeout: float = None, + decompress: bool = False, + ) -> None: + """ + :arg bool no_keep_alive: If true, always close the connection after + one request. + :arg int chunk_size: how much data to read into memory at once + :arg int max_header_size: maximum amount of data for HTTP headers + :arg float header_timeout: how long to wait for all headers (seconds) + :arg int max_body_size: maximum amount of data for body + :arg float body_timeout: how long to wait while reading body (seconds) + :arg bool decompress: if true, decode incoming + ``Content-Encoding: gzip`` + """ + self.no_keep_alive = no_keep_alive + self.chunk_size = chunk_size or 65536 + self.max_header_size = max_header_size or 65536 + self.header_timeout = header_timeout + self.max_body_size = max_body_size + self.body_timeout = body_timeout + self.decompress = decompress + + +class HTTP1Connection(httputil.HTTPConnection): + """Implements the HTTP/1.x protocol. + + This class can be used on its own for clients, or via `HTTP1ServerConnection` + for servers. + """ + + def __init__( + self, + stream: iostream.IOStream, + is_client: bool, + params: HTTP1ConnectionParameters = None, + context: object = None, + ) -> None: + """ + :arg stream: an `.IOStream` + :arg bool is_client: client or server + :arg params: a `.HTTP1ConnectionParameters` instance or ``None`` + :arg context: an opaque application-defined object that can be accessed + as ``connection.context``. + """ + self.is_client = is_client + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self.no_keep_alive = params.no_keep_alive + # The body limits can be altered by the delegate, so save them + # here instead of just referencing self.params later. + self._max_body_size = self.params.max_body_size or self.stream.max_buffer_size + self._body_timeout = self.params.body_timeout + # _write_finished is set to True when finish() has been called, + # i.e. there will be no more data sent. Data may still be in the + # stream's write buffer.
+ self._write_finished = False + # True when we have read the entire incoming body. + self._read_finished = False + # _finish_future resolves when all data has been written and flushed + # to the IOStream. + self._finish_future = Future() # type: Future[None] + # If true, the connection should be closed after this request + # (after the response has been written in the server side, + # and after it has been read in the client) + self._disconnect_on_finish = False + self._clear_callbacks() + # Save the start lines after we read or write them; they + # affect later processing (e.g. 304 responses and HEAD methods + # have content-length but no bodies) + self._request_start_line = None # type: Optional[httputil.RequestStartLine] + self._response_start_line = None # type: Optional[httputil.ResponseStartLine] + self._request_headers = None # type: Optional[httputil.HTTPHeaders] + # True if we are writing output with chunked encoding. + self._chunking_output = False + # While reading a body with a content-length, this is the + # amount left to read. + self._expected_content_remaining = None # type: Optional[int] + # A Future for our outgoing writes, returned by IOStream.write. + self._pending_write = None # type: Optional[Future[None]] + + def read_response(self, delegate: httputil.HTTPMessageDelegate) -> Awaitable[bool]: + """Read a single HTTP response. + + Typical client-mode usage is to write a request using `write_headers`, + `write`, and `finish`, and then call ``read_response``. + + :arg delegate: a `.HTTPMessageDelegate` + + Returns a `.Future` that resolves to a bool after the full response has + been read. The result is true if the stream is still open. + """ + if self.params.decompress: + delegate = _GzipMessageDelegate(delegate, self.params.chunk_size) + return self._read_message(delegate) + + async def _read_message(self, delegate: httputil.HTTPMessageDelegate) -> bool: + need_delegate_close = False + try: + header_future = self.stream.read_until_regex( + b"\r?\n\r?\n", max_bytes=self.params.max_header_size + ) + if self.params.header_timeout is None: + header_data = await header_future + else: + try: + header_data = await gen.with_timeout( + self.stream.io_loop.time() + self.params.header_timeout, + header_future, + quiet_exceptions=iostream.StreamClosedError, + ) + except gen.TimeoutError: + self.close() + return False + start_line_str, headers = self._parse_headers(header_data) + if self.is_client: + resp_start_line = httputil.parse_response_start_line(start_line_str) + self._response_start_line = resp_start_line + start_line = ( + resp_start_line + ) # type: Union[httputil.RequestStartLine, httputil.ResponseStartLine] + # TODO: this will need to change to support client-side keepalive + self._disconnect_on_finish = False + else: + req_start_line = httputil.parse_request_start_line(start_line_str) + self._request_start_line = req_start_line + self._request_headers = headers + start_line = req_start_line + self._disconnect_on_finish = not self._can_keep_alive( + req_start_line, headers + ) + need_delegate_close = True + with _ExceptionLoggingContext(app_log): + header_recv_future = delegate.headers_received(start_line, headers) + if header_recv_future is not None: + await header_recv_future + if self.stream is None: + # We've been detached. 
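+                # (A delegate may call detach() from inside headers_received,
+                # e.g. for a websocket-style handshake that takes over the
+                # stream; self.stream is then None and HTTP processing stops.)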
+ need_delegate_close = False + return False + skip_body = False + if self.is_client: + assert isinstance(start_line, httputil.ResponseStartLine) + if ( + self._request_start_line is not None + and self._request_start_line.method == "HEAD" + ): + skip_body = True + code = start_line.code + if code == 304: + # 304 responses may include the content-length header + # but do not actually have a body. + # http://tools.ietf.org/html/rfc7230#section-3.3 + skip_body = True + if code >= 100 and code < 200: + # 1xx responses should never indicate the presence of + # a body. + if "Content-Length" in headers or "Transfer-Encoding" in headers: + raise httputil.HTTPInputError( + "Response code %d cannot have body" % code + ) + # TODO: client delegates will get headers_received twice + # in the case of a 100-continue. Document or change? + await self._read_message(delegate) + else: + if headers.get("Expect") == "100-continue" and not self._write_finished: + self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n") + if not skip_body: + body_future = self._read_body( + resp_start_line.code if self.is_client else 0, headers, delegate + ) + if body_future is not None: + if self._body_timeout is None: + await body_future + else: + try: + await gen.with_timeout( + self.stream.io_loop.time() + self._body_timeout, + body_future, + quiet_exceptions=iostream.StreamClosedError, + ) + except gen.TimeoutError: + gen_log.info("Timeout reading body from %s", self.context) + self.stream.close() + return False + self._read_finished = True + if not self._write_finished or self.is_client: + need_delegate_close = False + with _ExceptionLoggingContext(app_log): + delegate.finish() + # If we're waiting for the application to produce an asynchronous + # response, and we're not detached, register a close callback + # on the stream (we didn't need one while we were reading) + if ( + not self._finish_future.done() + and self.stream is not None + and not self.stream.closed() + ): + self.stream.set_close_callback(self._on_connection_close) + await self._finish_future + if self.is_client and self._disconnect_on_finish: + self.close() + if self.stream is None: + return False + except httputil.HTTPInputError as e: + gen_log.info("Malformed HTTP message from %s: %s", self.context, e) + if not self.is_client: + await self.stream.write(b"HTTP/1.1 400 Bad Request\r\n\r\n") + self.close() + return False + finally: + if need_delegate_close: + with _ExceptionLoggingContext(app_log): + delegate.on_connection_close() + header_future = None # type: ignore + self._clear_callbacks() + return True + + def _clear_callbacks(self) -> None: + """Clears the callback attributes. + + This allows the request handler to be garbage collected more + quickly in CPython by breaking up reference cycles. + """ + self._write_callback = None + self._write_future = None # type: Optional[Future[None]] + self._close_callback = None # type: Optional[Callable[[], None]] + if self.stream is not None: + self.stream.set_close_callback(None) + + def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None: + """Sets a callback that will be run when the connection is closed. + + Note that this callback is slightly different from + `.HTTPMessageDelegate.on_connection_close`: The + `.HTTPMessageDelegate` method is called when the connection is + closed while receiving a message.
This callback is used when + there is not an active delegate (for example, on the server + side this callback is used if the client closes the connection + after sending its request but before receiving all of the + response). + """ + self._close_callback = callback + + def _on_connection_close(self) -> None: + # Note that this callback is only registered on the IOStream + # when we have finished reading the request and are waiting for + # the application to produce its response. + if self._close_callback is not None: + callback = self._close_callback + self._close_callback = None + callback() + if not self._finish_future.done(): + future_set_result_unless_cancelled(self._finish_future, None) + self._clear_callbacks() + + def close(self) -> None: + if self.stream is not None: + self.stream.close() + self._clear_callbacks() + if not self._finish_future.done(): + future_set_result_unless_cancelled(self._finish_future, None) + + def detach(self) -> iostream.IOStream: + """Take control of the underlying stream. + + Returns the underlying `.IOStream` object and stops all further + HTTP processing. May only be called during + `.HTTPMessageDelegate.headers_received`. Intended for implementing + protocols like websockets that tunnel over an HTTP handshake. + """ + self._clear_callbacks() + stream = self.stream + self.stream = None # type: ignore + if not self._finish_future.done(): + future_set_result_unless_cancelled(self._finish_future, None) + return stream + + def set_body_timeout(self, timeout: float) -> None: + """Sets the body timeout for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. + """ + self._body_timeout = timeout + + def set_max_body_size(self, max_body_size: int) -> None: + """Sets the body size limit for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. + """ + self._max_body_size = max_body_size + + def write_headers( + self, + start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine], + headers: httputil.HTTPHeaders, + chunk: bytes = None, + ) -> "Future[None]": + """Implements `.HTTPConnection.write_headers`.""" + lines = [] + if self.is_client: + assert isinstance(start_line, httputil.RequestStartLine) + self._request_start_line = start_line + lines.append(utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1]))) + # Client requests with a non-empty body must have either a + # Content-Length or a Transfer-Encoding. + self._chunking_output = ( + start_line.method in ("POST", "PUT", "PATCH") + and "Content-Length" not in headers + and ( + "Transfer-Encoding" not in headers + or headers["Transfer-Encoding"] == "chunked" + ) + ) + else: + assert isinstance(start_line, httputil.ResponseStartLine) + assert self._request_start_line is not None + assert self._request_headers is not None + self._response_start_line = start_line + lines.append(utf8("HTTP/1.1 %d %s" % (start_line[1], start_line[2]))) + self._chunking_output = ( + # TODO: should this use + # self._request_start_line.version or + # start_line.version? + self._request_start_line.version == "HTTP/1.1" + # 1xx, 204 and 304 responses have no body (not even a zero-length + # body), and so should not have either Content-Length or + # Transfer-Encoding headers. + and start_line.code not in (204, 304) + and (start_line.code < 100 or start_line.code >= 200) + # No need to chunk the output if a Content-Length is specified. + and "Content-Length" not in headers + # Applications are discouraged from touching Transfer-Encoding, + # but if they do, leave it alone.
+ and "Transfer-Encoding" not in headers + ) + # If connection to a 1.1 client will be closed, inform client + if ( + self._request_start_line.version == "HTTP/1.1" + and self._disconnect_on_finish + ): + headers["Connection"] = "close" + # If a 1.0 client asked for keep-alive, add the header. + if ( + self._request_start_line.version == "HTTP/1.0" + and self._request_headers.get("Connection", "").lower() == "keep-alive" + ): + headers["Connection"] = "Keep-Alive" + if self._chunking_output: + headers["Transfer-Encoding"] = "chunked" + if not self.is_client and ( + self._request_start_line.method == "HEAD" + or cast(httputil.ResponseStartLine, start_line).code == 304 + ): + self._expected_content_remaining = 0 + elif "Content-Length" in headers: + self._expected_content_remaining = int(headers["Content-Length"]) + else: + self._expected_content_remaining = None + # TODO: headers are supposed to be of type str, but we still have some + # cases that let bytes slip through. Remove these native_str calls when those + # are fixed. + header_lines = ( + native_str(n) + ": " + native_str(v) for n, v in headers.get_all() + ) + lines.extend(l.encode("latin1") for l in header_lines) + for line in lines: + if b"\n" in line: + raise ValueError("Newline in header: " + repr(line)) + future = None + if self.stream.closed(): + future = self._write_future = Future() + future.set_exception(iostream.StreamClosedError()) + future.exception() + else: + future = self._write_future = Future() + data = b"\r\n".join(lines) + b"\r\n\r\n" + if chunk: + data += self._format_chunk(chunk) + self._pending_write = self.stream.write(data) + future_add_done_callback(self._pending_write, self._on_write_complete) + return future + + def _format_chunk(self, chunk: bytes) -> bytes: + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + # Close the stream now to stop further framing errors. + self.stream.close() + raise httputil.HTTPOutputError( + "Tried to write more data than Content-Length" + ) + if self._chunking_output and chunk: + # Don't write out empty chunks because that means END-OF-STREAM + # with chunked encoding + return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n" + else: + return chunk + + def write(self, chunk: bytes) -> "Future[None]": + """Implements `.HTTPConnection.write`. + + For backwards compatibility it is allowed but deprecated to + skip `write_headers` and instead call `write()` with a + pre-encoded header block. 
+ """ + future = None + if self.stream.closed(): + future = self._write_future = Future() + self._write_future.set_exception(iostream.StreamClosedError()) + self._write_future.exception() + else: + future = self._write_future = Future() + self._pending_write = self.stream.write(self._format_chunk(chunk)) + future_add_done_callback(self._pending_write, self._on_write_complete) + return future + + def finish(self) -> None: + """Implements `.HTTPConnection.finish`.""" + if ( + self._expected_content_remaining is not None + and self._expected_content_remaining != 0 + and not self.stream.closed() + ): + self.stream.close() + raise httputil.HTTPOutputError( + "Tried to write %d bytes less than Content-Length" + % self._expected_content_remaining + ) + if self._chunking_output: + if not self.stream.closed(): + self._pending_write = self.stream.write(b"0\r\n\r\n") + self._pending_write.add_done_callback(self._on_write_complete) + self._write_finished = True + # If the app finished the request while we're still reading, + # divert any remaining data away from the delegate and + # close the connection when we're done sending our response. + # Closing the connection is the only way to avoid reading the + # whole input body. + if not self._read_finished: + self._disconnect_on_finish = True + # No more data is coming, so instruct TCP to send any remaining + # data immediately instead of waiting for a full packet or ack. + self.stream.set_nodelay(True) + if self._pending_write is None: + self._finish_request(None) + else: + future_add_done_callback(self._pending_write, self._finish_request) + + def _on_write_complete(self, future: "Future[None]") -> None: + exc = future.exception() + if exc is not None and not isinstance(exc, iostream.StreamClosedError): + future.result() + if self._write_callback is not None: + callback = self._write_callback + self._write_callback = None + self.stream.io_loop.add_callback(callback) + if self._write_future is not None: + future = self._write_future + self._write_future = None + future_set_result_unless_cancelled(future, None) + + def _can_keep_alive( + self, start_line: httputil.RequestStartLine, headers: httputil.HTTPHeaders + ) -> bool: + if self.params.no_keep_alive: + return False + connection_header = headers.get("Connection") + if connection_header is not None: + connection_header = connection_header.lower() + if start_line.version == "HTTP/1.1": + return connection_header != "close" + elif ( + "Content-Length" in headers + or headers.get("Transfer-Encoding", "").lower() == "chunked" + or getattr(start_line, "method", None) in ("HEAD", "GET") + ): + # start_line may be a request or response start line; only + # the former has a method attribute. + return connection_header == "keep-alive" + return False + + def _finish_request(self, future: "Optional[Future[None]]") -> None: + self._clear_callbacks() + if not self.is_client and self._disconnect_on_finish: + self.close() + return + # Turn Nagle's algorithm back on, leaving the stream in its + # default state for the next request. + self.stream.set_nodelay(False) + if not self._finish_future.done(): + future_set_result_unless_cancelled(self._finish_future, None) + + def _parse_headers(self, data: bytes) -> Tuple[str, httputil.HTTPHeaders]: + # The lstrip removes newlines that some implementations sometimes + # insert between messages of a reused connection. Per RFC 7230, + # we SHOULD ignore at least one empty line before the request. 
+ # http://tools.ietf.org/html/rfc7230#section-3.5 + data_str = native_str(data.decode("latin1")).lstrip("\r\n") + # RFC 7230 section 3.5 allows for both CRLF and bare LF. + eol = data_str.find("\n") + start_line = data_str[:eol].rstrip("\r") + headers = httputil.HTTPHeaders.parse(data_str[eol:]) + return start_line, headers + + def _read_body( + self, + code: int, + headers: httputil.HTTPHeaders, + delegate: httputil.HTTPMessageDelegate, + ) -> Optional[Awaitable[None]]: + if "Content-Length" in headers: + if "Transfer-Encoding" in headers: + # Response cannot contain both Content-Length and + # Transfer-Encoding headers. + # http://tools.ietf.org/html/rfc7230#section-3.3.3 + raise httputil.HTTPInputError( + "Response with both Transfer-Encoding and Content-Length" + ) + if "," in headers["Content-Length"]: + # Proxies sometimes cause Content-Length headers to get + # duplicated. If all the values are identical then we can + # use them but if they differ it's an error. + pieces = re.split(r",\s*", headers["Content-Length"]) + if any(i != pieces[0] for i in pieces): + raise httputil.HTTPInputError( + "Multiple unequal Content-Lengths: %r" + % headers["Content-Length"] + ) + headers["Content-Length"] = pieces[0] + + try: + content_length = int(headers["Content-Length"]) # type: Optional[int] + except ValueError: + # Handles non-integer Content-Length value. + raise httputil.HTTPInputError( + "Only integer Content-Length is allowed: %s" + % headers["Content-Length"] + ) + + if cast(int, content_length) > self._max_body_size: + raise httputil.HTTPInputError("Content-Length too long") + else: + content_length = None + + if code == 204: + # This response code is not allowed to have a non-empty body, + # and has an implicit length of zero instead of read-until-close.
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 + if "Transfer-Encoding" in headers or content_length not in (None, 0): + raise httputil.HTTPInputError( + "Response with code %d should not have body" % code + ) + content_length = 0 + + if content_length is not None: + return self._read_fixed_body(content_length, delegate) + if headers.get("Transfer-Encoding", "").lower() == "chunked": + return self._read_chunked_body(delegate) + if self.is_client: + return self._read_body_until_close(delegate) + return None + + async def _read_fixed_body( + self, content_length: int, delegate: httputil.HTTPMessageDelegate + ) -> None: + while content_length > 0: + body = await self.stream.read_bytes( + min(self.params.chunk_size, content_length), partial=True + ) + content_length -= len(body) + if not self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + ret = delegate.data_received(body) + if ret is not None: + await ret + + async def _read_chunked_body(self, delegate: httputil.HTTPMessageDelegate) -> None: + # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1 + total_size = 0 + while True: + chunk_len_str = await self.stream.read_until(b"\r\n", max_bytes=64) + chunk_len = int(chunk_len_str.strip(), 16) + if chunk_len == 0: + crlf = await self.stream.read_bytes(2) + if crlf != b"\r\n": + raise httputil.HTTPInputError( + "improperly terminated chunked request" + ) + return + total_size += chunk_len + if total_size > self._max_body_size: + raise httputil.HTTPInputError("chunked body too large") + bytes_to_read = chunk_len + while bytes_to_read: + chunk = await self.stream.read_bytes( + min(bytes_to_read, self.params.chunk_size), partial=True + ) + bytes_to_read -= len(chunk) + if not self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + ret = delegate.data_received(chunk) + if ret is not None: + await ret + # chunk ends with \r\n + crlf = await self.stream.read_bytes(2) + assert crlf == b"\r\n" + + async def _read_body_until_close( + self, delegate: httputil.HTTPMessageDelegate + ) -> None: + body = await self.stream.read_until_close() + if not self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + ret = delegate.data_received(body) + if ret is not None: + await ret + + +class _GzipMessageDelegate(httputil.HTTPMessageDelegate): + """Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``. + """ + + def __init__(self, delegate: httputil.HTTPMessageDelegate, chunk_size: int) -> None: + self._delegate = delegate + self._chunk_size = chunk_size + self._decompressor = None # type: Optional[GzipDecompressor] + + def headers_received( + self, + start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine], + headers: httputil.HTTPHeaders, + ) -> Optional[Awaitable[None]]: + if headers.get("Content-Encoding") == "gzip": + self._decompressor = GzipDecompressor() + # Downstream delegates will only see uncompressed data, + # so rename the content-encoding header. + # (but note that curl_httpclient doesn't do this). 
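+            # (A downstream delegate that needs to know whether transparent
+            # decompression happened can look for X-Consumed-Content-Encoding.)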
+ headers.add("X-Consumed-Content-Encoding", headers["Content-Encoding"]) + del headers["Content-Encoding"] + return self._delegate.headers_received(start_line, headers) + + async def data_received(self, chunk: bytes) -> None: + if self._decompressor: + compressed_data = chunk + while compressed_data: + decompressed = self._decompressor.decompress( + compressed_data, self._chunk_size + ) + if decompressed: + ret = self._delegate.data_received(decompressed) + if ret is not None: + await ret + compressed_data = self._decompressor.unconsumed_tail + else: + ret = self._delegate.data_received(chunk) + if ret is not None: + await ret + + def finish(self) -> None: + if self._decompressor is not None: + tail = self._decompressor.flush() + if tail: + # The tail should always be empty: decompress returned + # all that it can in data_received and the only + # purpose of the flush call is to detect errors such + # as truncated input. If we did legitimately get a new + # chunk at this point we'd need to change the + # interface to make finish() a coroutine. + raise ValueError( + "decompressor.flush returned data; possile truncated input" + ) + return self._delegate.finish() + + def on_connection_close(self) -> None: + return self._delegate.on_connection_close() + + +class HTTP1ServerConnection(object): + """An HTTP/1.x server.""" + + def __init__( + self, + stream: iostream.IOStream, + params: HTTP1ConnectionParameters = None, + context: object = None, + ) -> None: + """ + :arg stream: an `.IOStream` + :arg params: a `.HTTP1ConnectionParameters` or None + :arg context: an opaque application-defined object that is accessible + as ``connection.context`` + """ + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self._serving_future = None # type: Optional[Future[None]] + + async def close(self) -> None: + """Closes the connection. + + Returns a `.Future` that resolves after the serving loop has exited. + """ + self.stream.close() + # Block until the serving loop is done, but ignore any exceptions + # (start_serving is already responsible for logging them). + assert self._serving_future is not None + try: + await self._serving_future + except Exception: + pass + + def start_serving(self, delegate: httputil.HTTPServerConnectionDelegate) -> None: + """Starts serving requests on this connection. + + :arg delegate: a `.HTTPServerConnectionDelegate` + """ + assert isinstance(delegate, httputil.HTTPServerConnectionDelegate) + fut = gen.convert_yielded(self._server_request_loop(delegate)) + self._serving_future = fut + # Register the future on the IOLoop so its errors get logged. + self.stream.io_loop.add_future(fut, lambda f: f.result()) + + async def _server_request_loop( + self, delegate: httputil.HTTPServerConnectionDelegate + ) -> None: + try: + while True: + conn = HTTP1Connection(self.stream, False, self.params, self.context) + request_delegate = delegate.start_request(self, conn) + try: + ret = await conn.read_response(request_delegate) + except ( + iostream.StreamClosedError, + iostream.UnsatisfiableReadError, + asyncio.CancelledError, + ): + return + except _QuietException: + # This exception was already logged. 
+ conn.close() + return + except Exception: + gen_log.error("Uncaught exception", exc_info=True) + conn.close() + return + if not ret: + return + await asyncio.sleep(0) + finally: + delegate.on_close(self) diff --git a/server/www/packages/packages-linux/x64/tornado/httpclient.py b/server/www/packages/packages-linux/x64/tornado/httpclient.py index 5ed2ee6..5dc1edd 100644 --- a/server/www/packages/packages-linux/x64/tornado/httpclient.py +++ b/server/www/packages/packages-linux/x64/tornado/httpclient.py @@ -1,748 +1,781 @@ -"""Blocking and non-blocking HTTP client interfaces. - -This module defines a common interface shared by two implementations, -``simple_httpclient`` and ``curl_httpclient``. Applications may either -instantiate their chosen implementation class directly or use the -`AsyncHTTPClient` class from this module, which selects an implementation -that can be overridden with the `AsyncHTTPClient.configure` method. - -The default implementation is ``simple_httpclient``, and this is expected -to be suitable for most users' needs. However, some applications may wish -to switch to ``curl_httpclient`` for reasons such as the following: - -* ``curl_httpclient`` has some features not found in ``simple_httpclient``, - including support for HTTP proxies and the ability to use a specified - network interface. - -* ``curl_httpclient`` is more likely to be compatible with sites that are - not-quite-compliant with the HTTP spec, or sites that use little-exercised - features of HTTP. - -* ``curl_httpclient`` is faster. - -* ``curl_httpclient`` was the default prior to Tornado 2.0. - -Note that if you are using ``curl_httpclient``, it is highly -recommended that you use a recent version of ``libcurl`` and -``pycurl``. Currently the minimum supported version of libcurl is -7.22.0, and the minimum version of pycurl is 7.18.2. It is highly -recommended that your ``libcurl`` installation is built with -asynchronous DNS resolver (threaded or c-ares), otherwise you may -encounter various problems with request timeouts (for more -information, see -http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS -and comments in curl_httpclient.py). - -To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup:: - - AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") -""" - -from __future__ import absolute_import, division, print_function - -import functools -import time -import warnings -import weakref - -from tornado.concurrent import Future, future_set_result_unless_cancelled -from tornado.escape import utf8, native_str -from tornado import gen, httputil, stack_context -from tornado.ioloop import IOLoop -from tornado.util import Configurable - - -class HTTPClient(object): - """A blocking HTTP client. - - This interface is provided to make it easier to share code between - synchronous and asynchronous applications. Applications that are - running an `.IOLoop` must use `AsyncHTTPClient` instead. - - Typical usage looks like this:: - - http_client = httpclient.HTTPClient() - try: - response = http_client.fetch("http://www.google.com/") - print(response.body) - except httpclient.HTTPError as e: - # HTTPError is raised for non-200 responses; the response - # can be found in e.response. - print("Error: " + str(e)) - except Exception as e: - # Other errors are possible, such as IOError. - print("Error: " + str(e)) - http_client.close() - - .. 
versionchanged:: 5.0 - - Due to limitations in `asyncio`, it is no longer possible to - use the synchronous ``HTTPClient`` while an `.IOLoop` is running. - Use `AsyncHTTPClient` instead. - - """ - def __init__(self, async_client_class=None, **kwargs): - # Initialize self._closed at the beginning of the constructor - # so that an exception raised here doesn't lead to confusing - # failures in __del__. - self._closed = True - self._io_loop = IOLoop(make_current=False) - if async_client_class is None: - async_client_class = AsyncHTTPClient - # Create the client while our IOLoop is "current", without - # clobbering the thread's real current IOLoop (if any). - self._async_client = self._io_loop.run_sync( - gen.coroutine(lambda: async_client_class(**kwargs))) - self._closed = False - - def __del__(self): - self.close() - - def close(self): - """Closes the HTTPClient, freeing any resources used.""" - if not self._closed: - self._async_client.close() - self._io_loop.close() - self._closed = True - - def fetch(self, request, **kwargs): - """Executes a request, returning an `HTTPResponse`. - - The request may be either a string URL or an `HTTPRequest` object. - If it is a string, we construct an `HTTPRequest` using any additional - kwargs: ``HTTPRequest(request, **kwargs)`` - - If an error occurs during the fetch, we raise an `HTTPError` unless - the ``raise_error`` keyword argument is set to False. - """ - response = self._io_loop.run_sync(functools.partial( - self._async_client.fetch, request, **kwargs)) - return response - - -class AsyncHTTPClient(Configurable): - """An non-blocking HTTP client. - - Example usage:: - - async def f(): - http_client = AsyncHTTPClient() - try: - response = await http_client.fetch("http://www.google.com") - except Exception as e: - print("Error: %s" % e) - else: - print(response.body) - - The constructor for this class is magic in several respects: It - actually creates an instance of an implementation-specific - subclass, and instances are reused as a kind of pseudo-singleton - (one per `.IOLoop`). The keyword argument ``force_instance=True`` - can be used to suppress this singleton behavior. Unless - ``force_instance=True`` is used, no arguments should be passed to - the `AsyncHTTPClient` constructor. The implementation subclass as - well as arguments to its constructor can be set with the static - method `configure()` - - All `AsyncHTTPClient` implementations support a ``defaults`` - keyword argument, which can be used to set default values for - `HTTPRequest` attributes. For example:: - - AsyncHTTPClient.configure( - None, defaults=dict(user_agent="MyUserAgent")) - # or with force_instance: - client = AsyncHTTPClient(force_instance=True, - defaults=dict(user_agent="MyUserAgent")) - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. 
- - """ - @classmethod - def configurable_base(cls): - return AsyncHTTPClient - - @classmethod - def configurable_default(cls): - from tornado.simple_httpclient import SimpleAsyncHTTPClient - return SimpleAsyncHTTPClient - - @classmethod - def _async_clients(cls): - attr_name = '_async_client_dict_' + cls.__name__ - if not hasattr(cls, attr_name): - setattr(cls, attr_name, weakref.WeakKeyDictionary()) - return getattr(cls, attr_name) - - def __new__(cls, force_instance=False, **kwargs): - io_loop = IOLoop.current() - if force_instance: - instance_cache = None - else: - instance_cache = cls._async_clients() - if instance_cache is not None and io_loop in instance_cache: - return instance_cache[io_loop] - instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs) - # Make sure the instance knows which cache to remove itself from. - # It can't simply call _async_clients() because we may be in - # __new__(AsyncHTTPClient) but instance.__class__ may be - # SimpleAsyncHTTPClient. - instance._instance_cache = instance_cache - if instance_cache is not None: - instance_cache[instance.io_loop] = instance - return instance - - def initialize(self, defaults=None): - self.io_loop = IOLoop.current() - self.defaults = dict(HTTPRequest._DEFAULTS) - if defaults is not None: - self.defaults.update(defaults) - self._closed = False - - def close(self): - """Destroys this HTTP client, freeing any file descriptors used. - - This method is **not needed in normal use** due to the way - that `AsyncHTTPClient` objects are transparently reused. - ``close()`` is generally only necessary when either the - `.IOLoop` is also being closed, or the ``force_instance=True`` - argument was used when creating the `AsyncHTTPClient`. - - No other methods may be called on the `AsyncHTTPClient` after - ``close()``. - - """ - if self._closed: - return - self._closed = True - if self._instance_cache is not None: - if self._instance_cache.get(self.io_loop) is not self: - raise RuntimeError("inconsistent AsyncHTTPClient cache") - del self._instance_cache[self.io_loop] - - def fetch(self, request, callback=None, raise_error=True, **kwargs): - """Executes a request, asynchronously returning an `HTTPResponse`. - - The request may be either a string URL or an `HTTPRequest` object. - If it is a string, we construct an `HTTPRequest` using any additional - kwargs: ``HTTPRequest(request, **kwargs)`` - - This method returns a `.Future` whose result is an - `HTTPResponse`. By default, the ``Future`` will raise an - `HTTPError` if the request returned a non-200 response code - (other errors may also be raised if the server could not be - contacted). Instead, if ``raise_error`` is set to False, the - response will always be returned regardless of the response - code. - - If a ``callback`` is given, it will be invoked with the `HTTPResponse`. - In the callback interface, `HTTPError` is not automatically raised. - Instead, you must check the response's ``error`` attribute or - call its `~HTTPResponse.rethrow` method. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in 6.0. Use the returned `.Future` instead. - - The ``raise_error=False`` argument currently suppresses - *all* errors, encapsulating them in `HTTPResponse` objects - with a 599 response code. This will change in Tornado 6.0: - ``raise_error=False`` will only affect the `HTTPError` - raised when a non-200 response code is used. 
- - """ - if self._closed: - raise RuntimeError("fetch() called on closed AsyncHTTPClient") - if not isinstance(request, HTTPRequest): - request = HTTPRequest(url=request, **kwargs) - else: - if kwargs: - raise ValueError("kwargs can't be used if request is an HTTPRequest object") - # We may modify this (to add Host, Accept-Encoding, etc), - # so make sure we don't modify the caller's object. This is also - # where normal dicts get converted to HTTPHeaders objects. - request.headers = httputil.HTTPHeaders(request.headers) - request = _RequestProxy(request, self.defaults) - future = Future() - if callback is not None: - warnings.warn("callback arguments are deprecated, use the returned Future instead", - DeprecationWarning) - callback = stack_context.wrap(callback) - - def handle_future(future): - exc = future.exception() - if isinstance(exc, HTTPError) and exc.response is not None: - response = exc.response - elif exc is not None: - response = HTTPResponse( - request, 599, error=exc, - request_time=time.time() - request.start_time) - else: - response = future.result() - self.io_loop.add_callback(callback, response) - future.add_done_callback(handle_future) - - def handle_response(response): - if raise_error and response.error: - if isinstance(response.error, HTTPError): - response.error.response = response - future.set_exception(response.error) - else: - if response.error and not response._error_is_response_code: - warnings.warn("raise_error=False will allow '%s' to be raised in the future" % - response.error, DeprecationWarning) - future_set_result_unless_cancelled(future, response) - self.fetch_impl(request, handle_response) - return future - - def fetch_impl(self, request, callback): - raise NotImplementedError() - - @classmethod - def configure(cls, impl, **kwargs): - """Configures the `AsyncHTTPClient` subclass to use. - - ``AsyncHTTPClient()`` actually creates an instance of a subclass. - This method may be called with either a class object or the - fully-qualified name of such a class (or ``None`` to use the default, - ``SimpleAsyncHTTPClient``) - - If additional keyword arguments are given, they will be passed - to the constructor of each subclass instance created. The - keyword argument ``max_clients`` determines the maximum number - of simultaneous `~AsyncHTTPClient.fetch()` operations that can - execute in parallel on each `.IOLoop`. Additional arguments - may be supported depending on the implementation class in use. - - Example:: - - AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") - """ - super(AsyncHTTPClient, cls).configure(impl, **kwargs) - - -class HTTPRequest(object): - """HTTP client request object.""" - - # Default values for HTTPRequest parameters. - # Merged with the values on the request object by AsyncHTTPClient - # implementations. 
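# A sketch of the configure() hook documented above, run once at startup.
# The curl implementation requires pycurl to be installed; max_clients and
# defaults are passed through to each implementation instance created on
# any IOLoop afterwards.
from tornado.httpclient import AsyncHTTPClient

AsyncHTTPClient.configure(
    "tornado.curl_httpclient.CurlAsyncHTTPClient",
    max_clients=50,
    defaults=dict(user_agent="MyUserAgent", request_timeout=30.0),
)
client = AsyncHTTPClient()  # now a CurlAsyncHTTPClient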
- _DEFAULTS = dict( - connect_timeout=20.0, - request_timeout=20.0, - follow_redirects=True, - max_redirects=5, - decompress_response=True, - proxy_password='', - allow_nonstandard_methods=False, - validate_cert=True) - - def __init__(self, url, method="GET", headers=None, body=None, - auth_username=None, auth_password=None, auth_mode=None, - connect_timeout=None, request_timeout=None, - if_modified_since=None, follow_redirects=None, - max_redirects=None, user_agent=None, use_gzip=None, - network_interface=None, streaming_callback=None, - header_callback=None, prepare_curl_callback=None, - proxy_host=None, proxy_port=None, proxy_username=None, - proxy_password=None, proxy_auth_mode=None, - allow_nonstandard_methods=None, validate_cert=None, - ca_certs=None, allow_ipv6=None, client_key=None, - client_cert=None, body_producer=None, - expect_100_continue=False, decompress_response=None, - ssl_options=None): - r"""All parameters except ``url`` are optional. - - :arg str url: URL to fetch - :arg str method: HTTP method, e.g. "GET" or "POST" - :arg headers: Additional HTTP headers to pass on the request - :type headers: `~tornado.httputil.HTTPHeaders` or `dict` - :arg body: HTTP request body as a string (byte or unicode; if unicode - the utf-8 encoding will be used) - :arg body_producer: Callable used for lazy/asynchronous request bodies. - It is called with one argument, a ``write`` function, and should - return a `.Future`. It should call the write function with new - data as it becomes available. The write function returns a - `.Future` which can be used for flow control. - Only one of ``body`` and ``body_producer`` may - be specified. ``body_producer`` is not supported on - ``curl_httpclient``. When using ``body_producer`` it is recommended - to pass a ``Content-Length`` in the headers as otherwise chunked - encoding will be used, and many servers do not support chunked - encoding on requests. New in Tornado 4.0 - :arg str auth_username: Username for HTTP authentication - :arg str auth_password: Password for HTTP authentication - :arg str auth_mode: Authentication mode; default is "basic". - Allowed values are implementation-defined; ``curl_httpclient`` - supports "basic" and "digest"; ``simple_httpclient`` only supports - "basic" - :arg float connect_timeout: Timeout for initial connection in seconds, - default 20 seconds - :arg float request_timeout: Timeout for entire request in seconds, - default 20 seconds - :arg if_modified_since: Timestamp for ``If-Modified-Since`` header - :type if_modified_since: `datetime` or `float` - :arg bool follow_redirects: Should redirects be followed automatically - or return the 3xx response? Default True. - :arg int max_redirects: Limit for ``follow_redirects``, default 5. - :arg str user_agent: String to send as ``User-Agent`` header - :arg bool decompress_response: Request a compressed response from - the server and decompress it after downloading. Default is True. - New in Tornado 4.0. - :arg bool use_gzip: Deprecated alias for ``decompress_response`` - since Tornado 4.0. - :arg str network_interface: Network interface to use for request. - ``curl_httpclient`` only; see note below. - :arg collections.abc.Callable streaming_callback: If set, ``streaming_callback`` will - be run with each chunk of data as it is received, and - ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in - the final response. 
- :arg collections.abc.Callable header_callback: If set, ``header_callback`` will - be run with each header line as it is received (including the - first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line - containing only ``\r\n``. All lines include the trailing newline - characters). ``HTTPResponse.headers`` will be empty in the final - response. This is most useful in conjunction with - ``streaming_callback``, because it's the only way to get access to - header data while the request is in progress. - :arg collections.abc.Callable prepare_curl_callback: If set, will be called with - a ``pycurl.Curl`` object to allow the application to make additional - ``setopt`` calls. - :arg str proxy_host: HTTP proxy hostname. To use proxies, - ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``, - ``proxy_pass`` and ``proxy_auth_mode`` are optional. Proxies are - currently only supported with ``curl_httpclient``. - :arg int proxy_port: HTTP proxy port - :arg str proxy_username: HTTP proxy username - :arg str proxy_password: HTTP proxy password - :arg str proxy_auth_mode: HTTP proxy Authentication mode; - default is "basic". supports "basic" and "digest" - :arg bool allow_nonstandard_methods: Allow unknown values for ``method`` - argument? Default is False. - :arg bool validate_cert: For HTTPS requests, validate the server's - certificate? Default is True. - :arg str ca_certs: filename of CA certificates in PEM format, - or None to use defaults. See note below when used with - ``curl_httpclient``. - :arg str client_key: Filename for client SSL key, if any. See - note below when used with ``curl_httpclient``. - :arg str client_cert: Filename for client SSL certificate, if any. - See note below when used with ``curl_httpclient``. - :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in - ``simple_httpclient`` (unsupported by ``curl_httpclient``). - Overrides ``validate_cert``, ``ca_certs``, ``client_key``, - and ``client_cert``. - :arg bool allow_ipv6: Use IPv6 when available? Default is true. - :arg bool expect_100_continue: If true, send the - ``Expect: 100-continue`` header and wait for a continue response - before sending the request body. Only supported with - simple_httpclient. - - .. note:: - - When using ``curl_httpclient`` certain options may be - inherited by subsequent fetches because ``pycurl`` does - not allow them to be cleanly reset. This applies to the - ``ca_certs``, ``client_key``, ``client_cert``, and - ``network_interface`` arguments. If you use these - options, you should pass them on every request (you don't - have to always use the same values, but it's not possible - to mix requests that specify these options with ones that - use the defaults). - - .. versionadded:: 3.1 - The ``auth_mode`` argument. - - .. versionadded:: 4.0 - The ``body_producer`` and ``expect_100_continue`` arguments. - - .. versionadded:: 4.2 - The ``ssl_options`` argument. - - .. versionadded:: 4.5 - The ``proxy_auth_mode`` argument. - """ - # Note that some of these attributes go through property setters - # defined below. 
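# An illustrative HTTPRequest built from the arguments documented above;
# the URL, credentials, and body are placeholders. Any option left unset
# falls back to the client's defaults and then to HTTPRequest._DEFAULTS.
from tornado.httpclient import HTTPRequest

request = HTTPRequest(
    url="https://api.example.com/v1/items",
    method="POST",
    headers={"Content-Type": "application/json"},
    body='{"name": "example"}',
    auth_username="user",
    auth_password="secret",
    connect_timeout=5.0,
    request_timeout=10.0,
    follow_redirects=False,
)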
- self.headers = headers - if if_modified_since: - self.headers["If-Modified-Since"] = httputil.format_timestamp( - if_modified_since) - self.proxy_host = proxy_host - self.proxy_port = proxy_port - self.proxy_username = proxy_username - self.proxy_password = proxy_password - self.proxy_auth_mode = proxy_auth_mode - self.url = url - self.method = method - self.body = body - self.body_producer = body_producer - self.auth_username = auth_username - self.auth_password = auth_password - self.auth_mode = auth_mode - self.connect_timeout = connect_timeout - self.request_timeout = request_timeout - self.follow_redirects = follow_redirects - self.max_redirects = max_redirects - self.user_agent = user_agent - if decompress_response is not None: - self.decompress_response = decompress_response - else: - self.decompress_response = use_gzip - self.network_interface = network_interface - self.streaming_callback = streaming_callback - self.header_callback = header_callback - self.prepare_curl_callback = prepare_curl_callback - self.allow_nonstandard_methods = allow_nonstandard_methods - self.validate_cert = validate_cert - self.ca_certs = ca_certs - self.allow_ipv6 = allow_ipv6 - self.client_key = client_key - self.client_cert = client_cert - self.ssl_options = ssl_options - self.expect_100_continue = expect_100_continue - self.start_time = time.time() - - @property - def headers(self): - return self._headers - - @headers.setter - def headers(self, value): - if value is None: - self._headers = httputil.HTTPHeaders() - else: - self._headers = value - - @property - def body(self): - return self._body - - @body.setter - def body(self, value): - self._body = utf8(value) - - @property - def body_producer(self): - return self._body_producer - - @body_producer.setter - def body_producer(self, value): - self._body_producer = stack_context.wrap(value) - - @property - def streaming_callback(self): - return self._streaming_callback - - @streaming_callback.setter - def streaming_callback(self, value): - self._streaming_callback = stack_context.wrap(value) - - @property - def header_callback(self): - return self._header_callback - - @header_callback.setter - def header_callback(self, value): - self._header_callback = stack_context.wrap(value) - - @property - def prepare_curl_callback(self): - return self._prepare_curl_callback - - @prepare_curl_callback.setter - def prepare_curl_callback(self, value): - self._prepare_curl_callback = stack_context.wrap(value) - - -class HTTPResponse(object): - """HTTP Response object. - - Attributes: - - * request: HTTPRequest object - - * code: numeric HTTP status code, e.g. 200 or 404 - - * reason: human-readable reason phrase describing the status code - - * headers: `tornado.httputil.HTTPHeaders` object - - * effective_url: final location of the resource after following any - redirects - - * buffer: ``cStringIO`` object for response body - - * body: response body as bytes (created on demand from ``self.buffer``) - - * error: Exception object, if any - - * request_time: seconds from request start to finish. Includes all network - operations from DNS resolution to receiving the last byte of data. - Does not include time spent in the queue (due to the ``max_clients`` option). - If redirects were followed, only includes the final request. - - * start_time: Time at which the HTTP operation started, based on `time.time` - (not the monotonic clock used by `.IOLoop.time`). May be ``None`` if the request - timed out while in the queue. 
- - * time_info: dictionary of diagnostic timing information from the request. - Available data are subject to change, but currently uses timings - available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html, - plus ``queue``, which is the delay (if any) introduced by waiting for - a slot under `AsyncHTTPClient`'s ``max_clients`` setting. - - .. versionadded:: 5.1 - - Added the ``start_time`` attribute. - - .. versionchanged:: 5.1 - - The ``request_time`` attribute previously included time spent in the queue - for ``simple_httpclient``, but not in ``curl_httpclient``. Now queueing time - is excluded in both implementations. ``request_time`` is now more accurate for - ``curl_httpclient`` because it uses a monotonic clock when available. - """ - def __init__(self, request, code, headers=None, buffer=None, - effective_url=None, error=None, request_time=None, - time_info=None, reason=None, start_time=None): - if isinstance(request, _RequestProxy): - self.request = request.request - else: - self.request = request - self.code = code - self.reason = reason or httputil.responses.get(code, "Unknown") - if headers is not None: - self.headers = headers - else: - self.headers = httputil.HTTPHeaders() - self.buffer = buffer - self._body = None - if effective_url is None: - self.effective_url = request.url - else: - self.effective_url = effective_url - self._error_is_response_code = False - if error is None: - if self.code < 200 or self.code >= 300: - self._error_is_response_code = True - self.error = HTTPError(self.code, message=self.reason, - response=self) - else: - self.error = None - else: - self.error = error - self.start_time = start_time - self.request_time = request_time - self.time_info = time_info or {} - - @property - def body(self): - if self.buffer is None: - return None - elif self._body is None: - self._body = self.buffer.getvalue() - - return self._body - - def rethrow(self): - """If there was an error on the request, raise an `HTTPError`.""" - if self.error: - raise self.error - - def __repr__(self): - args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items())) - return "%s(%s)" % (self.__class__.__name__, args) - - -class HTTPClientError(Exception): - """Exception thrown for an unsuccessful HTTP request. - - Attributes: - - * ``code`` - HTTP error integer error code, e.g. 404. Error code 599 is - used when no HTTP response was received, e.g. for a timeout. - - * ``response`` - `HTTPResponse` object, if any. - - Note that if ``follow_redirects`` is False, redirects become HTTPErrors, - and you can look at ``error.response.headers['Location']`` to see the - destination of the redirect. - - .. versionchanged:: 5.1 - - Renamed from ``HTTPError`` to ``HTTPClientError`` to avoid collisions with - `tornado.web.HTTPError`. The name ``tornado.httpclient.HTTPError`` remains - as an alias. - """ - def __init__(self, code, message=None, response=None): - self.code = code - self.message = message or httputil.responses.get(code, "Unknown") - self.response = response - super(HTTPClientError, self).__init__(code, message, response) - - def __str__(self): - return "HTTP %d: %s" % (self.code, self.message) - - # There is a cyclic reference between self and self.response, - # which breaks the default __repr__ implementation. - # (especially on pypy, which doesn't have the same recursion - # detection as cpython). - __repr__ = __str__ - - -HTTPError = HTTPClientError - - -class _RequestProxy(object): - """Combines an object with a dictionary of defaults. 
- - Used internally by AsyncHTTPClient implementations. - """ - def __init__(self, request, defaults): - self.request = request - self.defaults = defaults - - def __getattr__(self, name): - request_attr = getattr(self.request, name) - if request_attr is not None: - return request_attr - elif self.defaults is not None: - return self.defaults.get(name, None) - else: - return None - - -def main(): - from tornado.options import define, options, parse_command_line - define("print_headers", type=bool, default=False) - define("print_body", type=bool, default=True) - define("follow_redirects", type=bool, default=True) - define("validate_cert", type=bool, default=True) - define("proxy_host", type=str) - define("proxy_port", type=int) - args = parse_command_line() - client = HTTPClient() - for arg in args: - try: - response = client.fetch(arg, - follow_redirects=options.follow_redirects, - validate_cert=options.validate_cert, - proxy_host=options.proxy_host, - proxy_port=options.proxy_port, - ) - except HTTPError as e: - if e.response is not None: - response = e.response - else: - raise - if options.print_headers: - print(response.headers) - if options.print_body: - print(native_str(response.body)) - client.close() - - -if __name__ == "__main__": - main() +"""Blocking and non-blocking HTTP client interfaces. + +This module defines a common interface shared by two implementations, +``simple_httpclient`` and ``curl_httpclient``. Applications may either +instantiate their chosen implementation class directly or use the +`AsyncHTTPClient` class from this module, which selects an implementation +that can be overridden with the `AsyncHTTPClient.configure` method. + +The default implementation is ``simple_httpclient``, and this is expected +to be suitable for most users' needs. However, some applications may wish +to switch to ``curl_httpclient`` for reasons such as the following: + +* ``curl_httpclient`` has some features not found in ``simple_httpclient``, + including support for HTTP proxies and the ability to use a specified + network interface. + +* ``curl_httpclient`` is more likely to be compatible with sites that are + not-quite-compliant with the HTTP spec, or sites that use little-exercised + features of HTTP. + +* ``curl_httpclient`` is faster. + +Note that if you are using ``curl_httpclient``, it is highly +recommended that you use a recent version of ``libcurl`` and +``pycurl``. Currently the minimum supported version of libcurl is +7.22.0, and the minimum version of pycurl is 7.18.2. It is highly +recommended that your ``libcurl`` installation is built with +asynchronous DNS resolver (threaded or c-ares), otherwise you may +encounter various problems with request timeouts (for more +information, see +http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS +and comments in curl_httpclient.py). + +To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup:: + + AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") +""" + +import datetime +import functools +from io import BytesIO +import ssl +import time +import weakref + +from tornado.concurrent import ( + Future, + future_set_result_unless_cancelled, + future_set_exception_unless_cancelled, +) +from tornado.escape import utf8, native_str +from tornado import gen, httputil +from tornado.ioloop import IOLoop +from tornado.util import Configurable + +from typing import Type, Any, Union, Dict, Callable, Optional, cast, Awaitable + + +class HTTPClient(object): + """A blocking HTTP client. 
+
+    This interface is provided to make it easier to share code between
+    synchronous and asynchronous applications. Applications that are
+    running an `.IOLoop` must use `AsyncHTTPClient` instead.
+
+    Typical usage looks like this::
+
+        http_client = httpclient.HTTPClient()
+        try:
+            response = http_client.fetch("http://www.google.com/")
+            print(response.body)
+        except httpclient.HTTPError as e:
+            # HTTPError is raised for non-200 responses; the response
+            # can be found in e.response.
+            print("Error: " + str(e))
+        except Exception as e:
+            # Other errors are possible, such as IOError.
+            print("Error: " + str(e))
+        http_client.close()
+
+    .. versionchanged:: 5.0
+
+       Due to limitations in `asyncio`, it is no longer possible to
+       use the synchronous ``HTTPClient`` while an `.IOLoop` is running.
+       Use `AsyncHTTPClient` instead.
+
+    """
+
+    def __init__(
+        self, async_client_class: Type["AsyncHTTPClient"] = None, **kwargs: Any
+    ) -> None:
+        # Initialize self._closed at the beginning of the constructor
+        # so that an exception raised here doesn't lead to confusing
+        # failures in __del__.
+        self._closed = True
+        self._io_loop = IOLoop(make_current=False)
+        if async_client_class is None:
+            async_client_class = AsyncHTTPClient
+
+        # Create the client while our IOLoop is "current", without
+        # clobbering the thread's real current IOLoop (if any).
+        async def make_client() -> "AsyncHTTPClient":
+            await gen.sleep(0)
+            assert async_client_class is not None
+            return async_client_class(**kwargs)
+
+        self._async_client = self._io_loop.run_sync(make_client)
+        self._closed = False
+
+    def __del__(self) -> None:
+        self.close()
+
+    def close(self) -> None:
+        """Closes the HTTPClient, freeing any resources used."""
+        if not self._closed:
+            self._async_client.close()
+            self._io_loop.close()
+            self._closed = True
+
+    def fetch(
+        self, request: Union["HTTPRequest", str], **kwargs: Any
+    ) -> "HTTPResponse":
+        """Executes a request, returning an `HTTPResponse`.
+
+        The request may be either a string URL or an `HTTPRequest` object.
+        If it is a string, we construct an `HTTPRequest` using any additional
+        kwargs: ``HTTPRequest(request, **kwargs)``
+
+        If an error occurs during the fetch, we raise an `HTTPError` unless
+        the ``raise_error`` keyword argument is set to False.
+        """
+        response = self._io_loop.run_sync(
+            functools.partial(self._async_client.fetch, request, **kwargs)
+        )
+        return response
+
+
+class AsyncHTTPClient(Configurable):
+    """A non-blocking HTTP client.
+
+    Example usage::
+
+        async def f():
+            http_client = AsyncHTTPClient()
+            try:
+                response = await http_client.fetch("http://www.google.com")
+            except Exception as e:
+                print("Error: %s" % e)
+            else:
+                print(response.body)
+
+    The constructor for this class is magic in several respects: It
+    actually creates an instance of an implementation-specific
+    subclass, and instances are reused as a kind of pseudo-singleton
+    (one per `.IOLoop`). The keyword argument ``force_instance=True``
+    can be used to suppress this singleton behavior. Unless
+    ``force_instance=True`` is used, no arguments should be passed to
+    the `AsyncHTTPClient` constructor. The implementation subclass as
+    well as arguments to its constructor can be set with the static
+    method `configure()`.
+
+    All `AsyncHTTPClient` implementations support a ``defaults``
+    keyword argument, which can be used to set default values for
+    `HTTPRequest` attributes.
For example:: + + AsyncHTTPClient.configure( + None, defaults=dict(user_agent="MyUserAgent")) + # or with force_instance: + client = AsyncHTTPClient(force_instance=True, + defaults=dict(user_agent="MyUserAgent")) + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + """ + + _instance_cache = None # type: Dict[IOLoop, AsyncHTTPClient] + + @classmethod + def configurable_base(cls) -> Type[Configurable]: + return AsyncHTTPClient + + @classmethod + def configurable_default(cls) -> Type[Configurable]: + from tornado.simple_httpclient import SimpleAsyncHTTPClient + + return SimpleAsyncHTTPClient + + @classmethod + def _async_clients(cls) -> Dict[IOLoop, "AsyncHTTPClient"]: + attr_name = "_async_client_dict_" + cls.__name__ + if not hasattr(cls, attr_name): + setattr(cls, attr_name, weakref.WeakKeyDictionary()) + return getattr(cls, attr_name) + + def __new__(cls, force_instance: bool = False, **kwargs: Any) -> "AsyncHTTPClient": + io_loop = IOLoop.current() + if force_instance: + instance_cache = None + else: + instance_cache = cls._async_clients() + if instance_cache is not None and io_loop in instance_cache: + return instance_cache[io_loop] + instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs) # type: ignore + # Make sure the instance knows which cache to remove itself from. + # It can't simply call _async_clients() because we may be in + # __new__(AsyncHTTPClient) but instance.__class__ may be + # SimpleAsyncHTTPClient. + instance._instance_cache = instance_cache + if instance_cache is not None: + instance_cache[instance.io_loop] = instance + return instance + + def initialize(self, defaults: Dict[str, Any] = None) -> None: + self.io_loop = IOLoop.current() + self.defaults = dict(HTTPRequest._DEFAULTS) + if defaults is not None: + self.defaults.update(defaults) + self._closed = False + + def close(self) -> None: + """Destroys this HTTP client, freeing any file descriptors used. + + This method is **not needed in normal use** due to the way + that `AsyncHTTPClient` objects are transparently reused. + ``close()`` is generally only necessary when either the + `.IOLoop` is also being closed, or the ``force_instance=True`` + argument was used when creating the `AsyncHTTPClient`. + + No other methods may be called on the `AsyncHTTPClient` after + ``close()``. + + """ + if self._closed: + return + self._closed = True + if self._instance_cache is not None: + cached_val = self._instance_cache.pop(self.io_loop, None) + # If there's an object other than self in the instance + # cache for our IOLoop, something has gotten mixed up. A + # value of None appears to be possible when this is called + # from a destructor (HTTPClient.__del__) as the weakref + # gets cleared before the destructor runs. + if cached_val is not None and cached_val is not self: + raise RuntimeError("inconsistent AsyncHTTPClient cache") + + def fetch( + self, + request: Union[str, "HTTPRequest"], + raise_error: bool = True, + **kwargs: Any + ) -> Awaitable["HTTPResponse"]: + """Executes a request, asynchronously returning an `HTTPResponse`. + + The request may be either a string URL or an `HTTPRequest` object. + If it is a string, we construct an `HTTPRequest` using any additional + kwargs: ``HTTPRequest(request, **kwargs)`` + + This method returns a `.Future` whose result is an + `HTTPResponse`. By default, the ``Future`` will raise an + `HTTPError` if the request returned a non-200 response code + (other errors may also be raised if the server could not be + contacted). 
Instead, if ``raise_error`` is set to False, the
+        response will always be returned regardless of the response
+        code.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed.  Use the returned
+           `.Future` instead.
+
+           The ``raise_error=False`` argument only affects the
+           `HTTPError` raised when a non-200 response code is used,
+           instead of suppressing all errors.
+        """
+        if self._closed:
+            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
+        if not isinstance(request, HTTPRequest):
+            request = HTTPRequest(url=request, **kwargs)
+        else:
+            if kwargs:
+                raise ValueError(
+                    "kwargs can't be used if request is an HTTPRequest object"
+                )
+        # We may modify this (to add Host, Accept-Encoding, etc),
+        # so make sure we don't modify the caller's object.  This is also
+        # where normal dicts get converted to HTTPHeaders objects.
+        request.headers = httputil.HTTPHeaders(request.headers)
+        request_proxy = _RequestProxy(request, self.defaults)
+        future = Future()  # type: Future[HTTPResponse]
+
+        def handle_response(response: "HTTPResponse") -> None:
+            if response.error:
+                if raise_error or not response._error_is_response_code:
+                    future_set_exception_unless_cancelled(future, response.error)
+                    return
+            future_set_result_unless_cancelled(future, response)
+
+        self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response)
+        return future
+
+    def fetch_impl(
+        self, request: "HTTPRequest", callback: Callable[["HTTPResponse"], None]
+    ) -> None:
+        raise NotImplementedError()
+
+    @classmethod
+    def configure(
+        cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any
+    ) -> None:
+        """Configures the `AsyncHTTPClient` subclass to use.
+
+        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
+        This method may be called with either a class object or the
+        fully-qualified name of such a class (or ``None`` to use the default,
+        ``SimpleAsyncHTTPClient``).
+
+        If additional keyword arguments are given, they will be passed
+        to the constructor of each subclass instance created.  The
+        keyword argument ``max_clients`` determines the maximum number
+        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
+        execute in parallel on each `.IOLoop`.  Additional arguments
+        may be supported depending on the implementation class in use.
+
+        Example::
+
+           AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
+        """
+        super(AsyncHTTPClient, cls).configure(impl, **kwargs)
+
+
+class HTTPRequest(object):
+    """HTTP client request object."""
+
+    _headers = None  # type: Union[Dict[str, str], httputil.HTTPHeaders]
+
+    # Default values for HTTPRequest parameters.
+    # Merged with the values on the request object by AsyncHTTPClient
+    # implementations.
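# A sketch of how the defaults table below interacts with fetch():
# attributes left as None on the request are resolved through the
# internal _RequestProxy, first against the client's defaults= argument
# and then against HTTPRequest._DEFAULTS. The URL is a placeholder.
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.ioloop import IOLoop

async def demo() -> int:
    client = AsyncHTTPClient(
        force_instance=True, defaults=dict(user_agent="MyAgent/1.0")
    )
    # No user_agent or timeouts set on the request itself, so the
    # merged default values apply when fetch_impl runs.
    response = await client.fetch(HTTPRequest(url="http://example.com/"))
    client.close()
    return response.code

IOLoop.current().run_sync(demo)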
+ _DEFAULTS = dict( + connect_timeout=20.0, + request_timeout=20.0, + follow_redirects=True, + max_redirects=5, + decompress_response=True, + proxy_password="", + allow_nonstandard_methods=False, + validate_cert=True, + ) + + def __init__( + self, + url: str, + method: str = "GET", + headers: Union[Dict[str, str], httputil.HTTPHeaders] = None, + body: Union[bytes, str] = None, + auth_username: str = None, + auth_password: str = None, + auth_mode: str = None, + connect_timeout: float = None, + request_timeout: float = None, + if_modified_since: Union[float, datetime.datetime] = None, + follow_redirects: bool = None, + max_redirects: int = None, + user_agent: str = None, + use_gzip: bool = None, + network_interface: str = None, + streaming_callback: Callable[[bytes], None] = None, + header_callback: Callable[[str], None] = None, + prepare_curl_callback: Callable[[Any], None] = None, + proxy_host: str = None, + proxy_port: int = None, + proxy_username: str = None, + proxy_password: str = None, + proxy_auth_mode: str = None, + allow_nonstandard_methods: bool = None, + validate_cert: bool = None, + ca_certs: str = None, + allow_ipv6: bool = None, + client_key: str = None, + client_cert: str = None, + body_producer: Callable[[Callable[[bytes], None]], "Future[None]"] = None, + expect_100_continue: bool = False, + decompress_response: bool = None, + ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None, + ) -> None: + r"""All parameters except ``url`` are optional. + + :arg str url: URL to fetch + :arg str method: HTTP method, e.g. "GET" or "POST" + :arg headers: Additional HTTP headers to pass on the request + :type headers: `~tornado.httputil.HTTPHeaders` or `dict` + :arg body: HTTP request body as a string (byte or unicode; if unicode + the utf-8 encoding will be used) + :arg body_producer: Callable used for lazy/asynchronous request bodies. + It is called with one argument, a ``write`` function, and should + return a `.Future`. It should call the write function with new + data as it becomes available. The write function returns a + `.Future` which can be used for flow control. + Only one of ``body`` and ``body_producer`` may + be specified. ``body_producer`` is not supported on + ``curl_httpclient``. When using ``body_producer`` it is recommended + to pass a ``Content-Length`` in the headers as otherwise chunked + encoding will be used, and many servers do not support chunked + encoding on requests. New in Tornado 4.0 + :arg str auth_username: Username for HTTP authentication + :arg str auth_password: Password for HTTP authentication + :arg str auth_mode: Authentication mode; default is "basic". + Allowed values are implementation-defined; ``curl_httpclient`` + supports "basic" and "digest"; ``simple_httpclient`` only supports + "basic" + :arg float connect_timeout: Timeout for initial connection in seconds, + default 20 seconds + :arg float request_timeout: Timeout for entire request in seconds, + default 20 seconds + :arg if_modified_since: Timestamp for ``If-Modified-Since`` header + :type if_modified_since: `datetime` or `float` + :arg bool follow_redirects: Should redirects be followed automatically + or return the 3xx response? Default True. + :arg int max_redirects: Limit for ``follow_redirects``, default 5. + :arg str user_agent: String to send as ``User-Agent`` header + :arg bool decompress_response: Request a compressed response from + the server and decompress it after downloading. Default is True. + New in Tornado 4.0. 
+        :arg bool use_gzip: Deprecated alias for ``decompress_response``
+           since Tornado 4.0.
+        :arg str network_interface: Network interface or source IP to use for request.
+           See ``curl_httpclient`` note below.
+        :arg collections.abc.Callable streaming_callback: If set, ``streaming_callback`` will
+           be run with each chunk of data as it is received, and
+           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
+           the final response.
+        :arg collections.abc.Callable header_callback: If set, ``header_callback`` will
+           be run with each header line as it is received (including the
+           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
+           containing only ``\r\n``.  All lines include the trailing newline
+           characters).  ``HTTPResponse.headers`` will be empty in the final
+           response.  This is most useful in conjunction with
+           ``streaming_callback``, because it's the only way to get access to
+           header data while the request is in progress.
+        :arg collections.abc.Callable prepare_curl_callback: If set, will be called with
+           a ``pycurl.Curl`` object to allow the application to make additional
+           ``setopt`` calls.
+        :arg str proxy_host: HTTP proxy hostname.  To use proxies,
+           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``,
+           ``proxy_password`` and ``proxy_auth_mode`` are optional.  Proxies are
+           currently only supported with ``curl_httpclient``.
+        :arg int proxy_port: HTTP proxy port
+        :arg str proxy_username: HTTP proxy username
+        :arg str proxy_password: HTTP proxy password
+        :arg str proxy_auth_mode: HTTP proxy authentication mode;
+           default is "basic".  Supports "basic" and "digest".
+        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
+           argument? Default is False.
+        :arg bool validate_cert: For HTTPS requests, validate the server's
+           certificate? Default is True.
+        :arg str ca_certs: filename of CA certificates in PEM format,
+           or None to use defaults.  See note below when used with
+           ``curl_httpclient``.
+        :arg str client_key: Filename for client SSL key, if any.  See
+           note below when used with ``curl_httpclient``.
+        :arg str client_cert: Filename for client SSL certificate, if any.
+           See note below when used with ``curl_httpclient``.
+        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
+           ``simple_httpclient`` (unsupported by ``curl_httpclient``).
+           Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
+           and ``client_cert``.
+        :arg bool allow_ipv6: Use IPv6 when available? Default is True.
+        :arg bool expect_100_continue: If true, send the
+           ``Expect: 100-continue`` header and wait for a continue response
+           before sending the request body.  Only supported with
+           ``simple_httpclient``.
+
+        .. note::
+
+            When using ``curl_httpclient`` certain options may be
+            inherited by subsequent fetches because ``pycurl`` does
+            not allow them to be cleanly reset.  This applies to the
+            ``ca_certs``, ``client_key``, ``client_cert``, and
+            ``network_interface`` arguments.  If you use these
+            options, you should pass them on every request (you don't
+            have to always use the same values, but it's not possible
+            to mix requests that specify these options with ones that
+            use the defaults).
+
+        .. versionadded:: 3.1
+           The ``auth_mode`` argument.
+
+        .. versionadded:: 4.0
+           The ``body_producer`` and ``expect_100_continue`` arguments.
+
+        .. versionadded:: 4.2
+           The ``ssl_options`` argument.
+
+        .. versionadded:: 4.5
+           The ``proxy_auth_mode`` argument.
+        """
+        # Note that some of these attributes go through property setters
+        # defined below.
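# A sketch of the streaming_callback/header_callback arguments just
# documented: the body arrives in chunks as it is received and
# response.body stays empty. The URL is a placeholder and the default
# simple_httpclient implementation is assumed.
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop

chunks = []

def on_chunk(chunk: bytes) -> None:
    chunks.append(chunk)           # body data lands here, not in response.body

def on_header(line: str) -> None:
    print("header:", line.rstrip())

async def stream(url: str) -> int:
    client = AsyncHTTPClient()
    await client.fetch(url, streaming_callback=on_chunk,
                       header_callback=on_header)
    return sum(len(c) for c in chunks)

print(IOLoop.current().run_sync(lambda: stream("http://example.com/")))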
+ self.headers = headers + if if_modified_since: + self.headers["If-Modified-Since"] = httputil.format_timestamp( + if_modified_since + ) + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_username = proxy_username + self.proxy_password = proxy_password + self.proxy_auth_mode = proxy_auth_mode + self.url = url + self.method = method + self.body = body + self.body_producer = body_producer + self.auth_username = auth_username + self.auth_password = auth_password + self.auth_mode = auth_mode + self.connect_timeout = connect_timeout + self.request_timeout = request_timeout + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self.user_agent = user_agent + if decompress_response is not None: + self.decompress_response = decompress_response # type: Optional[bool] + else: + self.decompress_response = use_gzip + self.network_interface = network_interface + self.streaming_callback = streaming_callback + self.header_callback = header_callback + self.prepare_curl_callback = prepare_curl_callback + self.allow_nonstandard_methods = allow_nonstandard_methods + self.validate_cert = validate_cert + self.ca_certs = ca_certs + self.allow_ipv6 = allow_ipv6 + self.client_key = client_key + self.client_cert = client_cert + self.ssl_options = ssl_options + self.expect_100_continue = expect_100_continue + self.start_time = time.time() + + @property + def headers(self) -> httputil.HTTPHeaders: + # TODO: headers may actually be a plain dict until fairly late in + # the process (AsyncHTTPClient.fetch), but practically speaking, + # whenever the property is used they're already HTTPHeaders. + return self._headers # type: ignore + + @headers.setter + def headers(self, value: Union[Dict[str, str], httputil.HTTPHeaders]) -> None: + if value is None: + self._headers = httputil.HTTPHeaders() + else: + self._headers = value # type: ignore + + @property + def body(self) -> bytes: + return self._body + + @body.setter + def body(self, value: Union[bytes, str]) -> None: + self._body = utf8(value) + + +class HTTPResponse(object): + """HTTP Response object. + + Attributes: + + * ``request``: HTTPRequest object + + * ``code``: numeric HTTP status code, e.g. 200 or 404 + + * ``reason``: human-readable reason phrase describing the status code + + * ``headers``: `tornado.httputil.HTTPHeaders` object + + * ``effective_url``: final location of the resource after following any + redirects + + * ``buffer``: ``cStringIO`` object for response body + + * ``body``: response body as bytes (created on demand from ``self.buffer``) + + * ``error``: Exception object, if any + + * ``request_time``: seconds from request start to finish. Includes all + network operations from DNS resolution to receiving the last byte of + data. Does not include time spent in the queue (due to the + ``max_clients`` option). If redirects were followed, only includes + the final request. + + * ``start_time``: Time at which the HTTP operation started, based on + `time.time` (not the monotonic clock used by `.IOLoop.time`). May + be ``None`` if the request timed out while in the queue. + + * ``time_info``: dictionary of diagnostic timing information from the + request. Available data are subject to change, but currently uses timings + available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html, + plus ``queue``, which is the delay (if any) introduced by waiting for + a slot under `AsyncHTTPClient`'s ``max_clients`` setting. + + .. versionadded:: 5.1 + + Added the ``start_time`` attribute. + + .. 
versionchanged:: 5.1 + + The ``request_time`` attribute previously included time spent in the queue + for ``simple_httpclient``, but not in ``curl_httpclient``. Now queueing time + is excluded in both implementations. ``request_time`` is now more accurate for + ``curl_httpclient`` because it uses a monotonic clock when available. + """ + + # I'm not sure why these don't get type-inferred from the references in __init__. + error = None # type: Optional[BaseException] + _error_is_response_code = False + request = None # type: HTTPRequest + + def __init__( + self, + request: HTTPRequest, + code: int, + headers: httputil.HTTPHeaders = None, + buffer: BytesIO = None, + effective_url: str = None, + error: BaseException = None, + request_time: float = None, + time_info: Dict[str, float] = None, + reason: str = None, + start_time: float = None, + ) -> None: + if isinstance(request, _RequestProxy): + self.request = request.request + else: + self.request = request + self.code = code + self.reason = reason or httputil.responses.get(code, "Unknown") + if headers is not None: + self.headers = headers + else: + self.headers = httputil.HTTPHeaders() + self.buffer = buffer + self._body = None # type: Optional[bytes] + if effective_url is None: + self.effective_url = request.url + else: + self.effective_url = effective_url + self._error_is_response_code = False + if error is None: + if self.code < 200 or self.code >= 300: + self._error_is_response_code = True + self.error = HTTPError(self.code, message=self.reason, response=self) + else: + self.error = None + else: + self.error = error + self.start_time = start_time + self.request_time = request_time + self.time_info = time_info or {} + + @property + def body(self) -> bytes: + if self.buffer is None: + return b"" + elif self._body is None: + self._body = self.buffer.getvalue() + + return self._body + + def rethrow(self) -> None: + """If there was an error on the request, raise an `HTTPError`.""" + if self.error: + raise self.error + + def __repr__(self) -> str: + args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items())) + return "%s(%s)" % (self.__class__.__name__, args) + + +class HTTPClientError(Exception): + """Exception thrown for an unsuccessful HTTP request. + + Attributes: + + * ``code`` - HTTP error integer error code, e.g. 404. Error code 599 is + used when no HTTP response was received, e.g. for a timeout. + + * ``response`` - `HTTPResponse` object, if any. + + Note that if ``follow_redirects`` is False, redirects become HTTPErrors, + and you can look at ``error.response.headers['Location']`` to see the + destination of the redirect. + + .. versionchanged:: 5.1 + + Renamed from ``HTTPError`` to ``HTTPClientError`` to avoid collisions with + `tornado.web.HTTPError`. The name ``tornado.httpclient.HTTPError`` remains + as an alias. + """ + + def __init__( + self, code: int, message: str = None, response: HTTPResponse = None + ) -> None: + self.code = code + self.message = message or httputil.responses.get(code, "Unknown") + self.response = response + super(HTTPClientError, self).__init__(code, message, response) + + def __str__(self) -> str: + return "HTTP %d: %s" % (self.code, self.message) + + # There is a cyclic reference between self and self.response, + # which breaks the default __repr__ implementation. + # (especially on pypy, which doesn't have the same recursion + # detection as cpython). + __repr__ = __str__ + + +HTTPError = HTTPClientError + + +class _RequestProxy(object): + """Combines an object with a dictionary of defaults. 
+ + Used internally by AsyncHTTPClient implementations. + """ + + def __init__( + self, request: HTTPRequest, defaults: Optional[Dict[str, Any]] + ) -> None: + self.request = request + self.defaults = defaults + + def __getattr__(self, name: str) -> Any: + request_attr = getattr(self.request, name) + if request_attr is not None: + return request_attr + elif self.defaults is not None: + return self.defaults.get(name, None) + else: + return None + + +def main() -> None: + from tornado.options import define, options, parse_command_line + + define("print_headers", type=bool, default=False) + define("print_body", type=bool, default=True) + define("follow_redirects", type=bool, default=True) + define("validate_cert", type=bool, default=True) + define("proxy_host", type=str) + define("proxy_port", type=int) + args = parse_command_line() + client = HTTPClient() + for arg in args: + try: + response = client.fetch( + arg, + follow_redirects=options.follow_redirects, + validate_cert=options.validate_cert, + proxy_host=options.proxy_host, + proxy_port=options.proxy_port, + ) + except HTTPError as e: + if e.response is not None: + response = e.response + else: + raise + if options.print_headers: + print(response.headers) + if options.print_body: + print(native_str(response.body)) + client.close() + + +if __name__ == "__main__": + main() diff --git a/server/www/packages/packages-linux/x64/tornado/httpserver.py b/server/www/packages/packages-linux/x64/tornado/httpserver.py index 3498d71..2fbfdee 100644 --- a/server/www/packages/packages-linux/x64/tornado/httpserver.py +++ b/server/www/packages/packages-linux/x64/tornado/httpserver.py @@ -1,330 +1,398 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A non-blocking, single-threaded HTTP server. - -Typical applications have little direct interaction with the `HTTPServer` -class except to start a server at the beginning of the process -(and even that is often done indirectly via `tornado.web.Application.listen`). - -.. versionchanged:: 4.0 - - The ``HTTPRequest`` class that used to live in this module has been moved - to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. -""" - -from __future__ import absolute_import, division, print_function - -import socket - -from tornado.escape import native_str -from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters -from tornado import gen -from tornado import httputil -from tornado import iostream -from tornado import netutil -from tornado.tcpserver import TCPServer -from tornado.util import Configurable - - -class HTTPServer(TCPServer, Configurable, - httputil.HTTPServerConnectionDelegate): - r"""A non-blocking, single-threaded HTTP server. - - A server is defined by a subclass of `.HTTPServerConnectionDelegate`, - or, for backwards compatibility, a callback that takes an - `.HTTPServerRequest` as an argument. The delegate is usually a - `tornado.web.Application`. 
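# A minimal sketch of the delegate pattern described above: a
# tornado.web.Application handed to HTTPServer. The handler and port
# 8888 are arbitrary illustrative choices.
import tornado.ioloop
import tornado.web
from tornado.httpserver import HTTPServer

class MainHandler(tornado.web.RequestHandler):
    def get(self) -> None:
        self.write("Hello, world")

app = tornado.web.Application([(r"/", MainHandler)])
server = HTTPServer(app, xheaders=True)  # honor X-Real-Ip/X-Forwarded-For
server.listen(8888)
tornado.ioloop.IOLoop.current().start()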
- - `HTTPServer` supports keep-alive connections by default - (automatically for HTTP/1.1, or for HTTP/1.0 when the client - requests ``Connection: keep-alive``). - - If ``xheaders`` is ``True``, we support the - ``X-Real-Ip``/``X-Forwarded-For`` and - ``X-Scheme``/``X-Forwarded-Proto`` headers, which override the - remote IP and URI scheme/protocol for all requests. These headers - are useful when running Tornado behind a reverse proxy or load - balancer. The ``protocol`` argument can also be set to ``https`` - if Tornado is run behind an SSL-decoding proxy that does not set one of - the supported ``xheaders``. - - By default, when parsing the ``X-Forwarded-For`` header, Tornado will - select the last (i.e., the closest) address on the list of hosts as the - remote host IP address. To select the next server in the chain, a list of - trusted downstream hosts may be passed as the ``trusted_downstream`` - argument. These hosts will be skipped when parsing the ``X-Forwarded-For`` - header. - - To make this server serve SSL traffic, send the ``ssl_options`` keyword - argument with an `ssl.SSLContext` object. For compatibility with older - versions of Python ``ssl_options`` may also be a dictionary of keyword - arguments for the `ssl.wrap_socket` method.:: - - ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), - os.path.join(data_dir, "mydomain.key")) - HTTPServer(application, ssl_options=ssl_ctx) - - `HTTPServer` initialization follows one of three patterns (the - initialization methods are defined on `tornado.tcpserver.TCPServer`): - - 1. `~tornado.tcpserver.TCPServer.listen`: simple single-process:: - - server = HTTPServer(app) - server.listen(8888) - IOLoop.current().start() - - In many cases, `tornado.web.Application.listen` can be used to avoid - the need to explicitly create the `HTTPServer`. - - 2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`: - simple multi-process:: - - server = HTTPServer(app) - server.bind(8888) - server.start(0) # Forks multiple sub-processes - IOLoop.current().start() - - When using this interface, an `.IOLoop` must *not* be passed - to the `HTTPServer` constructor. `~.TCPServer.start` will always start - the server on the default singleton `.IOLoop`. - - 3. `~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process:: - - sockets = tornado.netutil.bind_sockets(8888) - tornado.process.fork_processes(0) - server = HTTPServer(app) - server.add_sockets(sockets) - IOLoop.current().start() - - The `~.TCPServer.add_sockets` interface is more complicated, - but it can be used with `tornado.process.fork_processes` to - give you more flexibility in when the fork happens. - `~.TCPServer.add_sockets` can also be used in single-process - servers if you want to create your listening sockets in some - way other than `tornado.netutil.bind_sockets`. - - .. versionchanged:: 4.0 - Added ``decompress_request``, ``chunk_size``, ``max_header_size``, - ``idle_connection_timeout``, ``body_timeout``, ``max_body_size`` - arguments. Added support for `.HTTPServerConnectionDelegate` - instances as ``request_callback``. - - .. versionchanged:: 4.1 - `.HTTPServerConnectionDelegate.start_request` is now called with - two arguments ``(server_conn, request_conn)`` (in accordance with the - documentation) instead of one ``(request_conn)``. - - .. versionchanged:: 4.2 - `HTTPServer` is now a subclass of `tornado.util.Configurable`. - - .. 
versionchanged:: 4.5 - Added the ``trusted_downstream`` argument. - - .. versionchanged:: 5.0 - The ``io_loop`` argument has been removed. - """ - def __init__(self, *args, **kwargs): - # Ignore args to __init__; real initialization belongs in - # initialize since we're Configurable. (there's something - # weird in initialization order between this class, - # Configurable, and TCPServer so we can't leave __init__ out - # completely) - pass - - def initialize(self, request_callback, no_keep_alive=False, - xheaders=False, ssl_options=None, protocol=None, - decompress_request=False, - chunk_size=None, max_header_size=None, - idle_connection_timeout=None, body_timeout=None, - max_body_size=None, max_buffer_size=None, - trusted_downstream=None): - self.request_callback = request_callback - self.xheaders = xheaders - self.protocol = protocol - self.conn_params = HTTP1ConnectionParameters( - decompress=decompress_request, - chunk_size=chunk_size, - max_header_size=max_header_size, - header_timeout=idle_connection_timeout or 3600, - max_body_size=max_body_size, - body_timeout=body_timeout, - no_keep_alive=no_keep_alive) - TCPServer.__init__(self, ssl_options=ssl_options, - max_buffer_size=max_buffer_size, - read_chunk_size=chunk_size) - self._connections = set() - self.trusted_downstream = trusted_downstream - - @classmethod - def configurable_base(cls): - return HTTPServer - - @classmethod - def configurable_default(cls): - return HTTPServer - - @gen.coroutine - def close_all_connections(self): - while self._connections: - # Peek at an arbitrary element of the set - conn = next(iter(self._connections)) - yield conn.close() - - def handle_stream(self, stream, address): - context = _HTTPRequestContext(stream, address, - self.protocol, - self.trusted_downstream) - conn = HTTP1ServerConnection( - stream, self.conn_params, context) - self._connections.add(conn) - conn.start_serving(self) - - def start_request(self, server_conn, request_conn): - if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate): - delegate = self.request_callback.start_request(server_conn, request_conn) - else: - delegate = _CallableAdapter(self.request_callback, request_conn) - - if self.xheaders: - delegate = _ProxyAdapter(delegate, request_conn) - - return delegate - - def on_close(self, server_conn): - self._connections.remove(server_conn) - - -class _CallableAdapter(httputil.HTTPMessageDelegate): - def __init__(self, request_callback, request_conn): - self.connection = request_conn - self.request_callback = request_callback - self.request = None - self.delegate = None - self._chunks = [] - - def headers_received(self, start_line, headers): - self.request = httputil.HTTPServerRequest( - connection=self.connection, start_line=start_line, - headers=headers) - - def data_received(self, chunk): - self._chunks.append(chunk) - - def finish(self): - self.request.body = b''.join(self._chunks) - self.request._parse_body() - self.request_callback(self.request) - - def on_connection_close(self): - self._chunks = None - - -class _HTTPRequestContext(object): - def __init__(self, stream, address, protocol, trusted_downstream=None): - self.address = address - # Save the socket's address family now so we know how to - # interpret self.address even after the stream is closed - # and its socket attribute replaced with None. - if stream.socket is not None: - self.address_family = stream.socket.family - else: - self.address_family = None - # In HTTPServerRequest we want an IP, not a full socket address. 
- if (self.address_family in (socket.AF_INET, socket.AF_INET6) and - address is not None): - self.remote_ip = address[0] - else: - # Unix (or other) socket; fake the remote address. - self.remote_ip = '0.0.0.0' - if protocol: - self.protocol = protocol - elif isinstance(stream, iostream.SSLIOStream): - self.protocol = "https" - else: - self.protocol = "http" - self._orig_remote_ip = self.remote_ip - self._orig_protocol = self.protocol - self.trusted_downstream = set(trusted_downstream or []) - - def __str__(self): - if self.address_family in (socket.AF_INET, socket.AF_INET6): - return self.remote_ip - elif isinstance(self.address, bytes): - # Python 3 with the -bb option warns about str(bytes), - # so convert it explicitly. - # Unix socket addresses are str on mac but bytes on linux. - return native_str(self.address) - else: - return str(self.address) - - def _apply_xheaders(self, headers): - """Rewrite the ``remote_ip`` and ``protocol`` fields.""" - # Squid uses X-Forwarded-For, others use X-Real-Ip - ip = headers.get("X-Forwarded-For", self.remote_ip) - # Skip trusted downstream hosts in X-Forwarded-For list - for ip in (cand.strip() for cand in reversed(ip.split(','))): - if ip not in self.trusted_downstream: - break - ip = headers.get("X-Real-Ip", ip) - if netutil.is_valid_ip(ip): - self.remote_ip = ip - # AWS uses X-Forwarded-Proto - proto_header = headers.get( - "X-Scheme", headers.get("X-Forwarded-Proto", - self.protocol)) - if proto_header: - # use only the last proto entry if there is more than one - # TODO: support trusting mutiple layers of proxied protocol - proto_header = proto_header.split(',')[-1].strip() - if proto_header in ("http", "https"): - self.protocol = proto_header - - def _unapply_xheaders(self): - """Undo changes from `_apply_xheaders`. - - Xheaders are per-request so they should not leak to the next - request on the same connection. - """ - self.remote_ip = self._orig_remote_ip - self.protocol = self._orig_protocol - - -class _ProxyAdapter(httputil.HTTPMessageDelegate): - def __init__(self, delegate, request_conn): - self.connection = request_conn - self.delegate = delegate - - def headers_received(self, start_line, headers): - self.connection.context._apply_xheaders(headers) - return self.delegate.headers_received(start_line, headers) - - def data_received(self, chunk): - return self.delegate.data_received(chunk) - - def finish(self): - self.delegate.finish() - self._cleanup() - - def on_connection_close(self): - self.delegate.on_connection_close() - self._cleanup() - - def _cleanup(self): - self.connection.context._unapply_xheaders() - - -HTTPRequest = httputil.HTTPServerRequest +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded HTTP server. + +Typical applications have little direct interaction with the `HTTPServer` +class except to start a server at the beginning of the process +(and even that is often done indirectly via `tornado.web.Application.listen`). + +.. 
versionchanged:: 4.0 + + The ``HTTPRequest`` class that used to live in this module has been moved + to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. +""" + +import socket +import ssl + +from tornado.escape import native_str +from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters +from tornado import httputil +from tornado import iostream +from tornado import netutil +from tornado.tcpserver import TCPServer +from tornado.util import Configurable + +import typing +from typing import Union, Any, Dict, Callable, List, Type, Tuple, Optional, Awaitable + +if typing.TYPE_CHECKING: + from typing import Set # noqa: F401 + + +class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate): + r"""A non-blocking, single-threaded HTTP server. + + A server is defined by a subclass of `.HTTPServerConnectionDelegate`, + or, for backwards compatibility, a callback that takes an + `.HTTPServerRequest` as an argument. The delegate is usually a + `tornado.web.Application`. + + `HTTPServer` supports keep-alive connections by default + (automatically for HTTP/1.1, or for HTTP/1.0 when the client + requests ``Connection: keep-alive``). + + If ``xheaders`` is ``True``, we support the + ``X-Real-Ip``/``X-Forwarded-For`` and + ``X-Scheme``/``X-Forwarded-Proto`` headers, which override the + remote IP and URI scheme/protocol for all requests. These headers + are useful when running Tornado behind a reverse proxy or load + balancer. The ``protocol`` argument can also be set to ``https`` + if Tornado is run behind an SSL-decoding proxy that does not set one of + the supported ``xheaders``. + + By default, when parsing the ``X-Forwarded-For`` header, Tornado will + select the last (i.e., the closest) address on the list of hosts as the + remote host IP address. To select the next server in the chain, a list of + trusted downstream hosts may be passed as the ``trusted_downstream`` + argument. These hosts will be skipped when parsing the ``X-Forwarded-For`` + header. + + To make this server serve SSL traffic, send the ``ssl_options`` keyword + argument with an `ssl.SSLContext` object. For compatibility with older + versions of Python ``ssl_options`` may also be a dictionary of keyword + arguments for the `ssl.wrap_socket` method.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), + os.path.join(data_dir, "mydomain.key")) + HTTPServer(application, ssl_options=ssl_ctx) + + `HTTPServer` initialization follows one of three patterns (the + initialization methods are defined on `tornado.tcpserver.TCPServer`): + + 1. `~tornado.tcpserver.TCPServer.listen`: simple single-process:: + + server = HTTPServer(app) + server.listen(8888) + IOLoop.current().start() + + In many cases, `tornado.web.Application.listen` can be used to avoid + the need to explicitly create the `HTTPServer`. + + 2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`: + simple multi-process:: + + server = HTTPServer(app) + server.bind(8888) + server.start(0) # Forks multiple sub-processes + IOLoop.current().start() + + When using this interface, an `.IOLoop` must *not* be passed + to the `HTTPServer` constructor. `~.TCPServer.start` will always start + the server on the default singleton `.IOLoop`. + + 3. 
`~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process:: + + sockets = tornado.netutil.bind_sockets(8888) + tornado.process.fork_processes(0) + server = HTTPServer(app) + server.add_sockets(sockets) + IOLoop.current().start() + + The `~.TCPServer.add_sockets` interface is more complicated, + but it can be used with `tornado.process.fork_processes` to + give you more flexibility in when the fork happens. + `~.TCPServer.add_sockets` can also be used in single-process + servers if you want to create your listening sockets in some + way other than `tornado.netutil.bind_sockets`. + + .. versionchanged:: 4.0 + Added ``decompress_request``, ``chunk_size``, ``max_header_size``, + ``idle_connection_timeout``, ``body_timeout``, ``max_body_size`` + arguments. Added support for `.HTTPServerConnectionDelegate` + instances as ``request_callback``. + + .. versionchanged:: 4.1 + `.HTTPServerConnectionDelegate.start_request` is now called with + two arguments ``(server_conn, request_conn)`` (in accordance with the + documentation) instead of one ``(request_conn)``. + + .. versionchanged:: 4.2 + `HTTPServer` is now a subclass of `tornado.util.Configurable`. + + .. versionchanged:: 4.5 + Added the ``trusted_downstream`` argument. + + .. versionchanged:: 5.0 + The ``io_loop`` argument has been removed. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Ignore args to __init__; real initialization belongs in + # initialize since we're Configurable. (there's something + # weird in initialization order between this class, + # Configurable, and TCPServer so we can't leave __init__ out + # completely) + pass + + def initialize( + self, + request_callback: Union[ + httputil.HTTPServerConnectionDelegate, + Callable[[httputil.HTTPServerRequest], None], + ], + no_keep_alive: bool = False, + xheaders: bool = False, + ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None, + protocol: str = None, + decompress_request: bool = False, + chunk_size: int = None, + max_header_size: int = None, + idle_connection_timeout: float = None, + body_timeout: float = None, + max_body_size: int = None, + max_buffer_size: int = None, + trusted_downstream: List[str] = None, + ) -> None: + # This method's signature is not extracted with autodoc + # because we want its arguments to appear on the class + # constructor. When changing this signature, also update the + # copy in httpserver.rst. + self.request_callback = request_callback + self.xheaders = xheaders + self.protocol = protocol + self.conn_params = HTTP1ConnectionParameters( + decompress=decompress_request, + chunk_size=chunk_size, + max_header_size=max_header_size, + header_timeout=idle_connection_timeout or 3600, + max_body_size=max_body_size, + body_timeout=body_timeout, + no_keep_alive=no_keep_alive, + ) + TCPServer.__init__( + self, + ssl_options=ssl_options, + max_buffer_size=max_buffer_size, + read_chunk_size=chunk_size, + ) + self._connections = set() # type: Set[HTTP1ServerConnection] + self.trusted_downstream = trusted_downstream + + @classmethod + def configurable_base(cls) -> Type[Configurable]: + return HTTPServer + + @classmethod + def configurable_default(cls) -> Type[Configurable]: + return HTTPServer + + async def close_all_connections(self) -> None: + """Close all open connections and asynchronously wait for them to finish. + + This method is used in combination with `~.TCPServer.stop` to + support clean shutdowns (especially for unittests). 
Typical + usage would call ``stop()`` first to stop accepting new + connections, then ``await close_all_connections()`` to wait for + existing connections to finish. + + This method does not currently close open websocket connections. + + Note that this method is a coroutine and must be called with ``await``. + + """ + while self._connections: + # Peek at an arbitrary element of the set + conn = next(iter(self._connections)) + await conn.close() + + def handle_stream(self, stream: iostream.IOStream, address: Tuple) -> None: + context = _HTTPRequestContext( + stream, address, self.protocol, self.trusted_downstream + ) + conn = HTTP1ServerConnection(stream, self.conn_params, context) + self._connections.add(conn) + conn.start_serving(self) + + def start_request( + self, server_conn: object, request_conn: httputil.HTTPConnection + ) -> httputil.HTTPMessageDelegate: + if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate): + delegate = self.request_callback.start_request(server_conn, request_conn) + else: + delegate = _CallableAdapter(self.request_callback, request_conn) + + if self.xheaders: + delegate = _ProxyAdapter(delegate, request_conn) + + return delegate + + def on_close(self, server_conn: object) -> None: + self._connections.remove(typing.cast(HTTP1ServerConnection, server_conn)) + + +class _CallableAdapter(httputil.HTTPMessageDelegate): + def __init__( + self, + request_callback: Callable[[httputil.HTTPServerRequest], None], + request_conn: httputil.HTTPConnection, + ) -> None: + self.connection = request_conn + self.request_callback = request_callback + self.request = None # type: Optional[httputil.HTTPServerRequest] + self.delegate = None + self._chunks = [] # type: List[bytes] + + def headers_received( + self, + start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine], + headers: httputil.HTTPHeaders, + ) -> Optional[Awaitable[None]]: + self.request = httputil.HTTPServerRequest( + connection=self.connection, + start_line=typing.cast(httputil.RequestStartLine, start_line), + headers=headers, + ) + return None + + def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]: + self._chunks.append(chunk) + return None + + def finish(self) -> None: + assert self.request is not None + self.request.body = b"".join(self._chunks) + self.request._parse_body() + self.request_callback(self.request) + + def on_connection_close(self) -> None: + del self._chunks + + +class _HTTPRequestContext(object): + def __init__( + self, + stream: iostream.IOStream, + address: Tuple, + protocol: Optional[str], + trusted_downstream: List[str] = None, + ) -> None: + self.address = address + # Save the socket's address family now so we know how to + # interpret self.address even after the stream is closed + # and its socket attribute replaced with None. + if stream.socket is not None: + self.address_family = stream.socket.family + else: + self.address_family = None + # In HTTPServerRequest we want an IP, not a full socket address.
+ self.remote_ip = "0.0.0.0" + if protocol: + self.protocol = protocol + elif isinstance(stream, iostream.SSLIOStream): + self.protocol = "https" + else: + self.protocol = "http" + self._orig_remote_ip = self.remote_ip + self._orig_protocol = self.protocol + self.trusted_downstream = set(trusted_downstream or []) + + def __str__(self) -> str: + if self.address_family in (socket.AF_INET, socket.AF_INET6): + return self.remote_ip + elif isinstance(self.address, bytes): + # Python 3 with the -bb option warns about str(bytes), + # so convert it explicitly. + # Unix socket addresses are str on mac but bytes on linux. + return native_str(self.address) + else: + return str(self.address) + + def _apply_xheaders(self, headers: httputil.HTTPHeaders) -> None: + """Rewrite the ``remote_ip`` and ``protocol`` fields.""" + # Squid uses X-Forwarded-For, others use X-Real-Ip + ip = headers.get("X-Forwarded-For", self.remote_ip) + # Skip trusted downstream hosts in X-Forwarded-For list + for ip in (cand.strip() for cand in reversed(ip.split(","))): + if ip not in self.trusted_downstream: + break + ip = headers.get("X-Real-Ip", ip) + if netutil.is_valid_ip(ip): + self.remote_ip = ip + # AWS uses X-Forwarded-Proto + proto_header = headers.get( + "X-Scheme", headers.get("X-Forwarded-Proto", self.protocol) + ) + if proto_header: + # use only the last proto entry if there is more than one + # TODO: support trusting multiple layers of proxied protocol + proto_header = proto_header.split(",")[-1].strip() + if proto_header in ("http", "https"): + self.protocol = proto_header + + def _unapply_xheaders(self) -> None: + """Undo changes from `_apply_xheaders`. + + Xheaders are per-request so they should not leak to the next + request on the same connection. + """ + self.remote_ip = self._orig_remote_ip + self.protocol = self._orig_protocol + + +class _ProxyAdapter(httputil.HTTPMessageDelegate): + def __init__( + self, + delegate: httputil.HTTPMessageDelegate, + request_conn: httputil.HTTPConnection, + ) -> None: + self.connection = request_conn + self.delegate = delegate + + def headers_received( + self, + start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine], + headers: httputil.HTTPHeaders, + ) -> Optional[Awaitable[None]]: + # TODO: either make context an official part of the + # HTTPConnection interface or figure out some other way to do this. + self.connection.context._apply_xheaders(headers) # type: ignore + return self.delegate.headers_received(start_line, headers) + + def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]: + return self.delegate.data_received(chunk) + + def finish(self) -> None: + self.delegate.finish() + self._cleanup() + + def on_connection_close(self) -> None: + self.delegate.on_connection_close() + self._cleanup() + + def _cleanup(self) -> None: + self.connection.context._unapply_xheaders() # type: ignore + + +HTTPRequest = httputil.HTTPServerRequest diff --git a/server/www/packages/packages-linux/x64/tornado/httputil.py b/server/www/packages/packages-linux/x64/tornado/httputil.py index 3961446..2f7f0f3 100644 --- a/server/www/packages/packages-linux/x64/tornado/httputil.py +++ b/server/www/packages/packages-linux/x64/tornado/httputil.py @@ -1,1095 +1,1144 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License.
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""HTTP utility code shared by clients and servers. - -This module also defines the `HTTPServerRequest` class which is exposed -via `tornado.web.RequestHandler.request`. -""" - -from __future__ import absolute_import, division, print_function - -import calendar -import collections -import copy -import datetime -import email.utils -import numbers -import re -import time -import unicodedata -import warnings - -from tornado.escape import native_str, parse_qs_bytes, utf8 -from tornado.log import gen_log -from tornado.util import ObjectDict, PY3, unicode_type - -if PY3: - import http.cookies as Cookie - from http.client import responses - from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl -else: - import Cookie - from httplib import responses - from urllib import urlencode - from urlparse import urlparse, urlunparse, parse_qsl - - -# responses is unused in this file, but we re-export it to other files. -# Reference it so pyflakes doesn't complain. -responses - -try: - from ssl import SSLError -except ImportError: - # ssl is unavailable on app engine. - class _SSLError(Exception): - pass - # Hack around a mypy limitation. We can't simply put "type: ignore" - # on the class definition itself; must go through an assignment. - SSLError = _SSLError # type: ignore - -try: - import typing # noqa: F401 -except ImportError: - pass - - -# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line -# terminator and ignore any preceding CR. -_CRLF_RE = re.compile(r'\r?\n') - - -class _NormalizedHeaderCache(dict): - """Dynamic cached mapping of header names to Http-Header-Case. - - Implemented as a dict subclass so that cache hits are as fast as a - normal dict lookup, without the overhead of a python function - call. - - >>> normalized_headers = _NormalizedHeaderCache(10) - >>> normalized_headers["coNtent-TYPE"] - 'Content-Type' - """ - def __init__(self, size): - super(_NormalizedHeaderCache, self).__init__() - self.size = size - self.queue = collections.deque() - - def __missing__(self, key): - normalized = "-".join([w.capitalize() for w in key.split("-")]) - self[key] = normalized - self.queue.append(key) - if len(self.queue) > self.size: - # Limit the size of the cache. LRU would be better, but this - # simpler approach should be fine. In Python 2.7+ we could - # use OrderedDict (or in 3.2+, @functools.lru_cache). - old_key = self.queue.popleft() - del self[old_key] - return normalized - - -_normalized_headers = _NormalizedHeaderCache(1000) - - -class HTTPHeaders(collections.MutableMapping): - """A dictionary that maintains ``Http-Header-Case`` for all keys. - - Supports multiple values per key via a pair of new methods, - `add()` and `get_list()`. The regular dictionary interface - returns a single value per key, with multiple values joined by a - comma. 
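A minimal sketch of the multi-value behavior described above, assuming the vendored package is importable as ``tornado``:

from tornado.httputil import HTTPHeaders

# Repeated names are preserved: the dict interface joins values with
# commas, while get_list() keeps them separate.
h = HTTPHeaders.parse("Set-Cookie: A=B\r\nSet-Cookie: C=D\r\n")
assert h["set-cookie"] == "A=B,C=D"
assert h.get_list("set-cookie") == ["A=B", "C=D"]
for name, value in h.get_all():  # every (name, value) pair, repeats included
    print(name, value)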
- - >>> h = HTTPHeaders({"content-type": "text/html"}) - >>> list(h.keys()) - ['Content-Type'] - >>> h["Content-Type"] - 'text/html' - - >>> h.add("Set-Cookie", "A=B") - >>> h.add("Set-Cookie", "C=D") - >>> h["set-cookie"] - 'A=B,C=D' - >>> h.get_list("set-cookie") - ['A=B', 'C=D'] - - >>> for (k,v) in sorted(h.get_all()): - ... print('%s: %s' % (k,v)) - ... - Content-Type: text/html - Set-Cookie: A=B - Set-Cookie: C=D - """ - def __init__(self, *args, **kwargs): - self._dict = {} # type: typing.Dict[str, str] - self._as_list = {} # type: typing.Dict[str, typing.List[str]] - self._last_key = None - if (len(args) == 1 and len(kwargs) == 0 and - isinstance(args[0], HTTPHeaders)): - # Copy constructor - for k, v in args[0].get_all(): - self.add(k, v) - else: - # Dict-style initialization - self.update(*args, **kwargs) - - # new public methods - - def add(self, name, value): - # type: (str, str) -> None - """Adds a new value for the given key.""" - norm_name = _normalized_headers[name] - self._last_key = norm_name - if norm_name in self: - self._dict[norm_name] = (native_str(self[norm_name]) + ',' + - native_str(value)) - self._as_list[norm_name].append(value) - else: - self[norm_name] = value - - def get_list(self, name): - """Returns all values for the given header as a list.""" - norm_name = _normalized_headers[name] - return self._as_list.get(norm_name, []) - - def get_all(self): - # type: () -> typing.Iterable[typing.Tuple[str, str]] - """Returns an iterable of all (name, value) pairs. - - If a header has multiple values, multiple pairs will be - returned with the same name. - """ - for name, values in self._as_list.items(): - for value in values: - yield (name, value) - - def parse_line(self, line): - """Updates the dictionary with a single header line. - - >>> h = HTTPHeaders() - >>> h.parse_line("Content-Type: text/html") - >>> h.get('content-type') - 'text/html' - """ - if line[0].isspace(): - # continuation of a multi-line header - if self._last_key is None: - raise HTTPInputError("first header line cannot start with whitespace") - new_part = ' ' + line.lstrip() - self._as_list[self._last_key][-1] += new_part - self._dict[self._last_key] += new_part - else: - try: - name, value = line.split(":", 1) - except ValueError: - raise HTTPInputError("no colon in header line") - self.add(name, value.strip()) - - @classmethod - def parse(cls, headers): - """Returns a dictionary from HTTP header text. - - >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n") - >>> sorted(h.items()) - [('Content-Length', '42'), ('Content-Type', 'text/html')] - - .. versionchanged:: 5.1 - - Raises `HTTPInputError` on malformed headers instead of a - mix of `KeyError`, and `ValueError`. - - """ - h = cls() - for line in _CRLF_RE.split(headers): - if line: - h.parse_line(line) - return h - - # MutableMapping abstract method implementations. - - def __setitem__(self, name, value): - norm_name = _normalized_headers[name] - self._dict[norm_name] = value - self._as_list[norm_name] = [value] - - def __getitem__(self, name): - # type: (str) -> str - return self._dict[_normalized_headers[name]] - - def __delitem__(self, name): - norm_name = _normalized_headers[name] - del self._dict[norm_name] - del self._as_list[norm_name] - - def __len__(self): - return len(self._dict) - - def __iter__(self): - return iter(self._dict) - - def copy(self): - # defined in dict but not in MutableMapping. - return HTTPHeaders(self) - - # Use our overridden copy method for the copy.copy module. 
- # This makes shallow copies one level deeper, but preserves - # the appearance that HTTPHeaders is a single container. - __copy__ = copy - - def __str__(self): - lines = [] - for name, value in self.get_all(): - lines.append("%s: %s\n" % (name, value)) - return "".join(lines) - - __unicode__ = __str__ - - -class HTTPServerRequest(object): - """A single HTTP request. - - All attributes are type `str` unless otherwise noted. - - .. attribute:: method - - HTTP request method, e.g. "GET" or "POST" - - .. attribute:: uri - - The requested uri. - - .. attribute:: path - - The path portion of `uri` - - .. attribute:: query - - The query portion of `uri` - - .. attribute:: version - - HTTP version specified in request, e.g. "HTTP/1.1" - - .. attribute:: headers - - `.HTTPHeaders` dictionary-like object for request headers. Acts like - a case-insensitive dictionary with additional methods for repeated - headers. - - .. attribute:: body - - Request body, if present, as a byte string. - - .. attribute:: remote_ip - - Client's IP address as a string. If ``HTTPServer.xheaders`` is set, - will pass along the real IP address provided by a load balancer - in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. - - .. versionchanged:: 3.1 - The list format of ``X-Forwarded-For`` is now supported. - - .. attribute:: protocol - - The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` - is set, will pass along the protocol used by a load balancer if - reported via an ``X-Scheme`` header. - - .. attribute:: host - - The requested hostname, usually taken from the ``Host`` header. - - .. attribute:: arguments - - GET/POST arguments are available in the arguments property, which - maps arguments names to lists of values (to support multiple values - for individual names). Names are of type `str`, while arguments - are byte strings. Note that this is different from - `.RequestHandler.get_argument`, which returns argument values as - unicode strings. - - .. attribute:: query_arguments - - Same format as ``arguments``, but contains only arguments extracted - from the query string. - - .. versionadded:: 3.2 - - .. attribute:: body_arguments - - Same format as ``arguments``, but contains only arguments extracted - from the request body. - - .. versionadded:: 3.2 - - .. attribute:: files - - File uploads are available in the files property, which maps file - names to lists of `.HTTPFile`. - - .. attribute:: connection - - An HTTP request is attached to a single HTTP connection, which can - be accessed through the "connection" attribute. Since connections - are typically kept open in HTTP/1.1, multiple requests can be handled - sequentially on a single connection. - - .. versionchanged:: 4.0 - Moved from ``tornado.httpserver.HTTPRequest``. 
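A minimal callback-style server sketch exercising the attributes documented above; the port, handler name, and trusted proxy address are illustrative assumptions, not part of this codebase:

import tornado.ioloop
from tornado.httpserver import HTTPServer
from tornado.httputil import HTTPHeaders, HTTPServerRequest, ResponseStartLine

def handle(request: HTTPServerRequest) -> None:
    # arguments/query_arguments map str names to lists of byte strings
    print(request.method, request.path, request.query_arguments, request.remote_ip)
    body = b"hello\n"
    request.connection.write_headers(
        ResponseStartLine("HTTP/1.1", 200, "OK"),
        HTTPHeaders({"Content-Length": str(len(body))}),
        body,
    )
    request.connection.finish()

# xheaders/trusted_downstream make remote_ip honor X-Forwarded-For from a
# proxy at 127.0.0.1 (see _apply_xheaders in httpserver.py above).
server = HTTPServer(handle, xheaders=True, trusted_downstream=["127.0.0.1"])
server.listen(8888)
tornado.ioloop.IOLoop.current().start()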
- """ - def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None, - body=None, host=None, files=None, connection=None, - start_line=None, server_connection=None): - if start_line is not None: - method, uri, version = start_line - self.method = method - self.uri = uri - self.version = version - self.headers = headers or HTTPHeaders() - self.body = body or b"" - - # set remote IP and protocol - context = getattr(connection, 'context', None) - self.remote_ip = getattr(context, 'remote_ip', None) - self.protocol = getattr(context, 'protocol', "http") - - self.host = host or self.headers.get("Host") or "127.0.0.1" - self.host_name = split_host_and_port(self.host.lower())[0] - self.files = files or {} - self.connection = connection - self.server_connection = server_connection - self._start_time = time.time() - self._finish_time = None - - self.path, sep, self.query = uri.partition('?') - self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) - self.query_arguments = copy.deepcopy(self.arguments) - self.body_arguments = {} - - def supports_http_1_1(self): - """Returns True if this request supports HTTP/1.1 semantics. - - .. deprecated:: 4.0 - - Applications are less likely to need this information with - the introduction of `.HTTPConnection`. If you still need - it, access the ``version`` attribute directly. This method - will be removed in Tornado 6.0. - - """ - warnings.warn("supports_http_1_1() is deprecated, use request.version instead", - DeprecationWarning) - return self.version == "HTTP/1.1" - - @property - def cookies(self): - """A dictionary of Cookie.Morsel objects.""" - if not hasattr(self, "_cookies"): - self._cookies = Cookie.SimpleCookie() - if "Cookie" in self.headers: - try: - parsed = parse_cookie(self.headers["Cookie"]) - except Exception: - pass - else: - for k, v in parsed.items(): - try: - self._cookies[k] = v - except Exception: - # SimpleCookie imposes some restrictions on keys; - # parse_cookie does not. Discard any cookies - # with disallowed keys. - pass - return self._cookies - - def write(self, chunk, callback=None): - """Writes the given chunk to the response stream. - - .. deprecated:: 4.0 - Use ``request.connection`` and the `.HTTPConnection` methods - to write the response. This method will be removed in Tornado 6.0. - """ - warnings.warn("req.write deprecated, use req.connection.write and write_headers instead", - DeprecationWarning) - assert isinstance(chunk, bytes) - assert self.version.startswith("HTTP/1."), \ - "deprecated interface only supported in HTTP/1.x" - self.connection.write(chunk, callback=callback) - - def finish(self): - """Finishes this HTTP request on the open connection. - - .. deprecated:: 4.0 - Use ``request.connection`` and the `.HTTPConnection` methods - to write the response. This method will be removed in Tornado 6.0. - """ - warnings.warn("req.finish deprecated, use req.connection.finish instead", - DeprecationWarning) - self.connection.finish() - self._finish_time = time.time() - - def full_url(self): - """Reconstructs the full URL for this request.""" - return self.protocol + "://" + self.host + self.uri - - def request_time(self): - """Returns the amount of time it took for this request to execute.""" - if self._finish_time is None: - return time.time() - self._start_time - else: - return self._finish_time - self._start_time - - def get_ssl_certificate(self, binary_form=False): - """Returns the client's SSL certificate, if any. 
- - To use client certificates, the HTTPServer's - `ssl.SSLContext.verify_mode` field must be set, e.g.:: - - ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - ssl_ctx.load_cert_chain("foo.crt", "foo.key") - ssl_ctx.load_verify_locations("cacerts.pem") - ssl_ctx.verify_mode = ssl.CERT_REQUIRED - server = HTTPServer(app, ssl_options=ssl_ctx) - - By default, the return value is a dictionary (or None, if no - client certificate is present). If ``binary_form`` is true, a - DER-encoded form of the certificate is returned instead. See - SSLSocket.getpeercert() in the standard library for more - details. - http://docs.python.org/library/ssl.html#sslsocket-objects - """ - try: - return self.connection.stream.socket.getpeercert( - binary_form=binary_form) - except SSLError: - return None - - def _parse_body(self): - parse_body_arguments( - self.headers.get("Content-Type", ""), self.body, - self.body_arguments, self.files, - self.headers) - - for k, v in self.body_arguments.items(): - self.arguments.setdefault(k, []).extend(v) - - def __repr__(self): - attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") - args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) - return "%s(%s)" % (self.__class__.__name__, args) - - -class HTTPInputError(Exception): - """Exception class for malformed HTTP requests or responses - from remote sources. - - .. versionadded:: 4.0 - """ - pass - - -class HTTPOutputError(Exception): - """Exception class for errors in HTTP output. - - .. versionadded:: 4.0 - """ - pass - - -class HTTPServerConnectionDelegate(object): - """Implement this interface to handle requests from `.HTTPServer`. - - .. versionadded:: 4.0 - """ - def start_request(self, server_conn, request_conn): - """This method is called by the server when a new request has started. - - :arg server_conn: is an opaque object representing the long-lived - (e.g. tcp-level) connection. - :arg request_conn: is a `.HTTPConnection` object for a single - request/response exchange. - - This method should return a `.HTTPMessageDelegate`. - """ - raise NotImplementedError() - - def on_close(self, server_conn): - """This method is called when a connection has been closed. - - :arg server_conn: is a server connection that has previously been - passed to ``start_request``. - """ - pass - - -class HTTPMessageDelegate(object): - """Implement this interface to handle an HTTP request or response. - - .. versionadded:: 4.0 - """ - def headers_received(self, start_line, headers): - """Called when the HTTP headers have been received and parsed. - - :arg start_line: a `.RequestStartLine` or `.ResponseStartLine` - depending on whether this is a client or server message. - :arg headers: a `.HTTPHeaders` instance. - - Some `.HTTPConnection` methods can only be called during - ``headers_received``. - - May return a `.Future`; if it does the body will not be read - until it is done. - """ - pass - - def data_received(self, chunk): - """Called when a chunk of data has been received. - - May return a `.Future` for flow control. - """ - pass - - def finish(self): - """Called after the last chunk of data has been received.""" - pass - - def on_connection_close(self): - """Called if the connection is closed without finishing the request. - - If ``headers_received`` is called, either ``finish`` or - ``on_connection_close`` will be called, but not both. - """ - pass - - -class HTTPConnection(object): - """Applications use this interface to write their responses. - - .. 
versionadded:: 4.0 - """ - def write_headers(self, start_line, headers, chunk=None, callback=None): - """Write an HTTP header block. - - :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`. - :arg headers: a `.HTTPHeaders` instance. - :arg chunk: the first (optional) chunk of data. This is an optimization - so that small responses can be written in the same call as their - headers. - :arg callback: a callback to be run when the write is complete. - - The ``version`` field of ``start_line`` is ignored. - - Returns a `.Future` if no callback is given. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. - """ - raise NotImplementedError() - - def write(self, chunk, callback=None): - """Writes a chunk of body data. - - The callback will be run when the write is complete. If no callback - is given, returns a Future. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. - """ - raise NotImplementedError() - - def finish(self): - """Indicates that the last body data has been written. - """ - raise NotImplementedError() - - -def url_concat(url, args): - """Concatenate url and arguments regardless of whether - url has existing query parameters. - - ``args`` may be either a dictionary or a list of key-value pairs - (the latter allows for multiple values with the same key. - - >>> url_concat("http://example.com/foo", dict(c="d")) - 'http://example.com/foo?c=d' - >>> url_concat("http://example.com/foo?a=b", dict(c="d")) - 'http://example.com/foo?a=b&c=d' - >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")]) - 'http://example.com/foo?a=b&c=d&c=d2' - """ - if args is None: - return url - parsed_url = urlparse(url) - if isinstance(args, dict): - parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True) - parsed_query.extend(args.items()) - elif isinstance(args, list) or isinstance(args, tuple): - parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True) - parsed_query.extend(args) - else: - err = "'args' parameter should be dict, list or tuple. Not {0}".format( - type(args)) - raise TypeError(err) - final_query = urlencode(parsed_query) - url = urlunparse(( - parsed_url[0], - parsed_url[1], - parsed_url[2], - parsed_url[3], - final_query, - parsed_url[5])) - return url - - -class HTTPFile(ObjectDict): - """Represents a file uploaded via a form. - - For backwards compatibility, its instance attributes are also - accessible as dictionary keys. - - * ``filename`` - * ``body`` - * ``content_type`` - """ - pass - - -def _parse_request_range(range_header): - """Parses a Range header. - - Returns either ``None`` or tuple ``(start, end)``. - Note that while the HTTP headers use inclusive byte positions, - this method returns indexes suitable for use in slices. - - >>> start, end = _parse_request_range("bytes=1-2") - >>> start, end - (1, 3) - >>> [0, 1, 2, 3, 4][start:end] - [1, 2] - >>> _parse_request_range("bytes=6-") - (6, None) - >>> _parse_request_range("bytes=-6") - (-6, None) - >>> _parse_request_range("bytes=-0") - (None, 0) - >>> _parse_request_range("bytes=") - (None, None) - >>> _parse_request_range("foo=42") - >>> _parse_request_range("bytes=1-2,6-10") - - Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed). - - See [0] for the details of the range header. 
- - [0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges - """ - unit, _, value = range_header.partition("=") - unit, value = unit.strip(), value.strip() - if unit != "bytes": - return None - start_b, _, end_b = value.partition("-") - try: - start = _int_or_none(start_b) - end = _int_or_none(end_b) - except ValueError: - return None - if end is not None: - if start is None: - if end != 0: - start = -end - end = None - else: - end += 1 - return (start, end) - - -def _get_content_range(start, end, total): - """Returns a suitable Content-Range header: - - >>> print(_get_content_range(None, 1, 4)) - bytes 0-0/4 - >>> print(_get_content_range(1, 3, 4)) - bytes 1-2/4 - >>> print(_get_content_range(None, None, 4)) - bytes 0-3/4 - """ - start = start or 0 - end = (end or total) - 1 - return "bytes %s-%s/%s" % (start, end, total) - - -def _int_or_none(val): - val = val.strip() - if val == "": - return None - return int(val) - - -def parse_body_arguments(content_type, body, arguments, files, headers=None): - """Parses a form request body. - - Supports ``application/x-www-form-urlencoded`` and - ``multipart/form-data``. The ``content_type`` parameter should be - a string and ``body`` should be a byte string. The ``arguments`` - and ``files`` parameters are dictionaries that will be updated - with the parsed contents. - """ - if headers and 'Content-Encoding' in headers: - gen_log.warning("Unsupported Content-Encoding: %s", - headers['Content-Encoding']) - return - if content_type.startswith("application/x-www-form-urlencoded"): - try: - uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) - except Exception as e: - gen_log.warning('Invalid x-www-form-urlencoded body: %s', e) - uri_arguments = {} - for name, values in uri_arguments.items(): - if values: - arguments.setdefault(name, []).extend(values) - elif content_type.startswith("multipart/form-data"): - try: - fields = content_type.split(";") - for field in fields: - k, sep, v = field.strip().partition("=") - if k == "boundary" and v: - parse_multipart_form_data(utf8(v), body, arguments, files) - break - else: - raise ValueError("multipart boundary not found") - except Exception as e: - gen_log.warning("Invalid multipart/form-data: %s", e) - - -def parse_multipart_form_data(boundary, data, arguments, files): - """Parses a ``multipart/form-data`` body. - - The ``boundary`` and ``data`` parameters are both byte strings. - The dictionaries given in the arguments and files parameters - will be updated with the contents of the body. - - .. versionchanged:: 5.1 - - Now recognizes non-ASCII filenames in RFC 2231/5987 - (``filename*=``) format. - """ - # The standard allows for the boundary to be quoted in the header, - # although it's rare (it happens at least for google app engine - # xmpp). I think we're also supposed to handle backslash-escapes - # here but I'll save that until we see a client that uses them - # in the wild. 
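The inclusive-to-exclusive index conversion performed by ``_parse_request_range`` above is easy to sanity-check by hand; a small standalone sketch:

data = b"abcdefghij"
# "bytes=1-2" names inclusive byte positions 1 and 2, so the parser
# returns (1, 3): the end is incremented once to fit Python's
# half-open slices.
start, end = 1, 2 + 1
assert data[start:end] == b"bc"
# A suffix range "bytes=-6" becomes (-6, None): the last six bytes.
assert data[-6:] == b"efghij"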
- if boundary.startswith(b'"') and boundary.endswith(b'"'): - boundary = boundary[1:-1] - final_boundary_index = data.rfind(b"--" + boundary + b"--") - if final_boundary_index == -1: - gen_log.warning("Invalid multipart/form-data: no final boundary") - return - parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n") - for part in parts: - if not part: - continue - eoh = part.find(b"\r\n\r\n") - if eoh == -1: - gen_log.warning("multipart/form-data missing headers") - continue - headers = HTTPHeaders.parse(part[:eoh].decode("utf-8")) - disp_header = headers.get("Content-Disposition", "") - disposition, disp_params = _parse_header(disp_header) - if disposition != "form-data" or not part.endswith(b"\r\n"): - gen_log.warning("Invalid multipart/form-data") - continue - value = part[eoh + 4:-2] - if not disp_params.get("name"): - gen_log.warning("multipart/form-data value missing name") - continue - name = disp_params["name"] - if disp_params.get("filename"): - ctype = headers.get("Content-Type", "application/unknown") - files.setdefault(name, []).append(HTTPFile( # type: ignore - filename=disp_params["filename"], body=value, - content_type=ctype)) - else: - arguments.setdefault(name, []).append(value) - - -def format_timestamp(ts): - """Formats a timestamp in the format used by HTTP. - - The argument may be a numeric timestamp as returned by `time.time`, - a time tuple as returned by `time.gmtime`, or a `datetime.datetime` - object. - - >>> format_timestamp(1359312200) - 'Sun, 27 Jan 2013 18:43:20 GMT' - """ - if isinstance(ts, numbers.Real): - pass - elif isinstance(ts, (tuple, time.struct_time)): - ts = calendar.timegm(ts) - elif isinstance(ts, datetime.datetime): - ts = calendar.timegm(ts.utctimetuple()) - else: - raise TypeError("unknown timestamp type: %r" % ts) - return email.utils.formatdate(ts, usegmt=True) - - -RequestStartLine = collections.namedtuple( - 'RequestStartLine', ['method', 'path', 'version']) - - -def parse_request_start_line(line): - """Returns a (method, path, version) tuple for an HTTP 1.x request line. - - The response is a `collections.namedtuple`. - - >>> parse_request_start_line("GET /foo HTTP/1.1") - RequestStartLine(method='GET', path='/foo', version='HTTP/1.1') - """ - try: - method, path, version = line.split(" ") - except ValueError: - # https://tools.ietf.org/html/rfc7230#section-3.1.1 - # invalid request-line SHOULD respond with a 400 (Bad Request) - raise HTTPInputError("Malformed HTTP request line") - if not re.match(r"^HTTP/1\.[0-9]$", version): - raise HTTPInputError( - "Malformed HTTP version in HTTP Request-Line: %r" % version) - return RequestStartLine(method, path, version) - - -ResponseStartLine = collections.namedtuple( - 'ResponseStartLine', ['version', 'code', 'reason']) - - -def parse_response_start_line(line): - """Returns a (version, code, reason) tuple for an HTTP 1.x response line. - - The response is a `collections.namedtuple`. - - >>> parse_response_start_line("HTTP/1.1 200 OK") - ResponseStartLine(version='HTTP/1.1', code=200, reason='OK') - """ - line = native_str(line) - match = re.match("(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)", line) - if not match: - raise HTTPInputError("Error parsing response start line") - return ResponseStartLine(match.group(1), int(match.group(2)), - match.group(3)) - -# _parseparam and _parse_header are copied and modified from python2.7's cgi.py -# The original 2.7 version of this code did not correctly support some -# combinations of semicolons and double quotes. 
-# It has also been modified to support valueless parameters as seen in -# websocket extension negotiations, and to support non-ascii values in -# RFC 2231/5987 format. - - -def _parseparam(s): - while s[:1] == ';': - s = s[1:] - end = s.find(';') - while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: - end = s.find(';', end + 1) - if end < 0: - end = len(s) - f = s[:end] - yield f.strip() - s = s[end:] - - -def _parse_header(line): - r"""Parse a Content-type like header. - - Return the main content-type and a dictionary of options. - - >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st" - >>> ct, d = _parse_header(d) - >>> ct - 'form-data' - >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape') - True - >>> d['foo'] - 'b\\a"r' - """ - parts = _parseparam(';' + line) - key = next(parts) - # decode_params treats first argument special, but we already stripped key - params = [('Dummy', 'value')] - for p in parts: - i = p.find('=') - if i >= 0: - name = p[:i].strip().lower() - value = p[i + 1:].strip() - params.append((name, native_str(value))) - params = email.utils.decode_params(params) - params.pop(0) # get rid of the dummy again - pdict = {} - for name, value in params: - value = email.utils.collapse_rfc2231_value(value) - if len(value) >= 2 and value[0] == '"' and value[-1] == '"': - value = value[1:-1] - pdict[name] = value - return key, pdict - - -def _encode_header(key, pdict): - """Inverse of _parse_header. - - >>> _encode_header('permessage-deflate', - ... {'client_max_window_bits': 15, 'client_no_context_takeover': None}) - 'permessage-deflate; client_max_window_bits=15; client_no_context_takeover' - """ - if not pdict: - return key - out = [key] - # Sort the parameters just to make it easy to test. - for k, v in sorted(pdict.items()): - if v is None: - out.append(k) - else: - # TODO: quote if necessary. - out.append('%s=%s' % (k, v)) - return '; '.join(out) - - -def encode_username_password(username, password): - """Encodes a username/password pair in the format used by HTTP auth. - - The return value is a byte string in the form ``username:password``. - - .. versionadded:: 5.1 - """ - if isinstance(username, unicode_type): - username = unicodedata.normalize('NFC', username) - if isinstance(password, unicode_type): - password = unicodedata.normalize('NFC', password) - return utf8(username) + b":" + utf8(password) - - -def doctests(): - import doctest - return doctest.DocTestSuite() - - -def split_host_and_port(netloc): - """Returns ``(host, port)`` tuple from ``netloc``. - - Returned ``port`` will be ``None`` if not present. - - .. versionadded:: 4.1 - """ - match = re.match(r'^(.+):(\d+)$', netloc) - if match: - host = match.group(1) - port = int(match.group(2)) - else: - host = netloc - port = None - return (host, port) - - -def qs_to_qsl(qs): - """Generator converting a result of ``parse_qs`` back to name-value pairs. - - .. versionadded:: 5.0 - """ - for k, vs in qs.items(): - for v in vs: - yield (k, v) - - -_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") -_QuotePatt = re.compile(r"[\\].") -_nulljoin = ''.join - - -def _unquote_cookie(str): - """Handle double quotes and escaping in cookie values. - - This method is copied verbatim from the Python 3.5 standard - library (http.cookies._unquote) so we don't have to depend on - non-public interfaces. - """ - # If there aren't any doublequotes, - # then there can't be any special characters. See RFC 2109. 
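A quick round trip through ``_parse_header`` and ``_encode_header`` above; both are private (underscore-prefixed) helpers, used here only to illustrate the parameter handling:

from tornado.httputil import _encode_header, _parse_header

disposition, params = _parse_header('form-data; name="avatar"; filename="a.png"')
assert disposition == "form-data"
assert params == {"name": "avatar", "filename": "a.png"}
# _encode_header sorts parameters, so its output order is stable.
assert _encode_header("form-data", params) == "form-data; filename=a.png; name=avatar"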
- if str is None or len(str) < 2: - return str - if str[0] != '"' or str[-1] != '"': - return str - - # We have to assume that we must decode this string. - # Down to work. - - # Remove the "s - str = str[1:-1] - - # Check for special sequences. Examples: - # \012 --> \n - # \" --> " - # - i = 0 - n = len(str) - res = [] - while 0 <= i < n: - o_match = _OctalPatt.search(str, i) - q_match = _QuotePatt.search(str, i) - if not o_match and not q_match: # Neither matched - res.append(str[i:]) - break - # else: - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): # QuotePatt matched - res.append(str[i:k]) - res.append(str[k + 1]) - i = k + 2 - else: # OctalPatt matched - res.append(str[i:j]) - res.append(chr(int(str[j + 1:j + 4], 8))) - i = j + 4 - return _nulljoin(res) - - -def parse_cookie(cookie): - """Parse a ``Cookie`` HTTP header into a dict of name/value pairs. - - This function attempts to mimic browser cookie parsing behavior; - it specifically does not follow any of the cookie-related RFCs - (because browsers don't either). - - The algorithm used is identical to that used by Django version 1.9.10. - - .. versionadded:: 4.4.2 - """ - cookiedict = {} - for chunk in cookie.split(str(';')): - if str('=') in chunk: - key, val = chunk.split(str('='), 1) - else: - # Assume an empty name per - # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 - key, val = str(''), chunk - key, val = key.strip(), val.strip() - if key or val: - # unquote using Python's algorithm. - cookiedict[key] = _unquote_cookie(val) - return cookiedict +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""HTTP utility code shared by clients and servers. + +This module also defines the `HTTPServerRequest` class which is exposed +via `tornado.web.RequestHandler.request`. +""" + +import calendar +import collections +import copy +import datetime +import email.utils +from http.client import responses +import http.cookies +import re +from ssl import SSLError +import time +import unicodedata +from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl + +from tornado.escape import native_str, parse_qs_bytes, utf8 +from tornado.log import gen_log +from tornado.util import ObjectDict, unicode_type + + +# responses is unused in this file, but we re-export it to other files. +# Reference it so pyflakes doesn't complain. +responses + +import typing +from typing import ( + Tuple, + Iterable, + List, + Mapping, + Iterator, + Dict, + Union, + Optional, + Awaitable, + Generator, + AnyStr, +) + +if typing.TYPE_CHECKING: + from typing import Deque # noqa: F401 + from asyncio import Future # noqa: F401 + import unittest # noqa: F401 + + +# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line +# terminator and ignore any preceding CR. +_CRLF_RE = re.compile(r"\r?\n") + + +class _NormalizedHeaderCache(dict): + """Dynamic cached mapping of header names to Http-Header-Case. 
+ + Implemented as a dict subclass so that cache hits are as fast as a + normal dict lookup, without the overhead of a python function + call. + + >>> normalized_headers = _NormalizedHeaderCache(10) + >>> normalized_headers["coNtent-TYPE"] + 'Content-Type' + """ + + def __init__(self, size: int) -> None: + super(_NormalizedHeaderCache, self).__init__() + self.size = size + self.queue = collections.deque() # type: Deque[str] + + def __missing__(self, key: str) -> str: + normalized = "-".join([w.capitalize() for w in key.split("-")]) + self[key] = normalized + self.queue.append(key) + if len(self.queue) > self.size: + # Limit the size of the cache. LRU would be better, but this + # simpler approach should be fine. In Python 2.7+ we could + # use OrderedDict (or in 3.2+, @functools.lru_cache). + old_key = self.queue.popleft() + del self[old_key] + return normalized + + +_normalized_headers = _NormalizedHeaderCache(1000) + + +class HTTPHeaders(collections.abc.MutableMapping): + """A dictionary that maintains ``Http-Header-Case`` for all keys. + + Supports multiple values per key via a pair of new methods, + `add()` and `get_list()`. The regular dictionary interface + returns a single value per key, with multiple values joined by a + comma. + + >>> h = HTTPHeaders({"content-type": "text/html"}) + >>> list(h.keys()) + ['Content-Type'] + >>> h["Content-Type"] + 'text/html' + + >>> h.add("Set-Cookie", "A=B") + >>> h.add("Set-Cookie", "C=D") + >>> h["set-cookie"] + 'A=B,C=D' + >>> h.get_list("set-cookie") + ['A=B', 'C=D'] + + >>> for (k,v) in sorted(h.get_all()): + ... print('%s: %s' % (k,v)) + ... + Content-Type: text/html + Set-Cookie: A=B + Set-Cookie: C=D + """ + + @typing.overload + def __init__(self, __arg: Mapping[str, List[str]]) -> None: + pass + + @typing.overload # noqa: F811 + def __init__(self, __arg: Mapping[str, str]) -> None: + pass + + @typing.overload # noqa: F811 + def __init__(self, *args: Tuple[str, str]) -> None: + pass + + @typing.overload # noqa: F811 + def __init__(self, **kwargs: str) -> None: + pass + + def __init__(self, *args: typing.Any, **kwargs: str) -> None: # noqa: F811 + self._dict = {} # type: typing.Dict[str, str] + self._as_list = {} # type: typing.Dict[str, typing.List[str]] + self._last_key = None + if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders): + # Copy constructor + for k, v in args[0].get_all(): + self.add(k, v) + else: + # Dict-style initialization + self.update(*args, **kwargs) + + # new public methods + + def add(self, name: str, value: str) -> None: + """Adds a new value for the given key.""" + norm_name = _normalized_headers[name] + self._last_key = norm_name + if norm_name in self: + self._dict[norm_name] = ( + native_str(self[norm_name]) + "," + native_str(value) + ) + self._as_list[norm_name].append(value) + else: + self[norm_name] = value + + def get_list(self, name: str) -> List[str]: + """Returns all values for the given header as a list.""" + norm_name = _normalized_headers[name] + return self._as_list.get(norm_name, []) + + def get_all(self) -> Iterable[Tuple[str, str]]: + """Returns an iterable of all (name, value) pairs. + + If a header has multiple values, multiple pairs will be + returned with the same name. + """ + for name, values in self._as_list.items(): + for value in values: + yield (name, value) + + def parse_line(self, line: str) -> None: + """Updates the dictionary with a single header line. 
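The continuation branch implemented just below (``line[0].isspace()``) handles obsolete folded headers; a short sketch of the observable behavior:

from tornado.httputil import HTTPHeaders

h = HTTPHeaders()
h.parse_line("User-Agent: curl/7.64.0")
h.parse_line("  (obs-fold continuation)")  # leading whitespace joins the previous header
assert h["User-Agent"] == "curl/7.64.0 (obs-fold continuation)"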
+ + >>> h = HTTPHeaders() + >>> h.parse_line("Content-Type: text/html") + >>> h.get('content-type') + 'text/html' + """ + if line[0].isspace(): + # continuation of a multi-line header + if self._last_key is None: + raise HTTPInputError("first header line cannot start with whitespace") + new_part = " " + line.lstrip() + self._as_list[self._last_key][-1] += new_part + self._dict[self._last_key] += new_part + else: + try: + name, value = line.split(":", 1) + except ValueError: + raise HTTPInputError("no colon in header line") + self.add(name, value.strip()) + + @classmethod + def parse(cls, headers: str) -> "HTTPHeaders": + """Returns a dictionary from HTTP header text. + + >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n") + >>> sorted(h.items()) + [('Content-Length', '42'), ('Content-Type', 'text/html')] + + .. versionchanged:: 5.1 + + Raises `HTTPInputError` on malformed headers instead of a + mix of `KeyError`, and `ValueError`. + + """ + h = cls() + for line in _CRLF_RE.split(headers): + if line: + h.parse_line(line) + return h + + # MutableMapping abstract method implementations. + + def __setitem__(self, name: str, value: str) -> None: + norm_name = _normalized_headers[name] + self._dict[norm_name] = value + self._as_list[norm_name] = [value] + + def __getitem__(self, name: str) -> str: + return self._dict[_normalized_headers[name]] + + def __delitem__(self, name: str) -> None: + norm_name = _normalized_headers[name] + del self._dict[norm_name] + del self._as_list[norm_name] + + def __len__(self) -> int: + return len(self._dict) + + def __iter__(self) -> Iterator[typing.Any]: + return iter(self._dict) + + def copy(self) -> "HTTPHeaders": + # defined in dict but not in MutableMapping. + return HTTPHeaders(self) + + # Use our overridden copy method for the copy.copy module. + # This makes shallow copies one level deeper, but preserves + # the appearance that HTTPHeaders is a single container. + __copy__ = copy + + def __str__(self) -> str: + lines = [] + for name, value in self.get_all(): + lines.append("%s: %s\n" % (name, value)) + return "".join(lines) + + __unicode__ = __str__ + + +class HTTPServerRequest(object): + """A single HTTP request. + + All attributes are type `str` unless otherwise noted. + + .. attribute:: method + + HTTP request method, e.g. "GET" or "POST" + + .. attribute:: uri + + The requested uri. + + .. attribute:: path + + The path portion of `uri` + + .. attribute:: query + + The query portion of `uri` + + .. attribute:: version + + HTTP version specified in request, e.g. "HTTP/1.1" + + .. attribute:: headers + + `.HTTPHeaders` dictionary-like object for request headers. Acts like + a case-insensitive dictionary with additional methods for repeated + headers. + + .. attribute:: body + + Request body, if present, as a byte string. + + .. attribute:: remote_ip + + Client's IP address as a string. If ``HTTPServer.xheaders`` is set, + will pass along the real IP address provided by a load balancer + in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. + + .. versionchanged:: 3.1 + The list format of ``X-Forwarded-For`` is now supported. + + .. attribute:: protocol + + The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` + is set, will pass along the protocol used by a load balancer if + reported via an ``X-Scheme`` header. + + .. attribute:: host + + The requested hostname, usually taken from the ``Host`` header. + + .. 
attribute:: arguments + + GET/POST arguments are available in the arguments property, which + maps argument names to lists of values (to support multiple values + for individual names). Names are of type `str`, while arguments + are byte strings. Note that this is different from + `.RequestHandler.get_argument`, which returns argument values as + unicode strings. + + .. attribute:: query_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the query string. + + .. versionadded:: 3.2 + + .. attribute:: body_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the request body. + + .. versionadded:: 3.2 + + .. attribute:: files + + File uploads are available in the files property, which maps file + names to lists of `.HTTPFile`. + + .. attribute:: connection + + An HTTP request is attached to a single HTTP connection, which can + be accessed through the "connection" attribute. Since connections + are typically kept open in HTTP/1.1, multiple requests can be handled + sequentially on a single connection. + + .. versionchanged:: 4.0 + Moved from ``tornado.httpserver.HTTPRequest``. + """ + + path = None # type: str + query = None # type: str + + # HACK: Used for stream_request_body + _body_future = None # type: Future[None] + + def __init__( + self, + method: str = None, + uri: str = None, + version: str = "HTTP/1.0", + headers: HTTPHeaders = None, + body: bytes = None, + host: str = None, + files: Dict[str, List["HTTPFile"]] = None, + connection: "HTTPConnection" = None, + start_line: "RequestStartLine" = None, + server_connection: object = None, + ) -> None: + if start_line is not None: + method, uri, version = start_line + self.method = method + self.uri = uri + self.version = version + self.headers = headers or HTTPHeaders() + self.body = body or b"" + + # set remote IP and protocol + context = getattr(connection, "context", None) + self.remote_ip = getattr(context, "remote_ip", None) + self.protocol = getattr(context, "protocol", "http") + + self.host = host or self.headers.get("Host") or "127.0.0.1" + self.host_name = split_host_and_port(self.host.lower())[0] + self.files = files or {} + self.connection = connection + self.server_connection = server_connection + self._start_time = time.time() + self._finish_time = None + + if uri is not None: + self.path, sep, self.query = uri.partition("?") + self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) + self.body_arguments = {} # type: Dict[str, List[bytes]] + + @property + def cookies(self) -> Dict[str, http.cookies.Morsel]: + """A dictionary of ``http.cookies.Morsel`` objects.""" + if not hasattr(self, "_cookies"): + self._cookies = http.cookies.SimpleCookie() + if "Cookie" in self.headers: + try: + parsed = parse_cookie(self.headers["Cookie"]) + except Exception: + pass + else: + for k, v in parsed.items(): + try: + self._cookies[k] = v + except Exception: + # SimpleCookie imposes some restrictions on keys; + # parse_cookie does not. Discard any cookies + # with disallowed keys.
+ pass + return self._cookies + + def full_url(self) -> str: + """Reconstructs the full URL for this request.""" + return self.protocol + "://" + self.host + self.uri + + def request_time(self) -> float: + """Returns the amount of time it took for this request to execute.""" + if self._finish_time is None: + return time.time() - self._start_time + else: + return self._finish_time - self._start_time + + def get_ssl_certificate( + self, binary_form: bool = False + ) -> Union[None, Dict, bytes]: + """Returns the client's SSL certificate, if any. + + To use client certificates, the HTTPServer's + `ssl.SSLContext.verify_mode` field must be set, e.g.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain("foo.crt", "foo.key") + ssl_ctx.load_verify_locations("cacerts.pem") + ssl_ctx.verify_mode = ssl.CERT_REQUIRED + server = HTTPServer(app, ssl_options=ssl_ctx) + + By default, the return value is a dictionary (or None, if no + client certificate is present). If ``binary_form`` is true, a + DER-encoded form of the certificate is returned instead. See + SSLSocket.getpeercert() in the standard library for more + details. + http://docs.python.org/library/ssl.html#sslsocket-objects + """ + try: + if self.connection is None: + return None + # TODO: add a method to HTTPConnection for this so it can work with HTTP/2 + return self.connection.stream.socket.getpeercert( # type: ignore + binary_form=binary_form + ) + except SSLError: + return None + + def _parse_body(self) -> None: + parse_body_arguments( + self.headers.get("Content-Type", ""), + self.body, + self.body_arguments, + self.files, + self.headers, + ) + + for k, v in self.body_arguments.items(): + self.arguments.setdefault(k, []).extend(v) + + def __repr__(self) -> str: + attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") + args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) + return "%s(%s)" % (self.__class__.__name__, args) + + +class HTTPInputError(Exception): + """Exception class for malformed HTTP requests or responses + from remote sources. + + .. versionadded:: 4.0 + """ + + pass + + +class HTTPOutputError(Exception): + """Exception class for errors in HTTP output. + + .. versionadded:: 4.0 + """ + + pass + + +class HTTPServerConnectionDelegate(object): + """Implement this interface to handle requests from `.HTTPServer`. + + .. versionadded:: 4.0 + """ + + def start_request( + self, server_conn: object, request_conn: "HTTPConnection" + ) -> "HTTPMessageDelegate": + """This method is called by the server when a new request has started. + + :arg server_conn: is an opaque object representing the long-lived + (e.g. tcp-level) connection. + :arg request_conn: is a `.HTTPConnection` object for a single + request/response exchange. + + This method should return a `.HTTPMessageDelegate`. + """ + raise NotImplementedError() + + def on_close(self, server_conn: object) -> None: + """This method is called when a connection has been closed. + + :arg server_conn: is a server connection that has previously been + passed to ``start_request``. + """ + pass + + +class HTTPMessageDelegate(object): + """Implement this interface to handle an HTTP request or response. + + .. versionadded:: 4.0 + """ + + # TODO: genericize this class to avoid exposing the Union. + def headers_received( + self, + start_line: Union["RequestStartLine", "ResponseStartLine"], + headers: HTTPHeaders, + ) -> Optional[Awaitable[None]]: + """Called when the HTTP headers have been received and parsed. 
+
+ :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`
+ depending on whether this is a client or server message.
+ :arg headers: a `.HTTPHeaders` instance.
+
+ Some `.HTTPConnection` methods can only be called during
+ ``headers_received``.
+
+ May return a `.Future`; if it does the body will not be read
+ until it is done.
+ """
+ pass
+
+ def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
+ """Called when a chunk of data has been received.
+
+ May return a `.Future` for flow control.
+ """
+ pass
+
+ def finish(self) -> None:
+ """Called after the last chunk of data has been received."""
+ pass
+
+ def on_connection_close(self) -> None:
+ """Called if the connection is closed without finishing the request.
+
+ If ``headers_received`` is called, either ``finish`` or
+ ``on_connection_close`` will be called, but not both.
+ """
+ pass
+
+
+class HTTPConnection(object):
+ """Applications use this interface to write their responses.
+
+ .. versionadded:: 4.0
+ """
+
+ def write_headers(
+ self,
+ start_line: Union["RequestStartLine", "ResponseStartLine"],
+ headers: HTTPHeaders,
+ chunk: bytes = None,
+ ) -> "Future[None]":
+ """Write an HTTP header block.
+
+ :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`.
+ :arg headers: a `.HTTPHeaders` instance.
+ :arg chunk: the first (optional) chunk of data. This is an optimization
+ so that small responses can be written in the same call as their
+ headers.
+
+ The ``version`` field of ``start_line`` is ignored.
+
+ Returns a future for flow control.
+
+ .. versionchanged:: 6.0
+
+ The ``callback`` argument was removed.
+ """
+ raise NotImplementedError()
+
+ def write(self, chunk: bytes) -> "Future[None]":
+ """Writes a chunk of body data.
+
+ Returns a future for flow control.
+
+ .. versionchanged:: 6.0
+
+ The ``callback`` argument was removed.
+ """
+ raise NotImplementedError()
+
+ def finish(self) -> None:
+ """Indicates that the last body data has been written."""
+ raise NotImplementedError()
+
+
+def url_concat(
+ url: str,
+ args: Union[
+ None, Dict[str, str], List[Tuple[str, str]], Tuple[Tuple[str, str], ...]
+ ],
+) -> str:
+ """Concatenate url and arguments regardless of whether
+ url has existing query parameters.
+
+ ``args`` may be either a dictionary or a list of key-value pairs
+ (the latter allows for multiple values with the same key).
+
+ >>> url_concat("http://example.com/foo", dict(c="d"))
+ 'http://example.com/foo?c=d'
+ >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
+ 'http://example.com/foo?a=b&c=d'
+ >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")])
+ 'http://example.com/foo?a=b&c=d&c=d2'
+ """
+ if args is None:
+ return url
+ parsed_url = urlparse(url)
+ if isinstance(args, dict):
+ parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True)
+ parsed_query.extend(args.items())
+ elif isinstance(args, list) or isinstance(args, tuple):
+ parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True)
+ parsed_query.extend(args)
+ else:
+ err = "'args' parameter should be dict, list or tuple. Not {0}".format(
+ type(args)
+ )
+ raise TypeError(err)
+ final_query = urlencode(parsed_query)
+ url = urlunparse(
+ (
+ parsed_url[0],
+ parsed_url[1],
+ parsed_url[2],
+ parsed_url[3],
+ final_query,
+ parsed_url[5],
+ )
+ )
+ return url
+
+
+class HTTPFile(ObjectDict):
+ """Represents a file uploaded via a form.
+
+ For backwards compatibility, its instance attributes are also
+ accessible as dictionary keys.
+
+ * ``filename``
+ * ``body``
+ * ``content_type``
+ """
+
+ pass
+
+
+def _parse_request_range(
+ range_header: str,
+) -> Optional[Tuple[Optional[int], Optional[int]]]:
+ """Parses a Range header.
+
+ Returns either ``None`` or tuple ``(start, end)``.
+ Note that while the HTTP headers use inclusive byte positions,
+ this method returns indexes suitable for use in slices.
+
+ >>> start, end = _parse_request_range("bytes=1-2")
+ >>> start, end
+ (1, 3)
+ >>> [0, 1, 2, 3, 4][start:end]
+ [1, 2]
+ >>> _parse_request_range("bytes=6-")
+ (6, None)
+ >>> _parse_request_range("bytes=-6")
+ (-6, None)
+ >>> _parse_request_range("bytes=-0")
+ (None, 0)
+ >>> _parse_request_range("bytes=")
+ (None, None)
+ >>> _parse_request_range("foo=42")
+ >>> _parse_request_range("bytes=1-2,6-10")
+
+ Note: only supports one range (e.g., ``bytes=1-2,6-10`` is not allowed).
+
+ See [0] for the details of the range header.
+
+ [0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
+ """
+ unit, _, value = range_header.partition("=")
+ unit, value = unit.strip(), value.strip()
+ if unit != "bytes":
+ return None
+ start_b, _, end_b = value.partition("-")
+ try:
+ start = _int_or_none(start_b)
+ end = _int_or_none(end_b)
+ except ValueError:
+ return None
+ if end is not None:
+ if start is None:
+ if end != 0:
+ start = -end
+ end = None
+ else:
+ end += 1
+ return (start, end)
+
+
+def _get_content_range(start: Optional[int], end: Optional[int], total: int) -> str:
+ """Returns a suitable Content-Range header:
+
+ >>> print(_get_content_range(None, 1, 4))
+ bytes 0-0/4
+ >>> print(_get_content_range(1, 3, 4))
+ bytes 1-2/4
+ >>> print(_get_content_range(None, None, 4))
+ bytes 0-3/4
+ """
+ start = start or 0
+ end = (end or total) - 1
+ return "bytes %s-%s/%s" % (start, end, total)
+
+
+def _int_or_none(val: str) -> Optional[int]:
+ val = val.strip()
+ if val == "":
+ return None
+ return int(val)
+
+
+def parse_body_arguments(
+ content_type: str,
+ body: bytes,
+ arguments: Dict[str, List[bytes]],
+ files: Dict[str, List[HTTPFile]],
+ headers: HTTPHeaders = None,
+) -> None:
+ """Parses a form request body.
+
+ Supports ``application/x-www-form-urlencoded`` and
+ ``multipart/form-data``. The ``content_type`` parameter should be
+ a string and ``body`` should be a byte string. The ``arguments``
+ and ``files`` parameters are dictionaries that will be updated
+ with the parsed contents.
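+
+ A minimal usage sketch (the argument values here are illustrative)::
+
+ args = {}  # type: Dict[str, List[bytes]]
+ files = {}  # type: Dict[str, List[HTTPFile]]
+ parse_body_arguments(
+ "application/x-www-form-urlencoded", b"a=1&a=2&b=3", args, files
+ )
+ # args is now {"a": [b"1", b"2"], "b": [b"3"]}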
+ """ + if content_type.startswith("application/x-www-form-urlencoded"): + if headers and "Content-Encoding" in headers: + gen_log.warning( + "Unsupported Content-Encoding: %s", headers["Content-Encoding"] + ) + return + try: + uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) + except Exception as e: + gen_log.warning("Invalid x-www-form-urlencoded body: %s", e) + uri_arguments = {} + for name, values in uri_arguments.items(): + if values: + arguments.setdefault(name, []).extend(values) + elif content_type.startswith("multipart/form-data"): + if headers and "Content-Encoding" in headers: + gen_log.warning( + "Unsupported Content-Encoding: %s", headers["Content-Encoding"] + ) + return + try: + fields = content_type.split(";") + for field in fields: + k, sep, v = field.strip().partition("=") + if k == "boundary" and v: + parse_multipart_form_data(utf8(v), body, arguments, files) + break + else: + raise ValueError("multipart boundary not found") + except Exception as e: + gen_log.warning("Invalid multipart/form-data: %s", e) + + +def parse_multipart_form_data( + boundary: bytes, + data: bytes, + arguments: Dict[str, List[bytes]], + files: Dict[str, List[HTTPFile]], +) -> None: + """Parses a ``multipart/form-data`` body. + + The ``boundary`` and ``data`` parameters are both byte strings. + The dictionaries given in the arguments and files parameters + will be updated with the contents of the body. + + .. versionchanged:: 5.1 + + Now recognizes non-ASCII filenames in RFC 2231/5987 + (``filename*=``) format. + """ + # The standard allows for the boundary to be quoted in the header, + # although it's rare (it happens at least for google app engine + # xmpp). I think we're also supposed to handle backslash-escapes + # here but I'll save that until we see a client that uses them + # in the wild. + if boundary.startswith(b'"') and boundary.endswith(b'"'): + boundary = boundary[1:-1] + final_boundary_index = data.rfind(b"--" + boundary + b"--") + if final_boundary_index == -1: + gen_log.warning("Invalid multipart/form-data: no final boundary") + return + parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n") + for part in parts: + if not part: + continue + eoh = part.find(b"\r\n\r\n") + if eoh == -1: + gen_log.warning("multipart/form-data missing headers") + continue + headers = HTTPHeaders.parse(part[:eoh].decode("utf-8")) + disp_header = headers.get("Content-Disposition", "") + disposition, disp_params = _parse_header(disp_header) + if disposition != "form-data" or not part.endswith(b"\r\n"): + gen_log.warning("Invalid multipart/form-data") + continue + value = part[eoh + 4 : -2] + if not disp_params.get("name"): + gen_log.warning("multipart/form-data value missing name") + continue + name = disp_params["name"] + if disp_params.get("filename"): + ctype = headers.get("Content-Type", "application/unknown") + files.setdefault(name, []).append( + HTTPFile( + filename=disp_params["filename"], body=value, content_type=ctype + ) + ) + else: + arguments.setdefault(name, []).append(value) + + +def format_timestamp( + ts: Union[int, float, tuple, time.struct_time, datetime.datetime] +) -> str: + """Formats a timestamp in the format used by HTTP. + + The argument may be a numeric timestamp as returned by `time.time`, + a time tuple as returned by `time.gmtime`, or a `datetime.datetime` + object. 
+ + >>> format_timestamp(1359312200) + 'Sun, 27 Jan 2013 18:43:20 GMT' + """ + if isinstance(ts, (int, float)): + time_num = ts + elif isinstance(ts, (tuple, time.struct_time)): + time_num = calendar.timegm(ts) + elif isinstance(ts, datetime.datetime): + time_num = calendar.timegm(ts.utctimetuple()) + else: + raise TypeError("unknown timestamp type: %r" % ts) + return email.utils.formatdate(time_num, usegmt=True) + + +RequestStartLine = collections.namedtuple( + "RequestStartLine", ["method", "path", "version"] +) + + +def parse_request_start_line(line: str) -> RequestStartLine: + """Returns a (method, path, version) tuple for an HTTP 1.x request line. + + The response is a `collections.namedtuple`. + + >>> parse_request_start_line("GET /foo HTTP/1.1") + RequestStartLine(method='GET', path='/foo', version='HTTP/1.1') + """ + try: + method, path, version = line.split(" ") + except ValueError: + # https://tools.ietf.org/html/rfc7230#section-3.1.1 + # invalid request-line SHOULD respond with a 400 (Bad Request) + raise HTTPInputError("Malformed HTTP request line") + if not re.match(r"^HTTP/1\.[0-9]$", version): + raise HTTPInputError( + "Malformed HTTP version in HTTP Request-Line: %r" % version + ) + return RequestStartLine(method, path, version) + + +ResponseStartLine = collections.namedtuple( + "ResponseStartLine", ["version", "code", "reason"] +) + + +def parse_response_start_line(line: str) -> ResponseStartLine: + """Returns a (version, code, reason) tuple for an HTTP 1.x response line. + + The response is a `collections.namedtuple`. + + >>> parse_response_start_line("HTTP/1.1 200 OK") + ResponseStartLine(version='HTTP/1.1', code=200, reason='OK') + """ + line = native_str(line) + match = re.match("(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)", line) + if not match: + raise HTTPInputError("Error parsing response start line") + return ResponseStartLine(match.group(1), int(match.group(2)), match.group(3)) + + +# _parseparam and _parse_header are copied and modified from python2.7's cgi.py +# The original 2.7 version of this code did not correctly support some +# combinations of semicolons and double quotes. +# It has also been modified to support valueless parameters as seen in +# websocket extension negotiations, and to support non-ascii values in +# RFC 2231/5987 format. + + +def _parseparam(s: str) -> Generator[str, None, None]: + while s[:1] == ";": + s = s[1:] + end = s.find(";") + while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: + end = s.find(";", end + 1) + if end < 0: + end = len(s) + f = s[:end] + yield f.strip() + s = s[end:] + + +def _parse_header(line: str) -> Tuple[str, Dict[str, str]]: + r"""Parse a Content-type like header. + + Return the main content-type and a dictionary of options. 
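+
+ Parameters encoded in RFC 2231/5987 form (``name*=``) are decoded,
+ as the ``file*`` parameter in the example below shows.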
+ + >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st" + >>> ct, d = _parse_header(d) + >>> ct + 'form-data' + >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape') + True + >>> d['foo'] + 'b\\a"r' + """ + parts = _parseparam(";" + line) + key = next(parts) + # decode_params treats first argument special, but we already stripped key + params = [("Dummy", "value")] + for p in parts: + i = p.find("=") + if i >= 0: + name = p[:i].strip().lower() + value = p[i + 1 :].strip() + params.append((name, native_str(value))) + decoded_params = email.utils.decode_params(params) + decoded_params.pop(0) # get rid of the dummy again + pdict = {} + for name, decoded_value in decoded_params: + value = email.utils.collapse_rfc2231_value(decoded_value) + if len(value) >= 2 and value[0] == '"' and value[-1] == '"': + value = value[1:-1] + pdict[name] = value + return key, pdict + + +def _encode_header(key: str, pdict: Dict[str, str]) -> str: + """Inverse of _parse_header. + + >>> _encode_header('permessage-deflate', + ... {'client_max_window_bits': 15, 'client_no_context_takeover': None}) + 'permessage-deflate; client_max_window_bits=15; client_no_context_takeover' + """ + if not pdict: + return key + out = [key] + # Sort the parameters just to make it easy to test. + for k, v in sorted(pdict.items()): + if v is None: + out.append(k) + else: + # TODO: quote if necessary. + out.append("%s=%s" % (k, v)) + return "; ".join(out) + + +def encode_username_password( + username: Union[str, bytes], password: Union[str, bytes] +) -> bytes: + """Encodes a username/password pair in the format used by HTTP auth. + + The return value is a byte string in the form ``username:password``. + + .. versionadded:: 5.1 + """ + if isinstance(username, unicode_type): + username = unicodedata.normalize("NFC", username) + if isinstance(password, unicode_type): + password = unicodedata.normalize("NFC", password) + return utf8(username) + b":" + utf8(password) + + +def doctests(): + # type: () -> unittest.TestSuite + import doctest + + return doctest.DocTestSuite() + + +def split_host_and_port(netloc: str) -> Tuple[str, Optional[int]]: + """Returns ``(host, port)`` tuple from ``netloc``. + + Returned ``port`` will be ``None`` if not present. + + .. versionadded:: 4.1 + """ + match = re.match(r"^(.+):(\d+)$", netloc) + if match: + host = match.group(1) + port = int(match.group(2)) # type: Optional[int] + else: + host = netloc + port = None + return (host, port) + + +def qs_to_qsl(qs: Dict[str, List[AnyStr]]) -> Iterable[Tuple[str, AnyStr]]: + """Generator converting a result of ``parse_qs`` back to name-value pairs. + + .. versionadded:: 5.0 + """ + for k, vs in qs.items(): + for v in vs: + yield (k, v) + + +_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") +_QuotePatt = re.compile(r"[\\].") +_nulljoin = "".join + + +def _unquote_cookie(s: str) -> str: + """Handle double quotes and escaping in cookie values. + + This method is copied verbatim from the Python 3.5 standard + library (http.cookies._unquote) so we don't have to depend on + non-public interfaces. + """ + # If there aren't any doublequotes, + # then there can't be any special characters. See RFC 2109. + if s is None or len(s) < 2: + return s + if s[0] != '"' or s[-1] != '"': + return s + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + s = s[1:-1] + + # Check for special sequences. 
Examples: + # \012 --> \n + # \" --> " + # + i = 0 + n = len(s) + res = [] + while 0 <= i < n: + o_match = _OctalPatt.search(s, i) + q_match = _QuotePatt.search(s, i) + if not o_match and not q_match: # Neither matched + res.append(s[i:]) + break + # else: + j = k = -1 + if o_match: + j = o_match.start(0) + if q_match: + k = q_match.start(0) + if q_match and (not o_match or k < j): # QuotePatt matched + res.append(s[i:k]) + res.append(s[k + 1]) + i = k + 2 + else: # OctalPatt matched + res.append(s[i:j]) + res.append(chr(int(s[j + 1 : j + 4], 8))) + i = j + 4 + return _nulljoin(res) + + +def parse_cookie(cookie: str) -> Dict[str, str]: + """Parse a ``Cookie`` HTTP header into a dict of name/value pairs. + + This function attempts to mimic browser cookie parsing behavior; + it specifically does not follow any of the cookie-related RFCs + (because browsers don't either). + + The algorithm used is identical to that used by Django version 1.9.10. + + .. versionadded:: 4.4.2 + """ + cookiedict = {} + for chunk in cookie.split(str(";")): + if str("=") in chunk: + key, val = chunk.split(str("="), 1) + else: + # Assume an empty name per + # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 + key, val = str(""), chunk + key, val = key.strip(), val.strip() + if key or val: + # unquote using Python's algorithm. + cookiedict[key] = _unquote_cookie(val) + return cookiedict diff --git a/server/www/packages/packages-linux/x64/tornado/ioloop.py b/server/www/packages/packages-linux/x64/tornado/ioloop.py index 889153a..a691fe8 100644 --- a/server/www/packages/packages-linux/x64/tornado/ioloop.py +++ b/server/www/packages/packages-linux/x64/tornado/ioloop.py @@ -1,1267 +1,946 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""An I/O event loop for non-blocking sockets. - -On Python 3, `.IOLoop` is a wrapper around the `asyncio` event loop. - -Typical applications will use a single `IOLoop` object, accessed via -`IOLoop.current` class method. The `IOLoop.start` method (or -equivalently, `asyncio.AbstractEventLoop.run_forever`) should usually -be called at the end of the ``main()`` function. Atypical applications -may use more than one `IOLoop`, such as one `IOLoop` per thread, or -per `unittest` case. - -In addition to I/O events, the `IOLoop` can also schedule time-based -events. `IOLoop.add_timeout` is a non-blocking alternative to -`time.sleep`. 
- -""" - -from __future__ import absolute_import, division, print_function - -import collections -import datetime -import errno -import functools -import heapq -import itertools -import logging -import numbers -import os -import select -import sys -import threading -import time -import traceback -import math -import random - -from tornado.concurrent import Future, is_future, chain_future, future_set_exc_info, future_add_done_callback # noqa: E501 -from tornado.log import app_log, gen_log -from tornado.platform.auto import set_close_exec, Waker -from tornado import stack_context -from tornado.util import ( - PY3, Configurable, errno_from_exception, timedelta_to_seconds, - TimeoutError, unicode_type, import_object, -) - -try: - import signal -except ImportError: - signal = None - -try: - from concurrent.futures import ThreadPoolExecutor -except ImportError: - ThreadPoolExecutor = None - -if PY3: - import _thread as thread -else: - import thread - -try: - import asyncio -except ImportError: - asyncio = None - - -_POLL_TIMEOUT = 3600.0 - - -class IOLoop(Configurable): - """A level-triggered I/O loop. - - On Python 3, `IOLoop` is a wrapper around the `asyncio` event - loop. On Python 2, it uses ``epoll`` (Linux) or ``kqueue`` (BSD - and Mac OS X) if they are available, or else we fall back on - select(). If you are implementing a system that needs to handle - thousands of simultaneous connections, you should use a system - that supports either ``epoll`` or ``kqueue``. - - Example usage for a simple TCP server: - - .. testcode:: - - import errno - import functools - import socket - - import tornado.ioloop - from tornado.iostream import IOStream - - async def handle_connection(connection, address): - stream = IOStream(connection) - message = await stream.read_until_close() - print("message from client:", message.decode().strip()) - - def connection_ready(sock, fd, events): - while True: - try: - connection, address = sock.accept() - except socket.error as e: - if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN): - raise - return - connection.setblocking(0) - io_loop = tornado.ioloop.IOLoop.current() - io_loop.spawn_callback(handle_connection, connection, address) - - if __name__ == '__main__': - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.setblocking(0) - sock.bind(("", 8888)) - sock.listen(128) - - io_loop = tornado.ioloop.IOLoop.current() - callback = functools.partial(connection_ready, sock) - io_loop.add_handler(sock.fileno(), callback, io_loop.READ) - io_loop.start() - - .. testoutput:: - :hide: - - By default, a newly-constructed `IOLoop` becomes the thread's current - `IOLoop`, unless there already is a current `IOLoop`. This behavior - can be controlled with the ``make_current`` argument to the `IOLoop` - constructor: if ``make_current=True``, the new `IOLoop` will always - try to become current and it raises an error if there is already a - current instance. If ``make_current=False``, the new `IOLoop` will - not try to become current. - - In general, an `IOLoop` cannot survive a fork or be shared across - processes in any way. When multiple processes are being used, each - process should create its own `IOLoop`, which also implies that - any objects which depend on the `IOLoop` (such as - `.AsyncHTTPClient`) must also be created in the child processes. 
- As a guideline, anything that starts processes (including the - `tornado.process` and `multiprocessing` modules) should do so as - early as possible, ideally the first thing the application does - after loading its configuration in ``main()``. - - .. versionchanged:: 4.2 - Added the ``make_current`` keyword argument to the `IOLoop` - constructor. - - .. versionchanged:: 5.0 - - Uses the `asyncio` event loop by default. The - ``IOLoop.configure`` method cannot be used on Python 3 except - to redundantly specify the `asyncio` event loop. - - """ - # Constants from the epoll module - _EPOLLIN = 0x001 - _EPOLLPRI = 0x002 - _EPOLLOUT = 0x004 - _EPOLLERR = 0x008 - _EPOLLHUP = 0x010 - _EPOLLRDHUP = 0x2000 - _EPOLLONESHOT = (1 << 30) - _EPOLLET = (1 << 31) - - # Our events map exactly to the epoll events - NONE = 0 - READ = _EPOLLIN - WRITE = _EPOLLOUT - ERROR = _EPOLLERR | _EPOLLHUP - - # In Python 2, _current.instance points to the current IOLoop. - _current = threading.local() - - # In Python 3, _ioloop_for_asyncio maps from asyncio loops to IOLoops. - _ioloop_for_asyncio = dict() - - @classmethod - def configure(cls, impl, **kwargs): - if asyncio is not None: - from tornado.platform.asyncio import BaseAsyncIOLoop - - if isinstance(impl, (str, unicode_type)): - impl = import_object(impl) - if not issubclass(impl, BaseAsyncIOLoop): - raise RuntimeError( - "only AsyncIOLoop is allowed when asyncio is available") - super(IOLoop, cls).configure(impl, **kwargs) - - @staticmethod - def instance(): - """Deprecated alias for `IOLoop.current()`. - - .. versionchanged:: 5.0 - - Previously, this method returned a global singleton - `IOLoop`, in contrast with the per-thread `IOLoop` returned - by `current()`. In nearly all cases the two were the same - (when they differed, it was generally used from non-Tornado - threads to communicate back to the main thread's `IOLoop`). - This distinction is not present in `asyncio`, so in order - to facilitate integration with that package `instance()` - was changed to be an alias to `current()`. Applications - using the cross-thread communications aspect of - `instance()` should instead set their own global variable - to point to the `IOLoop` they want to use. - - .. deprecated:: 5.0 - """ - return IOLoop.current() - - def install(self): - """Deprecated alias for `make_current()`. - - .. versionchanged:: 5.0 - - Previously, this method would set this `IOLoop` as the - global singleton used by `IOLoop.instance()`. Now that - `instance()` is an alias for `current()`, `install()` - is an alias for `make_current()`. - - .. deprecated:: 5.0 - """ - self.make_current() - - @staticmethod - def clear_instance(): - """Deprecated alias for `clear_current()`. - - .. versionchanged:: 5.0 - - Previously, this method would clear the `IOLoop` used as - the global singleton by `IOLoop.instance()`. Now that - `instance()` is an alias for `current()`, - `clear_instance()` is an alias for `clear_current()`. - - .. deprecated:: 5.0 - - """ - IOLoop.clear_current() - - @staticmethod - def current(instance=True): - """Returns the current thread's `IOLoop`. - - If an `IOLoop` is currently running or has been marked as - current by `make_current`, returns that instance. If there is - no current `IOLoop` and ``instance`` is true, creates one. - - .. versionchanged:: 4.1 - Added ``instance`` argument to control the fallback to - `IOLoop.instance()`. - .. 
versionchanged:: 5.0 - On Python 3, control of the current `IOLoop` is delegated - to `asyncio`, with this and other methods as pass-through accessors. - The ``instance`` argument now controls whether an `IOLoop` - is created automatically when there is none, instead of - whether we fall back to `IOLoop.instance()` (which is now - an alias for this method). ``instance=False`` is deprecated, - since even if we do not create an `IOLoop`, this method - may initialize the asyncio loop. - """ - if asyncio is None: - current = getattr(IOLoop._current, "instance", None) - if current is None and instance: - current = IOLoop() - if IOLoop._current.instance is not current: - raise RuntimeError("new IOLoop did not become current") - else: - try: - loop = asyncio.get_event_loop() - except (RuntimeError, AssertionError): - if not instance: - return None - raise - try: - return IOLoop._ioloop_for_asyncio[loop] - except KeyError: - if instance: - from tornado.platform.asyncio import AsyncIOMainLoop - current = AsyncIOMainLoop(make_current=True) - else: - current = None - return current - - def make_current(self): - """Makes this the `IOLoop` for the current thread. - - An `IOLoop` automatically becomes current for its thread - when it is started, but it is sometimes useful to call - `make_current` explicitly before starting the `IOLoop`, - so that code run at startup time can find the right - instance. - - .. versionchanged:: 4.1 - An `IOLoop` created while there is no current `IOLoop` - will automatically become current. - - .. versionchanged:: 5.0 - This method also sets the current `asyncio` event loop. - """ - # The asyncio event loops override this method. - assert asyncio is None - old = getattr(IOLoop._current, "instance", None) - if old is not None: - old.clear_current() - IOLoop._current.instance = self - - @staticmethod - def clear_current(): - """Clears the `IOLoop` for the current thread. - - Intended primarily for use by test frameworks in between tests. - - .. versionchanged:: 5.0 - This method also clears the current `asyncio` event loop. - """ - old = IOLoop.current(instance=False) - if old is not None: - old._clear_current_hook() - if asyncio is None: - IOLoop._current.instance = None - - def _clear_current_hook(self): - """Instance method called when an IOLoop ceases to be current. - - May be overridden by subclasses as a counterpart to make_current. - """ - pass - - @classmethod - def configurable_base(cls): - return IOLoop - - @classmethod - def configurable_default(cls): - if asyncio is not None: - from tornado.platform.asyncio import AsyncIOLoop - return AsyncIOLoop - return PollIOLoop - - def initialize(self, make_current=None): - if make_current is None: - if IOLoop.current(instance=False) is None: - self.make_current() - elif make_current: - current = IOLoop.current(instance=False) - # AsyncIO loops can already be current by this point. - if current is not None and current is not self: - raise RuntimeError("current IOLoop already exists") - self.make_current() - - def close(self, all_fds=False): - """Closes the `IOLoop`, freeing any resources used. - - If ``all_fds`` is true, all file descriptors registered on the - IOLoop will be closed (not just the ones created by the - `IOLoop` itself). - - Many applications will only use a single `IOLoop` that runs for the - entire lifetime of the process. In that case closing the `IOLoop` - is not necessary since everything will be cleaned up when the - process exits. 
`IOLoop.close` is provided mainly for scenarios - such as unit tests, which create and destroy a large number of - ``IOLoops``. - - An `IOLoop` must be completely stopped before it can be closed. This - means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must - be allowed to return before attempting to call `IOLoop.close()`. - Therefore the call to `close` will usually appear just after - the call to `start` rather than near the call to `stop`. - - .. versionchanged:: 3.1 - If the `IOLoop` implementation supports non-integer objects - for "file descriptors", those objects will have their - ``close`` method when ``all_fds`` is true. - """ - raise NotImplementedError() - - def add_handler(self, fd, handler, events): - """Registers the given handler to receive the given events for ``fd``. - - The ``fd`` argument may either be an integer file descriptor or - a file-like object with a ``fileno()`` method (and optionally a - ``close()`` method, which may be called when the `IOLoop` is shut - down). - - The ``events`` argument is a bitwise or of the constants - ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``. - - When an event occurs, ``handler(fd, events)`` will be run. - - .. versionchanged:: 4.0 - Added the ability to pass file-like objects in addition to - raw file descriptors. - """ - raise NotImplementedError() - - def update_handler(self, fd, events): - """Changes the events we listen for ``fd``. - - .. versionchanged:: 4.0 - Added the ability to pass file-like objects in addition to - raw file descriptors. - """ - raise NotImplementedError() - - def remove_handler(self, fd): - """Stop listening for events on ``fd``. - - .. versionchanged:: 4.0 - Added the ability to pass file-like objects in addition to - raw file descriptors. - """ - raise NotImplementedError() - - def set_blocking_signal_threshold(self, seconds, action): - """Sends a signal if the `IOLoop` is blocked for more than - ``s`` seconds. - - Pass ``seconds=None`` to disable. Requires Python 2.6 on a unixy - platform. - - The action parameter is a Python signal handler. Read the - documentation for the `signal` module for more information. - If ``action`` is None, the process will be killed if it is - blocked for too long. - - .. deprecated:: 5.0 - - Not implemented on the `asyncio` event loop. Use the environment - variable ``PYTHONASYNCIODEBUG=1`` instead. This method will be - removed in Tornado 6.0. - """ - raise NotImplementedError() - - def set_blocking_log_threshold(self, seconds): - """Logs a stack trace if the `IOLoop` is blocked for more than - ``s`` seconds. - - Equivalent to ``set_blocking_signal_threshold(seconds, - self.log_stack)`` - - .. deprecated:: 5.0 - - Not implemented on the `asyncio` event loop. Use the environment - variable ``PYTHONASYNCIODEBUG=1`` instead. This method will be - removed in Tornado 6.0. - """ - self.set_blocking_signal_threshold(seconds, self.log_stack) - - def log_stack(self, signal, frame): - """Signal handler to log the stack trace of the current thread. - - For use with `set_blocking_signal_threshold`. - - .. deprecated:: 5.1 - - This method will be removed in Tornado 6.0. - """ - gen_log.warning('IOLoop blocked for %f seconds in\n%s', - self._blocking_signal_threshold, - ''.join(traceback.format_stack(frame))) - - def start(self): - """Starts the I/O loop. - - The loop will run until one of the callbacks calls `stop()`, which - will make the loop stop after the current event iteration completes. 
- """ - raise NotImplementedError() - - def _setup_logging(self): - """The IOLoop catches and logs exceptions, so it's - important that log output be visible. However, python's - default behavior for non-root loggers (prior to python - 3.2) is to print an unhelpful "no handlers could be - found" message rather than the actual log entry, so we - must explicitly configure logging if we've made it this - far without anything. - - This method should be called from start() in subclasses. - """ - if not any([logging.getLogger().handlers, - logging.getLogger('tornado').handlers, - logging.getLogger('tornado.application').handlers]): - logging.basicConfig() - - def stop(self): - """Stop the I/O loop. - - If the event loop is not currently running, the next call to `start()` - will return immediately. - - Note that even after `stop` has been called, the `IOLoop` is not - completely stopped until `IOLoop.start` has also returned. - Some work that was scheduled before the call to `stop` may still - be run before the `IOLoop` shuts down. - """ - raise NotImplementedError() - - def run_sync(self, func, timeout=None): - """Starts the `IOLoop`, runs the given function, and stops the loop. - - The function must return either an awaitable object or - ``None``. If the function returns an awaitable object, the - `IOLoop` will run until the awaitable is resolved (and - `run_sync()` will return the awaitable's result). If it raises - an exception, the `IOLoop` will stop and the exception will be - re-raised to the caller. - - The keyword-only argument ``timeout`` may be used to set - a maximum duration for the function. If the timeout expires, - a `tornado.util.TimeoutError` is raised. - - This method is useful to allow asynchronous calls in a - ``main()`` function:: - - async def main(): - # do stuff... - - if __name__ == '__main__': - IOLoop.current().run_sync(main) - - .. versionchanged:: 4.3 - Returning a non-``None``, non-awaitable value is now an error. - - .. versionchanged:: 5.0 - If a timeout occurs, the ``func`` coroutine will be cancelled. - - """ - future_cell = [None] - - def run(): - try: - result = func() - if result is not None: - from tornado.gen import convert_yielded - result = convert_yielded(result) - except Exception: - future_cell[0] = Future() - future_set_exc_info(future_cell[0], sys.exc_info()) - else: - if is_future(result): - future_cell[0] = result - else: - future_cell[0] = Future() - future_cell[0].set_result(result) - self.add_future(future_cell[0], lambda future: self.stop()) - self.add_callback(run) - if timeout is not None: - def timeout_callback(): - # If we can cancel the future, do so and wait on it. If not, - # Just stop the loop and return with the task still pending. - # (If we neither cancel nor wait for the task, a warning - # will be logged). - if not future_cell[0].cancel(): - self.stop() - timeout_handle = self.add_timeout(self.time() + timeout, timeout_callback) - self.start() - if timeout is not None: - self.remove_timeout(timeout_handle) - if future_cell[0].cancelled() or not future_cell[0].done(): - raise TimeoutError('Operation timed out after %s seconds' % timeout) - return future_cell[0].result() - - def time(self): - """Returns the current time according to the `IOLoop`'s clock. - - The return value is a floating-point number relative to an - unspecified time in the past. - - By default, the `IOLoop`'s time function is `time.time`. However, - it may be configured to use e.g. `time.monotonic` instead. 
- Calls to `add_timeout` that pass a number instead of a - `datetime.timedelta` should use this function to compute the - appropriate time, so they can work no matter what time function - is chosen. - """ - return time.time() - - def add_timeout(self, deadline, callback, *args, **kwargs): - """Runs the ``callback`` at the time ``deadline`` from the I/O loop. - - Returns an opaque handle that may be passed to - `remove_timeout` to cancel. - - ``deadline`` may be a number denoting a time (on the same - scale as `IOLoop.time`, normally `time.time`), or a - `datetime.timedelta` object for a deadline relative to the - current time. Since Tornado 4.0, `call_later` is a more - convenient alternative for the relative case since it does not - require a timedelta object. - - Note that it is not safe to call `add_timeout` from other threads. - Instead, you must use `add_callback` to transfer control to the - `IOLoop`'s thread, and then call `add_timeout` from there. - - Subclasses of IOLoop must implement either `add_timeout` or - `call_at`; the default implementations of each will call - the other. `call_at` is usually easier to implement, but - subclasses that wish to maintain compatibility with Tornado - versions prior to 4.0 must use `add_timeout` instead. - - .. versionchanged:: 4.0 - Now passes through ``*args`` and ``**kwargs`` to the callback. - """ - if isinstance(deadline, numbers.Real): - return self.call_at(deadline, callback, *args, **kwargs) - elif isinstance(deadline, datetime.timedelta): - return self.call_at(self.time() + timedelta_to_seconds(deadline), - callback, *args, **kwargs) - else: - raise TypeError("Unsupported deadline %r" % deadline) - - def call_later(self, delay, callback, *args, **kwargs): - """Runs the ``callback`` after ``delay`` seconds have passed. - - Returns an opaque handle that may be passed to `remove_timeout` - to cancel. Note that unlike the `asyncio` method of the same - name, the returned object does not have a ``cancel()`` method. - - See `add_timeout` for comments on thread-safety and subclassing. - - .. versionadded:: 4.0 - """ - return self.call_at(self.time() + delay, callback, *args, **kwargs) - - def call_at(self, when, callback, *args, **kwargs): - """Runs the ``callback`` at the absolute time designated by ``when``. - - ``when`` must be a number using the same reference point as - `IOLoop.time`. - - Returns an opaque handle that may be passed to `remove_timeout` - to cancel. Note that unlike the `asyncio` method of the same - name, the returned object does not have a ``cancel()`` method. - - See `add_timeout` for comments on thread-safety and subclassing. - - .. versionadded:: 4.0 - """ - return self.add_timeout(when, callback, *args, **kwargs) - - def remove_timeout(self, timeout): - """Cancels a pending timeout. - - The argument is a handle as returned by `add_timeout`. It is - safe to call `remove_timeout` even if the callback has already - been run. - """ - raise NotImplementedError() - - def add_callback(self, callback, *args, **kwargs): - """Calls the given callback on the next I/O loop iteration. - - It is safe to call this method from any thread at any time, - except from a signal handler. Note that this is the **only** - method in `IOLoop` that makes this thread-safety guarantee; all - other interaction with the `IOLoop` must be done from that - `IOLoop`'s thread. `add_callback()` may be used to transfer - control from other threads to the `IOLoop`'s thread. - - To add a callback from a signal handler, see - `add_callback_from_signal`. 
- """ - raise NotImplementedError() - - def add_callback_from_signal(self, callback, *args, **kwargs): - """Calls the given callback on the next I/O loop iteration. - - Safe for use from a Python signal handler; should not be used - otherwise. - - Callbacks added with this method will be run without any - `.stack_context`, to avoid picking up the context of the function - that was interrupted by the signal. - """ - raise NotImplementedError() - - def spawn_callback(self, callback, *args, **kwargs): - """Calls the given callback on the next IOLoop iteration. - - Unlike all other callback-related methods on IOLoop, - ``spawn_callback`` does not associate the callback with its caller's - ``stack_context``, so it is suitable for fire-and-forget callbacks - that should not interfere with the caller. - - .. versionadded:: 4.0 - """ - with stack_context.NullContext(): - self.add_callback(callback, *args, **kwargs) - - def add_future(self, future, callback): - """Schedules a callback on the ``IOLoop`` when the given - `.Future` is finished. - - The callback is invoked with one argument, the - `.Future`. - - This method only accepts `.Future` objects and not other - awaitables (unlike most of Tornado where the two are - interchangeable). - """ - assert is_future(future) - callback = stack_context.wrap(callback) - future_add_done_callback( - future, lambda future: self.add_callback(callback, future)) - - def run_in_executor(self, executor, func, *args): - """Runs a function in a ``concurrent.futures.Executor``. If - ``executor`` is ``None``, the IO loop's default executor will be used. - - Use `functools.partial` to pass keyword arguments to ``func``. - - .. versionadded:: 5.0 - """ - if ThreadPoolExecutor is None: - raise RuntimeError( - "concurrent.futures is required to use IOLoop.run_in_executor") - - if executor is None: - if not hasattr(self, '_executor'): - from tornado.process import cpu_count - self._executor = ThreadPoolExecutor(max_workers=(cpu_count() * 5)) - executor = self._executor - c_future = executor.submit(func, *args) - # Concurrent Futures are not usable with await. Wrap this in a - # Tornado Future instead, using self.add_future for thread-safety. - t_future = Future() - self.add_future(c_future, lambda f: chain_future(f, t_future)) - return t_future - - def set_default_executor(self, executor): - """Sets the default executor to use with :meth:`run_in_executor`. - - .. versionadded:: 5.0 - """ - self._executor = executor - - def _run_callback(self, callback): - """Runs a callback with error handling. - - For use in subclasses. - """ - try: - ret = callback() - if ret is not None: - from tornado import gen - # Functions that return Futures typically swallow all - # exceptions and store them in the Future. If a Future - # makes it out to the IOLoop, ensure its exception (if any) - # gets logged too. - try: - ret = gen.convert_yielded(ret) - except gen.BadYieldError: - # It's not unusual for add_callback to be used with - # methods returning a non-None and non-yieldable - # result, which should just be ignored. - pass - else: - self.add_future(ret, self._discard_future_result) - except Exception: - self.handle_callback_exception(callback) - - def _discard_future_result(self, future): - """Avoid unhandled-exception warnings from spawned coroutines.""" - future.result() - - def handle_callback_exception(self, callback): - """This method is called whenever a callback run by the `IOLoop` - throws an exception. - - By default simply logs the exception as an error. 
Subclasses - may override this method to customize reporting of exceptions. - - The exception itself is not passed explicitly, but is available - in `sys.exc_info`. - - .. versionchanged:: 5.0 - - When the `asyncio` event loop is used (which is now the - default on Python 3), some callback errors will be handled by - `asyncio` instead of this method. - - .. deprecated: 5.1 - - Support for this method will be removed in Tornado 6.0. - """ - app_log.error("Exception in callback %r", callback, exc_info=True) - - def split_fd(self, fd): - """Returns an (fd, obj) pair from an ``fd`` parameter. - - We accept both raw file descriptors and file-like objects as - input to `add_handler` and related methods. When a file-like - object is passed, we must retain the object itself so we can - close it correctly when the `IOLoop` shuts down, but the - poller interfaces favor file descriptors (they will accept - file-like objects and call ``fileno()`` for you, but they - always return the descriptor itself). - - This method is provided for use by `IOLoop` subclasses and should - not generally be used by application code. - - .. versionadded:: 4.0 - """ - try: - return fd.fileno(), fd - except AttributeError: - return fd, fd - - def close_fd(self, fd): - """Utility method to close an ``fd``. - - If ``fd`` is a file-like object, we close it directly; otherwise - we use `os.close`. - - This method is provided for use by `IOLoop` subclasses (in - implementations of ``IOLoop.close(all_fds=True)`` and should - not generally be used by application code. - - .. versionadded:: 4.0 - """ - try: - try: - fd.close() - except AttributeError: - os.close(fd) - except OSError: - pass - - -class PollIOLoop(IOLoop): - """Base class for IOLoops built around a select-like function. - - For concrete implementations, see `tornado.platform.epoll.EPollIOLoop` - (Linux), `tornado.platform.kqueue.KQueueIOLoop` (BSD and Mac), or - `tornado.platform.select.SelectIOLoop` (all platforms). 
- """ - def initialize(self, impl, time_func=None, **kwargs): - super(PollIOLoop, self).initialize(**kwargs) - self._impl = impl - if hasattr(self._impl, 'fileno'): - set_close_exec(self._impl.fileno()) - self.time_func = time_func or time.time - self._handlers = {} - self._events = {} - self._callbacks = collections.deque() - self._timeouts = [] - self._cancellations = 0 - self._running = False - self._stopped = False - self._closing = False - self._thread_ident = None - self._pid = os.getpid() - self._blocking_signal_threshold = None - self._timeout_counter = itertools.count() - - # Create a pipe that we send bogus data to when we want to wake - # the I/O loop when it is idle - self._waker = Waker() - self.add_handler(self._waker.fileno(), - lambda fd, events: self._waker.consume(), - self.READ) - - @classmethod - def configurable_base(cls): - return PollIOLoop - - @classmethod - def configurable_default(cls): - if hasattr(select, "epoll"): - from tornado.platform.epoll import EPollIOLoop - return EPollIOLoop - if hasattr(select, "kqueue"): - # Python 2.6+ on BSD or Mac - from tornado.platform.kqueue import KQueueIOLoop - return KQueueIOLoop - from tornado.platform.select import SelectIOLoop - return SelectIOLoop - - def close(self, all_fds=False): - self._closing = True - self.remove_handler(self._waker.fileno()) - if all_fds: - for fd, handler in list(self._handlers.values()): - self.close_fd(fd) - self._waker.close() - self._impl.close() - self._callbacks = None - self._timeouts = None - if hasattr(self, '_executor'): - self._executor.shutdown() - - def add_handler(self, fd, handler, events): - fd, obj = self.split_fd(fd) - self._handlers[fd] = (obj, stack_context.wrap(handler)) - self._impl.register(fd, events | self.ERROR) - - def update_handler(self, fd, events): - fd, obj = self.split_fd(fd) - self._impl.modify(fd, events | self.ERROR) - - def remove_handler(self, fd): - fd, obj = self.split_fd(fd) - self._handlers.pop(fd, None) - self._events.pop(fd, None) - try: - self._impl.unregister(fd) - except Exception: - gen_log.debug("Error deleting fd from IOLoop", exc_info=True) - - def set_blocking_signal_threshold(self, seconds, action): - if not hasattr(signal, "setitimer"): - gen_log.error("set_blocking_signal_threshold requires a signal module " - "with the setitimer method") - return - self._blocking_signal_threshold = seconds - if seconds is not None: - signal.signal(signal.SIGALRM, - action if action is not None else signal.SIG_DFL) - - def start(self): - if self._running: - raise RuntimeError("IOLoop is already running") - if os.getpid() != self._pid: - raise RuntimeError("Cannot share PollIOLoops across processes") - self._setup_logging() - if self._stopped: - self._stopped = False - return - old_current = IOLoop.current(instance=False) - if old_current is not self: - self.make_current() - self._thread_ident = thread.get_ident() - self._running = True - - # signal.set_wakeup_fd closes a race condition in event loops: - # a signal may arrive at the beginning of select/poll/etc - # before it goes into its interruptible sleep, so the signal - # will be consumed without waking the select. The solution is - # for the (C, synchronous) signal handler to write to a pipe, - # which will then be seen by select. - # - # In python's signal handling semantics, this only matters on the - # main thread (fortunately, set_wakeup_fd only works on the main - # thread and will raise a ValueError otherwise). - # - # If someone has already set a wakeup fd, we don't want to - # disturb it. 
This is an issue for twisted, which does its - # SIGCHLD processing in response to its own wakeup fd being - # written to. As long as the wakeup fd is registered on the IOLoop, - # the loop will still wake up and everything should work. - old_wakeup_fd = None - if hasattr(signal, 'set_wakeup_fd') and os.name == 'posix': - # requires python 2.6+, unix. set_wakeup_fd exists but crashes - # the python process on windows. - try: - old_wakeup_fd = signal.set_wakeup_fd(self._waker.write_fileno()) - if old_wakeup_fd != -1: - # Already set, restore previous value. This is a little racy, - # but there's no clean get_wakeup_fd and in real use the - # IOLoop is just started once at the beginning. - signal.set_wakeup_fd(old_wakeup_fd) - old_wakeup_fd = None - except ValueError: - # Non-main thread, or the previous value of wakeup_fd - # is no longer valid. - old_wakeup_fd = None - - try: - while True: - # Prevent IO event starvation by delaying new callbacks - # to the next iteration of the event loop. - ncallbacks = len(self._callbacks) - - # Add any timeouts that have come due to the callback list. - # Do not run anything until we have determined which ones - # are ready, so timeouts that call add_timeout cannot - # schedule anything in this iteration. - due_timeouts = [] - if self._timeouts: - now = self.time() - while self._timeouts: - if self._timeouts[0].callback is None: - # The timeout was cancelled. Note that the - # cancellation check is repeated below for timeouts - # that are cancelled by another timeout or callback. - heapq.heappop(self._timeouts) - self._cancellations -= 1 - elif self._timeouts[0].deadline <= now: - due_timeouts.append(heapq.heappop(self._timeouts)) - else: - break - if (self._cancellations > 512 and - self._cancellations > (len(self._timeouts) >> 1)): - # Clean up the timeout queue when it gets large and it's - # more than half cancellations. - self._cancellations = 0 - self._timeouts = [x for x in self._timeouts - if x.callback is not None] - heapq.heapify(self._timeouts) - - for i in range(ncallbacks): - self._run_callback(self._callbacks.popleft()) - for timeout in due_timeouts: - if timeout.callback is not None: - self._run_callback(timeout.callback) - # Closures may be holding on to a lot of memory, so allow - # them to be freed before we go into our poll wait. - due_timeouts = timeout = None - - if self._callbacks: - # If any callbacks or timeouts called add_callback, - # we don't want to wait in poll() before we run them. - poll_timeout = 0.0 - elif self._timeouts: - # If there are any timeouts, schedule the first one. - # Use self.time() instead of 'now' to account for time - # spent running callbacks. - poll_timeout = self._timeouts[0].deadline - self.time() - poll_timeout = max(0, min(poll_timeout, _POLL_TIMEOUT)) - else: - # No timeouts and no callbacks, so use the default. - poll_timeout = _POLL_TIMEOUT - - if not self._running: - break - - if self._blocking_signal_threshold is not None: - # clear alarm so it doesn't fire while poll is waiting for - # events. 
- signal.setitimer(signal.ITIMER_REAL, 0, 0) - - try: - event_pairs = self._impl.poll(poll_timeout) - except Exception as e: - # Depending on python version and IOLoop implementation, - # different exception types may be thrown and there are - # two ways EINTR might be signaled: - # * e.errno == errno.EINTR - # * e.args is like (errno.EINTR, 'Interrupted system call') - if errno_from_exception(e) == errno.EINTR: - continue - else: - raise - - if self._blocking_signal_threshold is not None: - signal.setitimer(signal.ITIMER_REAL, - self._blocking_signal_threshold, 0) - - # Pop one fd at a time from the set of pending fds and run - # its handler. Since that handler may perform actions on - # other file descriptors, there may be reentrant calls to - # this IOLoop that modify self._events - self._events.update(event_pairs) - while self._events: - fd, events = self._events.popitem() - try: - fd_obj, handler_func = self._handlers[fd] - handler_func(fd_obj, events) - except (OSError, IOError) as e: - if errno_from_exception(e) == errno.EPIPE: - # Happens when the client closes the connection - pass - else: - self.handle_callback_exception(self._handlers.get(fd)) - except Exception: - self.handle_callback_exception(self._handlers.get(fd)) - fd_obj = handler_func = None - - finally: - # reset the stopped flag so another start/stop pair can be issued - self._stopped = False - if self._blocking_signal_threshold is not None: - signal.setitimer(signal.ITIMER_REAL, 0, 0) - if old_current is None: - IOLoop.clear_current() - elif old_current is not self: - old_current.make_current() - if old_wakeup_fd is not None: - signal.set_wakeup_fd(old_wakeup_fd) - - def stop(self): - self._running = False - self._stopped = True - self._waker.wake() - - def time(self): - return self.time_func() - - def call_at(self, deadline, callback, *args, **kwargs): - timeout = _Timeout( - deadline, - functools.partial(stack_context.wrap(callback), *args, **kwargs), - self) - heapq.heappush(self._timeouts, timeout) - return timeout - - def remove_timeout(self, timeout): - # Removing from a heap is complicated, so just leave the defunct - # timeout object in the queue (see discussion in - # http://docs.python.org/library/heapq.html). - # If this turns out to be a problem, we could add a garbage - # collection pass whenever there are too many dead timeouts. - timeout.callback = None - self._cancellations += 1 - - def add_callback(self, callback, *args, **kwargs): - if self._closing: - return - # Blindly insert into self._callbacks. This is safe even - # from signal handlers because deque.append is atomic. - self._callbacks.append(functools.partial( - stack_context.wrap(callback), *args, **kwargs)) - if thread.get_ident() != self._thread_ident: - # This will write one byte but Waker.consume() reads many - # at once, so it's ok to write even when not strictly - # necessary. - self._waker.wake() - else: - # If we're on the IOLoop's thread, we don't need to wake anyone. 
- pass - - def add_callback_from_signal(self, callback, *args, **kwargs): - with stack_context.NullContext(): - self.add_callback(callback, *args, **kwargs) - - -class _Timeout(object): - """An IOLoop timeout, a UNIX timestamp and a callback""" - - # Reduce memory overhead when there are lots of pending callbacks - __slots__ = ['deadline', 'callback', 'tdeadline'] - - def __init__(self, deadline, callback, io_loop): - if not isinstance(deadline, numbers.Real): - raise TypeError("Unsupported deadline %r" % deadline) - self.deadline = deadline - self.callback = callback - self.tdeadline = (deadline, next(io_loop._timeout_counter)) - - # Comparison methods to sort by deadline, with object id as a tiebreaker - # to guarantee a consistent ordering. The heapq module uses __le__ - # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons - # use __lt__). - def __lt__(self, other): - return self.tdeadline < other.tdeadline - - def __le__(self, other): - return self.tdeadline <= other.tdeadline - - -class PeriodicCallback(object): - """Schedules the given callback to be called periodically. - - The callback is called every ``callback_time`` milliseconds. - Note that the timeout is given in milliseconds, while most other - time-related functions in Tornado use seconds. - - If ``jitter`` is specified, each callback time will be randomly selected - within a window of ``jitter * callback_time`` milliseconds. - Jitter can be used to reduce alignment of events with similar periods. - A jitter of 0.1 means allowing a 10% variation in callback time. - The window is centered on ``callback_time`` so the total number of calls - within a given interval should not be significantly affected by adding - jitter. - - If the callback runs for longer than ``callback_time`` milliseconds, - subsequent invocations will be skipped to get back on schedule. - - `start` must be called after the `PeriodicCallback` is created. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. versionchanged:: 5.1 - The ``jitter`` argument is added. - """ - def __init__(self, callback, callback_time, jitter=0): - self.callback = callback - if callback_time <= 0: - raise ValueError("Periodic callback must have a positive callback_time") - self.callback_time = callback_time - self.jitter = jitter - self._running = False - self._timeout = None - - def start(self): - """Starts the timer.""" - # Looking up the IOLoop here allows to first instantiate the - # PeriodicCallback in another thread, then start it using - # IOLoop.add_callback(). - self.io_loop = IOLoop.current() - self._running = True - self._next_timeout = self.io_loop.time() - self._schedule_next() - - def stop(self): - """Stops the timer.""" - self._running = False - if self._timeout is not None: - self.io_loop.remove_timeout(self._timeout) - self._timeout = None - - def is_running(self): - """Return True if this `.PeriodicCallback` has been started. - - .. 
versionadded:: 4.1 - """ - return self._running - - def _run(self): - if not self._running: - return - try: - return self.callback() - except Exception: - self.io_loop.handle_callback_exception(self.callback) - finally: - self._schedule_next() - - def _schedule_next(self): - if self._running: - self._update_next(self.io_loop.time()) - self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) - - def _update_next(self, current_time): - callback_time_sec = self.callback_time / 1000.0 - if self.jitter: - # apply jitter fraction - callback_time_sec *= 1 + (self.jitter * (random.random() - 0.5)) - if self._next_timeout <= current_time: - # The period should be measured from the start of one call - # to the start of the next. If one call takes too long, - # skip cycles to get back to a multiple of the original - # schedule. - self._next_timeout += (math.floor((current_time - self._next_timeout) / - callback_time_sec) + 1) * callback_time_sec - else: - # If the clock moved backwards, ensure we advance the next - # timeout instead of recomputing the same value again. - # This may result in long gaps between callbacks if the - # clock jumps backwards by a lot, but the far more common - # scenario is a small NTP adjustment that should just be - # ignored. - # - # Note that on some systems if time.time() runs slower - # than time.monotonic() (most common on windows), we - # effectively experience a small backwards time jump on - # every iteration because PeriodicCallback uses - # time.time() while asyncio schedules callbacks using - # time.monotonic(). - # https://github.com/tornadoweb/tornado/issues/2333 - self._next_timeout += callback_time_sec +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""An I/O event loop for non-blocking sockets. + +In Tornado 6.0, `.IOLoop` is a wrapper around the `asyncio` event +loop, with a slightly different interface for historical reasons. +Applications can use either the `.IOLoop` interface or the underlying +`asyncio` event loop directly (unless compatibility with older +versions of Tornado is desired, in which case `.IOLoop` must be used). + +Typical applications will use a single `IOLoop` object, accessed via +`IOLoop.current` class method. The `IOLoop.start` method (or +equivalently, `asyncio.AbstractEventLoop.run_forever`) should usually +be called at the end of the ``main()`` function. Atypical applications +may use more than one `IOLoop`, such as one `IOLoop` per thread, or +per `unittest` case. 
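In application code, the single-loop pattern described here looks like the following minimal sketch (entry-point names are illustrative)::

    import tornado.ioloop

    def on_startup():
        print("loop is running")

    def main():
        io_loop = tornado.ioloop.IOLoop.current()
        io_loop.add_callback(on_startup)  # runs on the first loop iteration
        io_loop.start()                   # blocks until IOLoop.stop() is called

    if __name__ == '__main__':
        main()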
+ +""" + +import asyncio +import concurrent.futures +import datetime +import functools +import logging +import numbers +import os +import sys +import time +import math +import random + +from tornado.concurrent import ( + Future, + is_future, + chain_future, + future_set_exc_info, + future_add_done_callback, +) +from tornado.log import app_log +from tornado.util import Configurable, TimeoutError, import_object + +import typing +from typing import Union, Any, Type, Optional, Callable, TypeVar, Tuple, Awaitable + +if typing.TYPE_CHECKING: + from typing import Dict, List # noqa: F401 + + from typing_extensions import Protocol +else: + Protocol = object + + +class _Selectable(Protocol): + def fileno(self) -> int: + pass + + def close(self) -> None: + pass + + +_T = TypeVar("_T") +_S = TypeVar("_S", bound=_Selectable) + + +class IOLoop(Configurable): + """An I/O event loop. + + As of Tornado 6.0, `IOLoop` is a wrapper around the `asyncio` event + loop. + + Example usage for a simple TCP server: + + .. testcode:: + + import errno + import functools + import socket + + import tornado.ioloop + from tornado.iostream import IOStream + + async def handle_connection(connection, address): + stream = IOStream(connection) + message = await stream.read_until_close() + print("message from client:", message.decode().strip()) + + def connection_ready(sock, fd, events): + while True: + try: + connection, address = sock.accept() + except socket.error as e: + if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN): + raise + return + connection.setblocking(0) + io_loop = tornado.ioloop.IOLoop.current() + io_loop.spawn_callback(handle_connection, connection, address) + + if __name__ == '__main__': + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.setblocking(0) + sock.bind(("", 8888)) + sock.listen(128) + + io_loop = tornado.ioloop.IOLoop.current() + callback = functools.partial(connection_ready, sock) + io_loop.add_handler(sock.fileno(), callback, io_loop.READ) + io_loop.start() + + .. testoutput:: + :hide: + + By default, a newly-constructed `IOLoop` becomes the thread's current + `IOLoop`, unless there already is a current `IOLoop`. This behavior + can be controlled with the ``make_current`` argument to the `IOLoop` + constructor: if ``make_current=True``, the new `IOLoop` will always + try to become current and it raises an error if there is already a + current instance. If ``make_current=False``, the new `IOLoop` will + not try to become current. + + In general, an `IOLoop` cannot survive a fork or be shared across + processes in any way. When multiple processes are being used, each + process should create its own `IOLoop`, which also implies that + any objects which depend on the `IOLoop` (such as + `.AsyncHTTPClient`) must also be created in the child processes. + As a guideline, anything that starts processes (including the + `tornado.process` and `multiprocessing` modules) should do so as + early as possible, ideally the first thing the application does + after loading its configuration in ``main()``. + + .. versionchanged:: 4.2 + Added the ``make_current`` keyword argument to the `IOLoop` + constructor. + + .. versionchanged:: 5.0 + + Uses the `asyncio` event loop by default. The + ``IOLoop.configure`` method cannot be used on Python 3 except + to redundantly specify the `asyncio` event loop. + + """ + + # These constants were originally based on constants from the epoll module. 
+ NONE = 0 + READ = 0x001 + WRITE = 0x004 + ERROR = 0x018 + + # In Python 3, _ioloop_for_asyncio maps from asyncio loops to IOLoops. + _ioloop_for_asyncio = dict() # type: Dict[asyncio.AbstractEventLoop, IOLoop] + + @classmethod + def configure( + cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any + ) -> None: + if asyncio is not None: + from tornado.platform.asyncio import BaseAsyncIOLoop + + if isinstance(impl, str): + impl = import_object(impl) + if isinstance(impl, type) and not issubclass(impl, BaseAsyncIOLoop): + raise RuntimeError( + "only AsyncIOLoop is allowed when asyncio is available" + ) + super(IOLoop, cls).configure(impl, **kwargs) + + @staticmethod + def instance() -> "IOLoop": + """Deprecated alias for `IOLoop.current()`. + + .. versionchanged:: 5.0 + + Previously, this method returned a global singleton + `IOLoop`, in contrast with the per-thread `IOLoop` returned + by `current()`. In nearly all cases the two were the same + (when they differed, it was generally used from non-Tornado + threads to communicate back to the main thread's `IOLoop`). + This distinction is not present in `asyncio`, so in order + to facilitate integration with that package `instance()` + was changed to be an alias to `current()`. Applications + using the cross-thread communications aspect of + `instance()` should instead set their own global variable + to point to the `IOLoop` they want to use. + + .. deprecated:: 5.0 + """ + return IOLoop.current() + + def install(self) -> None: + """Deprecated alias for `make_current()`. + + .. versionchanged:: 5.0 + + Previously, this method would set this `IOLoop` as the + global singleton used by `IOLoop.instance()`. Now that + `instance()` is an alias for `current()`, `install()` + is an alias for `make_current()`. + + .. deprecated:: 5.0 + """ + self.make_current() + + @staticmethod + def clear_instance() -> None: + """Deprecated alias for `clear_current()`. + + .. versionchanged:: 5.0 + + Previously, this method would clear the `IOLoop` used as + the global singleton by `IOLoop.instance()`. Now that + `instance()` is an alias for `current()`, + `clear_instance()` is an alias for `clear_current()`. + + .. deprecated:: 5.0 + + """ + IOLoop.clear_current() + + @typing.overload + @staticmethod + def current() -> "IOLoop": + pass + + @typing.overload # noqa: F811 + @staticmethod + def current(instance: bool = True) -> Optional["IOLoop"]: + pass + + @staticmethod # noqa: F811 + def current(instance: bool = True) -> Optional["IOLoop"]: + """Returns the current thread's `IOLoop`. + + If an `IOLoop` is currently running or has been marked as + current by `make_current`, returns that instance. If there is + no current `IOLoop` and ``instance`` is true, creates one. + + .. versionchanged:: 4.1 + Added ``instance`` argument to control the fallback to + `IOLoop.instance()`. + .. versionchanged:: 5.0 + On Python 3, control of the current `IOLoop` is delegated + to `asyncio`, with this and other methods as pass-through accessors. + The ``instance`` argument now controls whether an `IOLoop` + is created automatically when there is none, instead of + whether we fall back to `IOLoop.instance()` (which is now + an alias for this method). ``instance=False`` is deprecated, + since even if we do not create an `IOLoop`, this method + may initialize the asyncio loop. 
+ """ + try: + loop = asyncio.get_event_loop() + except (RuntimeError, AssertionError): + if not instance: + return None + raise + try: + return IOLoop._ioloop_for_asyncio[loop] + except KeyError: + if instance: + from tornado.platform.asyncio import AsyncIOMainLoop + + current = AsyncIOMainLoop(make_current=True) # type: Optional[IOLoop] + else: + current = None + return current + + def make_current(self) -> None: + """Makes this the `IOLoop` for the current thread. + + An `IOLoop` automatically becomes current for its thread + when it is started, but it is sometimes useful to call + `make_current` explicitly before starting the `IOLoop`, + so that code run at startup time can find the right + instance. + + .. versionchanged:: 4.1 + An `IOLoop` created while there is no current `IOLoop` + will automatically become current. + + .. versionchanged:: 5.0 + This method also sets the current `asyncio` event loop. + """ + # The asyncio event loops override this method. + raise NotImplementedError() + + @staticmethod + def clear_current() -> None: + """Clears the `IOLoop` for the current thread. + + Intended primarily for use by test frameworks in between tests. + + .. versionchanged:: 5.0 + This method also clears the current `asyncio` event loop. + """ + old = IOLoop.current(instance=False) + if old is not None: + old._clear_current_hook() + if asyncio is None: + IOLoop._current.instance = None + + def _clear_current_hook(self) -> None: + """Instance method called when an IOLoop ceases to be current. + + May be overridden by subclasses as a counterpart to make_current. + """ + pass + + @classmethod + def configurable_base(cls) -> Type[Configurable]: + return IOLoop + + @classmethod + def configurable_default(cls) -> Type[Configurable]: + from tornado.platform.asyncio import AsyncIOLoop + + return AsyncIOLoop + + def initialize(self, make_current: bool = None) -> None: + if make_current is None: + if IOLoop.current(instance=False) is None: + self.make_current() + elif make_current: + current = IOLoop.current(instance=False) + # AsyncIO loops can already be current by this point. + if current is not None and current is not self: + raise RuntimeError("current IOLoop already exists") + self.make_current() + + def close(self, all_fds: bool = False) -> None: + """Closes the `IOLoop`, freeing any resources used. + + If ``all_fds`` is true, all file descriptors registered on the + IOLoop will be closed (not just the ones created by the + `IOLoop` itself). + + Many applications will only use a single `IOLoop` that runs for the + entire lifetime of the process. In that case closing the `IOLoop` + is not necessary since everything will be cleaned up when the + process exits. `IOLoop.close` is provided mainly for scenarios + such as unit tests, which create and destroy a large number of + ``IOLoops``. + + An `IOLoop` must be completely stopped before it can be closed. This + means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must + be allowed to return before attempting to call `IOLoop.close()`. + Therefore the call to `close` will usually appear just after + the call to `start` rather than near the call to `stop`. + + .. versionchanged:: 3.1 + If the `IOLoop` implementation supports non-integer objects + for "file descriptors", those objects will have their + ``close`` method when ``all_fds`` is true. 
+ """ + raise NotImplementedError() + + @typing.overload + def add_handler( + self, fd: int, handler: Callable[[int, int], None], events: int + ) -> None: + pass + + @typing.overload # noqa: F811 + def add_handler( + self, fd: _S, handler: Callable[[_S, int], None], events: int + ) -> None: + pass + + def add_handler( # noqa: F811 + self, fd: Union[int, _Selectable], handler: Callable[..., None], events: int + ) -> None: + """Registers the given handler to receive the given events for ``fd``. + + The ``fd`` argument may either be an integer file descriptor or + a file-like object with a ``fileno()`` and ``close()`` method. + + The ``events`` argument is a bitwise or of the constants + ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``. + + When an event occurs, ``handler(fd, events)`` will be run. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def update_handler(self, fd: Union[int, _Selectable], events: int) -> None: + """Changes the events we listen for ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def remove_handler(self, fd: Union[int, _Selectable]) -> None: + """Stop listening for events on ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def start(self) -> None: + """Starts the I/O loop. + + The loop will run until one of the callbacks calls `stop()`, which + will make the loop stop after the current event iteration completes. + """ + raise NotImplementedError() + + def _setup_logging(self) -> None: + """The IOLoop catches and logs exceptions, so it's + important that log output be visible. However, python's + default behavior for non-root loggers (prior to python + 3.2) is to print an unhelpful "no handlers could be + found" message rather than the actual log entry, so we + must explicitly configure logging if we've made it this + far without anything. + + This method should be called from start() in subclasses. + """ + if not any( + [ + logging.getLogger().handlers, + logging.getLogger("tornado").handlers, + logging.getLogger("tornado.application").handlers, + ] + ): + logging.basicConfig() + + def stop(self) -> None: + """Stop the I/O loop. + + If the event loop is not currently running, the next call to `start()` + will return immediately. + + Note that even after `stop` has been called, the `IOLoop` is not + completely stopped until `IOLoop.start` has also returned. + Some work that was scheduled before the call to `stop` may still + be run before the `IOLoop` shuts down. + """ + raise NotImplementedError() + + def run_sync(self, func: Callable, timeout: float = None) -> Any: + """Starts the `IOLoop`, runs the given function, and stops the loop. + + The function must return either an awaitable object or + ``None``. If the function returns an awaitable object, the + `IOLoop` will run until the awaitable is resolved (and + `run_sync()` will return the awaitable's result). If it raises + an exception, the `IOLoop` will stop and the exception will be + re-raised to the caller. + + The keyword-only argument ``timeout`` may be used to set + a maximum duration for the function. If the timeout expires, + a `tornado.util.TimeoutError` is raised. 
+ + This method is useful to allow asynchronous calls in a + ``main()`` function:: + + async def main(): + # do stuff... + + if __name__ == '__main__': + IOLoop.current().run_sync(main) + + .. versionchanged:: 4.3 + Returning a non-``None``, non-awaitable value is now an error. + + .. versionchanged:: 5.0 + If a timeout occurs, the ``func`` coroutine will be cancelled. + + """ + future_cell = [None] # type: List[Optional[Future]] + + def run() -> None: + try: + result = func() + if result is not None: + from tornado.gen import convert_yielded + + result = convert_yielded(result) + except Exception: + fut = Future() # type: Future[Any] + future_cell[0] = fut + future_set_exc_info(fut, sys.exc_info()) + else: + if is_future(result): + future_cell[0] = result + else: + fut = Future() + future_cell[0] = fut + fut.set_result(result) + assert future_cell[0] is not None + self.add_future(future_cell[0], lambda future: self.stop()) + + self.add_callback(run) + if timeout is not None: + + def timeout_callback() -> None: + # If we can cancel the future, do so and wait on it. If not, + # Just stop the loop and return with the task still pending. + # (If we neither cancel nor wait for the task, a warning + # will be logged). + assert future_cell[0] is not None + if not future_cell[0].cancel(): + self.stop() + + timeout_handle = self.add_timeout(self.time() + timeout, timeout_callback) + self.start() + if timeout is not None: + self.remove_timeout(timeout_handle) + assert future_cell[0] is not None + if future_cell[0].cancelled() or not future_cell[0].done(): + raise TimeoutError("Operation timed out after %s seconds" % timeout) + return future_cell[0].result() + + def time(self) -> float: + """Returns the current time according to the `IOLoop`'s clock. + + The return value is a floating-point number relative to an + unspecified time in the past. + + Historically, the IOLoop could be customized to use e.g. + `time.monotonic` instead of `time.time`, but this is not + currently supported and so this method is equivalent to + `time.time`. + + """ + return time.time() + + def add_timeout( + self, + deadline: Union[float, datetime.timedelta], + callback: Callable[..., None], + *args: Any, + **kwargs: Any + ) -> object: + """Runs the ``callback`` at the time ``deadline`` from the I/O loop. + + Returns an opaque handle that may be passed to + `remove_timeout` to cancel. + + ``deadline`` may be a number denoting a time (on the same + scale as `IOLoop.time`, normally `time.time`), or a + `datetime.timedelta` object for a deadline relative to the + current time. Since Tornado 4.0, `call_later` is a more + convenient alternative for the relative case since it does not + require a timedelta object. + + Note that it is not safe to call `add_timeout` from other threads. + Instead, you must use `add_callback` to transfer control to the + `IOLoop`'s thread, and then call `add_timeout` from there. + + Subclasses of IOLoop must implement either `add_timeout` or + `call_at`; the default implementations of each will call + the other. `call_at` is usually easier to implement, but + subclasses that wish to maintain compatibility with Tornado + versions prior to 4.0 must use `add_timeout` instead. + + .. versionchanged:: 4.0 + Now passes through ``*args`` and ``**kwargs`` to the callback. 
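The two deadline forms `add_timeout` accepts, per the docstring above, in a minimal sketch (values illustrative)::

    import datetime
    from tornado.ioloop import IOLoop

    loop = IOLoop.current()
    # absolute deadline on the IOLoop.time() scale
    h1 = loop.add_timeout(loop.time() + 1.0, lambda: print("absolute"))
    # relative deadline via timedelta (or use call_later for this case)
    h2 = loop.add_timeout(datetime.timedelta(seconds=1), lambda: print("relative"))
    loop.remove_timeout(h2)  # handles are opaque; cancel only via remove_timeout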
+ """ + if isinstance(deadline, numbers.Real): + return self.call_at(deadline, callback, *args, **kwargs) + elif isinstance(deadline, datetime.timedelta): + return self.call_at( + self.time() + deadline.total_seconds(), callback, *args, **kwargs + ) + else: + raise TypeError("Unsupported deadline %r" % deadline) + + def call_later( + self, delay: float, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> object: + """Runs the ``callback`` after ``delay`` seconds have passed. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.call_at(self.time() + delay, callback, *args, **kwargs) + + def call_at( + self, when: float, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> object: + """Runs the ``callback`` at the absolute time designated by ``when``. + + ``when`` must be a number using the same reference point as + `IOLoop.time`. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.add_timeout(when, callback, *args, **kwargs) + + def remove_timeout(self, timeout: object) -> None: + """Cancels a pending timeout. + + The argument is a handle as returned by `add_timeout`. It is + safe to call `remove_timeout` even if the callback has already + been run. + """ + raise NotImplementedError() + + def add_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None: + """Calls the given callback on the next I/O loop iteration. + + It is safe to call this method from any thread at any time, + except from a signal handler. Note that this is the **only** + method in `IOLoop` that makes this thread-safety guarantee; all + other interaction with the `IOLoop` must be done from that + `IOLoop`'s thread. `add_callback()` may be used to transfer + control from other threads to the `IOLoop`'s thread. + + To add a callback from a signal handler, see + `add_callback_from_signal`. + """ + raise NotImplementedError() + + def add_callback_from_signal( + self, callback: Callable, *args: Any, **kwargs: Any + ) -> None: + """Calls the given callback on the next I/O loop iteration. + + Safe for use from a Python signal handler; should not be used + otherwise. + """ + raise NotImplementedError() + + def spawn_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None: + """Calls the given callback on the next IOLoop iteration. + + As of Tornado 6.0, this method is equivalent to `add_callback`. + + .. versionadded:: 4.0 + """ + self.add_callback(callback, *args, **kwargs) + + def add_future( + self, + future: "Union[Future[_T], concurrent.futures.Future[_T]]", + callback: Callable[["Future[_T]"], None], + ) -> None: + """Schedules a callback on the ``IOLoop`` when the given + `.Future` is finished. + + The callback is invoked with one argument, the + `.Future`. + + This method only accepts `.Future` objects and not other + awaitables (unlike most of Tornado where the two are + interchangeable). + """ + if isinstance(future, Future): + # Note that we specifically do not want the inline behavior of + # tornado.concurrent.future_add_done_callback. 
We always want + # this callback scheduled on the next IOLoop iteration (which + # asyncio.Future always does). + # + # Wrap the callback in self._run_callback so we control + # the error logging (i.e. it goes to tornado.log.app_log + # instead of asyncio's log). + future.add_done_callback( + lambda f: self._run_callback(functools.partial(callback, future)) + ) + else: + assert is_future(future) + # For concurrent futures, we use self.add_callback, so + # it's fine if future_add_done_callback inlines that call. + future_add_done_callback( + future, lambda f: self.add_callback(callback, future) + ) + + def run_in_executor( + self, + executor: Optional[concurrent.futures.Executor], + func: Callable[..., _T], + *args: Any + ) -> Awaitable[_T]: + """Runs a function in a ``concurrent.futures.Executor``. If + ``executor`` is ``None``, the IO loop's default executor will be used. + + Use `functools.partial` to pass keyword arguments to ``func``. + + .. versionadded:: 5.0 + """ + if executor is None: + if not hasattr(self, "_executor"): + from tornado.process import cpu_count + + self._executor = concurrent.futures.ThreadPoolExecutor( + max_workers=(cpu_count() * 5) + ) # type: concurrent.futures.Executor + executor = self._executor + c_future = executor.submit(func, *args) + # Concurrent Futures are not usable with await. Wrap this in a + # Tornado Future instead, using self.add_future for thread-safety. + t_future = Future() # type: Future[_T] + self.add_future(c_future, lambda f: chain_future(f, t_future)) + return t_future + + def set_default_executor(self, executor: concurrent.futures.Executor) -> None: + """Sets the default executor to use with :meth:`run_in_executor`. + + .. versionadded:: 5.0 + """ + self._executor = executor + + def _run_callback(self, callback: Callable[[], Any]) -> None: + """Runs a callback with error handling. + + .. versionchanged:: 6.0 + + CancelledErrors are no longer logged. + """ + try: + ret = callback() + if ret is not None: + from tornado import gen + + # Functions that return Futures typically swallow all + # exceptions and store them in the Future. If a Future + # makes it out to the IOLoop, ensure its exception (if any) + # gets logged too. + try: + ret = gen.convert_yielded(ret) + except gen.BadYieldError: + # It's not unusual for add_callback to be used with + # methods returning a non-None and non-yieldable + # result, which should just be ignored. + pass + else: + self.add_future(ret, self._discard_future_result) + except asyncio.CancelledError: + pass + except Exception: + app_log.error("Exception in callback %r", callback, exc_info=True) + + def _discard_future_result(self, future: Future) -> None: + """Avoid unhandled-exception warnings from spawned coroutines.""" + future.result() + + def split_fd( + self, fd: Union[int, _Selectable] + ) -> Tuple[int, Union[int, _Selectable]]: + """Returns an (fd, obj) pair from an ``fd`` parameter. + + We accept both raw file descriptors and file-like objects as + input to `add_handler` and related methods. When a file-like + object is passed, we must retain the object itself so we can + close it correctly when the `IOLoop` shuts down, but the + poller interfaces favor file descriptors (they will accept + file-like objects and call ``fileno()`` for you, but they + always return the descriptor itself). + + This method is provided for use by `IOLoop` subclasses and should + not generally be used by application code. + + .. versionadded:: 4.0 + """ + if isinstance(fd, int): + return fd, fd + return fd.fileno(), fd + + def close_fd(self, fd: Union[int, _Selectable]) -> None: + """Utility method to close an ``fd``. + + If ``fd`` is a file-like object, we close it directly; otherwise + we use `os.close`. + + This method is provided for use by `IOLoop` subclasses (in + implementations of ``IOLoop.close(all_fds=True)``) and should + not generally be used by application code. + + .. versionadded:: 4.0 + """ + try: + if isinstance(fd, int): + os.close(fd) + else: + fd.close() + except OSError: + pass + + +class _Timeout(object): + """An IOLoop timeout, a UNIX timestamp and a callback""" + + # Reduce memory overhead when there are lots of pending callbacks + __slots__ = ["deadline", "callback", "tdeadline"] + + def __init__( + self, deadline: float, callback: Callable[[], None], io_loop: IOLoop + ) -> None: + if not isinstance(deadline, numbers.Real): + raise TypeError("Unsupported deadline %r" % deadline) + self.deadline = deadline + self.callback = callback + self.tdeadline = ( + deadline, + next(io_loop._timeout_counter), + ) # type: Tuple[float, int] + + # Comparison methods to sort by deadline, with object id as a tiebreaker + # to guarantee a consistent ordering. The heapq module uses __le__ + # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons + # use __lt__). + def __lt__(self, other: "_Timeout") -> bool: + return self.tdeadline < other.tdeadline + + def __le__(self, other: "_Timeout") -> bool: + return self.tdeadline <= other.tdeadline + + +class PeriodicCallback(object): + """Schedules the given callback to be called periodically. + + The callback is called every ``callback_time`` milliseconds. + Note that the timeout is given in milliseconds, while most other + time-related functions in Tornado use seconds. + + If ``jitter`` is specified, each callback time will be randomly selected + within a window of ``jitter * callback_time`` milliseconds. + Jitter can be used to reduce alignment of events with similar periods. + A jitter of 0.1 means allowing a 10% variation in callback time. + The window is centered on ``callback_time`` so the total number of calls + within a given interval should not be significantly affected by adding + jitter. + + If the callback runs for longer than ``callback_time`` milliseconds, + subsequent invocations will be skipped to get back on schedule. + + `start` must be called after the `PeriodicCallback` is created. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + .. versionchanged:: 5.1 + The ``jitter`` argument is added. + """ + + def __init__( + self, callback: Callable[[], None], callback_time: float, jitter: float = 0 + ) -> None: + self.callback = callback + if callback_time <= 0: + raise ValueError("Periodic callback must have a positive callback_time") + self.callback_time = callback_time + self.jitter = jitter + self._running = False + self._timeout = None # type: object + + def start(self) -> None: + """Starts the timer.""" + # Looking up the IOLoop here allows one to first instantiate the + # PeriodicCallback in another thread, then start it using + # IOLoop.add_callback().
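A minimal sketch of the pattern that comment describes: construct the `PeriodicCallback` on one thread, then hand `start()` to the loop's own thread (interval and jitter values are illustrative, and the loop is assumed to be running)::

    from tornado.ioloop import IOLoop, PeriodicCallback

    pc = PeriodicCallback(lambda: print("tick"), callback_time=500, jitter=0.1)
    IOLoop.current().add_callback(pc.start)  # start() executes on the loop thread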
+ self.io_loop = IOLoop.current() + self._running = True + self._next_timeout = self.io_loop.time() + self._schedule_next() + + def stop(self) -> None: + """Stops the timer.""" + self._running = False + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def is_running(self) -> bool: + """Returns ``True`` if this `.PeriodicCallback` has been started. + + .. versionadded:: 4.1 + """ + return self._running + + def _run(self) -> None: + if not self._running: + return + try: + return self.callback() + except Exception: + app_log.error("Exception in callback %r", self.callback, exc_info=True) + finally: + self._schedule_next() + + def _schedule_next(self) -> None: + if self._running: + self._update_next(self.io_loop.time()) + self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) + + def _update_next(self, current_time: float) -> None: + callback_time_sec = self.callback_time / 1000.0 + if self.jitter: + # apply jitter fraction + callback_time_sec *= 1 + (self.jitter * (random.random() - 0.5)) + if self._next_timeout <= current_time: + # The period should be measured from the start of one call + # to the start of the next. If one call takes too long, + # skip cycles to get back to a multiple of the original + # schedule. + self._next_timeout += ( + math.floor((current_time - self._next_timeout) / callback_time_sec) + 1 + ) * callback_time_sec + else: + # If the clock moved backwards, ensure we advance the next + # timeout instead of recomputing the same value again. + # This may result in long gaps between callbacks if the + # clock jumps backwards by a lot, but the far more common + # scenario is a small NTP adjustment that should just be + # ignored. + # + # Note that on some systems if time.time() runs slower + # than time.monotonic() (most common on windows), we + # effectively experience a small backwards time jump on + # every iteration because PeriodicCallback uses + # time.time() while asyncio schedules callbacks using + # time.monotonic(). + # https://github.com/tornadoweb/tornado/issues/2333 + self._next_timeout += callback_time_sec diff --git a/server/www/packages/packages-linux/x64/tornado/iostream.py b/server/www/packages/packages-linux/x64/tornado/iostream.py index 89e1e23..6504f8e 100644 --- a/server/www/packages/packages-linux/x64/tornado/iostream.py +++ b/server/www/packages/packages-linux/x64/tornado/iostream.py @@ -1,1757 +1,1681 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utility classes to write to and read from non-blocking files and sockets. - -Contents: - -* `BaseIOStream`: Generic interface for reading and writing. -* `IOStream`: Implementation of BaseIOStream using non-blocking sockets. -* `SSLIOStream`: SSL-aware version of IOStream. -* `PipeIOStream`: Pipe-based IOStream implementation. 
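The iostream.py rewrite below follows the same pattern as ioloop.py: the Tornado 5.1 implementation is removed and the 6.0 version added. For orientation, the classes listed in the old module docstring are used roughly like this in the Future/await style that is valid on both sides of the upgrade (host, port, and request bytes are placeholders)::

    import socket
    from tornado.ioloop import IOLoop
    from tornado.iostream import IOStream

    async def fetch_banner():
        stream = IOStream(socket.socket())
        await stream.connect(("example.com", 80))
        await stream.write(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        data = await stream.read_until_close()
        stream.close()
        return data

    print(IOLoop.current().run_sync(fetch_banner)[:40])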
-""" - -from __future__ import absolute_import, division, print_function - -import collections -import errno -import io -import numbers -import os -import socket -import sys -import re -import warnings - -from tornado.concurrent import Future -from tornado import ioloop -from tornado.log import gen_log, app_log -from tornado.netutil import ssl_wrap_socket, _client_ssl_defaults, _server_ssl_defaults -from tornado import stack_context -from tornado.util import errno_from_exception - -try: - from tornado.platform.posix import _set_nonblocking -except ImportError: - _set_nonblocking = None - -try: - import ssl -except ImportError: - # ssl is not available on Google App Engine - ssl = None - -# These errnos indicate that a non-blocking operation must be retried -# at a later time. On most platforms they're the same value, but on -# some they differ. -_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) - -if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore - -# These errnos indicate that a connection has been abruptly terminated. -# They should be caught and handled less noisily than other errors. -_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, - errno.ETIMEDOUT) - -if hasattr(errno, "WSAECONNRESET"): - _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) # type: ignore # noqa: E501 - -if sys.platform == 'darwin': - # OSX appears to have a race condition that causes send(2) to return - # EPROTOTYPE if called while a socket is being torn down: - # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ - # Since the socket is being closed anyway, treat this as an ECONNRESET - # instead of an unexpected error. - _ERRNO_CONNRESET += (errno.EPROTOTYPE,) # type: ignore - -# More non-portable errnos: -_ERRNO_INPROGRESS = (errno.EINPROGRESS,) - -if hasattr(errno, "WSAEINPROGRESS"): - _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore - -_WINDOWS = sys.platform.startswith('win') - - -class StreamClosedError(IOError): - """Exception raised by `IOStream` methods when the stream is closed. - - Note that the close callback is scheduled to run *after* other - callbacks on the stream (to allow for buffered data to be processed), - so you may see this error before you see the close callback. - - The ``real_error`` attribute contains the underlying error that caused - the stream to close (if any). - - .. versionchanged:: 4.3 - Added the ``real_error`` attribute. - """ - def __init__(self, real_error=None): - super(StreamClosedError, self).__init__('Stream is closed') - self.real_error = real_error - - -class UnsatisfiableReadError(Exception): - """Exception raised when a read cannot be satisfied. - - Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes`` - argument. - """ - pass - - -class StreamBufferFullError(Exception): - """Exception raised by `IOStream` methods when the buffer is full. - """ - - -class _StreamBuffer(object): - """ - A specialized buffer that tries to avoid copies when large pieces - of data are encountered. 
- """ - - def __init__(self): - # A sequence of (False, bytearray) and (True, memoryview) objects - self._buffers = collections.deque() - # Position in the first buffer - self._first_pos = 0 - self._size = 0 - - def __len__(self): - return self._size - - # Data above this size will be appended separately instead - # of extending an existing bytearray - _large_buf_threshold = 2048 - - def append(self, data): - """ - Append the given piece of data (should be a buffer-compatible object). - """ - size = len(data) - if size > self._large_buf_threshold: - if not isinstance(data, memoryview): - data = memoryview(data) - self._buffers.append((True, data)) - elif size > 0: - if self._buffers: - is_memview, b = self._buffers[-1] - new_buf = is_memview or len(b) >= self._large_buf_threshold - else: - new_buf = True - if new_buf: - self._buffers.append((False, bytearray(data))) - else: - b += data - - self._size += size - - def peek(self, size): - """ - Get a view over at most ``size`` bytes (possibly fewer) at the - current buffer position. - """ - assert size > 0 - try: - is_memview, b = self._buffers[0] - except IndexError: - return memoryview(b'') - - pos = self._first_pos - if is_memview: - return b[pos:pos + size] - else: - return memoryview(b)[pos:pos + size] - - def advance(self, size): - """ - Advance the current buffer position by ``size`` bytes. - """ - assert 0 < size <= self._size - self._size -= size - pos = self._first_pos - - buffers = self._buffers - while buffers and size > 0: - is_large, b = buffers[0] - b_remain = len(b) - size - pos - if b_remain <= 0: - buffers.popleft() - size -= len(b) - pos - pos = 0 - elif is_large: - pos += size - size = 0 - else: - # Amortized O(1) shrink for Python 2 - pos += size - if len(b) <= 2 * pos: - del b[:pos] - pos = 0 - size = 0 - - assert size == 0 - self._first_pos = pos - - -class BaseIOStream(object): - """A utility class to write to and read from a non-blocking file or socket. - - We support a non-blocking ``write()`` and a family of ``read_*()`` methods. - All of the methods take an optional ``callback`` argument and return a - `.Future` only if no callback is given. When the operation completes, - the callback will be run or the `.Future` will resolve with the data - read (or ``None`` for ``write()``). All outstanding ``Futures`` will - resolve with a `StreamClosedError` when the stream is closed; users - of the callback interface will be notified via - `.BaseIOStream.set_close_callback` instead. - - When a stream is closed due to an error, the IOStream's ``error`` - attribute contains the exception object. - - Subclasses must implement `fileno`, `close_fd`, `write_to_fd`, - `read_from_fd`, and optionally `get_fd_error`. - """ - def __init__(self, max_buffer_size=None, - read_chunk_size=None, max_write_buffer_size=None): - """`BaseIOStream` constructor. - - :arg max_buffer_size: Maximum amount of incoming data to buffer; - defaults to 100MB. - :arg read_chunk_size: Amount of data to read at one time from the - underlying transport; defaults to 64KB. - :arg max_write_buffer_size: Amount of outgoing data to buffer; - defaults to unlimited. - - .. versionchanged:: 4.0 - Add the ``max_write_buffer_size`` parameter. Changed default - ``read_chunk_size`` to 64KB. - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been - removed. 
- """ - self.io_loop = ioloop.IOLoop.current() - self.max_buffer_size = max_buffer_size or 104857600 - # A chunk size that is too close to max_buffer_size can cause - # spurious failures. - self.read_chunk_size = min(read_chunk_size or 65536, - self.max_buffer_size // 2) - self.max_write_buffer_size = max_write_buffer_size - self.error = None - self._read_buffer = bytearray() - self._read_buffer_pos = 0 - self._read_buffer_size = 0 - self._user_read_buffer = False - self._after_user_read_buffer = None - self._write_buffer = _StreamBuffer() - self._total_write_index = 0 - self._total_write_done_index = 0 - self._read_delimiter = None - self._read_regex = None - self._read_max_bytes = None - self._read_bytes = None - self._read_partial = False - self._read_until_close = False - self._read_callback = None - self._read_future = None - self._streaming_callback = None - self._write_callback = None - self._write_futures = collections.deque() - self._close_callback = None - self._connect_callback = None - self._connect_future = None - # _ssl_connect_future should be defined in SSLIOStream - # but it's here so we can clean it up in maybe_run_close_callback. - # TODO: refactor that so subclasses can add additional futures - # to be cancelled. - self._ssl_connect_future = None - self._connecting = False - self._state = None - self._pending_callbacks = 0 - self._closed = False - - def fileno(self): - """Returns the file descriptor for this stream.""" - raise NotImplementedError() - - def close_fd(self): - """Closes the file underlying this stream. - - ``close_fd`` is called by `BaseIOStream` and should not be called - elsewhere; other users should call `close` instead. - """ - raise NotImplementedError() - - def write_to_fd(self, data): - """Attempts to write ``data`` to the underlying file. - - Returns the number of bytes written. - """ - raise NotImplementedError() - - def read_from_fd(self, buf): - """Attempts to read from the underlying file. - - Reads up to ``len(buf)`` bytes, storing them in the buffer. - Returns the number of bytes read. Returns None if there was - nothing to read (the socket returned `~errno.EWOULDBLOCK` or - equivalent), and zero on EOF. - - .. versionchanged:: 5.0 - - Interface redesigned to take a buffer and return a number - of bytes instead of a freshly-allocated object. - """ - raise NotImplementedError() - - def get_fd_error(self): - """Returns information about any error on the underlying file. - - This method is called after the `.IOLoop` has signaled an error on the - file descriptor, and should return an Exception (such as `socket.error` - with additional information, or None if no such information is - available. - """ - return None - - def read_until_regex(self, regex, callback=None, max_bytes=None): - """Asynchronously read until we have matched the given regex. - - The result includes the data that matches the regex and anything - that came before it. If a callback is given, it will be run - with the data as an argument; if not, this method returns a - `.Future`. - - If ``max_bytes`` is not None, the connection will be closed - if more than ``max_bytes`` bytes have been read and the regex is - not satisfied. - - .. versionchanged:: 4.0 - Added the ``max_bytes`` argument. The ``callback`` argument is - now optional and a `.Future` will be returned if it is omitted. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. Use the returned `.Future` instead. 
- - """ - future = self._set_read_callback(callback) - self._read_regex = re.compile(regex) - self._read_max_bytes = max_bytes - try: - self._try_inline_read() - except UnsatisfiableReadError as e: - # Handle this the same way as in _handle_events. - gen_log.info("Unsatisfiable read, closing connection: %s" % e) - self.close(exc_info=e) - return future - except: - if future is not None: - # Ensure that the future doesn't log an error because its - # failure was never examined. - future.add_done_callback(lambda f: f.exception()) - raise - return future - - def read_until(self, delimiter, callback=None, max_bytes=None): - """Asynchronously read until we have found the given delimiter. - - The result includes all the data read including the delimiter. - If a callback is given, it will be run with the data as an argument; - if not, this method returns a `.Future`. - - If ``max_bytes`` is not None, the connection will be closed - if more than ``max_bytes`` bytes have been read and the delimiter - is not found. - - .. versionchanged:: 4.0 - Added the ``max_bytes`` argument. The ``callback`` argument is - now optional and a `.Future` will be returned if it is omitted. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. Use the returned `.Future` instead. - """ - future = self._set_read_callback(callback) - self._read_delimiter = delimiter - self._read_max_bytes = max_bytes - try: - self._try_inline_read() - except UnsatisfiableReadError as e: - # Handle this the same way as in _handle_events. - gen_log.info("Unsatisfiable read, closing connection: %s" % e) - self.close(exc_info=e) - return future - except: - if future is not None: - future.add_done_callback(lambda f: f.exception()) - raise - return future - - def read_bytes(self, num_bytes, callback=None, streaming_callback=None, - partial=False): - """Asynchronously read a number of bytes. - - If a ``streaming_callback`` is given, it will be called with chunks - of data as they become available, and the final result will be empty. - Otherwise, the result is all the data that was read. - If a callback is given, it will be run with the data as an argument; - if not, this method returns a `.Future`. - - If ``partial`` is true, the callback is run as soon as we have - any bytes to return (but never more than ``num_bytes``) - - .. versionchanged:: 4.0 - Added the ``partial`` argument. The callback argument is now - optional and a `.Future` will be returned if it is omitted. - - .. deprecated:: 5.1 - - The ``callback`` and ``streaming_callback`` arguments are - deprecated and will be removed in Tornado 6.0. Use the - returned `.Future` (and ``partial=True`` for - ``streaming_callback``) instead. - - """ - future = self._set_read_callback(callback) - assert isinstance(num_bytes, numbers.Integral) - self._read_bytes = num_bytes - self._read_partial = partial - if streaming_callback is not None: - warnings.warn("streaming_callback is deprecated, use partial instead", - DeprecationWarning) - self._streaming_callback = stack_context.wrap(streaming_callback) - try: - self._try_inline_read() - except: - if future is not None: - future.add_done_callback(lambda f: f.exception()) - raise - return future - - def read_into(self, buf, callback=None, partial=False): - """Asynchronously read a number of bytes. - - ``buf`` must be a writable buffer into which data will be read. - If a callback is given, it will be run with the number of read - bytes as an argument; if not, this method returns a `.Future`. 
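A hedged sketch of the ``max_bytes`` guard described above: if no delimiter arrives within the limit, the stream is closed, which the awaiting caller sees as a closed-stream error (delimiter and limit are illustrative)::

    from tornado.iostream import StreamClosedError, UnsatisfiableReadError

    async def read_one_line(stream):
        try:
            return await stream.read_until(b"\r\n", max_bytes=4096)
        except (StreamClosedError, UnsatisfiableReadError):
            return None  # peer gone, or no delimiter within 4096 bytes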
- - If ``partial`` is true, the callback is run as soon as any bytes - have been read. Otherwise, it is run when the ``buf`` has been - entirely filled with read data. - - .. versionadded:: 5.0 - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. Use the returned `.Future` instead. - - """ - future = self._set_read_callback(callback) - - # First copy data already in read buffer - available_bytes = self._read_buffer_size - n = len(buf) - if available_bytes >= n: - end = self._read_buffer_pos + n - buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos:end] - del self._read_buffer[:end] - self._after_user_read_buffer = self._read_buffer - elif available_bytes > 0: - buf[:available_bytes] = memoryview(self._read_buffer)[self._read_buffer_pos:] - - # Set up the supplied buffer as our temporary read buffer. - # The original (if it had any data remaining) has been - # saved for later. - self._user_read_buffer = True - self._read_buffer = buf - self._read_buffer_pos = 0 - self._read_buffer_size = available_bytes - self._read_bytes = n - self._read_partial = partial - - try: - self._try_inline_read() - except: - if future is not None: - future.add_done_callback(lambda f: f.exception()) - raise - return future - - def read_until_close(self, callback=None, streaming_callback=None): - """Asynchronously reads all data from the socket until it is closed. - - If a ``streaming_callback`` is given, it will be called with chunks - of data as they become available, and the final result will be empty. - Otherwise, the result is all the data that was read. - If a callback is given, it will be run with the data as an argument; - if not, this method returns a `.Future`. - - Note that if a ``streaming_callback`` is used, data will be - read from the socket as quickly as it becomes available; there - is no way to apply backpressure or cancel the reads. If flow - control or cancellation are desired, use a loop with - `read_bytes(partial=True) <.read_bytes>` instead. - - .. versionchanged:: 4.0 - The callback argument is now optional and a `.Future` will - be returned if it is omitted. - - .. deprecated:: 5.1 - - The ``callback`` and ``streaming_callback`` arguments are - deprecated and will be removed in Tornado 6.0. Use the - returned `.Future` (and `read_bytes` with ``partial=True`` - for ``streaming_callback``) instead. - - """ - future = self._set_read_callback(callback) - if streaming_callback is not None: - warnings.warn("streaming_callback is deprecated, use read_bytes(partial=True) instead", - DeprecationWarning) - self._streaming_callback = stack_context.wrap(streaming_callback) - if self.closed(): - if self._streaming_callback is not None: - self._run_read_callback(self._read_buffer_size, True) - self._run_read_callback(self._read_buffer_size, False) - return future - self._read_until_close = True - try: - self._try_inline_read() - except: - if future is not None: - future.add_done_callback(lambda f: f.exception()) - raise - return future - - def write(self, data, callback=None): - """Asynchronously write the given data to this stream. - - If ``callback`` is given, we call it when all of the buffered write - data has been successfully written to the stream. If there was - previously buffered write data and an old write callback, that - callback is simply overwritten with this new callback. - - If no ``callback`` is given, this method returns a `.Future` that - resolves (with a result of ``None``) when the write has been - completed. 
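The flow-controlled replacement that these deprecation notes point to, sketched as a `read_bytes(partial=True)` loop (``sink`` is a placeholder for any consumer)::

    from tornado.iostream import StreamClosedError

    async def pump(stream, sink):
        try:
            while True:
                chunk = await stream.read_bytes(64 * 1024, partial=True)
                sink(chunk)  # nothing more is read until sink() returns: backpressure
        except StreamClosedError:
            pass  # normal end of stream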
- - The ``data`` argument may be of type `bytes` or `memoryview`. - - .. versionchanged:: 4.0 - Now returns a `.Future` if no callback is given. - - .. versionchanged:: 4.5 - Added support for `memoryview` arguments. - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. Use the returned `.Future` instead. - - """ - self._check_closed() - if data: - if (self.max_write_buffer_size is not None and - len(self._write_buffer) + len(data) > self.max_write_buffer_size): - raise StreamBufferFullError("Reached maximum write buffer size") - self._write_buffer.append(data) - self._total_write_index += len(data) - if callback is not None: - warnings.warn("callback argument is deprecated, use returned Future instead", - DeprecationWarning) - self._write_callback = stack_context.wrap(callback) - future = None - else: - future = Future() - future.add_done_callback(lambda f: f.exception()) - self._write_futures.append((self._total_write_index, future)) - if not self._connecting: - self._handle_write() - if self._write_buffer: - self._add_io_state(self.io_loop.WRITE) - self._maybe_add_error_listener() - return future - - def set_close_callback(self, callback): - """Call the given callback when the stream is closed. - - This mostly is not necessary for applications that use the - `.Future` interface; all outstanding ``Futures`` will resolve - with a `StreamClosedError` when the stream is closed. However, - it is still useful as a way to signal that the stream has been - closed while no other read or write is in progress. - - Unlike other callback-based interfaces, ``set_close_callback`` - will not be removed in Tornado 6.0. - """ - self._close_callback = stack_context.wrap(callback) - self._maybe_add_error_listener() - - def close(self, exc_info=False): - """Close this stream. - - If ``exc_info`` is true, set the ``error`` attribute to the current - exception from `sys.exc_info` (or if ``exc_info`` is a tuple, - use that instead of `sys.exc_info`). 
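A minimal sketch of the write-buffer cap mentioned above: once ``max_write_buffer_size`` would be exceeded, `write` raises `StreamBufferFullError` instead of buffering without bound::

    from tornado.iostream import StreamBufferFullError

    async def send(stream, payload):
        try:
            await stream.write(payload)
        except StreamBufferFullError:
            stream.close()  # peer is reading too slowly; shed the connection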
- """ - if not self.closed(): - if exc_info: - if isinstance(exc_info, tuple): - self.error = exc_info[1] - elif isinstance(exc_info, BaseException): - self.error = exc_info - else: - exc_info = sys.exc_info() - if any(exc_info): - self.error = exc_info[1] - if self._read_until_close: - if (self._streaming_callback is not None and - self._read_buffer_size): - self._run_read_callback(self._read_buffer_size, True) - self._read_until_close = False - self._run_read_callback(self._read_buffer_size, False) - if self._state is not None: - self.io_loop.remove_handler(self.fileno()) - self._state = None - self.close_fd() - self._closed = True - self._maybe_run_close_callback() - - def _maybe_run_close_callback(self): - # If there are pending callbacks, don't run the close callback - # until they're done (see _maybe_add_error_handler) - if self.closed() and self._pending_callbacks == 0: - futures = [] - if self._read_future is not None: - futures.append(self._read_future) - self._read_future = None - futures += [future for _, future in self._write_futures] - self._write_futures.clear() - if self._connect_future is not None: - futures.append(self._connect_future) - self._connect_future = None - if self._ssl_connect_future is not None: - futures.append(self._ssl_connect_future) - self._ssl_connect_future = None - for future in futures: - future.set_exception(StreamClosedError(real_error=self.error)) - future.exception() - if self._close_callback is not None: - cb = self._close_callback - self._close_callback = None - self._run_callback(cb) - # Delete any unfinished callbacks to break up reference cycles. - self._read_callback = self._write_callback = None - # Clear the buffers so they can be cleared immediately even - # if the IOStream object is kept alive by a reference cycle. - # TODO: Clear the read buffer too; it currently breaks some tests. - self._write_buffer = None - - def reading(self): - """Returns true if we are currently reading from the stream.""" - return self._read_callback is not None or self._read_future is not None - - def writing(self): - """Returns true if we are currently writing to the stream.""" - return bool(self._write_buffer) - - def closed(self): - """Returns true if the stream has been closed.""" - return self._closed - - def set_nodelay(self, value): - """Sets the no-delay flag for this stream. - - By default, data written to TCP streams may be held for a time - to make the most efficient use of bandwidth (according to - Nagle's algorithm). The no-delay flag requests that data be - written as soon as possible, even if doing so would consume - additional bandwidth. - - This flag is currently defined only for TCP-based ``IOStreams``. - - .. versionadded:: 3.1 - """ - pass - - def _handle_events(self, fd, events): - if self.closed(): - gen_log.warning("Got events for closed stream %s", fd) - return - try: - if self._connecting: - # Most IOLoops will report a write failed connect - # with the WRITE event, but SelectIOLoop reports a - # READ as well so we must check for connecting before - # either. - self._handle_connect() - if self.closed(): - return - if events & self.io_loop.READ: - self._handle_read() - if self.closed(): - return - if events & self.io_loop.WRITE: - self._handle_write() - if self.closed(): - return - if events & self.io_loop.ERROR: - self.error = self.get_fd_error() - # We may have queued up a user callback in _handle_read or - # _handle_write, so don't close the IOStream until those - # callbacks have had a chance to run. 
- self.io_loop.add_callback(self.close) - return - state = self.io_loop.ERROR - if self.reading(): - state |= self.io_loop.READ - if self.writing(): - state |= self.io_loop.WRITE - if state == self.io_loop.ERROR and self._read_buffer_size == 0: - # If the connection is idle, listen for reads too so - # we can tell if the connection is closed. If there is - # data in the read buffer we won't run the close callback - # yet anyway, so we don't need to listen in this case. - state |= self.io_loop.READ - if state != self._state: - assert self._state is not None, \ - "shouldn't happen: _handle_events without self._state" - self._state = state - self.io_loop.update_handler(self.fileno(), self._state) - except UnsatisfiableReadError as e: - gen_log.info("Unsatisfiable read, closing connection: %s" % e) - self.close(exc_info=e) - except Exception as e: - gen_log.error("Uncaught exception, closing connection.", - exc_info=True) - self.close(exc_info=e) - raise - - def _run_callback(self, callback, *args): - def wrapper(): - self._pending_callbacks -= 1 - try: - return callback(*args) - except Exception as e: - app_log.error("Uncaught exception, closing connection.", - exc_info=True) - # Close the socket on an uncaught exception from a user callback - # (It would eventually get closed when the socket object is - # gc'd, but we don't want to rely on gc happening before we - # run out of file descriptors) - self.close(exc_info=e) - # Re-raise the exception so that IOLoop.handle_callback_exception - # can see it and log the error - raise - finally: - self._maybe_add_error_listener() - # We schedule callbacks to be run on the next IOLoop iteration - # rather than running them directly for several reasons: - # * Prevents unbounded stack growth when a callback calls an - # IOLoop operation that immediately runs another callback - # * Provides a predictable execution context for e.g. - # non-reentrant mutexes - # * Ensures that the try/except in wrapper() is run outside - # of the application's StackContexts - with stack_context.NullContext(): - # stack_context was already captured in callback, we don't need to - # capture it again for IOStream's wrapper. This is especially - # important if the callback was pre-wrapped before entry to - # IOStream (as in HTTPConnection._header_callback), as we could - # capture and leak the wrong context here. - self._pending_callbacks += 1 - self.io_loop.add_callback(wrapper) - - def _read_to_buffer_loop(self): - # This method is called from _handle_read and _try_inline_read. - try: - if self._read_bytes is not None: - target_bytes = self._read_bytes - elif self._read_max_bytes is not None: - target_bytes = self._read_max_bytes - elif self.reading(): - # For read_until without max_bytes, or - # read_until_close, read as much as we can before - # scanning for the delimiter. - target_bytes = None - else: - target_bytes = 0 - next_find_pos = 0 - # Pretend to have a pending callback so that an EOF in - # _read_to_buffer doesn't trigger an immediate close - # callback. At the end of this method we'll either - # establish a real pending callback via - # _read_from_buffer or run the close callback. - # - # We need two try statements here so that - # pending_callbacks is decremented before the `except` - # clause below (which calls `close` and does need to - # trigger the callback) - self._pending_callbacks += 1 - while not self.closed(): - # Read from the socket until we get EWOULDBLOCK or equivalent. 
-                # SSL sockets do some internal buffering, and if the data is
-                # sitting in the SSL object's buffer select() and friends
-                # can't see it; the only way to find out if it's there is to
-                # try to read it.
-                if self._read_to_buffer() == 0:
-                    break
-
-                self._run_streaming_callback()
-
-                # If we've read all the bytes we can use, break out of
-                # this loop. We can't just call _read_from_buffer here
-                # because of subtle interactions with the
-                # pending_callback and error_listener mechanisms.
-                #
-                # If we've reached target_bytes, we know we're done.
-                if (target_bytes is not None and
-                        self._read_buffer_size >= target_bytes):
-                    break
-
-                # Otherwise, we need to call the more expensive find_read_pos.
-                # It's inefficient to do this on every read, so instead
-                # do it on the first read and whenever the read buffer
-                # size has doubled.
-                if self._read_buffer_size >= next_find_pos:
-                    pos = self._find_read_pos()
-                    if pos is not None:
-                        return pos
-                    next_find_pos = self._read_buffer_size * 2
-            return self._find_read_pos()
-        finally:
-            self._pending_callbacks -= 1
-
-    def _handle_read(self):
-        try:
-            pos = self._read_to_buffer_loop()
-        except UnsatisfiableReadError:
-            raise
-        except Exception as e:
-            gen_log.warning("error on read: %s" % e)
-            self.close(exc_info=e)
-            return
-        if pos is not None:
-            self._read_from_buffer(pos)
-            return
-        else:
-            self._maybe_run_close_callback()
-
-    def _set_read_callback(self, callback):
-        assert self._read_callback is None, "Already reading"
-        assert self._read_future is None, "Already reading"
-        if callback is not None:
-            warnings.warn("callbacks are deprecated, use returned Future instead",
-                          DeprecationWarning)
-            self._read_callback = stack_context.wrap(callback)
-        else:
-            self._read_future = Future()
-        return self._read_future
-
-    def _run_read_callback(self, size, streaming):
-        if self._user_read_buffer:
-            self._read_buffer = self._after_user_read_buffer or bytearray()
-            self._after_user_read_buffer = None
-            self._read_buffer_pos = 0
-            self._read_buffer_size = len(self._read_buffer)
-            self._user_read_buffer = False
-            result = size
-        else:
-            result = self._consume(size)
-        if streaming:
-            callback = self._streaming_callback
-        else:
-            callback = self._read_callback
-        self._read_callback = self._streaming_callback = None
-        if self._read_future is not None:
-            assert callback is None
-            future = self._read_future
-            self._read_future = None
-
-            future.set_result(result)
-        if callback is not None:
-            assert (self._read_future is None) or streaming
-            self._run_callback(callback, result)
-        else:
-            # If we scheduled a callback, we will add the error listener
-            # afterwards. If we didn't, we have to do it now.
-            self._maybe_add_error_listener()
-
-    def _try_inline_read(self):
-        """Attempt to complete the current read operation from buffered data.
-
-        If the read can be completed without blocking, schedules the
-        read callback on the next IOLoop iteration; otherwise starts
-        listening for reads on the socket.
-        """
-        # See if we've already got the data from a previous read
-        self._run_streaming_callback()
-        pos = self._find_read_pos()
-        if pos is not None:
-            self._read_from_buffer(pos)
-            return
-        self._check_closed()
-        try:
-            pos = self._read_to_buffer_loop()
-        except Exception:
-            # If there was an error in _read_to_buffer, we called close()
-            # already, but couldn't run the close callback because of
-            # _pending_callbacks. Before we escape from this function,
-            # run the close callback if applicable.
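The deprecation warning in `_set_read_callback` above is the crux of the 5.1-to-6.0 change this hunk records: explicit read callbacks go away and every read returns a `.Future`. A hedged before/after sketch (the handler names are illustrative; the `_maybe_run_close_callback` call the comment above refers to follows right after it):

    # Tornado 4/5 callback style, deprecated in 5.1 and removed in 6.0:
    def old_style(stream):
        def on_line(line):
            print("got", line)
        stream.read_until(b"\r\n", callback=on_line)

    # Tornado 6 style: await the Future that the read method returns.
    async def new_style(stream):
        line = await stream.read_until(b"\r\n")
        print("got", line)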
- self._maybe_run_close_callback() - raise - if pos is not None: - self._read_from_buffer(pos) - return - # We couldn't satisfy the read inline, so either close the stream - # or listen for new data. - if self.closed(): - self._maybe_run_close_callback() - else: - self._add_io_state(ioloop.IOLoop.READ) - - def _read_to_buffer(self): - """Reads from the socket and appends the result to the read buffer. - - Returns the number of bytes read. Returns 0 if there is nothing - to read (i.e. the read returns EWOULDBLOCK or equivalent). On - error closes the socket and raises an exception. - """ - try: - while True: - try: - if self._user_read_buffer: - buf = memoryview(self._read_buffer)[self._read_buffer_size:] - else: - buf = bytearray(self.read_chunk_size) - bytes_read = self.read_from_fd(buf) - except (socket.error, IOError, OSError) as e: - if errno_from_exception(e) == errno.EINTR: - continue - # ssl.SSLError is a subclass of socket.error - if self._is_connreset(e): - # Treat ECONNRESET as a connection close rather than - # an error to minimize log spam (the exception will - # be available on self.error for apps that care). - self.close(exc_info=e) - return - self.close(exc_info=e) - raise - break - if bytes_read is None: - return 0 - elif bytes_read == 0: - self.close() - return 0 - if not self._user_read_buffer: - self._read_buffer += memoryview(buf)[:bytes_read] - self._read_buffer_size += bytes_read - finally: - # Break the reference to buf so we don't waste a chunk's worth of - # memory in case an exception hangs on to our stack frame. - buf = None - if self._read_buffer_size > self.max_buffer_size: - gen_log.error("Reached maximum read buffer size") - self.close() - raise StreamBufferFullError("Reached maximum read buffer size") - return bytes_read - - def _run_streaming_callback(self): - if self._streaming_callback is not None and self._read_buffer_size: - bytes_to_consume = self._read_buffer_size - if self._read_bytes is not None: - bytes_to_consume = min(self._read_bytes, bytes_to_consume) - self._read_bytes -= bytes_to_consume - self._run_read_callback(bytes_to_consume, True) - - def _read_from_buffer(self, pos): - """Attempts to complete the currently-pending read from the buffer. - - The argument is either a position in the read buffer or None, - as returned by _find_read_pos. - """ - self._read_bytes = self._read_delimiter = self._read_regex = None - self._read_partial = False - self._run_read_callback(pos, False) - - def _find_read_pos(self): - """Attempts to find a position in the read buffer that satisfies - the currently-pending read. - - Returns a position in the buffer if the current read can be satisfied, - or None if it cannot. - """ - if (self._read_bytes is not None and - (self._read_buffer_size >= self._read_bytes or - (self._read_partial and self._read_buffer_size > 0))): - num_bytes = min(self._read_bytes, self._read_buffer_size) - return num_bytes - elif self._read_delimiter is not None: - # Multi-byte delimiters (e.g. '\r\n') may straddle two - # chunks in the read buffer, so we can't easily find them - # without collapsing the buffer. However, since protocols - # using delimited reads (as opposed to reads of a known - # length) tend to be "line" oriented, the delimiter is likely - # to be in the first few chunks. Merge the buffer gradually - # since large merges are relatively expensive and get undone in - # _consume(). 
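`_read_to_buffer` above leans on the `read_from_fd` convention its docstring spells out: a positive count means data arrived, ``None`` means would-block, ``0`` means EOF. Restated on a bare non-blocking socket (a sketch of the convention, not library code; the helper name is mine):

    import errno
    import socket

    def read_from_fd_convention(sock, buf):
        # n > 0 : bytes were read into buf.
        # None  : nothing available yet (EWOULDBLOCK/EAGAIN); retry later.
        # 0     : the peer closed the connection (EOF).
        try:
            n = sock.recv_into(buf)
        except OSError as e:
            if e.errno in (errno.EWOULDBLOCK, errno.EAGAIN):
                return None
            raise
        return n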
- if self._read_buffer: - loc = self._read_buffer.find(self._read_delimiter, - self._read_buffer_pos) - if loc != -1: - loc -= self._read_buffer_pos - delimiter_len = len(self._read_delimiter) - self._check_max_bytes(self._read_delimiter, - loc + delimiter_len) - return loc + delimiter_len - self._check_max_bytes(self._read_delimiter, - self._read_buffer_size) - elif self._read_regex is not None: - if self._read_buffer: - m = self._read_regex.search(self._read_buffer, - self._read_buffer_pos) - if m is not None: - loc = m.end() - self._read_buffer_pos - self._check_max_bytes(self._read_regex, loc) - return loc - self._check_max_bytes(self._read_regex, self._read_buffer_size) - return None - - def _check_max_bytes(self, delimiter, size): - if (self._read_max_bytes is not None and - size > self._read_max_bytes): - raise UnsatisfiableReadError( - "delimiter %r not found within %d bytes" % ( - delimiter, self._read_max_bytes)) - - def _handle_write(self): - while True: - size = len(self._write_buffer) - if not size: - break - assert size > 0 - try: - if _WINDOWS: - # On windows, socket.send blows up if given a - # write buffer that's too large, instead of just - # returning the number of bytes it was able to - # process. Therefore we must not call socket.send - # with more than 128KB at a time. - size = 128 * 1024 - - num_bytes = self.write_to_fd(self._write_buffer.peek(size)) - if num_bytes == 0: - break - self._write_buffer.advance(num_bytes) - self._total_write_done_index += num_bytes - except (socket.error, IOError, OSError) as e: - if e.args[0] in _ERRNO_WOULDBLOCK: - break - else: - if not self._is_connreset(e): - # Broken pipe errors are usually caused by connection - # reset, and its better to not log EPIPE errors to - # minimize log spam - gen_log.warning("Write error on %s: %s", - self.fileno(), e) - self.close(exc_info=e) - return - - while self._write_futures: - index, future = self._write_futures[0] - if index > self._total_write_done_index: - break - self._write_futures.popleft() - future.set_result(None) - - if not len(self._write_buffer): - if self._write_callback: - callback = self._write_callback - self._write_callback = None - self._run_callback(callback) - - def _consume(self, loc): - # Consume loc bytes from the read buffer and return them - if loc == 0: - return b"" - assert loc <= self._read_buffer_size - # Slice the bytearray buffer into bytes, without intermediate copying - b = (memoryview(self._read_buffer) - [self._read_buffer_pos:self._read_buffer_pos + loc] - ).tobytes() - self._read_buffer_pos += loc - self._read_buffer_size -= loc - # Amortized O(1) shrink - # (this heuristic is implemented natively in Python 3.4+ - # but is replicated here for Python 2) - if self._read_buffer_pos > self._read_buffer_size: - del self._read_buffer[:self._read_buffer_pos] - self._read_buffer_pos = 0 - return b - - def _check_closed(self): - if self.closed(): - raise StreamClosedError(real_error=self.error) - - def _maybe_add_error_listener(self): - # This method is part of an optimization: to detect a connection that - # is closed when we're not actively reading or writing, we must listen - # for read events. However, it is inefficient to do this when the - # connection is first established because we are going to read or write - # immediately anyway. Instead, we insert checks at various times to - # see if the connection is idle and add the read listener then. 
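The shrink heuristic at the end of `_consume` above deletes the consumed prefix only once it outgrows the remaining data, so each byte is moved O(1) times on average. A standalone toy with the same rule (class and method names are mine, not Tornado's):

    class ConsumeBuffer:
        # Toy read buffer using the same amortized-O(1) shrink as _consume.
        def __init__(self):
            self.data = bytearray()
            self.pos = 0  # start of unconsumed bytes

        def feed(self, chunk):
            self.data += chunk

        def consume(self, n):
            assert n <= len(self.data) - self.pos
            out = bytes(memoryview(self.data)[self.pos:self.pos + n])
            self.pos += n
            # Shrink only when the consumed prefix exceeds what remains.
            if self.pos > len(self.data) - self.pos:
                del self.data[:self.pos]
                self.pos = 0
            return out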
-        if self._pending_callbacks != 0:
-            return
-        if self._state is None or self._state == ioloop.IOLoop.ERROR:
-            if self.closed():
-                self._maybe_run_close_callback()
-            elif (self._read_buffer_size == 0 and
-                  self._close_callback is not None):
-                self._add_io_state(ioloop.IOLoop.READ)
-
-    def _add_io_state(self, state):
-        """Adds `state` (IOLoop.{READ,WRITE} flags) to our event handler.
-
-        Implementation notes: Reads and writes have a fast path and a
-        slow path. The fast path reads synchronously from socket
-        buffers, while the slow path uses `_add_io_state` to schedule
-        an IOLoop callback. Note that in both cases, the callback is
-        run asynchronously with `_run_callback`.
-
-        To detect closed connections, we must have called
-        `_add_io_state` at some point, but we want to delay this as
-        much as possible so we don't have to set an `IOLoop.ERROR`
-        listener that will be overwritten by the next slow-path
-        operation. As long as there are callbacks scheduled for
-        fast-path ops, those callbacks may do more reads.
-        If a sequence of fast-path ops does not end in a slow-path op
-        (e.g. for an @asynchronous long-poll request), we must add
-        the error handler. This is done in `_run_callback` and `write`
-        (since the write callback is optional so we can have a
-        fast-path write with no `_run_callback`).
-        """
-        if self.closed():
-            # connection has been closed, so there can be no future events
-            return
-        if self._state is None:
-            self._state = ioloop.IOLoop.ERROR | state
-            with stack_context.NullContext():
-                self.io_loop.add_handler(
-                    self.fileno(), self._handle_events, self._state)
-        elif not self._state & state:
-            self._state = self._state | state
-            self.io_loop.update_handler(self.fileno(), self._state)
-
-    def _is_connreset(self, exc):
-        """Return true if exc is ECONNRESET or equivalent.
-
-        May be overridden in subclasses.
-        """
-        return (isinstance(exc, (socket.error, IOError)) and
-                errno_from_exception(exc) in _ERRNO_CONNRESET)
-
-
-class IOStream(BaseIOStream):
-    r"""Socket-based `IOStream` implementation.
-
-    This class supports the read and write methods from `BaseIOStream`
-    plus a `connect` method.
-
-    The ``socket`` parameter may either be connected or unconnected.
-    For server operations the socket is the result of calling
-    `socket.accept <socket.socket.accept>`. For client operations the
-    socket is created with `socket.socket`, and may either be
-    connected before passing it to the `IOStream` or connected with
-    `IOStream.connect`.
-
-    A very simple (and broken) HTTP client using this class:
-
-    .. testcode::
-
-        import tornado.ioloop
-        import tornado.iostream
-        import socket
-
-        async def main():
-            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
-            stream = tornado.iostream.IOStream(s)
-            await stream.connect(("friendfeed.com", 80))
-            await stream.write(b"GET / HTTP/1.0\r\nHost: friendfeed.com\r\n\r\n")
-            header_data = await stream.read_until(b"\r\n\r\n")
-            headers = {}
-            for line in header_data.split(b"\r\n"):
-                parts = line.split(b":")
-                if len(parts) == 2:
-                    headers[parts[0].strip()] = parts[1].strip()
-            body_data = await stream.read_bytes(int(headers[b"Content-Length"]))
-            print(body_data)
-            stream.close()
-
-        if __name__ == '__main__':
-            tornado.ioloop.IOLoop.current().run_sync(main)
-
-    .. testoutput::
-       :hide:
-
-    """
-    def __init__(self, socket, *args, **kwargs):
-        self.socket = socket
-        self.socket.setblocking(False)
-        super(IOStream, self).__init__(*args, **kwargs)
-
-    def fileno(self):
-        return self.socket
-
-    def close_fd(self):
-        self.socket.close()
-        self.socket = None
-
-    def get_fd_error(self):
-        errno = self.socket.getsockopt(socket.SOL_SOCKET,
-                                       socket.SO_ERROR)
-        return socket.error(errno, os.strerror(errno))
-
-    def read_from_fd(self, buf):
-        try:
-            return self.socket.recv_into(buf)
-        except socket.error as e:
-            if e.args[0] in _ERRNO_WOULDBLOCK:
-                return None
-            else:
-                raise
-        finally:
-            buf = None
-
-    def write_to_fd(self, data):
-        try:
-            return self.socket.send(data)
-        finally:
-            # Avoid keeping a reference to data, which can be a memoryview.
-            # See https://github.com/tornadoweb/tornado/pull/2008
-            del data
-
-    def connect(self, address, callback=None, server_hostname=None):
-        """Connects the socket to a remote address without blocking.
-
-        May only be called if the socket passed to the constructor was
-        not previously connected. The address parameter is in the
-        same format as for `socket.connect <socket.socket.connect>` for
-        the type of socket passed to the IOStream constructor,
-        e.g. an ``(ip, port)`` tuple. Hostnames are accepted here,
-        but will be resolved synchronously and block the IOLoop.
-        If you have a hostname instead of an IP address, the `.TCPClient`
-        class is recommended instead of calling this method directly.
-        `.TCPClient` will do asynchronous DNS resolution and handle
-        both IPv4 and IPv6.
-
-        If ``callback`` is specified, it will be called with no
-        arguments when the connection is completed; if not this method
-        returns a `.Future` (whose result after a successful
-        connection will be the stream itself).
-
-        In SSL mode, the ``server_hostname`` parameter will be used
-        for certificate validation (unless disabled in the
-        ``ssl_options``) and SNI (if supported; requires Python
-        2.7.9+).
-
-        Note that it is safe to call `IOStream.write
-        <BaseIOStream.write>` while the connection is pending, in
-        which case the data will be written as soon as the connection
-        is ready. Calling `IOStream` read methods before the socket is
-        connected works on some platforms but is non-portable.
-
-        .. versionchanged:: 4.0
-           If no callback is given, returns a `.Future`.
-
-        .. versionchanged:: 4.2
-           SSL certificates are validated by default; pass
-           ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a
-           suitably-configured `ssl.SSLContext` to the
-           `SSLIOStream` constructor to disable.
-
-        .. deprecated:: 5.1
-
-           The ``callback`` argument is deprecated and will be removed
-           in Tornado 6.0. Use the returned `.Future` instead.
-
-        """
-        self._connecting = True
-        if callback is not None:
-            warnings.warn("callback argument is deprecated, use returned Future instead",
-                          DeprecationWarning)
-            self._connect_callback = stack_context.wrap(callback)
-            future = None
-        else:
-            future = self._connect_future = Future()
-        try:
-            self.socket.connect(address)
-        except socket.error as e:
-            # In non-blocking mode we expect connect() to raise an
-            # exception with EINPROGRESS or EWOULDBLOCK.
-            #
-            # On freebsd, other errors such as ECONNREFUSED may be
-            # returned immediately when attempting to connect to
-            # localhost, so handle them the same way as an error
-            # reported later in _handle_connect.
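As the `connect` docstring above warns, handing it a hostname resolves DNS synchronously on the IOLoop; `.TCPClient` is the recommended alternative. A sketch (``fetch_line`` and the host/port are placeholders):

    from tornado.tcpclient import TCPClient

    async def fetch_line(host, port):
        # TCPClient resolves DNS asynchronously and handles both
        # IPv4 and IPv6, unlike IOStream.connect with a hostname.
        stream = await TCPClient().connect(host, port)
        try:
            return await stream.read_until(b"\n")
        finally:
            stream.close()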
- if (errno_from_exception(e) not in _ERRNO_INPROGRESS and - errno_from_exception(e) not in _ERRNO_WOULDBLOCK): - if future is None: - gen_log.warning("Connect error on fd %s: %s", - self.socket.fileno(), e) - self.close(exc_info=e) - return future - self._add_io_state(self.io_loop.WRITE) - return future - - def start_tls(self, server_side, ssl_options=None, server_hostname=None): - """Convert this `IOStream` to an `SSLIOStream`. - - This enables protocols that begin in clear-text mode and - switch to SSL after some initial negotiation (such as the - ``STARTTLS`` extension to SMTP and IMAP). - - This method cannot be used if there are outstanding reads - or writes on the stream, or if there is any data in the - IOStream's buffer (data in the operating system's socket - buffer is allowed). This means it must generally be used - immediately after reading or writing the last clear-text - data. It can also be used immediately after connecting, - before any reads or writes. - - The ``ssl_options`` argument may be either an `ssl.SSLContext` - object or a dictionary of keyword arguments for the - `ssl.wrap_socket` function. The ``server_hostname`` argument - will be used for certificate validation unless disabled - in the ``ssl_options``. - - This method returns a `.Future` whose result is the new - `SSLIOStream`. After this method has been called, - any other operation on the original stream is undefined. - - If a close callback is defined on this stream, it will be - transferred to the new stream. - - .. versionadded:: 4.0 - - .. versionchanged:: 4.2 - SSL certificates are validated by default; pass - ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a - suitably-configured `ssl.SSLContext` to disable. - """ - if (self._read_callback or self._read_future or - self._write_callback or self._write_futures or - self._connect_callback or self._connect_future or - self._pending_callbacks or self._closed or - self._read_buffer or self._write_buffer): - raise ValueError("IOStream is not idle; cannot convert to SSL") - if ssl_options is None: - if server_side: - ssl_options = _server_ssl_defaults - else: - ssl_options = _client_ssl_defaults - - socket = self.socket - self.io_loop.remove_handler(socket) - self.socket = None - socket = ssl_wrap_socket(socket, ssl_options, - server_hostname=server_hostname, - server_side=server_side, - do_handshake_on_connect=False) - orig_close_callback = self._close_callback - self._close_callback = None - - future = Future() - ssl_stream = SSLIOStream(socket, ssl_options=ssl_options) - # Wrap the original close callback so we can fail our Future as well. - # If we had an "unwrap" counterpart to this method we would need - # to restore the original callback after our Future resolves - # so that repeated wrap/unwrap calls don't build up layers. - - def close_callback(): - if not future.done(): - # Note that unlike most Futures returned by IOStream, - # this one passes the underlying error through directly - # instead of wrapping everything in a StreamClosedError - # with a real_error attribute. This is because once the - # connection is established it's more helpful to raise - # the SSLError directly than to hide it behind a - # StreamClosedError (and the client is expecting SSL - # issues rather than network issues since this method is - # named start_tls). 
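A hedged sketch of the clear-text-then-TLS handoff that `start_tls` above enables, on an SMTP-like dialogue (the protocol lines are illustrative and error handling is omitted):

    import ssl

    async def upgrade_to_tls(stream, hostname):
        await stream.read_until(b"\r\n")   # server greeting
        await stream.write(b"STARTTLS\r\n")
        await stream.read_until(b"\r\n")   # server agrees
        # The stream must be idle here: no pending reads or writes and
        # nothing left in its buffers, per the docstring above.
        tls_stream = await stream.start_tls(
            server_side=False,
            ssl_options=ssl.create_default_context(),
            server_hostname=hostname,
        )
        # The original stream is undefined from here on; use tls_stream.
        return tls_stream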
-                future.set_exception(ssl_stream.error or StreamClosedError())
-            if orig_close_callback is not None:
-                orig_close_callback()
-        ssl_stream.set_close_callback(close_callback)
-        ssl_stream._ssl_connect_callback = lambda: future.set_result(ssl_stream)
-        ssl_stream.max_buffer_size = self.max_buffer_size
-        ssl_stream.read_chunk_size = self.read_chunk_size
-        return future
-
-    def _handle_connect(self):
-        try:
-            err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
-        except socket.error as e:
-            # Hurd doesn't allow SO_ERROR for loopback sockets because all
-            # errors for such sockets are reported synchronously.
-            if errno_from_exception(e) == errno.ENOPROTOOPT:
-                err = 0
-        if err != 0:
-            self.error = socket.error(err, os.strerror(err))
-            # IOLoop implementations may vary: some of them return
-            # an error state before the socket becomes writable, so
-            # in that case a connection failure would be handled by the
-            # error path in _handle_events instead of here.
-            if self._connect_future is None:
-                gen_log.warning("Connect error on fd %s: %s",
-                                self.socket.fileno(), errno.errorcode[err])
-            self.close()
-            return
-        if self._connect_callback is not None:
-            callback = self._connect_callback
-            self._connect_callback = None
-            self._run_callback(callback)
-        if self._connect_future is not None:
-            future = self._connect_future
-            self._connect_future = None
-            future.set_result(self)
-        self._connecting = False
-
-    def set_nodelay(self, value):
-        if (self.socket is not None and
-                self.socket.family in (socket.AF_INET, socket.AF_INET6)):
-            try:
-                self.socket.setsockopt(socket.IPPROTO_TCP,
-                                       socket.TCP_NODELAY, 1 if value else 0)
-            except socket.error as e:
-                # Sometimes setsockopt will fail if the socket is closed
-                # at the wrong time. This can happen with HTTPServer
-                # resetting the value to false between requests.
-                if e.errno != errno.EINVAL and not self._is_connreset(e):
-                    raise
-
-
-class SSLIOStream(IOStream):
-    """A utility class to write to and read from a non-blocking SSL socket.
-
-    If the socket passed to the constructor is already connected,
-    it should be wrapped with::
-
-        ssl.wrap_socket(sock, do_handshake_on_connect=False, **kwargs)
-
-    before constructing the `SSLIOStream`. Unconnected sockets will be
-    wrapped when `IOStream.connect` is finished.
-    """
-    def __init__(self, *args, **kwargs):
-        """The ``ssl_options`` keyword argument may either be an
-        `ssl.SSLContext` object or a dictionary of keyword arguments
-        for `ssl.wrap_socket`
-        """
-        self._ssl_options = kwargs.pop('ssl_options', _client_ssl_defaults)
-        super(SSLIOStream, self).__init__(*args, **kwargs)
-        self._ssl_accepting = True
-        self._handshake_reading = False
-        self._handshake_writing = False
-        self._ssl_connect_callback = None
-        self._server_hostname = None
-
-        # If the socket is already connected, attempt to start the handshake.
-        try:
-            self.socket.getpeername()
-        except socket.error:
-            pass
-        else:
-            # Indirectly start the handshake, which will run on the next
-            # IOLoop iteration and then the real IO state will be set in
-            # _handle_events.
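`_handle_connect` above retrieves the deferred result of a non-blocking ``connect()`` from ``SO_ERROR`` once the fd polls writable; the same pattern on a bare socket looks like this (sketch only, hypothetical helper name):

    import os
    import socket

    def finish_nonblocking_connect(sock):
        # A non-blocking connect() reports success or failure via
        # SO_ERROR rather than by raising at call time.
        err = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if err != 0:
            raise OSError(err, os.strerror(err))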
- self._add_io_state(self.io_loop.WRITE) - - def reading(self): - return self._handshake_reading or super(SSLIOStream, self).reading() - - def writing(self): - return self._handshake_writing or super(SSLIOStream, self).writing() - - def _do_ssl_handshake(self): - # Based on code from test_ssl.py in the python stdlib - try: - self._handshake_reading = False - self._handshake_writing = False - self.socket.do_handshake() - except ssl.SSLError as err: - if err.args[0] == ssl.SSL_ERROR_WANT_READ: - self._handshake_reading = True - return - elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: - self._handshake_writing = True - return - elif err.args[0] in (ssl.SSL_ERROR_EOF, - ssl.SSL_ERROR_ZERO_RETURN): - return self.close(exc_info=err) - elif err.args[0] == ssl.SSL_ERROR_SSL: - try: - peer = self.socket.getpeername() - except Exception: - peer = '(not connected)' - gen_log.warning("SSL Error on %s %s: %s", - self.socket.fileno(), peer, err) - return self.close(exc_info=err) - raise - except socket.error as err: - # Some port scans (e.g. nmap in -sT mode) have been known - # to cause do_handshake to raise EBADF and ENOTCONN, so make - # those errors quiet as well. - # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0 - if (self._is_connreset(err) or - err.args[0] in (errno.EBADF, errno.ENOTCONN)): - return self.close(exc_info=err) - raise - except AttributeError as err: - # On Linux, if the connection was reset before the call to - # wrap_socket, do_handshake will fail with an - # AttributeError. - return self.close(exc_info=err) - else: - self._ssl_accepting = False - if not self._verify_cert(self.socket.getpeercert()): - self.close() - return - self._run_ssl_connect_callback() - - def _run_ssl_connect_callback(self): - if self._ssl_connect_callback is not None: - callback = self._ssl_connect_callback - self._ssl_connect_callback = None - self._run_callback(callback) - if self._ssl_connect_future is not None: - future = self._ssl_connect_future - self._ssl_connect_future = None - future.set_result(self) - - def _verify_cert(self, peercert): - """Returns True if peercert is valid according to the configured - validation mode and hostname. - - The ssl handshake already tested the certificate for a valid - CA signature; the only thing that remains is to check - the hostname. - """ - if isinstance(self._ssl_options, dict): - verify_mode = self._ssl_options.get('cert_reqs', ssl.CERT_NONE) - elif isinstance(self._ssl_options, ssl.SSLContext): - verify_mode = self._ssl_options.verify_mode - assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) - if verify_mode == ssl.CERT_NONE or self._server_hostname is None: - return True - cert = self.socket.getpeercert() - if cert is None and verify_mode == ssl.CERT_REQUIRED: - gen_log.warning("No SSL certificate given") - return False - try: - ssl.match_hostname(peercert, self._server_hostname) - except ssl.CertificateError as e: - gen_log.warning("Invalid SSL certificate: %s" % e) - return False - else: - return True - - def _handle_read(self): - if self._ssl_accepting: - self._do_ssl_handshake() - return - super(SSLIOStream, self)._handle_read() - - def _handle_write(self): - if self._ssl_accepting: - self._do_ssl_handshake() - return - super(SSLIOStream, self)._handle_write() - - def connect(self, address, callback=None, server_hostname=None): - self._server_hostname = server_hostname - # Ignore the result of connect(). If it fails, - # wait_for_handshake will raise an error too. 
This is - # necessary for the old semantics of the connect callback - # (which takes no arguments). In 6.0 this can be refactored to - # be a regular coroutine. - fut = super(SSLIOStream, self).connect(address) - fut.add_done_callback(lambda f: f.exception()) - return self.wait_for_handshake(callback) - - def _handle_connect(self): - # Call the superclass method to check for errors. - super(SSLIOStream, self)._handle_connect() - if self.closed(): - return - # When the connection is complete, wrap the socket for SSL - # traffic. Note that we do this by overriding _handle_connect - # instead of by passing a callback to super().connect because - # user callbacks are enqueued asynchronously on the IOLoop, - # but since _handle_events calls _handle_connect immediately - # followed by _handle_write we need this to be synchronous. - # - # The IOLoop will get confused if we swap out self.socket while the - # fd is registered, so remove it now and re-register after - # wrap_socket(). - self.io_loop.remove_handler(self.socket) - old_state = self._state - self._state = None - self.socket = ssl_wrap_socket(self.socket, self._ssl_options, - server_hostname=self._server_hostname, - do_handshake_on_connect=False) - self._add_io_state(old_state) - - def wait_for_handshake(self, callback=None): - """Wait for the initial SSL handshake to complete. - - If a ``callback`` is given, it will be called with no - arguments once the handshake is complete; otherwise this - method returns a `.Future` which will resolve to the - stream itself after the handshake is complete. - - Once the handshake is complete, information such as - the peer's certificate and NPN/ALPN selections may be - accessed on ``self.socket``. - - This method is intended for use on server-side streams - or after using `IOStream.start_tls`; it should not be used - with `IOStream.connect` (which already waits for the - handshake to complete). It may only be called once per stream. - - .. versionadded:: 4.2 - - .. deprecated:: 5.1 - - The ``callback`` argument is deprecated and will be removed - in Tornado 6.0. Use the returned `.Future` instead. - - """ - if (self._ssl_connect_callback is not None or - self._ssl_connect_future is not None): - raise RuntimeError("Already waiting") - if callback is not None: - warnings.warn("callback argument is deprecated, use returned Future instead", - DeprecationWarning) - self._ssl_connect_callback = stack_context.wrap(callback) - future = None - else: - future = self._ssl_connect_future = Future() - if not self._ssl_accepting: - self._run_ssl_connect_callback() - return future - - def write_to_fd(self, data): - try: - return self.socket.send(data) - except ssl.SSLError as e: - if e.args[0] == ssl.SSL_ERROR_WANT_WRITE: - # In Python 3.5+, SSLSocket.send raises a WANT_WRITE error if - # the socket is not writeable; we need to transform this into - # an EWOULDBLOCK socket.error or a zero return value, - # either of which will be recognized by the caller of this - # method. Prior to Python 3.5, an unwriteable socket would - # simply return 0 bytes written. - return 0 - raise - finally: - # Avoid keeping to data, which can be a memoryview. 
- # See https://github.com/tornadoweb/tornado/pull/2008 - del data - - def read_from_fd(self, buf): - try: - if self._ssl_accepting: - # If the handshake hasn't finished yet, there can't be anything - # to read (attempting to read may or may not raise an exception - # depending on the SSL version) - return None - try: - return self.socket.recv_into(buf) - except ssl.SSLError as e: - # SSLError is a subclass of socket.error, so this except - # block must come first. - if e.args[0] == ssl.SSL_ERROR_WANT_READ: - return None - else: - raise - except socket.error as e: - if e.args[0] in _ERRNO_WOULDBLOCK: - return None - else: - raise - finally: - buf = None - - def _is_connreset(self, e): - if isinstance(e, ssl.SSLError) and e.args[0] == ssl.SSL_ERROR_EOF: - return True - return super(SSLIOStream, self)._is_connreset(e) - - -class PipeIOStream(BaseIOStream): - """Pipe-based `IOStream` implementation. - - The constructor takes an integer file descriptor (such as one returned - by `os.pipe`) rather than an open file object. Pipes are generally - one-way, so a `PipeIOStream` can be used for reading or writing but not - both. - """ - def __init__(self, fd, *args, **kwargs): - self.fd = fd - self._fio = io.FileIO(self.fd, "r+") - _set_nonblocking(fd) - super(PipeIOStream, self).__init__(*args, **kwargs) - - def fileno(self): - return self.fd - - def close_fd(self): - self._fio.close() - - def write_to_fd(self, data): - try: - return os.write(self.fd, data) - finally: - # Avoid keeping to data, which can be a memoryview. - # See https://github.com/tornadoweb/tornado/pull/2008 - del data - - def read_from_fd(self, buf): - try: - return self._fio.readinto(buf) - except (IOError, OSError) as e: - if errno_from_exception(e) == errno.EBADF: - # If the writing half of a pipe is closed, select will - # report it as readable but reads will fail with EBADF. - self.close(exc_info=e) - return None - else: - raise - finally: - buf = None - - -def doctests(): - import doctest - return doctest.DocTestSuite() +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utility classes to write to and read from non-blocking files and sockets. + +Contents: + +* `BaseIOStream`: Generic interface for reading and writing. +* `IOStream`: Implementation of BaseIOStream using non-blocking sockets. +* `SSLIOStream`: SSL-aware version of IOStream. +* `PipeIOStream`: Pipe-based IOStream implementation. 
+""" + +import asyncio +import collections +import errno +import io +import numbers +import os +import socket +import ssl +import sys +import re + +from tornado.concurrent import Future, future_set_result_unless_cancelled +from tornado import ioloop +from tornado.log import gen_log +from tornado.netutil import ssl_wrap_socket, _client_ssl_defaults, _server_ssl_defaults +from tornado.util import errno_from_exception + +import typing +from typing import ( + Union, + Optional, + Awaitable, + Callable, + Pattern, + Any, + Dict, + TypeVar, + Tuple, +) +from types import TracebackType + +if typing.TYPE_CHECKING: + from typing import Deque, List, Type # noqa: F401 + +_IOStreamType = TypeVar("_IOStreamType", bound="IOStream") + +try: + from tornado.platform.posix import _set_nonblocking +except ImportError: + _set_nonblocking = None # type: ignore + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore + +# These errnos indicate that a connection has been abruptly terminated. +# They should be caught and handled less noisily than other errors. +_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, errno.ETIMEDOUT) + +if hasattr(errno, "WSAECONNRESET"): + _ERRNO_CONNRESET += ( # type: ignore + errno.WSAECONNRESET, # type: ignore + errno.WSAECONNABORTED, # type: ignore + errno.WSAETIMEDOUT, # type: ignore + ) + +if sys.platform == "darwin": + # OSX appears to have a race condition that causes send(2) to return + # EPROTOTYPE if called while a socket is being torn down: + # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + # Since the socket is being closed anyway, treat this as an ECONNRESET + # instead of an unexpected error. + _ERRNO_CONNRESET += (errno.EPROTOTYPE,) # type: ignore + +# More non-portable errnos: +_ERRNO_INPROGRESS = (errno.EINPROGRESS,) + +if hasattr(errno, "WSAEINPROGRESS"): + _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore + +_WINDOWS = sys.platform.startswith("win") + + +class StreamClosedError(IOError): + """Exception raised by `IOStream` methods when the stream is closed. + + Note that the close callback is scheduled to run *after* other + callbacks on the stream (to allow for buffered data to be processed), + so you may see this error before you see the close callback. + + The ``real_error`` attribute contains the underlying error that caused + the stream to close (if any). + + .. versionchanged:: 4.3 + Added the ``real_error`` attribute. + """ + + def __init__(self, real_error: BaseException = None) -> None: + super(StreamClosedError, self).__init__("Stream is closed") + self.real_error = real_error + + +class UnsatisfiableReadError(Exception): + """Exception raised when a read cannot be satisfied. + + Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes`` + argument. + """ + + pass + + +class StreamBufferFullError(Exception): + """Exception raised by `IOStream` methods when the buffer is full. + """ + + +class _StreamBuffer(object): + """ + A specialized buffer that tries to avoid copies when large pieces + of data are encountered. 
+ """ + + def __init__(self) -> None: + # A sequence of (False, bytearray) and (True, memoryview) objects + self._buffers = ( + collections.deque() + ) # type: Deque[Tuple[bool, Union[bytearray, memoryview]]] + # Position in the first buffer + self._first_pos = 0 + self._size = 0 + + def __len__(self) -> int: + return self._size + + # Data above this size will be appended separately instead + # of extending an existing bytearray + _large_buf_threshold = 2048 + + def append(self, data: Union[bytes, bytearray, memoryview]) -> None: + """ + Append the given piece of data (should be a buffer-compatible object). + """ + size = len(data) + if size > self._large_buf_threshold: + if not isinstance(data, memoryview): + data = memoryview(data) + self._buffers.append((True, data)) + elif size > 0: + if self._buffers: + is_memview, b = self._buffers[-1] + new_buf = is_memview or len(b) >= self._large_buf_threshold + else: + new_buf = True + if new_buf: + self._buffers.append((False, bytearray(data))) + else: + b += data # type: ignore + + self._size += size + + def peek(self, size: int) -> memoryview: + """ + Get a view over at most ``size`` bytes (possibly fewer) at the + current buffer position. + """ + assert size > 0 + try: + is_memview, b = self._buffers[0] + except IndexError: + return memoryview(b"") + + pos = self._first_pos + if is_memview: + return typing.cast(memoryview, b[pos : pos + size]) + else: + return memoryview(b)[pos : pos + size] + + def advance(self, size: int) -> None: + """ + Advance the current buffer position by ``size`` bytes. + """ + assert 0 < size <= self._size + self._size -= size + pos = self._first_pos + + buffers = self._buffers + while buffers and size > 0: + is_large, b = buffers[0] + b_remain = len(b) - size - pos + if b_remain <= 0: + buffers.popleft() + size -= len(b) - pos + pos = 0 + elif is_large: + pos += size + size = 0 + else: + # Amortized O(1) shrink for Python 2 + pos += size + if len(b) <= 2 * pos: + del typing.cast(bytearray, b)[:pos] + pos = 0 + size = 0 + + assert size == 0 + self._first_pos = pos + + +class BaseIOStream(object): + """A utility class to write to and read from a non-blocking file or socket. + + We support a non-blocking ``write()`` and a family of ``read_*()`` + methods. When the operation completes, the ``Awaitable`` will resolve + with the data read (or ``None`` for ``write()``). All outstanding + ``Awaitables`` will resolve with a `StreamClosedError` when the + stream is closed; `.BaseIOStream.set_close_callback` can also be used + to be notified of a closed stream. + + When a stream is closed due to an error, the IOStream's ``error`` + attribute contains the exception object. + + Subclasses must implement `fileno`, `close_fd`, `write_to_fd`, + `read_from_fd`, and optionally `get_fd_error`. + + """ + + def __init__( + self, + max_buffer_size: int = None, + read_chunk_size: int = None, + max_write_buffer_size: int = None, + ) -> None: + """`BaseIOStream` constructor. + + :arg max_buffer_size: Maximum amount of incoming data to buffer; + defaults to 100MB. + :arg read_chunk_size: Amount of data to read at one time from the + underlying transport; defaults to 64KB. + :arg max_write_buffer_size: Amount of outgoing data to buffer; + defaults to unlimited. + + .. versionchanged:: 4.0 + Add the ``max_write_buffer_size`` parameter. Changed default + ``read_chunk_size`` to 64KB. + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been + removed. 
+ """ + self.io_loop = ioloop.IOLoop.current() + self.max_buffer_size = max_buffer_size or 104857600 + # A chunk size that is too close to max_buffer_size can cause + # spurious failures. + self.read_chunk_size = min(read_chunk_size or 65536, self.max_buffer_size // 2) + self.max_write_buffer_size = max_write_buffer_size + self.error = None # type: Optional[BaseException] + self._read_buffer = bytearray() + self._read_buffer_pos = 0 + self._read_buffer_size = 0 + self._user_read_buffer = False + self._after_user_read_buffer = None # type: Optional[bytearray] + self._write_buffer = _StreamBuffer() + self._total_write_index = 0 + self._total_write_done_index = 0 + self._read_delimiter = None # type: Optional[bytes] + self._read_regex = None # type: Optional[Pattern] + self._read_max_bytes = None # type: Optional[int] + self._read_bytes = None # type: Optional[int] + self._read_partial = False + self._read_until_close = False + self._read_future = None # type: Optional[Future] + self._write_futures = ( + collections.deque() + ) # type: Deque[Tuple[int, Future[None]]] + self._close_callback = None # type: Optional[Callable[[], None]] + self._connect_future = None # type: Optional[Future[IOStream]] + # _ssl_connect_future should be defined in SSLIOStream + # but it's here so we can clean it up in _signal_closed + # TODO: refactor that so subclasses can add additional futures + # to be cancelled. + self._ssl_connect_future = None # type: Optional[Future[SSLIOStream]] + self._connecting = False + self._state = None # type: Optional[int] + self._closed = False + + def fileno(self) -> Union[int, ioloop._Selectable]: + """Returns the file descriptor for this stream.""" + raise NotImplementedError() + + def close_fd(self) -> None: + """Closes the file underlying this stream. + + ``close_fd`` is called by `BaseIOStream` and should not be called + elsewhere; other users should call `close` instead. + """ + raise NotImplementedError() + + def write_to_fd(self, data: memoryview) -> int: + """Attempts to write ``data`` to the underlying file. + + Returns the number of bytes written. + """ + raise NotImplementedError() + + def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]: + """Attempts to read from the underlying file. + + Reads up to ``len(buf)`` bytes, storing them in the buffer. + Returns the number of bytes read. Returns None if there was + nothing to read (the socket returned `~errno.EWOULDBLOCK` or + equivalent), and zero on EOF. + + .. versionchanged:: 5.0 + + Interface redesigned to take a buffer and return a number + of bytes instead of a freshly-allocated object. + """ + raise NotImplementedError() + + def get_fd_error(self) -> Optional[Exception]: + """Returns information about any error on the underlying file. + + This method is called after the `.IOLoop` has signaled an error on the + file descriptor, and should return an Exception (such as `socket.error` + with additional information, or None if no such information is + available. + """ + return None + + def read_until_regex(self, regex: bytes, max_bytes: int = None) -> Awaitable[bytes]: + """Asynchronously read until we have matched the given regex. + + The result includes the data that matches the regex and anything + that came before it. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the regex is + not satisfied. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. 
The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. + + .. versionchanged:: 6.0 + + The ``callback`` argument was removed. Use the returned + `.Future` instead. + + """ + future = self._start_read() + self._read_regex = re.compile(regex) + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=e) + return future + except: + # Ensure that the future doesn't log an error because its + # failure was never examined. + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until(self, delimiter: bytes, max_bytes: int = None) -> Awaitable[bytes]: + """Asynchronously read until we have found the given delimiter. + + The result includes all the data read including the delimiter. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the delimiter + is not found. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. + + .. versionchanged:: 6.0 + + The ``callback`` argument was removed. Use the returned + `.Future` instead. + """ + future = self._start_read() + self._read_delimiter = delimiter + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=e) + return future + except: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_bytes(self, num_bytes: int, partial: bool = False) -> Awaitable[bytes]: + """Asynchronously read a number of bytes. + + If ``partial`` is true, data is returned as soon as we have + any bytes to return (but never more than ``num_bytes``) + + .. versionchanged:: 4.0 + Added the ``partial`` argument. The callback argument is now + optional and a `.Future` will be returned if it is omitted. + + .. versionchanged:: 6.0 + + The ``callback`` and ``streaming_callback`` arguments have + been removed. Use the returned `.Future` (and + ``partial=True`` for ``streaming_callback``) instead. + + """ + future = self._start_read() + assert isinstance(num_bytes, numbers.Integral) + self._read_bytes = num_bytes + self._read_partial = partial + try: + self._try_inline_read() + except: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_into(self, buf: bytearray, partial: bool = False) -> Awaitable[int]: + """Asynchronously read a number of bytes. + + ``buf`` must be a writable buffer into which data will be read. + + If ``partial`` is true, the callback is run as soon as any bytes + have been read. Otherwise, it is run when the ``buf`` has been + entirely filled with read data. + + .. versionadded:: 5.0 + + .. versionchanged:: 6.0 + + The ``callback`` argument was removed. Use the returned + `.Future` instead. 
+ + """ + future = self._start_read() + + # First copy data already in read buffer + available_bytes = self._read_buffer_size + n = len(buf) + if available_bytes >= n: + end = self._read_buffer_pos + n + buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos : end] + del self._read_buffer[:end] + self._after_user_read_buffer = self._read_buffer + elif available_bytes > 0: + buf[:available_bytes] = memoryview(self._read_buffer)[ + self._read_buffer_pos : + ] + + # Set up the supplied buffer as our temporary read buffer. + # The original (if it had any data remaining) has been + # saved for later. + self._user_read_buffer = True + self._read_buffer = buf + self._read_buffer_pos = 0 + self._read_buffer_size = available_bytes + self._read_bytes = n + self._read_partial = partial + + try: + self._try_inline_read() + except: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until_close(self) -> Awaitable[bytes]: + """Asynchronously reads all data from the socket until it is closed. + + This will buffer all available data until ``max_buffer_size`` + is reached. If flow control or cancellation are desired, use a + loop with `read_bytes(partial=True) <.read_bytes>` instead. + + .. versionchanged:: 4.0 + The callback argument is now optional and a `.Future` will + be returned if it is omitted. + + .. versionchanged:: 6.0 + + The ``callback`` and ``streaming_callback`` arguments have + been removed. Use the returned `.Future` (and `read_bytes` + with ``partial=True`` for ``streaming_callback``) instead. + + """ + future = self._start_read() + if self.closed(): + self._finish_read(self._read_buffer_size, False) + return future + self._read_until_close = True + try: + self._try_inline_read() + except: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def write(self, data: Union[bytes, memoryview]) -> "Future[None]": + """Asynchronously write the given data to this stream. + + This method returns a `.Future` that resolves (with a result + of ``None``) when the write has been completed. + + The ``data`` argument may be of type `bytes` or `memoryview`. + + .. versionchanged:: 4.0 + Now returns a `.Future` if no callback is given. + + .. versionchanged:: 4.5 + Added support for `memoryview` arguments. + + .. versionchanged:: 6.0 + + The ``callback`` argument was removed. Use the returned + `.Future` instead. + + """ + self._check_closed() + if data: + if ( + self.max_write_buffer_size is not None + and len(self._write_buffer) + len(data) > self.max_write_buffer_size + ): + raise StreamBufferFullError("Reached maximum write buffer size") + self._write_buffer.append(data) + self._total_write_index += len(data) + future = Future() # type: Future[None] + future.add_done_callback(lambda f: f.exception()) + self._write_futures.append((self._total_write_index, future)) + if not self._connecting: + self._handle_write() + if self._write_buffer: + self._add_io_state(self.io_loop.WRITE) + self._maybe_add_error_listener() + return future + + def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None: + """Call the given callback when the stream is closed. + + This mostly is not necessary for applications that use the + `.Future` interface; all outstanding ``Futures`` will resolve + with a `StreamClosedError` when the stream is closed. However, + it is still useful as a way to signal that the stream has been + closed while no other read or write is in progress. 
+ + Unlike other callback-based interfaces, ``set_close_callback`` + was not removed in Tornado 6.0. + """ + self._close_callback = callback + self._maybe_add_error_listener() + + def close( + self, + exc_info: Union[ + None, + bool, + BaseException, + Tuple[ + "Optional[Type[BaseException]]", + Optional[BaseException], + Optional[TracebackType], + ], + ] = False, + ) -> None: + """Close this stream. + + If ``exc_info`` is true, set the ``error`` attribute to the current + exception from `sys.exc_info` (or if ``exc_info`` is a tuple, + use that instead of `sys.exc_info`). + """ + if not self.closed(): + if exc_info: + if isinstance(exc_info, tuple): + self.error = exc_info[1] + elif isinstance(exc_info, BaseException): + self.error = exc_info + else: + exc_info = sys.exc_info() + if any(exc_info): + self.error = exc_info[1] + if self._read_until_close: + self._read_until_close = False + self._finish_read(self._read_buffer_size, False) + elif self._read_future is not None: + # resolve reads that are pending and ready to complete + try: + pos = self._find_read_pos() + except UnsatisfiableReadError: + pass + else: + if pos is not None: + self._read_from_buffer(pos) + if self._state is not None: + self.io_loop.remove_handler(self.fileno()) + self._state = None + self.close_fd() + self._closed = True + self._signal_closed() + + def _signal_closed(self) -> None: + futures = [] # type: List[Future] + if self._read_future is not None: + futures.append(self._read_future) + self._read_future = None + futures += [future for _, future in self._write_futures] + self._write_futures.clear() + if self._connect_future is not None: + futures.append(self._connect_future) + self._connect_future = None + for future in futures: + if not future.done(): + future.set_exception(StreamClosedError(real_error=self.error)) + # Reference the exception to silence warnings. Annoyingly, + # this raises if the future was cancelled, but just + # returns any other error. + try: + future.exception() + except asyncio.CancelledError: + pass + if self._ssl_connect_future is not None: + # _ssl_connect_future expects to see the real exception (typically + # an ssl.SSLError), not just StreamClosedError. + if not self._ssl_connect_future.done(): + if self.error is not None: + self._ssl_connect_future.set_exception(self.error) + else: + self._ssl_connect_future.set_exception(StreamClosedError()) + self._ssl_connect_future.exception() + self._ssl_connect_future = None + if self._close_callback is not None: + cb = self._close_callback + self._close_callback = None + self.io_loop.add_callback(cb) + # Clear the buffers so they can be cleared immediately even + # if the IOStream object is kept alive by a reference cycle. + # TODO: Clear the read buffer too; it currently breaks some tests. + self._write_buffer = None # type: ignore + + def reading(self) -> bool: + """Returns ``True`` if we are currently reading from the stream.""" + return self._read_future is not None + + def writing(self) -> bool: + """Returns ``True`` if we are currently writing to the stream.""" + return bool(self._write_buffer) + + def closed(self) -> bool: + """Returns ``True`` if the stream has been closed.""" + return self._closed + + def set_nodelay(self, value: bool) -> None: + """Sets the no-delay flag for this stream. + + By default, data written to TCP streams may be held for a time + to make the most efficient use of bandwidth (according to + Nagle's algorithm). 
The no-delay flag requests that data be + written as soon as possible, even if doing so would consume + additional bandwidth. + + This flag is currently defined only for TCP-based ``IOStreams``. + + .. versionadded:: 3.1 + """ + pass + + def _handle_connect(self) -> None: + raise NotImplementedError() + + def _handle_events(self, fd: Union[int, ioloop._Selectable], events: int) -> None: + if self.closed(): + gen_log.warning("Got events for closed stream %s", fd) + return + try: + if self._connecting: + # Most IOLoops will report a write failed connect + # with the WRITE event, but SelectIOLoop reports a + # READ as well so we must check for connecting before + # either. + self._handle_connect() + if self.closed(): + return + if events & self.io_loop.READ: + self._handle_read() + if self.closed(): + return + if events & self.io_loop.WRITE: + self._handle_write() + if self.closed(): + return + if events & self.io_loop.ERROR: + self.error = self.get_fd_error() + # We may have queued up a user callback in _handle_read or + # _handle_write, so don't close the IOStream until those + # callbacks have had a chance to run. + self.io_loop.add_callback(self.close) + return + state = self.io_loop.ERROR + if self.reading(): + state |= self.io_loop.READ + if self.writing(): + state |= self.io_loop.WRITE + if state == self.io_loop.ERROR and self._read_buffer_size == 0: + # If the connection is idle, listen for reads too so + # we can tell if the connection is closed. If there is + # data in the read buffer we won't run the close callback + # yet anyway, so we don't need to listen in this case. + state |= self.io_loop.READ + if state != self._state: + assert ( + self._state is not None + ), "shouldn't happen: _handle_events without self._state" + self._state = state + self.io_loop.update_handler(self.fileno(), self._state) + except UnsatisfiableReadError as e: + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=e) + except Exception as e: + gen_log.error("Uncaught exception, closing connection.", exc_info=True) + self.close(exc_info=e) + raise + + def _read_to_buffer_loop(self) -> Optional[int]: + # This method is called from _handle_read and _try_inline_read. + if self._read_bytes is not None: + target_bytes = self._read_bytes # type: Optional[int] + elif self._read_max_bytes is not None: + target_bytes = self._read_max_bytes + elif self.reading(): + # For read_until without max_bytes, or + # read_until_close, read as much as we can before + # scanning for the delimiter. + target_bytes = None + else: + target_bytes = 0 + next_find_pos = 0 + while not self.closed(): + # Read from the socket until we get EWOULDBLOCK or equivalent. + # SSL sockets do some internal buffering, and if the data is + # sitting in the SSL object's buffer select() and friends + # can't see it; the only way to find out if it's there is to + # try to read it. + if self._read_to_buffer() == 0: + break + + # If we've read all the bytes we can use, break out of + # this loop. + + # If we've reached target_bytes, we know we're done. + if target_bytes is not None and self._read_buffer_size >= target_bytes: + break + + # Otherwise, we need to call the more expensive find_read_pos. + # It's inefficient to do this on every read, so instead + # do it on the first read and whenever the read buffer + # size has doubled. 
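The comment above is the reason `_find_read_pos` only runs at doubling buffer sizes; the check itself follows just below. A small worked example of that schedule (``find_read_pos_schedule`` is an illustrative helper, not part of the module):

    def find_read_pos_schedule(chunk_size, total):
        # Buffer sizes at which _find_read_pos would run while data
        # arrives in fixed-size chunks and the delimiter never shows up.
        points, size, next_find_pos = [], 0, 0
        while size < total:
            size += chunk_size
            if size >= next_find_pos:
                points.append(size)        # scan happens here
                next_find_pos = size * 2
        return points

    # With 64KB chunks and 1MB buffered: scans at 64K, 128K, 256K,
    # 512K and 1M, so total scan work stays linear in buffered bytes.
    print(find_read_pos_schedule(65536, 1 << 20))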
+ if self._read_buffer_size >= next_find_pos: + pos = self._find_read_pos() + if pos is not None: + return pos + next_find_pos = self._read_buffer_size * 2 + return self._find_read_pos() + + def _handle_read(self) -> None: + try: + pos = self._read_to_buffer_loop() + except UnsatisfiableReadError: + raise + except asyncio.CancelledError: + raise + except Exception as e: + gen_log.warning("error on read: %s" % e) + self.close(exc_info=e) + return + if pos is not None: + self._read_from_buffer(pos) + + def _start_read(self) -> Future: + if self._read_future is not None: + # It is an error to start a read while a prior read is unresolved. + # However, if the prior read is unresolved because the stream was + # closed without satisfying it, it's better to raise + # StreamClosedError instead of AssertionError. In particular, this + # situation occurs in harmless situations in http1connection.py and + # an AssertionError would be logged noisily. + # + # On the other hand, it is legal to start a new read while the + # stream is closed, in case the read can be satisfied from the + # read buffer. So we only want to check the closed status of the + # stream if we need to decide what kind of error to raise for + # "already reading". + # + # These conditions have proven difficult to test; we have no + # unittests that reliably verify this behavior so be careful + # when making changes here. See #2651 and #2719. + self._check_closed() + assert self._read_future is None, "Already reading" + self._read_future = Future() + return self._read_future + + def _finish_read(self, size: int, streaming: bool) -> None: + if self._user_read_buffer: + self._read_buffer = self._after_user_read_buffer or bytearray() + self._after_user_read_buffer = None + self._read_buffer_pos = 0 + self._read_buffer_size = len(self._read_buffer) + self._user_read_buffer = False + result = size # type: Union[int, bytes] + else: + result = self._consume(size) + if self._read_future is not None: + future = self._read_future + self._read_future = None + future_set_result_unless_cancelled(future, result) + self._maybe_add_error_listener() + + def _try_inline_read(self) -> None: + """Attempt to complete the current read operation from buffered data. + + If the read can be completed without blocking, schedules the + read callback on the next IOLoop iteration; otherwise starts + listening for reads on the socket. + """ + # See if we've already got the data from a previous read + pos = self._find_read_pos() + if pos is not None: + self._read_from_buffer(pos) + return + self._check_closed() + pos = self._read_to_buffer_loop() + if pos is not None: + self._read_from_buffer(pos) + return + # We couldn't satisfy the read inline, so make sure we're + # listening for new data unless the stream is closed. + if not self.closed(): + self._add_io_state(ioloop.IOLoop.READ) + + def _read_to_buffer(self) -> Optional[int]: + """Reads from the socket and appends the result to the read buffer. + + Returns the number of bytes read. Returns 0 if there is nothing + to read (i.e. the read returns EWOULDBLOCK or equivalent). On + error closes the socket and raises an exception. 
+ """ + try: + while True: + try: + if self._user_read_buffer: + buf = memoryview(self._read_buffer)[ + self._read_buffer_size : + ] # type: Union[memoryview, bytearray] + else: + buf = bytearray(self.read_chunk_size) + bytes_read = self.read_from_fd(buf) + except (socket.error, IOError, OSError) as e: + if errno_from_exception(e) == errno.EINTR: + continue + # ssl.SSLError is a subclass of socket.error + if self._is_connreset(e): + # Treat ECONNRESET as a connection close rather than + # an error to minimize log spam (the exception will + # be available on self.error for apps that care). + self.close(exc_info=e) + return None + self.close(exc_info=e) + raise + break + if bytes_read is None: + return 0 + elif bytes_read == 0: + self.close() + return 0 + if not self._user_read_buffer: + self._read_buffer += memoryview(buf)[:bytes_read] + self._read_buffer_size += bytes_read + finally: + # Break the reference to buf so we don't waste a chunk's worth of + # memory in case an exception hangs on to our stack frame. + del buf + if self._read_buffer_size > self.max_buffer_size: + gen_log.error("Reached maximum read buffer size") + self.close() + raise StreamBufferFullError("Reached maximum read buffer size") + return bytes_read + + def _read_from_buffer(self, pos: int) -> None: + """Attempts to complete the currently-pending read from the buffer. + + The argument is either a position in the read buffer or None, + as returned by _find_read_pos. + """ + self._read_bytes = self._read_delimiter = self._read_regex = None + self._read_partial = False + self._finish_read(pos, False) + + def _find_read_pos(self) -> Optional[int]: + """Attempts to find a position in the read buffer that satisfies + the currently-pending read. + + Returns a position in the buffer if the current read can be satisfied, + or None if it cannot. + """ + if self._read_bytes is not None and ( + self._read_buffer_size >= self._read_bytes + or (self._read_partial and self._read_buffer_size > 0) + ): + num_bytes = min(self._read_bytes, self._read_buffer_size) + return num_bytes + elif self._read_delimiter is not None: + # Multi-byte delimiters (e.g. '\r\n') may straddle two + # chunks in the read buffer, so we can't easily find them + # without collapsing the buffer. However, since protocols + # using delimited reads (as opposed to reads of a known + # length) tend to be "line" oriented, the delimiter is likely + # to be in the first few chunks. Merge the buffer gradually + # since large merges are relatively expensive and get undone in + # _consume(). 
+            if self._read_buffer:
+                loc = self._read_buffer.find(
+                    self._read_delimiter, self._read_buffer_pos
+                )
+                if loc != -1:
+                    loc -= self._read_buffer_pos
+                    delimiter_len = len(self._read_delimiter)
+                    self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
+                    return loc + delimiter_len
+                self._check_max_bytes(self._read_delimiter, self._read_buffer_size)
+        elif self._read_regex is not None:
+            if self._read_buffer:
+                m = self._read_regex.search(self._read_buffer, self._read_buffer_pos)
+                if m is not None:
+                    loc = m.end() - self._read_buffer_pos
+                    self._check_max_bytes(self._read_regex, loc)
+                    return loc
+            self._check_max_bytes(self._read_regex, self._read_buffer_size)
+        return None
+
+    def _check_max_bytes(self, delimiter: Union[bytes, Pattern], size: int) -> None:
+        if self._read_max_bytes is not None and size > self._read_max_bytes:
+            raise UnsatisfiableReadError(
+                "delimiter %r not found within %d bytes"
+                % (delimiter, self._read_max_bytes)
+            )
+
+    def _handle_write(self) -> None:
+        while True:
+            size = len(self._write_buffer)
+            if not size:
+                break
+            assert size > 0
+            try:
+                if _WINDOWS:
+                    # On windows, socket.send blows up if given a
+                    # write buffer that's too large, instead of just
+                    # returning the number of bytes it was able to
+                    # process. Therefore we must not call socket.send
+                    # with more than 128KB at a time.
+                    size = 128 * 1024
+
+                num_bytes = self.write_to_fd(self._write_buffer.peek(size))
+                if num_bytes == 0:
+                    break
+                self._write_buffer.advance(num_bytes)
+                self._total_write_done_index += num_bytes
+            except (socket.error, IOError, OSError) as e:
+                if e.args[0] in _ERRNO_WOULDBLOCK:
+                    break
+                else:
+                    if not self._is_connreset(e):
+                        # Broken pipe errors are usually caused by connection
+                        # reset, and it's better not to log EPIPE errors to
+                        # minimize log spam
+                        gen_log.warning("Write error on %s: %s", self.fileno(), e)
+                    self.close(exc_info=e)
+                    return
+
+        while self._write_futures:
+            index, future = self._write_futures[0]
+            if index > self._total_write_done_index:
+                break
+            self._write_futures.popleft()
+            future_set_result_unless_cancelled(future, None)
+
+    def _consume(self, loc: int) -> bytes:
+        # Consume loc bytes from the read buffer and return them
+        if loc == 0:
+            return b""
+        assert loc <= self._read_buffer_size
+        # Slice the bytearray buffer into bytes, without intermediate copying
+        b = (
+            memoryview(self._read_buffer)[
+                self._read_buffer_pos : self._read_buffer_pos + loc
+            ]
+        ).tobytes()
+        self._read_buffer_pos += loc
+        self._read_buffer_size -= loc
+        # Amortized O(1) shrink
+        # (this heuristic is implemented natively in Python 3.4+
+        # but is replicated here for Python 2)
+        if self._read_buffer_pos > self._read_buffer_size:
+            del self._read_buffer[: self._read_buffer_pos]
+            self._read_buffer_pos = 0
+        return b
+
+    def _check_closed(self) -> None:
+        if self.closed():
+            raise StreamClosedError(real_error=self.error)
+
+    def _maybe_add_error_listener(self) -> None:
+        # This method is part of an optimization: to detect a connection that
+        # is closed when we're not actively reading or writing, we must listen
+        # for read events. However, it is inefficient to do this when the
+        # connection is first established because we are going to read or write
+        # immediately anyway. Instead, we insert checks at various times to
+        # see if the connection is idle and add the read listener then.
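+        # As an illustration: a keep-alive connection that sits between
+        # requests has no read or write pending; without a READ listener
+        # registered here, a client disconnect would go unnoticed until
+        # the next write attempt failed.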
+        if self._state is None or self._state == ioloop.IOLoop.ERROR:
+            if (
+                not self.closed()
+                and self._read_buffer_size == 0
+                and self._close_callback is not None
+            ):
+                self._add_io_state(ioloop.IOLoop.READ)
+
+    def _add_io_state(self, state: int) -> None:
+        """Adds `state` (IOLoop.{READ,WRITE} flags) to our event handler.
+
+        Implementation notes: Reads and writes have a fast path and a
+        slow path. The fast path reads synchronously from socket
+        buffers, while the slow path uses `_add_io_state` to schedule
+        an IOLoop callback.
+
+        To detect closed connections, we must have called
+        `_add_io_state` at some point, but we want to delay this as
+        much as possible so we don't have to set an `IOLoop.ERROR`
+        listener that will be overwritten by the next slow-path
+        operation. If a sequence of fast-path ops does not end in a
+        slow-path op (e.g. for an ``@asynchronous`` long-poll request),
+        we must add the error handler.
+
+        TODO: reevaluate this now that callbacks are gone.
+
+        """
+        if self.closed():
+            # connection has been closed, so there can be no future events
+            return
+        if self._state is None:
+            self._state = ioloop.IOLoop.ERROR | state
+            self.io_loop.add_handler(self.fileno(), self._handle_events, self._state)
+        elif not self._state & state:
+            self._state = self._state | state
+            self.io_loop.update_handler(self.fileno(), self._state)
+
+    def _is_connreset(self, exc: BaseException) -> bool:
+        """Return ``True`` if exc is ECONNRESET or equivalent.
+
+        May be overridden in subclasses.
+        """
+        return (
+            isinstance(exc, (socket.error, IOError))
+            and errno_from_exception(exc) in _ERRNO_CONNRESET
+        )
+
+
+class IOStream(BaseIOStream):
+    r"""Socket-based `IOStream` implementation.
+
+    This class supports the read and write methods from `BaseIOStream`
+    plus a `connect` method.
+
+    The ``socket`` parameter may either be connected or unconnected.
+    For server operations the socket is the result of calling
+    `socket.accept <socket.socket.accept>`. For client operations the
+    socket is created with `socket.socket`, and may either be
+    connected before passing it to the `IOStream` or connected with
+    `IOStream.connect`.
+
+    A very simple (and broken) HTTP client using this class:
+
+    .. testcode::
+
+        import tornado.ioloop
+        import tornado.iostream
+        import socket
+
+        async def main():
+            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
+            stream = tornado.iostream.IOStream(s)
+            await stream.connect(("friendfeed.com", 80))
+            await stream.write(b"GET / HTTP/1.0\r\nHost: friendfeed.com\r\n\r\n")
+            header_data = await stream.read_until(b"\r\n\r\n")
+            headers = {}
+            for line in header_data.split(b"\r\n"):
+                parts = line.split(b":")
+                if len(parts) == 2:
+                    headers[parts[0].strip()] = parts[1].strip()
+            body_data = await stream.read_bytes(int(headers[b"Content-Length"]))
+            print(body_data)
+            stream.close()
+
+        if __name__ == '__main__':
+            tornado.ioloop.IOLoop.current().run_sync(main)
+
+    .. testoutput::
+       :hide:
+
+    """
+
+    def __init__(self, socket: socket.socket, *args: Any, **kwargs: Any) -> None:
+        self.socket = socket
+        self.socket.setblocking(False)
+        super(IOStream, self).__init__(*args, **kwargs)
+
+    def fileno(self) -> Union[int, ioloop._Selectable]:
+        return self.socket
+
+    def close_fd(self) -> None:
+        self.socket.close()
+        self.socket = None  # type: ignore
+
+    def get_fd_error(self) -> Optional[Exception]:
+        errno = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
+        return socket.error(errno, os.strerror(errno))
+
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
+        try:
+            return self.socket.recv_into(buf, len(buf))
+        except socket.error as e:
+            if e.args[0] in _ERRNO_WOULDBLOCK:
+                return None
+            else:
+                raise
+        finally:
+            del buf
+
+    def write_to_fd(self, data: memoryview) -> int:
+        try:
+            return self.socket.send(data)  # type: ignore
+        finally:
+            # Avoid keeping a reference to data, which can be a memoryview.
+            # See https://github.com/tornadoweb/tornado/pull/2008
+            del data
+
+    def connect(
+        self: _IOStreamType, address: tuple, server_hostname: str = None
+    ) -> "Future[_IOStreamType]":
+        """Connects the socket to a remote address without blocking.
+
+        May only be called if the socket passed to the constructor was
+        not previously connected. The address parameter is in the
+        same format as for `socket.connect <socket.socket.connect>` for
+        the type of socket passed to the IOStream constructor,
+        e.g. an ``(ip, port)`` tuple. Hostnames are accepted here,
+        but will be resolved synchronously and block the IOLoop.
+        If you have a hostname instead of an IP address, the `.TCPClient`
+        class is recommended instead of calling this method directly.
+        `.TCPClient` will do asynchronous DNS resolution and handle
+        both IPv4 and IPv6.
+
+        This method returns a `.Future` whose result after a successful
+        connection will be the stream itself.
+
+        In SSL mode, the ``server_hostname`` parameter will be used
+        for certificate validation (unless disabled in the
+        ``ssl_options``) and SNI (if supported).
+
+        Note that it is safe to call `IOStream.write
+        <BaseIOStream.write>` while the connection is pending, in
+        which case the data will be written as soon as the connection
+        is ready. Calling `IOStream` read methods before the socket is
+        connected works on some platforms but is non-portable.
+
+        .. versionchanged:: 4.0
+           If no callback is given, returns a `.Future`.
+
+        .. versionchanged:: 4.2
+           SSL certificates are validated by default; pass
+           ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a
+           suitably-configured `ssl.SSLContext` to the
+           `SSLIOStream` constructor to disable.
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           `.Future` instead.
+
+        """
+        self._connecting = True
+        future = Future()  # type: Future[_IOStreamType]
+        self._connect_future = typing.cast("Future[IOStream]", future)
+        try:
+            self.socket.connect(address)
+        except socket.error as e:
+            # In non-blocking mode we expect connect() to raise an
+            # exception with EINPROGRESS or EWOULDBLOCK.
+            #
+            # On freebsd, other errors such as ECONNREFUSED may be
+            # returned immediately when attempting to connect to
+            # localhost, so handle them the same way as an error
+            # reported later in _handle_connect.
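+            #
+            # As an illustration (platform-dependent, not exhaustive):
+            # connecting to a closed port on localhost may raise
+            # ECONNREFUSED synchronously here on FreeBSD, while on Linux
+            # the same failure typically surfaces later through SO_ERROR
+            # in _handle_connect.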
+            if (
+                errno_from_exception(e) not in _ERRNO_INPROGRESS
+                and errno_from_exception(e) not in _ERRNO_WOULDBLOCK
+            ):
+                # The error is delivered to whoever awaits the connect
+                # future; close() records it as self.error.
+                self.close(exc_info=e)
+                return future
+        self._add_io_state(self.io_loop.WRITE)
+        return future
+
+    def start_tls(
+        self,
+        server_side: bool,
+        ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
+        server_hostname: str = None,
+    ) -> Awaitable["SSLIOStream"]:
+        """Convert this `IOStream` to an `SSLIOStream`.
+
+        This enables protocols that begin in clear-text mode and
+        switch to SSL after some initial negotiation (such as the
+        ``STARTTLS`` extension to SMTP and IMAP).
+
+        This method cannot be used if there are outstanding reads
+        or writes on the stream, or if there is any data in the
+        IOStream's buffer (data in the operating system's socket
+        buffer is allowed). This means it must generally be used
+        immediately after reading or writing the last clear-text
+        data. It can also be used immediately after connecting,
+        before any reads or writes.
+
+        The ``ssl_options`` argument may be either an `ssl.SSLContext`
+        object or a dictionary of keyword arguments for the
+        `ssl.wrap_socket` function. The ``server_hostname`` argument
+        will be used for certificate validation unless disabled
+        in the ``ssl_options``.
+
+        This method returns a `.Future` whose result is the new
+        `SSLIOStream`. After this method has been called,
+        any other operation on the original stream is undefined.
+
+        If a close callback is defined on this stream, it will be
+        transferred to the new stream.
+
+        .. versionadded:: 4.0
+
+        .. versionchanged:: 4.2
+           SSL certificates are validated by default; pass
+           ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a
+           suitably-configured `ssl.SSLContext` to disable.
+        """
+        if (
+            self._read_future
+            or self._write_futures
+            or self._connect_future
+            or self._closed
+            or self._read_buffer
+            or self._write_buffer
+        ):
+            raise ValueError("IOStream is not idle; cannot convert to SSL")
+        if ssl_options is None:
+            if server_side:
+                ssl_options = _server_ssl_defaults
+            else:
+                ssl_options = _client_ssl_defaults
+
+        socket = self.socket
+        self.io_loop.remove_handler(socket)
+        self.socket = None  # type: ignore
+        socket = ssl_wrap_socket(
+            socket,
+            ssl_options,
+            server_hostname=server_hostname,
+            server_side=server_side,
+            do_handshake_on_connect=False,
+        )
+        orig_close_callback = self._close_callback
+        self._close_callback = None
+
+        future = Future()  # type: Future[SSLIOStream]
+        ssl_stream = SSLIOStream(socket, ssl_options=ssl_options)
+        ssl_stream.set_close_callback(orig_close_callback)
+        ssl_stream._ssl_connect_future = future
+        ssl_stream.max_buffer_size = self.max_buffer_size
+        ssl_stream.read_chunk_size = self.read_chunk_size
+        return future
+
+    def _handle_connect(self) -> None:
+        try:
+            err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
+        except socket.error as e:
+            # Hurd doesn't allow SO_ERROR for loopback sockets because all
+            # errors for such sockets are reported synchronously.
+            if errno_from_exception(e) == errno.ENOPROTOOPT:
+                err = 0
+            else:
+                # Re-raise rather than fall through with `err` unbound.
+                raise
+        if err != 0:
+            self.error = socket.error(err, os.strerror(err))
+            # IOLoop implementations may vary: some of them return
+            # an error state before the socket becomes writable, so
+            # in that case a connection failure would be handled by the
+            # error path in _handle_events instead of here.
+            if self._connect_future is None:
+                gen_log.warning(
+                    "Connect error on fd %s: %s",
+                    self.socket.fileno(),
+                    errno.errorcode[err],
+                )
+            self.close()
+            return
+        if self._connect_future is not None:
+            future = self._connect_future
+            self._connect_future = None
+            future_set_result_unless_cancelled(future, self)
+        self._connecting = False
+
+    def set_nodelay(self, value: bool) -> None:
+        if self.socket is not None and self.socket.family in (
+            socket.AF_INET,
+            socket.AF_INET6,
+        ):
+            try:
+                self.socket.setsockopt(
+                    socket.IPPROTO_TCP, socket.TCP_NODELAY, 1 if value else 0
+                )
+            except socket.error as e:
+                # Sometimes setsockopt will fail if the socket is closed
+                # at the wrong time. This can happen with HTTPServer
+                # resetting the value to ``False`` between requests.
+                if e.errno != errno.EINVAL and not self._is_connreset(e):
+                    raise
+
+
+class SSLIOStream(IOStream):
+    """A utility class to write to and read from a non-blocking SSL socket.
+
+    If the socket passed to the constructor is already connected,
+    it should be wrapped with::
+
+        ssl.wrap_socket(sock, do_handshake_on_connect=False, **kwargs)
+
+    before constructing the `SSLIOStream`. Unconnected sockets will be
+    wrapped when `IOStream.connect` is finished.
+    """
+
+    socket = None  # type: ssl.SSLSocket
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        """The ``ssl_options`` keyword argument may either be an
+        `ssl.SSLContext` object or a dictionary of keyword arguments
+        for `ssl.wrap_socket`
+        """
+        self._ssl_options = kwargs.pop("ssl_options", _client_ssl_defaults)
+        super(SSLIOStream, self).__init__(*args, **kwargs)
+        self._ssl_accepting = True
+        self._handshake_reading = False
+        self._handshake_writing = False
+        self._server_hostname = None  # type: Optional[str]
+
+        # If the socket is already connected, attempt to start the handshake.
+        try:
+            self.socket.getpeername()
+        except socket.error:
+            pass
+        else:
+            # Indirectly start the handshake, which will run on the next
+            # IOLoop iteration and then the real IO state will be set in
+            # _handle_events.
+            self._add_io_state(self.io_loop.WRITE)
+
+    def reading(self) -> bool:
+        return self._handshake_reading or super(SSLIOStream, self).reading()
+
+    def writing(self) -> bool:
+        return self._handshake_writing or super(SSLIOStream, self).writing()
+
+    def _do_ssl_handshake(self) -> None:
+        # Based on code from test_ssl.py in the python stdlib
+        try:
+            self._handshake_reading = False
+            self._handshake_writing = False
+            self.socket.do_handshake()
+        except ssl.SSLError as err:
+            if err.args[0] == ssl.SSL_ERROR_WANT_READ:
+                self._handshake_reading = True
+                return
+            elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
+                self._handshake_writing = True
+                return
+            elif err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
+                return self.close(exc_info=err)
+            elif err.args[0] == ssl.SSL_ERROR_SSL:
+                try:
+                    peer = self.socket.getpeername()
+                except Exception:
+                    peer = "(not connected)"
+                gen_log.warning(
+                    "SSL Error on %s %s: %s", self.socket.fileno(), peer, err
+                )
+                return self.close(exc_info=err)
+            raise
+        except socket.error as err:
+            # Some port scans (e.g. nmap in -sT mode) have been known
+            # to cause do_handshake to raise EBADF and ENOTCONN, so make
+            # those errors quiet as well.
+            # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
+            # Errno 0 is also possible in some cases (nc -z).
+ # https://github.com/tornadoweb/tornado/issues/2504 + if self._is_connreset(err) or err.args[0] in ( + 0, + errno.EBADF, + errno.ENOTCONN, + ): + return self.close(exc_info=err) + raise + except AttributeError as err: + # On Linux, if the connection was reset before the call to + # wrap_socket, do_handshake will fail with an + # AttributeError. + return self.close(exc_info=err) + else: + self._ssl_accepting = False + if not self._verify_cert(self.socket.getpeercert()): + self.close() + return + self._finish_ssl_connect() + + def _finish_ssl_connect(self) -> None: + if self._ssl_connect_future is not None: + future = self._ssl_connect_future + self._ssl_connect_future = None + future_set_result_unless_cancelled(future, self) + + def _verify_cert(self, peercert: Any) -> bool: + """Returns ``True`` if peercert is valid according to the configured + validation mode and hostname. + + The ssl handshake already tested the certificate for a valid + CA signature; the only thing that remains is to check + the hostname. + """ + if isinstance(self._ssl_options, dict): + verify_mode = self._ssl_options.get("cert_reqs", ssl.CERT_NONE) + elif isinstance(self._ssl_options, ssl.SSLContext): + verify_mode = self._ssl_options.verify_mode + assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) + if verify_mode == ssl.CERT_NONE or self._server_hostname is None: + return True + cert = self.socket.getpeercert() + if cert is None and verify_mode == ssl.CERT_REQUIRED: + gen_log.warning("No SSL certificate given") + return False + try: + ssl.match_hostname(peercert, self._server_hostname) + except ssl.CertificateError as e: + gen_log.warning("Invalid SSL certificate: %s" % e) + return False + else: + return True + + def _handle_read(self) -> None: + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_read() + + def _handle_write(self) -> None: + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_write() + + def connect( + self, address: Tuple, server_hostname: str = None + ) -> "Future[SSLIOStream]": + self._server_hostname = server_hostname + # Ignore the result of connect(). If it fails, + # wait_for_handshake will raise an error too. This is + # necessary for the old semantics of the connect callback + # (which takes no arguments). In 6.0 this can be refactored to + # be a regular coroutine. + # TODO: This is trickier than it looks, since if write() + # is called with a connect() pending, we want the connect + # to resolve before the write. Or do we care about this? + # (There's a test for it, but I think in practice users + # either wait for the connect before performing a write or + # they don't care about the connect Future at all) + fut = super(SSLIOStream, self).connect(address) + fut.add_done_callback(lambda f: f.exception()) + return self.wait_for_handshake() + + def _handle_connect(self) -> None: + # Call the superclass method to check for errors. + super(SSLIOStream, self)._handle_connect() + if self.closed(): + return + # When the connection is complete, wrap the socket for SSL + # traffic. Note that we do this by overriding _handle_connect + # instead of by passing a callback to super().connect because + # user callbacks are enqueued asynchronously on the IOLoop, + # but since _handle_events calls _handle_connect immediately + # followed by _handle_write we need this to be synchronous. 
+        #
+        # The IOLoop will get confused if we swap out self.socket while the
+        # fd is registered, so remove it now and re-register after
+        # wrap_socket().
+        self.io_loop.remove_handler(self.socket)
+        old_state = self._state
+        assert old_state is not None
+        self._state = None
+        self.socket = ssl_wrap_socket(
+            self.socket,
+            self._ssl_options,
+            server_hostname=self._server_hostname,
+            do_handshake_on_connect=False,
+        )
+        self._add_io_state(old_state)
+
+    def wait_for_handshake(self) -> "Future[SSLIOStream]":
+        """Wait for the initial SSL handshake to complete.
+
+        This method returns a `.Future` which will resolve to the
+        stream itself after the handshake is complete.
+
+        Once the handshake is complete, information such as
+        the peer's certificate and NPN/ALPN selections may be
+        accessed on ``self.socket``.
+
+        This method is intended for use on server-side streams
+        or after using `IOStream.start_tls`; it should not be used
+        with `IOStream.connect` (which already waits for the
+        handshake to complete). It may only be called once per stream.
+
+        .. versionadded:: 4.2
+
+        .. versionchanged:: 6.0
+
+           The ``callback`` argument was removed. Use the returned
+           `.Future` instead.
+
+        """
+        if self._ssl_connect_future is not None:
+            raise RuntimeError("Already waiting")
+        future = self._ssl_connect_future = Future()
+        if not self._ssl_accepting:
+            self._finish_ssl_connect()
+        return future
+
+    def write_to_fd(self, data: memoryview) -> int:
+        try:
+            return self.socket.send(data)  # type: ignore
+        except ssl.SSLError as e:
+            if e.args[0] == ssl.SSL_ERROR_WANT_WRITE:
+                # In Python 3.5+, SSLSocket.send raises a WANT_WRITE error if
+                # the socket is not writeable; we need to transform this into
+                # an EWOULDBLOCK socket.error or a zero return value,
+                # either of which will be recognized by the caller of this
+                # method. Prior to Python 3.5, an unwriteable socket would
+                # simply return 0 bytes written.
+                return 0
+            raise
+        finally:
+            # Avoid keeping a reference to data, which can be a memoryview.
+            # See https://github.com/tornadoweb/tornado/pull/2008
+            del data
+
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
+        try:
+            if self._ssl_accepting:
+                # If the handshake hasn't finished yet, there can't be anything
+                # to read (attempting to read may or may not raise an exception
+                # depending on the SSL version)
+                return None
+            try:
+                return self.socket.recv_into(buf, len(buf))
+            except ssl.SSLError as e:
+                # SSLError is a subclass of socket.error, so this except
+                # block must come first.
+                if e.args[0] == ssl.SSL_ERROR_WANT_READ:
+                    return None
+                else:
+                    raise
+            except socket.error as e:
+                if e.args[0] in _ERRNO_WOULDBLOCK:
+                    return None
+                else:
+                    raise
+        finally:
+            del buf
+
+    def _is_connreset(self, e: BaseException) -> bool:
+        if isinstance(e, ssl.SSLError) and e.args[0] == ssl.SSL_ERROR_EOF:
+            return True
+        return super(SSLIOStream, self)._is_connreset(e)
+
+
+class PipeIOStream(BaseIOStream):
+    """Pipe-based `IOStream` implementation.
+
+    The constructor takes an integer file descriptor (such as one returned
+    by `os.pipe`) rather than an open file object. Pipes are generally
+    one-way, so a `PipeIOStream` can be used for reading or writing but not
+    both.
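+
+    A minimal usage sketch (illustrative; the ``reader``/``writer`` names
+    are arbitrary)::
+
+        import os
+        from tornado import ioloop, iostream
+
+        async def main():
+            r, w = os.pipe()
+            reader = iostream.PipeIOStream(r)
+            writer = iostream.PipeIOStream(w)
+            await writer.write(b"hello\n")
+            print(await reader.read_until(b"\n"))
+
+        ioloop.IOLoop.current().run_sync(main)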
+ """ + + def __init__(self, fd: int, *args: Any, **kwargs: Any) -> None: + self.fd = fd + self._fio = io.FileIO(self.fd, "r+") + _set_nonblocking(fd) + super(PipeIOStream, self).__init__(*args, **kwargs) + + def fileno(self) -> int: + return self.fd + + def close_fd(self) -> None: + self._fio.close() + + def write_to_fd(self, data: memoryview) -> int: + try: + return os.write(self.fd, data) # type: ignore + finally: + # Avoid keeping to data, which can be a memoryview. + # See https://github.com/tornadoweb/tornado/pull/2008 + del data + + def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]: + try: + return self._fio.readinto(buf) # type: ignore + except (IOError, OSError) as e: + if errno_from_exception(e) == errno.EBADF: + # If the writing half of a pipe is closed, select will + # report it as readable but reads will fail with EBADF. + self.close(exc_info=e) + return None + else: + raise + finally: + del buf + + +def doctests() -> Any: + import doctest + + return doctest.DocTestSuite() diff --git a/server/www/packages/packages-linux/x64/tornado/locale.py b/server/www/packages/packages-linux/x64/tornado/locale.py index d45172f..2a1b738 100644 --- a/server/www/packages/packages-linux/x64/tornado/locale.py +++ b/server/www/packages/packages-linux/x64/tornado/locale.py @@ -1,521 +1,564 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Translation methods for generating localized strings. - -To load a locale and generate a translated string:: - - user_locale = tornado.locale.get("es_LA") - print(user_locale.translate("Sign out")) - -`tornado.locale.get()` returns the closest matching locale, not necessarily the -specific locale you requested. You can support pluralization with -additional arguments to `~Locale.translate()`, e.g.:: - - people = [...] - message = user_locale.translate( - "%(list)s is online", "%(list)s are online", len(people)) - print(message % {"list": user_locale.list(people)}) - -The first string is chosen if ``len(people) == 1``, otherwise the second -string is chosen. - -Applications should call one of `load_translations` (which uses a simple -CSV format) or `load_gettext_translations` (which uses the ``.mo`` format -supported by `gettext` and related tools). If neither method is called, -the `Locale.translate` method will simply return the original string. -""" - -from __future__ import absolute_import, division, print_function - -import codecs -import csv -import datetime -from io import BytesIO -import numbers -import os -import re - -from tornado import escape -from tornado.log import gen_log -from tornado.util import PY3 - -from tornado._locale_data import LOCALE_NAMES - -_default_locale = "en_US" -_translations = {} # type: dict -_supported_locales = frozenset([_default_locale]) -_use_gettext = False -CONTEXT_SEPARATOR = "\x04" - - -def get(*locale_codes): - """Returns the closest match for the given locale codes. - - We iterate over all given locale codes in order. 
If we have a tight - or a loose match for the code (e.g., "en" for "en_US"), we return - the locale. Otherwise we move to the next code in the list. - - By default we return ``en_US`` if no translations are found for any of - the specified locales. You can change the default locale with - `set_default_locale()`. - """ - return Locale.get_closest(*locale_codes) - - -def set_default_locale(code): - """Sets the default locale. - - The default locale is assumed to be the language used for all strings - in the system. The translations loaded from disk are mappings from - the default locale to the destination locale. Consequently, you don't - need to create a translation file for the default locale. - """ - global _default_locale - global _supported_locales - _default_locale = code - _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) - - -def load_translations(directory, encoding=None): - """Loads translations from CSV files in a directory. - - Translations are strings with optional Python-style named placeholders - (e.g., ``My name is %(name)s``) and their associated translations. - - The directory should have translation files of the form ``LOCALE.csv``, - e.g. ``es_GT.csv``. The CSV files should have two or three columns: string, - translation, and an optional plural indicator. Plural indicators should - be one of "plural" or "singular". A given string can have both singular - and plural forms. For example ``%(name)s liked this`` may have a - different verb conjugation depending on whether %(name)s is one - name or a list of names. There should be two rows in the CSV file for - that string, one with plural indicator "singular", and one "plural". - For strings with no verbs that would change on translation, simply - use "unknown" or the empty string (or don't include the column at all). - - The file is read using the `csv` module in the default "excel" dialect. - In this format there should not be spaces after the commas. - - If no ``encoding`` parameter is given, the encoding will be - detected automatically (among UTF-8 and UTF-16) if the file - contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM - is present. - - Example translation ``es_LA.csv``:: - - "I love you","Te amo" - "%(name)s liked this","A %(name)s les gustó esto","plural" - "%(name)s liked this","A %(name)s le gustó esto","singular" - - .. versionchanged:: 4.3 - Added ``encoding`` parameter. Added support for BOM-based encoding - detection, UTF-16, and UTF-8-with-BOM. - """ - global _translations - global _supported_locales - _translations = {} - for path in os.listdir(directory): - if not path.endswith(".csv"): - continue - locale, extension = path.split(".") - if not re.match("[a-z]+(_[A-Z]+)?$", locale): - gen_log.error("Unrecognized locale %r (path: %s)", locale, - os.path.join(directory, path)) - continue - full_path = os.path.join(directory, path) - if encoding is None: - # Try to autodetect encoding based on the BOM. - with open(full_path, 'rb') as f: - data = f.read(len(codecs.BOM_UTF16_LE)) - if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - encoding = 'utf-16' - else: - # utf-8-sig is "utf-8 with optional BOM". It's discouraged - # in most cases but is common with CSV files because Excel - # cannot read utf-8 files without a BOM. - encoding = 'utf-8-sig' - if PY3: - # python 3: csv.reader requires a file open in text mode. - # Force utf8 to avoid dependence on $LANG environment variable. 
- f = open(full_path, "r", encoding=encoding) - else: - # python 2: csv can only handle byte strings (in ascii-compatible - # encodings), which we decode below. Transcode everything into - # utf8 before passing it to csv.reader. - f = BytesIO() - with codecs.open(full_path, "r", encoding=encoding) as infile: - f.write(escape.utf8(infile.read())) - f.seek(0) - _translations[locale] = {} - for i, row in enumerate(csv.reader(f)): - if not row or len(row) < 2: - continue - row = [escape.to_unicode(c).strip() for c in row] - english, translation = row[:2] - if len(row) > 2: - plural = row[2] or "unknown" - else: - plural = "unknown" - if plural not in ("plural", "singular", "unknown"): - gen_log.error("Unrecognized plural indicator %r in %s line %d", - plural, path, i + 1) - continue - _translations[locale].setdefault(plural, {})[english] = translation - f.close() - _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) - gen_log.debug("Supported locales: %s", sorted(_supported_locales)) - - -def load_gettext_translations(directory, domain): - """Loads translations from `gettext`'s locale tree - - Locale tree is similar to system's ``/usr/share/locale``, like:: - - {directory}/{lang}/LC_MESSAGES/{domain}.mo - - Three steps are required to have your app translated: - - 1. Generate POT translation file:: - - xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc - - 2. Merge against existing POT file:: - - msgmerge old.po mydomain.po > new.po - - 3. Compile:: - - msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo - """ - import gettext - global _translations - global _supported_locales - global _use_gettext - _translations = {} - for lang in os.listdir(directory): - if lang.startswith('.'): - continue # skip .svn, etc - if os.path.isfile(os.path.join(directory, lang)): - continue - try: - os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) - _translations[lang] = gettext.translation(domain, directory, - languages=[lang]) - except Exception as e: - gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) - continue - _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) - _use_gettext = True - gen_log.debug("Supported locales: %s", sorted(_supported_locales)) - - -def get_supported_locales(): - """Returns a list of all the supported locale codes.""" - return _supported_locales - - -class Locale(object): - """Object representing a locale. - - After calling one of `load_translations` or `load_gettext_translations`, - call `get` or `get_closest` to get a Locale object. - """ - @classmethod - def get_closest(cls, *locale_codes): - """Returns the closest match for the given locale code.""" - for code in locale_codes: - if not code: - continue - code = code.replace("-", "_") - parts = code.split("_") - if len(parts) > 2: - continue - elif len(parts) == 2: - code = parts[0].lower() + "_" + parts[1].upper() - if code in _supported_locales: - return cls.get(code) - if parts[0].lower() in _supported_locales: - return cls.get(parts[0].lower()) - return cls.get(_default_locale) - - @classmethod - def get(cls, code): - """Returns the Locale for the given locale code. - - If it is not supported, we raise an exception. 
- """ - if not hasattr(cls, "_cache"): - cls._cache = {} - if code not in cls._cache: - assert code in _supported_locales - translations = _translations.get(code, None) - if translations is None: - locale = CSVLocale(code, {}) - elif _use_gettext: - locale = GettextLocale(code, translations) - else: - locale = CSVLocale(code, translations) - cls._cache[code] = locale - return cls._cache[code] - - def __init__(self, code, translations): - self.code = code - self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown") - self.rtl = False - for prefix in ["fa", "ar", "he"]: - if self.code.startswith(prefix): - self.rtl = True - break - self.translations = translations - - # Initialize strings for date formatting - _ = self.translate - self._months = [ - _("January"), _("February"), _("March"), _("April"), - _("May"), _("June"), _("July"), _("August"), - _("September"), _("October"), _("November"), _("December")] - self._weekdays = [ - _("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), - _("Friday"), _("Saturday"), _("Sunday")] - - def translate(self, message, plural_message=None, count=None): - """Returns the translation for the given message for this locale. - - If ``plural_message`` is given, you must also provide - ``count``. We return ``plural_message`` when ``count != 1``, - and we return the singular form for the given message when - ``count == 1``. - """ - raise NotImplementedError() - - def pgettext(self, context, message, plural_message=None, count=None): - raise NotImplementedError() - - def format_date(self, date, gmt_offset=0, relative=True, shorter=False, - full_format=False): - """Formats the given date (which should be GMT). - - By default, we return a relative time (e.g., "2 minutes ago"). You - can return an absolute date string with ``relative=False``. - - You can force a full format date ("July 10, 1980") with - ``full_format=True``. - - This method is primarily intended for dates in the past. - For dates in the future, we fall back to full format. - """ - if isinstance(date, numbers.Real): - date = datetime.datetime.utcfromtimestamp(date) - now = datetime.datetime.utcnow() - if date > now: - if relative and (date - now).seconds < 60: - # Due to click skew, things are some things slightly - # in the future. Round timestamps in the immediate - # future down to now in relative mode. - date = now - else: - # Otherwise, future dates always use the full format. 
- full_format = True - local_date = date - datetime.timedelta(minutes=gmt_offset) - local_now = now - datetime.timedelta(minutes=gmt_offset) - local_yesterday = local_now - datetime.timedelta(hours=24) - difference = now - date - seconds = difference.seconds - days = difference.days - - _ = self.translate - format = None - if not full_format: - if relative and days == 0: - if seconds < 50: - return _("1 second ago", "%(seconds)d seconds ago", - seconds) % {"seconds": seconds} - - if seconds < 50 * 60: - minutes = round(seconds / 60.0) - return _("1 minute ago", "%(minutes)d minutes ago", - minutes) % {"minutes": minutes} - - hours = round(seconds / (60.0 * 60)) - return _("1 hour ago", "%(hours)d hours ago", - hours) % {"hours": hours} - - if days == 0: - format = _("%(time)s") - elif days == 1 and local_date.day == local_yesterday.day and \ - relative: - format = _("yesterday") if shorter else \ - _("yesterday at %(time)s") - elif days < 5: - format = _("%(weekday)s") if shorter else \ - _("%(weekday)s at %(time)s") - elif days < 334: # 11mo, since confusing for same month last year - format = _("%(month_name)s %(day)s") if shorter else \ - _("%(month_name)s %(day)s at %(time)s") - - if format is None: - format = _("%(month_name)s %(day)s, %(year)s") if shorter else \ - _("%(month_name)s %(day)s, %(year)s at %(time)s") - - tfhour_clock = self.code not in ("en", "en_US", "zh_CN") - if tfhour_clock: - str_time = "%d:%02d" % (local_date.hour, local_date.minute) - elif self.code == "zh_CN": - str_time = "%s%d:%02d" % ( - (u'\u4e0a\u5348', u'\u4e0b\u5348')[local_date.hour >= 12], - local_date.hour % 12 or 12, local_date.minute) - else: - str_time = "%d:%02d %s" % ( - local_date.hour % 12 or 12, local_date.minute, - ("am", "pm")[local_date.hour >= 12]) - - return format % { - "month_name": self._months[local_date.month - 1], - "weekday": self._weekdays[local_date.weekday()], - "day": str(local_date.day), - "year": str(local_date.year), - "time": str_time - } - - def format_day(self, date, gmt_offset=0, dow=True): - """Formats the given date as a day of week. - - Example: "Monday, January 22". You can remove the day of week with - ``dow=False``. - """ - local_date = date - datetime.timedelta(minutes=gmt_offset) - _ = self.translate - if dow: - return _("%(weekday)s, %(month_name)s %(day)s") % { - "month_name": self._months[local_date.month - 1], - "weekday": self._weekdays[local_date.weekday()], - "day": str(local_date.day), - } - else: - return _("%(month_name)s %(day)s") % { - "month_name": self._months[local_date.month - 1], - "day": str(local_date.day), - } - - def list(self, parts): - """Returns a comma-separated list for the given list of parts. - - The format is, e.g., "A, B and C", "A and B" or just "A" for lists - of size 1. 
- """ - _ = self.translate - if len(parts) == 0: - return "" - if len(parts) == 1: - return parts[0] - comma = u' \u0648 ' if self.code.startswith("fa") else u", " - return _("%(commas)s and %(last)s") % { - "commas": comma.join(parts[:-1]), - "last": parts[len(parts) - 1], - } - - def friendly_number(self, value): - """Returns a comma-separated number for the given integer.""" - if self.code not in ("en", "en_US"): - return str(value) - value = str(value) - parts = [] - while value: - parts.append(value[-3:]) - value = value[:-3] - return ",".join(reversed(parts)) - - -class CSVLocale(Locale): - """Locale implementation using tornado's CSV translation format.""" - def translate(self, message, plural_message=None, count=None): - if plural_message is not None: - assert count is not None - if count != 1: - message = plural_message - message_dict = self.translations.get("plural", {}) - else: - message_dict = self.translations.get("singular", {}) - else: - message_dict = self.translations.get("unknown", {}) - return message_dict.get(message, message) - - def pgettext(self, context, message, plural_message=None, count=None): - if self.translations: - gen_log.warning('pgettext is not supported by CSVLocale') - return self.translate(message, plural_message, count) - - -class GettextLocale(Locale): - """Locale implementation using the `gettext` module.""" - def __init__(self, code, translations): - try: - # python 2 - self.ngettext = translations.ungettext - self.gettext = translations.ugettext - except AttributeError: - # python 3 - self.ngettext = translations.ngettext - self.gettext = translations.gettext - # self.gettext must exist before __init__ is called, since it - # calls into self.translate - super(GettextLocale, self).__init__(code, translations) - - def translate(self, message, plural_message=None, count=None): - if plural_message is not None: - assert count is not None - return self.ngettext(message, plural_message, count) - else: - return self.gettext(message) - - def pgettext(self, context, message, plural_message=None, count=None): - """Allows to set context for translation, accepts plural forms. - - Usage example:: - - pgettext("law", "right") - pgettext("good", "right") - - Plural message example:: - - pgettext("organization", "club", "clubs", len(clubs)) - pgettext("stick", "club", "clubs", len(clubs)) - - To generate POT file with context, add following options to step 1 - of `load_gettext_translations` sequence:: - - xgettext [basic options] --keyword=pgettext:1c,2 --keyword=pgettext:1c,2,3 - - .. versionadded:: 4.2 - """ - if plural_message is not None: - assert count is not None - msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, message), - "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), - count) - result = self.ngettext(*msgs_with_ctxt) - if CONTEXT_SEPARATOR in result: - # Translation not found - result = self.ngettext(message, plural_message, count) - return result - else: - msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message) - result = self.gettext(msg_with_ctxt) - if CONTEXT_SEPARATOR in result: - # Translation not found - result = message - return result +# -*- coding: utf-8 -*- +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Translation methods for generating localized strings. + +To load a locale and generate a translated string:: + + user_locale = tornado.locale.get("es_LA") + print(user_locale.translate("Sign out")) + +`tornado.locale.get()` returns the closest matching locale, not necessarily the +specific locale you requested. You can support pluralization with +additional arguments to `~Locale.translate()`, e.g.:: + + people = [...] + message = user_locale.translate( + "%(list)s is online", "%(list)s are online", len(people)) + print(message % {"list": user_locale.list(people)}) + +The first string is chosen if ``len(people) == 1``, otherwise the second +string is chosen. + +Applications should call one of `load_translations` (which uses a simple +CSV format) or `load_gettext_translations` (which uses the ``.mo`` format +supported by `gettext` and related tools). If neither method is called, +the `Locale.translate` method will simply return the original string. +""" + +import codecs +import csv +import datetime +import gettext +import os +import re + +from tornado import escape +from tornado.log import gen_log + +from tornado._locale_data import LOCALE_NAMES + +from typing import Iterable, Any, Union, Dict + +_default_locale = "en_US" +_translations = {} # type: Dict[str, Any] +_supported_locales = frozenset([_default_locale]) +_use_gettext = False +CONTEXT_SEPARATOR = "\x04" + + +def get(*locale_codes: str) -> "Locale": + """Returns the closest match for the given locale codes. + + We iterate over all given locale codes in order. If we have a tight + or a loose match for the code (e.g., "en" for "en_US"), we return + the locale. Otherwise we move to the next code in the list. + + By default we return ``en_US`` if no translations are found for any of + the specified locales. You can change the default locale with + `set_default_locale()`. + """ + return Locale.get_closest(*locale_codes) + + +def set_default_locale(code: str) -> None: + """Sets the default locale. + + The default locale is assumed to be the language used for all strings + in the system. The translations loaded from disk are mappings from + the default locale to the destination locale. Consequently, you don't + need to create a translation file for the default locale. + """ + global _default_locale + global _supported_locales + _default_locale = code + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + + +def load_translations(directory: str, encoding: str = None) -> None: + """Loads translations from CSV files in a directory. + + Translations are strings with optional Python-style named placeholders + (e.g., ``My name is %(name)s``) and their associated translations. + + The directory should have translation files of the form ``LOCALE.csv``, + e.g. ``es_GT.csv``. The CSV files should have two or three columns: string, + translation, and an optional plural indicator. Plural indicators should + be one of "plural" or "singular". A given string can have both singular + and plural forms. 
For example ``%(name)s liked this`` may have a
+    different verb conjugation depending on whether %(name)s is one
+    name or a list of names. There should be two rows in the CSV file for
+    that string, one with plural indicator "singular", and one "plural".
+    For strings with no verbs that would change on translation, simply
+    use "unknown" or the empty string (or don't include the column at all).
+
+    The file is read using the `csv` module in the default "excel" dialect.
+    In this format there should not be spaces after the commas.
+
+    If no ``encoding`` parameter is given, the encoding will be
+    detected automatically (among UTF-8 and UTF-16) if the file
+    contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM
+    is present.
+
+    Example translation ``es_LA.csv``::
+
+        "I love you","Te amo"
+        "%(name)s liked this","A %(name)s les gustó esto","plural"
+        "%(name)s liked this","A %(name)s le gustó esto","singular"
+
+    .. versionchanged:: 4.3
+       Added ``encoding`` parameter. Added support for BOM-based encoding
+       detection, UTF-16, and UTF-8-with-BOM.
+    """
+    global _translations
+    global _supported_locales
+    _translations = {}
+    for path in os.listdir(directory):
+        if not path.endswith(".csv"):
+            continue
+        locale, extension = path.split(".")
+        if not re.match("[a-z]+(_[A-Z]+)?$", locale):
+            gen_log.error(
+                "Unrecognized locale %r (path: %s)",
+                locale,
+                os.path.join(directory, path),
+            )
+            continue
+        full_path = os.path.join(directory, path)
+        # Use a per-file variable so that an encoding detected for one
+        # file does not leak into the files processed after it.
+        file_encoding = encoding
+        if file_encoding is None:
+            # Try to autodetect encoding based on the BOM.
+            with open(full_path, "rb") as bf:
+                data = bf.read(len(codecs.BOM_UTF16_LE))
+            if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+                file_encoding = "utf-16"
+            else:
+                # utf-8-sig is "utf-8 with optional BOM". It's discouraged
+                # in most cases but is common with CSV files because Excel
+                # cannot read utf-8 files without a BOM.
+                file_encoding = "utf-8-sig"
+        # python 3: csv.reader requires a file open in text mode.
+        # Specify an encoding to avoid dependence on $LANG environment variable.
+        with open(full_path, encoding=file_encoding) as f:
+            _translations[locale] = {}
+            for i, row in enumerate(csv.reader(f)):
+                if not row or len(row) < 2:
+                    continue
+                row = [escape.to_unicode(c).strip() for c in row]
+                english, translation = row[:2]
+                if len(row) > 2:
+                    plural = row[2] or "unknown"
+                else:
+                    plural = "unknown"
+                if plural not in ("plural", "singular", "unknown"):
+                    gen_log.error(
+                        "Unrecognized plural indicator %r in %s line %d",
+                        plural,
+                        path,
+                        i + 1,
+                    )
+                    continue
+                _translations[locale].setdefault(plural, {})[english] = translation
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
+    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
+
+
+def load_gettext_translations(directory: str, domain: str) -> None:
+    """Loads translations from `gettext`'s locale tree
+
+    Locale tree is similar to system's ``/usr/share/locale``, like::
+
+        {directory}/{lang}/LC_MESSAGES/{domain}.mo
+
+    Three steps are required to have your app translated:
+
+    1. Generate POT translation file::
+
+        xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc
+
+    2. Merge against existing POT file::
+
+        msgmerge old.po mydomain.po > new.po
+
+    3.
Compile:: + + msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo + """ + import gettext + + global _translations + global _supported_locales + global _use_gettext + _translations = {} + for lang in os.listdir(directory): + if lang.startswith("."): + continue # skip .svn, etc + if os.path.isfile(os.path.join(directory, lang)): + continue + try: + os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) + _translations[lang] = gettext.translation( + domain, directory, languages=[lang] + ) + except Exception as e: + gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) + continue + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + _use_gettext = True + gen_log.debug("Supported locales: %s", sorted(_supported_locales)) + + +def get_supported_locales() -> Iterable[str]: + """Returns a list of all the supported locale codes.""" + return _supported_locales + + +class Locale(object): + """Object representing a locale. + + After calling one of `load_translations` or `load_gettext_translations`, + call `get` or `get_closest` to get a Locale object. + """ + + _cache = {} # type: Dict[str, Locale] + + @classmethod + def get_closest(cls, *locale_codes: str) -> "Locale": + """Returns the closest match for the given locale code.""" + for code in locale_codes: + if not code: + continue + code = code.replace("-", "_") + parts = code.split("_") + if len(parts) > 2: + continue + elif len(parts) == 2: + code = parts[0].lower() + "_" + parts[1].upper() + if code in _supported_locales: + return cls.get(code) + if parts[0].lower() in _supported_locales: + return cls.get(parts[0].lower()) + return cls.get(_default_locale) + + @classmethod + def get(cls, code: str) -> "Locale": + """Returns the Locale for the given locale code. + + If it is not supported, we raise an exception. + """ + if code not in cls._cache: + assert code in _supported_locales + translations = _translations.get(code, None) + if translations is None: + locale = CSVLocale(code, {}) # type: Locale + elif _use_gettext: + locale = GettextLocale(code, translations) + else: + locale = CSVLocale(code, translations) + cls._cache[code] = locale + return cls._cache[code] + + def __init__(self, code: str) -> None: + self.code = code + self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown") + self.rtl = False + for prefix in ["fa", "ar", "he"]: + if self.code.startswith(prefix): + self.rtl = True + break + + # Initialize strings for date formatting + _ = self.translate + self._months = [ + _("January"), + _("February"), + _("March"), + _("April"), + _("May"), + _("June"), + _("July"), + _("August"), + _("September"), + _("October"), + _("November"), + _("December"), + ] + self._weekdays = [ + _("Monday"), + _("Tuesday"), + _("Wednesday"), + _("Thursday"), + _("Friday"), + _("Saturday"), + _("Sunday"), + ] + + def translate( + self, message: str, plural_message: str = None, count: int = None + ) -> str: + """Returns the translation for the given message for this locale. + + If ``plural_message`` is given, you must also provide + ``count``. We return ``plural_message`` when ``count != 1``, + and we return the singular form for the given message when + ``count == 1``. 
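+
+        A usage sketch (illustrative), mirroring the module docstring::
+
+            message = user_locale.translate(
+                "%(list)s is online", "%(list)s are online", len(people))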
+ """ + raise NotImplementedError() + + def pgettext( + self, context: str, message: str, plural_message: str = None, count: int = None + ) -> str: + raise NotImplementedError() + + def format_date( + self, + date: Union[int, float, datetime.datetime], + gmt_offset: int = 0, + relative: bool = True, + shorter: bool = False, + full_format: bool = False, + ) -> str: + """Formats the given date (which should be GMT). + + By default, we return a relative time (e.g., "2 minutes ago"). You + can return an absolute date string with ``relative=False``. + + You can force a full format date ("July 10, 1980") with + ``full_format=True``. + + This method is primarily intended for dates in the past. + For dates in the future, we fall back to full format. + """ + if isinstance(date, (int, float)): + date = datetime.datetime.utcfromtimestamp(date) + now = datetime.datetime.utcnow() + if date > now: + if relative and (date - now).seconds < 60: + # Due to click skew, things are some things slightly + # in the future. Round timestamps in the immediate + # future down to now in relative mode. + date = now + else: + # Otherwise, future dates always use the full format. + full_format = True + local_date = date - datetime.timedelta(minutes=gmt_offset) + local_now = now - datetime.timedelta(minutes=gmt_offset) + local_yesterday = local_now - datetime.timedelta(hours=24) + difference = now - date + seconds = difference.seconds + days = difference.days + + _ = self.translate + format = None + if not full_format: + if relative and days == 0: + if seconds < 50: + return _("1 second ago", "%(seconds)d seconds ago", seconds) % { + "seconds": seconds + } + + if seconds < 50 * 60: + minutes = round(seconds / 60.0) + return _("1 minute ago", "%(minutes)d minutes ago", minutes) % { + "minutes": minutes + } + + hours = round(seconds / (60.0 * 60)) + return _("1 hour ago", "%(hours)d hours ago", hours) % {"hours": hours} + + if days == 0: + format = _("%(time)s") + elif days == 1 and local_date.day == local_yesterday.day and relative: + format = _("yesterday") if shorter else _("yesterday at %(time)s") + elif days < 5: + format = _("%(weekday)s") if shorter else _("%(weekday)s at %(time)s") + elif days < 334: # 11mo, since confusing for same month last year + format = ( + _("%(month_name)s %(day)s") + if shorter + else _("%(month_name)s %(day)s at %(time)s") + ) + + if format is None: + format = ( + _("%(month_name)s %(day)s, %(year)s") + if shorter + else _("%(month_name)s %(day)s, %(year)s at %(time)s") + ) + + tfhour_clock = self.code not in ("en", "en_US", "zh_CN") + if tfhour_clock: + str_time = "%d:%02d" % (local_date.hour, local_date.minute) + elif self.code == "zh_CN": + str_time = "%s%d:%02d" % ( + (u"\u4e0a\u5348", u"\u4e0b\u5348")[local_date.hour >= 12], + local_date.hour % 12 or 12, + local_date.minute, + ) + else: + str_time = "%d:%02d %s" % ( + local_date.hour % 12 or 12, + local_date.minute, + ("am", "pm")[local_date.hour >= 12], + ) + + return format % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + "year": str(local_date.year), + "time": str_time, + } + + def format_day( + self, date: datetime.datetime, gmt_offset: int = 0, dow: bool = True + ) -> bool: + """Formats the given date as a day of week. + + Example: "Monday, January 22". You can remove the day of week with + ``dow=False``. 
+ """ + local_date = date - datetime.timedelta(minutes=gmt_offset) + _ = self.translate + if dow: + return _("%(weekday)s, %(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + } + else: + return _("%(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "day": str(local_date.day), + } + + def list(self, parts: Any) -> str: + """Returns a comma-separated list for the given list of parts. + + The format is, e.g., "A, B and C", "A and B" or just "A" for lists + of size 1. + """ + _ = self.translate + if len(parts) == 0: + return "" + if len(parts) == 1: + return parts[0] + comma = u" \u0648 " if self.code.startswith("fa") else u", " + return _("%(commas)s and %(last)s") % { + "commas": comma.join(parts[:-1]), + "last": parts[len(parts) - 1], + } + + def friendly_number(self, value: int) -> str: + """Returns a comma-separated number for the given integer.""" + if self.code not in ("en", "en_US"): + return str(value) + s = str(value) + parts = [] + while s: + parts.append(s[-3:]) + s = s[:-3] + return ",".join(reversed(parts)) + + +class CSVLocale(Locale): + """Locale implementation using tornado's CSV translation format.""" + + def __init__(self, code: str, translations: Dict[str, Dict[str, str]]) -> None: + self.translations = translations + super(CSVLocale, self).__init__(code) + + def translate( + self, message: str, plural_message: str = None, count: int = None + ) -> str: + if plural_message is not None: + assert count is not None + if count != 1: + message = plural_message + message_dict = self.translations.get("plural", {}) + else: + message_dict = self.translations.get("singular", {}) + else: + message_dict = self.translations.get("unknown", {}) + return message_dict.get(message, message) + + def pgettext( + self, context: str, message: str, plural_message: str = None, count: int = None + ) -> str: + if self.translations: + gen_log.warning("pgettext is not supported by CSVLocale") + return self.translate(message, plural_message, count) + + +class GettextLocale(Locale): + """Locale implementation using the `gettext` module.""" + + def __init__(self, code: str, translations: gettext.NullTranslations) -> None: + self.ngettext = translations.ngettext + self.gettext = translations.gettext + # self.gettext must exist before __init__ is called, since it + # calls into self.translate + super(GettextLocale, self).__init__(code) + + def translate( + self, message: str, plural_message: str = None, count: int = None + ) -> str: + if plural_message is not None: + assert count is not None + return self.ngettext(message, plural_message, count) + else: + return self.gettext(message) + + def pgettext( + self, context: str, message: str, plural_message: str = None, count: int = None + ) -> str: + """Allows to set context for translation, accepts plural forms. + + Usage example:: + + pgettext("law", "right") + pgettext("good", "right") + + Plural message example:: + + pgettext("organization", "club", "clubs", len(clubs)) + pgettext("stick", "club", "clubs", len(clubs)) + + To generate POT file with context, add following options to step 1 + of `load_gettext_translations` sequence:: + + xgettext [basic options] --keyword=pgettext:1c,2 --keyword=pgettext:1c,2,3 + + .. 
versionadded:: 4.2 + """ + if plural_message is not None: + assert count is not None + msgs_with_ctxt = ( + "%s%s%s" % (context, CONTEXT_SEPARATOR, message), + "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), + count, + ) + result = self.ngettext(*msgs_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = self.ngettext(message, plural_message, count) + return result + else: + msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message) + result = self.gettext(msg_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = message + return result diff --git a/server/www/packages/packages-linux/x64/tornado/locks.py b/server/www/packages/packages-linux/x64/tornado/locks.py index 9566a45..a90b3e3 100644 --- a/server/www/packages/packages-linux/x64/tornado/locks.py +++ b/server/www/packages/packages-linux/x64/tornado/locks.py @@ -1,526 +1,570 @@ -# Copyright 2015 The Tornado Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import, division, print_function - -import collections -from concurrent.futures import CancelledError - -from tornado import gen, ioloop -from tornado.concurrent import Future, future_set_result_unless_cancelled - -__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] - - -class _TimeoutGarbageCollector(object): - """Base class for objects that periodically clean up timed-out waiters. - - Avoids memory leak in a common pattern like: - - while True: - yield condition.wait(short_timeout) - print('looping....') - """ - def __init__(self): - self._waiters = collections.deque() # Futures. - self._timeouts = 0 - - def _garbage_collect(self): - # Occasionally clear timed-out waiters. - self._timeouts += 1 - if self._timeouts > 100: - self._timeouts = 0 - self._waiters = collections.deque( - w for w in self._waiters if not w.done()) - - -class Condition(_TimeoutGarbageCollector): - """A condition allows one or more coroutines to wait until notified. - - Like a standard `threading.Condition`, but does not need an underlying lock - that is acquired and released. - - With a `Condition`, coroutines can wait to be notified by other coroutines: - - .. testcode:: - - from tornado import gen - from tornado.ioloop import IOLoop - from tornado.locks import Condition - - condition = Condition() - - async def waiter(): - print("I'll wait right here") - await condition.wait() - print("I'm done waiting") - - async def notifier(): - print("About to notify") - condition.notify() - print("Done notifying") - - async def runner(): - # Wait for waiter() and notifier() in parallel - await gen.multi([waiter(), notifier()]) - - IOLoop.current().run_sync(runner) - - .. testoutput:: - - I'll wait right here - About to notify - Done notifying - I'm done waiting - - `wait` takes an optional ``timeout`` argument, which is either an absolute - timestamp:: - - io_loop = IOLoop.current() - - # Wait up to 1 second for a notification. 
- await condition.wait(timeout=io_loop.time() + 1) - - ...or a `datetime.timedelta` for a timeout relative to the current time:: - - # Wait up to 1 second. - await condition.wait(timeout=datetime.timedelta(seconds=1)) - - The method returns False if there's no notification before the deadline. - - .. versionchanged:: 5.0 - Previously, waiters could be notified synchronously from within - `notify`. Now, the notification will always be received on the - next iteration of the `.IOLoop`. - """ - - def __init__(self): - super(Condition, self).__init__() - self.io_loop = ioloop.IOLoop.current() - - def __repr__(self): - result = '<%s' % (self.__class__.__name__, ) - if self._waiters: - result += ' waiters[%s]' % len(self._waiters) - return result + '>' - - def wait(self, timeout=None): - """Wait for `.notify`. - - Returns a `.Future` that resolves ``True`` if the condition is notified, - or ``False`` after a timeout. - """ - waiter = Future() - self._waiters.append(waiter) - if timeout: - def on_timeout(): - if not waiter.done(): - future_set_result_unless_cancelled(waiter, False) - self._garbage_collect() - io_loop = ioloop.IOLoop.current() - timeout_handle = io_loop.add_timeout(timeout, on_timeout) - waiter.add_done_callback( - lambda _: io_loop.remove_timeout(timeout_handle)) - return waiter - - def notify(self, n=1): - """Wake ``n`` waiters.""" - waiters = [] # Waiters we plan to run right now. - while n and self._waiters: - waiter = self._waiters.popleft() - if not waiter.done(): # Might have timed out. - n -= 1 - waiters.append(waiter) - - for waiter in waiters: - future_set_result_unless_cancelled(waiter, True) - - def notify_all(self): - """Wake all waiters.""" - self.notify(len(self._waiters)) - - -class Event(object): - """An event blocks coroutines until its internal flag is set to True. - - Similar to `threading.Event`. - - A coroutine can wait for an event to be set. Once it is set, calls to - ``yield event.wait()`` will not block unless the event has been cleared: - - .. testcode:: - - from tornado import gen - from tornado.ioloop import IOLoop - from tornado.locks import Event - - event = Event() - - async def waiter(): - print("Waiting for event") - await event.wait() - print("Not waiting this time") - await event.wait() - print("Done") - - async def setter(): - print("About to set the event") - event.set() - - async def runner(): - await gen.multi([waiter(), setter()]) - - IOLoop.current().run_sync(runner) - - .. testoutput:: - - Waiting for event - About to set the event - Not waiting this time - Done - """ - def __init__(self): - self._value = False - self._waiters = set() - - def __repr__(self): - return '<%s %s>' % ( - self.__class__.__name__, 'set' if self.is_set() else 'clear') - - def is_set(self): - """Return ``True`` if the internal flag is true.""" - return self._value - - def set(self): - """Set the internal flag to ``True``. All waiters are awakened. - - Calling `.wait` once the flag is set will not block. - """ - if not self._value: - self._value = True - - for fut in self._waiters: - if not fut.done(): - fut.set_result(None) - - def clear(self): - """Reset the internal flag to ``False``. - - Calls to `.wait` will block until `.set` is called. - """ - self._value = False - - def wait(self, timeout=None): - """Block until the internal flag is true. - - Returns a Future, which raises `tornado.util.TimeoutError` after a - timeout. 
- """ - fut = Future() - if self._value: - fut.set_result(None) - return fut - self._waiters.add(fut) - fut.add_done_callback(lambda fut: self._waiters.remove(fut)) - if timeout is None: - return fut - else: - timeout_fut = gen.with_timeout(timeout, fut, quiet_exceptions=(CancelledError,)) - # This is a slightly clumsy workaround for the fact that - # gen.with_timeout doesn't cancel its futures. Cancelling - # fut will remove it from the waiters list. - timeout_fut.add_done_callback(lambda tf: fut.cancel() if not fut.done() else None) - return timeout_fut - - -class _ReleasingContextManager(object): - """Releases a Lock or Semaphore at the end of a "with" statement. - - with (yield semaphore.acquire()): - pass - - # Now semaphore.release() has been called. - """ - def __init__(self, obj): - self._obj = obj - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - self._obj.release() - - -class Semaphore(_TimeoutGarbageCollector): - """A lock that can be acquired a fixed number of times before blocking. - - A Semaphore manages a counter representing the number of `.release` calls - minus the number of `.acquire` calls, plus an initial value. The `.acquire` - method blocks if necessary until it can return without making the counter - negative. - - Semaphores limit access to a shared resource. To allow access for two - workers at a time: - - .. testsetup:: semaphore - - from collections import deque - - from tornado import gen - from tornado.ioloop import IOLoop - from tornado.concurrent import Future - - # Ensure reliable doctest output: resolve Futures one at a time. - futures_q = deque([Future() for _ in range(3)]) - - async def simulator(futures): - for f in futures: - # simulate the asynchronous passage of time - await gen.sleep(0) - await gen.sleep(0) - f.set_result(None) - - IOLoop.current().add_callback(simulator, list(futures_q)) - - def use_some_resource(): - return futures_q.popleft() - - .. testcode:: semaphore - - from tornado import gen - from tornado.ioloop import IOLoop - from tornado.locks import Semaphore - - sem = Semaphore(2) - - async def worker(worker_id): - await sem.acquire() - try: - print("Worker %d is working" % worker_id) - await use_some_resource() - finally: - print("Worker %d is done" % worker_id) - sem.release() - - async def runner(): - # Join all workers. - await gen.multi([worker(i) for i in range(3)]) - - IOLoop.current().run_sync(runner) - - .. testoutput:: semaphore - - Worker 0 is working - Worker 1 is working - Worker 0 is done - Worker 2 is working - Worker 1 is done - Worker 2 is done - - Workers 0 and 1 are allowed to run concurrently, but worker 2 waits until - the semaphore has been released once, by worker 0. - - The semaphore can be used as an async context manager:: - - async def worker(worker_id): - async with sem: - print("Worker %d is working" % worker_id) - await use_some_resource() - - # Now the semaphore has been released. - print("Worker %d is done" % worker_id) - - For compatibility with older versions of Python, `.acquire` is a - context manager, so ``worker`` could also be written as:: - - @gen.coroutine - def worker(worker_id): - with (yield sem.acquire()): - print("Worker %d is working" % worker_id) - yield use_some_resource() - - # Now the semaphore has been released. - print("Worker %d is done" % worker_id) - - .. versionchanged:: 4.3 - Added ``async with`` support in Python 3.5. 
- - """ - def __init__(self, value=1): - super(Semaphore, self).__init__() - if value < 0: - raise ValueError('semaphore initial value must be >= 0') - - self._value = value - - def __repr__(self): - res = super(Semaphore, self).__repr__() - extra = 'locked' if self._value == 0 else 'unlocked,value:{0}'.format( - self._value) - if self._waiters: - extra = '{0},waiters:{1}'.format(extra, len(self._waiters)) - return '<{0} [{1}]>'.format(res[1:-1], extra) - - def release(self): - """Increment the counter and wake one waiter.""" - self._value += 1 - while self._waiters: - waiter = self._waiters.popleft() - if not waiter.done(): - self._value -= 1 - - # If the waiter is a coroutine paused at - # - # with (yield semaphore.acquire()): - # - # then the context manager's __exit__ calls release() at the end - # of the "with" block. - waiter.set_result(_ReleasingContextManager(self)) - break - - def acquire(self, timeout=None): - """Decrement the counter. Returns a Future. - - Block if the counter is zero and wait for a `.release`. The Future - raises `.TimeoutError` after the deadline. - """ - waiter = Future() - if self._value > 0: - self._value -= 1 - waiter.set_result(_ReleasingContextManager(self)) - else: - self._waiters.append(waiter) - if timeout: - def on_timeout(): - if not waiter.done(): - waiter.set_exception(gen.TimeoutError()) - self._garbage_collect() - io_loop = ioloop.IOLoop.current() - timeout_handle = io_loop.add_timeout(timeout, on_timeout) - waiter.add_done_callback( - lambda _: io_loop.remove_timeout(timeout_handle)) - return waiter - - def __enter__(self): - raise RuntimeError( - "Use Semaphore like 'with (yield semaphore.acquire())', not like" - " 'with semaphore'") - - __exit__ = __enter__ - - @gen.coroutine - def __aenter__(self): - yield self.acquire() - - @gen.coroutine - def __aexit__(self, typ, value, tb): - self.release() - - -class BoundedSemaphore(Semaphore): - """A semaphore that prevents release() being called too many times. - - If `.release` would increment the semaphore's value past the initial - value, it raises `ValueError`. Semaphores are mostly used to guard - resources with limited capacity, so a semaphore released too many times - is a sign of a bug. - """ - def __init__(self, value=1): - super(BoundedSemaphore, self).__init__(value=value) - self._initial_value = value - - def release(self): - """Increment the counter and wake one waiter.""" - if self._value >= self._initial_value: - raise ValueError("Semaphore released too many times") - super(BoundedSemaphore, self).release() - - -class Lock(object): - """A lock for coroutines. - - A Lock begins unlocked, and `acquire` locks it immediately. While it is - locked, a coroutine that yields `acquire` waits until another coroutine - calls `release`. - - Releasing an unlocked lock raises `RuntimeError`. - - A Lock can be used as an async context manager with the ``async - with`` statement: - - >>> from tornado import locks - >>> lock = locks.Lock() - >>> - >>> async def f(): - ... async with lock: - ... # Do something holding the lock. - ... pass - ... - ... # Now the lock is released. - - For compatibility with older versions of Python, the `.acquire` - method asynchronously returns a regular context manager: - - >>> async def f2(): - ... with (yield lock.acquire()): - ... # Do something holding the lock. - ... pass - ... - ... # Now the lock is released. - - .. versionchanged:: 4.3 - Added ``async with`` support in Python 3.5. 
- - """ - def __init__(self): - self._block = BoundedSemaphore(value=1) - - def __repr__(self): - return "<%s _block=%s>" % ( - self.__class__.__name__, - self._block) - - def acquire(self, timeout=None): - """Attempt to lock. Returns a Future. - - Returns a Future, which raises `tornado.util.TimeoutError` after a - timeout. - """ - return self._block.acquire(timeout) - - def release(self): - """Unlock. - - The first coroutine in line waiting for `acquire` gets the lock. - - If not locked, raise a `RuntimeError`. - """ - try: - self._block.release() - except ValueError: - raise RuntimeError('release unlocked lock') - - def __enter__(self): - raise RuntimeError( - "Use Lock like 'with (yield lock)', not like 'with lock'") - - __exit__ = __enter__ - - @gen.coroutine - def __aenter__(self): - yield self.acquire() - - @gen.coroutine - def __aexit__(self, typ, value, tb): - self.release() +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import collections +from concurrent.futures import CancelledError +import datetime +import types + +from tornado import gen, ioloop +from tornado.concurrent import Future, future_set_result_unless_cancelled + +from typing import Union, Optional, Type, Any, Awaitable +import typing + +if typing.TYPE_CHECKING: + from typing import Deque, Set # noqa: F401 + +__all__ = ["Condition", "Event", "Semaphore", "BoundedSemaphore", "Lock"] + + +class _TimeoutGarbageCollector(object): + """Base class for objects that periodically clean up timed-out waiters. + + Avoids memory leak in a common pattern like: + + while True: + yield condition.wait(short_timeout) + print('looping....') + """ + + def __init__(self) -> None: + self._waiters = collections.deque() # type: Deque[Future] + self._timeouts = 0 + + def _garbage_collect(self) -> None: + # Occasionally clear timed-out waiters. + self._timeouts += 1 + if self._timeouts > 100: + self._timeouts = 0 + self._waiters = collections.deque(w for w in self._waiters if not w.done()) + + +class Condition(_TimeoutGarbageCollector): + """A condition allows one or more coroutines to wait until notified. + + Like a standard `threading.Condition`, but does not need an underlying lock + that is acquired and released. + + With a `Condition`, coroutines can wait to be notified by other coroutines: + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Condition + + condition = Condition() + + async def waiter(): + print("I'll wait right here") + await condition.wait() + print("I'm done waiting") + + async def notifier(): + print("About to notify") + condition.notify() + print("Done notifying") + + async def runner(): + # Wait for waiter() and notifier() in parallel + await gen.multi([waiter(), notifier()]) + + IOLoop.current().run_sync(runner) + + .. 
testoutput:: + + I'll wait right here + About to notify + Done notifying + I'm done waiting + + `wait` takes an optional ``timeout`` argument, which is either an absolute + timestamp:: + + io_loop = IOLoop.current() + + # Wait up to 1 second for a notification. + await condition.wait(timeout=io_loop.time() + 1) + + ...or a `datetime.timedelta` for a timeout relative to the current time:: + + # Wait up to 1 second. + await condition.wait(timeout=datetime.timedelta(seconds=1)) + + The method returns False if there's no notification before the deadline. + + .. versionchanged:: 5.0 + Previously, waiters could be notified synchronously from within + `notify`. Now, the notification will always be received on the + next iteration of the `.IOLoop`. + """ + + def __init__(self) -> None: + super(Condition, self).__init__() + self.io_loop = ioloop.IOLoop.current() + + def __repr__(self) -> str: + result = "<%s" % (self.__class__.__name__,) + if self._waiters: + result += " waiters[%s]" % len(self._waiters) + return result + ">" + + def wait(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[bool]: + """Wait for `.notify`. + + Returns a `.Future` that resolves ``True`` if the condition is notified, + or ``False`` after a timeout. + """ + waiter = Future() # type: Future[bool] + self._waiters.append(waiter) + if timeout: + + def on_timeout() -> None: + if not waiter.done(): + future_set_result_unless_cancelled(waiter, False) + self._garbage_collect() + + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle)) + return waiter + + def notify(self, n: int = 1) -> None: + """Wake ``n`` waiters.""" + waiters = [] # Waiters we plan to run right now. + while n and self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): # Might have timed out. + n -= 1 + waiters.append(waiter) + + for waiter in waiters: + future_set_result_unless_cancelled(waiter, True) + + def notify_all(self) -> None: + """Wake all waiters.""" + self.notify(len(self._waiters)) + + +class Event(object): + """An event blocks coroutines until its internal flag is set to True. + + Similar to `threading.Event`. + + A coroutine can wait for an event to be set. Once it is set, calls to + ``yield event.wait()`` will not block unless the event has been cleared: + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Event + + event = Event() + + async def waiter(): + print("Waiting for event") + await event.wait() + print("Not waiting this time") + await event.wait() + print("Done") + + async def setter(): + print("About to set the event") + event.set() + + async def runner(): + await gen.multi([waiter(), setter()]) + + IOLoop.current().run_sync(runner) + + .. testoutput:: + + Waiting for event + About to set the event + Not waiting this time + Done + """ + + def __init__(self) -> None: + self._value = False + self._waiters = set() # type: Set[Future[None]] + + def __repr__(self) -> str: + return "<%s %s>" % ( + self.__class__.__name__, + "set" if self.is_set() else "clear", + ) + + def is_set(self) -> bool: + """Return ``True`` if the internal flag is true.""" + return self._value + + def set(self) -> None: + """Set the internal flag to ``True``. All waiters are awakened. + + Calling `.wait` once the flag is set will not block. 
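One point worth calling out in the ``Condition`` code above: a timed-out ``wait`` resolves to ``False`` instead of raising, so callers check the return value. A small sketch::

    import datetime

    from tornado.ioloop import IOLoop
    from tornado.locks import Condition

    condition = Condition()

    async def consumer():
        # True if notified, False if the one-second deadline passes first.
        notified = await condition.wait(timeout=datetime.timedelta(seconds=1))
        print("notified" if notified else "timed out")

    IOLoop.current().run_sync(consumer)   # prints "timed out": nobody notifies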
+ """ + if not self._value: + self._value = True + + for fut in self._waiters: + if not fut.done(): + fut.set_result(None) + + def clear(self) -> None: + """Reset the internal flag to ``False``. + + Calls to `.wait` will block until `.set` is called. + """ + self._value = False + + def wait(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]: + """Block until the internal flag is true. + + Returns an awaitable, which raises `tornado.util.TimeoutError` after a + timeout. + """ + fut = Future() # type: Future[None] + if self._value: + fut.set_result(None) + return fut + self._waiters.add(fut) + fut.add_done_callback(lambda fut: self._waiters.remove(fut)) + if timeout is None: + return fut + else: + timeout_fut = gen.with_timeout( + timeout, fut, quiet_exceptions=(CancelledError,) + ) + # This is a slightly clumsy workaround for the fact that + # gen.with_timeout doesn't cancel its futures. Cancelling + # fut will remove it from the waiters list. + timeout_fut.add_done_callback( + lambda tf: fut.cancel() if not fut.done() else None + ) + return timeout_fut + + +class _ReleasingContextManager(object): + """Releases a Lock or Semaphore at the end of a "with" statement. + + with (yield semaphore.acquire()): + pass + + # Now semaphore.release() has been called. + """ + + def __init__(self, obj: Any) -> None: + self._obj = obj + + def __enter__(self) -> None: + pass + + def __exit__( + self, + exc_type: "Optional[Type[BaseException]]", + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType], + ) -> None: + self._obj.release() + + +class Semaphore(_TimeoutGarbageCollector): + """A lock that can be acquired a fixed number of times before blocking. + + A Semaphore manages a counter representing the number of `.release` calls + minus the number of `.acquire` calls, plus an initial value. The `.acquire` + method blocks if necessary until it can return without making the counter + negative. + + Semaphores limit access to a shared resource. To allow access for two + workers at a time: + + .. testsetup:: semaphore + + from collections import deque + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.concurrent import Future + + # Ensure reliable doctest output: resolve Futures one at a time. + futures_q = deque([Future() for _ in range(3)]) + + async def simulator(futures): + for f in futures: + # simulate the asynchronous passage of time + await gen.sleep(0) + await gen.sleep(0) + f.set_result(None) + + IOLoop.current().add_callback(simulator, list(futures_q)) + + def use_some_resource(): + return futures_q.popleft() + + .. testcode:: semaphore + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Semaphore + + sem = Semaphore(2) + + async def worker(worker_id): + await sem.acquire() + try: + print("Worker %d is working" % worker_id) + await use_some_resource() + finally: + print("Worker %d is done" % worker_id) + sem.release() + + async def runner(): + # Join all workers. + await gen.multi([worker(i) for i in range(3)]) + + IOLoop.current().run_sync(runner) + + .. testoutput:: semaphore + + Worker 0 is working + Worker 1 is working + Worker 0 is done + Worker 2 is working + Worker 1 is done + Worker 2 is done + + Workers 0 and 1 are allowed to run concurrently, but worker 2 waits until + the semaphore has been released once, by worker 0. 
+ + The semaphore can be used as an async context manager:: + + async def worker(worker_id): + async with sem: + print("Worker %d is working" % worker_id) + await use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + For compatibility with older versions of Python, `.acquire` is a + context manager, so ``worker`` could also be written as:: + + @gen.coroutine + def worker(worker_id): + with (yield sem.acquire()): + print("Worker %d is working" % worker_id) + yield use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. + + """ + + def __init__(self, value: int = 1) -> None: + super(Semaphore, self).__init__() + if value < 0: + raise ValueError("semaphore initial value must be >= 0") + + self._value = value + + def __repr__(self) -> str: + res = super(Semaphore, self).__repr__() + extra = ( + "locked" if self._value == 0 else "unlocked,value:{0}".format(self._value) + ) + if self._waiters: + extra = "{0},waiters:{1}".format(extra, len(self._waiters)) + return "<{0} [{1}]>".format(res[1:-1], extra) + + def release(self) -> None: + """Increment the counter and wake one waiter.""" + self._value += 1 + while self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): + self._value -= 1 + + # If the waiter is a coroutine paused at + # + # with (yield semaphore.acquire()): + # + # then the context manager's __exit__ calls release() at the end + # of the "with" block. + waiter.set_result(_ReleasingContextManager(self)) + break + + def acquire( + self, timeout: Union[float, datetime.timedelta] = None + ) -> Awaitable[_ReleasingContextManager]: + """Decrement the counter. Returns an awaitable. + + Block if the counter is zero and wait for a `.release`. The awaitable + raises `.TimeoutError` after the deadline. + """ + waiter = Future() # type: Future[_ReleasingContextManager] + if self._value > 0: + self._value -= 1 + waiter.set_result(_ReleasingContextManager(self)) + else: + self._waiters.append(waiter) + if timeout: + + def on_timeout() -> None: + if not waiter.done(): + waiter.set_exception(gen.TimeoutError()) + self._garbage_collect() + + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle) + ) + return waiter + + def __enter__(self) -> None: + raise RuntimeError("Use 'async with' instead of 'with' for Semaphore") + + def __exit__( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + traceback: Optional[types.TracebackType], + ) -> None: + self.__enter__() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + tb: Optional[types.TracebackType], + ) -> None: + self.release() + + +class BoundedSemaphore(Semaphore): + """A semaphore that prevents release() being called too many times. + + If `.release` would increment the semaphore's value past the initial + value, it raises `ValueError`. Semaphores are mostly used to guard + resources with limited capacity, so a semaphore released too many times + is a sign of a bug. 
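A short sketch of the over-release guard described above; the second ``release`` has no matching ``acquire`` and is rejected::

    from tornado.ioloop import IOLoop
    from tornado.locks import BoundedSemaphore

    sem = BoundedSemaphore(value=1)

    async def over_release():
        await sem.acquire()   # counter 1 -> 0
        sem.release()         # counter 0 -> 1, back at the initial value
        sem.release()         # raises ValueError: Semaphore released too many times

    IOLoop.current().run_sync(over_release)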
+ """ + + def __init__(self, value: int = 1) -> None: + super(BoundedSemaphore, self).__init__(value=value) + self._initial_value = value + + def release(self) -> None: + """Increment the counter and wake one waiter.""" + if self._value >= self._initial_value: + raise ValueError("Semaphore released too many times") + super(BoundedSemaphore, self).release() + + +class Lock(object): + """A lock for coroutines. + + A Lock begins unlocked, and `acquire` locks it immediately. While it is + locked, a coroutine that yields `acquire` waits until another coroutine + calls `release`. + + Releasing an unlocked lock raises `RuntimeError`. + + A Lock can be used as an async context manager with the ``async + with`` statement: + + >>> from tornado import locks + >>> lock = locks.Lock() + >>> + >>> async def f(): + ... async with lock: + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + For compatibility with older versions of Python, the `.acquire` + method asynchronously returns a regular context manager: + + >>> async def f2(): + ... with (yield lock.acquire()): + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. + + """ + + def __init__(self) -> None: + self._block = BoundedSemaphore(value=1) + + def __repr__(self) -> str: + return "<%s _block=%s>" % (self.__class__.__name__, self._block) + + def acquire( + self, timeout: Union[float, datetime.timedelta] = None + ) -> Awaitable[_ReleasingContextManager]: + """Attempt to lock. Returns an awaitable. + + Returns an awaitable, which raises `tornado.util.TimeoutError` after a + timeout. + """ + return self._block.acquire(timeout) + + def release(self) -> None: + """Unlock. + + The first coroutine in line waiting for `acquire` gets the lock. + + If not locked, raise a `RuntimeError`. + """ + try: + self._block.release() + except ValueError: + raise RuntimeError("release unlocked lock") + + def __enter__(self) -> None: + raise RuntimeError("Use `async with` instead of `with` for Lock") + + def __exit__( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + tb: Optional[types.TracebackType], + ) -> None: + self.__enter__() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + tb: Optional[types.TracebackType], + ) -> None: + self.release() diff --git a/server/www/packages/packages-linux/x64/tornado/log.py b/server/www/packages/packages-linux/x64/tornado/log.py index cda905c..31c2ab9 100644 --- a/server/www/packages/packages-linux/x64/tornado/log.py +++ b/server/www/packages/packages-linux/x64/tornado/log.py @@ -1,290 +1,336 @@ -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Logging support for Tornado. 
- -Tornado uses three logger streams: - -* ``tornado.access``: Per-request logging for Tornado's HTTP servers (and - potentially other servers in the future) -* ``tornado.application``: Logging of errors from application code (i.e. - uncaught exceptions from callbacks) -* ``tornado.general``: General-purpose logging, including any errors - or warnings from Tornado itself. - -These streams may be configured independently using the standard library's -`logging` module. For example, you may wish to send ``tornado.access`` logs -to a separate file for analysis. -""" -from __future__ import absolute_import, division, print_function - -import logging -import logging.handlers -import sys - -from tornado.escape import _unicode -from tornado.util import unicode_type, basestring_type - -try: - import colorama -except ImportError: - colorama = None - -try: - import curses # type: ignore -except ImportError: - curses = None - -# Logger objects for internal tornado use -access_log = logging.getLogger("tornado.access") -app_log = logging.getLogger("tornado.application") -gen_log = logging.getLogger("tornado.general") - - -def _stderr_supports_color(): - try: - if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): - if curses: - curses.setupterm() - if curses.tigetnum("colors") > 0: - return True - elif colorama: - if sys.stderr is getattr(colorama.initialise, 'wrapped_stderr', - object()): - return True - except Exception: - # Very broad exception handling because it's always better to - # fall back to non-colored logs than to break at startup. - pass - return False - - -def _safe_unicode(s): - try: - return _unicode(s) - except UnicodeDecodeError: - return repr(s) - - -class LogFormatter(logging.Formatter): - """Log formatter used in Tornado. - - Key features of this formatter are: - - * Color support when logging to a terminal that supports it. - * Timestamps on every log line. - * Robust against str/bytes encoding problems. - - This formatter is enabled automatically by - `tornado.options.parse_command_line` or `tornado.options.parse_config_file` - (unless ``--logging=none`` is used). - - Color support on Windows versions that do not support ANSI color codes is - enabled by use of the colorama__ library. Applications that wish to use - this must first initialize colorama with a call to ``colorama.init``. - See the colorama documentation for details. - - __ https://pypi.python.org/pypi/colorama - - .. versionchanged:: 4.5 - Added support for ``colorama``. Changed the constructor - signature to be compatible with `logging.config.dictConfig`. - """ - DEFAULT_FORMAT = \ - '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' - DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' - DEFAULT_COLORS = { - logging.DEBUG: 4, # Blue - logging.INFO: 2, # Green - logging.WARNING: 3, # Yellow - logging.ERROR: 1, # Red - } - - def __init__(self, fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATE_FORMAT, - style='%', color=True, colors=DEFAULT_COLORS): - r""" - :arg bool color: Enables color support. - :arg str fmt: Log message format. - It will be applied to the attributes dict of log records. The - text between ``%(color)s`` and ``%(end_color)s`` will be colored - depending on the level if color support is on. - :arg dict colors: color mappings from logging level to terminal color - code - :arg str datefmt: Datetime format. - Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. - - .. versionchanged:: 3.2 - - Added ``fmt`` and ``datefmt`` arguments. 
- """ - logging.Formatter.__init__(self, datefmt=datefmt) - self._fmt = fmt - - self._colors = {} - if color and _stderr_supports_color(): - if curses is not None: - # The curses module has some str/bytes confusion in - # python3. Until version 3.2.3, most methods return - # bytes, but only accept strings. In addition, we want to - # output these strings with the logging module, which - # works with unicode strings. The explicit calls to - # unicode() below are harmless in python2 but will do the - # right conversion in python 3. - fg_color = (curses.tigetstr("setaf") or - curses.tigetstr("setf") or "") - if (3, 0) < sys.version_info < (3, 2, 3): - fg_color = unicode_type(fg_color, "ascii") - - for levelno, code in colors.items(): - self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii") - self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") - else: - # If curses is not present (currently we'll only get here for - # colorama on windows), assume hard-coded ANSI color codes. - for levelno, code in colors.items(): - self._colors[levelno] = '\033[2;3%dm' % code - self._normal = '\033[0m' - else: - self._normal = '' - - def format(self, record): - try: - message = record.getMessage() - assert isinstance(message, basestring_type) # guaranteed by logging - # Encoding notes: The logging module prefers to work with character - # strings, but only enforces that log messages are instances of - # basestring. In python 2, non-ascii bytestrings will make - # their way through the logging framework until they blow up with - # an unhelpful decoding error (with this formatter it happens - # when we attach the prefix, but there are other opportunities for - # exceptions further along in the framework). - # - # If a byte string makes it this far, convert it to unicode to - # ensure it will make it out to the logs. Use repr() as a fallback - # to ensure that all byte strings can be converted successfully, - # but don't do it by default so we don't add extra quotes to ascii - # bytestrings. This is a bit of a hacky place to do this, but - # it's worth it since the encoding errors that would otherwise - # result are so useless (and tornado is fond of using utf8-encoded - # byte strings wherever possible). - record.message = _safe_unicode(message) - except Exception as e: - record.message = "Bad message (%r): %r" % (e, record.__dict__) - - record.asctime = self.formatTime(record, self.datefmt) - - if record.levelno in self._colors: - record.color = self._colors[record.levelno] - record.end_color = self._normal - else: - record.color = record.end_color = '' - - formatted = self._fmt % record.__dict__ - - if record.exc_info: - if not record.exc_text: - record.exc_text = self.formatException(record.exc_info) - if record.exc_text: - # exc_text contains multiple lines. We need to _safe_unicode - # each line separately so that non-utf8 bytes don't cause - # all the newlines to turn into '\n'. - lines = [formatted.rstrip()] - lines.extend(_safe_unicode(ln) for ln in record.exc_text.split('\n')) - formatted = '\n'.join(lines) - return formatted.replace("\n", "\n ") - - -def enable_pretty_logging(options=None, logger=None): - """Turns on formatted logging output as configured. - - This is called automatically by `tornado.options.parse_command_line` - and `tornado.options.parse_config_file`. 
- """ - if options is None: - import tornado.options - options = tornado.options.options - if options.logging is None or options.logging.lower() == 'none': - return - if logger is None: - logger = logging.getLogger() - logger.setLevel(getattr(logging, options.logging.upper())) - if options.log_file_prefix: - rotate_mode = options.log_rotate_mode - if rotate_mode == 'size': - channel = logging.handlers.RotatingFileHandler( - filename=options.log_file_prefix, - maxBytes=options.log_file_max_size, - backupCount=options.log_file_num_backups) - elif rotate_mode == 'time': - channel = logging.handlers.TimedRotatingFileHandler( - filename=options.log_file_prefix, - when=options.log_rotate_when, - interval=options.log_rotate_interval, - backupCount=options.log_file_num_backups) - else: - error_message = 'The value of log_rotate_mode option should be ' +\ - '"size" or "time", not "%s".' % rotate_mode - raise ValueError(error_message) - channel.setFormatter(LogFormatter(color=False)) - logger.addHandler(channel) - - if (options.log_to_stderr or - (options.log_to_stderr is None and not logger.handlers)): - # Set up color if we are in a tty and curses is installed - channel = logging.StreamHandler() - channel.setFormatter(LogFormatter()) - logger.addHandler(channel) - - -def define_logging_options(options=None): - """Add logging-related flags to ``options``. - - These options are present automatically on the default options instance; - this method is only necessary if you have created your own `.OptionParser`. - - .. versionadded:: 4.2 - This function existed in prior versions but was broken and undocumented until 4.2. - """ - if options is None: - # late import to prevent cycle - import tornado.options - options = tornado.options.options - options.define("logging", default="info", - help=("Set the Python log level. If 'none', tornado won't touch the " - "logging configuration."), - metavar="debug|info|warning|error|none") - options.define("log_to_stderr", type=bool, default=None, - help=("Send log output to stderr (colorized if possible). " - "By default use stderr if --log_file_prefix is not set and " - "no other logging is configured.")) - options.define("log_file_prefix", type=str, default=None, metavar="PATH", - help=("Path prefix for log files. " - "Note that if you are running multiple tornado processes, " - "log_file_prefix must be different for each of them (e.g. " - "include the port number)")) - options.define("log_file_max_size", type=int, default=100 * 1000 * 1000, - help="max size of log files before rollover") - options.define("log_file_num_backups", type=int, default=10, - help="number of log files to keep") - - options.define("log_rotate_when", type=str, default='midnight', - help=("specify the type of TimedRotatingFileHandler interval " - "other options:('S', 'M', 'H', 'D', 'W0'-'W6')")) - options.define("log_rotate_interval", type=int, default=1, - help="The interval value of timed rotating") - - options.define("log_rotate_mode", type=str, default='size', - help="The mode of rotating files(time or size)") - - options.add_parse_callback(lambda: enable_pretty_logging(options)) +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Logging support for Tornado. + +Tornado uses three logger streams: + +* ``tornado.access``: Per-request logging for Tornado's HTTP servers (and + potentially other servers in the future) +* ``tornado.application``: Logging of errors from application code (i.e. + uncaught exceptions from callbacks) +* ``tornado.general``: General-purpose logging, including any errors + or warnings from Tornado itself. + +These streams may be configured independently using the standard library's +`logging` module. For example, you may wish to send ``tornado.access`` logs +to a separate file for analysis. +""" +import logging +import logging.handlers +import sys + +from tornado.escape import _unicode +from tornado.util import unicode_type, basestring_type + +try: + import colorama # type: ignore +except ImportError: + colorama = None + +try: + import curses +except ImportError: + curses = None # type: ignore + +from typing import Dict, Any, cast + +# Logger objects for internal tornado use +access_log = logging.getLogger("tornado.access") +app_log = logging.getLogger("tornado.application") +gen_log = logging.getLogger("tornado.general") + + +def _stderr_supports_color() -> bool: + try: + if hasattr(sys.stderr, "isatty") and sys.stderr.isatty(): + if curses: + curses.setupterm() + if curses.tigetnum("colors") > 0: + return True + elif colorama: + if sys.stderr is getattr( + colorama.initialise, "wrapped_stderr", object() + ): + return True + except Exception: + # Very broad exception handling because it's always better to + # fall back to non-colored logs than to break at startup. + pass + return False + + +def _safe_unicode(s: Any) -> str: + try: + return _unicode(s) + except UnicodeDecodeError: + return repr(s) + + +class LogFormatter(logging.Formatter): + """Log formatter used in Tornado. + + Key features of this formatter are: + + * Color support when logging to a terminal that supports it. + * Timestamps on every log line. + * Robust against str/bytes encoding problems. + + This formatter is enabled automatically by + `tornado.options.parse_command_line` or `tornado.options.parse_config_file` + (unless ``--logging=none`` is used). + + Color support on Windows versions that do not support ANSI color codes is + enabled by use of the colorama__ library. Applications that wish to use + this must first initialize colorama with a call to ``colorama.init``. + See the colorama documentation for details. + + __ https://pypi.python.org/pypi/colorama + + .. versionchanged:: 4.5 + Added support for ``colorama``. Changed the constructor + signature to be compatible with `logging.config.dictConfig`. 
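The formatter can also be attached to an ordinary handler outside of ``tornado.options``, with the color table overridden per level. A sketch (the color values are standard terminal color indexes, as in ``DEFAULT_COLORS`` below)::

    import logging

    from tornado.log import LogFormatter

    handler = logging.StreamHandler()
    handler.setFormatter(
        LogFormatter(
            # Same as the defaults, but render INFO in cyan (6) instead of green (2).
            colors={logging.DEBUG: 4, logging.INFO: 6, logging.WARNING: 3, logging.ERROR: 1}
        )
    )
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(logging.INFO)

    root.info("colored, timestamped output on a capable terminal")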
+ """ + + DEFAULT_FORMAT = "%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s" # noqa: E501 + DEFAULT_DATE_FORMAT = "%y%m%d %H:%M:%S" + DEFAULT_COLORS = { + logging.DEBUG: 4, # Blue + logging.INFO: 2, # Green + logging.WARNING: 3, # Yellow + logging.ERROR: 1, # Red + } + + def __init__( + self, + fmt: str = DEFAULT_FORMAT, + datefmt: str = DEFAULT_DATE_FORMAT, + style: str = "%", + color: bool = True, + colors: Dict[int, int] = DEFAULT_COLORS, + ) -> None: + r""" + :arg bool color: Enables color support. + :arg str fmt: Log message format. + It will be applied to the attributes dict of log records. The + text between ``%(color)s`` and ``%(end_color)s`` will be colored + depending on the level if color support is on. + :arg dict colors: color mappings from logging level to terminal color + code + :arg str datefmt: Datetime format. + Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. + + .. versionchanged:: 3.2 + + Added ``fmt`` and ``datefmt`` arguments. + """ + logging.Formatter.__init__(self, datefmt=datefmt) + self._fmt = fmt + + self._colors = {} # type: Dict[int, str] + if color and _stderr_supports_color(): + if curses is not None: + fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" + + for levelno, code in colors.items(): + # Convert the terminal control characters from + # bytes to unicode strings for easier use with the + # logging module. + self._colors[levelno] = unicode_type( + curses.tparm(fg_color, code), "ascii" + ) + self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") + else: + # If curses is not present (currently we'll only get here for + # colorama on windows), assume hard-coded ANSI color codes. + for levelno, code in colors.items(): + self._colors[levelno] = "\033[2;3%dm" % code + self._normal = "\033[0m" + else: + self._normal = "" + + def format(self, record: Any) -> str: + try: + message = record.getMessage() + assert isinstance(message, basestring_type) # guaranteed by logging + # Encoding notes: The logging module prefers to work with character + # strings, but only enforces that log messages are instances of + # basestring. In python 2, non-ascii bytestrings will make + # their way through the logging framework until they blow up with + # an unhelpful decoding error (with this formatter it happens + # when we attach the prefix, but there are other opportunities for + # exceptions further along in the framework). + # + # If a byte string makes it this far, convert it to unicode to + # ensure it will make it out to the logs. Use repr() as a fallback + # to ensure that all byte strings can be converted successfully, + # but don't do it by default so we don't add extra quotes to ascii + # bytestrings. This is a bit of a hacky place to do this, but + # it's worth it since the encoding errors that would otherwise + # result are so useless (and tornado is fond of using utf8-encoded + # byte strings wherever possible). + record.message = _safe_unicode(message) + except Exception as e: + record.message = "Bad message (%r): %r" % (e, record.__dict__) + + record.asctime = self.formatTime(record, cast(str, self.datefmt)) + + if record.levelno in self._colors: + record.color = self._colors[record.levelno] + record.end_color = self._normal + else: + record.color = record.end_color = "" + + formatted = self._fmt % record.__dict__ + + if record.exc_info: + if not record.exc_text: + record.exc_text = self.formatException(record.exc_info) + if record.exc_text: + # exc_text contains multiple lines. 
We need to _safe_unicode + # each line separately so that non-utf8 bytes don't cause + # all the newlines to turn into '\n'. + lines = [formatted.rstrip()] + lines.extend(_safe_unicode(ln) for ln in record.exc_text.split("\n")) + formatted = "\n".join(lines) + return formatted.replace("\n", "\n ") + + +def enable_pretty_logging(options: Any = None, logger: logging.Logger = None) -> None: + """Turns on formatted logging output as configured. + + This is called automatically by `tornado.options.parse_command_line` + and `tornado.options.parse_config_file`. + """ + if options is None: + import tornado.options + + options = tornado.options.options + if options.logging is None or options.logging.lower() == "none": + return + if logger is None: + logger = logging.getLogger() + logger.setLevel(getattr(logging, options.logging.upper())) + if options.log_file_prefix: + rotate_mode = options.log_rotate_mode + if rotate_mode == "size": + channel = logging.handlers.RotatingFileHandler( + filename=options.log_file_prefix, + maxBytes=options.log_file_max_size, + backupCount=options.log_file_num_backups, + encoding="utf-8", + ) # type: logging.Handler + elif rotate_mode == "time": + channel = logging.handlers.TimedRotatingFileHandler( + filename=options.log_file_prefix, + when=options.log_rotate_when, + interval=options.log_rotate_interval, + backupCount=options.log_file_num_backups, + encoding="utf-8", + ) + else: + error_message = ( + "The value of log_rotate_mode option should be " + + '"size" or "time", not "%s".' % rotate_mode + ) + raise ValueError(error_message) + channel.setFormatter(LogFormatter(color=False)) + logger.addHandler(channel) + + if options.log_to_stderr or (options.log_to_stderr is None and not logger.handlers): + # Set up color if we are in a tty and curses is installed + channel = logging.StreamHandler() + channel.setFormatter(LogFormatter()) + logger.addHandler(channel) + + +def define_logging_options(options: Any = None) -> None: + """Add logging-related flags to ``options``. + + These options are present automatically on the default options instance; + this method is only necessary if you have created your own `.OptionParser`. + + .. versionadded:: 4.2 + This function existed in prior versions but was broken and undocumented until 4.2. + """ + if options is None: + # late import to prevent cycle + import tornado.options + + options = tornado.options.options + options.define( + "logging", + default="info", + help=( + "Set the Python log level. If 'none', tornado won't touch the " + "logging configuration." + ), + metavar="debug|info|warning|error|none", + ) + options.define( + "log_to_stderr", + type=bool, + default=None, + help=( + "Send log output to stderr (colorized if possible). " + "By default use stderr if --log_file_prefix is not set and " + "no other logging is configured." + ), + ) + options.define( + "log_file_prefix", + type=str, + default=None, + metavar="PATH", + help=( + "Path prefix for log files. " + "Note that if you are running multiple tornado processes, " + "log_file_prefix must be different for each of them (e.g. 
" + "include the port number)" + ), + ) + options.define( + "log_file_max_size", + type=int, + default=100 * 1000 * 1000, + help="max size of log files before rollover", + ) + options.define( + "log_file_num_backups", type=int, default=10, help="number of log files to keep" + ) + + options.define( + "log_rotate_when", + type=str, + default="midnight", + help=( + "specify the type of TimedRotatingFileHandler interval " + "other options:('S', 'M', 'H', 'D', 'W0'-'W6')" + ), + ) + options.define( + "log_rotate_interval", + type=int, + default=1, + help="The interval value of timed rotating", + ) + + options.define( + "log_rotate_mode", + type=str, + default="size", + help="The mode of rotating files(time or size)", + ) + + options.add_parse_callback(lambda: enable_pretty_logging(options)) diff --git a/server/www/packages/packages-linux/x64/tornado/netutil.py b/server/www/packages/packages-linux/x64/tornado/netutil.py index e63683a..5bc8478 100644 --- a/server/www/packages/packages-linux/x64/tornado/netutil.py +++ b/server/www/packages/packages-linux/x64/tornado/netutil.py @@ -1,575 +1,614 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Miscellaneous network utility code.""" - -from __future__ import absolute_import, division, print_function - -import errno -import os -import sys -import socket -import stat - -from tornado.concurrent import dummy_executor, run_on_executor -from tornado import gen -from tornado.ioloop import IOLoop -from tornado.platform.auto import set_close_exec -from tornado.util import PY3, Configurable, errno_from_exception - -try: - import ssl -except ImportError: - # ssl is not available on Google App Engine - ssl = None - -if PY3: - xrange = range - -if ssl is not None: - # Note that the naming of ssl.Purpose is confusing; the purpose - # of a context is to authentiate the opposite side of the connection. - _client_ssl_defaults = ssl.create_default_context( - ssl.Purpose.SERVER_AUTH) - _server_ssl_defaults = ssl.create_default_context( - ssl.Purpose.CLIENT_AUTH) - if hasattr(ssl, 'OP_NO_COMPRESSION'): - # See netutil.ssl_options_to_context - _client_ssl_defaults.options |= ssl.OP_NO_COMPRESSION - _server_ssl_defaults.options |= ssl.OP_NO_COMPRESSION -else: - # Google App Engine - _client_ssl_defaults = dict(cert_reqs=None, - ca_certs=None) - _server_ssl_defaults = {} - -# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode, -# getaddrinfo attempts to import encodings.idna. If this is done at -# module-import time, the import lock is already held by the main thread, -# leading to deadlock. Avoid it by caching the idna encoder on the main -# thread now. -u'foo'.encode('idna') - -# For undiagnosed reasons, 'latin1' codec may also need to be preloaded. -u'foo'.encode('latin1') - -# These errnos indicate that a non-blocking operation must be retried -# at a later time. On most platforms they're the same value, but on -# some they differ. 
-_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) - -if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore - -# Default backlog used when calling sock.listen() -_DEFAULT_BACKLOG = 128 - - -def bind_sockets(port, address=None, family=socket.AF_UNSPEC, - backlog=_DEFAULT_BACKLOG, flags=None, reuse_port=False): - """Creates listening sockets bound to the given port and address. - - Returns a list of socket objects (multiple sockets are returned if - the given address maps to multiple IP addresses, which is most common - for mixed IPv4 and IPv6 use). - - Address may be either an IP address or hostname. If it's a hostname, - the server will listen on all IP addresses associated with the - name. Address may be an empty string or None to listen on all - available interfaces. Family may be set to either `socket.AF_INET` - or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise - both will be used if available. - - The ``backlog`` argument has the same meaning as for - `socket.listen() `. - - ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like - ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. - - ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket - in the list. If your platform doesn't support this option ValueError will - be raised. - """ - if reuse_port and not hasattr(socket, "SO_REUSEPORT"): - raise ValueError("the platform doesn't support SO_REUSEPORT") - - sockets = [] - if address == "": - address = None - if not socket.has_ipv6 and family == socket.AF_UNSPEC: - # Python can be compiled with --disable-ipv6, which causes - # operations on AF_INET6 sockets to fail, but does not - # automatically exclude those results from getaddrinfo - # results. - # http://bugs.python.org/issue16208 - family = socket.AF_INET - if flags is None: - flags = socket.AI_PASSIVE - bound_port = None - for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM, - 0, flags)): - af, socktype, proto, canonname, sockaddr = res - if (sys.platform == 'darwin' and address == 'localhost' and - af == socket.AF_INET6 and sockaddr[3] != 0): - # Mac OS X includes a link-local address fe80::1%lo0 in the - # getaddrinfo results for 'localhost'. However, the firewall - # doesn't understand that this is a local address and will - # prompt for access (often repeatedly, due to an apparent - # bug in its ability to remember granting access to an - # application). Skip these addresses. - continue - try: - sock = socket.socket(af, socktype, proto) - except socket.error as e: - if errno_from_exception(e) == errno.EAFNOSUPPORT: - continue - raise - set_close_exec(sock.fileno()) - if os.name != 'nt': - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except socket.error as e: - if errno_from_exception(e) != errno.ENOPROTOOPT: - # Hurd doesn't support SO_REUSEADDR. - raise - if reuse_port: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - if af == socket.AF_INET6: - # On linux, ipv6 sockets accept ipv4 too by default, - # but this makes it impossible to bind to both - # 0.0.0.0 in ipv4 and :: in ipv6. On other systems, - # separate sockets *must* be used to listen for both ipv4 - # and ipv6. For consistency, always disable ipv4 on our - # ipv6 sockets and use a separate ipv4 socket when needed. - # - # Python 2.x on windows doesn't have IPPROTO_IPV6. 
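The dual-stack behavior described in the comment above is easy to reproduce in
isolation (a standalone sketch; port 8888 is arbitrary, and the default being
worked around is Linux with ``net.ipv6.bindv6only=0``)::

    import socket

    # With IPV6_V6ONLY set, the wildcard IPv6 bind no longer claims the
    # IPv4 side of the port, so a separate IPv4 socket can bind it too.
    s6 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    s6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
    s6.bind(("::", 8888))

    s4 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s4.bind(("0.0.0.0", 8888))  # would raise EADDRINUSE without V6ONLY above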
- if hasattr(socket, "IPPROTO_IPV6"): - sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) - - # automatic port allocation with port=None - # should bind on the same port on IPv4 and IPv6 - host, requested_port = sockaddr[:2] - if requested_port == 0 and bound_port is not None: - sockaddr = tuple([host, bound_port] + list(sockaddr[2:])) - - sock.setblocking(0) - sock.bind(sockaddr) - bound_port = sock.getsockname()[1] - sock.listen(backlog) - sockets.append(sock) - return sockets - - -if hasattr(socket, 'AF_UNIX'): - def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG): - """Creates a listening unix socket. - - If a socket with the given name already exists, it will be deleted. - If any other file with that name exists, an exception will be - raised. - - Returns a socket object (not a list of socket objects like - `bind_sockets`) - """ - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - set_close_exec(sock.fileno()) - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except socket.error as e: - if errno_from_exception(e) != errno.ENOPROTOOPT: - # Hurd doesn't support SO_REUSEADDR - raise - sock.setblocking(0) - try: - st = os.stat(file) - except OSError as err: - if errno_from_exception(err) != errno.ENOENT: - raise - else: - if stat.S_ISSOCK(st.st_mode): - os.remove(file) - else: - raise ValueError("File %s exists and is not a socket", file) - sock.bind(file) - os.chmod(file, mode) - sock.listen(backlog) - return sock - - -def add_accept_handler(sock, callback): - """Adds an `.IOLoop` event handler to accept new connections on ``sock``. - - When a connection is accepted, ``callback(connection, address)`` will - be run (``connection`` is a socket object, and ``address`` is the - address of the other end of the connection). Note that this signature - is different from the ``callback(fd, events)`` signature used for - `.IOLoop` handlers. - - A callable is returned which, when called, will remove the `.IOLoop` - event handler and stop processing further incoming connections. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. versionchanged:: 5.0 - A callable is returned (``None`` was returned before). - """ - io_loop = IOLoop.current() - removed = [False] - - def accept_handler(fd, events): - # More connections may come in while we're handling callbacks; - # to prevent starvation of other tasks we must limit the number - # of connections we accept at a time. Ideally we would accept - # up to the number of connections that were waiting when we - # entered this method, but this information is not available - # (and rearranging this method to call accept() as many times - # as possible before running any callbacks would have adverse - # effects on load balancing in multiprocess configurations). - # Instead, we use the (default) listen backlog as a rough - # heuristic for the number of connections we can reasonably - # accept at once. - for i in xrange(_DEFAULT_BACKLOG): - if removed[0]: - # The socket was probably closed - return - try: - connection, address = sock.accept() - except socket.error as e: - # _ERRNO_WOULDBLOCK indicate we have accepted every - # connection that is available. - if errno_from_exception(e) in _ERRNO_WOULDBLOCK: - return - # ECONNABORTED indicates that there was a connection - # but it was closed while still in the accept queue. - # (observed on FreeBSD). 
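Putting ``bind_sockets`` and ``add_accept_handler`` together, a minimal
listener looks roughly like the following (the ``on_connect`` greeting handler
and the port are invented for the example)::

    from tornado.ioloop import IOLoop
    from tornado.netutil import add_accept_handler, bind_sockets

    def on_connect(connection, address):
        # Toy handler: greet and hang up. A real server would hand the
        # socket to an IOStream rather than doing blocking I/O here.
        connection.sendall(b"hello\n")
        connection.close()

    removers = [add_accept_handler(sock, on_connect)
                for sock in bind_sockets(8888, address="127.0.0.1")]
    IOLoop.current().start()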
- if errno_from_exception(e) == errno.ECONNABORTED: - continue - raise - set_close_exec(connection.fileno()) - callback(connection, address) - - def remove_handler(): - io_loop.remove_handler(sock) - removed[0] = True - - io_loop.add_handler(sock, accept_handler, IOLoop.READ) - return remove_handler - - -def is_valid_ip(ip): - """Returns true if the given string is a well-formed IP address. - - Supports IPv4 and IPv6. - """ - if not ip or '\x00' in ip: - # getaddrinfo resolves empty strings to localhost, and truncates - # on zero bytes. - return False - try: - res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC, - socket.SOCK_STREAM, - 0, socket.AI_NUMERICHOST) - return bool(res) - except socket.gaierror as e: - if e.args[0] == socket.EAI_NONAME: - return False - raise - return True - - -class Resolver(Configurable): - """Configurable asynchronous DNS resolver interface. - - By default, a blocking implementation is used (which simply calls - `socket.getaddrinfo`). An alternative implementation can be - chosen with the `Resolver.configure <.Configurable.configure>` - class method:: - - Resolver.configure('tornado.netutil.ThreadedResolver') - - The implementations of this interface included with Tornado are - - * `tornado.netutil.DefaultExecutorResolver` - * `tornado.netutil.BlockingResolver` (deprecated) - * `tornado.netutil.ThreadedResolver` (deprecated) - * `tornado.netutil.OverrideResolver` - * `tornado.platform.twisted.TwistedResolver` - * `tornado.platform.caresresolver.CaresResolver` - - .. versionchanged:: 5.0 - The default implementation has changed from `BlockingResolver` to - `DefaultExecutorResolver`. - """ - @classmethod - def configurable_base(cls): - return Resolver - - @classmethod - def configurable_default(cls): - return DefaultExecutorResolver - - def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None): - """Resolves an address. - - The ``host`` argument is a string which may be a hostname or a - literal IP address. - - Returns a `.Future` whose result is a list of (family, - address) pairs, where address is a tuple suitable to pass to - `socket.connect ` (i.e. a ``(host, - port)`` pair for IPv4; additional fields may be present for - IPv6). If a ``callback`` is passed, it will be run with the - result as an argument when it is complete. - - :raises IOError: if the address cannot be resolved. - - .. versionchanged:: 4.4 - Standardized all implementations to raise `IOError`. - - .. deprecated:: 5.1 - The ``callback`` argument is deprecated and will be removed in 6.0. - Use the returned awaitable object instead. - """ - raise NotImplementedError() - - def close(self): - """Closes the `Resolver`, freeing any resources used. - - .. versionadded:: 3.1 - - """ - pass - - -def _resolve_addr(host, port, family=socket.AF_UNSPEC): - # On Solaris, getaddrinfo fails if the given port is not found - # in /etc/services and no socket type is given, so we must pass - # one here. The socket type used here doesn't seem to actually - # matter (we discard the one we get back in the results), - # so the addresses we return should still be usable with SOCK_DGRAM. - addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM) - results = [] - for family, socktype, proto, canonname, address in addrinfo: - results.append((family, address)) - return results - - -class DefaultExecutorResolver(Resolver): - """Resolver implementation using `.IOLoop.run_in_executor`. - - .. 
versionadded:: 5.0 - """ - @gen.coroutine - def resolve(self, host, port, family=socket.AF_UNSPEC): - result = yield IOLoop.current().run_in_executor( - None, _resolve_addr, host, port, family) - raise gen.Return(result) - - -class ExecutorResolver(Resolver): - """Resolver implementation using a `concurrent.futures.Executor`. - - Use this instead of `ThreadedResolver` when you require additional - control over the executor being used. - - The executor will be shut down when the resolver is closed unless - ``close_resolver=False``; use this if you want to reuse the same - executor elsewhere. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. deprecated:: 5.0 - The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead - of this class. - """ - def initialize(self, executor=None, close_executor=True): - self.io_loop = IOLoop.current() - if executor is not None: - self.executor = executor - self.close_executor = close_executor - else: - self.executor = dummy_executor - self.close_executor = False - - def close(self): - if self.close_executor: - self.executor.shutdown() - self.executor = None - - @run_on_executor - def resolve(self, host, port, family=socket.AF_UNSPEC): - return _resolve_addr(host, port, family) - - -class BlockingResolver(ExecutorResolver): - """Default `Resolver` implementation, using `socket.getaddrinfo`. - - The `.IOLoop` will be blocked during the resolution, although the - callback will not be run until the next `.IOLoop` iteration. - - .. deprecated:: 5.0 - The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead - of this class. - """ - def initialize(self): - super(BlockingResolver, self).initialize() - - -class ThreadedResolver(ExecutorResolver): - """Multithreaded non-blocking `Resolver` implementation. - - Requires the `concurrent.futures` package to be installed - (available in the standard library since Python 3.2, - installable with ``pip install futures`` in older versions). - - The thread pool size can be configured with:: - - Resolver.configure('tornado.netutil.ThreadedResolver', - num_threads=10) - - .. versionchanged:: 3.1 - All ``ThreadedResolvers`` share a single thread pool, whose - size is set by the first one to be created. - - .. deprecated:: 5.0 - The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead - of this class. - """ - _threadpool = None # type: ignore - _threadpool_pid = None # type: int - - def initialize(self, num_threads=10): - threadpool = ThreadedResolver._create_threadpool(num_threads) - super(ThreadedResolver, self).initialize( - executor=threadpool, close_executor=False) - - @classmethod - def _create_threadpool(cls, num_threads): - pid = os.getpid() - if cls._threadpool_pid != pid: - # Threads cannot survive after a fork, so if our pid isn't what it - # was when we created the pool then delete it. - cls._threadpool = None - if cls._threadpool is None: - from concurrent.futures import ThreadPoolExecutor - cls._threadpool = ThreadPoolExecutor(num_threads) - cls._threadpool_pid = pid - return cls._threadpool - - -class OverrideResolver(Resolver): - """Wraps a resolver with a mapping of overrides. - - This can be used to make local DNS changes (e.g. for testing) - without modifying system-wide settings. 
- - The mapping can be in three formats:: - - { - # Hostname to host or ip - "example.com": "127.0.1.1", - - # Host+port to host+port - ("login.example.com", 443): ("localhost", 1443), - - # Host+port+address family to host+port - ("login.example.com", 443, socket.AF_INET6): ("::1", 1443), - } - - .. versionchanged:: 5.0 - Added support for host-port-family triplets. - """ - def initialize(self, resolver, mapping): - self.resolver = resolver - self.mapping = mapping - - def close(self): - self.resolver.close() - - def resolve(self, host, port, family=socket.AF_UNSPEC, *args, **kwargs): - if (host, port, family) in self.mapping: - host, port = self.mapping[(host, port, family)] - elif (host, port) in self.mapping: - host, port = self.mapping[(host, port)] - elif host in self.mapping: - host = self.mapping[host] - return self.resolver.resolve(host, port, family, *args, **kwargs) - - -# These are the keyword arguments to ssl.wrap_socket that must be translated -# to their SSLContext equivalents (the other arguments are still passed -# to SSLContext.wrap_socket). -_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile', - 'cert_reqs', 'ca_certs', 'ciphers']) - - -def ssl_options_to_context(ssl_options): - """Try to convert an ``ssl_options`` dictionary to an - `~ssl.SSLContext` object. - - The ``ssl_options`` dictionary contains keywords to be passed to - `ssl.wrap_socket`. In Python 2.7.9+, `ssl.SSLContext` objects can - be used instead. This function converts the dict form to its - `~ssl.SSLContext` equivalent, and may be used when a component which - accepts both forms needs to upgrade to the `~ssl.SSLContext` version - to use features like SNI or NPN. - """ - if isinstance(ssl_options, ssl.SSLContext): - return ssl_options - assert isinstance(ssl_options, dict) - assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options - # Can't use create_default_context since this interface doesn't - # tell us client vs server. - context = ssl.SSLContext( - ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23)) - if 'certfile' in ssl_options: - context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None)) - if 'cert_reqs' in ssl_options: - context.verify_mode = ssl_options['cert_reqs'] - if 'ca_certs' in ssl_options: - context.load_verify_locations(ssl_options['ca_certs']) - if 'ciphers' in ssl_options: - context.set_ciphers(ssl_options['ciphers']) - if hasattr(ssl, 'OP_NO_COMPRESSION'): - # Disable TLS compression to avoid CRIME and related attacks. - # This constant depends on openssl version 1.0. - # TODO: Do we need to do this ourselves or can we trust - # the defaults? - context.options |= ssl.OP_NO_COMPRESSION - return context - - -def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): - """Returns an ``ssl.SSLSocket`` wrapping the given socket. - - ``ssl_options`` may be either an `ssl.SSLContext` object or a - dictionary (as accepted by `ssl_options_to_context`). Additional - keyword arguments are passed to ``wrap_socket`` (either the - `~ssl.SSLContext` method or the `ssl` module function as - appropriate). - """ - context = ssl_options_to_context(ssl_options) - if ssl.HAS_SNI: - # In python 3.4, wrap_socket only accepts the server_hostname - # argument if HAS_SNI is true. 
- # TODO: add a unittest (python added server-side SNI support in 3.4) - # In the meantime it can be manually tested with - # python3 -m tornado.httpclient https://sni.velox.ch - return context.wrap_socket(socket, server_hostname=server_hostname, - **kwargs) - else: - return context.wrap_socket(socket, **kwargs) +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Miscellaneous network utility code.""" + +import concurrent.futures +import errno +import os +import sys +import socket +import ssl +import stat + +from tornado.concurrent import dummy_executor, run_on_executor +from tornado.ioloop import IOLoop +from tornado.platform.auto import set_close_exec +from tornado.util import Configurable, errno_from_exception + +import typing +from typing import List, Callable, Any, Type, Dict, Union, Tuple, Awaitable + +if typing.TYPE_CHECKING: + from asyncio import Future # noqa: F401 + +# Note that the naming of ssl.Purpose is confusing; the purpose +# of a context is to authentiate the opposite side of the connection. +_client_ssl_defaults = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) +_server_ssl_defaults = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) +if hasattr(ssl, "OP_NO_COMPRESSION"): + # See netutil.ssl_options_to_context + _client_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + _server_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + +# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode, +# getaddrinfo attempts to import encodings.idna. If this is done at +# module-import time, the import lock is already held by the main thread, +# leading to deadlock. Avoid it by caching the idna encoder on the main +# thread now. +u"foo".encode("idna") + +# For undiagnosed reasons, 'latin1' codec may also need to be preloaded. +u"foo".encode("latin1") + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore + +# Default backlog used when calling sock.listen() +_DEFAULT_BACKLOG = 128 + + +def bind_sockets( + port: int, + address: str = None, + family: socket.AddressFamily = socket.AF_UNSPEC, + backlog: int = _DEFAULT_BACKLOG, + flags: int = None, + reuse_port: bool = False, +) -> List[socket.socket]: + """Creates listening sockets bound to the given port and address. + + Returns a list of socket objects (multiple sockets are returned if + the given address maps to multiple IP addresses, which is most common + for mixed IPv4 and IPv6 use). + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. 
Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen() `. + + ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like + ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. + + ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket + in the list. If your platform doesn't support this option ValueError will + be raised. + """ + if reuse_port and not hasattr(socket, "SO_REUSEPORT"): + raise ValueError("the platform doesn't support SO_REUSEPORT") + + sockets = [] + if address == "": + address = None + if not socket.has_ipv6 and family == socket.AF_UNSPEC: + # Python can be compiled with --disable-ipv6, which causes + # operations on AF_INET6 sockets to fail, but does not + # automatically exclude those results from getaddrinfo + # results. + # http://bugs.python.org/issue16208 + family = socket.AF_INET + if flags is None: + flags = socket.AI_PASSIVE + bound_port = None + unique_addresses = set() # type: set + for res in sorted( + socket.getaddrinfo(address, port, family, socket.SOCK_STREAM, 0, flags), + key=lambda x: x[0], + ): + if res in unique_addresses: + continue + + unique_addresses.add(res) + + af, socktype, proto, canonname, sockaddr = res + if ( + sys.platform == "darwin" + and address == "localhost" + and af == socket.AF_INET6 + and sockaddr[3] != 0 + ): + # Mac OS X includes a link-local address fe80::1%lo0 in the + # getaddrinfo results for 'localhost'. However, the firewall + # doesn't understand that this is a local address and will + # prompt for access (often repeatedly, due to an apparent + # bug in its ability to remember granting access to an + # application). Skip these addresses. + continue + try: + sock = socket.socket(af, socktype, proto) + except socket.error as e: + if errno_from_exception(e) == errno.EAFNOSUPPORT: + continue + raise + set_close_exec(sock.fileno()) + if os.name != "nt": + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + except socket.error as e: + if errno_from_exception(e) != errno.ENOPROTOOPT: + # Hurd doesn't support SO_REUSEADDR. + raise + if reuse_port: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + if af == socket.AF_INET6: + # On linux, ipv6 sockets accept ipv4 too by default, + # but this makes it impossible to bind to both + # 0.0.0.0 in ipv4 and :: in ipv6. On other systems, + # separate sockets *must* be used to listen for both ipv4 + # and ipv6. For consistency, always disable ipv4 on our + # ipv6 sockets and use a separate ipv4 socket when needed. + # + # Python 2.x on windows doesn't have IPPROTO_IPV6. + if hasattr(socket, "IPPROTO_IPV6"): + sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # automatic port allocation with port=None + # should bind on the same port on IPv4 and IPv6 + host, requested_port = sockaddr[:2] + if requested_port == 0 and bound_port is not None: + sockaddr = tuple([host, bound_port] + list(sockaddr[2:])) + + sock.setblocking(False) + sock.bind(sockaddr) + bound_port = sock.getsockname()[1] + sock.listen(backlog) + sockets.append(sock) + return sockets + + +if hasattr(socket, "AF_UNIX"): + + def bind_unix_socket( + file: str, mode: int = 0o600, backlog: int = _DEFAULT_BACKLOG + ) -> socket.socket: + """Creates a listening unix socket. + + If a socket with the given name already exists, it will be deleted. + If any other file with that name exists, an exception will be + raised. 
+ + Returns a socket object (not a list of socket objects like + `bind_sockets`) + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + set_close_exec(sock.fileno()) + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + except socket.error as e: + if errno_from_exception(e) != errno.ENOPROTOOPT: + # Hurd doesn't support SO_REUSEADDR + raise + sock.setblocking(False) + try: + st = os.stat(file) + except OSError as err: + if errno_from_exception(err) != errno.ENOENT: + raise + else: + if stat.S_ISSOCK(st.st_mode): + os.remove(file) + else: + raise ValueError("File %s exists and is not a socket", file) + sock.bind(file) + os.chmod(file, mode) + sock.listen(backlog) + return sock + + +def add_accept_handler( + sock: socket.socket, callback: Callable[[socket.socket, Any], None] +) -> Callable[[], None]: + """Adds an `.IOLoop` event handler to accept new connections on ``sock``. + + When a connection is accepted, ``callback(connection, address)`` will + be run (``connection`` is a socket object, and ``address`` is the + address of the other end of the connection). Note that this signature + is different from the ``callback(fd, events)`` signature used for + `.IOLoop` handlers. + + A callable is returned which, when called, will remove the `.IOLoop` + event handler and stop processing further incoming connections. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + .. versionchanged:: 5.0 + A callable is returned (``None`` was returned before). + """ + io_loop = IOLoop.current() + removed = [False] + + def accept_handler(fd: socket.socket, events: int) -> None: + # More connections may come in while we're handling callbacks; + # to prevent starvation of other tasks we must limit the number + # of connections we accept at a time. Ideally we would accept + # up to the number of connections that were waiting when we + # entered this method, but this information is not available + # (and rearranging this method to call accept() as many times + # as possible before running any callbacks would have adverse + # effects on load balancing in multiprocess configurations). + # Instead, we use the (default) listen backlog as a rough + # heuristic for the number of connections we can reasonably + # accept at once. + for i in range(_DEFAULT_BACKLOG): + if removed[0]: + # The socket was probably closed + return + try: + connection, address = sock.accept() + except socket.error as e: + # _ERRNO_WOULDBLOCK indicate we have accepted every + # connection that is available. + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: + return + # ECONNABORTED indicates that there was a connection + # but it was closed while still in the accept queue. + # (observed on FreeBSD). + if errno_from_exception(e) == errno.ECONNABORTED: + continue + raise + set_close_exec(connection.fileno()) + callback(connection, address) + + def remove_handler() -> None: + io_loop.remove_handler(sock) + removed[0] = True + + io_loop.add_handler(sock, accept_handler, IOLoop.READ) + return remove_handler + + +def is_valid_ip(ip: str) -> bool: + """Returns ``True`` if the given string is a well-formed IP address. + + Supports IPv4 and IPv6. + """ + if not ip or "\x00" in ip: + # getaddrinfo resolves empty strings to localhost, and truncates + # on zero bytes. 
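+        # Rejecting such values up front keeps "" from quietly being
+        # treated as localhost below.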
+ return False + try: + res = socket.getaddrinfo( + ip, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_NUMERICHOST + ) + return bool(res) + except socket.gaierror as e: + if e.args[0] == socket.EAI_NONAME: + return False + raise + return True + + +class Resolver(Configurable): + """Configurable asynchronous DNS resolver interface. + + By default, a blocking implementation is used (which simply calls + `socket.getaddrinfo`). An alternative implementation can be + chosen with the `Resolver.configure <.Configurable.configure>` + class method:: + + Resolver.configure('tornado.netutil.ThreadedResolver') + + The implementations of this interface included with Tornado are + + * `tornado.netutil.DefaultExecutorResolver` + * `tornado.netutil.BlockingResolver` (deprecated) + * `tornado.netutil.ThreadedResolver` (deprecated) + * `tornado.netutil.OverrideResolver` + * `tornado.platform.twisted.TwistedResolver` + * `tornado.platform.caresresolver.CaresResolver` + + .. versionchanged:: 5.0 + The default implementation has changed from `BlockingResolver` to + `DefaultExecutorResolver`. + """ + + @classmethod + def configurable_base(cls) -> Type["Resolver"]: + return Resolver + + @classmethod + def configurable_default(cls) -> Type["Resolver"]: + return DefaultExecutorResolver + + def resolve( + self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC + ) -> Awaitable[List[Tuple[int, Any]]]: + """Resolves an address. + + The ``host`` argument is a string which may be a hostname or a + literal IP address. + + Returns a `.Future` whose result is a list of (family, + address) pairs, where address is a tuple suitable to pass to + `socket.connect ` (i.e. a ``(host, + port)`` pair for IPv4; additional fields may be present for + IPv6). If a ``callback`` is passed, it will be run with the + result as an argument when it is complete. + + :raises IOError: if the address cannot be resolved. + + .. versionchanged:: 4.4 + Standardized all implementations to raise `IOError`. + + .. versionchanged:: 6.0 The ``callback`` argument was removed. + Use the returned awaitable object instead. + + """ + raise NotImplementedError() + + def close(self) -> None: + """Closes the `Resolver`, freeing any resources used. + + .. versionadded:: 3.1 + + """ + pass + + +def _resolve_addr( + host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC +) -> List[Tuple[int, Any]]: + # On Solaris, getaddrinfo fails if the given port is not found + # in /etc/services and no socket type is given, so we must pass + # one here. The socket type used here doesn't seem to actually + # matter (we discard the one we get back in the results), + # so the addresses we return should still be usable with SOCK_DGRAM. + addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM) + results = [] + for fam, socktype, proto, canonname, address in addrinfo: + results.append((fam, address)) + return results + + +class DefaultExecutorResolver(Resolver): + """Resolver implementation using `.IOLoop.run_in_executor`. + + .. versionadded:: 5.0 + """ + + async def resolve( + self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC + ) -> List[Tuple[int, Any]]: + result = await IOLoop.current().run_in_executor( + None, _resolve_addr, host, port, family + ) + return result + + +class ExecutorResolver(Resolver): + """Resolver implementation using a `concurrent.futures.Executor`. + + Use this instead of `ThreadedResolver` when you require additional + control over the executor being used. 
+ + The executor will be shut down when the resolver is closed unless + ``close_resolver=False``; use this if you want to reuse the same + executor elsewhere. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + .. deprecated:: 5.0 + The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead + of this class. + """ + + def initialize( + self, executor: concurrent.futures.Executor = None, close_executor: bool = True + ) -> None: + self.io_loop = IOLoop.current() + if executor is not None: + self.executor = executor + self.close_executor = close_executor + else: + self.executor = dummy_executor + self.close_executor = False + + def close(self) -> None: + if self.close_executor: + self.executor.shutdown() + self.executor = None # type: ignore + + @run_on_executor + def resolve( + self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC + ) -> List[Tuple[int, Any]]: + return _resolve_addr(host, port, family) + + +class BlockingResolver(ExecutorResolver): + """Default `Resolver` implementation, using `socket.getaddrinfo`. + + The `.IOLoop` will be blocked during the resolution, although the + callback will not be run until the next `.IOLoop` iteration. + + .. deprecated:: 5.0 + The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead + of this class. + """ + + def initialize(self) -> None: # type: ignore + super(BlockingResolver, self).initialize() + + +class ThreadedResolver(ExecutorResolver): + """Multithreaded non-blocking `Resolver` implementation. + + Requires the `concurrent.futures` package to be installed + (available in the standard library since Python 3.2, + installable with ``pip install futures`` in older versions). + + The thread pool size can be configured with:: + + Resolver.configure('tornado.netutil.ThreadedResolver', + num_threads=10) + + .. versionchanged:: 3.1 + All ``ThreadedResolvers`` share a single thread pool, whose + size is set by the first one to be created. + + .. deprecated:: 5.0 + The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead + of this class. + """ + + _threadpool = None # type: ignore + _threadpool_pid = None # type: int + + def initialize(self, num_threads: int = 10) -> None: # type: ignore + threadpool = ThreadedResolver._create_threadpool(num_threads) + super(ThreadedResolver, self).initialize( + executor=threadpool, close_executor=False + ) + + @classmethod + def _create_threadpool( + cls, num_threads: int + ) -> concurrent.futures.ThreadPoolExecutor: + pid = os.getpid() + if cls._threadpool_pid != pid: + # Threads cannot survive after a fork, so if our pid isn't what it + # was when we created the pool then delete it. + cls._threadpool = None + if cls._threadpool is None: + cls._threadpool = concurrent.futures.ThreadPoolExecutor(num_threads) + cls._threadpool_pid = pid + return cls._threadpool + + +class OverrideResolver(Resolver): + """Wraps a resolver with a mapping of overrides. + + This can be used to make local DNS changes (e.g. for testing) + without modifying system-wide settings. + + The mapping can be in three formats:: + + { + # Hostname to host or ip + "example.com": "127.0.1.1", + + # Host+port to host+port + ("login.example.com", 443): ("localhost", 1443), + + # Host+port+address family to host+port + ("login.example.com", 443, socket.AF_INET6): ("::1", 1443), + } + + .. versionchanged:: 5.0 + Added support for host-port-family triplets. 
+ """ + + def initialize(self, resolver: Resolver, mapping: dict) -> None: + self.resolver = resolver + self.mapping = mapping + + def close(self) -> None: + self.resolver.close() + + def resolve( + self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC + ) -> Awaitable[List[Tuple[int, Any]]]: + if (host, port, family) in self.mapping: + host, port = self.mapping[(host, port, family)] + elif (host, port) in self.mapping: + host, port = self.mapping[(host, port)] + elif host in self.mapping: + host = self.mapping[host] + return self.resolver.resolve(host, port, family) + + +# These are the keyword arguments to ssl.wrap_socket that must be translated +# to their SSLContext equivalents (the other arguments are still passed +# to SSLContext.wrap_socket). +_SSL_CONTEXT_KEYWORDS = frozenset( + ["ssl_version", "certfile", "keyfile", "cert_reqs", "ca_certs", "ciphers"] +) + + +def ssl_options_to_context( + ssl_options: Union[Dict[str, Any], ssl.SSLContext] +) -> ssl.SSLContext: + """Try to convert an ``ssl_options`` dictionary to an + `~ssl.SSLContext` object. + + The ``ssl_options`` dictionary contains keywords to be passed to + `ssl.wrap_socket`. In Python 2.7.9+, `ssl.SSLContext` objects can + be used instead. This function converts the dict form to its + `~ssl.SSLContext` equivalent, and may be used when a component which + accepts both forms needs to upgrade to the `~ssl.SSLContext` version + to use features like SNI or NPN. + """ + if isinstance(ssl_options, ssl.SSLContext): + return ssl_options + assert isinstance(ssl_options, dict) + assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options + # Can't use create_default_context since this interface doesn't + # tell us client vs server. + context = ssl.SSLContext(ssl_options.get("ssl_version", ssl.PROTOCOL_SSLv23)) + if "certfile" in ssl_options: + context.load_cert_chain( + ssl_options["certfile"], ssl_options.get("keyfile", None) + ) + if "cert_reqs" in ssl_options: + context.verify_mode = ssl_options["cert_reqs"] + if "ca_certs" in ssl_options: + context.load_verify_locations(ssl_options["ca_certs"]) + if "ciphers" in ssl_options: + context.set_ciphers(ssl_options["ciphers"]) + if hasattr(ssl, "OP_NO_COMPRESSION"): + # Disable TLS compression to avoid CRIME and related attacks. + # This constant depends on openssl version 1.0. + # TODO: Do we need to do this ourselves or can we trust + # the defaults? + context.options |= ssl.OP_NO_COMPRESSION + return context + + +def ssl_wrap_socket( + socket: socket.socket, + ssl_options: Union[Dict[str, Any], ssl.SSLContext], + server_hostname: str = None, + **kwargs: Any +) -> ssl.SSLSocket: + """Returns an ``ssl.SSLSocket`` wrapping the given socket. + + ``ssl_options`` may be either an `ssl.SSLContext` object or a + dictionary (as accepted by `ssl_options_to_context`). Additional + keyword arguments are passed to ``wrap_socket`` (either the + `~ssl.SSLContext` method or the `ssl` module function as + appropriate). + """ + context = ssl_options_to_context(ssl_options) + if ssl.HAS_SNI: + # In python 3.4, wrap_socket only accepts the server_hostname + # argument if HAS_SNI is true. 
+ # TODO: add a unittest (python added server-side SNI support in 3.4) + # In the meantime it can be manually tested with + # python3 -m tornado.httpclient https://sni.velox.ch + return context.wrap_socket(socket, server_hostname=server_hostname, **kwargs) + else: + return context.wrap_socket(socket, **kwargs) diff --git a/server/www/packages/packages-linux/x64/tornado/options.py b/server/www/packages/packages-linux/x64/tornado/options.py index 0a4b965..0f06a58 100644 --- a/server/www/packages/packages-linux/x64/tornado/options.py +++ b/server/www/packages/packages-linux/x64/tornado/options.py @@ -1,654 +1,726 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line parsing module that lets modules define their own options. - -This module is inspired by Google's `gflags -`_. The primary difference -with libraries such as `argparse` is that a global registry is used so -that options may be defined in any module (it also enables -`tornado.log` by default). The rest of Tornado does not depend on this -module, so feel free to use `argparse` or other configuration -libraries if you prefer them. - -Options must be defined with `tornado.options.define` before use, -generally at the top level of a module. The options are then -accessible as attributes of `tornado.options.options`:: - - # myapp/db.py - from tornado.options import define, options - - define("mysql_host", default="127.0.0.1:3306", help="Main user DB") - define("memcache_hosts", default="127.0.0.1:11011", multiple=True, - help="Main user memcache servers") - - def connect(): - db = database.Connection(options.mysql_host) - ... - - # myapp/server.py - from tornado.options import define, options - - define("port", default=8080, help="port to listen on") - - def start_server(): - app = make_app() - app.listen(options.port) - -The ``main()`` method of your application does not need to be aware of all of -the options used throughout your program; they are all automatically loaded -when the modules are loaded. However, all modules that define options -must have been imported before the command line is parsed. - -Your ``main()`` method can parse the command line or parse a config file with -either `parse_command_line` or `parse_config_file`:: - - import myapp.db, myapp.server - import tornado.options - - if __name__ == '__main__': - tornado.options.parse_command_line() - # or - tornado.options.parse_config_file("/etc/server.conf") - -.. note:: - - When using multiple ``parse_*`` functions, pass ``final=False`` to all - but the last one, or side effects may occur twice (in particular, - this can result in log messages being doubled). - -`tornado.options.options` is a singleton instance of `OptionParser`, and -the top-level functions in this module (`define`, `parse_command_line`, etc) -simply call methods on it. You may create additional `OptionParser` -instances to define isolated sets of options, such as for subcommands. - -.. 
note:: - - By default, several options are defined that will configure the - standard `logging` module when `parse_command_line` or `parse_config_file` - are called. If you want Tornado to leave the logging configuration - alone so you can manage it yourself, either pass ``--logging=none`` - on the command line or do the following to disable it in code:: - - from tornado.options import options, parse_command_line - options.logging = None - parse_command_line() - -.. versionchanged:: 4.3 - Dashes and underscores are fully interchangeable in option names; - options can be defined, set, and read with any mix of the two. - Dashes are typical for command-line usage while config files require - underscores. -""" - -from __future__ import absolute_import, division, print_function - -import datetime -import numbers -import re -import sys -import os -import textwrap - -from tornado.escape import _unicode, native_str -from tornado.log import define_logging_options -from tornado import stack_context -from tornado.util import basestring_type, exec_in - - -class Error(Exception): - """Exception raised by errors in the options module.""" - pass - - -class OptionParser(object): - """A collection of options, a dictionary with object-like access. - - Normally accessed via static functions in the `tornado.options` module, - which reference a global instance. - """ - def __init__(self): - # we have to use self.__dict__ because we override setattr. - self.__dict__['_options'] = {} - self.__dict__['_parse_callbacks'] = [] - self.define("help", type=bool, help="show this help information", - callback=self._help_callback) - - def _normalize_name(self, name): - return name.replace('_', '-') - - def __getattr__(self, name): - name = self._normalize_name(name) - if isinstance(self._options.get(name), _Option): - return self._options[name].value() - raise AttributeError("Unrecognized option %r" % name) - - def __setattr__(self, name, value): - name = self._normalize_name(name) - if isinstance(self._options.get(name), _Option): - return self._options[name].set(value) - raise AttributeError("Unrecognized option %r" % name) - - def __iter__(self): - return (opt.name for opt in self._options.values()) - - def __contains__(self, name): - name = self._normalize_name(name) - return name in self._options - - def __getitem__(self, name): - return self.__getattr__(name) - - def __setitem__(self, name, value): - return self.__setattr__(name, value) - - def items(self): - """A sequence of (name, value) pairs. - - .. versionadded:: 3.1 - """ - return [(opt.name, opt.value()) for name, opt in self._options.items()] - - def groups(self): - """The set of option-groups created by ``define``. - - .. versionadded:: 3.1 - """ - return set(opt.group_name for opt in self._options.values()) - - def group_dict(self, group): - """The names and values of options in a group. - - Useful for copying options into Application settings:: - - from tornado.options import define, parse_command_line, options - - define('template_path', group='application') - define('static_path', group='application') - - parse_command_line() - - application = Application( - handlers, **options.group_dict('application')) - - .. versionadded:: 3.1 - """ - return dict( - (opt.name, opt.value()) for name, opt in self._options.items() - if not group or group == opt.group_name) - - def as_dict(self): - """The names and values of all options. - - .. 
versionadded:: 3.1 - """ - return dict( - (opt.name, opt.value()) for name, opt in self._options.items()) - - def define(self, name, default=None, type=None, help=None, metavar=None, - multiple=False, group=None, callback=None): - """Defines a new command line option. - - ``type`` can be any of `str`, `int`, `float`, `bool`, - `~datetime.datetime`, or `~datetime.timedelta`. If no ``type`` - is given but a ``default`` is, ``type`` is the type of - ``default``. Otherwise, ``type`` defaults to `str`. - - If ``multiple`` is True, the option value is a list of ``type`` - instead of an instance of ``type``. - - ``help`` and ``metavar`` are used to construct the - automatically generated command line help string. The help - message is formatted like:: - - --name=METAVAR help string - - ``group`` is used to group the defined options in logical - groups. By default, command line options are grouped by the - file in which they are defined. - - Command line option names must be unique globally. - - If a ``callback`` is given, it will be run with the new value whenever - the option is changed. This can be used to combine command-line - and file-based options:: - - define("config", type=str, help="path to config file", - callback=lambda path: parse_config_file(path, final=False)) - - With this definition, options in the file specified by ``--config`` will - override options set earlier on the command line, but can be overridden - by later flags. - - """ - normalized = self._normalize_name(name) - if normalized in self._options: - raise Error("Option %r already defined in %s" % - (normalized, self._options[normalized].file_name)) - frame = sys._getframe(0) - options_file = frame.f_code.co_filename - - # Can be called directly, or through top level define() fn, in which - # case, step up above that frame to look for real caller. - if (frame.f_back.f_code.co_filename == options_file and - frame.f_back.f_code.co_name == 'define'): - frame = frame.f_back - - file_name = frame.f_back.f_code.co_filename - if file_name == options_file: - file_name = "" - if type is None: - if not multiple and default is not None: - type = default.__class__ - else: - type = str - if group: - group_name = group - else: - group_name = file_name - option = _Option(name, file_name=file_name, - default=default, type=type, help=help, - metavar=metavar, multiple=multiple, - group_name=group_name, - callback=callback) - self._options[normalized] = option - - def parse_command_line(self, args=None, final=True): - """Parses all options given on the command line (defaults to - `sys.argv`). - - Options look like ``--option=value`` and are parsed according - to their ``type``. For boolean options, ``--option`` is - equivalent to ``--option=true`` - - If the option has ``multiple=True``, comma-separated values - are accepted. For multi-value integer options, the syntax - ``x:y`` is also accepted and equivalent to ``range(x, y)``. - - Note that ``args[0]`` is ignored since it is the program name - in `sys.argv`. - - We return a list of all arguments that are not parsed as options. - - If ``final`` is ``False``, parse callbacks will not be run. - This is useful for applications that wish to combine configurations - from multiple sources. 
- - """ - if args is None: - args = sys.argv - remaining = [] - for i in range(1, len(args)): - # All things after the last option are command line arguments - if not args[i].startswith("-"): - remaining = args[i:] - break - if args[i] == "--": - remaining = args[i + 1:] - break - arg = args[i].lstrip("-") - name, equals, value = arg.partition("=") - name = self._normalize_name(name) - if name not in self._options: - self.print_help() - raise Error('Unrecognized command line option: %r' % name) - option = self._options[name] - if not equals: - if option.type == bool: - value = "true" - else: - raise Error('Option %r requires a value' % name) - option.parse(value) - - if final: - self.run_parse_callbacks() - - return remaining - - def parse_config_file(self, path, final=True): - """Parses and loads the config file at the given path. - - The config file contains Python code that will be executed (so - it is **not safe** to use untrusted config files). Anything in - the global namespace that matches a defined option will be - used to set that option's value. - - Options may either be the specified type for the option or - strings (in which case they will be parsed the same way as in - `.parse_command_line`) - - Example (using the options defined in the top-level docs of - this module):: - - port = 80 - mysql_host = 'mydb.example.com:3306' - # Both lists and comma-separated strings are allowed for - # multiple=True. - memcache_hosts = ['cache1.example.com:11011', - 'cache2.example.com:11011'] - memcache_hosts = 'cache1.example.com:11011,cache2.example.com:11011' - - If ``final`` is ``False``, parse callbacks will not be run. - This is useful for applications that wish to combine configurations - from multiple sources. - - .. note:: - - `tornado.options` is primarily a command-line library. - Config file support is provided for applications that wish - to use it, but applications that prefer config files may - wish to look at other libraries instead. - - .. versionchanged:: 4.1 - Config files are now always interpreted as utf-8 instead of - the system default encoding. - - .. versionchanged:: 4.4 - The special variable ``__file__`` is available inside config - files, specifying the absolute path to the config file itself. - - .. versionchanged:: 5.1 - Added the ability to set options via strings in config files. 
- - """ - config = {'__file__': os.path.abspath(path)} - with open(path, 'rb') as f: - exec_in(native_str(f.read()), config, config) - for name in config: - normalized = self._normalize_name(name) - if normalized in self._options: - option = self._options[normalized] - if option.multiple: - if not isinstance(config[name], (list, str)): - raise Error("Option %r is required to be a list of %s " - "or a comma-separated string" % - (option.name, option.type.__name__)) - - if type(config[name]) == str and option.type != str: - option.parse(config[name]) - else: - option.set(config[name]) - - if final: - self.run_parse_callbacks() - - def print_help(self, file=None): - """Prints all the command line options to stderr (or another file).""" - if file is None: - file = sys.stderr - print("Usage: %s [OPTIONS]" % sys.argv[0], file=file) - print("\nOptions:\n", file=file) - by_group = {} - for option in self._options.values(): - by_group.setdefault(option.group_name, []).append(option) - - for filename, o in sorted(by_group.items()): - if filename: - print("\n%s options:\n" % os.path.normpath(filename), file=file) - o.sort(key=lambda option: option.name) - for option in o: - # Always print names with dashes in a CLI context. - prefix = self._normalize_name(option.name) - if option.metavar: - prefix += "=" + option.metavar - description = option.help or "" - if option.default is not None and option.default != '': - description += " (default %s)" % option.default - lines = textwrap.wrap(description, 79 - 35) - if len(prefix) > 30 or len(lines) == 0: - lines.insert(0, '') - print(" --%-30s %s" % (prefix, lines[0]), file=file) - for line in lines[1:]: - print("%-34s %s" % (' ', line), file=file) - print(file=file) - - def _help_callback(self, value): - if value: - self.print_help() - sys.exit(0) - - def add_parse_callback(self, callback): - """Adds a parse callback, to be invoked when option parsing is done.""" - self._parse_callbacks.append(stack_context.wrap(callback)) - - def run_parse_callbacks(self): - for callback in self._parse_callbacks: - callback() - - def mockable(self): - """Returns a wrapper around self that is compatible with - `mock.patch `. - - The `mock.patch ` function (included in - the standard library `unittest.mock` package since Python 3.3, - or in the third-party ``mock`` package for older versions of - Python) is incompatible with objects like ``options`` that - override ``__getattr__`` and ``__setattr__``. This function - returns an object that can be used with `mock.patch.object - ` to modify option values:: - - with mock.patch.object(options.mockable(), 'name', value): - assert options.name == value - """ - return _Mockable(self) - - -class _Mockable(object): - """`mock.patch` compatible wrapper for `OptionParser`. - - As of ``mock`` version 1.0.1, when an object uses ``__getattr__`` - hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete - the attribute it set instead of setting a new one (assuming that - the object does not catpure ``__setattr__``, so the patch - created a new attribute in ``__dict__``). - - _Mockable's getattr and setattr pass through to the underlying - OptionParser, and delattr undoes the effect of a previous setattr. 
- """ - def __init__(self, options): - # Modify __dict__ directly to bypass __setattr__ - self.__dict__['_options'] = options - self.__dict__['_originals'] = {} - - def __getattr__(self, name): - return getattr(self._options, name) - - def __setattr__(self, name, value): - assert name not in self._originals, "don't reuse mockable objects" - self._originals[name] = getattr(self._options, name) - setattr(self._options, name, value) - - def __delattr__(self, name): - setattr(self._options, name, self._originals.pop(name)) - - -class _Option(object): - UNSET = object() - - def __init__(self, name, default=None, type=basestring_type, help=None, - metavar=None, multiple=False, file_name=None, group_name=None, - callback=None): - if default is None and multiple: - default = [] - self.name = name - self.type = type - self.help = help - self.metavar = metavar - self.multiple = multiple - self.file_name = file_name - self.group_name = group_name - self.callback = callback - self.default = default - self._value = _Option.UNSET - - def value(self): - return self.default if self._value is _Option.UNSET else self._value - - def parse(self, value): - _parse = { - datetime.datetime: self._parse_datetime, - datetime.timedelta: self._parse_timedelta, - bool: self._parse_bool, - basestring_type: self._parse_string, - }.get(self.type, self.type) - if self.multiple: - self._value = [] - for part in value.split(","): - if issubclass(self.type, numbers.Integral): - # allow ranges of the form X:Y (inclusive at both ends) - lo, _, hi = part.partition(":") - lo = _parse(lo) - hi = _parse(hi) if hi else lo - self._value.extend(range(lo, hi + 1)) - else: - self._value.append(_parse(part)) - else: - self._value = _parse(value) - if self.callback is not None: - self.callback(self._value) - return self.value() - - def set(self, value): - if self.multiple: - if not isinstance(value, list): - raise Error("Option %r is required to be a list of %s" % - (self.name, self.type.__name__)) - for item in value: - if item is not None and not isinstance(item, self.type): - raise Error("Option %r is required to be a list of %s" % - (self.name, self.type.__name__)) - else: - if value is not None and not isinstance(value, self.type): - raise Error("Option %r is required to be a %s (%s given)" % - (self.name, self.type.__name__, type(value))) - self._value = value - if self.callback is not None: - self.callback(self._value) - - # Supported date/time formats in our options - _DATETIME_FORMATS = [ - "%a %b %d %H:%M:%S %Y", - "%Y-%m-%d %H:%M:%S", - "%Y-%m-%d %H:%M", - "%Y-%m-%dT%H:%M", - "%Y%m%d %H:%M:%S", - "%Y%m%d %H:%M", - "%Y-%m-%d", - "%Y%m%d", - "%H:%M:%S", - "%H:%M", - ] - - def _parse_datetime(self, value): - for format in self._DATETIME_FORMATS: - try: - return datetime.datetime.strptime(value, format) - except ValueError: - pass - raise Error('Unrecognized date/time format: %r' % value) - - _TIMEDELTA_ABBREV_DICT = { - 'h': 'hours', - 'm': 'minutes', - 'min': 'minutes', - 's': 'seconds', - 'sec': 'seconds', - 'ms': 'milliseconds', - 'us': 'microseconds', - 'd': 'days', - 'w': 'weeks', - } - - _FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?' 
- - _TIMEDELTA_PATTERN = re.compile( - r'\s*(%s)\s*(\w*)\s*' % _FLOAT_PATTERN, re.IGNORECASE) - - def _parse_timedelta(self, value): - try: - sum = datetime.timedelta() - start = 0 - while start < len(value): - m = self._TIMEDELTA_PATTERN.match(value, start) - if not m: - raise Exception() - num = float(m.group(1)) - units = m.group(2) or 'seconds' - units = self._TIMEDELTA_ABBREV_DICT.get(units, units) - sum += datetime.timedelta(**{units: num}) - start = m.end() - return sum - except Exception: - raise - - def _parse_bool(self, value): - return value.lower() not in ("false", "0", "f") - - def _parse_string(self, value): - return _unicode(value) - - -options = OptionParser() -"""Global options object. - -All defined options are available as attributes on this object. -""" - - -def define(name, default=None, type=None, help=None, metavar=None, - multiple=False, group=None, callback=None): - """Defines an option in the global namespace. - - See `OptionParser.define`. - """ - return options.define(name, default=default, type=type, help=help, - metavar=metavar, multiple=multiple, group=group, - callback=callback) - - -def parse_command_line(args=None, final=True): - """Parses global options from the command line. - - See `OptionParser.parse_command_line`. - """ - return options.parse_command_line(args, final=final) - - -def parse_config_file(path, final=True): - """Parses global options from a config file. - - See `OptionParser.parse_config_file`. - """ - return options.parse_config_file(path, final=final) - - -def print_help(file=None): - """Prints all the command line options to stderr (or another file). - - See `OptionParser.print_help`. - """ - return options.print_help(file) - - -def add_parse_callback(callback): - """Adds a parse callback, to be invoked when option parsing is done. - - See `OptionParser.add_parse_callback` - """ - options.add_parse_callback(callback) - - -# Default options -define_logging_options(options) +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A command line parsing module that lets modules define their own options. + +This module is inspired by Google's `gflags +`_. The primary difference +with libraries such as `argparse` is that a global registry is used so +that options may be defined in any module (it also enables +`tornado.log` by default). The rest of Tornado does not depend on this +module, so feel free to use `argparse` or other configuration +libraries if you prefer them. + +Options must be defined with `tornado.options.define` before use, +generally at the top level of a module. The options are then +accessible as attributes of `tornado.options.options`:: + + # myapp/db.py + from tornado.options import define, options + + define("mysql_host", default="127.0.0.1:3306", help="Main user DB") + define("memcache_hosts", default="127.0.0.1:11011", multiple=True, + help="Main user memcache servers") + + def connect(): + db = database.Connection(options.mysql_host) + ... 
+ + # myapp/server.py + from tornado.options import define, options + + define("port", default=8080, help="port to listen on") + + def start_server(): + app = make_app() + app.listen(options.port) + +The ``main()`` method of your application does not need to be aware of all of +the options used throughout your program; they are all automatically loaded +when the modules are loaded. However, all modules that define options +must have been imported before the command line is parsed. + +Your ``main()`` method can parse the command line or parse a config file with +either `parse_command_line` or `parse_config_file`:: + + import myapp.db, myapp.server + import tornado.options + + if __name__ == '__main__': + tornado.options.parse_command_line() + # or + tornado.options.parse_config_file("/etc/server.conf") + +.. note:: + + When using multiple ``parse_*`` functions, pass ``final=False`` to all + but the last one, or side effects may occur twice (in particular, + this can result in log messages being doubled). + +`tornado.options.options` is a singleton instance of `OptionParser`, and +the top-level functions in this module (`define`, `parse_command_line`, etc) +simply call methods on it. You may create additional `OptionParser` +instances to define isolated sets of options, such as for subcommands. + +.. note:: + + By default, several options are defined that will configure the + standard `logging` module when `parse_command_line` or `parse_config_file` + are called. If you want Tornado to leave the logging configuration + alone so you can manage it yourself, either pass ``--logging=none`` + on the command line or do the following to disable it in code:: + + from tornado.options import options, parse_command_line + options.logging = None + parse_command_line() + +.. versionchanged:: 4.3 + Dashes and underscores are fully interchangeable in option names; + options can be defined, set, and read with any mix of the two. + Dashes are typical for command-line usage while config files require + underscores. +""" + +import datetime +import numbers +import re +import sys +import os +import textwrap + +from tornado.escape import _unicode, native_str +from tornado.log import define_logging_options +from tornado.util import basestring_type, exec_in + +import typing +from typing import Any, Iterator, Iterable, Tuple, Set, Dict, Callable, List, TextIO + +if typing.TYPE_CHECKING: + from typing import Optional # noqa: F401 + + +class Error(Exception): + """Exception raised by errors in the options module.""" + + pass + + +class OptionParser(object): + """A collection of options, a dictionary with object-like access. + + Normally accessed via static functions in the `tornado.options` module, + which reference a global instance. + """ + + def __init__(self) -> None: + # we have to use self.__dict__ because we override setattr. 
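A minimal sketch of the define/parse flow the module docstring above describes, run against an isolated ``OptionParser`` so the global singleton is untouched (the option names are invented for the example)::

    from tornado.options import OptionParser

    parser = OptionParser()
    parser.define("port", default=8080, type=int, help="port to listen on")
    parser.define("hosts", default=["127.0.0.1"], multiple=True)

    # args[0] is skipped (normally the program name); anything after the
    # last --option is returned unparsed.
    remaining = parser.parse_command_line(
        ["prog", "--port=9000", "--hosts=a.example.com,b.example.com", "extra"]
    )
    assert parser.port == 9000
    assert parser.hosts == ["a.example.com", "b.example.com"]
    assert remaining == ["extra"]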
+ self.__dict__["_options"] = {} + self.__dict__["_parse_callbacks"] = [] + self.define( + "help", + type=bool, + help="show this help information", + callback=self._help_callback, + ) + + def _normalize_name(self, name: str) -> str: + return name.replace("_", "-") + + def __getattr__(self, name: str) -> Any: + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].value() + raise AttributeError("Unrecognized option %r" % name) + + def __setattr__(self, name: str, value: Any) -> None: + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].set(value) + raise AttributeError("Unrecognized option %r" % name) + + def __iter__(self) -> Iterator: + return (opt.name for opt in self._options.values()) + + def __contains__(self, name: str) -> bool: + name = self._normalize_name(name) + return name in self._options + + def __getitem__(self, name: str) -> Any: + return self.__getattr__(name) + + def __setitem__(self, name: str, value: Any) -> None: + return self.__setattr__(name, value) + + def items(self) -> Iterable[Tuple[str, Any]]: + """An iterable of (name, value) pairs. + + .. versionadded:: 3.1 + """ + return [(opt.name, opt.value()) for name, opt in self._options.items()] + + def groups(self) -> Set[str]: + """The set of option-groups created by ``define``. + + .. versionadded:: 3.1 + """ + return set(opt.group_name for opt in self._options.values()) + + def group_dict(self, group: str) -> Dict[str, Any]: + """The names and values of options in a group. + + Useful for copying options into Application settings:: + + from tornado.options import define, parse_command_line, options + + define('template_path', group='application') + define('static_path', group='application') + + parse_command_line() + + application = Application( + handlers, **options.group_dict('application')) + + .. versionadded:: 3.1 + """ + return dict( + (opt.name, opt.value()) + for name, opt in self._options.items() + if not group or group == opt.group_name + ) + + def as_dict(self) -> Dict[str, Any]: + """The names and values of all options. + + .. versionadded:: 3.1 + """ + return dict((opt.name, opt.value()) for name, opt in self._options.items()) + + def define( + self, + name: str, + default: Any = None, + type: type = None, + help: str = None, + metavar: str = None, + multiple: bool = False, + group: str = None, + callback: Callable[[Any], None] = None, + ) -> None: + """Defines a new command line option. + + ``type`` can be any of `str`, `int`, `float`, `bool`, + `~datetime.datetime`, or `~datetime.timedelta`. If no ``type`` + is given but a ``default`` is, ``type`` is the type of + ``default``. Otherwise, ``type`` defaults to `str`. + + If ``multiple`` is True, the option value is a list of ``type`` + instead of an instance of ``type``. + + ``help`` and ``metavar`` are used to construct the + automatically generated command line help string. The help + message is formatted like:: + + --name=METAVAR help string + + ``group`` is used to group the defined options in logical + groups. By default, command line options are grouped by the + file in which they are defined. + + Command line option names must be unique globally. + + If a ``callback`` is given, it will be run with the new value whenever + the option is changed. 
This can be used to combine command-line + and file-based options:: + + define("config", type=str, help="path to config file", + callback=lambda path: parse_config_file(path, final=False)) + + With this definition, options in the file specified by ``--config`` will + override options set earlier on the command line, but can be overridden + by later flags. + + """ + normalized = self._normalize_name(name) + if normalized in self._options: + raise Error( + "Option %r already defined in %s" + % (normalized, self._options[normalized].file_name) + ) + frame = sys._getframe(0) + options_file = frame.f_code.co_filename + + # Can be called directly, or through top level define() fn, in which + # case, step up above that frame to look for real caller. + if ( + frame.f_back.f_code.co_filename == options_file + and frame.f_back.f_code.co_name == "define" + ): + frame = frame.f_back + + file_name = frame.f_back.f_code.co_filename + if file_name == options_file: + file_name = "" + if type is None: + if not multiple and default is not None: + type = default.__class__ + else: + type = str + if group: + group_name = group # type: Optional[str] + else: + group_name = file_name + option = _Option( + name, + file_name=file_name, + default=default, + type=type, + help=help, + metavar=metavar, + multiple=multiple, + group_name=group_name, + callback=callback, + ) + self._options[normalized] = option + + def parse_command_line( + self, args: List[str] = None, final: bool = True + ) -> List[str]: + """Parses all options given on the command line (defaults to + `sys.argv`). + + Options look like ``--option=value`` and are parsed according + to their ``type``. For boolean options, ``--option`` is + equivalent to ``--option=true`` + + If the option has ``multiple=True``, comma-separated values + are accepted. For multi-value integer options, the syntax + ``x:y`` is also accepted and equivalent to ``range(x, y)``. + + Note that ``args[0]`` is ignored since it is the program name + in `sys.argv`. + + We return a list of all arguments that are not parsed as options. + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. + + """ + if args is None: + args = sys.argv + remaining = [] # type: List[str] + for i in range(1, len(args)): + # All things after the last option are command line arguments + if not args[i].startswith("-"): + remaining = args[i:] + break + if args[i] == "--": + remaining = args[i + 1 :] + break + arg = args[i].lstrip("-") + name, equals, value = arg.partition("=") + name = self._normalize_name(name) + if name not in self._options: + self.print_help() + raise Error("Unrecognized command line option: %r" % name) + option = self._options[name] + if not equals: + if option.type == bool: + value = "true" + else: + raise Error("Option %r requires a value" % name) + option.parse(value) + + if final: + self.run_parse_callbacks() + + return remaining + + def parse_config_file(self, path: str, final: bool = True) -> None: + """Parses and loads the config file at the given path. + + The config file contains Python code that will be executed (so + it is **not safe** to use untrusted config files). Anything in + the global namespace that matches a defined option will be + used to set that option's value. 
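A sketch of the boolean and multi-value behaviors just described, again on an isolated parser with invented names. Note that ``x:y`` is inclusive at both ends, as the parsing code later in this file confirms, rather than ``range(x, y)``::

    from tornado.options import OptionParser

    p = OptionParser()
    p.define("debug", type=bool, default=False)
    p.define("ids", type=int, multiple=True)

    # A bare --debug is read as --debug=true; 1:3 expands inclusively.
    p.parse_command_line(["prog", "--debug", "--ids=1:3,7"])
    assert p.debug is True
    assert p.ids == [1, 2, 3, 7]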
+ + Options may either be the specified type for the option or + strings (in which case they will be parsed the same way as in + `.parse_command_line`) + + Example (using the options defined in the top-level docs of + this module):: + + port = 80 + mysql_host = 'mydb.example.com:3306' + # Both lists and comma-separated strings are allowed for + # multiple=True. + memcache_hosts = ['cache1.example.com:11011', + 'cache2.example.com:11011'] + memcache_hosts = 'cache1.example.com:11011,cache2.example.com:11011' + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. + + .. note:: + + `tornado.options` is primarily a command-line library. + Config file support is provided for applications that wish + to use it, but applications that prefer config files may + wish to look at other libraries instead. + + .. versionchanged:: 4.1 + Config files are now always interpreted as utf-8 instead of + the system default encoding. + + .. versionchanged:: 4.4 + The special variable ``__file__`` is available inside config + files, specifying the absolute path to the config file itself. + + .. versionchanged:: 5.1 + Added the ability to set options via strings in config files. + + """ + config = {"__file__": os.path.abspath(path)} + with open(path, "rb") as f: + exec_in(native_str(f.read()), config, config) + for name in config: + normalized = self._normalize_name(name) + if normalized in self._options: + option = self._options[normalized] + if option.multiple: + if not isinstance(config[name], (list, str)): + raise Error( + "Option %r is required to be a list of %s " + "or a comma-separated string" + % (option.name, option.type.__name__) + ) + + if type(config[name]) == str and option.type != str: + option.parse(config[name]) + else: + option.set(config[name]) + + if final: + self.run_parse_callbacks() + + def print_help(self, file: TextIO = None) -> None: + """Prints all the command line options to stderr (or another file).""" + if file is None: + file = sys.stderr + print("Usage: %s [OPTIONS]" % sys.argv[0], file=file) + print("\nOptions:\n", file=file) + by_group = {} # type: Dict[str, List[_Option]] + for option in self._options.values(): + by_group.setdefault(option.group_name, []).append(option) + + for filename, o in sorted(by_group.items()): + if filename: + print("\n%s options:\n" % os.path.normpath(filename), file=file) + o.sort(key=lambda option: option.name) + for option in o: + # Always print names with dashes in a CLI context. 
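The config-file behavior described above, sketched with a throwaway file (option names and values are illustrative)::

    import tempfile
    from tornado.options import OptionParser

    p = OptionParser()
    p.define("port", type=int, default=80)
    p.define("ids", type=int, multiple=True)
    p.define("memcache_hosts", type=str, multiple=True)

    with tempfile.NamedTemporaryFile("w", suffix=".conf", delete=False) as f:
        # The file is plain Python: values may be native types, lists, or
        # strings parsed as they would be on the command line.
        f.write("port = 8888\n")
        f.write("ids = '1:3,7'\n")
        f.write("memcache_hosts = ['c1.example.com:11011']\n")
        path = f.name

    p.parse_config_file(path)
    assert p.port == 8888
    assert p.ids == [1, 2, 3, 7]
    assert p.memcache_hosts == ["c1.example.com:11011"]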
+ prefix = self._normalize_name(option.name)
+ if option.metavar:
+ prefix += "=" + option.metavar
+ description = option.help or ""
+ if option.default is not None and option.default != "":
+ description += " (default %s)" % option.default
+ lines = textwrap.wrap(description, 79 - 35)
+ if len(prefix) > 30 or len(lines) == 0:
+ lines.insert(0, "")
+ print(" --%-30s %s" % (prefix, lines[0]), file=file)
+ for line in lines[1:]:
+ print("%-34s %s" % (" ", line), file=file)
+ print(file=file)
+
+ def _help_callback(self, value: bool) -> None:
+ if value:
+ self.print_help()
+ sys.exit(0)
+
+ def add_parse_callback(self, callback: Callable[[], None]) -> None:
+ """Adds a parse callback, to be invoked when option parsing is done."""
+ self._parse_callbacks.append(callback)
+
+ def run_parse_callbacks(self) -> None:
+ for callback in self._parse_callbacks:
+ callback()
+
+ def mockable(self) -> "_Mockable":
+ """Returns a wrapper around self that is compatible with
+ `mock.patch <unittest.mock.patch>`.
+
+ The `mock.patch <unittest.mock.patch>` function (included in
+ the standard library `unittest.mock` package since Python 3.3,
+ or in the third-party ``mock`` package for older versions of
+ Python) is incompatible with objects like ``options`` that
+ override ``__getattr__`` and ``__setattr__``. This function
+ returns an object that can be used with `mock.patch.object
+ <unittest.mock.patch.object>` to modify option values::
+
+ with mock.patch.object(options.mockable(), 'name', value):
+ assert options.name == value
+ """
+ return _Mockable(self)
+
+
+class _Mockable(object):
+ """`mock.patch` compatible wrapper for `OptionParser`.
+
+ As of ``mock`` version 1.0.1, when an object uses ``__getattr__``
+ hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete
+ the attribute it set instead of setting a new one (assuming that
+ the object does not capture ``__setattr__``, so the patch
+ created a new attribute in ``__dict__``).
+
+ _Mockable's getattr and setattr pass through to the underlying
+ OptionParser, and delattr undoes the effect of a previous setattr.
+ """
+
+ def __init__(self, options: OptionParser) -> None:
+ # Modify __dict__ directly to bypass __setattr__
+ self.__dict__["_options"] = options
+ self.__dict__["_originals"] = {}
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._options, name)
+
+ def __setattr__(self, name: str, value: Any) -> None:
+ assert name not in self._originals, "don't reuse mockable objects"
+ self._originals[name] = getattr(self._options, name)
+ setattr(self._options, name, value)
+
+ def __delattr__(self, name: str) -> None:
+ setattr(self._options, name, self._originals.pop(name))
+
+
+class _Option(object):
+ # This class could almost be made generic, but the way the types
+ # interact with the multiple argument makes this tricky. (default
+ # and the callback use List[T], but type is still Type[T]).
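The ``_Mockable`` wrapper's intended use, per its docstring, with the standard ``unittest.mock``::

    from unittest import mock
    from tornado.options import OptionParser

    opts = OptionParser()
    opts.define("name", default="alice", type=str)

    # Patch the wrapper, not the parser: _Mockable routes setattr/delattr
    # around the __getattr__/__setattr__ hooks that confuse mock.patch.
    with mock.patch.object(opts.mockable(), "name", "bob"):
        assert opts.name == "bob"
    assert opts.name == "alice"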
+ UNSET = object() + + def __init__( + self, + name: str, + default: Any = None, + type: type = None, + help: str = None, + metavar: str = None, + multiple: bool = False, + file_name: str = None, + group_name: str = None, + callback: Callable[[Any], None] = None, + ) -> None: + if default is None and multiple: + default = [] + self.name = name + if type is None: + raise ValueError("type must not be None") + self.type = type + self.help = help + self.metavar = metavar + self.multiple = multiple + self.file_name = file_name + self.group_name = group_name + self.callback = callback + self.default = default + self._value = _Option.UNSET # type: Any + + def value(self) -> Any: + return self.default if self._value is _Option.UNSET else self._value + + def parse(self, value: str) -> Any: + _parse = { + datetime.datetime: self._parse_datetime, + datetime.timedelta: self._parse_timedelta, + bool: self._parse_bool, + basestring_type: self._parse_string, + }.get( + self.type, self.type + ) # type: Callable[[str], Any] + if self.multiple: + self._value = [] + for part in value.split(","): + if issubclass(self.type, numbers.Integral): + # allow ranges of the form X:Y (inclusive at both ends) + lo_str, _, hi_str = part.partition(":") + lo = _parse(lo_str) + hi = _parse(hi_str) if hi_str else lo + self._value.extend(range(lo, hi + 1)) + else: + self._value.append(_parse(part)) + else: + self._value = _parse(value) + if self.callback is not None: + self.callback(self._value) + return self.value() + + def set(self, value: Any) -> None: + if self.multiple: + if not isinstance(value, list): + raise Error( + "Option %r is required to be a list of %s" + % (self.name, self.type.__name__) + ) + for item in value: + if item is not None and not isinstance(item, self.type): + raise Error( + "Option %r is required to be a list of %s" + % (self.name, self.type.__name__) + ) + else: + if value is not None and not isinstance(value, self.type): + raise Error( + "Option %r is required to be a %s (%s given)" + % (self.name, self.type.__name__, type(value)) + ) + self._value = value + if self.callback is not None: + self.callback(self._value) + + # Supported date/time formats in our options + _DATETIME_FORMATS = [ + "%a %b %d %H:%M:%S %Y", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M", + "%Y-%m-%dT%H:%M", + "%Y%m%d %H:%M:%S", + "%Y%m%d %H:%M", + "%Y-%m-%d", + "%Y%m%d", + "%H:%M:%S", + "%H:%M", + ] + + def _parse_datetime(self, value: str) -> datetime.datetime: + for format in self._DATETIME_FORMATS: + try: + return datetime.datetime.strptime(value, format) + except ValueError: + pass + raise Error("Unrecognized date/time format: %r" % value) + + _TIMEDELTA_ABBREV_DICT = { + "h": "hours", + "m": "minutes", + "min": "minutes", + "s": "seconds", + "sec": "seconds", + "ms": "milliseconds", + "us": "microseconds", + "d": "days", + "w": "weeks", + } + + _FLOAT_PATTERN = r"[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?" 
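The date/time support above in action: datetime strings must match one of the ``_DATETIME_FORMATS`` entries, and timedelta strings combine whitespace-separated numbers with the ``_TIMEDELTA_ABBREV_DICT`` unit abbreviations::

    import datetime
    from tornado.options import OptionParser

    p = OptionParser()
    p.define("start", type=datetime.datetime)
    p.define("timeout", type=datetime.timedelta)

    p.parse_command_line(
        ["prog", "--start=2019-11-01 12:30", "--timeout=1h 30m"]
    )
    assert p.start == datetime.datetime(2019, 11, 1, 12, 30)
    assert p.timeout == datetime.timedelta(hours=1, minutes=30)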
+ + _TIMEDELTA_PATTERN = re.compile( + r"\s*(%s)\s*(\w*)\s*" % _FLOAT_PATTERN, re.IGNORECASE + ) + + def _parse_timedelta(self, value: str) -> datetime.timedelta: + try: + sum = datetime.timedelta() + start = 0 + while start < len(value): + m = self._TIMEDELTA_PATTERN.match(value, start) + if not m: + raise Exception() + num = float(m.group(1)) + units = m.group(2) or "seconds" + units = self._TIMEDELTA_ABBREV_DICT.get(units, units) + sum += datetime.timedelta(**{units: num}) + start = m.end() + return sum + except Exception: + raise + + def _parse_bool(self, value: str) -> bool: + return value.lower() not in ("false", "0", "f") + + def _parse_string(self, value: str) -> str: + return _unicode(value) + + +options = OptionParser() +"""Global options object. + +All defined options are available as attributes on this object. +""" + + +def define( + name: str, + default: Any = None, + type: type = None, + help: str = None, + metavar: str = None, + multiple: bool = False, + group: str = None, + callback: Callable[[Any], None] = None, +) -> None: + """Defines an option in the global namespace. + + See `OptionParser.define`. + """ + return options.define( + name, + default=default, + type=type, + help=help, + metavar=metavar, + multiple=multiple, + group=group, + callback=callback, + ) + + +def parse_command_line(args: List[str] = None, final: bool = True) -> List[str]: + """Parses global options from the command line. + + See `OptionParser.parse_command_line`. + """ + return options.parse_command_line(args, final=final) + + +def parse_config_file(path: str, final: bool = True) -> None: + """Parses global options from a config file. + + See `OptionParser.parse_config_file`. + """ + return options.parse_config_file(path, final=final) + + +def print_help(file: TextIO = None) -> None: + """Prints all the command line options to stderr (or another file). + + See `OptionParser.print_help`. + """ + return options.print_help(file) + + +def add_parse_callback(callback: Callable[[], None]) -> None: + """Adds a parse callback, to be invoked when option parsing is done. + + See `OptionParser.add_parse_callback` + """ + options.add_parse_callback(callback) + + +# Default options +define_logging_options(options) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/asyncio.py b/server/www/packages/packages-linux/x64/tornado/platform/asyncio.py index e0042e1..325a6fe 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/asyncio.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/asyncio.py @@ -1,299 +1,346 @@ -"""Bridges between the `asyncio` module and Tornado IOLoop. - -.. versionadded:: 3.2 - -This module integrates Tornado with the ``asyncio`` module introduced -in Python 3.4. This makes it possible to combine the two libraries on -the same event loop. - -.. deprecated:: 5.0 - - While the code in this module is still used, it is now enabled - automatically when `asyncio` is available, so applications should - no longer need to refer to this module directly. - -.. note:: - - Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of - methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on - Windows. Use the `~asyncio.SelectorEventLoop` instead. 
-""" - -from __future__ import absolute_import, division, print_function -import functools - -from tornado.gen import convert_yielded -from tornado.ioloop import IOLoop -from tornado import stack_context - -import asyncio - - -class BaseAsyncIOLoop(IOLoop): - def initialize(self, asyncio_loop, **kwargs): - self.asyncio_loop = asyncio_loop - # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) - self.handlers = {} - # Set of fds listening for reads/writes - self.readers = set() - self.writers = set() - self.closing = False - # If an asyncio loop was closed through an asyncio interface - # instead of IOLoop.close(), we'd never hear about it and may - # have left a dangling reference in our map. In case an - # application (or, more likely, a test suite) creates and - # destroys a lot of event loops in this way, check here to - # ensure that we don't have a lot of dead loops building up in - # the map. - # - # TODO(bdarnell): consider making self.asyncio_loop a weakref - # for AsyncIOMainLoop and make _ioloop_for_asyncio a - # WeakKeyDictionary. - for loop in list(IOLoop._ioloop_for_asyncio): - if loop.is_closed(): - del IOLoop._ioloop_for_asyncio[loop] - IOLoop._ioloop_for_asyncio[asyncio_loop] = self - super(BaseAsyncIOLoop, self).initialize(**kwargs) - - def close(self, all_fds=False): - self.closing = True - for fd in list(self.handlers): - fileobj, handler_func = self.handlers[fd] - self.remove_handler(fd) - if all_fds: - self.close_fd(fileobj) - # Remove the mapping before closing the asyncio loop. If this - # happened in the other order, we could race against another - # initialize() call which would see the closed asyncio loop, - # assume it was closed from the asyncio side, and do this - # cleanup for us, leading to a KeyError. 
- del IOLoop._ioloop_for_asyncio[self.asyncio_loop] - self.asyncio_loop.close() - - def add_handler(self, fd, handler, events): - fd, fileobj = self.split_fd(fd) - if fd in self.handlers: - raise ValueError("fd %s added twice" % fd) - self.handlers[fd] = (fileobj, stack_context.wrap(handler)) - if events & IOLoop.READ: - self.asyncio_loop.add_reader( - fd, self._handle_events, fd, IOLoop.READ) - self.readers.add(fd) - if events & IOLoop.WRITE: - self.asyncio_loop.add_writer( - fd, self._handle_events, fd, IOLoop.WRITE) - self.writers.add(fd) - - def update_handler(self, fd, events): - fd, fileobj = self.split_fd(fd) - if events & IOLoop.READ: - if fd not in self.readers: - self.asyncio_loop.add_reader( - fd, self._handle_events, fd, IOLoop.READ) - self.readers.add(fd) - else: - if fd in self.readers: - self.asyncio_loop.remove_reader(fd) - self.readers.remove(fd) - if events & IOLoop.WRITE: - if fd not in self.writers: - self.asyncio_loop.add_writer( - fd, self._handle_events, fd, IOLoop.WRITE) - self.writers.add(fd) - else: - if fd in self.writers: - self.asyncio_loop.remove_writer(fd) - self.writers.remove(fd) - - def remove_handler(self, fd): - fd, fileobj = self.split_fd(fd) - if fd not in self.handlers: - return - if fd in self.readers: - self.asyncio_loop.remove_reader(fd) - self.readers.remove(fd) - if fd in self.writers: - self.asyncio_loop.remove_writer(fd) - self.writers.remove(fd) - del self.handlers[fd] - - def _handle_events(self, fd, events): - fileobj, handler_func = self.handlers[fd] - handler_func(fileobj, events) - - def start(self): - try: - old_loop = asyncio.get_event_loop() - except (RuntimeError, AssertionError): - old_loop = None - try: - self._setup_logging() - asyncio.set_event_loop(self.asyncio_loop) - self.asyncio_loop.run_forever() - finally: - asyncio.set_event_loop(old_loop) - - def stop(self): - self.asyncio_loop.stop() - - def call_at(self, when, callback, *args, **kwargs): - # asyncio.call_at supports *args but not **kwargs, so bind them here. - # We do not synchronize self.time and asyncio_loop.time, so - # convert from absolute to relative. - return self.asyncio_loop.call_later( - max(0, when - self.time()), self._run_callback, - functools.partial(stack_context.wrap(callback), *args, **kwargs)) - - def remove_timeout(self, timeout): - timeout.cancel() - - def add_callback(self, callback, *args, **kwargs): - try: - self.asyncio_loop.call_soon_threadsafe( - self._run_callback, - functools.partial(stack_context.wrap(callback), *args, **kwargs)) - except RuntimeError: - # "Event loop is closed". Swallow the exception for - # consistency with PollIOLoop (and logical consistency - # with the fact that we can't guarantee that an - # add_callback that completes without error will - # eventually execute). - pass - - add_callback_from_signal = add_callback - - def run_in_executor(self, executor, func, *args): - return self.asyncio_loop.run_in_executor(executor, func, *args) - - def set_default_executor(self, executor): - return self.asyncio_loop.set_default_executor(executor) - - -class AsyncIOMainLoop(BaseAsyncIOLoop): - """``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the - current ``asyncio`` event loop (i.e. the one returned by - ``asyncio.get_event_loop()``). - - .. deprecated:: 5.0 - - Now used automatically when appropriate; it is no longer necessary - to refer to this class directly. - - .. versionchanged:: 5.0 - - Closing an `AsyncIOMainLoop` now closes the underlying asyncio loop. 
- """ - def initialize(self, **kwargs): - super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), **kwargs) - - def make_current(self): - # AsyncIOMainLoop already refers to the current asyncio loop so - # nothing to do here. - pass - - -class AsyncIOLoop(BaseAsyncIOLoop): - """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop. - This class follows the usual Tornado semantics for creating new - ``IOLoops``; these loops are not necessarily related to the - ``asyncio`` default event loop. - - Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object - can be accessed with the ``asyncio_loop`` attribute. - - .. versionchanged:: 5.0 - - When an ``AsyncIOLoop`` becomes the current `.IOLoop`, it also sets - the current `asyncio` event loop. - - .. deprecated:: 5.0 - - Now used automatically when appropriate; it is no longer necessary - to refer to this class directly. - """ - def initialize(self, **kwargs): - self.is_current = False - loop = asyncio.new_event_loop() - try: - super(AsyncIOLoop, self).initialize(loop, **kwargs) - except Exception: - # If initialize() does not succeed (taking ownership of the loop), - # we have to close it. - loop.close() - raise - - def close(self, all_fds=False): - if self.is_current: - self.clear_current() - super(AsyncIOLoop, self).close(all_fds=all_fds) - - def make_current(self): - if not self.is_current: - try: - self.old_asyncio = asyncio.get_event_loop() - except (RuntimeError, AssertionError): - self.old_asyncio = None - self.is_current = True - asyncio.set_event_loop(self.asyncio_loop) - - def _clear_current_hook(self): - if self.is_current: - asyncio.set_event_loop(self.old_asyncio) - self.is_current = False - - -def to_tornado_future(asyncio_future): - """Convert an `asyncio.Future` to a `tornado.concurrent.Future`. - - .. versionadded:: 4.1 - - .. deprecated:: 5.0 - Tornado ``Futures`` have been merged with `asyncio.Future`, - so this method is now a no-op. - """ - return asyncio_future - - -def to_asyncio_future(tornado_future): - """Convert a Tornado yieldable object to an `asyncio.Future`. - - .. versionadded:: 4.1 - - .. versionchanged:: 4.3 - Now accepts any yieldable object, not just - `tornado.concurrent.Future`. - - .. deprecated:: 5.0 - Tornado ``Futures`` have been merged with `asyncio.Future`, - so this method is now equivalent to `tornado.gen.convert_yielded`. - """ - return convert_yielded(tornado_future) - - -class AnyThreadEventLoopPolicy(asyncio.DefaultEventLoopPolicy): - """Event loop policy that allows loop creation on any thread. - - The default `asyncio` event loop policy only automatically creates - event loops in the main threads. Other threads must create event - loops explicitly or `asyncio.get_event_loop` (and therefore - `.IOLoop.current`) will fail. Installing this policy allows event - loops to be created automatically on any thread, matching the - behavior of Tornado versions prior to 5.0 (or 5.0 on Python 2). - - Usage:: - - asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy()) - - .. versionadded:: 5.0 - - """ - def get_event_loop(self): - try: - return super().get_event_loop() - except (RuntimeError, AssertionError): - # This was an AssertionError in python 3.4.2 (which ships with debian jessie) - # and changed to a RuntimeError in 3.4.3. - # "There is no current event loop in thread %r" - loop = self.new_event_loop() - self.set_event_loop(loop) - return loop +"""Bridges between the `asyncio` module and Tornado IOLoop. + +.. 
versionadded:: 3.2 + +This module integrates Tornado with the ``asyncio`` module introduced +in Python 3.4. This makes it possible to combine the two libraries on +the same event loop. + +.. deprecated:: 5.0 + + While the code in this module is still used, it is now enabled + automatically when `asyncio` is available, so applications should + no longer need to refer to this module directly. + +.. note:: + + Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of + methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on + Windows. Use the `~asyncio.SelectorEventLoop` instead. +""" + +import concurrent.futures +import functools +import sys + +from threading import get_ident +from tornado.gen import convert_yielded +from tornado.ioloop import IOLoop, _Selectable + +import asyncio + +import typing +from typing import Any, TypeVar, Awaitable, Callable, Union, Optional + +if typing.TYPE_CHECKING: + from typing import Set, Dict, Tuple # noqa: F401 + +_T = TypeVar("_T") + + +class BaseAsyncIOLoop(IOLoop): + def initialize( # type: ignore + self, asyncio_loop: asyncio.AbstractEventLoop, **kwargs: Any + ) -> None: + self.asyncio_loop = asyncio_loop + # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) + self.handlers = {} # type: Dict[int, Tuple[Union[int, _Selectable], Callable]] + # Set of fds listening for reads/writes + self.readers = set() # type: Set[int] + self.writers = set() # type: Set[int] + self.closing = False + # If an asyncio loop was closed through an asyncio interface + # instead of IOLoop.close(), we'd never hear about it and may + # have left a dangling reference in our map. In case an + # application (or, more likely, a test suite) creates and + # destroys a lot of event loops in this way, check here to + # ensure that we don't have a lot of dead loops building up in + # the map. + # + # TODO(bdarnell): consider making self.asyncio_loop a weakref + # for AsyncIOMainLoop and make _ioloop_for_asyncio a + # WeakKeyDictionary. + for loop in list(IOLoop._ioloop_for_asyncio): + if loop.is_closed(): + del IOLoop._ioloop_for_asyncio[loop] + IOLoop._ioloop_for_asyncio[asyncio_loop] = self + + self._thread_identity = 0 + + super(BaseAsyncIOLoop, self).initialize(**kwargs) + + def assign_thread_identity() -> None: + self._thread_identity = get_ident() + + self.add_callback(assign_thread_identity) + + def close(self, all_fds: bool = False) -> None: + self.closing = True + for fd in list(self.handlers): + fileobj, handler_func = self.handlers[fd] + self.remove_handler(fd) + if all_fds: + self.close_fd(fileobj) + # Remove the mapping before closing the asyncio loop. If this + # happened in the other order, we could race against another + # initialize() call which would see the closed asyncio loop, + # assume it was closed from the asyncio side, and do this + # cleanup for us, leading to a KeyError. 
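As the module docstring above notes, the bridge is now automatic; a minimal sketch of the Tornado and asyncio APIs driving the same underlying loop::

    import asyncio
    from tornado.ioloop import IOLoop

    async def main():
        io_loop = IOLoop.current()  # wraps the running asyncio loop
        fut = asyncio.Future()      # an asyncio future on that same loop
        io_loop.add_callback(fut.set_result, 42)
        return await fut

    # run_sync starts the shared loop, runs the coroutine, and stops it.
    assert IOLoop.current().run_sync(main) == 42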
+ del IOLoop._ioloop_for_asyncio[self.asyncio_loop] + self.asyncio_loop.close() + + def add_handler( + self, fd: Union[int, _Selectable], handler: Callable[..., None], events: int + ) -> None: + fd, fileobj = self.split_fd(fd) + if fd in self.handlers: + raise ValueError("fd %s added twice" % fd) + self.handlers[fd] = (fileobj, handler) + if events & IOLoop.READ: + self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + if events & IOLoop.WRITE: + self.asyncio_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + + def update_handler(self, fd: Union[int, _Selectable], events: int) -> None: + fd, fileobj = self.split_fd(fd) + if events & IOLoop.READ: + if fd not in self.readers: + self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + else: + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if events & IOLoop.WRITE: + if fd not in self.writers: + self.asyncio_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + else: + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + + def remove_handler(self, fd: Union[int, _Selectable]) -> None: + fd, fileobj = self.split_fd(fd) + if fd not in self.handlers: + return + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + del self.handlers[fd] + + def _handle_events(self, fd: int, events: int) -> None: + fileobj, handler_func = self.handlers[fd] + handler_func(fileobj, events) + + def start(self) -> None: + try: + old_loop = asyncio.get_event_loop() + except (RuntimeError, AssertionError): + old_loop = None # type: ignore + try: + self._setup_logging() + asyncio.set_event_loop(self.asyncio_loop) + self.asyncio_loop.run_forever() + finally: + asyncio.set_event_loop(old_loop) + + def stop(self) -> None: + self.asyncio_loop.stop() + + def call_at( + self, when: float, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> object: + # asyncio.call_at supports *args but not **kwargs, so bind them here. + # We do not synchronize self.time and asyncio_loop.time, so + # convert from absolute to relative. + return self.asyncio_loop.call_later( + max(0, when - self.time()), + self._run_callback, + functools.partial(callback, *args, **kwargs), + ) + + def remove_timeout(self, timeout: object) -> None: + timeout.cancel() # type: ignore + + def add_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None: + if get_ident() == self._thread_identity: + call_soon = self.asyncio_loop.call_soon + else: + call_soon = self.asyncio_loop.call_soon_threadsafe + try: + call_soon(self._run_callback, functools.partial(callback, *args, **kwargs)) + except RuntimeError: + # "Event loop is closed". Swallow the exception for + # consistency with PollIOLoop (and logical consistency + # with the fact that we can't guarantee that an + # add_callback that completes without error will + # eventually execute). 
+ pass + + def add_callback_from_signal( + self, callback: Callable, *args: Any, **kwargs: Any + ) -> None: + try: + self.asyncio_loop.call_soon_threadsafe( + self._run_callback, functools.partial(callback, *args, **kwargs) + ) + except RuntimeError: + pass + + def run_in_executor( + self, + executor: Optional[concurrent.futures.Executor], + func: Callable[..., _T], + *args: Any + ) -> Awaitable[_T]: + return self.asyncio_loop.run_in_executor(executor, func, *args) + + def set_default_executor(self, executor: concurrent.futures.Executor) -> None: + return self.asyncio_loop.set_default_executor(executor) + + +class AsyncIOMainLoop(BaseAsyncIOLoop): + """``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the + current ``asyncio`` event loop (i.e. the one returned by + ``asyncio.get_event_loop()``). + + .. deprecated:: 5.0 + + Now used automatically when appropriate; it is no longer necessary + to refer to this class directly. + + .. versionchanged:: 5.0 + + Closing an `AsyncIOMainLoop` now closes the underlying asyncio loop. + """ + + def initialize(self, **kwargs: Any) -> None: # type: ignore + super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), **kwargs) + + def make_current(self) -> None: + # AsyncIOMainLoop already refers to the current asyncio loop so + # nothing to do here. + pass + + +class AsyncIOLoop(BaseAsyncIOLoop): + """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop. + This class follows the usual Tornado semantics for creating new + ``IOLoops``; these loops are not necessarily related to the + ``asyncio`` default event loop. + + Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object + can be accessed with the ``asyncio_loop`` attribute. + + .. versionchanged:: 5.0 + + When an ``AsyncIOLoop`` becomes the current `.IOLoop`, it also sets + the current `asyncio` event loop. + + .. deprecated:: 5.0 + + Now used automatically when appropriate; it is no longer necessary + to refer to this class directly. + """ + + def initialize(self, **kwargs: Any) -> None: # type: ignore + self.is_current = False + loop = asyncio.new_event_loop() + try: + super(AsyncIOLoop, self).initialize(loop, **kwargs) + except Exception: + # If initialize() does not succeed (taking ownership of the loop), + # we have to close it. + loop.close() + raise + + def close(self, all_fds: bool = False) -> None: + if self.is_current: + self.clear_current() + super(AsyncIOLoop, self).close(all_fds=all_fds) + + def make_current(self) -> None: + if not self.is_current: + try: + self.old_asyncio = asyncio.get_event_loop() + except (RuntimeError, AssertionError): + self.old_asyncio = None # type: ignore + self.is_current = True + asyncio.set_event_loop(self.asyncio_loop) + + def _clear_current_hook(self) -> None: + if self.is_current: + asyncio.set_event_loop(self.old_asyncio) + self.is_current = False + + +def to_tornado_future(asyncio_future: asyncio.Future) -> asyncio.Future: + """Convert an `asyncio.Future` to a `tornado.concurrent.Future`. + + .. versionadded:: 4.1 + + .. deprecated:: 5.0 + Tornado ``Futures`` have been merged with `asyncio.Future`, + so this method is now a no-op. + """ + return asyncio_future + + +def to_asyncio_future(tornado_future: asyncio.Future) -> asyncio.Future: + """Convert a Tornado yieldable object to an `asyncio.Future`. + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Now accepts any yieldable object, not just + `tornado.concurrent.Future`. + + .. 
deprecated:: 5.0 + Tornado ``Futures`` have been merged with `asyncio.Future`, + so this method is now equivalent to `tornado.gen.convert_yielded`. + """ + return convert_yielded(tornado_future) + + +if sys.platform == "win32" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"): + # "Any thread" and "selector" should be orthogonal, but there's not a clean + # interface for composing policies so pick the right base. + _BasePolicy = asyncio.WindowsSelectorEventLoopPolicy # type: ignore +else: + _BasePolicy = asyncio.DefaultEventLoopPolicy + + +class AnyThreadEventLoopPolicy(_BasePolicy): # type: ignore + """Event loop policy that allows loop creation on any thread. + + The default `asyncio` event loop policy only automatically creates + event loops in the main threads. Other threads must create event + loops explicitly or `asyncio.get_event_loop` (and therefore + `.IOLoop.current`) will fail. Installing this policy allows event + loops to be created automatically on any thread, matching the + behavior of Tornado versions prior to 5.0 (or 5.0 on Python 2). + + Usage:: + + asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy()) + + .. versionadded:: 5.0 + + """ + + def get_event_loop(self) -> asyncio.AbstractEventLoop: + try: + return super().get_event_loop() + except (RuntimeError, AssertionError): + # This was an AssertionError in python 3.4.2 (which ships with debian jessie) + # and changed to a RuntimeError in 3.4.3. + # "There is no current event loop in thread %r" + loop = self.new_event_loop() + self.set_event_loop(loop) + return loop diff --git a/server/www/packages/packages-linux/x64/tornado/platform/auto.py b/server/www/packages/packages-linux/x64/tornado/platform/auto.py index 1a9133f..c85be11 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/auto.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/auto.py @@ -1,58 +1,32 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Implementation of platform-specific functionality. - -For each function or class described in `tornado.platform.interface`, -the appropriate platform-specific implementation exists in this module. -Most code that needs access to this functionality should do e.g.:: - - from tornado.platform.auto import set_close_exec -""" - -from __future__ import absolute_import, division, print_function - -import os - -if 'APPENGINE_RUNTIME' in os.environ: - from tornado.platform.common import Waker - - def set_close_exec(fd): - pass -elif os.name == 'nt': - from tornado.platform.common import Waker - from tornado.platform.windows import set_close_exec -else: - from tornado.platform.posix import set_close_exec, Waker - -try: - # monotime monkey-patches the time module to have a monotonic function - # in versions of python before 3.3. - import monotime - # Silence pyflakes warning about this unused import - monotime -except ImportError: - pass -try: - # monotonic can provide a monotonic function in versions of python before - # 3.3, too. 
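Usage of ``AnyThreadEventLoopPolicy`` as defined above, letting a worker thread obtain its own loop automatically::

    import asyncio
    import threading
    from tornado.platform.asyncio import AnyThreadEventLoopPolicy

    asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())

    def worker():
        # Without the policy this raises in a non-main thread; with it,
        # a fresh event loop is created and installed on first use.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(asyncio.sleep(0))
        loop.close()

    t = threading.Thread(target=worker)
    t.start()
    t.join()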
- from monotonic import monotonic as monotonic_time -except ImportError: - try: - from time import monotonic as monotonic_time - except ImportError: - monotonic_time = None - -__all__ = ['Waker', 'set_close_exec', 'monotonic_time'] +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Implementation of platform-specific functionality. + +For each function or class described in `tornado.platform.interface`, +the appropriate platform-specific implementation exists in this module. +Most code that needs access to this functionality should do e.g.:: + + from tornado.platform.auto import set_close_exec +""" + +import os + +if os.name == "nt": + from tornado.platform.windows import set_close_exec +else: + from tornado.platform.posix import set_close_exec + +__all__ = ["set_close_exec"] diff --git a/server/www/packages/packages-linux/x64/tornado/platform/caresresolver.py b/server/www/packages/packages-linux/x64/tornado/platform/caresresolver.py index 768cb62..97dd794 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/caresresolver.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/caresresolver.py @@ -1,79 +1,89 @@ -from __future__ import absolute_import, division, print_function -import pycares # type: ignore -import socket - -from tornado.concurrent import Future -from tornado import gen -from tornado.ioloop import IOLoop -from tornado.netutil import Resolver, is_valid_ip - - -class CaresResolver(Resolver): - """Name resolver based on the c-ares library. - - This is a non-blocking and non-threaded resolver. It may not produce - the same results as the system resolver, but can be used for non-blocking - resolution when threads cannot be used. - - c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``, - so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is - the default for ``tornado.simple_httpclient``, but other libraries - may default to ``AF_UNSPEC``. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. 
- """ - def initialize(self): - self.io_loop = IOLoop.current() - self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb) - self.fds = {} - - def _sock_state_cb(self, fd, readable, writable): - state = ((IOLoop.READ if readable else 0) | - (IOLoop.WRITE if writable else 0)) - if not state: - self.io_loop.remove_handler(fd) - del self.fds[fd] - elif fd in self.fds: - self.io_loop.update_handler(fd, state) - self.fds[fd] = state - else: - self.io_loop.add_handler(fd, self._handle_events, state) - self.fds[fd] = state - - def _handle_events(self, fd, events): - read_fd = pycares.ARES_SOCKET_BAD - write_fd = pycares.ARES_SOCKET_BAD - if events & IOLoop.READ: - read_fd = fd - if events & IOLoop.WRITE: - write_fd = fd - self.channel.process_fd(read_fd, write_fd) - - @gen.coroutine - def resolve(self, host, port, family=0): - if is_valid_ip(host): - addresses = [host] - else: - # gethostbyname doesn't take callback as a kwarg - fut = Future() - self.channel.gethostbyname(host, family, - lambda result, error: fut.set_result((result, error))) - result, error = yield fut - if error: - raise IOError('C-Ares returned error %s: %s while resolving %s' % - (error, pycares.errno.strerror(error), host)) - addresses = result.addresses - addrinfo = [] - for address in addresses: - if '.' in address: - address_family = socket.AF_INET - elif ':' in address: - address_family = socket.AF_INET6 - else: - address_family = socket.AF_UNSPEC - if family != socket.AF_UNSPEC and family != address_family: - raise IOError('Requested socket family %d but got %d' % - (family, address_family)) - addrinfo.append((address_family, (address, port))) - raise gen.Return(addrinfo) +import pycares # type: ignore +import socket + +from tornado.concurrent import Future +from tornado import gen +from tornado.ioloop import IOLoop +from tornado.netutil import Resolver, is_valid_ip + +import typing + +if typing.TYPE_CHECKING: + from typing import Generator, Any, List, Tuple, Dict # noqa: F401 + + +class CaresResolver(Resolver): + """Name resolver based on the c-ares library. + + This is a non-blocking and non-threaded resolver. It may not produce + the same results as the system resolver, but can be used for non-blocking + resolution when threads cannot be used. + + c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``, + so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is + the default for ``tornado.simple_httpclient``, but other libraries + may default to ``AF_UNSPEC``. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. 
+ """ + + def initialize(self) -> None: + self.io_loop = IOLoop.current() + self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb) + self.fds = {} # type: Dict[int, int] + + def _sock_state_cb(self, fd: int, readable: bool, writable: bool) -> None: + state = (IOLoop.READ if readable else 0) | (IOLoop.WRITE if writable else 0) + if not state: + self.io_loop.remove_handler(fd) + del self.fds[fd] + elif fd in self.fds: + self.io_loop.update_handler(fd, state) + self.fds[fd] = state + else: + self.io_loop.add_handler(fd, self._handle_events, state) + self.fds[fd] = state + + def _handle_events(self, fd: int, events: int) -> None: + read_fd = pycares.ARES_SOCKET_BAD + write_fd = pycares.ARES_SOCKET_BAD + if events & IOLoop.READ: + read_fd = fd + if events & IOLoop.WRITE: + write_fd = fd + self.channel.process_fd(read_fd, write_fd) + + @gen.coroutine + def resolve( + self, host: str, port: int, family: int = 0 + ) -> "Generator[Any, Any, List[Tuple[int, Any]]]": + if is_valid_ip(host): + addresses = [host] + else: + # gethostbyname doesn't take callback as a kwarg + fut = Future() # type: Future[Tuple[Any, Any]] + self.channel.gethostbyname( + host, family, lambda result, error: fut.set_result((result, error)) + ) + result, error = yield fut + if error: + raise IOError( + "C-Ares returned error %s: %s while resolving %s" + % (error, pycares.errno.strerror(error), host) + ) + addresses = result.addresses + addrinfo = [] + for address in addresses: + if "." in address: + address_family = socket.AF_INET + elif ":" in address: + address_family = socket.AF_INET6 + else: + address_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != address_family: + raise IOError( + "Requested socket family %d but got %d" % (family, address_family) + ) + addrinfo.append((typing.cast(int, address_family), (address, port))) + return addrinfo diff --git a/server/www/packages/packages-linux/x64/tornado/platform/common.py b/server/www/packages/packages-linux/x64/tornado/platform/common.py deleted file mode 100644 index b597748..0000000 --- a/server/www/packages/packages-linux/x64/tornado/platform/common.py +++ /dev/null @@ -1,113 +0,0 @@ -"""Lowest-common-denominator implementations of platform functionality.""" -from __future__ import absolute_import, division, print_function - -import errno -import socket -import time - -from tornado.platform import interface -from tornado.util import errno_from_exception - - -def try_close(f): - # Avoid issue #875 (race condition when using the file in another - # thread). - for i in range(10): - try: - f.close() - except IOError: - # Yield to another thread - time.sleep(1e-3) - else: - break - # Try a last time and let raise - f.close() - - -class Waker(interface.Waker): - """Create an OS independent asynchronous pipe. - - For use on platforms that don't have os.pipe() (or where pipes cannot - be passed to select()), but do have sockets. This includes Windows - and Jython. - """ - def __init__(self): - from .auto import set_close_exec - # Based on Zope select_trigger.py: - # https://github.com/zopefoundation/Zope/blob/master/src/ZServer/medusa/thread/select_trigger.py - - self.writer = socket.socket() - set_close_exec(self.writer.fileno()) - # Disable buffering -- pulling the trigger sends 1 byte, - # and we want that sent immediately, to wake up ASAP. - self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - count = 0 - while 1: - count += 1 - # Bind to a local port; for efficiency, let the OS pick - # a free port for us. 
- # Unfortunately, stress tests showed that we may not - # be able to connect to that port ("Address already in - # use") despite that the OS picked it. This appears - # to be a race bug in the Windows socket implementation. - # So we loop until a connect() succeeds (almost always - # on the first try). See the long thread at - # http://mail.zope.org/pipermail/zope/2005-July/160433.html - # for hideous details. - a = socket.socket() - set_close_exec(a.fileno()) - a.bind(("127.0.0.1", 0)) - a.listen(1) - connect_address = a.getsockname() # assigned (host, port) pair - try: - self.writer.connect(connect_address) - break # success - except socket.error as detail: - if (not hasattr(errno, 'WSAEADDRINUSE') or - errno_from_exception(detail) != errno.WSAEADDRINUSE): - # "Address already in use" is the only error - # I've seen on two WinXP Pro SP2 boxes, under - # Pythons 2.3.5 and 2.4.1. - raise - # (10048, 'Address already in use') - # assert count <= 2 # never triggered in Tim's tests - if count >= 10: # I've never seen it go above 2 - a.close() - self.writer.close() - raise socket.error("Cannot bind trigger!") - # Close `a` and try again. Note: I originally put a short - # sleep() here, but it didn't appear to help or hurt. - a.close() - - self.reader, addr = a.accept() - set_close_exec(self.reader.fileno()) - self.reader.setblocking(0) - self.writer.setblocking(0) - a.close() - self.reader_fd = self.reader.fileno() - - def fileno(self): - return self.reader.fileno() - - def write_fileno(self): - return self.writer.fileno() - - def wake(self): - try: - self.writer.send(b"x") - except (IOError, socket.error, ValueError): - pass - - def consume(self): - try: - while True: - result = self.reader.recv(1024) - if not result: - break - except (IOError, socket.error): - pass - - def close(self): - self.reader.close() - try_close(self.writer) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/epoll.py b/server/www/packages/packages-linux/x64/tornado/platform/epoll.py deleted file mode 100644 index 4e34617..0000000 --- a/server/www/packages/packages-linux/x64/tornado/platform/epoll.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
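The ``Waker`` being deleted here implements the classic self-pipe trick over a loopback socket; ``socket.socketpair`` (available on Windows since Python 3.5) now makes the hand-rolled bind/connect loop above unnecessary, roughly::

    import select
    import socket

    reader, writer = socket.socketpair()
    reader.setblocking(False)
    writer.setblocking(False)

    # Another thread sends one byte to make select() return immediately.
    writer.send(b"x")
    ready, _, _ = select.select([reader], [], [], 1.0)
    assert reader in ready
    reader.recv(1024)  # consume the wake-up byte(s)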
-"""EPoll-based IOLoop implementation for Linux systems.""" -from __future__ import absolute_import, division, print_function - -import select - -from tornado.ioloop import PollIOLoop - - -class EPollIOLoop(PollIOLoop): - def initialize(self, **kwargs): - super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/interface.py b/server/www/packages/packages-linux/x64/tornado/platform/interface.py index cac5326..cc48536 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/interface.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/interface.py @@ -1,66 +1,26 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Interfaces for platform-specific functionality. - -This module exists primarily for documentation purposes and as base classes -for other tornado.platform modules. Most code should import the appropriate -implementation from `tornado.platform.auto`. -""" - -from __future__ import absolute_import, division, print_function - - -def set_close_exec(fd): - """Sets the close-on-exec bit (``FD_CLOEXEC``)for a file descriptor.""" - raise NotImplementedError() - - -class Waker(object): - """A socket-like object that can wake another thread from ``select()``. - - The `~tornado.ioloop.IOLoop` will add the Waker's `fileno()` to - its ``select`` (or ``epoll`` or ``kqueue``) calls. When another - thread wants to wake up the loop, it calls `wake`. Once it has woken - up, it will call `consume` to do any necessary per-wake cleanup. When - the ``IOLoop`` is closed, it closes its waker too. - """ - def fileno(self): - """Returns the read file descriptor for this waker. - - Must be suitable for use with ``select()`` or equivalent on the - local platform. - """ - raise NotImplementedError() - - def write_fileno(self): - """Returns the write file descriptor for this waker.""" - raise NotImplementedError() - - def wake(self): - """Triggers activity on the waker's file descriptor.""" - raise NotImplementedError() - - def consume(self): - """Called after the listen has woken up to do any necessary cleanup.""" - raise NotImplementedError() - - def close(self): - """Closes the waker's file descriptor(s).""" - raise NotImplementedError() - - -def monotonic_time(): - raise NotImplementedError() +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Interfaces for platform-specific functionality. 
+ +This module exists primarily for documentation purposes and as base classes +for other tornado.platform modules. Most code should import the appropriate +implementation from `tornado.platform.auto`. +""" + + +def set_close_exec(fd: int) -> None: + """Sets the close-on-exec bit (``FD_CLOEXEC``)for a file descriptor.""" + raise NotImplementedError() diff --git a/server/www/packages/packages-linux/x64/tornado/platform/kqueue.py b/server/www/packages/packages-linux/x64/tornado/platform/kqueue.py deleted file mode 100644 index 4e0aee0..0000000 --- a/server/www/packages/packages-linux/x64/tornado/platform/kqueue.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""KQueue-based IOLoop implementation for BSD/Mac systems.""" -from __future__ import absolute_import, division, print_function - -import select - -from tornado.ioloop import IOLoop, PollIOLoop - -assert hasattr(select, 'kqueue'), 'kqueue not supported' - - -class _KQueue(object): - """A kqueue-based event loop for BSD/Mac systems.""" - def __init__(self): - self._kqueue = select.kqueue() - self._active = {} - - def fileno(self): - return self._kqueue.fileno() - - def close(self): - self._kqueue.close() - - def register(self, fd, events): - if fd in self._active: - raise IOError("fd %s already registered" % fd) - self._control(fd, events, select.KQ_EV_ADD) - self._active[fd] = events - - def modify(self, fd, events): - self.unregister(fd) - self.register(fd, events) - - def unregister(self, fd): - events = self._active.pop(fd) - self._control(fd, events, select.KQ_EV_DELETE) - - def _control(self, fd, events, flags): - kevents = [] - if events & IOLoop.WRITE: - kevents.append(select.kevent( - fd, filter=select.KQ_FILTER_WRITE, flags=flags)) - if events & IOLoop.READ: - kevents.append(select.kevent( - fd, filter=select.KQ_FILTER_READ, flags=flags)) - # Even though control() takes a list, it seems to return EINVAL - # on Mac OS X (10.6) when there is more than one event in the list. - for kevent in kevents: - self._kqueue.control([kevent], 0) - - def poll(self, timeout): - kevents = self._kqueue.control(None, 1000, timeout) - events = {} - for kevent in kevents: - fd = kevent.ident - if kevent.filter == select.KQ_FILTER_READ: - events[fd] = events.get(fd, 0) | IOLoop.READ - if kevent.filter == select.KQ_FILTER_WRITE: - if kevent.flags & select.KQ_EV_EOF: - # If an asynchronous connection is refused, kqueue - # returns a write event with the EOF flag set. - # Turn this into an error for consistency with the - # other IOLoop implementations. - # Note that for read events, EOF may be returned before - # all data has been consumed from the socket buffer, - # so we only check for EOF on write events. 
- events[fd] = IOLoop.ERROR - else: - events[fd] = events.get(fd, 0) | IOLoop.WRITE - if kevent.flags & select.KQ_EV_ERROR: - events[fd] = events.get(fd, 0) | IOLoop.ERROR - return events.items() - - -class KQueueIOLoop(PollIOLoop): - def initialize(self, **kwargs): - super(KQueueIOLoop, self).initialize(impl=_KQueue(), **kwargs) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/posix.py b/server/www/packages/packages-linux/x64/tornado/platform/posix.py index 6fe1fa8..6e66f4e 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/posix.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/posix.py @@ -1,69 +1,29 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Posix implementations of platform-specific functionality.""" - -from __future__ import absolute_import, division, print_function - -import fcntl -import os - -from tornado.platform import common, interface - - -def set_close_exec(fd): - flags = fcntl.fcntl(fd, fcntl.F_GETFD) - fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) - - -def _set_nonblocking(fd): - flags = fcntl.fcntl(fd, fcntl.F_GETFL) - fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) - - -class Waker(interface.Waker): - def __init__(self): - r, w = os.pipe() - _set_nonblocking(r) - _set_nonblocking(w) - set_close_exec(r) - set_close_exec(w) - self.reader = os.fdopen(r, "rb", 0) - self.writer = os.fdopen(w, "wb", 0) - - def fileno(self): - return self.reader.fileno() - - def write_fileno(self): - return self.writer.fileno() - - def wake(self): - try: - self.writer.write(b"x") - except (IOError, ValueError): - pass - - def consume(self): - try: - while True: - result = self.reader.read() - if not result: - break - except IOError: - pass - - def close(self): - self.reader.close() - common.try_close(self.writer) +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
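The rewritten posix.py that follows keeps only the `fcntl` helpers. A small sketch of the `FD_CLOEXEC` read-modify-write it performs, using only the standard library (the pipe is illustrative):

import fcntl
import os


def set_close_exec(fd: int) -> None:
    # Same flag dance as tornado.platform.posix.set_close_exec.
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)


r, w = os.pipe()
set_close_exec(r)
assert fcntl.fcntl(r, fcntl.F_GETFD) & fcntl.FD_CLOEXEC  # bit is now set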
+ +"""Posix implementations of platform-specific functionality.""" + +import fcntl +import os + + +def set_close_exec(fd: int) -> None: + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + +def _set_nonblocking(fd: int) -> None: + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/select.py b/server/www/packages/packages-linux/x64/tornado/platform/select.py deleted file mode 100644 index 14e8a47..0000000 --- a/server/www/packages/packages-linux/x64/tornado/platform/select.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Select-based IOLoop implementation. - -Used as a fallback for systems that don't support epoll or kqueue. -""" -from __future__ import absolute_import, division, print_function - -import select - -from tornado.ioloop import IOLoop, PollIOLoop - - -class _Select(object): - """A simple, select()-based IOLoop implementation for non-Linux systems""" - def __init__(self): - self.read_fds = set() - self.write_fds = set() - self.error_fds = set() - self.fd_sets = (self.read_fds, self.write_fds, self.error_fds) - - def close(self): - pass - - def register(self, fd, events): - if fd in self.read_fds or fd in self.write_fds or fd in self.error_fds: - raise IOError("fd %s already registered" % fd) - if events & IOLoop.READ: - self.read_fds.add(fd) - if events & IOLoop.WRITE: - self.write_fds.add(fd) - if events & IOLoop.ERROR: - self.error_fds.add(fd) - # Closed connections are reported as errors by epoll and kqueue, - # but as zero-byte reads by select, so when errors are requested - # we need to listen for both read and error. 
- # self.read_fds.add(fd) - - def modify(self, fd, events): - self.unregister(fd) - self.register(fd, events) - - def unregister(self, fd): - self.read_fds.discard(fd) - self.write_fds.discard(fd) - self.error_fds.discard(fd) - - def poll(self, timeout): - readable, writeable, errors = select.select( - self.read_fds, self.write_fds, self.error_fds, timeout) - events = {} - for fd in readable: - events[fd] = events.get(fd, 0) | IOLoop.READ - for fd in writeable: - events[fd] = events.get(fd, 0) | IOLoop.WRITE - for fd in errors: - events[fd] = events.get(fd, 0) | IOLoop.ERROR - return events.items() - - -class SelectIOLoop(PollIOLoop): - def initialize(self, **kwargs): - super(SelectIOLoop, self).initialize(impl=_Select(), **kwargs) diff --git a/server/www/packages/packages-linux/x64/tornado/platform/twisted.py b/server/www/packages/packages-linux/x64/tornado/platform/twisted.py index b38a755..2be54cf 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/twisted.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/twisted.py @@ -1,609 +1,131 @@ -# Author: Ovidiu Predescu -# Date: July 2011 -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Bridges between the Twisted reactor and Tornado IOLoop. - -This module lets you run applications and libraries written for -Twisted in a Tornado application. It can be used in two modes, -depending on which library's underlying event loop you want to use. - -This module has been tested with Twisted versions 11.0.0 and newer. 
-""" - -from __future__ import absolute_import, division, print_function - -import datetime -import functools -import numbers -import socket -import sys - -import twisted.internet.abstract # type: ignore -from twisted.internet.defer import Deferred # type: ignore -from twisted.internet.posixbase import PosixReactorBase # type: ignore -from twisted.internet.interfaces import IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor # type: ignore # noqa: E501 -from twisted.python import failure, log # type: ignore -from twisted.internet import error # type: ignore -import twisted.names.cache # type: ignore -import twisted.names.client # type: ignore -import twisted.names.hosts # type: ignore -import twisted.names.resolve # type: ignore - -from zope.interface import implementer # type: ignore - -from tornado.concurrent import Future, future_set_exc_info -from tornado.escape import utf8 -from tornado import gen -import tornado.ioloop -from tornado.log import app_log -from tornado.netutil import Resolver -from tornado.stack_context import NullContext, wrap -from tornado.ioloop import IOLoop -from tornado.util import timedelta_to_seconds - - -@implementer(IDelayedCall) -class TornadoDelayedCall(object): - """DelayedCall object for Tornado.""" - def __init__(self, reactor, seconds, f, *args, **kw): - self._reactor = reactor - self._func = functools.partial(f, *args, **kw) - self._time = self._reactor.seconds() + seconds - self._timeout = self._reactor._io_loop.add_timeout(self._time, - self._called) - self._active = True - - def _called(self): - self._active = False - self._reactor._removeDelayedCall(self) - try: - self._func() - except: - app_log.error("_called caught exception", exc_info=True) - - def getTime(self): - return self._time - - def cancel(self): - self._active = False - self._reactor._io_loop.remove_timeout(self._timeout) - self._reactor._removeDelayedCall(self) - - def delay(self, seconds): - self._reactor._io_loop.remove_timeout(self._timeout) - self._time += seconds - self._timeout = self._reactor._io_loop.add_timeout(self._time, - self._called) - - def reset(self, seconds): - self._reactor._io_loop.remove_timeout(self._timeout) - self._time = self._reactor.seconds() + seconds - self._timeout = self._reactor._io_loop.add_timeout(self._time, - self._called) - - def active(self): - return self._active - - -@implementer(IReactorTime, IReactorFDSet) -class TornadoReactor(PosixReactorBase): - """Twisted reactor built on the Tornado IOLoop. - - `TornadoReactor` implements the Twisted reactor interface on top of - the Tornado IOLoop. To use it, simply call `install` at the beginning - of the application:: - - import tornado.platform.twisted - tornado.platform.twisted.install() - from twisted.internet import reactor - - When the app is ready to start, call ``IOLoop.current().start()`` - instead of ``reactor.run()``. - - It is also possible to create a non-global reactor by calling - ``tornado.platform.twisted.TornadoReactor()``. However, if - the `.IOLoop` and reactor are to be short-lived (such as those used in - unit tests), additional cleanup may be required. Specifically, it is - recommended to call:: - - reactor.fireSystemEvent('shutdown') - reactor.disconnectAll() - - before closing the `.IOLoop`. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. deprecated:: 5.1 - - This class will be removed in Tornado 6.0. Use - ``twisted.internet.asyncioreactor.AsyncioSelectorReactor`` - instead. 
- - """ - def __init__(self): - self._io_loop = tornado.ioloop.IOLoop.current() - self._readers = {} # map of reader objects to fd - self._writers = {} # map of writer objects to fd - self._fds = {} # a map of fd to a (reader, writer) tuple - self._delayedCalls = {} - PosixReactorBase.__init__(self) - self.addSystemEventTrigger('during', 'shutdown', self.crash) - - # IOLoop.start() bypasses some of the reactor initialization. - # Fire off the necessary events if they weren't already triggered - # by reactor.run(). - def start_if_necessary(): - if not self._started: - self.fireSystemEvent('startup') - self._io_loop.add_callback(start_if_necessary) - - # IReactorTime - def seconds(self): - return self._io_loop.time() - - def callLater(self, seconds, f, *args, **kw): - dc = TornadoDelayedCall(self, seconds, f, *args, **kw) - self._delayedCalls[dc] = True - return dc - - def getDelayedCalls(self): - return [x for x in self._delayedCalls if x._active] - - def _removeDelayedCall(self, dc): - if dc in self._delayedCalls: - del self._delayedCalls[dc] - - # IReactorThreads - def callFromThread(self, f, *args, **kw): - assert callable(f), "%s is not callable" % f - with NullContext(): - # This NullContext is mainly for an edge case when running - # TwistedIOLoop on top of a TornadoReactor. - # TwistedIOLoop.add_callback uses reactor.callFromThread and - # should not pick up additional StackContexts along the way. - self._io_loop.add_callback(f, *args, **kw) - - # We don't need the waker code from the super class, Tornado uses - # its own waker. - def installWaker(self): - pass - - def wakeUp(self): - pass - - # IReactorFDSet - def _invoke_callback(self, fd, events): - if fd not in self._fds: - return - (reader, writer) = self._fds[fd] - if reader: - err = None - if reader.fileno() == -1: - err = error.ConnectionLost() - elif events & IOLoop.READ: - err = log.callWithLogger(reader, reader.doRead) - if err is None and events & IOLoop.ERROR: - err = error.ConnectionLost() - if err is not None: - self.removeReader(reader) - reader.readConnectionLost(failure.Failure(err)) - if writer: - err = None - if writer.fileno() == -1: - err = error.ConnectionLost() - elif events & IOLoop.WRITE: - err = log.callWithLogger(writer, writer.doWrite) - if err is None and events & IOLoop.ERROR: - err = error.ConnectionLost() - if err is not None: - self.removeWriter(writer) - writer.writeConnectionLost(failure.Failure(err)) - - def addReader(self, reader): - if reader in self._readers: - # Don't add the reader if it's already there - return - fd = reader.fileno() - self._readers[reader] = fd - if fd in self._fds: - (_, writer) = self._fds[fd] - self._fds[fd] = (reader, writer) - if writer: - # We already registered this fd for write events, - # update it for read events as well. - self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) - else: - with NullContext(): - self._fds[fd] = (reader, None) - self._io_loop.add_handler(fd, self._invoke_callback, - IOLoop.READ) - - def addWriter(self, writer): - if writer in self._writers: - return - fd = writer.fileno() - self._writers[writer] = fd - if fd in self._fds: - (reader, _) = self._fds[fd] - self._fds[fd] = (reader, writer) - if reader: - # We already registered this fd for read events, - # update it for write events as well. 
- self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) - else: - with NullContext(): - self._fds[fd] = (None, writer) - self._io_loop.add_handler(fd, self._invoke_callback, - IOLoop.WRITE) - - def removeReader(self, reader): - if reader in self._readers: - fd = self._readers.pop(reader) - (_, writer) = self._fds[fd] - if writer: - # We have a writer so we need to update the IOLoop for - # write events only. - self._fds[fd] = (None, writer) - self._io_loop.update_handler(fd, IOLoop.WRITE) - else: - # Since we have no writer registered, we remove the - # entry from _fds and unregister the handler from the - # IOLoop - del self._fds[fd] - self._io_loop.remove_handler(fd) - - def removeWriter(self, writer): - if writer in self._writers: - fd = self._writers.pop(writer) - (reader, _) = self._fds[fd] - if reader: - # We have a reader so we need to update the IOLoop for - # read events only. - self._fds[fd] = (reader, None) - self._io_loop.update_handler(fd, IOLoop.READ) - else: - # Since we have no reader registered, we remove the - # entry from the _fds and unregister the handler from - # the IOLoop. - del self._fds[fd] - self._io_loop.remove_handler(fd) - - def removeAll(self): - return self._removeAll(self._readers, self._writers) - - def getReaders(self): - return self._readers.keys() - - def getWriters(self): - return self._writers.keys() - - # The following functions are mainly used in twisted-style test cases; - # it is expected that most users of the TornadoReactor will call - # IOLoop.start() instead of Reactor.run(). - def stop(self): - PosixReactorBase.stop(self) - fire_shutdown = functools.partial(self.fireSystemEvent, "shutdown") - self._io_loop.add_callback(fire_shutdown) - - def crash(self): - PosixReactorBase.crash(self) - self._io_loop.stop() - - def doIteration(self, delay): - raise NotImplementedError("doIteration") - - def mainLoop(self): - # Since this class is intended to be used in applications - # where the top-level event loop is ``io_loop.start()`` rather - # than ``reactor.run()``, it is implemented a little - # differently than other Twisted reactors. We override - # ``mainLoop`` instead of ``doIteration`` and must implement - # timed call functionality on top of `.IOLoop.add_timeout` - # rather than using the implementation in - # ``PosixReactorBase``. - self._io_loop.start() - - -class _TestReactor(TornadoReactor): - """Subclass of TornadoReactor for use in unittests. - - This can't go in the test.py file because of import-order dependencies - with the Twisted reactor test builder. - """ - def __init__(self): - # always use a new ioloop - IOLoop.clear_current() - IOLoop(make_current=True) - super(_TestReactor, self).__init__() - IOLoop.clear_current() - - def listenTCP(self, port, factory, backlog=50, interface=''): - # default to localhost to avoid firewall prompts on the mac - if not interface: - interface = '127.0.0.1' - return super(_TestReactor, self).listenTCP( - port, factory, backlog=backlog, interface=interface) - - def listenUDP(self, port, protocol, interface='', maxPacketSize=8192): - if not interface: - interface = '127.0.0.1' - return super(_TestReactor, self).listenUDP( - port, protocol, interface=interface, maxPacketSize=maxPacketSize) - - -def install(): - """Install this package as the default Twisted reactor. - - ``install()`` must be called very early in the startup process, - before most other twisted-related imports. 
Conversely, because it - initializes the `.IOLoop`, it cannot be called before - `.fork_processes` or multi-process `~.TCPServer.start`. These - conflicting requirements make it difficult to use `.TornadoReactor` - in multi-process mode, and an external process manager such as - ``supervisord`` is recommended instead. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - .. deprecated:: 5.1 - - This function will be removed in Tornado 6.0. Use - ``twisted.internet.asyncioreactor.install`` instead. - """ - reactor = TornadoReactor() - from twisted.internet.main import installReactor # type: ignore - installReactor(reactor) - return reactor - - -@implementer(IReadDescriptor, IWriteDescriptor) -class _FD(object): - def __init__(self, fd, fileobj, handler): - self.fd = fd - self.fileobj = fileobj - self.handler = handler - self.reading = False - self.writing = False - self.lost = False - - def fileno(self): - return self.fd - - def doRead(self): - if not self.lost: - self.handler(self.fileobj, tornado.ioloop.IOLoop.READ) - - def doWrite(self): - if not self.lost: - self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE) - - def connectionLost(self, reason): - if not self.lost: - self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR) - self.lost = True - - writeConnectionLost = readConnectionLost = connectionLost - - def logPrefix(self): - return '' - - -class TwistedIOLoop(tornado.ioloop.IOLoop): - """IOLoop implementation that runs on Twisted. - - `TwistedIOLoop` implements the Tornado IOLoop interface on top of - the Twisted reactor. Recommended usage:: - - from tornado.platform.twisted import TwistedIOLoop - from twisted.internet import reactor - TwistedIOLoop().install() - # Set up your tornado application as usual using `IOLoop.instance` - reactor.run() - - Uses the global Twisted reactor by default. To create multiple - ``TwistedIOLoops`` in the same process, you must pass a unique reactor - when constructing each one. - - Not compatible with `tornado.process.Subprocess.set_exit_callback` - because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict - with each other. - - See also :meth:`tornado.ioloop.IOLoop.install` for general notes on - installing alternative IOLoops. - - .. deprecated:: 5.1 - - The `asyncio` event loop will be the only available implementation in - Tornado 6.0.
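Both deprecation notices above point to Twisted's asyncio-backed reactor. A hedged migration sketch; the essential constraint is that `install()` runs before anything imports `twisted.internet.reactor`:

import asyncio

from twisted.internet import asyncioreactor

# Install the asyncio-based reactor before the reactor module is imported.
asyncioreactor.install(asyncio.get_event_loop())

from twisted.internet import reactor  # noqa: E402

from tornado.ioloop import IOLoop  # noqa: E402

# Tornado's IOLoop.current() now wraps the same asyncio loop, so Tornado
# callbacks and Twisted code share a single event loop.
IOLoop.current().add_callback(lambda: print("tornado callback ran"))
reactor.callLater(0.1, reactor.stop)  # stop shortly, just for the sketch
reactor.run()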
- """ - def initialize(self, reactor=None, **kwargs): - super(TwistedIOLoop, self).initialize(**kwargs) - if reactor is None: - import twisted.internet.reactor # type: ignore - reactor = twisted.internet.reactor - self.reactor = reactor - self.fds = {} - - def close(self, all_fds=False): - fds = self.fds - self.reactor.removeAll() - for c in self.reactor.getDelayedCalls(): - c.cancel() - if all_fds: - for fd in fds.values(): - self.close_fd(fd.fileobj) - - def add_handler(self, fd, handler, events): - if fd in self.fds: - raise ValueError('fd %s added twice' % fd) - fd, fileobj = self.split_fd(fd) - self.fds[fd] = _FD(fd, fileobj, wrap(handler)) - if events & tornado.ioloop.IOLoop.READ: - self.fds[fd].reading = True - self.reactor.addReader(self.fds[fd]) - if events & tornado.ioloop.IOLoop.WRITE: - self.fds[fd].writing = True - self.reactor.addWriter(self.fds[fd]) - - def update_handler(self, fd, events): - fd, fileobj = self.split_fd(fd) - if events & tornado.ioloop.IOLoop.READ: - if not self.fds[fd].reading: - self.fds[fd].reading = True - self.reactor.addReader(self.fds[fd]) - else: - if self.fds[fd].reading: - self.fds[fd].reading = False - self.reactor.removeReader(self.fds[fd]) - if events & tornado.ioloop.IOLoop.WRITE: - if not self.fds[fd].writing: - self.fds[fd].writing = True - self.reactor.addWriter(self.fds[fd]) - else: - if self.fds[fd].writing: - self.fds[fd].writing = False - self.reactor.removeWriter(self.fds[fd]) - - def remove_handler(self, fd): - fd, fileobj = self.split_fd(fd) - if fd not in self.fds: - return - self.fds[fd].lost = True - if self.fds[fd].reading: - self.reactor.removeReader(self.fds[fd]) - if self.fds[fd].writing: - self.reactor.removeWriter(self.fds[fd]) - del self.fds[fd] - - def start(self): - old_current = IOLoop.current(instance=False) - try: - self._setup_logging() - self.make_current() - self.reactor.run() - finally: - if old_current is None: - IOLoop.clear_current() - else: - old_current.make_current() - - def stop(self): - self.reactor.crash() - - def add_timeout(self, deadline, callback, *args, **kwargs): - # This method could be simplified (since tornado 4.0) by - # overriding call_at instead of add_timeout, but we leave it - # for now as a test of backwards-compatibility. - if isinstance(deadline, numbers.Real): - delay = max(deadline - self.time(), 0) - elif isinstance(deadline, datetime.timedelta): - delay = timedelta_to_seconds(deadline) - else: - raise TypeError("Unsupported deadline %r") - return self.reactor.callLater( - delay, self._run_callback, - functools.partial(wrap(callback), *args, **kwargs)) - - def remove_timeout(self, timeout): - if timeout.active(): - timeout.cancel() - - def add_callback(self, callback, *args, **kwargs): - self.reactor.callFromThread( - self._run_callback, - functools.partial(wrap(callback), *args, **kwargs)) - - def add_callback_from_signal(self, callback, *args, **kwargs): - self.add_callback(callback, *args, **kwargs) - - -class TwistedResolver(Resolver): - """Twisted-based asynchronous resolver. - - This is a non-blocking and non-threaded resolver. It is - recommended only when threads cannot be used, since it has - limitations compared to the standard ``getaddrinfo``-based - `~tornado.netutil.Resolver` and - `~tornado.netutil.DefaultExecutorResolver`. Specifically, it returns at - most one result, and arguments other than ``host`` and ``family`` - are ignored. It may fail to resolve when ``family`` is not - ``socket.AF_UNSPEC``. - - Requires Twisted 12.1 or newer. - - .. 
versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - """ - def initialize(self): - # partial copy of twisted.names.client.createResolver, which doesn't - # allow for a reactor to be passed in. - self.reactor = tornado.platform.twisted.TornadoReactor() - - host_resolver = twisted.names.hosts.Resolver('/etc/hosts') - cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor) - real_resolver = twisted.names.client.Resolver('/etc/resolv.conf', - reactor=self.reactor) - self.resolver = twisted.names.resolve.ResolverChain( - [host_resolver, cache_resolver, real_resolver]) - - @gen.coroutine - def resolve(self, host, port, family=0): - # getHostByName doesn't accept IP addresses, so if the input - # looks like an IP address just return it immediately. - if twisted.internet.abstract.isIPAddress(host): - resolved = host - resolved_family = socket.AF_INET - elif twisted.internet.abstract.isIPv6Address(host): - resolved = host - resolved_family = socket.AF_INET6 - else: - deferred = self.resolver.getHostByName(utf8(host)) - fut = Future() - deferred.addBoth(fut.set_result) - resolved = yield fut - if isinstance(resolved, failure.Failure): - try: - resolved.raiseException() - except twisted.names.error.DomainError as e: - raise IOError(e) - elif twisted.internet.abstract.isIPAddress(resolved): - resolved_family = socket.AF_INET - elif twisted.internet.abstract.isIPv6Address(resolved): - resolved_family = socket.AF_INET6 - else: - resolved_family = socket.AF_UNSPEC - if family != socket.AF_UNSPEC and family != resolved_family: - raise Exception('Requested socket family %d but got %d' % - (family, resolved_family)) - result = [ - (resolved_family, (resolved, port)), - ] - raise gen.Return(result) - - -if hasattr(gen.convert_yielded, 'register'): - @gen.convert_yielded.register(Deferred) # type: ignore - def _(d): - f = Future() - - def errback(failure): - try: - failure.raiseException() - # Should never happen, but just in case - raise Exception("errback called without error") - except: - future_set_exc_info(f, sys.exc_info()) - d.addCallbacks(f.set_result, errback) - return f +# Author: Ovidiu Predescu +# Date: July 2011 +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Bridges between the Twisted reactor and Tornado IOLoop. + +This module lets you run applications and libraries written for +Twisted in a Tornado application. It can be used in two modes, +depending on which library's underlying event loop you want to use. + +This module has been tested with Twisted versions 11.0.0 and newer. 
+""" + +import socket +import sys + +import twisted.internet.abstract # type: ignore +import twisted.internet.asyncioreactor # type: ignore +from twisted.internet.defer import Deferred # type: ignore +from twisted.python import failure # type: ignore +import twisted.names.cache # type: ignore +import twisted.names.client # type: ignore +import twisted.names.hosts # type: ignore +import twisted.names.resolve # type: ignore + + +from tornado.concurrent import Future, future_set_exc_info +from tornado.escape import utf8 +from tornado import gen +from tornado.netutil import Resolver + +import typing + +if typing.TYPE_CHECKING: + from typing import Generator, Any, List, Tuple # noqa: F401 + + +class TwistedResolver(Resolver): + """Twisted-based asynchronous resolver. + + This is a non-blocking and non-threaded resolver. It is + recommended only when threads cannot be used, since it has + limitations compared to the standard ``getaddrinfo``-based + `~tornado.netutil.Resolver` and + `~tornado.netutil.DefaultExecutorResolver`. Specifically, it returns at + most one result, and arguments other than ``host`` and ``family`` + are ignored. It may fail to resolve when ``family`` is not + ``socket.AF_UNSPEC``. + + Requires Twisted 12.1 or newer. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + """ + + def initialize(self) -> None: + # partial copy of twisted.names.client.createResolver, which doesn't + # allow for a reactor to be passed in. + self.reactor = twisted.internet.asyncioreactor.AsyncioSelectorReactor() + + host_resolver = twisted.names.hosts.Resolver("/etc/hosts") + cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor) + real_resolver = twisted.names.client.Resolver( + "/etc/resolv.conf", reactor=self.reactor + ) + self.resolver = twisted.names.resolve.ResolverChain( + [host_resolver, cache_resolver, real_resolver] + ) + + @gen.coroutine + def resolve( + self, host: str, port: int, family: int = 0 + ) -> "Generator[Any, Any, List[Tuple[int, Any]]]": + # getHostByName doesn't accept IP addresses, so if the input + # looks like an IP address just return it immediately. 
+ if twisted.internet.abstract.isIPAddress(host): + resolved = host + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(host): + resolved = host + resolved_family = socket.AF_INET6 + else: + deferred = self.resolver.getHostByName(utf8(host)) + fut = Future() # type: Future[Any] + deferred.addBoth(fut.set_result) + resolved = yield fut + if isinstance(resolved, failure.Failure): + try: + resolved.raiseException() + except twisted.names.error.DomainError as e: + raise IOError(e) + elif twisted.internet.abstract.isIPAddress(resolved): + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(resolved): + resolved_family = socket.AF_INET6 + else: + resolved_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != resolved_family: + raise Exception( + "Requested socket family %d but got %d" % (family, resolved_family) + ) + result = [(typing.cast(int, resolved_family), (resolved, port))] + return result + + +if hasattr(gen.convert_yielded, "register"): + + @gen.convert_yielded.register(Deferred) # type: ignore + def _(d: Deferred) -> Future: + f = Future() # type: Future[Any] + + def errback(failure: failure.Failure) -> None: + try: + failure.raiseException() + # Should never happen, but just in case + raise Exception("errback called without error") + except: + future_set_exc_info(f, sys.exc_info()) + + d.addCallbacks(f.set_result, errback) + return f diff --git a/server/www/packages/packages-linux/x64/tornado/platform/windows.py b/server/www/packages/packages-linux/x64/tornado/platform/windows.py index 4127700..9e859e6 100644 --- a/server/www/packages/packages-linux/x64/tornado/platform/windows.py +++ b/server/www/packages/packages-linux/x64/tornado/platform/windows.py @@ -1,20 +1,22 @@ -# NOTE: win32 support is currently experimental, and not recommended -# for production use. - - -from __future__ import absolute_import, division, print_function -import ctypes # type: ignore -import ctypes.wintypes # type: ignore - -# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx -SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation -SetHandleInformation.argtypes = (ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD) # noqa: E501 -SetHandleInformation.restype = ctypes.wintypes.BOOL - -HANDLE_FLAG_INHERIT = 0x00000001 - - -def set_close_exec(fd): - success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) - if not success: - raise ctypes.WinError() +# NOTE: win32 support is currently experimental, and not recommended +# for production use. 
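A usage sketch for the Windows `set_close_exec` defined just below: clearing `HANDLE_FLAG_INHERIT` keeps the handle out of child processes. Windows-only; the socket is illustrative:

import socket

from tornado.platform.windows import set_close_exec

listener = socket.socket()
# After this call, child processes no longer inherit the socket handle.
set_close_exec(listener.fileno())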
+ +import ctypes +import ctypes.wintypes + +# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx +SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation # type: ignore +SetHandleInformation.argtypes = ( + ctypes.wintypes.HANDLE, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, +) +SetHandleInformation.restype = ctypes.wintypes.BOOL + +HANDLE_FLAG_INHERIT = 0x00000001 + + +def set_close_exec(fd: int) -> None: + success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) + if not success: + raise ctypes.WinError() # type: ignore diff --git a/server/www/packages/packages-linux/x64/tornado/process.py b/server/www/packages/packages-linux/x64/tornado/process.py index 122fd7e..6ed42a0 100644 --- a/server/www/packages/packages-linux/x64/tornado/process.py +++ b/server/www/packages/packages-linux/x64/tornado/process.py @@ -1,361 +1,373 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utilities for working with multiple processes, including both forking -the server into multiple processes and managing subprocesses. -""" - -from __future__ import absolute_import, division, print_function - -import errno -import os -import signal -import subprocess -import sys -import time - -from binascii import hexlify - -from tornado.concurrent import Future, future_set_result_unless_cancelled -from tornado import ioloop -from tornado.iostream import PipeIOStream -from tornado.log import gen_log -from tornado.platform.auto import set_close_exec -from tornado import stack_context -from tornado.util import errno_from_exception, PY3 - -try: - import multiprocessing -except ImportError: - # Multiprocessing is not available on Google App Engine. - multiprocessing = None - -if PY3: - long = int - -# Re-export this exception for convenience. -try: - CalledProcessError = subprocess.CalledProcessError -except AttributeError: - # The subprocess module exists in Google App Engine, but is empty. - # This module isn't very useful in that case, but it should - # at least be importable. - if 'APPENGINE_RUNTIME' not in os.environ: - raise - - -def cpu_count(): - """Returns the number of processors on this machine.""" - if multiprocessing is None: - return 1 - try: - return multiprocessing.cpu_count() - except NotImplementedError: - pass - try: - return os.sysconf("SC_NPROCESSORS_CONF") - except (AttributeError, ValueError): - pass - gen_log.error("Could not detect number of processors; assuming 1") - return 1 - - -def _reseed_random(): - if 'random' not in sys.modules: - return - import random - # If os.urandom is available, this method does the same thing as - # random.seed (at least as of python 2.6). If os.urandom is not - # available, we mix in the pid in addition to a timestamp. 
- try: - seed = long(hexlify(os.urandom(16)), 16) - except NotImplementedError: - seed = int(time.time() * 1000) ^ os.getpid() - random.seed(seed) - - -def _pipe_cloexec(): - r, w = os.pipe() - set_close_exec(r) - set_close_exec(w) - return r, w - - -_task_id = None - - -def fork_processes(num_processes, max_restarts=100): - """Starts multiple worker processes. - - If ``num_processes`` is None or <= 0, we detect the number of cores - available on this machine and fork that number of child - processes. If ``num_processes`` is given and > 0, we fork that - specific number of sub-processes. - - Since we use processes and not threads, there is no shared memory - between any server code. - - Note that multiple processes are not compatible with the autoreload - module (or the ``autoreload=True`` option to `tornado.web.Application` - which defaults to True when ``debug=True``). - When using multiple processes, no IOLoops can be created or - referenced until after the call to ``fork_processes``. - - In each child process, ``fork_processes`` returns its *task id*, a - number between 0 and ``num_processes``. Processes that exit - abnormally (due to a signal or non-zero exit status) are restarted - with the same id (up to ``max_restarts`` times). In the parent - process, ``fork_processes`` returns None if all child processes - have exited normally, but will otherwise only exit by throwing an - exception. - """ - global _task_id - assert _task_id is None - if num_processes is None or num_processes <= 0: - num_processes = cpu_count() - gen_log.info("Starting %d processes", num_processes) - children = {} - - def start_child(i): - pid = os.fork() - if pid == 0: - # child process - _reseed_random() - global _task_id - _task_id = i - return i - else: - children[pid] = i - return None - - for i in range(num_processes): - id = start_child(i) - if id is not None: - return id - num_restarts = 0 - while children: - try: - pid, status = os.wait() - except OSError as e: - if errno_from_exception(e) == errno.EINTR: - continue - raise - if pid not in children: - continue - id = children.pop(pid) - if os.WIFSIGNALED(status): - gen_log.warning("child %d (pid %d) killed by signal %d, restarting", - id, pid, os.WTERMSIG(status)) - elif os.WEXITSTATUS(status) != 0: - gen_log.warning("child %d (pid %d) exited with status %d, restarting", - id, pid, os.WEXITSTATUS(status)) - else: - gen_log.info("child %d (pid %d) exited normally", id, pid) - continue - num_restarts += 1 - if num_restarts > max_restarts: - raise RuntimeError("Too many child restarts, giving up") - new_id = start_child(id) - if new_id is not None: - return new_id - # All child processes exited cleanly, so exit the master process - # instead of just returning to right after the call to - # fork_processes (which will probably just start up another IOLoop - # unless the caller checks the return value). - sys.exit(0) - - -def task_id(): - """Returns the current task id, if any. - - Returns None if this process was not created by `fork_processes`. - """ - global _task_id - return _task_id - - -class Subprocess(object): - """Wraps ``subprocess.Popen`` with IOStream support. - - The constructor is the same as ``subprocess.Popen`` with the following - additions: - - * ``stdin``, ``stdout``, and ``stderr`` may have the value - ``tornado.process.Subprocess.STREAM``, which will make the corresponding - attribute of the resulting Subprocess a `.PipeIOStream`. If this option - is used, the caller is responsible for closing the streams when done - with them. 
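As an aside on the ``Subprocess.STREAM`` option described above: a usage sketch (POSIX only, per the note that follows; the command is illustrative):

from tornado.ioloop import IOLoop
from tornado.process import Subprocess


async def main() -> None:
    proc = Subprocess(["echo", "hello"], stdout=Subprocess.STREAM)
    output = await proc.stdout.read_until_close()  # caller owns the stream
    ret = await proc.wait_for_exit()
    print(output, ret)  # b'hello\n' 0


IOLoop.current().run_sync(main)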
- - The ``Subprocess.STREAM`` option and the ``set_exit_callback`` and - ``wait_for_exit`` methods do not work on Windows. There is - therefore no reason to use this class instead of - ``subprocess.Popen`` on that platform. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - - """ - STREAM = object() - - _initialized = False - _waiting = {} # type: ignore - - def __init__(self, *args, **kwargs): - self.io_loop = ioloop.IOLoop.current() - # All FDs we create should be closed on error; those in to_close - # should be closed in the parent process on success. - pipe_fds = [] - to_close = [] - if kwargs.get('stdin') is Subprocess.STREAM: - in_r, in_w = _pipe_cloexec() - kwargs['stdin'] = in_r - pipe_fds.extend((in_r, in_w)) - to_close.append(in_r) - self.stdin = PipeIOStream(in_w) - if kwargs.get('stdout') is Subprocess.STREAM: - out_r, out_w = _pipe_cloexec() - kwargs['stdout'] = out_w - pipe_fds.extend((out_r, out_w)) - to_close.append(out_w) - self.stdout = PipeIOStream(out_r) - if kwargs.get('stderr') is Subprocess.STREAM: - err_r, err_w = _pipe_cloexec() - kwargs['stderr'] = err_w - pipe_fds.extend((err_r, err_w)) - to_close.append(err_w) - self.stderr = PipeIOStream(err_r) - try: - self.proc = subprocess.Popen(*args, **kwargs) - except: - for fd in pipe_fds: - os.close(fd) - raise - for fd in to_close: - os.close(fd) - for attr in ['stdin', 'stdout', 'stderr', 'pid']: - if not hasattr(self, attr): # don't clobber streams set above - setattr(self, attr, getattr(self.proc, attr)) - self._exit_callback = None - self.returncode = None - - def set_exit_callback(self, callback): - """Runs ``callback`` when this process exits. - - The callback takes one argument, the return code of the process. - - This method uses a ``SIGCHLD`` handler, which is a global setting - and may conflict if you have other libraries trying to handle the - same signal. If you are using more than one ``IOLoop`` it may - be necessary to call `Subprocess.initialize` first to designate - one ``IOLoop`` to run the signal handlers. - - In many cases a close callback on the stdout or stderr streams - can be used as an alternative to an exit callback if the - signal handler is causing a problem. - """ - self._exit_callback = stack_context.wrap(callback) - Subprocess.initialize() - Subprocess._waiting[self.pid] = self - Subprocess._try_cleanup_process(self.pid) - - def wait_for_exit(self, raise_error=True): - """Returns a `.Future` which resolves when the process exits. - - Usage:: - - ret = yield proc.wait_for_exit() - - This is a coroutine-friendly alternative to `set_exit_callback` - (and a replacement for the blocking `subprocess.Popen.wait`). - - By default, raises `subprocess.CalledProcessError` if the process - has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` - to suppress this behavior and return the exit status without raising. - - .. versionadded:: 4.2 - """ - future = Future() - - def callback(ret): - if ret != 0 and raise_error: - # Unfortunately we don't have the original args any more. - future.set_exception(CalledProcessError(ret, None)) - else: - future_set_result_unless_cancelled(future, ret) - self.set_exit_callback(callback) - return future - - @classmethod - def initialize(cls): - """Initializes the ``SIGCHLD`` handler. - - The signal handler is run on an `.IOLoop` to avoid locking issues. 
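The `set_exit_callback`/SIGCHLD flow described here can be exercised with a short sketch (the child command is illustrative; POSIX only):

from tornado.ioloop import IOLoop
from tornado.process import Subprocess

proc = Subprocess(["sleep", "0"])  # a no-op child


def on_exit(returncode: int) -> None:
    print("child exited with code", returncode)
    IOLoop.current().stop()


proc.set_exit_callback(on_exit)  # installs the shared SIGCHLD handler
IOLoop.current().start()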
- Note that the `.IOLoop` used for signal handling need not be the - same one used by individual Subprocess objects (as long as the - ``IOLoops`` are each running in separate threads). - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been - removed. - """ - if cls._initialized: - return - io_loop = ioloop.IOLoop.current() - cls._old_sigchld = signal.signal( - signal.SIGCHLD, - lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup)) - cls._initialized = True - - @classmethod - def uninitialize(cls): - """Removes the ``SIGCHLD`` handler.""" - if not cls._initialized: - return - signal.signal(signal.SIGCHLD, cls._old_sigchld) - cls._initialized = False - - @classmethod - def _cleanup(cls): - for pid in list(cls._waiting.keys()): # make a copy - cls._try_cleanup_process(pid) - - @classmethod - def _try_cleanup_process(cls, pid): - try: - ret_pid, status = os.waitpid(pid, os.WNOHANG) - except OSError as e: - if errno_from_exception(e) == errno.ECHILD: - return - if ret_pid == 0: - return - assert ret_pid == pid - subproc = cls._waiting.pop(pid) - subproc.io_loop.add_callback_from_signal( - subproc._set_returncode, status) - - def _set_returncode(self, status): - if os.WIFSIGNALED(status): - self.returncode = -os.WTERMSIG(status) - else: - assert os.WIFEXITED(status) - self.returncode = os.WEXITSTATUS(status) - # We've taken over wait() duty from the subprocess.Popen - # object. If we don't inform it of the process's return code, - # it will log a warning at destruction in python 3.6+. - self.proc.returncode = self.returncode - if self._exit_callback: - callback = self._exit_callback - self._exit_callback = None - callback(self.returncode) +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utilities for working with multiple processes, including both forking +the server into multiple processes and managing subprocesses. +""" + +import errno +import os +import multiprocessing +import signal +import subprocess +import sys +import time + +from binascii import hexlify + +from tornado.concurrent import ( + Future, + future_set_result_unless_cancelled, + future_set_exception_unless_cancelled, +) +from tornado import ioloop +from tornado.iostream import PipeIOStream +from tornado.log import gen_log +from tornado.platform.auto import set_close_exec +from tornado.util import errno_from_exception + +import typing +from typing import Tuple, Optional, Any, Callable + +if typing.TYPE_CHECKING: + from typing import List # noqa: F401 + +# Re-export this exception for convenience. 
+CalledProcessError = subprocess.CalledProcessError + + +def cpu_count() -> int: + """Returns the number of processors on this machine.""" + if multiprocessing is None: + return 1 + try: + return multiprocessing.cpu_count() + except NotImplementedError: + pass + try: + return os.sysconf("SC_NPROCESSORS_CONF") + except (AttributeError, ValueError): + pass + gen_log.error("Could not detect number of processors; assuming 1") + return 1 + + +def _reseed_random() -> None: + if "random" not in sys.modules: + return + import random + + # If os.urandom is available, this method does the same thing as + # random.seed (at least as of python 2.6). If os.urandom is not + # available, we mix in the pid in addition to a timestamp. + try: + seed = int(hexlify(os.urandom(16)), 16) + except NotImplementedError: + seed = int(time.time() * 1000) ^ os.getpid() + random.seed(seed) + + +def _pipe_cloexec() -> Tuple[int, int]: + r, w = os.pipe() + set_close_exec(r) + set_close_exec(w) + return r, w + + +_task_id = None + + +def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> int: + """Starts multiple worker processes. + + If ``num_processes`` is None or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If ``num_processes`` is given and > 0, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``fork_processes``. + + In each child process, ``fork_processes`` returns its *task id*, a + number between 0 and ``num_processes``. Processes that exit + abnormally (due to a signal or non-zero exit status) are restarted + with the same id (up to ``max_restarts`` times). In the parent + process, ``fork_processes`` returns None if all child processes + have exited normally, but will otherwise only exit by throwing an + exception. + + max_restarts defaults to 100. 
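The docstring above describes the pre-fork pattern; a sketch of typical usage, assuming a `TCPServer` subclass and an arbitrary port (neither is part of the diff): sockets are bound in the parent, then each child creates its own IOLoop.

from tornado.ioloop import IOLoop
from tornado.iostream import IOStream
from tornado.netutil import bind_sockets
from tornado.process import fork_processes, task_id
from tornado.tcpserver import TCPServer


class EchoServer(TCPServer):
    async def handle_stream(self, stream: IOStream, address: tuple) -> None:
        data = await stream.read_until(b"\n")  # echo one line back
        await stream.write(data)


sockets = bind_sockets(8888)  # bind before forking so children share them
fork_processes(0)             # 0 (or None) forks one child per CPU core
print("worker", task_id(), "started")

server = EchoServer()
server.add_sockets(sockets)
IOLoop.current().start()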
+ """ + if max_restarts is None: + max_restarts = 100 + + global _task_id + assert _task_id is None + if num_processes is None or num_processes <= 0: + num_processes = cpu_count() + gen_log.info("Starting %d processes", num_processes) + children = {} + + def start_child(i: int) -> Optional[int]: + pid = os.fork() + if pid == 0: + # child process + _reseed_random() + global _task_id + _task_id = i + return i + else: + children[pid] = i + return None + + for i in range(num_processes): + id = start_child(i) + if id is not None: + return id + num_restarts = 0 + while children: + try: + pid, status = os.wait() + except OSError as e: + if errno_from_exception(e) == errno.EINTR: + continue + raise + if pid not in children: + continue + id = children.pop(pid) + if os.WIFSIGNALED(status): + gen_log.warning( + "child %d (pid %d) killed by signal %d, restarting", + id, + pid, + os.WTERMSIG(status), + ) + elif os.WEXITSTATUS(status) != 0: + gen_log.warning( + "child %d (pid %d) exited with status %d, restarting", + id, + pid, + os.WEXITSTATUS(status), + ) + else: + gen_log.info("child %d (pid %d) exited normally", id, pid) + continue + num_restarts += 1 + if num_restarts > max_restarts: + raise RuntimeError("Too many child restarts, giving up") + new_id = start_child(id) + if new_id is not None: + return new_id + # All child processes exited cleanly, so exit the master process + # instead of just returning to right after the call to + # fork_processes (which will probably just start up another IOLoop + # unless the caller checks the return value). + sys.exit(0) + + +def task_id() -> Optional[int]: + """Returns the current task id, if any. + + Returns None if this process was not created by `fork_processes`. + """ + global _task_id + return _task_id + + +class Subprocess(object): + """Wraps ``subprocess.Popen`` with IOStream support. + + The constructor is the same as ``subprocess.Popen`` with the following + additions: + + * ``stdin``, ``stdout``, and ``stderr`` may have the value + ``tornado.process.Subprocess.STREAM``, which will make the corresponding + attribute of the resulting Subprocess a `.PipeIOStream`. If this option + is used, the caller is responsible for closing the streams when done + with them. + + The ``Subprocess.STREAM`` option and the ``set_exit_callback`` and + ``wait_for_exit`` methods do not work on Windows. There is + therefore no reason to use this class instead of + ``subprocess.Popen`` on that platform. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + + """ + + STREAM = object() + + _initialized = False + _waiting = {} # type: ignore + _old_sigchld = None + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.io_loop = ioloop.IOLoop.current() + # All FDs we create should be closed on error; those in to_close + # should be closed in the parent process on success. 
+ pipe_fds = [] # type: List[int] + to_close = [] # type: List[int] + if kwargs.get("stdin") is Subprocess.STREAM: + in_r, in_w = _pipe_cloexec() + kwargs["stdin"] = in_r + pipe_fds.extend((in_r, in_w)) + to_close.append(in_r) + self.stdin = PipeIOStream(in_w) + if kwargs.get("stdout") is Subprocess.STREAM: + out_r, out_w = _pipe_cloexec() + kwargs["stdout"] = out_w + pipe_fds.extend((out_r, out_w)) + to_close.append(out_w) + self.stdout = PipeIOStream(out_r) + if kwargs.get("stderr") is Subprocess.STREAM: + err_r, err_w = _pipe_cloexec() + kwargs["stderr"] = err_w + pipe_fds.extend((err_r, err_w)) + to_close.append(err_w) + self.stderr = PipeIOStream(err_r) + try: + self.proc = subprocess.Popen(*args, **kwargs) + except: + for fd in pipe_fds: + os.close(fd) + raise + for fd in to_close: + os.close(fd) + self.pid = self.proc.pid + for attr in ["stdin", "stdout", "stderr"]: + if not hasattr(self, attr): # don't clobber streams set above + setattr(self, attr, getattr(self.proc, attr)) + self._exit_callback = None # type: Optional[Callable[[int], None]] + self.returncode = None # type: Optional[int] + + def set_exit_callback(self, callback: Callable[[int], None]) -> None: + """Runs ``callback`` when this process exits. + + The callback takes one argument, the return code of the process. + + This method uses a ``SIGCHLD`` handler, which is a global setting + and may conflict if you have other libraries trying to handle the + same signal. If you are using more than one ``IOLoop`` it may + be necessary to call `Subprocess.initialize` first to designate + one ``IOLoop`` to run the signal handlers. + + In many cases a close callback on the stdout or stderr streams + can be used as an alternative to an exit callback if the + signal handler is causing a problem. + """ + self._exit_callback = callback + Subprocess.initialize() + Subprocess._waiting[self.pid] = self + Subprocess._try_cleanup_process(self.pid) + + def wait_for_exit(self, raise_error: bool = True) -> "Future[int]": + """Returns a `.Future` which resolves when the process exits. + + Usage:: + + ret = yield proc.wait_for_exit() + + This is a coroutine-friendly alternative to `set_exit_callback` + (and a replacement for the blocking `subprocess.Popen.wait`). + + By default, raises `subprocess.CalledProcessError` if the process + has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` + to suppress this behavior and return the exit status without raising. + + .. versionadded:: 4.2 + """ + future = Future() # type: Future[int] + + def callback(ret: int) -> None: + if ret != 0 and raise_error: + # Unfortunately we don't have the original args any more. + future_set_exception_unless_cancelled( + future, CalledProcessError(ret, "unknown") + ) + else: + future_set_result_unless_cancelled(future, ret) + + self.set_exit_callback(callback) + return future + + @classmethod + def initialize(cls) -> None: + """Initializes the ``SIGCHLD`` handler. + + The signal handler is run on an `.IOLoop` to avoid locking issues. + Note that the `.IOLoop` used for signal handling need not be the + same one used by individual Subprocess objects (as long as the + ``IOLoops`` are each running in separate threads). + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been + removed. 
+ """ + if cls._initialized: + return + io_loop = ioloop.IOLoop.current() + cls._old_sigchld = signal.signal( + signal.SIGCHLD, + lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup), + ) + cls._initialized = True + + @classmethod + def uninitialize(cls) -> None: + """Removes the ``SIGCHLD`` handler.""" + if not cls._initialized: + return + signal.signal(signal.SIGCHLD, cls._old_sigchld) + cls._initialized = False + + @classmethod + def _cleanup(cls) -> None: + for pid in list(cls._waiting.keys()): # make a copy + cls._try_cleanup_process(pid) + + @classmethod + def _try_cleanup_process(cls, pid: int) -> None: + try: + ret_pid, status = os.waitpid(pid, os.WNOHANG) + except OSError as e: + if errno_from_exception(e) == errno.ECHILD: + return + if ret_pid == 0: + return + assert ret_pid == pid + subproc = cls._waiting.pop(pid) + subproc.io_loop.add_callback_from_signal(subproc._set_returncode, status) + + def _set_returncode(self, status: int) -> None: + if os.WIFSIGNALED(status): + self.returncode = -os.WTERMSIG(status) + else: + assert os.WIFEXITED(status) + self.returncode = os.WEXITSTATUS(status) + # We've taken over wait() duty from the subprocess.Popen + # object. If we don't inform it of the process's return code, + # it will log a warning at destruction in python 3.6+. + self.proc.returncode = self.returncode + if self._exit_callback: + callback = self._exit_callback + self._exit_callback = None + callback(self.returncode) diff --git a/server/www/packages/packages-linux/x64/tornado/py.typed b/server/www/packages/packages-linux/x64/tornado/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/server/www/packages/packages-linux/x64/tornado/queues.py b/server/www/packages/packages-linux/x64/tornado/queues.py index 7cb96bf..28dcebb 100644 --- a/server/www/packages/packages-linux/x64/tornado/queues.py +++ b/server/www/packages/packages-linux/x64/tornado/queues.py @@ -1,379 +1,410 @@ -# Copyright 2015 The Tornado Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Asynchronous queues for coroutines. These classes are very similar -to those provided in the standard library's `asyncio package -`_. - -.. warning:: - - Unlike the standard library's `queue` module, the classes defined here - are *not* thread-safe. To use these queues from another thread, - use `.IOLoop.add_callback` to transfer control to the `.IOLoop` thread - before calling any queue methods. 
- -""" - -from __future__ import absolute_import, division, print_function - -import collections -import heapq - -from tornado import gen, ioloop -from tornado.concurrent import Future, future_set_result_unless_cancelled -from tornado.locks import Event - -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] - - -class QueueEmpty(Exception): - """Raised by `.Queue.get_nowait` when the queue has no items.""" - pass - - -class QueueFull(Exception): - """Raised by `.Queue.put_nowait` when a queue is at its maximum size.""" - pass - - -def _set_timeout(future, timeout): - if timeout: - def on_timeout(): - if not future.done(): - future.set_exception(gen.TimeoutError()) - io_loop = ioloop.IOLoop.current() - timeout_handle = io_loop.add_timeout(timeout, on_timeout) - future.add_done_callback( - lambda _: io_loop.remove_timeout(timeout_handle)) - - -class _QueueIterator(object): - def __init__(self, q): - self.q = q - - def __anext__(self): - return self.q.get() - - -class Queue(object): - """Coordinate producer and consumer coroutines. - - If maxsize is 0 (the default) the queue size is unbounded. - - .. testcode:: - - from tornado import gen - from tornado.ioloop import IOLoop - from tornado.queues import Queue - - q = Queue(maxsize=2) - - async def consumer(): - async for item in q: - try: - print('Doing work on %s' % item) - await gen.sleep(0.01) - finally: - q.task_done() - - async def producer(): - for item in range(5): - await q.put(item) - print('Put %s' % item) - - async def main(): - # Start consumer without waiting (since it never finishes). - IOLoop.current().spawn_callback(consumer) - await producer() # Wait for producer to put all tasks. - await q.join() # Wait for consumer to finish all tasks. - print('Done') - - IOLoop.current().run_sync(main) - - .. testoutput:: - - Put 0 - Put 1 - Doing work on 0 - Put 2 - Doing work on 1 - Put 3 - Doing work on 2 - Put 4 - Doing work on 3 - Doing work on 4 - Done - - - In versions of Python without native coroutines (before 3.5), - ``consumer()`` could be written as:: - - @gen.coroutine - def consumer(): - while True: - item = yield q.get() - try: - print('Doing work on %s' % item) - yield gen.sleep(0.01) - finally: - q.task_done() - - .. versionchanged:: 4.3 - Added ``async for`` support in Python 3.5. - - """ - def __init__(self, maxsize=0): - if maxsize is None: - raise TypeError("maxsize can't be None") - - if maxsize < 0: - raise ValueError("maxsize can't be negative") - - self._maxsize = maxsize - self._init() - self._getters = collections.deque([]) # Futures. - self._putters = collections.deque([]) # Pairs of (item, Future). - self._unfinished_tasks = 0 - self._finished = Event() - self._finished.set() - - @property - def maxsize(self): - """Number of items allowed in the queue.""" - return self._maxsize - - def qsize(self): - """Number of items in the queue.""" - return len(self._queue) - - def empty(self): - return not self._queue - - def full(self): - if self.maxsize == 0: - return False - else: - return self.qsize() >= self.maxsize - - def put(self, item, timeout=None): - """Put an item into the queue, perhaps waiting until there is room. - - Returns a Future, which raises `tornado.util.TimeoutError` after a - timeout. - - ``timeout`` may be a number denoting a time (on the same - scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a - `datetime.timedelta` object for a deadline relative to the - current time. 
- """ - future = Future() - try: - self.put_nowait(item) - except QueueFull: - self._putters.append((item, future)) - _set_timeout(future, timeout) - else: - future.set_result(None) - return future - - def put_nowait(self, item): - """Put an item into the queue without blocking. - - If no free slot is immediately available, raise `QueueFull`. - """ - self._consume_expired() - if self._getters: - assert self.empty(), "queue non-empty, why are getters waiting?" - getter = self._getters.popleft() - self.__put_internal(item) - future_set_result_unless_cancelled(getter, self._get()) - elif self.full(): - raise QueueFull - else: - self.__put_internal(item) - - def get(self, timeout=None): - """Remove and return an item from the queue. - - Returns a Future which resolves once an item is available, or raises - `tornado.util.TimeoutError` after a timeout. - - ``timeout`` may be a number denoting a time (on the same - scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a - `datetime.timedelta` object for a deadline relative to the - current time. - """ - future = Future() - try: - future.set_result(self.get_nowait()) - except QueueEmpty: - self._getters.append(future) - _set_timeout(future, timeout) - return future - - def get_nowait(self): - """Remove and return an item from the queue without blocking. - - Return an item if one is immediately available, else raise - `QueueEmpty`. - """ - self._consume_expired() - if self._putters: - assert self.full(), "queue not full, why are putters waiting?" - item, putter = self._putters.popleft() - self.__put_internal(item) - future_set_result_unless_cancelled(putter, None) - return self._get() - elif self.qsize(): - return self._get() - else: - raise QueueEmpty - - def task_done(self): - """Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each `.get` used to fetch a task, a - subsequent call to `.task_done` tells the queue that the processing - on the task is complete. - - If a `.join` is blocking, it resumes when all items have been - processed; that is, when every `.put` is matched by a `.task_done`. - - Raises `ValueError` if called more times than `.put`. - """ - if self._unfinished_tasks <= 0: - raise ValueError('task_done() called too many times') - self._unfinished_tasks -= 1 - if self._unfinished_tasks == 0: - self._finished.set() - - def join(self, timeout=None): - """Block until all items in the queue are processed. - - Returns a Future, which raises `tornado.util.TimeoutError` after a - timeout. - """ - return self._finished.wait(timeout) - - def __aiter__(self): - return _QueueIterator(self) - - # These three are overridable in subclasses. - def _init(self): - self._queue = collections.deque() - - def _get(self): - return self._queue.popleft() - - def _put(self, item): - self._queue.append(item) - # End of the overridable methods. - - def __put_internal(self, item): - self._unfinished_tasks += 1 - self._finished.clear() - self._put(item) - - def _consume_expired(self): - # Remove timed-out waiters. 
-        while self._putters and self._putters[0][1].done():
-            self._putters.popleft()
-
-        while self._getters and self._getters[0].done():
-            self._getters.popleft()
-
-    def __repr__(self):
-        return '<%s at %s %s>' % (
-            type(self).__name__, hex(id(self)), self._format())
-
-    def __str__(self):
-        return '<%s %s>' % (type(self).__name__, self._format())
-
-    def _format(self):
-        result = 'maxsize=%r' % (self.maxsize, )
-        if getattr(self, '_queue', None):
-            result += ' queue=%r' % self._queue
-        if self._getters:
-            result += ' getters[%s]' % len(self._getters)
-        if self._putters:
-            result += ' putters[%s]' % len(self._putters)
-        if self._unfinished_tasks:
-            result += ' tasks=%s' % self._unfinished_tasks
-        return result
-
-
-class PriorityQueue(Queue):
-    """A `.Queue` that retrieves entries in priority order, lowest first.
-
-    Entries are typically tuples like ``(priority number, data)``.
-
-    .. testcode::
-
-        from tornado.queues import PriorityQueue
-
-        q = PriorityQueue()
-        q.put((1, 'medium-priority item'))
-        q.put((0, 'high-priority item'))
-        q.put((10, 'low-priority item'))
-
-        print(q.get_nowait())
-        print(q.get_nowait())
-        print(q.get_nowait())
-
-    .. testoutput::
-
-        (0, 'high-priority item')
-        (1, 'medium-priority item')
-        (10, 'low-priority item')
-    """
-    def _init(self):
-        self._queue = []
-
-    def _put(self, item):
-        heapq.heappush(self._queue, item)
-
-    def _get(self):
-        return heapq.heappop(self._queue)
-
-
-class LifoQueue(Queue):
-    """A `.Queue` that retrieves the most recently put items first.
-
-    .. testcode::
-
-        from tornado.queues import LifoQueue
-
-        q = LifoQueue()
-        q.put(3)
-        q.put(2)
-        q.put(1)
-
-        print(q.get_nowait())
-        print(q.get_nowait())
-        print(q.get_nowait())
-
-    .. testoutput::
-
-        1
-        2
-        3
-    """
-    def _init(self):
-        self._queue = []
-
-    def _put(self, item):
-        self._queue.append(item)
-
-    def _get(self):
-        return self._queue.pop()
+# Copyright 2015 The Tornado Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Asynchronous queues for coroutines. These classes are very similar
+to those provided in the standard library's `asyncio package
+<https://docs.python.org/3/library/asyncio-queue.html>`_.
+
+.. warning::
+
+   Unlike the standard library's `queue` module, the classes defined here
+   are *not* thread-safe. To use these queues from another thread,
+   use `.IOLoop.add_callback` to transfer control to the `.IOLoop` thread
+   before calling any queue methods.
+ +""" + +import collections +import datetime +import heapq + +from tornado import gen, ioloop +from tornado.concurrent import Future, future_set_result_unless_cancelled +from tornado.locks import Event + +from typing import Union, TypeVar, Generic, Awaitable +import typing + +if typing.TYPE_CHECKING: + from typing import Deque, Tuple, List, Any # noqa: F401 + +_T = TypeVar("_T") + +__all__ = ["Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty"] + + +class QueueEmpty(Exception): + """Raised by `.Queue.get_nowait` when the queue has no items.""" + + pass + + +class QueueFull(Exception): + """Raised by `.Queue.put_nowait` when a queue is at its maximum size.""" + + pass + + +def _set_timeout( + future: Future, timeout: Union[None, float, datetime.timedelta] +) -> None: + if timeout: + + def on_timeout() -> None: + if not future.done(): + future.set_exception(gen.TimeoutError()) + + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + future.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle)) + + +class _QueueIterator(Generic[_T]): + def __init__(self, q: "Queue[_T]") -> None: + self.q = q + + def __anext__(self) -> Awaitable[_T]: + return self.q.get() + + +class Queue(Generic[_T]): + """Coordinate producer and consumer coroutines. + + If maxsize is 0 (the default) the queue size is unbounded. + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.queues import Queue + + q = Queue(maxsize=2) + + async def consumer(): + async for item in q: + try: + print('Doing work on %s' % item) + await gen.sleep(0.01) + finally: + q.task_done() + + async def producer(): + for item in range(5): + await q.put(item) + print('Put %s' % item) + + async def main(): + # Start consumer without waiting (since it never finishes). + IOLoop.current().spawn_callback(consumer) + await producer() # Wait for producer to put all tasks. + await q.join() # Wait for consumer to finish all tasks. + print('Done') + + IOLoop.current().run_sync(main) + + .. testoutput:: + + Put 0 + Put 1 + Doing work on 0 + Put 2 + Doing work on 1 + Put 3 + Doing work on 2 + Put 4 + Doing work on 3 + Doing work on 4 + Done + + + In versions of Python without native coroutines (before 3.5), + ``consumer()`` could be written as:: + + @gen.coroutine + def consumer(): + while True: + item = yield q.get() + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. + + """ + + # Exact type depends on subclass. Could be another generic + # parameter and use protocols to be more precise here. 
+ _queue = None # type: Any + + def __init__(self, maxsize: int = 0) -> None: + if maxsize is None: + raise TypeError("maxsize can't be None") + + if maxsize < 0: + raise ValueError("maxsize can't be negative") + + self._maxsize = maxsize + self._init() + self._getters = collections.deque([]) # type: Deque[Future[_T]] + self._putters = collections.deque([]) # type: Deque[Tuple[_T, Future[None]]] + self._unfinished_tasks = 0 + self._finished = Event() + self._finished.set() + + @property + def maxsize(self) -> int: + """Number of items allowed in the queue.""" + return self._maxsize + + def qsize(self) -> int: + """Number of items in the queue.""" + return len(self._queue) + + def empty(self) -> bool: + return not self._queue + + def full(self) -> bool: + if self.maxsize == 0: + return False + else: + return self.qsize() >= self.maxsize + + def put( + self, item: _T, timeout: Union[float, datetime.timedelta] = None + ) -> "Future[None]": + """Put an item into the queue, perhaps waiting until there is room. + + Returns a Future, which raises `tornado.util.TimeoutError` after a + timeout. + + ``timeout`` may be a number denoting a time (on the same + scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a + `datetime.timedelta` object for a deadline relative to the + current time. + """ + future = Future() # type: Future[None] + try: + self.put_nowait(item) + except QueueFull: + self._putters.append((item, future)) + _set_timeout(future, timeout) + else: + future.set_result(None) + return future + + def put_nowait(self, item: _T) -> None: + """Put an item into the queue without blocking. + + If no free slot is immediately available, raise `QueueFull`. + """ + self._consume_expired() + if self._getters: + assert self.empty(), "queue non-empty, why are getters waiting?" + getter = self._getters.popleft() + self.__put_internal(item) + future_set_result_unless_cancelled(getter, self._get()) + elif self.full(): + raise QueueFull + else: + self.__put_internal(item) + + def get(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[_T]: + """Remove and return an item from the queue. + + Returns an awaitable which resolves once an item is available, or raises + `tornado.util.TimeoutError` after a timeout. + + ``timeout`` may be a number denoting a time (on the same + scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a + `datetime.timedelta` object for a deadline relative to the + current time. + + .. note:: + + The ``timeout`` argument of this method differs from that + of the standard library's `queue.Queue.get`. That method + interprets numeric values as relative timeouts; this one + interprets them as absolute deadlines and requires + ``timedelta`` objects for relative timeouts (consistent + with other timeouts in Tornado). + + """ + future = Future() # type: Future[_T] + try: + future.set_result(self.get_nowait()) + except QueueEmpty: + self._getters.append(future) + _set_timeout(future, timeout) + return future + + def get_nowait(self) -> _T: + """Remove and return an item from the queue without blocking. + + Return an item if one is immediately available, else raise + `QueueEmpty`. + """ + self._consume_expired() + if self._putters: + assert self.full(), "queue not full, why are putters waiting?" 
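
# ---------------------------------------------------------------------------
# Illustrative sketch (editor's note, not part of the patch above): the two
# timeout forms described in get() in this hunk; a bare number is an
# absolute IOLoop deadline, a timedelta is relative to now.
import datetime
from tornado.ioloop import IOLoop
from tornado.queues import Queue
from tornado.util import TimeoutError

async def get_with_timeout(q: Queue):
    deadline = datetime.timedelta(seconds=1)     # relative form
    # deadline = IOLoop.current().time() + 1.0   # equivalent absolute form
    try:
        return await q.get(timeout=deadline)
    except TimeoutError:
        return None
# ---------------------------------------------------------------------------
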
+ item, putter = self._putters.popleft() + self.__put_internal(item) + future_set_result_unless_cancelled(putter, None) + return self._get() + elif self.qsize(): + return self._get() + else: + raise QueueEmpty + + def task_done(self) -> None: + """Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each `.get` used to fetch a task, a + subsequent call to `.task_done` tells the queue that the processing + on the task is complete. + + If a `.join` is blocking, it resumes when all items have been + processed; that is, when every `.put` is matched by a `.task_done`. + + Raises `ValueError` if called more times than `.put`. + """ + if self._unfinished_tasks <= 0: + raise ValueError("task_done() called too many times") + self._unfinished_tasks -= 1 + if self._unfinished_tasks == 0: + self._finished.set() + + def join(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]: + """Block until all items in the queue are processed. + + Returns an awaitable, which raises `tornado.util.TimeoutError` after a + timeout. + """ + return self._finished.wait(timeout) + + def __aiter__(self) -> _QueueIterator[_T]: + return _QueueIterator(self) + + # These three are overridable in subclasses. + def _init(self) -> None: + self._queue = collections.deque() + + def _get(self) -> _T: + return self._queue.popleft() + + def _put(self, item: _T) -> None: + self._queue.append(item) + + # End of the overridable methods. + + def __put_internal(self, item: _T) -> None: + self._unfinished_tasks += 1 + self._finished.clear() + self._put(item) + + def _consume_expired(self) -> None: + # Remove timed-out waiters. + while self._putters and self._putters[0][1].done(): + self._putters.popleft() + + while self._getters and self._getters[0].done(): + self._getters.popleft() + + def __repr__(self) -> str: + return "<%s at %s %s>" % (type(self).__name__, hex(id(self)), self._format()) + + def __str__(self) -> str: + return "<%s %s>" % (type(self).__name__, self._format()) + + def _format(self) -> str: + result = "maxsize=%r" % (self.maxsize,) + if getattr(self, "_queue", None): + result += " queue=%r" % self._queue + if self._getters: + result += " getters[%s]" % len(self._getters) + if self._putters: + result += " putters[%s]" % len(self._putters) + if self._unfinished_tasks: + result += " tasks=%s" % self._unfinished_tasks + return result + + +class PriorityQueue(Queue): + """A `.Queue` that retrieves entries in priority order, lowest first. + + Entries are typically tuples like ``(priority number, data)``. + + .. testcode:: + + from tornado.queues import PriorityQueue + + q = PriorityQueue() + q.put((1, 'medium-priority item')) + q.put((0, 'high-priority item')) + q.put((10, 'low-priority item')) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. testoutput:: + + (0, 'high-priority item') + (1, 'medium-priority item') + (10, 'low-priority item') + """ + + def _init(self) -> None: + self._queue = [] + + def _put(self, item: _T) -> None: + heapq.heappush(self._queue, item) + + def _get(self) -> _T: + return heapq.heappop(self._queue) + + +class LifoQueue(Queue): + """A `.Queue` that retrieves the most recently put items first. + + .. testcode:: + + from tornado.queues import LifoQueue + + q = LifoQueue() + q.put(3) + q.put(2) + q.put(1) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. 
testoutput:: + + 1 + 2 + 3 + """ + + def _init(self) -> None: + self._queue = [] + + def _put(self, item: _T) -> None: + self._queue.append(item) + + def _get(self) -> _T: + return self._queue.pop() diff --git a/server/www/packages/packages-linux/x64/tornado/routing.py b/server/www/packages/packages-linux/x64/tornado/routing.py index e56d1a7..7a131ac 100644 --- a/server/www/packages/packages-linux/x64/tornado/routing.py +++ b/server/www/packages/packages-linux/x64/tornado/routing.py @@ -1,641 +1,711 @@ -# Copyright 2015 The Tornado Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Flexible routing implementation. - -Tornado routes HTTP requests to appropriate handlers using `Router` -class implementations. The `tornado.web.Application` class is a -`Router` implementation and may be used directly, or the classes in -this module may be used for additional flexibility. The `RuleRouter` -class can match on more criteria than `.Application`, or the `Router` -interface can be subclassed for maximum customization. - -`Router` interface extends `~.httputil.HTTPServerConnectionDelegate` -to provide additional routing capabilities. This also means that any -`Router` implementation can be used directly as a ``request_callback`` -for `~.httpserver.HTTPServer` constructor. - -`Router` subclass must implement a ``find_handler`` method to provide -a suitable `~.httputil.HTTPMessageDelegate` instance to handle the -request: - -.. code-block:: python - - class CustomRouter(Router): - def find_handler(self, request, **kwargs): - # some routing logic providing a suitable HTTPMessageDelegate instance - return MessageDelegate(request.connection) - - class MessageDelegate(HTTPMessageDelegate): - def __init__(self, connection): - self.connection = connection - - def finish(self): - self.connection.write_headers( - ResponseStartLine("HTTP/1.1", 200, "OK"), - HTTPHeaders({"Content-Length": "2"}), - b"OK") - self.connection.finish() - - router = CustomRouter() - server = HTTPServer(router) - -The main responsibility of `Router` implementation is to provide a -mapping from a request to `~.httputil.HTTPMessageDelegate` instance -that will handle this request. In the example above we can see that -routing is possible even without instantiating an `~.web.Application`. - -For routing to `~.web.RequestHandler` implementations we need an -`~.web.Application` instance. `~.web.Application.get_handler_delegate` -provides a convenient way to create `~.httputil.HTTPMessageDelegate` -for a given request and `~.web.RequestHandler`. - -Here is a simple example of how we can we route to -`~.web.RequestHandler` subclasses by HTTP method: - -.. 
code-block:: python - - resources = {} - - class GetResource(RequestHandler): - def get(self, path): - if path not in resources: - raise HTTPError(404) - - self.finish(resources[path]) - - class PostResource(RequestHandler): - def post(self, path): - resources[path] = self.request.body - - class HTTPMethodRouter(Router): - def __init__(self, app): - self.app = app - - def find_handler(self, request, **kwargs): - handler = GetResource if request.method == "GET" else PostResource - return self.app.get_handler_delegate(request, handler, path_args=[request.path]) - - router = HTTPMethodRouter(Application()) - server = HTTPServer(router) - -`ReversibleRouter` interface adds the ability to distinguish between -the routes and reverse them to the original urls using route's name -and additional arguments. `~.web.Application` is itself an -implementation of `ReversibleRouter` class. - -`RuleRouter` and `ReversibleRuleRouter` are implementations of -`Router` and `ReversibleRouter` interfaces and can be used for -creating rule-based routing configurations. - -Rules are instances of `Rule` class. They contain a `Matcher`, which -provides the logic for determining whether the rule is a match for a -particular request and a target, which can be one of the following. - -1) An instance of `~.httputil.HTTPServerConnectionDelegate`: - -.. code-block:: python - - router = RuleRouter([ - Rule(PathMatches("/handler"), ConnectionDelegate()), - # ... more rules - ]) - - class ConnectionDelegate(HTTPServerConnectionDelegate): - def start_request(self, server_conn, request_conn): - return MessageDelegate(request_conn) - -2) A callable accepting a single argument of `~.httputil.HTTPServerRequest` type: - -.. code-block:: python - - router = RuleRouter([ - Rule(PathMatches("/callable"), request_callable) - ]) - - def request_callable(request): - request.write(b"HTTP/1.1 200 OK\\r\\nContent-Length: 2\\r\\n\\r\\nOK") - request.finish() - -3) Another `Router` instance: - -.. code-block:: python - - router = RuleRouter([ - Rule(PathMatches("/router.*"), CustomRouter()) - ]) - -Of course a nested `RuleRouter` or a `~.web.Application` is allowed: - -.. code-block:: python - - router = RuleRouter([ - Rule(HostMatches("example.com"), RuleRouter([ - Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)]))), - ])) - ]) - - server = HTTPServer(router) - -In the example below `RuleRouter` is used to route between applications: - -.. code-block:: python - - app1 = Application([ - (r"/app1/handler", Handler1), - # other handlers ... - ]) - - app2 = Application([ - (r"/app2/handler", Handler2), - # other handlers ... - ]) - - router = RuleRouter([ - Rule(PathMatches("/app1.*"), app1), - Rule(PathMatches("/app2.*"), app2) - ]) - - server = HTTPServer(router) - -For more information on application-level routing see docs for `~.web.Application`. - -.. 
versionadded:: 4.5 - -""" - -from __future__ import absolute_import, division, print_function - -import re -from functools import partial - -from tornado import httputil -from tornado.httpserver import _CallableAdapter -from tornado.escape import url_escape, url_unescape, utf8 -from tornado.log import app_log -from tornado.util import basestring_type, import_object, re_unescape, unicode_type - -try: - import typing # noqa -except ImportError: - pass - - -class Router(httputil.HTTPServerConnectionDelegate): - """Abstract router interface.""" - - def find_handler(self, request, **kwargs): - # type: (httputil.HTTPServerRequest, typing.Any)->httputil.HTTPMessageDelegate - """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate` - that can serve the request. - Routing implementations may pass additional kwargs to extend the routing logic. - - :arg httputil.HTTPServerRequest request: current HTTP request. - :arg kwargs: additional keyword arguments passed by routing implementation. - :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to - process the request. - """ - raise NotImplementedError() - - def start_request(self, server_conn, request_conn): - return _RoutingDelegate(self, server_conn, request_conn) - - -class ReversibleRouter(Router): - """Abstract router interface for routers that can handle named routes - and support reversing them to original urls. - """ - - def reverse_url(self, name, *args): - """Returns url string for a given route name and arguments - or ``None`` if no match is found. - - :arg str name: route name. - :arg args: url parameters. - :returns: parametrized url string for a given route name (or ``None``). - """ - raise NotImplementedError() - - -class _RoutingDelegate(httputil.HTTPMessageDelegate): - def __init__(self, router, server_conn, request_conn): - self.server_conn = server_conn - self.request_conn = request_conn - self.delegate = None - self.router = router # type: Router - - def headers_received(self, start_line, headers): - request = httputil.HTTPServerRequest( - connection=self.request_conn, - server_connection=self.server_conn, - start_line=start_line, headers=headers) - - self.delegate = self.router.find_handler(request) - if self.delegate is None: - app_log.debug("Delegate for %s %s request not found", - start_line.method, start_line.path) - self.delegate = _DefaultMessageDelegate(self.request_conn) - - return self.delegate.headers_received(start_line, headers) - - def data_received(self, chunk): - return self.delegate.data_received(chunk) - - def finish(self): - self.delegate.finish() - - def on_connection_close(self): - self.delegate.on_connection_close() - - -class _DefaultMessageDelegate(httputil.HTTPMessageDelegate): - def __init__(self, connection): - self.connection = connection - - def finish(self): - self.connection.write_headers( - httputil.ResponseStartLine("HTTP/1.1", 404, "Not Found"), httputil.HTTPHeaders()) - self.connection.finish() - - -class RuleRouter(Router): - """Rule-based router implementation.""" - - def __init__(self, rules=None): - """Constructs a router from an ordered list of rules:: - - RuleRouter([ - Rule(PathMatches("/handler"), Target), - # ... 
more rules - ]) - - You can also omit explicit `Rule` constructor and use tuples of arguments:: - - RuleRouter([ - (PathMatches("/handler"), Target), - ]) - - `PathMatches` is a default matcher, so the example above can be simplified:: - - RuleRouter([ - ("/handler", Target), - ]) - - In the examples above, ``Target`` can be a nested `Router` instance, an instance of - `~.httputil.HTTPServerConnectionDelegate` or an old-style callable, - accepting a request argument. - - :arg rules: a list of `Rule` instances or tuples of `Rule` - constructor arguments. - """ - self.rules = [] # type: typing.List[Rule] - if rules: - self.add_rules(rules) - - def add_rules(self, rules): - """Appends new rules to the router. - - :arg rules: a list of Rule instances (or tuples of arguments, which are - passed to Rule constructor). - """ - for rule in rules: - if isinstance(rule, (tuple, list)): - assert len(rule) in (2, 3, 4) - if isinstance(rule[0], basestring_type): - rule = Rule(PathMatches(rule[0]), *rule[1:]) - else: - rule = Rule(*rule) - - self.rules.append(self.process_rule(rule)) - - def process_rule(self, rule): - """Override this method for additional preprocessing of each rule. - - :arg Rule rule: a rule to be processed. - :returns: the same or modified Rule instance. - """ - return rule - - def find_handler(self, request, **kwargs): - for rule in self.rules: - target_params = rule.matcher.match(request) - if target_params is not None: - if rule.target_kwargs: - target_params['target_kwargs'] = rule.target_kwargs - - delegate = self.get_target_delegate( - rule.target, request, **target_params) - - if delegate is not None: - return delegate - - return None - - def get_target_delegate(self, target, request, **target_params): - """Returns an instance of `~.httputil.HTTPMessageDelegate` for a - Rule's target. This method is called by `~.find_handler` and can be - extended to provide additional target types. - - :arg target: a Rule's target. - :arg httputil.HTTPServerRequest request: current request. - :arg target_params: additional parameters that can be useful - for `~.httputil.HTTPMessageDelegate` creation. - """ - if isinstance(target, Router): - return target.find_handler(request, **target_params) - - elif isinstance(target, httputil.HTTPServerConnectionDelegate): - return target.start_request(request.server_connection, request.connection) - - elif callable(target): - return _CallableAdapter( - partial(target, **target_params), request.connection - ) - - return None - - -class ReversibleRuleRouter(ReversibleRouter, RuleRouter): - """A rule-based router that implements ``reverse_url`` method. - - Each rule added to this router may have a ``name`` attribute that can be - used to reconstruct an original uri. The actual reconstruction takes place - in a rule's matcher (see `Matcher.reverse`). 
- """ - - def __init__(self, rules=None): - self.named_rules = {} # type: typing.Dict[str] - super(ReversibleRuleRouter, self).__init__(rules) - - def process_rule(self, rule): - rule = super(ReversibleRuleRouter, self).process_rule(rule) - - if rule.name: - if rule.name in self.named_rules: - app_log.warning( - "Multiple handlers named %s; replacing previous value", - rule.name) - self.named_rules[rule.name] = rule - - return rule - - def reverse_url(self, name, *args): - if name in self.named_rules: - return self.named_rules[name].matcher.reverse(*args) - - for rule in self.rules: - if isinstance(rule.target, ReversibleRouter): - reversed_url = rule.target.reverse_url(name, *args) - if reversed_url is not None: - return reversed_url - - return None - - -class Rule(object): - """A routing rule.""" - - def __init__(self, matcher, target, target_kwargs=None, name=None): - """Constructs a Rule instance. - - :arg Matcher matcher: a `Matcher` instance used for determining - whether the rule should be considered a match for a specific - request. - :arg target: a Rule's target (typically a ``RequestHandler`` or - `~.httputil.HTTPServerConnectionDelegate` subclass or even a nested `Router`, - depending on routing implementation). - :arg dict target_kwargs: a dict of parameters that can be useful - at the moment of target instantiation (for example, ``status_code`` - for a ``RequestHandler`` subclass). They end up in - ``target_params['target_kwargs']`` of `RuleRouter.get_target_delegate` - method. - :arg str name: the name of the rule that can be used to find it - in `ReversibleRouter.reverse_url` implementation. - """ - if isinstance(target, str): - # import the Module and instantiate the class - # Must be a fully qualified name (module.ClassName) - target = import_object(target) - - self.matcher = matcher # type: Matcher - self.target = target - self.target_kwargs = target_kwargs if target_kwargs else {} - self.name = name - - def reverse(self, *args): - return self.matcher.reverse(*args) - - def __repr__(self): - return '%s(%r, %s, kwargs=%r, name=%r)' % \ - (self.__class__.__name__, self.matcher, - self.target, self.target_kwargs, self.name) - - -class Matcher(object): - """Represents a matcher for request features.""" - - def match(self, request): - """Matches current instance against the request. - - :arg httputil.HTTPServerRequest request: current HTTP request - :returns: a dict of parameters to be passed to the target handler - (for example, ``handler_kwargs``, ``path_args``, ``path_kwargs`` - can be passed for proper `~.web.RequestHandler` instantiation). - An empty dict is a valid (and common) return value to indicate a match - when the argument-passing features are not used. 
- ``None`` must be returned to indicate that there is no match.""" - raise NotImplementedError() - - def reverse(self, *args): - """Reconstructs full url from matcher instance and additional arguments.""" - return None - - -class AnyMatches(Matcher): - """Matches any request.""" - - def match(self, request): - return {} - - -class HostMatches(Matcher): - """Matches requests from hosts specified by ``host_pattern`` regex.""" - - def __init__(self, host_pattern): - if isinstance(host_pattern, basestring_type): - if not host_pattern.endswith("$"): - host_pattern += "$" - self.host_pattern = re.compile(host_pattern) - else: - self.host_pattern = host_pattern - - def match(self, request): - if self.host_pattern.match(request.host_name): - return {} - - return None - - -class DefaultHostMatches(Matcher): - """Matches requests from host that is equal to application's default_host. - Always returns no match if ``X-Real-Ip`` header is present. - """ - - def __init__(self, application, host_pattern): - self.application = application - self.host_pattern = host_pattern - - def match(self, request): - # Look for default host if not behind load balancer (for debugging) - if "X-Real-Ip" not in request.headers: - if self.host_pattern.match(self.application.default_host): - return {} - return None - - -class PathMatches(Matcher): - """Matches requests with paths specified by ``path_pattern`` regex.""" - - def __init__(self, path_pattern): - if isinstance(path_pattern, basestring_type): - if not path_pattern.endswith('$'): - path_pattern += '$' - self.regex = re.compile(path_pattern) - else: - self.regex = path_pattern - - assert len(self.regex.groupindex) in (0, self.regex.groups), \ - ("groups in url regexes must either be all named or all " - "positional: %r" % self.regex.pattern) - - self._path, self._group_count = self._find_groups() - - def match(self, request): - match = self.regex.match(request.path) - if match is None: - return None - if not self.regex.groups: - return {} - - path_args, path_kwargs = [], {} - - # Pass matched groups to the handler. Since - # match.groups() includes both named and - # unnamed groups, we want to use either groups - # or groupdict but not both. - if self.regex.groupindex: - path_kwargs = dict( - (str(k), _unquote_or_none(v)) - for (k, v) in match.groupdict().items()) - else: - path_args = [_unquote_or_none(s) for s in match.groups()] - - return dict(path_args=path_args, path_kwargs=path_kwargs) - - def reverse(self, *args): - if self._path is None: - raise ValueError("Cannot reverse url regex " + self.regex.pattern) - assert len(args) == self._group_count, "required number of arguments " \ - "not found" - if not len(args): - return self._path - converted_args = [] - for a in args: - if not isinstance(a, (unicode_type, bytes)): - a = str(a) - converted_args.append(url_escape(utf8(a), plus=False)) - return self._path % tuple(converted_args) - - def _find_groups(self): - """Returns a tuple (reverse string, group count) for a url. - - For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method - would return ('/%s/%s/', 2). - """ - pattern = self.regex.pattern - if pattern.startswith('^'): - pattern = pattern[1:] - if pattern.endswith('$'): - pattern = pattern[:-1] - - if self.regex.groups != pattern.count('('): - # The pattern is too complicated for our simplistic matching, - # so we can't support reversing it. 
- return None, None - - pieces = [] - for fragment in pattern.split('('): - if ')' in fragment: - paren_loc = fragment.index(')') - if paren_loc >= 0: - pieces.append('%s' + fragment[paren_loc + 1:]) - else: - try: - unescaped_fragment = re_unescape(fragment) - except ValueError: - # If we can't unescape part of it, we can't - # reverse this url. - return (None, None) - pieces.append(unescaped_fragment) - - return ''.join(pieces), self.regex.groups - - -class URLSpec(Rule): - """Specifies mappings between URLs and handlers. - - .. versionchanged: 4.5 - `URLSpec` is now a subclass of a `Rule` with `PathMatches` matcher and is preserved for - backwards compatibility. - """ - def __init__(self, pattern, handler, kwargs=None, name=None): - """Parameters: - - * ``pattern``: Regular expression to be matched. Any capturing - groups in the regex will be passed in to the handler's - get/post/etc methods as arguments (by keyword if named, by - position if unnamed. Named and unnamed capturing groups - may not be mixed in the same rule). - - * ``handler``: `~.web.RequestHandler` subclass to be invoked. - - * ``kwargs`` (optional): A dictionary of additional arguments - to be passed to the handler's constructor. - - * ``name`` (optional): A name for this handler. Used by - `~.web.Application.reverse_url`. - - """ - super(URLSpec, self).__init__(PathMatches(pattern), handler, kwargs, name) - - self.regex = self.matcher.regex - self.handler_class = self.target - self.kwargs = kwargs - - def __repr__(self): - return '%s(%r, %s, kwargs=%r, name=%r)' % \ - (self.__class__.__name__, self.regex.pattern, - self.handler_class, self.kwargs, self.name) - - -def _unquote_or_none(s): - """None-safe wrapper around url_unescape to handle unmatched optional - groups correctly. - - Note that args are passed as bytes so the handler can decide what - encoding to use. - """ - if s is None: - return s - return url_unescape(s, encoding=None, plus=False) +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Flexible routing implementation. + +Tornado routes HTTP requests to appropriate handlers using `Router` +class implementations. The `tornado.web.Application` class is a +`Router` implementation and may be used directly, or the classes in +this module may be used for additional flexibility. The `RuleRouter` +class can match on more criteria than `.Application`, or the `Router` +interface can be subclassed for maximum customization. + +`Router` interface extends `~.httputil.HTTPServerConnectionDelegate` +to provide additional routing capabilities. This also means that any +`Router` implementation can be used directly as a ``request_callback`` +for `~.httpserver.HTTPServer` constructor. + +`Router` subclass must implement a ``find_handler`` method to provide +a suitable `~.httputil.HTTPMessageDelegate` instance to handle the +request: + +.. 
code-block:: python
+
+    class CustomRouter(Router):
+        def find_handler(self, request, **kwargs):
+            # some routing logic providing a suitable HTTPMessageDelegate instance
+            return MessageDelegate(request.connection)
+
+    class MessageDelegate(HTTPMessageDelegate):
+        def __init__(self, connection):
+            self.connection = connection
+
+        def finish(self):
+            self.connection.write_headers(
+                ResponseStartLine("HTTP/1.1", 200, "OK"),
+                HTTPHeaders({"Content-Length": "2"}),
+                b"OK")
+            self.connection.finish()
+
+    router = CustomRouter()
+    server = HTTPServer(router)
+
+The main responsibility of `Router` implementation is to provide a
+mapping from a request to `~.httputil.HTTPMessageDelegate` instance
+that will handle this request. In the example above we can see that
+routing is possible even without instantiating an `~.web.Application`.
+
+For routing to `~.web.RequestHandler` implementations we need an
+`~.web.Application` instance. `~.web.Application.get_handler_delegate`
+provides a convenient way to create `~.httputil.HTTPMessageDelegate`
+for a given request and `~.web.RequestHandler`.
+
+Here is a simple example of how we can route to
+`~.web.RequestHandler` subclasses by HTTP method:
+
+.. code-block:: python
+
+    resources = {}
+
+    class GetResource(RequestHandler):
+        def get(self, path):
+            if path not in resources:
+                raise HTTPError(404)
+
+            self.finish(resources[path])
+
+    class PostResource(RequestHandler):
+        def post(self, path):
+            resources[path] = self.request.body
+
+    class HTTPMethodRouter(Router):
+        def __init__(self, app):
+            self.app = app
+
+        def find_handler(self, request, **kwargs):
+            handler = GetResource if request.method == "GET" else PostResource
+            return self.app.get_handler_delegate(request, handler, path_args=[request.path])
+
+    router = HTTPMethodRouter(Application())
+    server = HTTPServer(router)
+
+`ReversibleRouter` interface adds the ability to distinguish between
+routes and reverse them to their original urls using a route's name
+and additional arguments. `~.web.Application` is itself an
+implementation of `ReversibleRouter` class.
+
+`RuleRouter` and `ReversibleRuleRouter` are implementations of
+`Router` and `ReversibleRouter` interfaces and can be used for
+creating rule-based routing configurations.
+
+Rules are instances of `Rule` class. They contain a `Matcher`, which
+provides the logic for determining whether the rule is a match for a
+particular request, and a target, which can be one of the following.
+
+1) An instance of `~.httputil.HTTPServerConnectionDelegate`:
+
+.. code-block:: python
+
+    router = RuleRouter([
+        Rule(PathMatches("/handler"), ConnectionDelegate()),
+        # ... more rules
+    ])
+
+    class ConnectionDelegate(HTTPServerConnectionDelegate):
+        def start_request(self, server_conn, request_conn):
+            return MessageDelegate(request_conn)
+
+2) A callable accepting a single argument of `~.httputil.HTTPServerRequest` type:
+
+.. code-block:: python
+
+    router = RuleRouter([
+        Rule(PathMatches("/callable"), request_callable)
+    ])
+
+    def request_callable(request):
+        request.write(b"HTTP/1.1 200 OK\\r\\nContent-Length: 2\\r\\n\\r\\nOK")
+        request.finish()
+
+3) Another `Router` instance:
+
+.. code-block:: python
+
+    router = RuleRouter([
+        Rule(PathMatches("/router.*"), CustomRouter())
+    ])
+
+Of course a nested `RuleRouter` or a `~.web.Application` is allowed:
+
+.. code-block:: python
+
+    router = RuleRouter([
+        Rule(HostMatches("example.com"), RuleRouter([
+            Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)])),
+        ])),
+    ])
+
+    server = HTTPServer(router)
+
+In the example below `RuleRouter` is used to route between applications:
+
+.. code-block:: python
+
+    app1 = Application([
+        (r"/app1/handler", Handler1),
+        # other handlers ...
+    ])
+
+    app2 = Application([
+        (r"/app2/handler", Handler2),
+        # other handlers ...
+    ])
+
+    router = RuleRouter([
+        Rule(PathMatches("/app1.*"), app1),
+        Rule(PathMatches("/app2.*"), app2)
+    ])
+
+    server = HTTPServer(router)
+
+For more information on application-level routing see docs for `~.web.Application`.
+
+.. versionadded:: 4.5
+
+"""
+
+import re
+from functools import partial
+
+from tornado import httputil
+from tornado.httpserver import _CallableAdapter
+from tornado.escape import url_escape, url_unescape, utf8
+from tornado.log import app_log
+from tornado.util import basestring_type, import_object, re_unescape, unicode_type
+
+from typing import Any, Union, Optional, Awaitable, List, Dict, Pattern, Tuple, overload
+
+
+class Router(httputil.HTTPServerConnectionDelegate):
+    """Abstract router interface."""
+
+    def find_handler(
+        self, request: httputil.HTTPServerRequest, **kwargs: Any
+    ) -> Optional[httputil.HTTPMessageDelegate]:
+        """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate`
+        that can serve the request.
+        Routing implementations may pass additional kwargs to extend the routing logic.
+
+        :arg httputil.HTTPServerRequest request: current HTTP request.
+        :arg kwargs: additional keyword arguments passed by routing implementation.
+        :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to
+            process the request.
+        """
+        raise NotImplementedError()
+
+    def start_request(
+        self, server_conn: object, request_conn: httputil.HTTPConnection
+    ) -> httputil.HTTPMessageDelegate:
+        return _RoutingDelegate(self, server_conn, request_conn)
+
+
+class ReversibleRouter(Router):
+    """Abstract router interface for routers that can handle named routes
+    and support reversing them to original urls.
+    """
+
+    def reverse_url(self, name: str, *args: Any) -> Optional[str]:
+        """Returns url string for a given route name and arguments
+        or ``None`` if no match is found.
+
+        :arg str name: route name.
+        :arg args: url parameters.
+        :returns: parametrized url string for a given route name (or ``None``).
+ """ + raise NotImplementedError() + + +class _RoutingDelegate(httputil.HTTPMessageDelegate): + def __init__( + self, router: Router, server_conn: object, request_conn: httputil.HTTPConnection + ) -> None: + self.server_conn = server_conn + self.request_conn = request_conn + self.delegate = None # type: Optional[httputil.HTTPMessageDelegate] + self.router = router # type: Router + + def headers_received( + self, + start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine], + headers: httputil.HTTPHeaders, + ) -> Optional[Awaitable[None]]: + assert isinstance(start_line, httputil.RequestStartLine) + request = httputil.HTTPServerRequest( + connection=self.request_conn, + server_connection=self.server_conn, + start_line=start_line, + headers=headers, + ) + + self.delegate = self.router.find_handler(request) + if self.delegate is None: + app_log.debug( + "Delegate for %s %s request not found", + start_line.method, + start_line.path, + ) + self.delegate = _DefaultMessageDelegate(self.request_conn) + + return self.delegate.headers_received(start_line, headers) + + def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]: + assert self.delegate is not None + return self.delegate.data_received(chunk) + + def finish(self) -> None: + assert self.delegate is not None + self.delegate.finish() + + def on_connection_close(self) -> None: + assert self.delegate is not None + self.delegate.on_connection_close() + + +class _DefaultMessageDelegate(httputil.HTTPMessageDelegate): + def __init__(self, connection: httputil.HTTPConnection) -> None: + self.connection = connection + + def finish(self) -> None: + self.connection.write_headers( + httputil.ResponseStartLine("HTTP/1.1", 404, "Not Found"), + httputil.HTTPHeaders(), + ) + self.connection.finish() + + +# _RuleList can either contain pre-constructed Rules or a sequence of +# arguments to be passed to the Rule constructor. +_RuleList = List[ + Union[ + "Rule", + List[Any], # Can't do detailed typechecking of lists. + Tuple[Union[str, "Matcher"], Any], + Tuple[Union[str, "Matcher"], Any, Dict[str, Any]], + Tuple[Union[str, "Matcher"], Any, Dict[str, Any], str], + ] +] + + +class RuleRouter(Router): + """Rule-based router implementation.""" + + def __init__(self, rules: _RuleList = None) -> None: + """Constructs a router from an ordered list of rules:: + + RuleRouter([ + Rule(PathMatches("/handler"), Target), + # ... more rules + ]) + + You can also omit explicit `Rule` constructor and use tuples of arguments:: + + RuleRouter([ + (PathMatches("/handler"), Target), + ]) + + `PathMatches` is a default matcher, so the example above can be simplified:: + + RuleRouter([ + ("/handler", Target), + ]) + + In the examples above, ``Target`` can be a nested `Router` instance, an instance of + `~.httputil.HTTPServerConnectionDelegate` or an old-style callable, + accepting a request argument. + + :arg rules: a list of `Rule` instances or tuples of `Rule` + constructor arguments. + """ + self.rules = [] # type: List[Rule] + if rules: + self.add_rules(rules) + + def add_rules(self, rules: _RuleList) -> None: + """Appends new rules to the router. + + :arg rules: a list of Rule instances (or tuples of arguments, which are + passed to Rule constructor). 
+ """ + for rule in rules: + if isinstance(rule, (tuple, list)): + assert len(rule) in (2, 3, 4) + if isinstance(rule[0], basestring_type): + rule = Rule(PathMatches(rule[0]), *rule[1:]) + else: + rule = Rule(*rule) + + self.rules.append(self.process_rule(rule)) + + def process_rule(self, rule: "Rule") -> "Rule": + """Override this method for additional preprocessing of each rule. + + :arg Rule rule: a rule to be processed. + :returns: the same or modified Rule instance. + """ + return rule + + def find_handler( + self, request: httputil.HTTPServerRequest, **kwargs: Any + ) -> Optional[httputil.HTTPMessageDelegate]: + for rule in self.rules: + target_params = rule.matcher.match(request) + if target_params is not None: + if rule.target_kwargs: + target_params["target_kwargs"] = rule.target_kwargs + + delegate = self.get_target_delegate( + rule.target, request, **target_params + ) + + if delegate is not None: + return delegate + + return None + + def get_target_delegate( + self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any + ) -> Optional[httputil.HTTPMessageDelegate]: + """Returns an instance of `~.httputil.HTTPMessageDelegate` for a + Rule's target. This method is called by `~.find_handler` and can be + extended to provide additional target types. + + :arg target: a Rule's target. + :arg httputil.HTTPServerRequest request: current request. + :arg target_params: additional parameters that can be useful + for `~.httputil.HTTPMessageDelegate` creation. + """ + if isinstance(target, Router): + return target.find_handler(request, **target_params) + + elif isinstance(target, httputil.HTTPServerConnectionDelegate): + assert request.connection is not None + return target.start_request(request.server_connection, request.connection) + + elif callable(target): + assert request.connection is not None + return _CallableAdapter( + partial(target, **target_params), request.connection + ) + + return None + + +class ReversibleRuleRouter(ReversibleRouter, RuleRouter): + """A rule-based router that implements ``reverse_url`` method. + + Each rule added to this router may have a ``name`` attribute that can be + used to reconstruct an original uri. The actual reconstruction takes place + in a rule's matcher (see `Matcher.reverse`). + """ + + def __init__(self, rules: _RuleList = None) -> None: + self.named_rules = {} # type: Dict[str, Any] + super(ReversibleRuleRouter, self).__init__(rules) + + def process_rule(self, rule: "Rule") -> "Rule": + rule = super(ReversibleRuleRouter, self).process_rule(rule) + + if rule.name: + if rule.name in self.named_rules: + app_log.warning( + "Multiple handlers named %s; replacing previous value", rule.name + ) + self.named_rules[rule.name] = rule + + return rule + + def reverse_url(self, name: str, *args: Any) -> Optional[str]: + if name in self.named_rules: + return self.named_rules[name].matcher.reverse(*args) + + for rule in self.rules: + if isinstance(rule.target, ReversibleRouter): + reversed_url = rule.target.reverse_url(name, *args) + if reversed_url is not None: + return reversed_url + + return None + + +class Rule(object): + """A routing rule.""" + + def __init__( + self, + matcher: "Matcher", + target: Any, + target_kwargs: Dict[str, Any] = None, + name: str = None, + ) -> None: + """Constructs a Rule instance. + + :arg Matcher matcher: a `Matcher` instance used for determining + whether the rule should be considered a match for a specific + request. 
+ :arg target: a Rule's target (typically a ``RequestHandler`` or + `~.httputil.HTTPServerConnectionDelegate` subclass or even a nested `Router`, + depending on routing implementation). + :arg dict target_kwargs: a dict of parameters that can be useful + at the moment of target instantiation (for example, ``status_code`` + for a ``RequestHandler`` subclass). They end up in + ``target_params['target_kwargs']`` of `RuleRouter.get_target_delegate` + method. + :arg str name: the name of the rule that can be used to find it + in `ReversibleRouter.reverse_url` implementation. + """ + if isinstance(target, str): + # import the Module and instantiate the class + # Must be a fully qualified name (module.ClassName) + target = import_object(target) + + self.matcher = matcher # type: Matcher + self.target = target + self.target_kwargs = target_kwargs if target_kwargs else {} + self.name = name + + def reverse(self, *args: Any) -> Optional[str]: + return self.matcher.reverse(*args) + + def __repr__(self) -> str: + return "%s(%r, %s, kwargs=%r, name=%r)" % ( + self.__class__.__name__, + self.matcher, + self.target, + self.target_kwargs, + self.name, + ) + + +class Matcher(object): + """Represents a matcher for request features.""" + + def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]: + """Matches current instance against the request. + + :arg httputil.HTTPServerRequest request: current HTTP request + :returns: a dict of parameters to be passed to the target handler + (for example, ``handler_kwargs``, ``path_args``, ``path_kwargs`` + can be passed for proper `~.web.RequestHandler` instantiation). + An empty dict is a valid (and common) return value to indicate a match + when the argument-passing features are not used. + ``None`` must be returned to indicate that there is no match.""" + raise NotImplementedError() + + def reverse(self, *args: Any) -> Optional[str]: + """Reconstructs full url from matcher instance and additional arguments.""" + return None + + +class AnyMatches(Matcher): + """Matches any request.""" + + def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]: + return {} + + +class HostMatches(Matcher): + """Matches requests from hosts specified by ``host_pattern`` regex.""" + + def __init__(self, host_pattern: Union[str, Pattern]) -> None: + if isinstance(host_pattern, basestring_type): + if not host_pattern.endswith("$"): + host_pattern += "$" + self.host_pattern = re.compile(host_pattern) + else: + self.host_pattern = host_pattern + + def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]: + if self.host_pattern.match(request.host_name): + return {} + + return None + + +class DefaultHostMatches(Matcher): + """Matches requests from host that is equal to application's default_host. + Always returns no match if ``X-Real-Ip`` header is present. 
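
# ---------------------------------------------------------------------------
# Illustrative sketch (editor's note, not part of the patch above): the
# Matcher contract, in a hypothetical matcher keyed on the HTTP method; an
# empty dict signals a match, None signals no match.
from typing import Any, Dict, Optional
from tornado import httputil
from tornado.routing import Matcher

class MethodMatches(Matcher):
    def __init__(self, method: str) -> None:
        self.method = method.upper()

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        return {} if request.method == self.method else None

# Usable anywhere a Matcher is accepted, e.g. Rule(MethodMatches("POST"), target).
# ---------------------------------------------------------------------------
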
+ """ + + def __init__(self, application: Any, host_pattern: Pattern) -> None: + self.application = application + self.host_pattern = host_pattern + + def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]: + # Look for default host if not behind load balancer (for debugging) + if "X-Real-Ip" not in request.headers: + if self.host_pattern.match(self.application.default_host): + return {} + return None + + +class PathMatches(Matcher): + """Matches requests with paths specified by ``path_pattern`` regex.""" + + def __init__(self, path_pattern: Union[str, Pattern]) -> None: + if isinstance(path_pattern, basestring_type): + if not path_pattern.endswith("$"): + path_pattern += "$" + self.regex = re.compile(path_pattern) + else: + self.regex = path_pattern + + assert len(self.regex.groupindex) in (0, self.regex.groups), ( + "groups in url regexes must either be all named or all " + "positional: %r" % self.regex.pattern + ) + + self._path, self._group_count = self._find_groups() + + def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]: + match = self.regex.match(request.path) + if match is None: + return None + if not self.regex.groups: + return {} + + path_args = [] # type: List[bytes] + path_kwargs = {} # type: Dict[str, bytes] + + # Pass matched groups to the handler. Since + # match.groups() includes both named and + # unnamed groups, we want to use either groups + # or groupdict but not both. + if self.regex.groupindex: + path_kwargs = dict( + (str(k), _unquote_or_none(v)) for (k, v) in match.groupdict().items() + ) + else: + path_args = [_unquote_or_none(s) for s in match.groups()] + + return dict(path_args=path_args, path_kwargs=path_kwargs) + + def reverse(self, *args: Any) -> Optional[str]: + if self._path is None: + raise ValueError("Cannot reverse url regex " + self.regex.pattern) + assert len(args) == self._group_count, ( + "required number of arguments " "not found" + ) + if not len(args): + return self._path + converted_args = [] + for a in args: + if not isinstance(a, (unicode_type, bytes)): + a = str(a) + converted_args.append(url_escape(utf8(a), plus=False)) + return self._path % tuple(converted_args) + + def _find_groups(self) -> Tuple[Optional[str], Optional[int]]: + """Returns a tuple (reverse string, group count) for a url. + + For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method + would return ('/%s/%s/', 2). + """ + pattern = self.regex.pattern + if pattern.startswith("^"): + pattern = pattern[1:] + if pattern.endswith("$"): + pattern = pattern[:-1] + + if self.regex.groups != pattern.count("("): + # The pattern is too complicated for our simplistic matching, + # so we can't support reversing it. + return None, None + + pieces = [] + for fragment in pattern.split("("): + if ")" in fragment: + paren_loc = fragment.index(")") + if paren_loc >= 0: + pieces.append("%s" + fragment[paren_loc + 1 :]) + else: + try: + unescaped_fragment = re_unescape(fragment) + except ValueError: + # If we can't unescape part of it, we can't + # reverse this url. + return (None, None) + pieces.append(unescaped_fragment) + + return "".join(pieces), self.regex.groups + + +class URLSpec(Rule): + """Specifies mappings between URLs and handlers. + + .. versionchanged: 4.5 + `URLSpec` is now a subclass of a `Rule` with `PathMatches` matcher and is preserved for + backwards compatibility. 
+ """ + + def __init__( + self, + pattern: Union[str, Pattern], + handler: Any, + kwargs: Dict[str, Any] = None, + name: str = None, + ) -> None: + """Parameters: + + * ``pattern``: Regular expression to be matched. Any capturing + groups in the regex will be passed in to the handler's + get/post/etc methods as arguments (by keyword if named, by + position if unnamed. Named and unnamed capturing groups + may not be mixed in the same rule). + + * ``handler``: `~.web.RequestHandler` subclass to be invoked. + + * ``kwargs`` (optional): A dictionary of additional arguments + to be passed to the handler's constructor. + + * ``name`` (optional): A name for this handler. Used by + `~.web.Application.reverse_url`. + + """ + matcher = PathMatches(pattern) + super(URLSpec, self).__init__(matcher, handler, kwargs, name) + + self.regex = matcher.regex + self.handler_class = self.target + self.kwargs = kwargs + + def __repr__(self) -> str: + return "%s(%r, %s, kwargs=%r, name=%r)" % ( + self.__class__.__name__, + self.regex.pattern, + self.handler_class, + self.kwargs, + self.name, + ) + + +@overload +def _unquote_or_none(s: str) -> bytes: + pass + + +@overload # noqa: F811 +def _unquote_or_none(s: None) -> None: + pass + + +def _unquote_or_none(s: Optional[str]) -> Optional[bytes]: # noqa: F811 + """None-safe wrapper around url_unescape to handle unmatched optional + groups correctly. + + Note that args are passed as bytes so the handler can decide what + encoding to use. + """ + if s is None: + return s + return url_unescape(s, encoding=None, plus=False) diff --git a/server/www/packages/packages-linux/x64/tornado/simple_httpclient.py b/server/www/packages/packages-linux/x64/tornado/simple_httpclient.py index 60b7956..8b2ffe0 100644 --- a/server/www/packages/packages-linux/x64/tornado/simple_httpclient.py +++ b/server/www/packages/packages-linux/x64/tornado/simple_httpclient.py @@ -1,566 +1,692 @@ -from __future__ import absolute_import, division, print_function - -from tornado.escape import _unicode -from tornado import gen -from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy -from tornado import httputil -from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters -from tornado.ioloop import IOLoop -from tornado.iostream import StreamClosedError -from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults -from tornado.log import gen_log -from tornado import stack_context -from tornado.tcpclient import TCPClient -from tornado.util import PY3 - -import base64 -import collections -import copy -import functools -import re -import socket -import sys -import time -from io import BytesIO - - -if PY3: - import urllib.parse as urlparse -else: - import urlparse - -try: - import ssl -except ImportError: - # ssl is not available on Google App Engine. - ssl = None - - -class HTTPTimeoutError(HTTPError): - """Error raised by SimpleAsyncHTTPClient on timeout. - - For historical reasons, this is a subclass of `.HTTPClientError` - which simulates a response code of 599. - - .. versionadded:: 5.1 - """ - def __init__(self, message): - super(HTTPTimeoutError, self).__init__(599, message=message) - - def __str__(self): - return self.message - - -class HTTPStreamClosedError(HTTPError): - """Error raised by SimpleAsyncHTTPClient when the underlying stream is closed. - - When a more specific exception is available (such as `ConnectionResetError`), - it may be raised instead of this one. 
- - For historical reasons, this is a subclass of `.HTTPClientError` - which simulates a response code of 599. - - .. versionadded:: 5.1 - """ - def __init__(self, message): - super(HTTPStreamClosedError, self).__init__(599, message=message) - - def __str__(self): - return self.message - - -class SimpleAsyncHTTPClient(AsyncHTTPClient): - """Non-blocking HTTP client with no external dependencies. - - This class implements an HTTP 1.1 client on top of Tornado's IOStreams. - Some features found in the curl-based AsyncHTTPClient are not yet - supported. In particular, proxies are not supported, connections - are not reused, and callers cannot select the network interface to be - used. - """ - def initialize(self, max_clients=10, - hostname_mapping=None, max_buffer_size=104857600, - resolver=None, defaults=None, max_header_size=None, - max_body_size=None): - """Creates a AsyncHTTPClient. - - Only a single AsyncHTTPClient instance exists per IOLoop - in order to provide limitations on the number of pending connections. - ``force_instance=True`` may be used to suppress this behavior. - - Note that because of this implicit reuse, unless ``force_instance`` - is used, only the first call to the constructor actually uses - its arguments. It is recommended to use the ``configure`` method - instead of the constructor to ensure that arguments take effect. - - ``max_clients`` is the number of concurrent requests that can be - in progress; when this limit is reached additional requests will be - queued. Note that time spent waiting in this queue still counts - against the ``request_timeout``. - - ``hostname_mapping`` is a dictionary mapping hostnames to IP addresses. - It can be used to make local DNS changes when modifying system-wide - settings like ``/etc/hosts`` is not possible or desirable (e.g. in - unittests). - - ``max_buffer_size`` (default 100MB) is the number of bytes - that can be read into memory at once. ``max_body_size`` - (defaults to ``max_buffer_size``) is the largest response body - that the client will accept. Without a - ``streaming_callback``, the smaller of these two limits - applies; with a ``streaming_callback`` only ``max_body_size`` - does. - - .. versionchanged:: 4.2 - Added the ``max_body_size`` argument. - """ - super(SimpleAsyncHTTPClient, self).initialize(defaults=defaults) - self.max_clients = max_clients - self.queue = collections.deque() - self.active = {} - self.waiting = {} - self.max_buffer_size = max_buffer_size - self.max_header_size = max_header_size - self.max_body_size = max_body_size - # TCPClient could create a Resolver for us, but we have to do it - # ourselves to support hostname_mapping. 
- if resolver: - self.resolver = resolver - self.own_resolver = False - else: - self.resolver = Resolver() - self.own_resolver = True - if hostname_mapping is not None: - self.resolver = OverrideResolver(resolver=self.resolver, - mapping=hostname_mapping) - self.tcp_client = TCPClient(resolver=self.resolver) - - def close(self): - super(SimpleAsyncHTTPClient, self).close() - if self.own_resolver: - self.resolver.close() - self.tcp_client.close() - - def fetch_impl(self, request, callback): - key = object() - self.queue.append((key, request, callback)) - if not len(self.active) < self.max_clients: - timeout_handle = self.io_loop.add_timeout( - self.io_loop.time() + min(request.connect_timeout, - request.request_timeout), - functools.partial(self._on_timeout, key, "in request queue")) - else: - timeout_handle = None - self.waiting[key] = (request, callback, timeout_handle) - self._process_queue() - if self.queue: - gen_log.debug("max_clients limit reached, request queued. " - "%d active, %d queued requests." % ( - len(self.active), len(self.queue))) - - def _process_queue(self): - with stack_context.NullContext(): - while self.queue and len(self.active) < self.max_clients: - key, request, callback = self.queue.popleft() - if key not in self.waiting: - continue - self._remove_timeout(key) - self.active[key] = (request, callback) - release_callback = functools.partial(self._release_fetch, key) - self._handle_request(request, release_callback, callback) - - def _connection_class(self): - return _HTTPConnection - - def _handle_request(self, request, release_callback, final_callback): - self._connection_class()( - self, request, release_callback, - final_callback, self.max_buffer_size, self.tcp_client, - self.max_header_size, self.max_body_size) - - def _release_fetch(self, key): - del self.active[key] - self._process_queue() - - def _remove_timeout(self, key): - if key in self.waiting: - request, callback, timeout_handle = self.waiting[key] - if timeout_handle is not None: - self.io_loop.remove_timeout(timeout_handle) - del self.waiting[key] - - def _on_timeout(self, key, info=None): - """Timeout callback of request. - - Construct a timeout HTTPResponse when a timeout occurs. - - :arg object key: A simple object to mark the request. - :info string key: More detailed timeout information. 
- """ - request, callback, timeout_handle = self.waiting[key] - self.queue.remove((key, request, callback)) - - error_message = "Timeout {0}".format(info) if info else "Timeout" - timeout_response = HTTPResponse( - request, 599, error=HTTPTimeoutError(error_message), - request_time=self.io_loop.time() - request.start_time) - self.io_loop.add_callback(callback, timeout_response) - del self.waiting[key] - - -class _HTTPConnection(httputil.HTTPMessageDelegate): - _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]) - - def __init__(self, client, request, release_callback, - final_callback, max_buffer_size, tcp_client, - max_header_size, max_body_size): - self.io_loop = IOLoop.current() - self.start_time = self.io_loop.time() - self.start_wall_time = time.time() - self.client = client - self.request = request - self.release_callback = release_callback - self.final_callback = final_callback - self.max_buffer_size = max_buffer_size - self.tcp_client = tcp_client - self.max_header_size = max_header_size - self.max_body_size = max_body_size - self.code = None - self.headers = None - self.chunks = [] - self._decompressor = None - # Timeout handle returned by IOLoop.add_timeout - self._timeout = None - self._sockaddr = None - IOLoop.current().add_callback(self.run) - - @gen.coroutine - def run(self): - try: - self.parsed = urlparse.urlsplit(_unicode(self.request.url)) - if self.parsed.scheme not in ("http", "https"): - raise ValueError("Unsupported url scheme: %s" % - self.request.url) - # urlsplit results have hostname and port results, but they - # didn't support ipv6 literals until python 2.7. - netloc = self.parsed.netloc - if "@" in netloc: - userpass, _, netloc = netloc.rpartition("@") - host, port = httputil.split_host_and_port(netloc) - if port is None: - port = 443 if self.parsed.scheme == "https" else 80 - if re.match(r'^\[.*\]$', host): - # raw ipv6 addresses in urls are enclosed in brackets - host = host[1:-1] - self.parsed_hostname = host # save final host for _on_connect - - if self.request.allow_ipv6 is False: - af = socket.AF_INET - else: - af = socket.AF_UNSPEC - - ssl_options = self._get_ssl_options(self.parsed.scheme) - - timeout = min(self.request.connect_timeout, self.request.request_timeout) - if timeout: - self._timeout = self.io_loop.add_timeout( - self.start_time + timeout, - stack_context.wrap(functools.partial(self._on_timeout, "while connecting"))) - stream = yield self.tcp_client.connect( - host, port, af=af, - ssl_options=ssl_options, - max_buffer_size=self.max_buffer_size) - - if self.final_callback is None: - # final_callback is cleared if we've hit our timeout. 
- stream.close() - return - self.stream = stream - self.stream.set_close_callback(self.on_connection_close) - self._remove_timeout() - if self.final_callback is None: - return - if self.request.request_timeout: - self._timeout = self.io_loop.add_timeout( - self.start_time + self.request.request_timeout, - stack_context.wrap(functools.partial(self._on_timeout, "during request"))) - if (self.request.method not in self._SUPPORTED_METHODS and - not self.request.allow_nonstandard_methods): - raise KeyError("unknown method %s" % self.request.method) - for key in ('network_interface', - 'proxy_host', 'proxy_port', - 'proxy_username', 'proxy_password', - 'proxy_auth_mode'): - if getattr(self.request, key, None): - raise NotImplementedError('%s not supported' % key) - if "Connection" not in self.request.headers: - self.request.headers["Connection"] = "close" - if "Host" not in self.request.headers: - if '@' in self.parsed.netloc: - self.request.headers["Host"] = self.parsed.netloc.rpartition('@')[-1] - else: - self.request.headers["Host"] = self.parsed.netloc - username, password = None, None - if self.parsed.username is not None: - username, password = self.parsed.username, self.parsed.password - elif self.request.auth_username is not None: - username = self.request.auth_username - password = self.request.auth_password or '' - if username is not None: - if self.request.auth_mode not in (None, "basic"): - raise ValueError("unsupported auth_mode %s", - self.request.auth_mode) - self.request.headers["Authorization"] = ( - b"Basic " + base64.b64encode( - httputil.encode_username_password(username, password))) - if self.request.user_agent: - self.request.headers["User-Agent"] = self.request.user_agent - if not self.request.allow_nonstandard_methods: - # Some HTTP methods nearly always have bodies while others - # almost never do. Fail in this case unless the user has - # opted out of sanity checks with allow_nonstandard_methods. - body_expected = self.request.method in ("POST", "PATCH", "PUT") - body_present = (self.request.body is not None or - self.request.body_producer is not None) - if ((body_expected and not body_present) or - (body_present and not body_expected)): - raise ValueError( - 'Body must %sbe None for method %s (unless ' - 'allow_nonstandard_methods is true)' % - ('not ' if body_expected else '', self.request.method)) - if self.request.expect_100_continue: - self.request.headers["Expect"] = "100-continue" - if self.request.body is not None: - # When body_producer is used the caller is responsible for - # setting Content-Length (or else chunked encoding will be used). - self.request.headers["Content-Length"] = str(len( - self.request.body)) - if (self.request.method == "POST" and - "Content-Type" not in self.request.headers): - self.request.headers["Content-Type"] = "application/x-www-form-urlencoded" - if self.request.decompress_response: - self.request.headers["Accept-Encoding"] = "gzip" - req_path = ((self.parsed.path or '/') + - (('?' 
+ self.parsed.query) if self.parsed.query else '')) - self.connection = self._create_connection(stream) - start_line = httputil.RequestStartLine(self.request.method, - req_path, '') - self.connection.write_headers(start_line, self.request.headers) - if self.request.expect_100_continue: - yield self.connection.read_response(self) - else: - yield self._write_body(True) - except Exception: - if not self._handle_exception(*sys.exc_info()): - raise - - def _get_ssl_options(self, scheme): - if scheme == "https": - if self.request.ssl_options is not None: - return self.request.ssl_options - # If we are using the defaults, don't construct a - # new SSLContext. - if (self.request.validate_cert and - self.request.ca_certs is None and - self.request.client_cert is None and - self.request.client_key is None): - return _client_ssl_defaults - ssl_ctx = ssl.create_default_context( - ssl.Purpose.SERVER_AUTH, - cafile=self.request.ca_certs) - if not self.request.validate_cert: - ssl_ctx.check_hostname = False - ssl_ctx.verify_mode = ssl.CERT_NONE - if self.request.client_cert is not None: - ssl_ctx.load_cert_chain(self.request.client_cert, - self.request.client_key) - if hasattr(ssl, 'OP_NO_COMPRESSION'): - # See netutil.ssl_options_to_context - ssl_ctx.options |= ssl.OP_NO_COMPRESSION - return ssl_ctx - return None - - def _on_timeout(self, info=None): - """Timeout callback of _HTTPConnection instance. - - Raise a `HTTPTimeoutError` when a timeout occurs. - - :info string key: More detailed timeout information. - """ - self._timeout = None - error_message = "Timeout {0}".format(info) if info else "Timeout" - if self.final_callback is not None: - self._handle_exception(HTTPTimeoutError, HTTPTimeoutError(error_message), - None) - - def _remove_timeout(self): - if self._timeout is not None: - self.io_loop.remove_timeout(self._timeout) - self._timeout = None - - def _create_connection(self, stream): - stream.set_nodelay(True) - connection = HTTP1Connection( - stream, True, - HTTP1ConnectionParameters( - no_keep_alive=True, - max_header_size=self.max_header_size, - max_body_size=self.max_body_size, - decompress=self.request.decompress_response), - self._sockaddr) - return connection - - @gen.coroutine - def _write_body(self, start_read): - if self.request.body is not None: - self.connection.write(self.request.body) - elif self.request.body_producer is not None: - fut = self.request.body_producer(self.connection.write) - if fut is not None: - yield fut - self.connection.finish() - if start_read: - try: - yield self.connection.read_response(self) - except StreamClosedError: - if not self._handle_exception(*sys.exc_info()): - raise - - def _release(self): - if self.release_callback is not None: - release_callback = self.release_callback - self.release_callback = None - release_callback() - - def _run_callback(self, response): - self._release() - if self.final_callback is not None: - final_callback = self.final_callback - self.final_callback = None - self.io_loop.add_callback(final_callback, response) - - def _handle_exception(self, typ, value, tb): - if self.final_callback: - self._remove_timeout() - if isinstance(value, StreamClosedError): - if value.real_error is None: - value = HTTPStreamClosedError("Stream closed") - else: - value = value.real_error - self._run_callback(HTTPResponse(self.request, 599, error=value, - request_time=self.io_loop.time() - self.start_time, - start_time=self.start_wall_time, - )) - - if hasattr(self, "stream"): - # TODO: this may cause a StreamClosedError to be raised - # by the 
connection's Future. Should we cancel the - # connection more gracefully? - self.stream.close() - return True - else: - # If our callback has already been called, we are probably - # catching an exception that is not caused by us but rather - # some child of our callback. Rather than drop it on the floor, - # pass it along, unless it's just the stream being closed. - return isinstance(value, StreamClosedError) - - def on_connection_close(self): - if self.final_callback is not None: - message = "Connection closed" - if self.stream.error: - raise self.stream.error - try: - raise HTTPStreamClosedError(message) - except HTTPStreamClosedError: - self._handle_exception(*sys.exc_info()) - - def headers_received(self, first_line, headers): - if self.request.expect_100_continue and first_line.code == 100: - self._write_body(False) - return - self.code = first_line.code - self.reason = first_line.reason - self.headers = headers - - if self._should_follow_redirect(): - return - - if self.request.header_callback is not None: - # Reassemble the start line. - self.request.header_callback('%s %s %s\r\n' % first_line) - for k, v in self.headers.get_all(): - self.request.header_callback("%s: %s\r\n" % (k, v)) - self.request.header_callback('\r\n') - - def _should_follow_redirect(self): - return (self.request.follow_redirects and - self.request.max_redirects > 0 and - self.code in (301, 302, 303, 307, 308)) - - def finish(self): - data = b''.join(self.chunks) - self._remove_timeout() - original_request = getattr(self.request, "original_request", - self.request) - if self._should_follow_redirect(): - assert isinstance(self.request, _RequestProxy) - new_request = copy.copy(self.request.request) - new_request.url = urlparse.urljoin(self.request.url, - self.headers["Location"]) - new_request.max_redirects = self.request.max_redirects - 1 - del new_request.headers["Host"] - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 - # Client SHOULD make a GET request after a 303. - # According to the spec, 302 should be followed by the same - # method as the original request, but in practice browsers - # treat 302 the same as 303, and many servers use 302 for - # compatibility with pre-HTTP/1.1 user agents which don't - # understand the 303 status. - if self.code in (302, 303): - new_request.method = "GET" - new_request.body = None - for h in ["Content-Length", "Content-Type", - "Content-Encoding", "Transfer-Encoding"]: - try: - del self.request.headers[h] - except KeyError: - pass - new_request.original_request = original_request - final_callback = self.final_callback - self.final_callback = None - self._release() - fut = self.client.fetch(new_request, raise_error=False) - fut.add_done_callback(lambda f: final_callback(f.result())) - self._on_end_request() - return - if self.request.streaming_callback: - buffer = BytesIO() - else: - buffer = BytesIO(data) # TODO: don't require one big string? - response = HTTPResponse(original_request, - self.code, reason=getattr(self, 'reason', None), - headers=self.headers, - request_time=self.io_loop.time() - self.start_time, - start_time=self.start_wall_time, - buffer=buffer, - effective_url=self.request.url) - self._run_callback(response) - self._on_end_request() - - def _on_end_request(self): - self.stream.close() - - def data_received(self, chunk): - if self._should_follow_redirect(): - # We're going to follow a redirect so just discard the body. 
- return - if self.request.streaming_callback is not None: - self.request.streaming_callback(chunk) - else: - self.chunks.append(chunk) - - -if __name__ == "__main__": - AsyncHTTPClient.configure(SimpleAsyncHTTPClient) - main() +from tornado.escape import _unicode +from tornado import gen +from tornado.httpclient import ( + HTTPResponse, + HTTPError, + AsyncHTTPClient, + main, + _RequestProxy, + HTTPRequest, +) +from tornado import httputil +from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters +from tornado.ioloop import IOLoop +from tornado.iostream import StreamClosedError, IOStream +from tornado.netutil import ( + Resolver, + OverrideResolver, + _client_ssl_defaults, + is_valid_ip, +) +from tornado.log import gen_log +from tornado.tcpclient import TCPClient + +import base64 +import collections +import copy +import functools +import re +import socket +import ssl +import sys +import time +from io import BytesIO +import urllib.parse + +from typing import Dict, Any, Callable, Optional, Type, Union +from types import TracebackType +import typing + +if typing.TYPE_CHECKING: + from typing import Deque, Tuple, List # noqa: F401 + + +class HTTPTimeoutError(HTTPError): + """Error raised by SimpleAsyncHTTPClient on timeout. + + For historical reasons, this is a subclass of `.HTTPClientError` + which simulates a response code of 599. + + .. versionadded:: 5.1 + """ + + def __init__(self, message: str) -> None: + super(HTTPTimeoutError, self).__init__(599, message=message) + + def __str__(self) -> str: + return self.message or "Timeout" + + +class HTTPStreamClosedError(HTTPError): + """Error raised by SimpleAsyncHTTPClient when the underlying stream is closed. + + When a more specific exception is available (such as `ConnectionResetError`), + it may be raised instead of this one. + + For historical reasons, this is a subclass of `.HTTPClientError` + which simulates a response code of 599. + + .. versionadded:: 5.1 + """ + + def __init__(self, message: str) -> None: + super(HTTPStreamClosedError, self).__init__(599, message=message) + + def __str__(self) -> str: + return self.message or "Stream closed" + + +class SimpleAsyncHTTPClient(AsyncHTTPClient): + """Non-blocking HTTP client with no external dependencies. + + This class implements an HTTP 1.1 client on top of Tornado's IOStreams. + Some features found in the curl-based AsyncHTTPClient are not yet + supported. In particular, proxies are not supported, connections + are not reused, and callers cannot select the network interface to be + used. + """ + + def initialize( # type: ignore + self, + max_clients: int = 10, + hostname_mapping: Dict[str, str] = None, + max_buffer_size: int = 104857600, + resolver: Resolver = None, + defaults: Dict[str, Any] = None, + max_header_size: int = None, + max_body_size: int = None, + ) -> None: + """Creates a AsyncHTTPClient. + + Only a single AsyncHTTPClient instance exists per IOLoop + in order to provide limitations on the number of pending connections. + ``force_instance=True`` may be used to suppress this behavior. + + Note that because of this implicit reuse, unless ``force_instance`` + is used, only the first call to the constructor actually uses + its arguments. It is recommended to use the ``configure`` method + instead of the constructor to ensure that arguments take effect. + + ``max_clients`` is the number of concurrent requests that can be + in progress; when this limit is reached additional requests will be + queued. 
Note that time spent waiting in this queue still counts + against the ``request_timeout``. + + ``hostname_mapping`` is a dictionary mapping hostnames to IP addresses. + It can be used to make local DNS changes when modifying system-wide + settings like ``/etc/hosts`` is not possible or desirable (e.g. in + unittests). + + ``max_buffer_size`` (default 100MB) is the number of bytes + that can be read into memory at once. ``max_body_size`` + (defaults to ``max_buffer_size``) is the largest response body + that the client will accept. Without a + ``streaming_callback``, the smaller of these two limits + applies; with a ``streaming_callback`` only ``max_body_size`` + does. + + .. versionchanged:: 4.2 + Added the ``max_body_size`` argument. + """ + super(SimpleAsyncHTTPClient, self).initialize(defaults=defaults) + self.max_clients = max_clients + self.queue = ( + collections.deque() + ) # type: Deque[Tuple[object, HTTPRequest, Callable[[HTTPResponse], None]]] + self.active = ( + {} + ) # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None]]] + self.waiting = ( + {} + ) # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None], object]] + self.max_buffer_size = max_buffer_size + self.max_header_size = max_header_size + self.max_body_size = max_body_size + # TCPClient could create a Resolver for us, but we have to do it + # ourselves to support hostname_mapping. + if resolver: + self.resolver = resolver + self.own_resolver = False + else: + self.resolver = Resolver() + self.own_resolver = True + if hostname_mapping is not None: + self.resolver = OverrideResolver( + resolver=self.resolver, mapping=hostname_mapping + ) + self.tcp_client = TCPClient(resolver=self.resolver) + + def close(self) -> None: + super(SimpleAsyncHTTPClient, self).close() + if self.own_resolver: + self.resolver.close() + self.tcp_client.close() + + def fetch_impl( + self, request: HTTPRequest, callback: Callable[[HTTPResponse], None] + ) -> None: + key = object() + self.queue.append((key, request, callback)) + if not len(self.active) < self.max_clients: + assert request.connect_timeout is not None + assert request.request_timeout is not None + timeout_handle = self.io_loop.add_timeout( + self.io_loop.time() + + min(request.connect_timeout, request.request_timeout), + functools.partial(self._on_timeout, key, "in request queue"), + ) + else: + timeout_handle = None + self.waiting[key] = (request, callback, timeout_handle) + self._process_queue() + if self.queue: + gen_log.debug( + "max_clients limit reached, request queued. " + "%d active, %d queued requests." 
% (len(self.active), len(self.queue)) + ) + + def _process_queue(self) -> None: + while self.queue and len(self.active) < self.max_clients: + key, request, callback = self.queue.popleft() + if key not in self.waiting: + continue + self._remove_timeout(key) + self.active[key] = (request, callback) + release_callback = functools.partial(self._release_fetch, key) + self._handle_request(request, release_callback, callback) + + def _connection_class(self) -> type: + return _HTTPConnection + + def _handle_request( + self, + request: HTTPRequest, + release_callback: Callable[[], None], + final_callback: Callable[[HTTPResponse], None], + ) -> None: + self._connection_class()( + self, + request, + release_callback, + final_callback, + self.max_buffer_size, + self.tcp_client, + self.max_header_size, + self.max_body_size, + ) + + def _release_fetch(self, key: object) -> None: + del self.active[key] + self._process_queue() + + def _remove_timeout(self, key: object) -> None: + if key in self.waiting: + request, callback, timeout_handle = self.waiting[key] + if timeout_handle is not None: + self.io_loop.remove_timeout(timeout_handle) + del self.waiting[key] + + def _on_timeout(self, key: object, info: str = None) -> None: + """Timeout callback of request. + + Construct a timeout HTTPResponse when a timeout occurs. + + :arg object key: A simple object to mark the request. + :info string key: More detailed timeout information. + """ + request, callback, timeout_handle = self.waiting[key] + self.queue.remove((key, request, callback)) + + error_message = "Timeout {0}".format(info) if info else "Timeout" + timeout_response = HTTPResponse( + request, + 599, + error=HTTPTimeoutError(error_message), + request_time=self.io_loop.time() - request.start_time, + ) + self.io_loop.add_callback(callback, timeout_response) + del self.waiting[key] + + +class _HTTPConnection(httputil.HTTPMessageDelegate): + _SUPPORTED_METHODS = set( + ["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"] + ) + + def __init__( + self, + client: Optional[SimpleAsyncHTTPClient], + request: HTTPRequest, + release_callback: Callable[[], None], + final_callback: Callable[[HTTPResponse], None], + max_buffer_size: int, + tcp_client: TCPClient, + max_header_size: int, + max_body_size: int, + ) -> None: + self.io_loop = IOLoop.current() + self.start_time = self.io_loop.time() + self.start_wall_time = time.time() + self.client = client + self.request = request + self.release_callback = release_callback + self.final_callback = final_callback + self.max_buffer_size = max_buffer_size + self.tcp_client = tcp_client + self.max_header_size = max_header_size + self.max_body_size = max_body_size + self.code = None # type: Optional[int] + self.headers = None # type: Optional[httputil.HTTPHeaders] + self.chunks = [] # type: List[bytes] + self._decompressor = None + # Timeout handle returned by IOLoop.add_timeout + self._timeout = None # type: object + self._sockaddr = None + IOLoop.current().add_future( + gen.convert_yielded(self.run()), lambda f: f.result() + ) + + async def run(self) -> None: + try: + self.parsed = urllib.parse.urlsplit(_unicode(self.request.url)) + if self.parsed.scheme not in ("http", "https"): + raise ValueError("Unsupported url scheme: %s" % self.request.url) + # urlsplit results have hostname and port results, but they + # didn't support ipv6 literals until python 2.7. 
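# Illustrative sketch (not part of the upstream patch): the initialize()
# options documented above are normally supplied through
# AsyncHTTPClient.configure() before the first client is created. The host
# mapping is a hypothetical test fixture.
from tornado.httpclient import AsyncHTTPClient

AsyncHTTPClient.configure(
    "tornado.simple_httpclient.SimpleAsyncHTTPClient",
    max_clients=20,  # the 21st concurrent fetch waits in self.queue
    hostname_mapping={"api.example.com": "127.0.0.1"},
)


async def probe() -> int:
    resp = await AsyncHTTPClient().fetch("http://api.example.com:8888/health")
    return resp.code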
+ netloc = self.parsed.netloc + if "@" in netloc: + userpass, _, netloc = netloc.rpartition("@") + host, port = httputil.split_host_and_port(netloc) + if port is None: + port = 443 if self.parsed.scheme == "https" else 80 + if re.match(r"^\[.*\]$", host): + # raw ipv6 addresses in urls are enclosed in brackets + host = host[1:-1] + self.parsed_hostname = host # save final host for _on_connect + + if self.request.allow_ipv6 is False: + af = socket.AF_INET + else: + af = socket.AF_UNSPEC + + ssl_options = self._get_ssl_options(self.parsed.scheme) + + source_ip = None + if self.request.network_interface: + if is_valid_ip(self.request.network_interface): + source_ip = self.request.network_interface + else: + raise ValueError( + "Unrecognized IPv4 or IPv6 address for network_interface, got %r" + % (self.request.network_interface,) + ) + + timeout = min(self.request.connect_timeout, self.request.request_timeout) + if timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + timeout, + functools.partial(self._on_timeout, "while connecting"), + ) + stream = await self.tcp_client.connect( + host, + port, + af=af, + ssl_options=ssl_options, + max_buffer_size=self.max_buffer_size, + source_ip=source_ip, + ) + + if self.final_callback is None: + # final_callback is cleared if we've hit our timeout. + stream.close() + return + self.stream = stream + self.stream.set_close_callback(self.on_connection_close) + self._remove_timeout() + if self.final_callback is None: + return + if self.request.request_timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + self.request.request_timeout, + functools.partial(self._on_timeout, "during request"), + ) + if ( + self.request.method not in self._SUPPORTED_METHODS + and not self.request.allow_nonstandard_methods + ): + raise KeyError("unknown method %s" % self.request.method) + for key in ( + "proxy_host", + "proxy_port", + "proxy_username", + "proxy_password", + "proxy_auth_mode", + ): + if getattr(self.request, key, None): + raise NotImplementedError("%s not supported" % key) + if "Connection" not in self.request.headers: + self.request.headers["Connection"] = "close" + if "Host" not in self.request.headers: + if "@" in self.parsed.netloc: + self.request.headers["Host"] = self.parsed.netloc.rpartition( + "@" + )[-1] + else: + self.request.headers["Host"] = self.parsed.netloc + username, password = None, None + if self.parsed.username is not None: + username, password = self.parsed.username, self.parsed.password + elif self.request.auth_username is not None: + username = self.request.auth_username + password = self.request.auth_password or "" + if username is not None: + assert password is not None + if self.request.auth_mode not in (None, "basic"): + raise ValueError( + "unsupported auth_mode %s", self.request.auth_mode + ) + self.request.headers["Authorization"] = "Basic " + _unicode( + base64.b64encode( + httputil.encode_username_password(username, password) + ) + ) + if self.request.user_agent: + self.request.headers["User-Agent"] = self.request.user_agent + if not self.request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. 
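# Illustrative sketch (not part of the upstream patch): the body sanity
# check implemented just below. POST/PATCH/PUT must carry a body (or
# body_producer) and other methods must not, unless
# allow_nonstandard_methods=True. URLs are placeholders.
from tornado.httpclient import AsyncHTTPClient, HTTPRequest


async def body_examples() -> None:
    client = AsyncHTTPClient()
    # Fine: POST with a body; Content-Type defaults to
    # application/x-www-form-urlencoded further below.
    await client.fetch(
        HTTPRequest("http://127.0.0.1:8888/submit", method="POST", body="q=tornado")
    )
    # A DELETE with a body raises ValueError unless explicitly allowed:
    await client.fetch(
        HTTPRequest(
            "http://127.0.0.1:8888/item/1",
            method="DELETE",
            body="reason=cleanup",
            allow_nonstandard_methods=True,
        )
    )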
+ body_expected = self.request.method in ("POST", "PATCH", "PUT") + body_present = ( + self.request.body is not None + or self.request.body_producer is not None + ) + if (body_expected and not body_present) or ( + body_present and not body_expected + ): + raise ValueError( + "Body must %sbe None for method %s (unless " + "allow_nonstandard_methods is true)" + % ("not " if body_expected else "", self.request.method) + ) + if self.request.expect_100_continue: + self.request.headers["Expect"] = "100-continue" + if self.request.body is not None: + # When body_producer is used the caller is responsible for + # setting Content-Length (or else chunked encoding will be used). + self.request.headers["Content-Length"] = str(len(self.request.body)) + if ( + self.request.method == "POST" + and "Content-Type" not in self.request.headers + ): + self.request.headers[ + "Content-Type" + ] = "application/x-www-form-urlencoded" + if self.request.decompress_response: + self.request.headers["Accept-Encoding"] = "gzip" + req_path = (self.parsed.path or "/") + ( + ("?" + self.parsed.query) if self.parsed.query else "" + ) + self.connection = self._create_connection(stream) + start_line = httputil.RequestStartLine( + self.request.method, req_path, "" + ) + self.connection.write_headers(start_line, self.request.headers) + if self.request.expect_100_continue: + await self.connection.read_response(self) + else: + await self._write_body(True) + except Exception: + if not self._handle_exception(*sys.exc_info()): + raise + + def _get_ssl_options( + self, scheme: str + ) -> Union[None, Dict[str, Any], ssl.SSLContext]: + if scheme == "https": + if self.request.ssl_options is not None: + return self.request.ssl_options + # If we are using the defaults, don't construct a + # new SSLContext. + if ( + self.request.validate_cert + and self.request.ca_certs is None + and self.request.client_cert is None + and self.request.client_key is None + ): + return _client_ssl_defaults + ssl_ctx = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH, cafile=self.request.ca_certs + ) + if not self.request.validate_cert: + ssl_ctx.check_hostname = False + ssl_ctx.verify_mode = ssl.CERT_NONE + if self.request.client_cert is not None: + ssl_ctx.load_cert_chain( + self.request.client_cert, self.request.client_key + ) + if hasattr(ssl, "OP_NO_COMPRESSION"): + # See netutil.ssl_options_to_context + ssl_ctx.options |= ssl.OP_NO_COMPRESSION + return ssl_ctx + return None + + def _on_timeout(self, info: str = None) -> None: + """Timeout callback of _HTTPConnection instance. + + Raise a `HTTPTimeoutError` when a timeout occurs. + + :info string key: More detailed timeout information. 
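# Illustrative sketch (not part of the upstream patch): request options
# consumed by run() and _on_timeout() above, with placeholder values.
# network_interface must be an IP literal (the is_valid_ip check above);
# auth_username/auth_password become the Basic Authorization header.
from tornado.httpclient import HTTPRequest

req = HTTPRequest(
    "https://example.com/secret",
    auth_username="alice",
    auth_password="s3cret",  # base64-encoded into "Basic ..."
    network_interface="192.0.2.10",  # bound as source_ip during connect
    connect_timeout=5.0,  # "Timeout while connecting" after 5s
    request_timeout=30.0,  # "Timeout during request" after 30s
)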
+ """ + self._timeout = None + error_message = "Timeout {0}".format(info) if info else "Timeout" + if self.final_callback is not None: + self._handle_exception( + HTTPTimeoutError, HTTPTimeoutError(error_message), None + ) + + def _remove_timeout(self) -> None: + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def _create_connection(self, stream: IOStream) -> HTTP1Connection: + stream.set_nodelay(True) + connection = HTTP1Connection( + stream, + True, + HTTP1ConnectionParameters( + no_keep_alive=True, + max_header_size=self.max_header_size, + max_body_size=self.max_body_size, + decompress=bool(self.request.decompress_response), + ), + self._sockaddr, + ) + return connection + + async def _write_body(self, start_read: bool) -> None: + if self.request.body is not None: + self.connection.write(self.request.body) + elif self.request.body_producer is not None: + fut = self.request.body_producer(self.connection.write) + if fut is not None: + await fut + self.connection.finish() + if start_read: + try: + await self.connection.read_response(self) + except StreamClosedError: + if not self._handle_exception(*sys.exc_info()): + raise + + def _release(self) -> None: + if self.release_callback is not None: + release_callback = self.release_callback + self.release_callback = None # type: ignore + release_callback() + + def _run_callback(self, response: HTTPResponse) -> None: + self._release() + if self.final_callback is not None: + final_callback = self.final_callback + self.final_callback = None # type: ignore + self.io_loop.add_callback(final_callback, response) + + def _handle_exception( + self, + typ: "Optional[Type[BaseException]]", + value: Optional[BaseException], + tb: Optional[TracebackType], + ) -> bool: + if self.final_callback: + self._remove_timeout() + if isinstance(value, StreamClosedError): + if value.real_error is None: + value = HTTPStreamClosedError("Stream closed") + else: + value = value.real_error + self._run_callback( + HTTPResponse( + self.request, + 599, + error=value, + request_time=self.io_loop.time() - self.start_time, + start_time=self.start_wall_time, + ) + ) + + if hasattr(self, "stream"): + # TODO: this may cause a StreamClosedError to be raised + # by the connection's Future. Should we cancel the + # connection more gracefully? + self.stream.close() + return True + else: + # If our callback has already been called, we are probably + # catching an exception that is not caused by us but rather + # some child of our callback. Rather than drop it on the floor, + # pass it along, unless it's just the stream being closed. + return isinstance(value, StreamClosedError) + + def on_connection_close(self) -> None: + if self.final_callback is not None: + message = "Connection closed" + if self.stream.error: + raise self.stream.error + try: + raise HTTPStreamClosedError(message) + except HTTPStreamClosedError: + self._handle_exception(*sys.exc_info()) + + async def headers_received( + self, + first_line: Union[httputil.ResponseStartLine, httputil.RequestStartLine], + headers: httputil.HTTPHeaders, + ) -> None: + assert isinstance(first_line, httputil.ResponseStartLine) + if self.request.expect_100_continue and first_line.code == 100: + await self._write_body(False) + return + self.code = first_line.code + self.reason = first_line.reason + self.headers = headers + + if self._should_follow_redirect(): + return + + if self.request.header_callback is not None: + # Reassemble the start line. 
+ self.request.header_callback("%s %s %s\r\n" % first_line) + for k, v in self.headers.get_all(): + self.request.header_callback("%s: %s\r\n" % (k, v)) + self.request.header_callback("\r\n") + + def _should_follow_redirect(self) -> bool: + if self.request.follow_redirects: + assert self.request.max_redirects is not None + return ( + self.code in (301, 302, 303, 307, 308) + and self.request.max_redirects > 0 + and self.headers is not None + and self.headers.get("Location") is not None + ) + return False + + def finish(self) -> None: + assert self.code is not None + data = b"".join(self.chunks) + self._remove_timeout() + original_request = getattr(self.request, "original_request", self.request) + if self._should_follow_redirect(): + assert isinstance(self.request, _RequestProxy) + new_request = copy.copy(self.request.request) + new_request.url = urllib.parse.urljoin( + self.request.url, self.headers["Location"] + ) + new_request.max_redirects = self.request.max_redirects - 1 + del new_request.headers["Host"] + # https://tools.ietf.org/html/rfc7231#section-6.4 + # + # The original HTTP spec said that after a 301 or 302 + # redirect, the request method should be preserved. + # However, browsers implemented this by changing the + # method to GET, and the behavior stuck. 303 redirects + # always specified this POST-to-GET behavior (arguably 303 + # redirects should change *all* requests to GET, but + # libcurl only does this for POST so we follow their + # example). + if self.code in (301, 302, 303) and self.request.method == "POST": + new_request.method = "GET" + new_request.body = None + for h in [ + "Content-Length", + "Content-Type", + "Content-Encoding", + "Transfer-Encoding", + ]: + try: + del self.request.headers[h] + except KeyError: + pass + new_request.original_request = original_request + final_callback = self.final_callback + self.final_callback = None + self._release() + fut = self.client.fetch(new_request, raise_error=False) + fut.add_done_callback(lambda f: final_callback(f.result())) + self._on_end_request() + return + if self.request.streaming_callback: + buffer = BytesIO() + else: + buffer = BytesIO(data) # TODO: don't require one big string? + response = HTTPResponse( + original_request, + self.code, + reason=getattr(self, "reason", None), + headers=self.headers, + request_time=self.io_loop.time() - self.start_time, + start_time=self.start_wall_time, + buffer=buffer, + effective_url=self.request.url, + ) + self._run_callback(response) + self._on_end_request() + + def _on_end_request(self) -> None: + self.stream.close() + + def data_received(self, chunk: bytes) -> None: + if self._should_follow_redirect(): + # We're going to follow a redirect so just discard the body. 
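# Illustrative sketch (not part of the upstream patch): the redirect
# handling in finish() above, seen from the caller. The URL is a
# placeholder; a POST answered with 301/302/303 is reissued as GET with the
# body dropped, and each hop decrements max_redirects.
from tornado.httpclient import AsyncHTTPClient


async def follow() -> str:
    resp = await AsyncHTTPClient().fetch(
        "http://127.0.0.1:8888/old-path", follow_redirects=True, max_redirects=3
    )
    return resp.effective_url  # final URL after the redirect chain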
+ return + if self.request.streaming_callback is not None: + self.request.streaming_callback(chunk) + else: + self.chunks.append(chunk) + + +if __name__ == "__main__": + AsyncHTTPClient.configure(SimpleAsyncHTTPClient) + main() diff --git a/server/www/packages/packages-linux/x64/tornado/speedups.cpython-37m-x86_64-linux-gnu.so b/server/www/packages/packages-linux/x64/tornado/speedups.cpython-37m-x86_64-linux-gnu.so index 3814a27..53bd639 100755 Binary files a/server/www/packages/packages-linux/x64/tornado/speedups.cpython-37m-x86_64-linux-gnu.so and b/server/www/packages/packages-linux/x64/tornado/speedups.cpython-37m-x86_64-linux-gnu.so differ diff --git a/server/www/packages/packages-linux/x64/tornado/stack_context.py b/server/www/packages/packages-linux/x64/tornado/stack_context.py deleted file mode 100644 index a1eca4c..0000000 --- a/server/www/packages/packages-linux/x64/tornado/stack_context.py +++ /dev/null @@ -1,413 +0,0 @@ -# -# Copyright 2010 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""`StackContext` allows applications to maintain threadlocal-like state -that follows execution as it moves to other execution contexts. - -The motivating examples are to eliminate the need for explicit -``async_callback`` wrappers (as in `tornado.web.RequestHandler`), and to -allow some additional context to be kept for logging. - -This is slightly magic, but it's an extension of the idea that an -exception handler is a kind of stack-local state and when that stack -is suspended and resumed in a new context that state needs to be -preserved. `StackContext` shifts the burden of restoring that state -from each call site (e.g. wrapping each `.AsyncHTTPClient` callback -in ``async_callback``) to the mechanisms that transfer control from -one context to another (e.g. `.AsyncHTTPClient` itself, `.IOLoop`, -thread pools, etc). - -Example usage:: - - @contextlib.contextmanager - def die_on_error(): - try: - yield - except Exception: - logging.error("exception in asynchronous operation",exc_info=True) - sys.exit(1) - - with StackContext(die_on_error): - # Any exception thrown here *or in callback and its descendants* - # will cause the process to exit instead of spinning endlessly - # in the ioloop. - http_client.fetch(url, callback) - ioloop.start() - -Most applications shouldn't have to work with `StackContext` directly. -Here are a few rules of thumb for when it's necessary: - -* If you're writing an asynchronous library that doesn't rely on a - stack_context-aware library like `tornado.ioloop` or `tornado.iostream` - (for example, if you're writing a thread pool), use - `.stack_context.wrap()` before any asynchronous operations to capture the - stack context from where the operation was started. - -* If you're writing an asynchronous library that has some shared - resources (such as a connection pool), create those shared resources - within a ``with stack_context.NullContext():`` block. This will prevent - ``StackContexts`` from leaking from one request to another. 
- -* If you want to write something like an exception handler that will - persist across asynchronous calls, create a new `StackContext` (or - `ExceptionStackContext`), and make your asynchronous calls in a ``with`` - block that references your `StackContext`. - -.. deprecated:: 5.1 - - The ``stack_context`` package is deprecated and will be removed in - Tornado 6.0. -""" - -from __future__ import absolute_import, division, print_function - -import sys -import threading -import warnings - -from tornado.util import raise_exc_info - - -class StackContextInconsistentError(Exception): - pass - - -class _State(threading.local): - def __init__(self): - self.contexts = (tuple(), None) - - -_state = _State() - - -class StackContext(object): - """Establishes the given context as a StackContext that will be transferred. - - Note that the parameter is a callable that returns a context - manager, not the context itself. That is, where for a - non-transferable context manager you would say:: - - with my_context(): - - StackContext takes the function itself rather than its result:: - - with StackContext(my_context): - - The result of ``with StackContext() as cb:`` is a deactivation - callback. Run this callback when the StackContext is no longer - needed to ensure that it is not propagated any further (note that - deactivating a context does not affect any instances of that - context that are currently pending). This is an advanced feature - and not necessary in most applications. - """ - def __init__(self, context_factory): - warnings.warn("StackContext is deprecated and will be removed in Tornado 6.0", - DeprecationWarning) - self.context_factory = context_factory - self.contexts = [] - self.active = True - - def _deactivate(self): - self.active = False - - # StackContext protocol - def enter(self): - context = self.context_factory() - self.contexts.append(context) - context.__enter__() - - def exit(self, type, value, traceback): - context = self.contexts.pop() - context.__exit__(type, value, traceback) - - # Note that some of this code is duplicated in ExceptionStackContext - # below. ExceptionStackContext is more common and doesn't need - # the full generality of this class. - def __enter__(self): - self.old_contexts = _state.contexts - self.new_contexts = (self.old_contexts[0] + (self,), self) - _state.contexts = self.new_contexts - - try: - self.enter() - except: - _state.contexts = self.old_contexts - raise - - return self._deactivate - - def __exit__(self, type, value, traceback): - try: - self.exit(type, value, traceback) - finally: - final_contexts = _state.contexts - _state.contexts = self.old_contexts - - # Generator coroutines and with-statements with non-local - # effects interact badly. Check here for signs of - # the stack getting out of sync. - # Note that this check comes after restoring _state.context - # so that if it fails things are left in a (relatively) - # consistent state. - if final_contexts is not self.new_contexts: - raise StackContextInconsistentError( - 'stack_context inconsistency (may be caused by yield ' - 'within a "with StackContext" block)') - - # Break up a reference to itself to allow for faster GC on CPython. - self.new_contexts = None - - -class ExceptionStackContext(object): - """Specialization of StackContext for exception handling. - - The supplied ``exception_handler`` function will be called in the - event of an uncaught exception in this context. 
The semantics are - similar to a try/finally clause, and intended use cases are to log - an error, close a socket, or similar cleanup actions. The - ``exc_info`` triple ``(type, value, traceback)`` will be passed to the - exception_handler function. - - If the exception handler returns true, the exception will be - consumed and will not be propagated to other exception handlers. - - .. versionadded:: 5.1 - - The ``delay_warning`` argument can be used to delay the emission - of DeprecationWarnings until an exception is caught by the - ``ExceptionStackContext``, which facilitates certain transitional - use cases. - """ - def __init__(self, exception_handler, delay_warning=False): - self.delay_warning = delay_warning - if not self.delay_warning: - warnings.warn( - "StackContext is deprecated and will be removed in Tornado 6.0", - DeprecationWarning) - self.exception_handler = exception_handler - self.active = True - - def _deactivate(self): - self.active = False - - def exit(self, type, value, traceback): - if type is not None: - if self.delay_warning: - warnings.warn( - "StackContext is deprecated and will be removed in Tornado 6.0", - DeprecationWarning) - return self.exception_handler(type, value, traceback) - - def __enter__(self): - self.old_contexts = _state.contexts - self.new_contexts = (self.old_contexts[0], self) - _state.contexts = self.new_contexts - - return self._deactivate - - def __exit__(self, type, value, traceback): - try: - if type is not None: - return self.exception_handler(type, value, traceback) - finally: - final_contexts = _state.contexts - _state.contexts = self.old_contexts - - if final_contexts is not self.new_contexts: - raise StackContextInconsistentError( - 'stack_context inconsistency (may be caused by yield ' - 'within a "with StackContext" block)') - - # Break up a reference to itself to allow for faster GC on CPython. - self.new_contexts = None - - -class NullContext(object): - """Resets the `StackContext`. - - Useful when creating a shared resource on demand (e.g. an - `.AsyncHTTPClient`) where the stack that caused the creating is - not relevant to future operations. - """ - def __enter__(self): - self.old_contexts = _state.contexts - _state.contexts = (tuple(), None) - - def __exit__(self, type, value, traceback): - _state.contexts = self.old_contexts - - -def _remove_deactivated(contexts): - """Remove deactivated handlers from the chain""" - # Clean ctx handlers - stack_contexts = tuple([h for h in contexts[0] if h.active]) - - # Find new head - head = contexts[1] - while head is not None and not head.active: - head = head.old_contexts[1] - - # Process chain - ctx = head - while ctx is not None: - parent = ctx.old_contexts[1] - - while parent is not None: - if parent.active: - break - ctx.old_contexts = parent.old_contexts - parent = parent.old_contexts[1] - - ctx = parent - - return (stack_contexts, head) - - -def wrap(fn): - """Returns a callable object that will restore the current `StackContext` - when executed. - - Use this whenever saving a callback to be executed later in a - different execution context (either in a different thread or - asynchronously in the same thread). - """ - # Check if function is already wrapped - if fn is None or hasattr(fn, '_wrapped'): - return fn - - # Capture current stack head - # TODO: Any other better way to store contexts and update them in wrapped function? - cap_contexts = [_state.contexts] - - if not cap_contexts[0][0] and not cap_contexts[0][1]: - # Fast path when there are no active contexts. 
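# Editor's aside (not part of the upstream patch): stack_context is removed
# in Tornado 6, hence this file's deletion. The stdlib contextvars module
# now covers the "state that follows execution" use case natively for
# coroutines; a minimal Tornado-independent sketch:
import asyncio
import contextvars

request_id = contextvars.ContextVar("request_id", default="-")


async def handle(rid: str) -> None:
    request_id.set(rid)
    await asyncio.sleep(0)  # the value survives suspension points
    print("handled", request_id.get())  # each task sees its own value


async def main() -> None:
    await asyncio.gather(handle("a"), handle("b"))


asyncio.run(main())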
- def null_wrapper(*args, **kwargs): - try: - current_state = _state.contexts - _state.contexts = cap_contexts[0] - return fn(*args, **kwargs) - finally: - _state.contexts = current_state - null_wrapper._wrapped = True - return null_wrapper - - def wrapped(*args, **kwargs): - ret = None - try: - # Capture old state - current_state = _state.contexts - - # Remove deactivated items - cap_contexts[0] = contexts = _remove_deactivated(cap_contexts[0]) - - # Force new state - _state.contexts = contexts - - # Current exception - exc = (None, None, None) - top = None - - # Apply stack contexts - last_ctx = 0 - stack = contexts[0] - - # Apply state - for n in stack: - try: - n.enter() - last_ctx += 1 - except: - # Exception happened. Record exception info and store top-most handler - exc = sys.exc_info() - top = n.old_contexts[1] - - # Execute callback if no exception happened while restoring state - if top is None: - try: - ret = fn(*args, **kwargs) - except: - exc = sys.exc_info() - top = contexts[1] - - # If there was exception, try to handle it by going through the exception chain - if top is not None: - exc = _handle_exception(top, exc) - else: - # Otherwise take shorter path and run stack contexts in reverse order - while last_ctx > 0: - last_ctx -= 1 - c = stack[last_ctx] - - try: - c.exit(*exc) - except: - exc = sys.exc_info() - top = c.old_contexts[1] - break - else: - top = None - - # If if exception happened while unrolling, take longer exception handler path - if top is not None: - exc = _handle_exception(top, exc) - - # If exception was not handled, raise it - if exc != (None, None, None): - raise_exc_info(exc) - finally: - _state.contexts = current_state - return ret - - wrapped._wrapped = True - return wrapped - - -def _handle_exception(tail, exc): - while tail is not None: - try: - if tail.exit(*exc): - exc = (None, None, None) - except: - exc = sys.exc_info() - - tail = tail.old_contexts[1] - - return exc - - -def run_with_stack_context(context, func): - """Run a coroutine ``func`` in the given `StackContext`. - - It is not safe to have a ``yield`` statement within a ``with StackContext`` - block, so it is difficult to use stack context with `.gen.coroutine`. - This helper function runs the function in the correct context while - keeping the ``yield`` and ``with`` statements syntactically separate. - - Example:: - - @gen.coroutine - def incorrect(): - with StackContext(ctx): - # ERROR: this will raise StackContextInconsistentError - yield other_coroutine() - - @gen.coroutine - def correct(): - yield run_with_stack_context(StackContext(ctx), other_coroutine) - - .. versionadded:: 3.1 - """ - with context: - return func() diff --git a/server/www/packages/packages-linux/x64/tornado/tcpclient.py b/server/www/packages/packages-linux/x64/tornado/tcpclient.py index 3a1b58c..e198d3d 100644 --- a/server/www/packages/packages-linux/x64/tornado/tcpclient.py +++ b/server/www/packages/packages-linux/x64/tornado/tcpclient.py @@ -1,276 +1,334 @@ -# -# Copyright 2014 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A non-blocking TCP connection factory. -""" -from __future__ import absolute_import, division, print_function - -import functools -import socket -import numbers -import datetime - -from tornado.concurrent import Future, future_add_done_callback -from tornado.ioloop import IOLoop -from tornado.iostream import IOStream -from tornado import gen -from tornado.netutil import Resolver -from tornado.platform.auto import set_close_exec -from tornado.gen import TimeoutError -from tornado.util import timedelta_to_seconds - -_INITIAL_CONNECT_TIMEOUT = 0.3 - - -class _Connector(object): - """A stateless implementation of the "Happy Eyeballs" algorithm. - - "Happy Eyeballs" is documented in RFC6555 as the recommended practice - for when both IPv4 and IPv6 addresses are available. - - In this implementation, we partition the addresses by family, and - make the first connection attempt to whichever address was - returned first by ``getaddrinfo``. If that connection fails or - times out, we begin a connection in parallel to the first address - of the other family. If there are additional failures we retry - with other addresses, keeping one connection attempt per family - in flight at a time. - - http://tools.ietf.org/html/rfc6555 - - """ - def __init__(self, addrinfo, connect): - self.io_loop = IOLoop.current() - self.connect = connect - - self.future = Future() - self.timeout = None - self.connect_timeout = None - self.last_error = None - self.remaining = len(addrinfo) - self.primary_addrs, self.secondary_addrs = self.split(addrinfo) - self.streams = set() - - @staticmethod - def split(addrinfo): - """Partition the ``addrinfo`` list by address family. - - Returns two lists. The first list contains the first entry from - ``addrinfo`` and all others with the same family, and the - second list contains all other addresses (normally one list will - be AF_INET and the other AF_INET6, although non-standard resolvers - may return additional families). - """ - primary = [] - secondary = [] - primary_af = addrinfo[0][0] - for af, addr in addrinfo: - if af == primary_af: - primary.append((af, addr)) - else: - secondary.append((af, addr)) - return primary, secondary - - def start(self, timeout=_INITIAL_CONNECT_TIMEOUT, connect_timeout=None): - self.try_connect(iter(self.primary_addrs)) - self.set_timeout(timeout) - if connect_timeout is not None: - self.set_connect_timeout(connect_timeout) - return self.future - - def try_connect(self, addrs): - try: - af, addr = next(addrs) - except StopIteration: - # We've reached the end of our queue, but the other queue - # might still be working. Send a final error on the future - # only when both queues are finished. - if self.remaining == 0 and not self.future.done(): - self.future.set_exception(self.last_error or - IOError("connection failed")) - return - stream, future = self.connect(af, addr) - self.streams.add(stream) - future_add_done_callback( - future, functools.partial(self.on_connect_done, addrs, af, addr)) - - def on_connect_done(self, addrs, af, addr, future): - self.remaining -= 1 - try: - stream = future.result() - except Exception as e: - if self.future.done(): - return - # Error: try again (but remember what happened so we have an - # error to raise in the end) - self.last_error = e - self.try_connect(addrs) - if self.timeout is not None: - # If the first attempt failed, don't wait for the - # timeout to try an address from the secondary queue. 
- self.io_loop.remove_timeout(self.timeout) - self.on_timeout() - return - self.clear_timeouts() - if self.future.done(): - # This is a late arrival; just drop it. - stream.close() - else: - self.streams.discard(stream) - self.future.set_result((af, addr, stream)) - self.close_streams() - - def set_timeout(self, timeout): - self.timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, - self.on_timeout) - - def on_timeout(self): - self.timeout = None - if not self.future.done(): - self.try_connect(iter(self.secondary_addrs)) - - def clear_timeout(self): - if self.timeout is not None: - self.io_loop.remove_timeout(self.timeout) - - def set_connect_timeout(self, connect_timeout): - self.connect_timeout = self.io_loop.add_timeout( - connect_timeout, self.on_connect_timeout) - - def on_connect_timeout(self): - if not self.future.done(): - self.future.set_exception(TimeoutError()) - self.close_streams() - - def clear_timeouts(self): - if self.timeout is not None: - self.io_loop.remove_timeout(self.timeout) - if self.connect_timeout is not None: - self.io_loop.remove_timeout(self.connect_timeout) - - def close_streams(self): - for stream in self.streams: - stream.close() - - -class TCPClient(object): - """A non-blocking TCP connection factory. - - .. versionchanged:: 5.0 - The ``io_loop`` argument (deprecated since version 4.1) has been removed. - """ - def __init__(self, resolver=None): - if resolver is not None: - self.resolver = resolver - self._own_resolver = False - else: - self.resolver = Resolver() - self._own_resolver = True - - def close(self): - if self._own_resolver: - self.resolver.close() - - @gen.coroutine - def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None, - max_buffer_size=None, source_ip=None, source_port=None, - timeout=None): - """Connect to the given host and port. - - Asynchronously returns an `.IOStream` (or `.SSLIOStream` if - ``ssl_options`` is not None). - - Using the ``source_ip`` kwarg, one can specify the source - IP address to use when establishing the connection. - In case the user needs to resolve and - use a specific interface, it has to be handled outside - of Tornado as this depends very much on the platform. - - Raises `TimeoutError` if the input future does not complete before - ``timeout``, which may be specified in any form allowed by - `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time - relative to `.IOLoop.time`) - - Similarly, when the user requires a certain source port, it can - be specified using the ``source_port`` arg. - - .. versionchanged:: 4.5 - Added the ``source_ip`` and ``source_port`` arguments. - - .. versionchanged:: 5.0 - Added the ``timeout`` argument. - """ - if timeout is not None: - if isinstance(timeout, numbers.Real): - timeout = IOLoop.current().time() + timeout - elif isinstance(timeout, datetime.timedelta): - timeout = IOLoop.current().time() + timedelta_to_seconds(timeout) - else: - raise TypeError("Unsupported timeout %r" % timeout) - if timeout is not None: - addrinfo = yield gen.with_timeout( - timeout, self.resolver.resolve(host, port, af)) - else: - addrinfo = yield self.resolver.resolve(host, port, af) - connector = _Connector( - addrinfo, - functools.partial(self._create_stream, max_buffer_size, - source_ip=source_ip, source_port=source_port) - ) - af, addr, stream = yield connector.start(connect_timeout=timeout) - # TODO: For better performance we could cache the (af, addr) - # information here and re-use it on subsequent connections to - # the same host. 
(http://tools.ietf.org/html/rfc6555#section-4.2) - if ssl_options is not None: - if timeout is not None: - stream = yield gen.with_timeout(timeout, stream.start_tls( - False, ssl_options=ssl_options, server_hostname=host)) - else: - stream = yield stream.start_tls(False, ssl_options=ssl_options, - server_hostname=host) - raise gen.Return(stream) - - def _create_stream(self, max_buffer_size, af, addr, source_ip=None, - source_port=None): - # Always connect in plaintext; we'll convert to ssl if necessary - # after one connection has completed. - source_port_bind = source_port if isinstance(source_port, int) else 0 - source_ip_bind = source_ip - if source_port_bind and not source_ip: - # User required a specific port, but did not specify - # a certain source IP, will bind to the default loopback. - source_ip_bind = '::1' if af == socket.AF_INET6 else '127.0.0.1' - # Trying to use the same address family as the requested af socket: - # - 127.0.0.1 for IPv4 - # - ::1 for IPv6 - socket_obj = socket.socket(af) - set_close_exec(socket_obj.fileno()) - if source_port_bind or source_ip_bind: - # If the user requires binding also to a specific IP/port. - try: - socket_obj.bind((source_ip_bind, source_port_bind)) - except socket.error: - socket_obj.close() - # Fail loudly if unable to use the IP/port. - raise - try: - stream = IOStream(socket_obj, - max_buffer_size=max_buffer_size) - except socket.error as e: - fu = Future() - fu.set_exception(e) - return fu - else: - return stream, stream.connect(addr) +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking TCP connection factory. +""" + +import functools +import socket +import numbers +import datetime +import ssl + +from tornado.concurrent import Future, future_add_done_callback +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream +from tornado import gen +from tornado.netutil import Resolver +from tornado.platform.auto import set_close_exec +from tornado.gen import TimeoutError + +import typing +from typing import Any, Union, Dict, Tuple, List, Callable, Iterator + +if typing.TYPE_CHECKING: + from typing import Optional, Set # noqa: F401 + +_INITIAL_CONNECT_TIMEOUT = 0.3 + + +class _Connector(object): + """A stateless implementation of the "Happy Eyeballs" algorithm. + + "Happy Eyeballs" is documented in RFC6555 as the recommended practice + for when both IPv4 and IPv6 addresses are available. + + In this implementation, we partition the addresses by family, and + make the first connection attempt to whichever address was + returned first by ``getaddrinfo``. If that connection fails or + times out, we begin a connection in parallel to the first address + of the other family. If there are additional failures we retry + with other addresses, keeping one connection attempt per family + in flight at a time. 
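The family-splitting step described in the docstring above can be sketched in
isolation, outside the private ``_Connector`` class (``split_by_family`` is a
made-up name for this illustration)::

    import socket

    def split_by_family(pairs):
        # The family of the first getaddrinfo result becomes the primary
        # bucket; the rest (normally the other of IPv4/IPv6) is only
        # tried after the initial attempt fails or times out.
        primary_af = pairs[0][0]
        primary = [(af, addr) for af, addr in pairs if af == primary_af]
        secondary = [(af, addr) for af, addr in pairs if af != primary_af]
        return primary, secondary

    # getaddrinfo returns 5-tuples; reduce them to (family, sockaddr)
    # pairs, the shape tornado's resolver hands to _Connector.
    raw = socket.getaddrinfo("localhost", 80, socket.AF_UNSPEC,
                             socket.SOCK_STREAM)
    primary, secondary = split_by_family([(r[0], r[4]) for r in raw])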
+ + http://tools.ietf.org/html/rfc6555 + + """ + + def __init__( + self, + addrinfo: List[Tuple], + connect: Callable[ + [socket.AddressFamily, Tuple], Tuple[IOStream, "Future[IOStream]"] + ], + ) -> None: + self.io_loop = IOLoop.current() + self.connect = connect + + self.future = ( + Future() + ) # type: Future[Tuple[socket.AddressFamily, Any, IOStream]] + self.timeout = None # type: Optional[object] + self.connect_timeout = None # type: Optional[object] + self.last_error = None # type: Optional[Exception] + self.remaining = len(addrinfo) + self.primary_addrs, self.secondary_addrs = self.split(addrinfo) + self.streams = set() # type: Set[IOStream] + + @staticmethod + def split( + addrinfo: List[Tuple], + ) -> Tuple[ + List[Tuple[socket.AddressFamily, Tuple]], + List[Tuple[socket.AddressFamily, Tuple]], + ]: + """Partition the ``addrinfo`` list by address family. + + Returns two lists. The first list contains the first entry from + ``addrinfo`` and all others with the same family, and the + second list contains all other addresses (normally one list will + be AF_INET and the other AF_INET6, although non-standard resolvers + may return additional families). + """ + primary = [] + secondary = [] + primary_af = addrinfo[0][0] + for af, addr in addrinfo: + if af == primary_af: + primary.append((af, addr)) + else: + secondary.append((af, addr)) + return primary, secondary + + def start( + self, + timeout: float = _INITIAL_CONNECT_TIMEOUT, + connect_timeout: Union[float, datetime.timedelta] = None, + ) -> "Future[Tuple[socket.AddressFamily, Any, IOStream]]": + self.try_connect(iter(self.primary_addrs)) + self.set_timeout(timeout) + if connect_timeout is not None: + self.set_connect_timeout(connect_timeout) + return self.future + + def try_connect(self, addrs: Iterator[Tuple[socket.AddressFamily, Tuple]]) -> None: + try: + af, addr = next(addrs) + except StopIteration: + # We've reached the end of our queue, but the other queue + # might still be working. Send a final error on the future + # only when both queues are finished. + if self.remaining == 0 and not self.future.done(): + self.future.set_exception( + self.last_error or IOError("connection failed") + ) + return + stream, future = self.connect(af, addr) + self.streams.add(stream) + future_add_done_callback( + future, functools.partial(self.on_connect_done, addrs, af, addr) + ) + + def on_connect_done( + self, + addrs: Iterator[Tuple[socket.AddressFamily, Tuple]], + af: socket.AddressFamily, + addr: Tuple, + future: "Future[IOStream]", + ) -> None: + self.remaining -= 1 + try: + stream = future.result() + except Exception as e: + if self.future.done(): + return + # Error: try again (but remember what happened so we have an + # error to raise in the end) + self.last_error = e + self.try_connect(addrs) + if self.timeout is not None: + # If the first attempt failed, don't wait for the + # timeout to try an address from the secondary queue. + self.io_loop.remove_timeout(self.timeout) + self.on_timeout() + return + self.clear_timeouts() + if self.future.done(): + # This is a late arrival; just drop it. 
+ stream.close() + else: + self.streams.discard(stream) + self.future.set_result((af, addr, stream)) + self.close_streams() + + def set_timeout(self, timeout: float) -> None: + self.timeout = self.io_loop.add_timeout( + self.io_loop.time() + timeout, self.on_timeout + ) + + def on_timeout(self) -> None: + self.timeout = None + if not self.future.done(): + self.try_connect(iter(self.secondary_addrs)) + + def clear_timeout(self) -> None: + if self.timeout is not None: + self.io_loop.remove_timeout(self.timeout) + + def set_connect_timeout( + self, connect_timeout: Union[float, datetime.timedelta] + ) -> None: + self.connect_timeout = self.io_loop.add_timeout( + connect_timeout, self.on_connect_timeout + ) + + def on_connect_timeout(self) -> None: + if not self.future.done(): + self.future.set_exception(TimeoutError()) + self.close_streams() + + def clear_timeouts(self) -> None: + if self.timeout is not None: + self.io_loop.remove_timeout(self.timeout) + if self.connect_timeout is not None: + self.io_loop.remove_timeout(self.connect_timeout) + + def close_streams(self) -> None: + for stream in self.streams: + stream.close() + + +class TCPClient(object): + """A non-blocking TCP connection factory. + + .. versionchanged:: 5.0 + The ``io_loop`` argument (deprecated since version 4.1) has been removed. + """ + + def __init__(self, resolver: Resolver = None) -> None: + if resolver is not None: + self.resolver = resolver + self._own_resolver = False + else: + self.resolver = Resolver() + self._own_resolver = True + + def close(self) -> None: + if self._own_resolver: + self.resolver.close() + + async def connect( + self, + host: str, + port: int, + af: socket.AddressFamily = socket.AF_UNSPEC, + ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None, + max_buffer_size: int = None, + source_ip: str = None, + source_port: int = None, + timeout: Union[float, datetime.timedelta] = None, + ) -> IOStream: + """Connect to the given host and port. + + Asynchronously returns an `.IOStream` (or `.SSLIOStream` if + ``ssl_options`` is not None). + + Using the ``source_ip`` kwarg, one can specify the source + IP address to use when establishing the connection. + In case the user needs to resolve and + use a specific interface, it has to be handled outside + of Tornado as this depends very much on the platform. + + Raises `TimeoutError` if the input future does not complete before + ``timeout``, which may be specified in any form allowed by + `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time + relative to `.IOLoop.time`) + + Similarly, when the user requires a certain source port, it can + be specified using the ``source_port`` arg. + + .. versionchanged:: 4.5 + Added the ``source_ip`` and ``source_port`` arguments. + + .. versionchanged:: 5.0 + Added the ``timeout`` argument. 
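Typical use of the ``connect`` coroutine shown in this hunk looks roughly like
the sketch below (host, port, and byte counts are placeholder values)::

    import ssl
    from tornado.ioloop import IOLoop
    from tornado.tcpclient import TCPClient

    async def fetch_banner():
        client = TCPClient()
        # ``timeout`` may be a float in seconds or a datetime.timedelta.
        stream = await client.connect(
            "example.com", 443,
            ssl_options=ssl.create_default_context(),
            timeout=10.0,
        )
        try:
            return await stream.read_bytes(64, partial=True)
        finally:
            stream.close()
            client.close()

    banner = IOLoop.current().run_sync(fetch_banner)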
+ """ + if timeout is not None: + if isinstance(timeout, numbers.Real): + timeout = IOLoop.current().time() + timeout + elif isinstance(timeout, datetime.timedelta): + timeout = IOLoop.current().time() + timeout.total_seconds() + else: + raise TypeError("Unsupported timeout %r" % timeout) + if timeout is not None: + addrinfo = await gen.with_timeout( + timeout, self.resolver.resolve(host, port, af) + ) + else: + addrinfo = await self.resolver.resolve(host, port, af) + connector = _Connector( + addrinfo, + functools.partial( + self._create_stream, + max_buffer_size, + source_ip=source_ip, + source_port=source_port, + ), + ) + af, addr, stream = await connector.start(connect_timeout=timeout) + # TODO: For better performance we could cache the (af, addr) + # information here and re-use it on subsequent connections to + # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2) + if ssl_options is not None: + if timeout is not None: + stream = await gen.with_timeout( + timeout, + stream.start_tls( + False, ssl_options=ssl_options, server_hostname=host + ), + ) + else: + stream = await stream.start_tls( + False, ssl_options=ssl_options, server_hostname=host + ) + return stream + + def _create_stream( + self, + max_buffer_size: int, + af: socket.AddressFamily, + addr: Tuple, + source_ip: str = None, + source_port: int = None, + ) -> Tuple[IOStream, "Future[IOStream]"]: + # Always connect in plaintext; we'll convert to ssl if necessary + # after one connection has completed. + source_port_bind = source_port if isinstance(source_port, int) else 0 + source_ip_bind = source_ip + if source_port_bind and not source_ip: + # User required a specific port, but did not specify + # a certain source IP, will bind to the default loopback. + source_ip_bind = "::1" if af == socket.AF_INET6 else "127.0.0.1" + # Trying to use the same address family as the requested af socket: + # - 127.0.0.1 for IPv4 + # - ::1 for IPv6 + socket_obj = socket.socket(af) + set_close_exec(socket_obj.fileno()) + if source_port_bind or source_ip_bind: + # If the user requires binding also to a specific IP/port. + try: + socket_obj.bind((source_ip_bind, source_port_bind)) + except socket.error: + socket_obj.close() + # Fail loudly if unable to use the IP/port. + raise + try: + stream = IOStream(socket_obj, max_buffer_size=max_buffer_size) + except socket.error as e: + fu = Future() # type: Future[IOStream] + fu.set_exception(e) + return stream, fu + else: + return stream, stream.connect(addr) diff --git a/server/www/packages/packages-linux/x64/tornado/tcpserver.py b/server/www/packages/packages-linux/x64/tornado/tcpserver.py index 4f5d6f0..ff2b581 100644 --- a/server/www/packages/packages-linux/x64/tornado/tcpserver.py +++ b/server/www/packages/packages-linux/x64/tornado/tcpserver.py @@ -1,299 +1,330 @@ -# -# Copyright 2011 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""A non-blocking, single-threaded TCP server.""" -from __future__ import absolute_import, division, print_function - -import errno -import os -import socket - -from tornado import gen -from tornado.log import app_log -from tornado.ioloop import IOLoop -from tornado.iostream import IOStream, SSLIOStream -from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket -from tornado import process -from tornado.util import errno_from_exception - -try: - import ssl -except ImportError: - # ssl is not available on Google App Engine. - ssl = None - - -class TCPServer(object): - r"""A non-blocking, single-threaded TCP server. - - To use `TCPServer`, define a subclass which overrides the `handle_stream` - method. For example, a simple echo server could be defined like this:: - - from tornado.tcpserver import TCPServer - from tornado.iostream import StreamClosedError - from tornado import gen - - class EchoServer(TCPServer): - async def handle_stream(self, stream, address): - while True: - try: - data = await stream.read_until(b"\n") - await stream.write(data) - except StreamClosedError: - break - - To make this server serve SSL traffic, send the ``ssl_options`` keyword - argument with an `ssl.SSLContext` object. For compatibility with older - versions of Python ``ssl_options`` may also be a dictionary of keyword - arguments for the `ssl.wrap_socket` method.:: - - ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), - os.path.join(data_dir, "mydomain.key")) - TCPServer(ssl_options=ssl_ctx) - - `TCPServer` initialization follows one of three patterns: - - 1. `listen`: simple single-process:: - - server = TCPServer() - server.listen(8888) - IOLoop.current().start() - - 2. `bind`/`start`: simple multi-process:: - - server = TCPServer() - server.bind(8888) - server.start(0) # Forks multiple sub-processes - IOLoop.current().start() - - When using this interface, an `.IOLoop` must *not* be passed - to the `TCPServer` constructor. `start` will always start - the server on the default singleton `.IOLoop`. - - 3. `add_sockets`: advanced multi-process:: - - sockets = bind_sockets(8888) - tornado.process.fork_processes(0) - server = TCPServer() - server.add_sockets(sockets) - IOLoop.current().start() - - The `add_sockets` interface is more complicated, but it can be - used with `tornado.process.fork_processes` to give you more - flexibility in when the fork happens. `add_sockets` can - also be used in single-process servers if you want to create - your listening sockets in some way other than - `~tornado.netutil.bind_sockets`. - - .. versionadded:: 3.1 - The ``max_buffer_size`` argument. - - .. versionchanged:: 5.0 - The ``io_loop`` argument has been removed. - """ - def __init__(self, ssl_options=None, max_buffer_size=None, - read_chunk_size=None): - self.ssl_options = ssl_options - self._sockets = {} # fd -> socket object - self._handlers = {} # fd -> remove_handler callable - self._pending_sockets = [] - self._started = False - self._stopped = False - self.max_buffer_size = max_buffer_size - self.read_chunk_size = read_chunk_size - - # Verify the SSL options. Otherwise we don't get errors until clients - # connect. 
This doesn't verify that the keys are legitimate, but - # the SSL module doesn't do that until there is a connected socket - # which seems like too much work - if self.ssl_options is not None and isinstance(self.ssl_options, dict): - # Only certfile is required: it can contain both keys - if 'certfile' not in self.ssl_options: - raise KeyError('missing key "certfile" in ssl_options') - - if not os.path.exists(self.ssl_options['certfile']): - raise ValueError('certfile "%s" does not exist' % - self.ssl_options['certfile']) - if ('keyfile' in self.ssl_options and - not os.path.exists(self.ssl_options['keyfile'])): - raise ValueError('keyfile "%s" does not exist' % - self.ssl_options['keyfile']) - - def listen(self, port, address=""): - """Starts accepting connections on the given port. - - This method may be called more than once to listen on multiple ports. - `listen` takes effect immediately; it is not necessary to call - `TCPServer.start` afterwards. It is, however, necessary to start - the `.IOLoop`. - """ - sockets = bind_sockets(port, address=address) - self.add_sockets(sockets) - - def add_sockets(self, sockets): - """Makes this server start accepting connections on the given sockets. - - The ``sockets`` parameter is a list of socket objects such as - those returned by `~tornado.netutil.bind_sockets`. - `add_sockets` is typically used in combination with that - method and `tornado.process.fork_processes` to provide greater - control over the initialization of a multi-process server. - """ - for sock in sockets: - self._sockets[sock.fileno()] = sock - self._handlers[sock.fileno()] = add_accept_handler( - sock, self._handle_connection) - - def add_socket(self, socket): - """Singular version of `add_sockets`. Takes a single socket object.""" - self.add_sockets([socket]) - - def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128, - reuse_port=False): - """Binds this server to the given port on the given address. - - To start the server, call `start`. If you want to run this server - in a single process, you can call `listen` as a shortcut to the - sequence of `bind` and `start` calls. - - Address may be either an IP address or hostname. If it's a hostname, - the server will listen on all IP addresses associated with the - name. Address may be an empty string or None to listen on all - available interfaces. Family may be set to either `socket.AF_INET` - or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise - both will be used if available. - - The ``backlog`` argument has the same meaning as for - `socket.listen `. The ``reuse_port`` argument - has the same meaning as for `.bind_sockets`. - - This method may be called multiple times prior to `start` to listen - on multiple ports or interfaces. - - .. versionchanged:: 4.4 - Added the ``reuse_port`` argument. - """ - sockets = bind_sockets(port, address=address, family=family, - backlog=backlog, reuse_port=reuse_port) - if self._started: - self.add_sockets(sockets) - else: - self._pending_sockets.extend(sockets) - - def start(self, num_processes=1): - """Starts this server in the `.IOLoop`. - - By default, we run the server in this process and do not fork any - additional child process. - - If num_processes is ``None`` or <= 0, we detect the number of cores - available on this machine and fork that number of child - processes. If num_processes is given and > 1, we fork that - specific number of sub-processes. - - Since we use processes and not threads, there is no shared memory - between any server code. 
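Concretely, the pre-fork pattern this docstring describes is (the port number
is a placeholder; a real application would use a ``TCPServer`` subclass)::

    from tornado.ioloop import IOLoop
    from tornado.tcpserver import TCPServer

    server = TCPServer()
    server.bind(8888)
    server.start(0)  # None or <= 0 forks one child per CPU core
    IOLoop.current().start()

Note that ``bind``/``start`` must run before any `.IOLoop` is created, per the
caveat that follows.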
- - Note that multiple processes are not compatible with the autoreload - module (or the ``autoreload=True`` option to `tornado.web.Application` - which defaults to True when ``debug=True``). - When using multiple processes, no IOLoops can be created or - referenced until after the call to ``TCPServer.start(n)``. - """ - assert not self._started - self._started = True - if num_processes != 1: - process.fork_processes(num_processes) - sockets = self._pending_sockets - self._pending_sockets = [] - self.add_sockets(sockets) - - def stop(self): - """Stops listening for new connections. - - Requests currently in progress may still continue after the - server is stopped. - """ - if self._stopped: - return - self._stopped = True - for fd, sock in self._sockets.items(): - assert sock.fileno() == fd - # Unregister socket from IOLoop - self._handlers.pop(fd)() - sock.close() - - def handle_stream(self, stream, address): - """Override to handle a new `.IOStream` from an incoming connection. - - This method may be a coroutine; if so any exceptions it raises - asynchronously will be logged. Accepting of incoming connections - will not be blocked by this coroutine. - - If this `TCPServer` is configured for SSL, ``handle_stream`` - may be called before the SSL handshake has completed. Use - `.SSLIOStream.wait_for_handshake` if you need to verify the client's - certificate or use NPN/ALPN. - - .. versionchanged:: 4.2 - Added the option for this method to be a coroutine. - """ - raise NotImplementedError() - - def _handle_connection(self, connection, address): - if self.ssl_options is not None: - assert ssl, "Python 2.6+ and OpenSSL required for SSL" - try: - connection = ssl_wrap_socket(connection, - self.ssl_options, - server_side=True, - do_handshake_on_connect=False) - except ssl.SSLError as err: - if err.args[0] == ssl.SSL_ERROR_EOF: - return connection.close() - else: - raise - except socket.error as err: - # If the connection is closed immediately after it is created - # (as in a port scan), we can get one of several errors. - # wrap_socket makes an internal call to getpeername, - # which may return either EINVAL (Mac OS X) or ENOTCONN - # (Linux). If it returns ENOTCONN, this error is - # silently swallowed by the ssl module, so we need to - # catch another error later on (AttributeError in - # SSLIOStream._do_ssl_handshake). - # To test this behavior, try nmap with the -sT flag. - # https://github.com/tornadoweb/tornado/pull/750 - if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL): - return connection.close() - else: - raise - try: - if self.ssl_options is not None: - stream = SSLIOStream(connection, - max_buffer_size=self.max_buffer_size, - read_chunk_size=self.read_chunk_size) - else: - stream = IOStream(connection, - max_buffer_size=self.max_buffer_size, - read_chunk_size=self.read_chunk_size) - - future = self.handle_stream(stream, address) - if future is not None: - IOLoop.current().add_future(gen.convert_yielded(future), - lambda f: f.result()) - except Exception: - app_log.error("Error in connection callback", exc_info=True) +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded TCP server.""" + +import errno +import os +import socket +import ssl + +from tornado import gen +from tornado.log import app_log +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream, SSLIOStream +from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket +from tornado import process +from tornado.util import errno_from_exception + +import typing +from typing import Union, Dict, Any, Iterable, Optional, Awaitable + +if typing.TYPE_CHECKING: + from typing import Callable, List # noqa: F401 + + +class TCPServer(object): + r"""A non-blocking, single-threaded TCP server. + + To use `TCPServer`, define a subclass which overrides the `handle_stream` + method. For example, a simple echo server could be defined like this:: + + from tornado.tcpserver import TCPServer + from tornado.iostream import StreamClosedError + from tornado import gen + + class EchoServer(TCPServer): + async def handle_stream(self, stream, address): + while True: + try: + data = await stream.read_until(b"\n") + await stream.write(data) + except StreamClosedError: + break + + To make this server serve SSL traffic, send the ``ssl_options`` keyword + argument with an `ssl.SSLContext` object. For compatibility with older + versions of Python ``ssl_options`` may also be a dictionary of keyword + arguments for the `ssl.wrap_socket` method.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), + os.path.join(data_dir, "mydomain.key")) + TCPServer(ssl_options=ssl_ctx) + + `TCPServer` initialization follows one of three patterns: + + 1. `listen`: simple single-process:: + + server = TCPServer() + server.listen(8888) + IOLoop.current().start() + + 2. `bind`/`start`: simple multi-process:: + + server = TCPServer() + server.bind(8888) + server.start(0) # Forks multiple sub-processes + IOLoop.current().start() + + When using this interface, an `.IOLoop` must *not* be passed + to the `TCPServer` constructor. `start` will always start + the server on the default singleton `.IOLoop`. + + 3. `add_sockets`: advanced multi-process:: + + sockets = bind_sockets(8888) + tornado.process.fork_processes(0) + server = TCPServer() + server.add_sockets(sockets) + IOLoop.current().start() + + The `add_sockets` interface is more complicated, but it can be + used with `tornado.process.fork_processes` to give you more + flexibility in when the fork happens. `add_sockets` can + also be used in single-process servers if you want to create + your listening sockets in some way other than + `~tornado.netutil.bind_sockets`. + + .. versionadded:: 3.1 + The ``max_buffer_size`` argument. + + .. versionchanged:: 5.0 + The ``io_loop`` argument has been removed. 
+ """ + + def __init__( + self, + ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None, + max_buffer_size: int = None, + read_chunk_size: int = None, + ) -> None: + self.ssl_options = ssl_options + self._sockets = {} # type: Dict[int, socket.socket] + self._handlers = {} # type: Dict[int, Callable[[], None]] + self._pending_sockets = [] # type: List[socket.socket] + self._started = False + self._stopped = False + self.max_buffer_size = max_buffer_size + self.read_chunk_size = read_chunk_size + + # Verify the SSL options. Otherwise we don't get errors until clients + # connect. This doesn't verify that the keys are legitimate, but + # the SSL module doesn't do that until there is a connected socket + # which seems like too much work + if self.ssl_options is not None and isinstance(self.ssl_options, dict): + # Only certfile is required: it can contain both keys + if "certfile" not in self.ssl_options: + raise KeyError('missing key "certfile" in ssl_options') + + if not os.path.exists(self.ssl_options["certfile"]): + raise ValueError( + 'certfile "%s" does not exist' % self.ssl_options["certfile"] + ) + if "keyfile" in self.ssl_options and not os.path.exists( + self.ssl_options["keyfile"] + ): + raise ValueError( + 'keyfile "%s" does not exist' % self.ssl_options["keyfile"] + ) + + def listen(self, port: int, address: str = "") -> None: + """Starts accepting connections on the given port. + + This method may be called more than once to listen on multiple ports. + `listen` takes effect immediately; it is not necessary to call + `TCPServer.start` afterwards. It is, however, necessary to start + the `.IOLoop`. + """ + sockets = bind_sockets(port, address=address) + self.add_sockets(sockets) + + def add_sockets(self, sockets: Iterable[socket.socket]) -> None: + """Makes this server start accepting connections on the given sockets. + + The ``sockets`` parameter is a list of socket objects such as + those returned by `~tornado.netutil.bind_sockets`. + `add_sockets` is typically used in combination with that + method and `tornado.process.fork_processes` to provide greater + control over the initialization of a multi-process server. + """ + for sock in sockets: + self._sockets[sock.fileno()] = sock + self._handlers[sock.fileno()] = add_accept_handler( + sock, self._handle_connection + ) + + def add_socket(self, socket: socket.socket) -> None: + """Singular version of `add_sockets`. Takes a single socket object.""" + self.add_sockets([socket]) + + def bind( + self, + port: int, + address: str = None, + family: socket.AddressFamily = socket.AF_UNSPEC, + backlog: int = 128, + reuse_port: bool = False, + ) -> None: + """Binds this server to the given port on the given address. + + To start the server, call `start`. If you want to run this server + in a single process, you can call `listen` as a shortcut to the + sequence of `bind` and `start` calls. + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen `. The ``reuse_port`` argument + has the same meaning as for `.bind_sockets`. + + This method may be called multiple times prior to `start` to listen + on multiple ports or interfaces. + + .. 
versionchanged:: 4.4 + Added the ``reuse_port`` argument. + """ + sockets = bind_sockets( + port, address=address, family=family, backlog=backlog, reuse_port=reuse_port + ) + if self._started: + self.add_sockets(sockets) + else: + self._pending_sockets.extend(sockets) + + def start(self, num_processes: Optional[int] = 1, max_restarts: int = None) -> None: + """Starts this server in the `.IOLoop`. + + By default, we run the server in this process and do not fork any + additional child process. + + If num_processes is ``None`` or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If num_processes is given and > 1, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``TCPServer.start(n)``. + + The ``max_restarts`` argument is passed to `.fork_processes`. + + .. versionchanged:: 6.0 + + Added ``max_restarts`` argument. + """ + assert not self._started + self._started = True + if num_processes != 1: + process.fork_processes(num_processes, max_restarts) + sockets = self._pending_sockets + self._pending_sockets = [] + self.add_sockets(sockets) + + def stop(self) -> None: + """Stops listening for new connections. + + Requests currently in progress may still continue after the + server is stopped. + """ + if self._stopped: + return + self._stopped = True + for fd, sock in self._sockets.items(): + assert sock.fileno() == fd + # Unregister socket from IOLoop + self._handlers.pop(fd)() + sock.close() + + def handle_stream( + self, stream: IOStream, address: tuple + ) -> Optional[Awaitable[None]]: + """Override to handle a new `.IOStream` from an incoming connection. + + This method may be a coroutine; if so any exceptions it raises + asynchronously will be logged. Accepting of incoming connections + will not be blocked by this coroutine. + + If this `TCPServer` is configured for SSL, ``handle_stream`` + may be called before the SSL handshake has completed. Use + `.SSLIOStream.wait_for_handshake` if you need to verify the client's + certificate or use NPN/ALPN. + + .. versionchanged:: 4.2 + Added the option for this method to be a coroutine. + """ + raise NotImplementedError() + + def _handle_connection(self, connection: socket.socket, address: Any) -> None: + if self.ssl_options is not None: + assert ssl, "Python 2.6+ and OpenSSL required for SSL" + try: + connection = ssl_wrap_socket( + connection, + self.ssl_options, + server_side=True, + do_handshake_on_connect=False, + ) + except ssl.SSLError as err: + if err.args[0] == ssl.SSL_ERROR_EOF: + return connection.close() + else: + raise + except socket.error as err: + # If the connection is closed immediately after it is created + # (as in a port scan), we can get one of several errors. + # wrap_socket makes an internal call to getpeername, + # which may return either EINVAL (Mac OS X) or ENOTCONN + # (Linux). If it returns ENOTCONN, this error is + # silently swallowed by the ssl module, so we need to + # catch another error later on (AttributeError in + # SSLIOStream._do_ssl_handshake). + # To test this behavior, try nmap with the -sT flag. 
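Both accepted forms of ``ssl_options`` used throughout this file can be built
as in the following sketch (certificate paths are placeholders)::

    import ssl
    from tornado.tcpserver import TCPServer

    # Preferred form: a fully configured SSLContext.
    ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    ctx.load_cert_chain("mydomain.crt", "mydomain.key")
    server = TCPServer(ssl_options=ctx)

    # Legacy form: keyword arguments for ssl.wrap_socket. Only
    # "certfile" is mandatory, and the constructor checks that the
    # named files exist before any client ever connects.
    server = TCPServer(ssl_options={"certfile": "mydomain.crt",
                                    "keyfile": "mydomain.key"})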
+ # https://github.com/tornadoweb/tornado/pull/750 + if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL): + return connection.close() + else: + raise + try: + if self.ssl_options is not None: + stream = SSLIOStream( + connection, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size, + ) # type: IOStream + else: + stream = IOStream( + connection, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size, + ) + + future = self.handle_stream(stream, address) + if future is not None: + IOLoop.current().add_future( + gen.convert_yielded(future), lambda f: f.result() + ) + except Exception: + app_log.error("Error in connection callback", exc_info=True) diff --git a/server/www/packages/packages-linux/x64/tornado/template.py b/server/www/packages/packages-linux/x64/tornado/template.py index 61b9874..f6093c4 100644 --- a/server/www/packages/packages-linux/x64/tornado/template.py +++ b/server/www/packages/packages-linux/x64/tornado/template.py @@ -1,976 +1,1043 @@ -# -# Copyright 2009 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A simple template system that compiles templates to Python code. - -Basic usage looks like:: - - t = template.Template("{{ myvalue }}") - print(t.generate(myvalue="XXX")) - -`Loader` is a class that loads templates from a root directory and caches -the compiled templates:: - - loader = template.Loader("/home/btaylor") - print(loader.load("test.html").generate(myvalue="XXX")) - -We compile all templates to raw Python. Error-reporting is currently... uh, -interesting. Syntax for the templates:: - - ### base.html - - - {% block title %}Default title{% end %} - - -
-          {% for student in students %}
-            {% block student %}
-              <li>{{ escape(student.name) }}</li>
-            {% end %}
-          {% end %}
-        </ul>
-      </body>
-    </html>
-
-    ### bold.html
-    {% extends "base.html" %}
-
-    {% block title %}A bolder title{% end %}
-
-    {% block student %}
-      <li><span style="bold">{{ escape(student.name) }}</span></li>
-    {% end %}
-
-Unlike most other template systems, we do not put any restrictions on the
-expressions you can include in your statements. ``if`` and ``for`` blocks get
-translated exactly into Python, so you can do complex expressions like::
-
-    {% for student in [p for p in people if p.student and p.age > 23] %}
-      <li>{{ escape(student.name) }}</li>
-    {% end %}
-
-Translating directly to Python means you can apply functions to expressions
-easily, like the ``escape()`` function in the examples above. You can pass
-functions in to your template just like any other variable
-(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`)::
-
-    ### Python code
-    def add(x, y):
-        return x + y
-    template.execute(add=add)
-
-    ### The template
-    {{ add(1, 2) }}
-
-We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`,
-`.json_encode()`, and `.squeeze()` to all templates by default.
-
-Typical applications do not create `Template` or `Loader` instances by
-hand, but instead use the `~.RequestHandler.render` and
-`~.RequestHandler.render_string` methods of
-`tornado.web.RequestHandler`, which load templates automatically based
-on the ``template_path`` `.Application` setting.
-
-Variable names beginning with ``_tt_`` are reserved by the template
-system and should not be used by application code.
-
-Syntax Reference
-----------------
-
-Template expressions are surrounded by double curly braces: ``{{ ... }}``.
-The contents may be any python expression, which will be escaped according
-to the current autoescape setting and inserted into the output. Other
-template directives use ``{% %}``.
-
-To comment out a section so that it is omitted from the output, surround it
-with ``{# ... #}``.
-
-These tags may be escaped as ``{{!``, ``{%!``, and ``{#!``
-if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output.
-
-
-``{% apply *function* %}...{% end %}``
-    Applies a function to the output of all template code between ``apply``
-    and ``end``::
-
-        {% apply linkify %}{{name}} said: {{message}}{% end %}
-
-    Note that as an implementation detail apply blocks are implemented
-    as nested functions and thus may interact strangely with variables
-    set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
-    within loops.
-
-``{% autoescape *function* %}``
-    Sets the autoescape mode for the current file. This does not affect
-    other files, even those referenced by ``{% include %}``. Note that
-    autoescaping can also be configured globally, at the `.Application`
-    or `Loader`.::
-
-        {% autoescape xhtml_escape %}
-        {% autoescape None %}
-
-``{% block *name* %}...{% end %}``
-    Indicates a named, replaceable block for use with ``{% extends %}``.
-    Blocks in the parent template will be replaced with the contents of
-    the same-named block in a child template.::
-
-        <!-- base.html -->
-        <title>{% block title %}Default title{% end %}</title>
-
-        <!-- mypage.html -->
-        {% extends "base.html" %}
-        {% block title %}My page title{% end %}
-
-``{% comment ... %}``
-    A comment which will be removed from the template output. Note that
-    there is no ``{% end %}`` tag; the comment goes from the word ``comment``
-    to the closing ``%}`` tag.
-
-``{% extends *filename* %}``
-    Inherit from another template. Templates that use ``extends`` should
-    contain one or more ``block`` tags to replace content from the parent
-    template. Anything in the child template not contained in a ``block``
-    tag will be ignored. For an example, see the ``{% block %}`` tag.
-
-``{% for *var* in *expr* %}...{% end %}``
-    Same as the python ``for`` statement. ``{% break %}`` and
-    ``{% continue %}`` may be used inside the loop.
-
-``{% from *x* import *y* %}``
-    Same as the python ``import`` statement.
-
-``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
-    Conditional statement - outputs the first section whose condition is
-    true.
(The ``elif`` and ``else`` sections are optional) - -``{% import *module* %}`` - Same as the python ``import`` statement. - -``{% include *filename* %}`` - Includes another template file. The included file can see all the local - variables as if it were copied directly to the point of the ``include`` - directive (the ``{% autoescape %}`` directive is an exception). - Alternately, ``{% module Template(filename, **kwargs) %}`` may be used - to include another template with an isolated namespace. - -``{% module *expr* %}`` - Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is - not escaped:: - - {% module Template("foo.html", arg=42) %} - - ``UIModules`` are a feature of the `tornado.web.RequestHandler` - class (and specifically its ``render`` method) and will not work - when the template system is used on its own in other contexts. - -``{% raw *expr* %}`` - Outputs the result of the given expression without autoescaping. - -``{% set *x* = *y* %}`` - Sets a local variable. - -``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}`` - Same as the python ``try`` statement. - -``{% while *condition* %}... {% end %}`` - Same as the python ``while`` statement. ``{% break %}`` and - ``{% continue %}`` may be used inside the loop. - -``{% whitespace *mode* %}`` - Sets the whitespace mode for the remainder of the current file - (or until the next ``{% whitespace %}`` directive). See - `filter_whitespace` for available options. New in Tornado 4.3. -""" - -from __future__ import absolute_import, division, print_function - -import datetime -import linecache -import os.path -import posixpath -import re -import threading - -from tornado import escape -from tornado.log import app_log -from tornado.util import ObjectDict, exec_in, unicode_type, PY3 - -if PY3: - from io import StringIO -else: - from cStringIO import StringIO - -_DEFAULT_AUTOESCAPE = "xhtml_escape" -_UNSET = object() - - -def filter_whitespace(mode, text): - """Transform whitespace in ``text`` according to ``mode``. - - Available modes are: - - * ``all``: Return all whitespace unmodified. - * ``single``: Collapse consecutive whitespace with a single whitespace - character, preserving newlines. - * ``oneline``: Collapse all runs of whitespace into a single space - character, removing all newlines in the process. - - .. versionadded:: 4.3 - """ - if mode == 'all': - return text - elif mode == 'single': - text = re.sub(r"([\t ]+)", " ", text) - text = re.sub(r"(\s*\n\s*)", "\n", text) - return text - elif mode == 'oneline': - return re.sub(r"(\s+)", " ", text) - else: - raise Exception("invalid whitespace mode %s" % mode) - - -class Template(object): - """A compiled template. - - We compile into Python from the given template_string. You can generate - the template from variables with generate(). - """ - # note that the constructor's signature is not extracted with - # autodoc because _UNSET looks like garbage. When changing - # this signature update website/sphinx/template.rst too. - def __init__(self, template_string, name="", loader=None, - compress_whitespace=_UNSET, autoescape=_UNSET, - whitespace=None): - """Construct a Template. - - :arg str template_string: the contents of the template file. - :arg str name: the filename from which the template was loaded - (used for error message). - :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible - for this template, used to resolve ``{% include %}`` and ``{% extend %}`` directives. 
- :arg bool compress_whitespace: Deprecated since Tornado 4.3. - Equivalent to ``whitespace="single"`` if true and - ``whitespace="all"`` if false. - :arg str autoescape: The name of a function in the template - namespace, or ``None`` to disable escaping by default. - :arg str whitespace: A string specifying treatment of whitespace; - see `filter_whitespace` for options. - - .. versionchanged:: 4.3 - Added ``whitespace`` parameter; deprecated ``compress_whitespace``. - """ - self.name = escape.native_str(name) - - if compress_whitespace is not _UNSET: - # Convert deprecated compress_whitespace (bool) to whitespace (str). - if whitespace is not None: - raise Exception("cannot set both whitespace and compress_whitespace") - whitespace = "single" if compress_whitespace else "all" - if whitespace is None: - if loader and loader.whitespace: - whitespace = loader.whitespace - else: - # Whitespace defaults by filename. - if name.endswith(".html") or name.endswith(".js"): - whitespace = "single" - else: - whitespace = "all" - # Validate the whitespace setting. - filter_whitespace(whitespace, '') - - if autoescape is not _UNSET: - self.autoescape = autoescape - elif loader: - self.autoescape = loader.autoescape - else: - self.autoescape = _DEFAULT_AUTOESCAPE - - self.namespace = loader.namespace if loader else {} - reader = _TemplateReader(name, escape.native_str(template_string), - whitespace) - self.file = _File(self, _parse(reader, self)) - self.code = self._generate_python(loader) - self.loader = loader - try: - # Under python2.5, the fake filename used here must match - # the module name used in __name__ below. - # The dont_inherit flag prevents template.py's future imports - # from being applied to the generated code. - self.compiled = compile( - escape.to_unicode(self.code), - "%s.generated.py" % self.name.replace('.', '_'), - "exec", dont_inherit=True) - except Exception: - formatted_code = _format_code(self.code).rstrip() - app_log.error("%s code:\n%s", self.name, formatted_code) - raise - - def generate(self, **kwargs): - """Generate this template with the given arguments.""" - namespace = { - "escape": escape.xhtml_escape, - "xhtml_escape": escape.xhtml_escape, - "url_escape": escape.url_escape, - "json_encode": escape.json_encode, - "squeeze": escape.squeeze, - "linkify": escape.linkify, - "datetime": datetime, - "_tt_utf8": escape.utf8, # for internal use - "_tt_string_types": (unicode_type, bytes), - # __name__ and __loader__ allow the traceback mechanism to find - # the generated source code. - "__name__": self.name.replace('.', '_'), - "__loader__": ObjectDict(get_source=lambda name: self.code), - } - namespace.update(self.namespace) - namespace.update(kwargs) - exec_in(self.compiled, namespace) - execute = namespace["_tt_execute"] - # Clear the traceback module's cache of source data now that - # we've generated a new template (mainly for this module's - # unittests, where different tests reuse the same name). 
- linecache.clearcache() - return execute() - - def _generate_python(self, loader): - buffer = StringIO() - try: - # named_blocks maps from names to _NamedBlock objects - named_blocks = {} - ancestors = self._get_ancestors(loader) - ancestors.reverse() - for ancestor in ancestors: - ancestor.find_named_blocks(loader, named_blocks) - writer = _CodeWriter(buffer, named_blocks, loader, - ancestors[0].template) - ancestors[0].generate(writer) - return buffer.getvalue() - finally: - buffer.close() - - def _get_ancestors(self, loader): - ancestors = [self.file] - for chunk in self.file.body.chunks: - if isinstance(chunk, _ExtendsBlock): - if not loader: - raise ParseError("{% extends %} block found, but no " - "template loader") - template = loader.load(chunk.name, self.name) - ancestors.extend(template._get_ancestors(loader)) - return ancestors - - -class BaseLoader(object): - """Base class for template loaders. - - You must use a template loader to use template constructs like - ``{% extends %}`` and ``{% include %}``. The loader caches all - templates after they are loaded the first time. - """ - def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None, - whitespace=None): - """Construct a template loader. - - :arg str autoescape: The name of a function in the template - namespace, such as "xhtml_escape", or ``None`` to disable - autoescaping by default. - :arg dict namespace: A dictionary to be added to the default template - namespace, or ``None``. - :arg str whitespace: A string specifying default behavior for - whitespace in templates; see `filter_whitespace` for options. - Default is "single" for files ending in ".html" and ".js" and - "all" for other files. - - .. versionchanged:: 4.3 - Added ``whitespace`` parameter. - """ - self.autoescape = autoescape - self.namespace = namespace or {} - self.whitespace = whitespace - self.templates = {} - # self.lock protects self.templates. It's a reentrant lock - # because templates may load other templates via `include` or - # `extends`. Note that thanks to the GIL this code would be safe - # even without the lock, but could lead to wasted work as multiple - # threads tried to compile the same template simultaneously. - self.lock = threading.RLock() - - def reset(self): - """Resets the cache of compiled templates.""" - with self.lock: - self.templates = {} - - def resolve_path(self, name, parent_path=None): - """Converts a possibly-relative path to absolute (used internally).""" - raise NotImplementedError() - - def load(self, name, parent_path=None): - """Loads a template.""" - name = self.resolve_path(name, parent_path=parent_path) - with self.lock: - if name not in self.templates: - self.templates[name] = self._create_template(name) - return self.templates[name] - - def _create_template(self, name): - raise NotImplementedError() - - -class Loader(BaseLoader): - """A template loader that loads from a single root directory. 
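The loader classes around this hunk can be exercised without touching the
filesystem via ``DictLoader``; a small sketch of ``{% extends %}`` resolution
(template names here are invented)::

    from tornado.template import DictLoader

    loader = DictLoader({
        "base.html": "{% block body %}default body{% end %}",
        "page.html": '{% extends "base.html" %}'
                     '{% block body %}page body{% end %}',
    })
    # load() resolves {% extends %} through the loader and caches the
    # compiled template for later calls.
    print(loader.load("page.html").generate())  # b'page body'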
- """ - def __init__(self, root_directory, **kwargs): - super(Loader, self).__init__(**kwargs) - self.root = os.path.abspath(root_directory) - - def resolve_path(self, name, parent_path=None): - if parent_path and not parent_path.startswith("<") and \ - not parent_path.startswith("/") and \ - not name.startswith("/"): - current_path = os.path.join(self.root, parent_path) - file_dir = os.path.dirname(os.path.abspath(current_path)) - relative_path = os.path.abspath(os.path.join(file_dir, name)) - if relative_path.startswith(self.root): - name = relative_path[len(self.root) + 1:] - return name - - def _create_template(self, name): - path = os.path.join(self.root, name) - with open(path, "rb") as f: - template = Template(f.read(), name=name, loader=self) - return template - - -class DictLoader(BaseLoader): - """A template loader that loads from a dictionary.""" - def __init__(self, dict, **kwargs): - super(DictLoader, self).__init__(**kwargs) - self.dict = dict - - def resolve_path(self, name, parent_path=None): - if parent_path and not parent_path.startswith("<") and \ - not parent_path.startswith("/") and \ - not name.startswith("/"): - file_dir = posixpath.dirname(parent_path) - name = posixpath.normpath(posixpath.join(file_dir, name)) - return name - - def _create_template(self, name): - return Template(self.dict[name], name=name, loader=self) - - -class _Node(object): - def each_child(self): - return () - - def generate(self, writer): - raise NotImplementedError() - - def find_named_blocks(self, loader, named_blocks): - for child in self.each_child(): - child.find_named_blocks(loader, named_blocks) - - -class _File(_Node): - def __init__(self, template, body): - self.template = template - self.body = body - self.line = 0 - - def generate(self, writer): - writer.write_line("def _tt_execute():", self.line) - with writer.indent(): - writer.write_line("_tt_buffer = []", self.line) - writer.write_line("_tt_append = _tt_buffer.append", self.line) - self.body.generate(writer) - writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line) - - def each_child(self): - return (self.body,) - - -class _ChunkList(_Node): - def __init__(self, chunks): - self.chunks = chunks - - def generate(self, writer): - for chunk in self.chunks: - chunk.generate(writer) - - def each_child(self): - return self.chunks - - -class _NamedBlock(_Node): - def __init__(self, name, body, template, line): - self.name = name - self.body = body - self.template = template - self.line = line - - def each_child(self): - return (self.body,) - - def generate(self, writer): - block = writer.named_blocks[self.name] - with writer.include(block.template, self.line): - block.body.generate(writer) - - def find_named_blocks(self, loader, named_blocks): - named_blocks[self.name] = self - _Node.find_named_blocks(self, loader, named_blocks) - - -class _ExtendsBlock(_Node): - def __init__(self, name): - self.name = name - - -class _IncludeBlock(_Node): - def __init__(self, name, reader, line): - self.name = name - self.template_name = reader.name - self.line = line - - def find_named_blocks(self, loader, named_blocks): - included = loader.load(self.name, self.template_name) - included.file.find_named_blocks(loader, named_blocks) - - def generate(self, writer): - included = writer.loader.load(self.name, self.template_name) - with writer.include(included, self.line): - included.file.body.generate(writer) - - -class _ApplyBlock(_Node): - def __init__(self, method, line, body=None): - self.method = method - self.line = line - self.body = 
body - - def each_child(self): - return (self.body,) - - def generate(self, writer): - method_name = "_tt_apply%d" % writer.apply_counter - writer.apply_counter += 1 - writer.write_line("def %s():" % method_name, self.line) - with writer.indent(): - writer.write_line("_tt_buffer = []", self.line) - writer.write_line("_tt_append = _tt_buffer.append", self.line) - self.body.generate(writer) - writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line) - writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % ( - self.method, method_name), self.line) - - -class _ControlBlock(_Node): - def __init__(self, statement, line, body=None): - self.statement = statement - self.line = line - self.body = body - - def each_child(self): - return (self.body,) - - def generate(self, writer): - writer.write_line("%s:" % self.statement, self.line) - with writer.indent(): - self.body.generate(writer) - # Just in case the body was empty - writer.write_line("pass", self.line) - - -class _IntermediateControlBlock(_Node): - def __init__(self, statement, line): - self.statement = statement - self.line = line - - def generate(self, writer): - # In case the previous block was empty - writer.write_line("pass", self.line) - writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1) - - -class _Statement(_Node): - def __init__(self, statement, line): - self.statement = statement - self.line = line - - def generate(self, writer): - writer.write_line(self.statement, self.line) - - -class _Expression(_Node): - def __init__(self, expression, line, raw=False): - self.expression = expression - self.line = line - self.raw = raw - - def generate(self, writer): - writer.write_line("_tt_tmp = %s" % self.expression, self.line) - writer.write_line("if isinstance(_tt_tmp, _tt_string_types):" - " _tt_tmp = _tt_utf8(_tt_tmp)", self.line) - writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line) - if not self.raw and writer.current_template.autoescape is not None: - # In python3 functions like xhtml_escape return unicode, - # so we have to convert to utf8 again. - writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" % - writer.current_template.autoescape, self.line) - writer.write_line("_tt_append(_tt_tmp)", self.line) - - -class _Module(_Expression): - def __init__(self, expression, line): - super(_Module, self).__init__("_tt_modules." + expression, line, - raw=True) - - -class _Text(_Node): - def __init__(self, value, line, whitespace): - self.value = value - self.line = line - self.whitespace = whitespace - - def generate(self, writer): - value = self.value - - # Compress whitespace if requested, with a crude heuristic to avoid - # altering preformatted whitespace. - if "
    " not in value:
    -            value = filter_whitespace(self.whitespace, value)
    -
    -        if value:
    -            writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
    -
    -
    -class ParseError(Exception):
    -    """Raised for template syntax errors.
    -
    -    ``ParseError`` instances have ``filename`` and ``lineno`` attributes
    -    indicating the position of the error.
    -
    -    .. versionchanged:: 4.3
    -       Added ``filename`` and ``lineno`` attributes.
    -    """
    -    def __init__(self, message, filename=None, lineno=0):
    -        self.message = message
    -        # The names "filename" and "lineno" are chosen for consistency
    -        # with python SyntaxError.
    -        self.filename = filename
    -        self.lineno = lineno
    -
    -    def __str__(self):
    -        return '%s at %s:%d' % (self.message, self.filename, self.lineno)
    -
    -
    -class _CodeWriter(object):
    -    def __init__(self, file, named_blocks, loader, current_template):
    -        self.file = file
    -        self.named_blocks = named_blocks
    -        self.loader = loader
    -        self.current_template = current_template
    -        self.apply_counter = 0
    -        self.include_stack = []
    -        self._indent = 0
    -
    -    def indent_size(self):
    -        return self._indent
    -
    -    def indent(self):
    -        class Indenter(object):
    -            def __enter__(_):
    -                self._indent += 1
    -                return self
    -
    -            def __exit__(_, *args):
    -                assert self._indent > 0
    -                self._indent -= 1
    -
    -        return Indenter()
    -
    -    def include(self, template, line):
    -        self.include_stack.append((self.current_template, line))
    -        self.current_template = template
    -
    -        class IncludeTemplate(object):
    -            def __enter__(_):
    -                return self
    -
    -            def __exit__(_, *args):
    -                self.current_template = self.include_stack.pop()[0]
    -
    -        return IncludeTemplate()
    -
    -    def write_line(self, line, line_number, indent=None):
    -        if indent is None:
    -            indent = self._indent
    -        line_comment = '  # %s:%d' % (self.current_template.name, line_number)
    -        if self.include_stack:
    -            ancestors = ["%s:%d" % (tmpl.name, lineno)
    -                         for (tmpl, lineno) in self.include_stack]
    -            line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
    -        print("    " * indent + line + line_comment, file=self.file)
    -
    -
    -class _TemplateReader(object):
    -    def __init__(self, name, text, whitespace):
    -        self.name = name
    -        self.text = text
    -        self.whitespace = whitespace
    -        self.line = 1
    -        self.pos = 0
    -
    -    def find(self, needle, start=0, end=None):
    -        assert start >= 0, start
    -        pos = self.pos
    -        start += pos
    -        if end is None:
    -            index = self.text.find(needle, start)
    -        else:
    -            end += pos
    -            assert end >= start
    -            index = self.text.find(needle, start, end)
    -        if index != -1:
    -            index -= pos
    -        return index
    -
    -    def consume(self, count=None):
    -        if count is None:
    -            count = len(self.text) - self.pos
    -        newpos = self.pos + count
    -        self.line += self.text.count("\n", self.pos, newpos)
    -        s = self.text[self.pos:newpos]
    -        self.pos = newpos
    -        return s
    -
    -    def remaining(self):
    -        return len(self.text) - self.pos
    -
    -    def __len__(self):
    -        return self.remaining()
    -
    -    def __getitem__(self, key):
    -        if type(key) is slice:
    -            size = len(self)
    -            start, stop, step = key.indices(size)
    -            if start is None:
    -                start = self.pos
    -            else:
    -                start += self.pos
    -            if stop is not None:
    -                stop += self.pos
    -            return self.text[slice(start, stop, step)]
    -        elif key < 0:
    -            return self.text[key]
    -        else:
    -            return self.text[self.pos + key]
    -
    -    def __str__(self):
    -        return self.text[self.pos:]
    -
    -    def raise_parse_error(self, msg):
    -        raise ParseError(msg, self.name, self.line)
    -
    -
    -def _format_code(code):
    -    lines = code.splitlines()
    -    format = "%%%dd  %%s\n" % len(repr(len(lines) + 1))
    -    return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
    -
    -
    -def _parse(reader, template, in_block=None, in_loop=None):
    -    body = _ChunkList([])
    -    while True:
    -        # Find next template directive
    -        curly = 0
    -        while True:
    -            curly = reader.find("{", curly)
    -            if curly == -1 or curly + 1 == reader.remaining():
    -                # EOF
    -                if in_block:
    -                    reader.raise_parse_error(
    -                        "Missing {%% end %%} block for %s" % in_block)
    -                body.chunks.append(_Text(reader.consume(), reader.line,
    -                                         reader.whitespace))
    -                return body
    -            # If the first curly brace is not the start of a special token,
    -            # start searching from the character after it
    -            if reader[curly + 1] not in ("{", "%", "#"):
    -                curly += 1
    -                continue
    -            # When there are more than 2 curlies in a row, use the
    -            # innermost ones.  This is useful when generating languages
    -            # like latex where curlies are also meaningful
    -            if (curly + 2 < reader.remaining() and
    -                    reader[curly + 1] == '{' and reader[curly + 2] == '{'):
    -                curly += 1
    -                continue
    -            break
    -
    -        # Append any text before the special token
    -        if curly > 0:
    -            cons = reader.consume(curly)
    -            body.chunks.append(_Text(cons, reader.line,
    -                                     reader.whitespace))
    -
    -        start_brace = reader.consume(2)
    -        line = reader.line
    -
    -        # Template directives may be escaped as "{{!" or "{%!".
    -        # In this case output the braces and consume the "!".
    -        # This is especially useful in conjunction with jquery templates,
    -        # which also use double braces.
    -        if reader.remaining() and reader[0] == "!":
    -            reader.consume(1)
    -            body.chunks.append(_Text(start_brace, line,
    -                                     reader.whitespace))
    -            continue
    -
    -        # Comment
    -        if start_brace == "{#":
    -            end = reader.find("#}")
    -            if end == -1:
    -                reader.raise_parse_error("Missing end comment #}")
    -            contents = reader.consume(end).strip()
    -            reader.consume(2)
    -            continue
    -
    -        # Expression
    -        if start_brace == "{{":
    -            end = reader.find("}}")
    -            if end == -1:
    -                reader.raise_parse_error("Missing end expression }}")
    -            contents = reader.consume(end).strip()
    -            reader.consume(2)
    -            if not contents:
    -                reader.raise_parse_error("Empty expression")
    -            body.chunks.append(_Expression(contents, line))
    -            continue
    -
    -        # Block
    -        assert start_brace == "{%", start_brace
    -        end = reader.find("%}")
    -        if end == -1:
    -            reader.raise_parse_error("Missing end block %}")
    -        contents = reader.consume(end).strip()
    -        reader.consume(2)
    -        if not contents:
    -            reader.raise_parse_error("Empty block tag ({% %})")
    -
    -        operator, space, suffix = contents.partition(" ")
    -        suffix = suffix.strip()
    -
    -        # Intermediate ("else", "elif", etc) blocks
    -        intermediate_blocks = {
    -            "else": set(["if", "for", "while", "try"]),
    -            "elif": set(["if"]),
    -            "except": set(["try"]),
    -            "finally": set(["try"]),
    -        }
    -        allowed_parents = intermediate_blocks.get(operator)
    -        if allowed_parents is not None:
    -            if not in_block:
    -                reader.raise_parse_error("%s outside %s block" %
    -                                         (operator, allowed_parents))
    -            if in_block not in allowed_parents:
    -                reader.raise_parse_error(
    -                    "%s block cannot be attached to %s block" %
    -                    (operator, in_block))
    -            body.chunks.append(_IntermediateControlBlock(contents, line))
    -            continue
    -
    -        # End tag
    -        elif operator == "end":
    -            if not in_block:
    -                reader.raise_parse_error("Extra {% end %} block")
    -            return body
    -
    -        elif operator in ("extends", "include", "set", "import", "from",
    -                          "comment", "autoescape", "whitespace", "raw",
    -                          "module"):
    -            if operator == "comment":
    -                continue
    -            if operator == "extends":
    -                suffix = suffix.strip('"').strip("'")
    -                if not suffix:
    -                    reader.raise_parse_error("extends missing file path")
    -                block = _ExtendsBlock(suffix)
    -            elif operator in ("import", "from"):
    -                if not suffix:
    -                    reader.raise_parse_error("import missing statement")
    -                block = _Statement(contents, line)
    -            elif operator == "include":
    -                suffix = suffix.strip('"').strip("'")
    -                if not suffix:
    -                    reader.raise_parse_error("include missing file path")
    -                block = _IncludeBlock(suffix, reader, line)
    -            elif operator == "set":
    -                if not suffix:
    -                    reader.raise_parse_error("set missing statement")
    -                block = _Statement(suffix, line)
    -            elif operator == "autoescape":
    -                fn = suffix.strip()
    -                if fn == "None":
    -                    fn = None
    -                template.autoescape = fn
    -                continue
    -            elif operator == "whitespace":
    -                mode = suffix.strip()
    -                # Validate the selected mode
    -                filter_whitespace(mode, '')
    -                reader.whitespace = mode
    -                continue
    -            elif operator == "raw":
    -                block = _Expression(suffix, line, raw=True)
    -            elif operator == "module":
    -                block = _Module(suffix, line)
    -            body.chunks.append(block)
    -            continue
    -
    -        elif operator in ("apply", "block", "try", "if", "for", "while"):
    -            # parse inner body recursively
    -            if operator in ("for", "while"):
    -                block_body = _parse(reader, template, operator, operator)
    -            elif operator == "apply":
    -                # apply creates a nested function so syntactically it's not
    -                # in the loop.
    -                block_body = _parse(reader, template, operator, None)
    -            else:
    -                block_body = _parse(reader, template, operator, in_loop)
    -
    -            if operator == "apply":
    -                if not suffix:
    -                    reader.raise_parse_error("apply missing method name")
    -                block = _ApplyBlock(suffix, line, block_body)
    -            elif operator == "block":
    -                if not suffix:
    -                    reader.raise_parse_error("block missing name")
    -                block = _NamedBlock(suffix, block_body, template, line)
    -            else:
    -                block = _ControlBlock(contents, line, block_body)
    -            body.chunks.append(block)
    -            continue
    -
    -        elif operator in ("break", "continue"):
    -            if not in_loop:
    -                reader.raise_parse_error("%s outside %s block" %
    -                                         (operator, set(["for", "while"])))
    -            body.chunks.append(_Statement(contents, line))
    -            continue
    -
    -        else:
    -            reader.raise_parse_error("unknown operator: %r" % operator)
    +#
    +# Copyright 2009 Facebook
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License"); you may
    +# not use this file except in compliance with the License. You may obtain
    +# a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    +# License for the specific language governing permissions and limitations
    +# under the License.
    +
    +"""A simple template system that compiles templates to Python code.
    +
    +Basic usage looks like::
    +
    +    t = template.Template("{{ myvalue }}")
    +    print(t.generate(myvalue="XXX"))
    +
    +`Loader` is a class that loads templates from a root directory and caches
    +the compiled templates::
    +
    +    loader = template.Loader("/home/btaylor")
    +    print(loader.load("test.html").generate(myvalue="XXX"))
    +
    +We compile all templates to raw Python. Error-reporting is currently... uh,
    +interesting. Syntax for the templates::
    +
    +    ### base.html
+    <html>
+      <head>
+        <title>{% block title %}Default title{% end %}</title>
+      </head>
+      <body>
+        <ul>
+          {% for student in students %}
+            {% block student %}
+              <li>{{ escape(student.name) }}</li>
+            {% end %}
+          {% end %}
+        </ul>
+      </body>
+    </html>
+
+    ### bold.html
+    {% extends "base.html" %}
+
+    {% block title %}A bolder title{% end %}
+
+    {% block student %}
+      <li><span style="bold">{{ escape(student.name) }}</span></li>
+    {% end %}
+
+Unlike most other template systems, we do not put any restrictions on the
+expressions you can include in your statements. ``if`` and ``for`` blocks get
+translated exactly into Python, so you can do complex expressions like::
+
+   {% for student in [p for p in people if p.student and p.age > 23] %}
+     <li>{{ escape(student.name) }}</li>
+   {% end %}
+
+Translating directly to Python means you can apply functions to expressions
+easily, like the ``escape()`` function in the examples above. You can pass
+functions in to your template just like any other variable
+(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`)::
+
+   ### Python code
+   def add(x, y):
+      return x + y
+   template.execute(add=add)
+
+   ### The template
+   {{ add(1, 2) }}
+
+We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`,
+`.json_encode()`, and `.squeeze()` to all templates by default.
+
+Typical applications do not create `Template` or `Loader` instances by
+hand, but instead use the `~.RequestHandler.render` and
+`~.RequestHandler.render_string` methods of
+`tornado.web.RequestHandler`, which load templates automatically based
+on the ``template_path`` `.Application` setting.
+
+Variable names beginning with ``_tt_`` are reserved by the template
+system and should not be used by application code.
+
+Syntax Reference
+----------------
+
+Template expressions are surrounded by double curly braces: ``{{ ... }}``.
+The contents may be any python expression, which will be escaped according
+to the current autoescape setting and inserted into the output. Other
+template directives use ``{% %}``.
+
+To comment out a section so that it is omitted from the output, surround it
+with ``{# ... #}``.
+
+These tags may be escaped as ``{{!``, ``{%!``, and ``{#!``
+if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output.
+
+
+``{% apply *function* %}...{% end %}``
+    Applies a function to the output of all template code between ``apply``
+    and ``end``::
+
+        {% apply linkify %}{{name}} said: {{message}}{% end %}
+
+    Note that as an implementation detail apply blocks are implemented
+    as nested functions and thus may interact strangely with variables
+    set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
+    within loops.
+
+``{% autoescape *function* %}``
+    Sets the autoescape mode for the current file. This does not affect
+    other files, even those referenced by ``{% include %}``. Note that
+    autoescaping can also be configured globally, at the `.Application`
+    or `Loader`.::
+
+        {% autoescape xhtml_escape %}
+        {% autoescape None %}
+
+``{% block *name* %}...{% end %}``
+    Indicates a named, replaceable block for use with ``{% extends %}``.
+    Blocks in the parent template will be replaced with the contents of
+    the same-named block in a child template.::
+
+        <!-- base.html -->
+        <title>{% block title %}Default title{% end %}</title>
+
+        <!-- mypage.html -->
+        {% extends "base.html" %}
+        {% block title %}My page title{% end %}
+
+``{% comment ... %}``
+    A comment which will be removed from the template output. Note that
+    there is no ``{% end %}`` tag; the comment goes from the word ``comment``
+    to the closing ``%}`` tag.
+
+``{% extends *filename* %}``
+    Inherit from another template. Templates that use ``extends`` should
+    contain one or more ``block`` tags to replace content from the parent
+    template. Anything in the child template not contained in a ``block``
+    tag will be ignored. For an example, see the ``{% block %}`` tag.
+
+``{% for *var* in *expr* %}...{% end %}``
+    Same as the python ``for`` statement. ``{% break %}`` and
+    ``{% continue %}`` may be used inside the loop.
+
+``{% from *x* import *y* %}``
+    Same as the python ``import`` statement.
+
+``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
+    Conditional statement - outputs the first section whose condition is
+    true. (The ``elif`` and ``else`` sections are optional)
+
+``{% import *module* %}``
+    Same as the python ``import`` statement.
+
+``{% include *filename* %}``
+    Includes another template file. The included file can see all the local
+    variables as if it were copied directly to the point of the ``include``
+    directive (the ``{% autoescape %}`` directive is an exception).
+    Alternately, ``{% module Template(filename, **kwargs) %}`` may be used
+    to include another template with an isolated namespace.
+
+``{% module *expr* %}``
+    Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is
+    not escaped::
+
+        {% module Template("foo.html", arg=42) %}
+
+    ``UIModules`` are a feature of the `tornado.web.RequestHandler`
+    class (and specifically its ``render`` method) and will not work
+    when the template system is used on its own in other contexts.
+
+``{% raw *expr* %}``
+    Outputs the result of the given expression without autoescaping.
+
+``{% set *x* = *y* %}``
+    Sets a local variable.
+
+``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}``
+    Same as the python ``try`` statement.
+
+``{% while *condition* %}... {% end %}``
+    Same as the python ``while`` statement. ``{% break %}`` and
+    ``{% continue %}`` may be used inside the loop.
+
+``{% whitespace *mode* %}``
+    Sets the whitespace mode for the remainder of the current file
+    (or until the next ``{% whitespace %}`` directive). See
+    `filter_whitespace` for available options. New in Tornado 4.3.
+"""
+
+import datetime
+from io import StringIO
+import linecache
+import os.path
+import posixpath
+import re
+import threading
+
+from tornado import escape
+from tornado.log import app_log
+from tornado.util import ObjectDict, exec_in, unicode_type
+
+from typing import Any, Union, Callable, List, Dict, Iterable, Optional, TextIO
+import typing
+
+if typing.TYPE_CHECKING:
+    from typing import Tuple, ContextManager  # noqa: F401
+
+_DEFAULT_AUTOESCAPE = "xhtml_escape"
+
+
+class _UnsetMarker:
+    pass
+
+
+_UNSET = _UnsetMarker()
+
+
+def filter_whitespace(mode: str, text: str) -> str:
+    """Transform whitespace in ``text`` according to ``mode``.
+
+    Available modes are:
+
+    * ``all``: Return all whitespace unmodified.
+    * ``single``: Collapse consecutive whitespace with a single whitespace
+      character, preserving newlines.
+    * ``oneline``: Collapse all runs of whitespace into a single space
+      character, removing all newlines in the process.
+
+    .. versionadded:: 4.3
+    """
+    if mode == "all":
+        return text
+    elif mode == "single":
+        text = re.sub(r"([\t ]+)", " ", text)
+        text = re.sub(r"(\s*\n\s*)", "\n", text)
+        return text
+    elif mode == "oneline":
+        return re.sub(r"(\s+)", " ", text)
+    else:
+        raise Exception("invalid whitespace mode %s" % mode)
+
+
+class Template(object):
+    """A compiled template.
+
+    We compile into Python from the given template_string. You can generate
+    the template from variables with generate().
+    """
+
+    # note that the constructor's signature is not extracted with
+    # autodoc because _UNSET looks like garbage. When changing
+    # this signature update website/sphinx/template.rst too.
+    def __init__(
+        self,
+        template_string: Union[str, bytes],
+        name: str = "<string>",
+        loader: "BaseLoader" = None,
+        compress_whitespace: Union[bool, _UnsetMarker] = _UNSET,
+        autoescape: Union[str, _UnsetMarker] = _UNSET,
+        whitespace: str = None,
+    ) -> None:
+        """Construct a Template.
+
+        :arg str template_string: the contents of the template file.
+        :arg str name: the filename from which the template was loaded
+            (used for error message).
+        :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible
+            for this template, used to resolve ``{% include %}`` and ``{% extend %}`` directives.
+        :arg bool compress_whitespace: Deprecated since Tornado 4.3.
+            Equivalent to ``whitespace="single"`` if true and
+            ``whitespace="all"`` if false.
+        :arg str autoescape: The name of a function in the template
+            namespace, or ``None`` to disable escaping by default.
+        :arg str whitespace: A string specifying treatment of whitespace;
+            see `filter_whitespace` for options.
+
+        .. versionchanged:: 4.3
+           Added ``whitespace`` parameter; deprecated ``compress_whitespace``.
+        """
+        self.name = escape.native_str(name)
+
+        if compress_whitespace is not _UNSET:
+            # Convert deprecated compress_whitespace (bool) to whitespace (str).
+            if whitespace is not None:
+                raise Exception("cannot set both whitespace and compress_whitespace")
+            whitespace = "single" if compress_whitespace else "all"
+        if whitespace is None:
+            if loader and loader.whitespace:
+                whitespace = loader.whitespace
+            else:
+                # Whitespace defaults by filename.
+                if name.endswith(".html") or name.endswith(".js"):
+                    whitespace = "single"
+                else:
+                    whitespace = "all"
+        # Validate the whitespace setting.
+        assert whitespace is not None
+        filter_whitespace(whitespace, "")
+
+        if not isinstance(autoescape, _UnsetMarker):
+            self.autoescape = autoescape  # type: Optional[str]
+        elif loader:
+            self.autoescape = loader.autoescape
+        else:
+            self.autoescape = _DEFAULT_AUTOESCAPE
+
+        self.namespace = loader.namespace if loader else {}
+        reader = _TemplateReader(name, escape.native_str(template_string), whitespace)
+        self.file = _File(self, _parse(reader, self))
+        self.code = self._generate_python(loader)
+        self.loader = loader
+        try:
+            # Under python2.5, the fake filename used here must match
+            # the module name used in __name__ below.
+            # The dont_inherit flag prevents template.py's future imports
+            # from being applied to the generated code.
+            self.compiled = compile(
+                escape.to_unicode(self.code),
+                "%s.generated.py" % self.name.replace(".", "_"),
+                "exec",
+                dont_inherit=True,
+            )
+        except Exception:
+            formatted_code = _format_code(self.code).rstrip()
+            app_log.error("%s code:\n%s", self.name, formatted_code)
+            raise
+
+    def generate(self, **kwargs: Any) -> bytes:
+        """Generate this template with the given arguments."""
+        namespace = {
+            "escape": escape.xhtml_escape,
+            "xhtml_escape": escape.xhtml_escape,
+            "url_escape": escape.url_escape,
+            "json_encode": escape.json_encode,
+            "squeeze": escape.squeeze,
+            "linkify": escape.linkify,
+            "datetime": datetime,
+            "_tt_utf8": escape.utf8,  # for internal use
+            "_tt_string_types": (unicode_type, bytes),
+            # __name__ and __loader__ allow the traceback mechanism to find
+            # the generated source code.
+            "__name__": self.name.replace(".", "_"),
+            "__loader__": ObjectDict(get_source=lambda name: self.code),
+        }
+        namespace.update(self.namespace)
+        namespace.update(kwargs)
+        exec_in(self.compiled, namespace)
+        execute = typing.cast(Callable[[], bytes], namespace["_tt_execute"])
+        # Clear the traceback module's cache of source data now that
+        # we've generated a new template (mainly for this module's
+        # unittests, where different tests reuse the same name).
+        linecache.clearcache()
+        return execute()
+
+    def _generate_python(self, loader: Optional["BaseLoader"]) -> str:
+        buffer = StringIO()
+        try:
+            # named_blocks maps from names to _NamedBlock objects
+            named_blocks = {}  # type: Dict[str, _NamedBlock]
+            ancestors = self._get_ancestors(loader)
+            ancestors.reverse()
+            for ancestor in ancestors:
+                ancestor.find_named_blocks(loader, named_blocks)
+            writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template)
+            ancestors[0].generate(writer)
+            return buffer.getvalue()
+        finally:
+            buffer.close()
+
+    def _get_ancestors(self, loader: Optional["BaseLoader"]) -> List["_File"]:
+        ancestors = [self.file]
+        for chunk in self.file.body.chunks:
+            if isinstance(chunk, _ExtendsBlock):
+                if not loader:
+                    raise ParseError(
+                        "{% extends %} block found, but no " "template loader"
+                    )
+                template = loader.load(chunk.name, self.name)
+                ancestors.extend(template._get_ancestors(loader))
+        return ancestors
+
+
+class BaseLoader(object):
+    """Base class for template loaders.
+
+    You must use a template loader to use template constructs like
+    ``{% extends %}`` and ``{% include %}``. The loader caches all
+    templates after they are loaded the first time.
+    """
+
+    def __init__(
+        self,
+        autoescape: str = _DEFAULT_AUTOESCAPE,
+        namespace: Dict[str, Any] = None,
+        whitespace: str = None,
+    ) -> None:
+        """Construct a template loader.
+
+        :arg str autoescape: The name of a function in the template
+            namespace, such as "xhtml_escape", or ``None`` to disable
+            autoescaping by default.
+        :arg dict namespace: A dictionary to be added to the default template
+            namespace, or ``None``.
+        :arg str whitespace: A string specifying default behavior for
+            whitespace in templates; see `filter_whitespace` for options.
+            Default is "single" for files ending in ".html" and ".js" and
+            "all" for other files.
+
+        .. versionchanged:: 4.3
+           Added ``whitespace`` parameter.
+        """
+        self.autoescape = autoescape
+        self.namespace = namespace or {}
+        self.whitespace = whitespace
+        self.templates = {}  # type: Dict[str, Template]
+        # self.lock protects self.templates. It's a reentrant lock
+        # because templates may load other templates via `include` or
+        # `extends`. Note that thanks to the GIL this code would be safe
+        # even without the lock, but could lead to wasted work as multiple
+        # threads tried to compile the same template simultaneously.
+        self.lock = threading.RLock()
+
+    def reset(self) -> None:
+        """Resets the cache of compiled templates."""
+        with self.lock:
+            self.templates = {}
+
+    def resolve_path(self, name: str, parent_path: str = None) -> str:
+        """Converts a possibly-relative path to absolute (used internally)."""
+        raise NotImplementedError()
+
+    def load(self, name: str, parent_path: str = None) -> Template:
+        """Loads a template."""
+        name = self.resolve_path(name, parent_path=parent_path)
+        with self.lock:
+            if name not in self.templates:
+                self.templates[name] = self._create_template(name)
+            return self.templates[name]
+
+    def _create_template(self, name: str) -> Template:
+        raise NotImplementedError()
+
+
+class Loader(BaseLoader):
+    """A template loader that loads from a single root directory.
+    """
+
+    def __init__(self, root_directory: str, **kwargs: Any) -> None:
+        super(Loader, self).__init__(**kwargs)
+        self.root = os.path.abspath(root_directory)
+
+    def resolve_path(self, name: str, parent_path: str = None) -> str:
+        if (
+            parent_path
+            and not parent_path.startswith("<")
+            and not parent_path.startswith("/")
+            and not name.startswith("/")
+        ):
+            current_path = os.path.join(self.root, parent_path)
+            file_dir = os.path.dirname(os.path.abspath(current_path))
+            relative_path = os.path.abspath(os.path.join(file_dir, name))
+            if relative_path.startswith(self.root):
+                name = relative_path[len(self.root) + 1 :]
+        return name
+
+    def _create_template(self, name: str) -> Template:
+        path = os.path.join(self.root, name)
+        with open(path, "rb") as f:
+            template = Template(f.read(), name=name, loader=self)
+            return template
+
+
+class DictLoader(BaseLoader):
+    """A template loader that loads from a dictionary."""
+
+    def __init__(self, dict: Dict[str, str], **kwargs: Any) -> None:
+        super(DictLoader, self).__init__(**kwargs)
+        self.dict = dict
+
+    def resolve_path(self, name: str, parent_path: str = None) -> str:
+        if (
+            parent_path
+            and not parent_path.startswith("<")
+            and not parent_path.startswith("/")
+            and not name.startswith("/")
+        ):
+            file_dir = posixpath.dirname(parent_path)
+            name = posixpath.normpath(posixpath.join(file_dir, name))
+        return name
+
+    def _create_template(self, name: str) -> Template:
+        return Template(self.dict[name], name=name, loader=self)
+
+
+class _Node(object):
+    def each_child(self) -> Iterable["_Node"]:
+        return ()
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        raise NotImplementedError()
+
+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, "_NamedBlock"]
+    ) -> None:
+        for child in self.each_child():
+            child.find_named_blocks(loader, named_blocks)
+
+
+class _File(_Node):
+    def __init__(self, template: Template, body: "_ChunkList") -> None:
+        self.template = template
+        self.body = body
+        self.line = 0
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("def _tt_execute():", self.line)
+        with writer.indent():
+            writer.write_line("_tt_buffer = []", self.line)
+            writer.write_line("_tt_append = _tt_buffer.append", self.line)
+            self.body.generate(writer)
+            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
+
+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)
+
+
+class _ChunkList(_Node):
+    def __init__(self, chunks: List[_Node]) -> None:
+        self.chunks = chunks
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        for chunk in self.chunks:
+            chunk.generate(writer)
+
+    def each_child(self) -> Iterable["_Node"]:
+        return self.chunks
+
+
+class _NamedBlock(_Node):
+    def __init__(self, name: str, body: _Node, template: Template, line: int) -> None:
+        self.name = name
+        self.body = body
+        self.template = template
+        self.line = line
+
+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        block = writer.named_blocks[self.name]
+        with writer.include(block.template, self.line):
+            block.body.generate(writer)
+
+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, "_NamedBlock"]
+    ) -> None:
+        named_blocks[self.name] = self
+        _Node.find_named_blocks(self, loader, named_blocks)
+
+
+class _ExtendsBlock(_Node):
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+
+class _IncludeBlock(_Node):
+    def __init__(self, name: str, reader: "_TemplateReader", line: int) -> None:
+        self.name = name
+        self.template_name = reader.name
+        self.line = line
+
+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, _NamedBlock]
+    ) -> None:
+        assert loader is not None
+        included = loader.load(self.name, self.template_name)
+        included.file.find_named_blocks(loader, named_blocks)
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        assert writer.loader is not None
+        included = writer.loader.load(self.name, self.template_name)
+        with writer.include(included, self.line):
+            included.file.body.generate(writer)
+
+
+class _ApplyBlock(_Node):
+    def __init__(self, method: str, line: int, body: _Node) -> None:
+        self.method = method
+        self.line = line
+        self.body = body
+
+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        method_name = "_tt_apply%d" % writer.apply_counter
+        writer.apply_counter += 1
+        writer.write_line("def %s():" % method_name, self.line)
+        with writer.indent():
+            writer.write_line("_tt_buffer = []", self.line)
+            writer.write_line("_tt_append = _tt_buffer.append", self.line)
+            self.body.generate(writer)
+            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
+        writer.write_line(
+            "_tt_append(_tt_utf8(%s(%s())))" % (self.method, method_name), self.line
+        )
+
+
+class _ControlBlock(_Node):
+    def __init__(self, statement: str, line: int, body: _Node) -> None:
+        self.statement = statement
+        self.line = line
+        self.body = body
+
+    def each_child(self) -> Iterable[_Node]:
+        return (self.body,)
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("%s:" % self.statement, self.line)
+        with writer.indent():
+            self.body.generate(writer)
+            # Just in case the body was empty
+            writer.write_line("pass", self.line)
+
+
+class _IntermediateControlBlock(_Node):
+    def __init__(self, statement: str, line: int) -> None:
+        self.statement = statement
+        self.line = line
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        # In case the previous block was empty
+        writer.write_line("pass", self.line)
+        writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1)
+
+
+class _Statement(_Node):
+    def __init__(self, statement: str, line: int) -> None:
+        self.statement = statement
+        self.line = line
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line(self.statement, self.line)
+
+
+class _Expression(_Node):
+    def __init__(self, expression: str, line: int, raw: bool = False) -> None:
+        self.expression = expression
+        self.line = line
+        self.raw = raw
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("_tt_tmp = %s" % self.expression, self.line)
+        writer.write_line(
+            "if isinstance(_tt_tmp, _tt_string_types):" " _tt_tmp = _tt_utf8(_tt_tmp)",
+            self.line,
+        )
+        writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line)
+        if not self.raw and writer.current_template.autoescape is not None:
+            # In python3 functions like xhtml_escape return unicode,
+            # so we have to convert to utf8 again.
+            writer.write_line(
+                "_tt_tmp = _tt_utf8(%s(_tt_tmp))" % writer.current_template.autoescape,
+                self.line,
+            )
+        writer.write_line("_tt_append(_tt_tmp)", self.line)
+
+
+class _Module(_Expression):
+    def __init__(self, expression: str, line: int) -> None:
+        super(_Module, self).__init__("_tt_modules." + expression, line, raw=True)
+
+
+class _Text(_Node):
+    def __init__(self, value: str, line: int, whitespace: str) -> None:
+        self.value = value
+        self.line = line
+        self.whitespace = whitespace
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        value = self.value
+
+        # Compress whitespace if requested, with a crude heuristic to avoid
+        # altering preformatted whitespace.
+        if "<pre>" not in value:
    +            value = filter_whitespace(self.whitespace, value)
    +
    +        if value:
    +            writer.write_line("_tt_append(%r)" % escape.utf8(value), self.line)
    +
    +
    +class ParseError(Exception):
    +    """Raised for template syntax errors.
    +
    +    ``ParseError`` instances have ``filename`` and ``lineno`` attributes
    +    indicating the position of the error.
    +
    +    .. versionchanged:: 4.3
    +       Added ``filename`` and ``lineno`` attributes.
    +    """
    +
    +    def __init__(self, message: str, filename: str = None, lineno: int = 0) -> None:
    +        self.message = message
    +        # The names "filename" and "lineno" are chosen for consistency
    +        # with python SyntaxError.
    +        self.filename = filename
    +        self.lineno = lineno
    +
    +    def __str__(self) -> str:
    +        return "%s at %s:%d" % (self.message, self.filename, self.lineno)
    +
    +
    +class _CodeWriter(object):
    +    def __init__(
    +        self,
    +        file: TextIO,
    +        named_blocks: Dict[str, _NamedBlock],
    +        loader: Optional[BaseLoader],
    +        current_template: Template,
    +    ) -> None:
    +        self.file = file
    +        self.named_blocks = named_blocks
    +        self.loader = loader
    +        self.current_template = current_template
    +        self.apply_counter = 0
    +        self.include_stack = []  # type: List[Tuple[Template, int]]
    +        self._indent = 0
    +
    +    def indent_size(self) -> int:
    +        return self._indent
    +
    +    def indent(self) -> "ContextManager":
    +        class Indenter(object):
    +            def __enter__(_) -> "_CodeWriter":
    +                self._indent += 1
    +                return self
    +
    +            def __exit__(_, *args: Any) -> None:
    +                assert self._indent > 0
    +                self._indent -= 1
    +
    +        return Indenter()
    +
    +    def include(self, template: Template, line: int) -> "ContextManager":
    +        self.include_stack.append((self.current_template, line))
    +        self.current_template = template
    +
    +        class IncludeTemplate(object):
    +            def __enter__(_) -> "_CodeWriter":
    +                return self
    +
    +            def __exit__(_, *args: Any) -> None:
    +                self.current_template = self.include_stack.pop()[0]
    +
    +        return IncludeTemplate()
    +
    +    def write_line(self, line: str, line_number: int, indent: int = None) -> None:
    +        if indent is None:
    +            indent = self._indent
    +        line_comment = "  # %s:%d" % (self.current_template.name, line_number)
    +        if self.include_stack:
    +            ancestors = [
    +                "%s:%d" % (tmpl.name, lineno) for (tmpl, lineno) in self.include_stack
    +            ]
    +            line_comment += " (via %s)" % ", ".join(reversed(ancestors))
    +        print("    " * indent + line + line_comment, file=self.file)
    +
    +
    +class _TemplateReader(object):
    +    def __init__(self, name: str, text: str, whitespace: str) -> None:
    +        self.name = name
    +        self.text = text
    +        self.whitespace = whitespace
    +        self.line = 1
    +        self.pos = 0
    +
    +    def find(self, needle: str, start: int = 0, end: int = None) -> int:
    +        assert start >= 0, start
    +        pos = self.pos
    +        start += pos
    +        if end is None:
    +            index = self.text.find(needle, start)
    +        else:
    +            end += pos
    +            assert end >= start
    +            index = self.text.find(needle, start, end)
    +        if index != -1:
    +            index -= pos
    +        return index
    +
    +    def consume(self, count: int = None) -> str:
    +        if count is None:
    +            count = len(self.text) - self.pos
    +        newpos = self.pos + count
    +        self.line += self.text.count("\n", self.pos, newpos)
    +        s = self.text[self.pos : newpos]
    +        self.pos = newpos
    +        return s
    +
    +    def remaining(self) -> int:
    +        return len(self.text) - self.pos
    +
    +    def __len__(self) -> int:
    +        return self.remaining()
    +
    +    def __getitem__(self, key: Union[int, slice]) -> str:
    +        if isinstance(key, slice):
    +            size = len(self)
    +            start, stop, step = key.indices(size)
    +            if start is None:
    +                start = self.pos
    +            else:
    +                start += self.pos
    +            if stop is not None:
    +                stop += self.pos
    +            return self.text[slice(start, stop, step)]
    +        elif key < 0:
    +            return self.text[key]
    +        else:
    +            return self.text[self.pos + key]
    +
    +    def __str__(self) -> str:
    +        return self.text[self.pos :]
    +
    +    def raise_parse_error(self, msg: str) -> None:
    +        raise ParseError(msg, self.name, self.line)
    +
    +
    +def _format_code(code: str) -> str:
    +    lines = code.splitlines()
    +    format = "%%%dd  %%s\n" % len(repr(len(lines) + 1))
    +    return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
    +
    +
    +def _parse(
    +    reader: _TemplateReader,
    +    template: Template,
    +    in_block: str = None,
    +    in_loop: str = None,
    +) -> _ChunkList:
    +    body = _ChunkList([])
    +    while True:
    +        # Find next template directive
    +        curly = 0
    +        while True:
    +            curly = reader.find("{", curly)
    +            if curly == -1 or curly + 1 == reader.remaining():
    +                # EOF
    +                if in_block:
    +                    reader.raise_parse_error(
    +                        "Missing {%% end %%} block for %s" % in_block
    +                    )
    +                body.chunks.append(
    +                    _Text(reader.consume(), reader.line, reader.whitespace)
    +                )
    +                return body
    +            # If the first curly brace is not the start of a special token,
    +            # start searching from the character after it
    +            if reader[curly + 1] not in ("{", "%", "#"):
    +                curly += 1
    +                continue
    +            # When there are more than 2 curlies in a row, use the
    +            # innermost ones.  This is useful when generating languages
    +            # like latex where curlies are also meaningful
    +            if (
    +                curly + 2 < reader.remaining()
    +                and reader[curly + 1] == "{"
    +                and reader[curly + 2] == "{"
    +            ):
    +                curly += 1
    +                continue
    +            break
    +
    +        # Append any text before the special token
    +        if curly > 0:
    +            cons = reader.consume(curly)
    +            body.chunks.append(_Text(cons, reader.line, reader.whitespace))
    +
    +        start_brace = reader.consume(2)
    +        line = reader.line
    +
    +        # Template directives may be escaped as "{{!" or "{%!".
    +        # In this case output the braces and consume the "!".
    +        # This is especially useful in conjunction with jquery templates,
    +        # which also use double braces.
    +        if reader.remaining() and reader[0] == "!":
    +            reader.consume(1)
    +            body.chunks.append(_Text(start_brace, line, reader.whitespace))
    +            continue
    +
    +        # Comment
    +        if start_brace == "{#":
    +            end = reader.find("#}")
    +            if end == -1:
    +                reader.raise_parse_error("Missing end comment #}")
    +            contents = reader.consume(end).strip()
    +            reader.consume(2)
    +            continue
    +
    +        # Expression
    +        if start_brace == "{{":
    +            end = reader.find("}}")
    +            if end == -1:
    +                reader.raise_parse_error("Missing end expression }}")
    +            contents = reader.consume(end).strip()
    +            reader.consume(2)
    +            if not contents:
    +                reader.raise_parse_error("Empty expression")
    +            body.chunks.append(_Expression(contents, line))
    +            continue
    +
    +        # Block
    +        assert start_brace == "{%", start_brace
    +        end = reader.find("%}")
    +        if end == -1:
    +            reader.raise_parse_error("Missing end block %}")
    +        contents = reader.consume(end).strip()
    +        reader.consume(2)
    +        if not contents:
    +            reader.raise_parse_error("Empty block tag ({% %})")
    +
    +        operator, space, suffix = contents.partition(" ")
    +        suffix = suffix.strip()
    +
    +        # Intermediate ("else", "elif", etc) blocks
    +        intermediate_blocks = {
    +            "else": set(["if", "for", "while", "try"]),
    +            "elif": set(["if"]),
    +            "except": set(["try"]),
    +            "finally": set(["try"]),
    +        }
    +        allowed_parents = intermediate_blocks.get(operator)
    +        if allowed_parents is not None:
    +            if not in_block:
    +                reader.raise_parse_error(
    +                    "%s outside %s block" % (operator, allowed_parents)
    +                )
    +            if in_block not in allowed_parents:
    +                reader.raise_parse_error(
    +                    "%s block cannot be attached to %s block" % (operator, in_block)
    +                )
    +            body.chunks.append(_IntermediateControlBlock(contents, line))
    +            continue
    +
    +        # End tag
    +        elif operator == "end":
    +            if not in_block:
    +                reader.raise_parse_error("Extra {% end %} block")
    +            return body
    +
    +        elif operator in (
    +            "extends",
    +            "include",
    +            "set",
    +            "import",
    +            "from",
    +            "comment",
    +            "autoescape",
    +            "whitespace",
    +            "raw",
    +            "module",
    +        ):
    +            if operator == "comment":
    +                continue
    +            if operator == "extends":
    +                suffix = suffix.strip('"').strip("'")
    +                if not suffix:
    +                    reader.raise_parse_error("extends missing file path")
    +                block = _ExtendsBlock(suffix)  # type: _Node
    +            elif operator in ("import", "from"):
    +                if not suffix:
    +                    reader.raise_parse_error("import missing statement")
    +                block = _Statement(contents, line)
    +            elif operator == "include":
    +                suffix = suffix.strip('"').strip("'")
    +                if not suffix:
    +                    reader.raise_parse_error("include missing file path")
    +                block = _IncludeBlock(suffix, reader, line)
    +            elif operator == "set":
    +                if not suffix:
    +                    reader.raise_parse_error("set missing statement")
    +                block = _Statement(suffix, line)
    +            elif operator == "autoescape":
    +                fn = suffix.strip()  # type: Optional[str]
    +                if fn == "None":
    +                    fn = None
    +                template.autoescape = fn
    +                continue
    +            elif operator == "whitespace":
    +                mode = suffix.strip()
    +                # Validate the selected mode
    +                filter_whitespace(mode, "")
    +                reader.whitespace = mode
    +                continue
    +            elif operator == "raw":
    +                block = _Expression(suffix, line, raw=True)
    +            elif operator == "module":
    +                block = _Module(suffix, line)
    +            body.chunks.append(block)
    +            continue
    +
    +        elif operator in ("apply", "block", "try", "if", "for", "while"):
    +            # parse inner body recursively
    +            if operator in ("for", "while"):
    +                block_body = _parse(reader, template, operator, operator)
    +            elif operator == "apply":
    +                # apply creates a nested function so syntactically it's not
    +                # in the loop.
    +                block_body = _parse(reader, template, operator, None)
    +            else:
    +                block_body = _parse(reader, template, operator, in_loop)
    +
    +            if operator == "apply":
    +                if not suffix:
    +                    reader.raise_parse_error("apply missing method name")
    +                block = _ApplyBlock(suffix, line, block_body)
    +            elif operator == "block":
    +                if not suffix:
    +                    reader.raise_parse_error("block missing name")
    +                block = _NamedBlock(suffix, block_body, template, line)
    +            else:
    +                block = _ControlBlock(contents, line, block_body)
    +            body.chunks.append(block)
    +            continue
    +
    +        elif operator in ("break", "continue"):
    +            if not in_loop:
    +                reader.raise_parse_error(
    +                    "%s outside %s block" % (operator, set(["for", "while"]))
    +                )
    +            body.chunks.append(_Statement(contents, line))
    +            continue
    +
    +        else:
    +            reader.raise_parse_error("unknown operator: %r" % operator)
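For reference, a minimal sketch of the public API exposed by the upgraded
template.py above, based on its own docstring; the template names and
contents here are illustrative only and are not part of this patch::

    # Minimal sketch: DictLoader resolves {% extends %}/{% block %} between
    # in-memory templates; load() compiles and caches, generate() returns bytes.
    from tornado.template import DictLoader

    loader = DictLoader({
        "base.html": "<title>{% block title %}Default title{% end %}</title>",
        "page.html": '{% extends "base.html" %}'
                     "{% block title %}My page title{% end %}",
    })

    print(loader.load("page.html").generate())  # b'<title>My page title</title>'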
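Similarly, a small sketch of the three ``filter_whitespace`` modes the new
docstring documents ("all" returns text unchanged, "single" collapses runs of
spaces and tabs while keeping newlines, "oneline" collapses all whitespace
into single spaces); the sample string is made up for illustration::

    from tornado.template import filter_whitespace

    text = "a  b\t\tc\n\n  d"
    print(repr(filter_whitespace("all", text)))      # 'a  b\t\tc\n\n  d'
    print(repr(filter_whitespace("single", text)))   # 'a b c\nd'
    print(repr(filter_whitespace("oneline", text)))  # 'a b c d'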
    diff --git a/server/www/packages/packages-linux/x64/tornado/testing.py b/server/www/packages/packages-linux/x64/tornado/testing.py
    index d6e5e94..799542f 100644
    --- a/server/www/packages/packages-linux/x64/tornado/testing.py
    +++ b/server/www/packages/packages-linux/x64/tornado/testing.py
    @@ -1,724 +1,790 @@
    -"""Support classes for automated testing.
    -
    -* `AsyncTestCase` and `AsyncHTTPTestCase`:  Subclasses of unittest.TestCase
    -  with additional support for testing asynchronous (`.IOLoop`-based) code.
    -
    -* `ExpectLog`: Make test logs less spammy.
    -
    -* `main()`: A simple test runner (wrapper around unittest.main()) with support
    -  for the tornado.autoreload module to rerun the tests when code changes.
    -"""
    -
    -from __future__ import absolute_import, division, print_function
    -
    -try:
    -    from tornado import gen
    -    from tornado.httpclient import AsyncHTTPClient
    -    from tornado.httpserver import HTTPServer
    -    from tornado.simple_httpclient import SimpleAsyncHTTPClient
    -    from tornado.ioloop import IOLoop, TimeoutError
    -    from tornado import netutil
    -    from tornado.process import Subprocess
    -except ImportError:
    -    # These modules are not importable on app engine.  Parts of this module
    -    # won't work, but e.g. main() will.
    -    AsyncHTTPClient = None  # type: ignore
    -    gen = None  # type: ignore
    -    HTTPServer = None  # type: ignore
    -    IOLoop = None  # type: ignore
    -    netutil = None  # type: ignore
    -    SimpleAsyncHTTPClient = None  # type: ignore
    -    Subprocess = None  # type: ignore
    -from tornado.log import app_log
    -from tornado.stack_context import ExceptionStackContext
    -from tornado.util import raise_exc_info, basestring_type, PY3
    -import functools
    -import inspect
    -import logging
    -import os
    -import re
    -import signal
    -import socket
    -import sys
    -
    -try:
    -    import asyncio
    -except ImportError:
    -    asyncio = None
    -
    -
    -try:
    -    from collections.abc import Generator as GeneratorType  # type: ignore
    -except ImportError:
    -    from types import GeneratorType  # type: ignore
    -
    -if sys.version_info >= (3, 5):
    -    iscoroutine = inspect.iscoroutine  # type: ignore
    -    iscoroutinefunction = inspect.iscoroutinefunction  # type: ignore
    -else:
    -    iscoroutine = iscoroutinefunction = lambda f: False
    -
    -# Tornado's own test suite requires the updated unittest module
    -# (either py27+ or unittest2) so tornado.test.util enforces
    -# this requirement, but for other users of tornado.testing we want
-# to allow the older version if unittest2 is not available.
    -if PY3:
    -    # On python 3, mixing unittest2 and unittest (including doctest)
    -    # doesn't seem to work, so always use unittest.
    -    import unittest
    -else:
    -    # On python 2, prefer unittest2 when available.
    -    try:
    -        import unittest2 as unittest  # type: ignore
    -    except ImportError:
    -        import unittest  # type: ignore
    -
    -
    -if asyncio is None:
    -    _NON_OWNED_IOLOOPS = ()
    -else:
    -    import tornado.platform.asyncio
    -    _NON_OWNED_IOLOOPS = tornado.platform.asyncio.AsyncIOMainLoop
    -
    -
    -def bind_unused_port(reuse_port=False):
    -    """Binds a server socket to an available port on localhost.
    -
    -    Returns a tuple (socket, port).
    -
    -    .. versionchanged:: 4.4
    -       Always binds to ``127.0.0.1`` without resolving the name
    -       ``localhost``.
    -    """
    -    sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET,
    -                                reuse_port=reuse_port)[0]
    -    port = sock.getsockname()[1]
    -    return sock, port
    -
    -
    -def get_async_test_timeout():
    -    """Get the global timeout setting for async tests.
    -
    -    Returns a float, the timeout in seconds.
    -
    -    .. versionadded:: 3.1
    -    """
    -    try:
    -        return float(os.environ.get('ASYNC_TEST_TIMEOUT'))
    -    except (ValueError, TypeError):
    -        return 5
    -
    -
    -class _TestMethodWrapper(object):
    -    """Wraps a test method to raise an error if it returns a value.
    -
    -    This is mainly used to detect undecorated generators (if a test
    -    method yields it must use a decorator to consume the generator),
    -    but will also detect other kinds of return values (these are not
    -    necessarily errors, but we alert anyway since there is no good
    -    reason to return a value from a test).
    -    """
    -    def __init__(self, orig_method):
    -        self.orig_method = orig_method
    -
    -    def __call__(self, *args, **kwargs):
    -        result = self.orig_method(*args, **kwargs)
    -        if isinstance(result, GeneratorType) or iscoroutine(result):
    -            raise TypeError("Generator and coroutine test methods should be"
    -                            " decorated with tornado.testing.gen_test")
    -        elif result is not None:
    -            raise ValueError("Return value from test method ignored: %r" %
    -                             result)
    -
    -    def __getattr__(self, name):
    -        """Proxy all unknown attributes to the original method.
    -
    -        This is important for some of the decorators in the `unittest`
    -        module, such as `unittest.skipIf`.
    -        """
    -        return getattr(self.orig_method, name)
    -
    -
    -class AsyncTestCase(unittest.TestCase):
    -    """`~unittest.TestCase` subclass for testing `.IOLoop`-based
    -    asynchronous code.
    -
    -    The unittest framework is synchronous, so the test must be
    -    complete by the time the test method returns. This means that
    -    asynchronous code cannot be used in quite the same way as usual
    -    and must be adapted to fit. To write your tests with coroutines,
    -    decorate your test methods with `tornado.testing.gen_test` instead
    -    of `tornado.gen.coroutine`.
    -
    -    This class also provides the (deprecated) `stop()` and `wait()`
    -    methods for a more manual style of testing. The test method itself
    -    must call ``self.wait()``, and asynchronous callbacks should call
    -    ``self.stop()`` to signal completion.
    -
    -    By default, a new `.IOLoop` is constructed for each test and is available
    -    as ``self.io_loop``.  If the code being tested requires a
    -    global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
    -
    -    The `.IOLoop`'s ``start`` and ``stop`` methods should not be
-    called directly.  Instead, use `self.stop <AsyncTestCase.stop>` and
-    `self.wait <AsyncTestCase.wait>`.  Arguments passed to ``self.stop`` are returned from
    -    ``self.wait``.  It is possible to have multiple ``wait``/``stop``
    -    cycles in the same test.
    -
    -    Example::
    -
    -        # This test uses coroutine style.
    -        class MyTestCase(AsyncTestCase):
    -            @tornado.testing.gen_test
    -            def test_http_fetch(self):
    -                client = AsyncHTTPClient()
    -                response = yield client.fetch("http://www.tornadoweb.org")
    -                # Test contents of response
    -                self.assertIn("FriendFeed", response.body)
    -
    -        # This test uses argument passing between self.stop and self.wait.
    -        class MyTestCase2(AsyncTestCase):
    -            def test_http_fetch(self):
    -                client = AsyncHTTPClient()
    -                client.fetch("http://www.tornadoweb.org/", self.stop)
    -                response = self.wait()
    -                # Test contents of response
    -                self.assertIn("FriendFeed", response.body)
    -    """
    -    def __init__(self, methodName='runTest'):
    -        super(AsyncTestCase, self).__init__(methodName)
    -        self.__stopped = False
    -        self.__running = False
    -        self.__failure = None
    -        self.__stop_args = None
    -        self.__timeout = None
    -
    -        # It's easy to forget the @gen_test decorator, but if you do
    -        # the test will silently be ignored because nothing will consume
    -        # the generator.  Replace the test method with a wrapper that will
    -        # make sure it's not an undecorated generator.
    -        setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
    -
    -    def setUp(self):
    -        super(AsyncTestCase, self).setUp()
    -        self.io_loop = self.get_new_ioloop()
    -        self.io_loop.make_current()
    -
    -    def tearDown(self):
    -        # Clean up Subprocess, so it can be used again with a new ioloop.
    -        Subprocess.uninitialize()
    -        self.io_loop.clear_current()
    -        if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
    -            # Try to clean up any file descriptors left open in the ioloop.
    -            # This avoids leaks, especially when tests are run repeatedly
    -            # in the same process with autoreload (because curl does not
    -            # set FD_CLOEXEC on its file descriptors)
    -            self.io_loop.close(all_fds=True)
    -        super(AsyncTestCase, self).tearDown()
    -        # In case an exception escaped or the StackContext caught an exception
    -        # when there wasn't a wait() to re-raise it, do so here.
    -        # This is our last chance to raise an exception in a way that the
    -        # unittest machinery understands.
    -        self.__rethrow()
    -
    -    def get_new_ioloop(self):
    -        """Returns the `.IOLoop` to use for this test.
    -
    -        By default, a new `.IOLoop` is created for each test.
    -        Subclasses may override this method to return
    -        `.IOLoop.current()` if it is not appropriate to use a new
-    `.IOLoop` in each test (for example, if there are global
    -        singletons using the default `.IOLoop`) or if a per-test event
    -        loop is being provided by another system (such as
    -        ``pytest-asyncio``).
    -        """
    -        return IOLoop()
    -
    -    def _handle_exception(self, typ, value, tb):
    -        if self.__failure is None:
    -            self.__failure = (typ, value, tb)
    -        else:
    -            app_log.error("multiple unhandled exceptions in test",
    -                          exc_info=(typ, value, tb))
    -        self.stop()
    -        return True
    -
    -    def __rethrow(self):
    -        if self.__failure is not None:
    -            failure = self.__failure
    -            self.__failure = None
    -            raise_exc_info(failure)
    -
    -    def run(self, result=None):
    -        with ExceptionStackContext(self._handle_exception, delay_warning=True):
    -            super(AsyncTestCase, self).run(result)
    -        # As a last resort, if an exception escaped super.run() and wasn't
    -        # re-raised in tearDown, raise it here.  This will cause the
    -        # unittest run to fail messily, but that's better than silently
    -        # ignoring an error.
    -        self.__rethrow()
    -
    -    def stop(self, _arg=None, **kwargs):
    -        """Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
    -        to return.
    -
    -        Keyword arguments or a single positional argument passed to `stop()` are
    -        saved and will be returned by `wait()`.
    -
    -        .. deprecated:: 5.1
    -
    -           `stop` and `wait` are deprecated; use ``@gen_test`` instead.
    -        """
    -        assert _arg is None or not kwargs
    -        self.__stop_args = kwargs or _arg
    -        if self.__running:
    -            self.io_loop.stop()
    -            self.__running = False
    -        self.__stopped = True
    -
    -    def wait(self, condition=None, timeout=None):
    -        """Runs the `.IOLoop` until stop is called or timeout has passed.
    -
-        In the event of a timeout, an exception will be raised. The
    -        default timeout is 5 seconds; it may be overridden with a
    -        ``timeout`` keyword argument or globally with the
    -        ``ASYNC_TEST_TIMEOUT`` environment variable.
    -
    -        If ``condition`` is not None, the `.IOLoop` will be restarted
    -        after `stop()` until ``condition()`` returns true.
    -
    -        .. versionchanged:: 3.1
    -           Added the ``ASYNC_TEST_TIMEOUT`` environment variable.
    -
    -        .. deprecated:: 5.1
    -
    -           `stop` and `wait` are deprecated; use ``@gen_test`` instead.
    -        """
    -        if timeout is None:
    -            timeout = get_async_test_timeout()
    -
    -        if not self.__stopped:
    -            if timeout:
    -                def timeout_func():
    -                    try:
    -                        raise self.failureException(
    -                            'Async operation timed out after %s seconds' %
    -                            timeout)
    -                    except Exception:
    -                        self.__failure = sys.exc_info()
    -                    self.stop()
    -                self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout,
    -                                                          timeout_func)
    -            while True:
    -                self.__running = True
    -                self.io_loop.start()
    -                if (self.__failure is not None or
    -                        condition is None or condition()):
    -                    break
    -            if self.__timeout is not None:
    -                self.io_loop.remove_timeout(self.__timeout)
    -                self.__timeout = None
    -        assert self.__stopped
    -        self.__stopped = False
    -        self.__rethrow()
    -        result = self.__stop_args
    -        self.__stop_args = None
    -        return result
    -
    -
    -class AsyncHTTPTestCase(AsyncTestCase):
    -    """A test case that starts up an HTTP server.
    -
    -    Subclasses must override `get_app()`, which returns the
    -    `tornado.web.Application` (or other `.HTTPServer` callback) to be tested.
    -    Tests will typically use the provided ``self.http_client`` to fetch
    -    URLs from this server.
    -
    -    Example, assuming the "Hello, world" example from the user guide is in
    -    ``hello.py``::
    -
    -        import hello
    -
    -        class TestHelloApp(AsyncHTTPTestCase):
    -            def get_app(self):
    -                return hello.make_app()
    -
    -            def test_homepage(self):
    -                response = self.fetch('/')
    -                self.assertEqual(response.code, 200)
    -                self.assertEqual(response.body, 'Hello, world')
    -
    -    That call to ``self.fetch()`` is equivalent to ::
    -
    -        self.http_client.fetch(self.get_url('/'), self.stop)
    -        response = self.wait()
    -
    -    which illustrates how AsyncTestCase can turn an asynchronous operation,
    -    like ``http_client.fetch()``, into a synchronous operation. If you need
    -    to do other asynchronous operations in tests, you'll probably need to use
    -    ``stop()`` and ``wait()`` yourself.
    -    """
    -    def setUp(self):
    -        super(AsyncHTTPTestCase, self).setUp()
    -        sock, port = bind_unused_port()
    -        self.__port = port
    -
    -        self.http_client = self.get_http_client()
    -        self._app = self.get_app()
    -        self.http_server = self.get_http_server()
    -        self.http_server.add_sockets([sock])
    -
    -    def get_http_client(self):
    -        return AsyncHTTPClient()
    -
    -    def get_http_server(self):
    -        return HTTPServer(self._app, **self.get_httpserver_options())
    -
    -    def get_app(self):
    -        """Should be overridden by subclasses to return a
    -        `tornado.web.Application` or other `.HTTPServer` callback.
    -        """
    -        raise NotImplementedError()
    -
    -    def fetch(self, path, raise_error=False, **kwargs):
    -        """Convenience method to synchronously fetch a URL.
    -
    -        The given path will be appended to the local server's host and
    -        port.  Any additional kwargs will be passed directly to
    -        `.AsyncHTTPClient.fetch` (and so could be used to pass
    -        ``method="POST"``, ``body="..."``, etc).
    -
    -        If the path begins with http:// or https://, it will be treated as a
    -        full URL and will be fetched as-is.
    -
    -        If ``raise_error`` is True, a `tornado.httpclient.HTTPError` will
    -        be raised if the response code is not 200. This is the same behavior
    -        as the ``raise_error`` argument to `.AsyncHTTPClient.fetch`, but
    -        the default is False here (it's True in `.AsyncHTTPClient`) because
    -        tests often need to deal with non-200 response codes.
    -
    -        .. versionchanged:: 5.0
    -           Added support for absolute URLs.
    -
    -        .. versionchanged:: 5.1
    -
    -           Added the ``raise_error`` argument.
    -
    -        .. deprecated:: 5.1
    -
    -           This method currently turns any exception into an
    -           `.HTTPResponse` with status code 599. In Tornado 6.0,
    -           errors other than `tornado.httpclient.HTTPError` will be
    -           passed through, and ``raise_error=False`` will only
    -           suppress errors that would be raised due to non-200
    -           response codes.
    -
    -        """
    -        if path.lower().startswith(('http://', 'https://')):
    -            url = path
    -        else:
    -            url = self.get_url(path)
    -        return self.io_loop.run_sync(
    -            lambda: self.http_client.fetch(url, raise_error=raise_error, **kwargs),
    -            timeout=get_async_test_timeout())
    -
    -    def get_httpserver_options(self):
    -        """May be overridden by subclasses to return additional
    -        keyword arguments for the server.
    -        """
    -        return {}
    -
    -    def get_http_port(self):
    -        """Returns the port used by the server.
    -
    -        A new port is chosen for each test.
    -        """
    -        return self.__port
    -
    -    def get_protocol(self):
    -        return 'http'
    -
    -    def get_url(self, path):
    -        """Returns an absolute url for the given path on the test server."""
    -        return '%s://127.0.0.1:%s%s' % (self.get_protocol(),
    -                                        self.get_http_port(), path)
    -
    -    def tearDown(self):
    -        self.http_server.stop()
    -        self.io_loop.run_sync(self.http_server.close_all_connections,
    -                              timeout=get_async_test_timeout())
    -        self.http_client.close()
    -        super(AsyncHTTPTestCase, self).tearDown()
    -
    -
    -class AsyncHTTPSTestCase(AsyncHTTPTestCase):
    -    """A test case that starts an HTTPS server.
    -
    -    Interface is generally the same as `AsyncHTTPTestCase`.
    -    """
    -    def get_http_client(self):
    -        return AsyncHTTPClient(force_instance=True,
    -                               defaults=dict(validate_cert=False))
    -
    -    def get_httpserver_options(self):
    -        return dict(ssl_options=self.get_ssl_options())
    -
    -    def get_ssl_options(self):
    -        """May be overridden by subclasses to select SSL options.
    -
    -        By default includes a self-signed testing certificate.
    -        """
    -        # Testing keys were generated with:
    -        # openssl req -new -keyout tornado/test/test.key \
    -        #                     -out tornado/test/test.crt -nodes -days 3650 -x509
    -        module_dir = os.path.dirname(__file__)
    -        return dict(
    -            certfile=os.path.join(module_dir, 'test', 'test.crt'),
    -            keyfile=os.path.join(module_dir, 'test', 'test.key'))
    -
    -    def get_protocol(self):
    -        return 'https'
    -
    -
    -def gen_test(func=None, timeout=None):
    -    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.
    -
    -    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    -    already running.  ``@gen_test`` should be applied to test methods
    -    on subclasses of `AsyncTestCase`.
    -
    -    Example::
    -
    -        class MyTest(AsyncHTTPTestCase):
    -            @gen_test
    -            def test_something(self):
    -                response = yield self.http_client.fetch(self.get_url('/'))
    -
    -    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    -    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    -    or for each test with the ``timeout`` keyword argument::
    -
    -        class MyTest(AsyncHTTPTestCase):
    -            @gen_test(timeout=10)
    -            def test_something_slow(self):
    -                response = yield self.http_client.fetch(self.get_url('/'))
    -
    -    Note that ``@gen_test`` is incompatible with `AsyncTestCase.stop`,
    -    `AsyncTestCase.wait`, and `AsyncHTTPTestCase.fetch`. Use ``yield
    -    self.http_client.fetch(self.get_url())`` as shown above instead.
    -
    -    .. versionadded:: 3.1
    -       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
    -       variable.
    -
    -    .. versionchanged:: 4.0
    -       The wrapper now passes along ``*args, **kwargs`` so it can be used
    -       on functions with arguments.
    -
    -    """
    -    if timeout is None:
    -        timeout = get_async_test_timeout()
    -
    -    def wrap(f):
    -        # Stack up several decorators to allow us to access the generator
    -        # object itself.  In the innermost wrapper, we capture the generator
    -        # and save it in an attribute of self.  Next, we run the wrapped
    -        # function through @gen.coroutine.  Finally, the coroutine is
    -        # wrapped again to make it synchronous with run_sync.
    -        #
    -        # This is a good case study arguing for either some sort of
    -        # extensibility in the gen decorators or cancellation support.
    -        @functools.wraps(f)
    -        def pre_coroutine(self, *args, **kwargs):
    -            result = f(self, *args, **kwargs)
    -            if isinstance(result, GeneratorType) or iscoroutine(result):
    -                self._test_generator = result
    -            else:
    -                self._test_generator = None
    -            return result
    -
    -        if iscoroutinefunction(f):
    -            coro = pre_coroutine
    -        else:
    -            coro = gen.coroutine(pre_coroutine)
    -
    -        @functools.wraps(coro)
    -        def post_coroutine(self, *args, **kwargs):
    -            try:
    -                return self.io_loop.run_sync(
    -                    functools.partial(coro, self, *args, **kwargs),
    -                    timeout=timeout)
    -            except TimeoutError as e:
    -                # run_sync raises an error with an unhelpful traceback.
    -                # If the underlying generator is still running, we can throw the
    -                # exception back into it so the stack trace is replaced by the
    -                # point where the test is stopped. The only reason the generator
    -                # would not be running would be if it were cancelled, which means
    -                # a native coroutine, so we can rely on the cr_running attribute.
    -                if getattr(self._test_generator, 'cr_running', True):
    -                    self._test_generator.throw(e)
    -                    # In case the test contains an overly broad except
    -                    # clause, we may get back here.
    -                # Coroutine was stopped or didn't raise a useful stack trace,
    -                # so re-raise the original exception which is better than nothing.
    -                raise
    -        return post_coroutine
    -
    -    if func is not None:
    -        # Used like:
    -        #     @gen_test
    -        #     def f(self):
    -        #         pass
    -        return wrap(func)
    -    else:
    -        # Used like @gen_test(timeout=10)
    -        return wrap
    -
    -
    -# Without this attribute, nosetests will try to run gen_test as a test
    -# anywhere it is imported.
    -gen_test.__test__ = False  # type: ignore
    -
    -
    -class ExpectLog(logging.Filter):
    -    """Context manager to capture and suppress expected log output.
    -
    -    Useful to make tests of error conditions less noisy, while still
    -    leaving unexpected log entries visible.  *Not thread safe.*
    -
    -    The attribute ``logged_stack`` is set to true if any exception
    -    stack trace was logged.
    -
    -    Usage::
    -
    -        with ExpectLog('tornado.application', "Uncaught exception"):
    -            error_response = self.fetch("/some_page")
    -
    -    .. versionchanged:: 4.3
    -       Added the ``logged_stack`` attribute.
    -    """
    -    def __init__(self, logger, regex, required=True):
    -        """Constructs an ExpectLog context manager.
    -
    -        :param logger: Logger object (or name of logger) to watch.  Pass
    -            an empty string to watch the root logger.
    -        :param regex: Regular expression to match.  Any log entries on
    -            the specified logger that match this regex will be suppressed.
    -        :param required: If true, an exception will be raised if the end of
    -            the ``with`` statement is reached without matching any log entries.
    -        """
    -        if isinstance(logger, basestring_type):
    -            logger = logging.getLogger(logger)
    -        self.logger = logger
    -        self.regex = re.compile(regex)
    -        self.required = required
    -        self.matched = False
    -        self.logged_stack = False
    -
    -    def filter(self, record):
    -        if record.exc_info:
    -            self.logged_stack = True
    -        message = record.getMessage()
    -        if self.regex.match(message):
    -            self.matched = True
    -            return False
    -        return True
    -
    -    def __enter__(self):
    -        self.logger.addFilter(self)
    -        return self
    -
    -    def __exit__(self, typ, value, tb):
    -        self.logger.removeFilter(self)
    -        if not typ and self.required and not self.matched:
    -            raise Exception("did not get expected log message")
    -
    -
    -def main(**kwargs):
    -    """A simple test runner.
    -
    -    This test runner is essentially equivalent to `unittest.main` from
    -    the standard library, but adds support for tornado-style option
    -    parsing and log formatting. It is *not* necessary to use this
    -    `main` function to run tests using `AsyncTestCase`; these tests
    -    are self-contained and can run with any test runner.
    -
    -    The easiest way to run a test is via the command line::
    -
    -        python -m tornado.testing tornado.test.stack_context_test
    -
    -    See the standard library unittest module for ways in which tests can
    -    be specified.
    -
    -    Projects with many tests may wish to define a test script like
    -    ``tornado/test/runtests.py``.  This script should define a method
    -    ``all()`` which returns a test suite and then call
    -    `tornado.testing.main()`.  Note that even when a test script is
    -    used, the ``all()`` test suite may be overridden by naming a
    -    single test on the command line::
    -
    -        # Runs all tests
    -        python -m tornado.test.runtests
    -        # Runs one test
    -        python -m tornado.test.runtests tornado.test.stack_context_test
    -
-    Additional keyword arguments are passed through to ``unittest.main()``.
    -    For example, use ``tornado.testing.main(verbosity=2)``
    -    to show many test details as they are run.
    -    See http://docs.python.org/library/unittest.html#unittest.main
    -    for full argument list.
    -
    -    .. versionchanged:: 5.0
    -
    -       This function produces no output of its own; only that produced
    -       by the `unittest` module (Previously it would add a PASS or FAIL
    -       log message).
    -    """
    -    from tornado.options import define, options, parse_command_line
    -
    -    define('exception_on_interrupt', type=bool, default=True,
    -           help=("If true (default), ctrl-c raises a KeyboardInterrupt "
    -                 "exception.  This prints a stack trace but cannot interrupt "
    -                 "certain operations.  If false, the process is more reliably "
    -                 "killed, but does not print a stack trace."))
    -
    -    # support the same options as unittest's command-line interface
    -    define('verbose', type=bool)
    -    define('quiet', type=bool)
    -    define('failfast', type=bool)
    -    define('catch', type=bool)
    -    define('buffer', type=bool)
    -
    -    argv = [sys.argv[0]] + parse_command_line(sys.argv)
    -
    -    if not options.exception_on_interrupt:
    -        signal.signal(signal.SIGINT, signal.SIG_DFL)
    -
    -    if options.verbose is not None:
    -        kwargs['verbosity'] = 2
    -    if options.quiet is not None:
    -        kwargs['verbosity'] = 0
    -    if options.failfast is not None:
    -        kwargs['failfast'] = True
    -    if options.catch is not None:
    -        kwargs['catchbreak'] = True
    -    if options.buffer is not None:
    -        kwargs['buffer'] = True
    -
    -    if __name__ == '__main__' and len(argv) == 1:
    -        print("No tests specified", file=sys.stderr)
    -        sys.exit(1)
    -    # In order to be able to run tests by their fully-qualified name
    -    # on the command line without importing all tests here,
    -    # module must be set to None.  Python 3.2's unittest.main ignores
    -    # defaultTest if no module is given (it tries to do its own
    -    # test discovery, which is incompatible with auto2to3), so don't
    -    # set module if we're not asking for a specific test.
    -    if len(argv) > 1:
    -        unittest.main(module=None, argv=argv, **kwargs)
    -    else:
    -        unittest.main(defaultTest="all", argv=argv, **kwargs)
    -
    -
    -if __name__ == '__main__':
    -    main()
    +"""Support classes for automated testing.
    +
    +* `AsyncTestCase` and `AsyncHTTPTestCase`:  Subclasses of unittest.TestCase
    +  with additional support for testing asynchronous (`.IOLoop`-based) code.
    +
    +* `ExpectLog`: Make test logs less spammy.
    +
    +* `main()`: A simple test runner (wrapper around unittest.main()) with support
    +  for the tornado.autoreload module to rerun the tests when code changes.
    +"""
    +
    +import asyncio
    +from collections.abc import Generator
    +import functools
    +import inspect
    +import logging
    +import os
    +import re
    +import signal
    +import socket
    +import sys
    +import unittest
    +
    +from tornado import gen
    +from tornado.httpclient import AsyncHTTPClient, HTTPResponse
    +from tornado.httpserver import HTTPServer
    +from tornado.ioloop import IOLoop, TimeoutError
    +from tornado import netutil
    +from tornado.platform.asyncio import AsyncIOMainLoop
    +from tornado.process import Subprocess
    +from tornado.log import app_log
    +from tornado.util import raise_exc_info, basestring_type
    +from tornado.web import Application
    +
    +import typing
    +from typing import Tuple, Any, Callable, Type, Dict, Union, Optional
    +from types import TracebackType
    +
    +if typing.TYPE_CHECKING:
    +    # Coroutine wasn't added to typing until 3.5.3, so only import it
    +    # when mypy is running and use forward references.
    +    from typing import Coroutine  # noqa: F401
    +
    +    _ExcInfoTuple = Tuple[
    +        Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
    +    ]
    +
    +
    +_NON_OWNED_IOLOOPS = AsyncIOMainLoop
    +
    +
    +def bind_unused_port(reuse_port: bool = False) -> Tuple[socket.socket, int]:
    +    """Binds a server socket to an available port on localhost.
    +
    +    Returns a tuple (socket, port).
    +
    +    .. versionchanged:: 4.4
    +       Always binds to ``127.0.0.1`` without resolving the name
    +       ``localhost``.
    +    """
    +    sock = netutil.bind_sockets(
    +        0, "127.0.0.1", family=socket.AF_INET, reuse_port=reuse_port
    +    )[0]
    +    port = sock.getsockname()[1]
    +    return sock, port
    +
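+# A minimal usage sketch for ``bind_unused_port`` (the application and
+# server below are illustrative, not part of this module):
+#
+#     sock, port = bind_unused_port()
+#     server = HTTPServer(my_app)  # ``my_app`` is a hypothetical Application
+#     server.add_sockets([sock])
+#     url = "http://127.0.0.1:%d/" % port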
    +
    +def get_async_test_timeout() -> float:
    +    """Get the global timeout setting for async tests.
    +
    +    Returns a float, the timeout in seconds.
    +
    +    .. versionadded:: 3.1
    +    """
    +    env = os.environ.get("ASYNC_TEST_TIMEOUT")
    +    if env is not None:
    +        try:
    +            return float(env)
    +        except ValueError:
    +            pass
    +    return 5
    +
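+# The global timeout can be raised for slow environments before any test
+# runs; a sketch (the value chosen here is arbitrary):
+#
+#     os.environ["ASYNC_TEST_TIMEOUT"] = "15"  # parsed as float seconds
+#     assert get_async_test_timeout() == 15.0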
    +
    +class _TestMethodWrapper(object):
    +    """Wraps a test method to raise an error if it returns a value.
    +
    +    This is mainly used to detect undecorated generators (if a test
    +    method yields it must use a decorator to consume the generator),
    +    but will also detect other kinds of return values (these are not
    +    necessarily errors, but we alert anyway since there is no good
    +    reason to return a value from a test).
    +    """
    +
    +    def __init__(self, orig_method: Callable) -> None:
    +        self.orig_method = orig_method
    +
    +    def __call__(self, *args: Any, **kwargs: Any) -> None:
    +        result = self.orig_method(*args, **kwargs)
    +        if isinstance(result, Generator) or inspect.iscoroutine(result):
    +            raise TypeError(
    +                "Generator and coroutine test methods should be"
    +                " decorated with tornado.testing.gen_test"
    +            )
    +        elif result is not None:
    +            raise ValueError("Return value from test method ignored: %r" % result)
    +
    +    def __getattr__(self, name: str) -> Any:
    +        """Proxy all unknown attributes to the original method.
    +
    +        This is important for some of the decorators in the `unittest`
    +        module, such as `unittest.skipIf`.
    +        """
    +        return getattr(self.orig_method, name)
    +
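+# Sketch of the failure mode this wrapper guards against (the test class
+# is hypothetical):
+#
+#     class BadTest(AsyncTestCase):
+#         def test_sleep(self):      # missing @gen_test, so the yielded
+#             yield gen.sleep(1)     # generator is never consumed; the
+#                                    # wrapper raises TypeError instead of
+#                                    # silently passing the test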
    +
    +class AsyncTestCase(unittest.TestCase):
    +    """`~unittest.TestCase` subclass for testing `.IOLoop`-based
    +    asynchronous code.
    +
    +    The unittest framework is synchronous, so the test must be
    +    complete by the time the test method returns. This means that
    +    asynchronous code cannot be used in quite the same way as usual
    +    and must be adapted to fit. To write your tests with coroutines,
    +    decorate your test methods with `tornado.testing.gen_test` instead
    +    of `tornado.gen.coroutine`.
    +
    +    This class also provides the (deprecated) `stop()` and `wait()`
    +    methods for a more manual style of testing. The test method itself
    +    must call ``self.wait()``, and asynchronous callbacks should call
    +    ``self.stop()`` to signal completion.
    +
    +    By default, a new `.IOLoop` is constructed for each test and is available
    +    as ``self.io_loop``.  If the code being tested requires a
    +    global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
    +
    +    The `.IOLoop`'s ``start`` and ``stop`` methods should not be
+    called directly.  Instead, use `self.stop <AsyncTestCase.stop>` and
+    `self.wait <AsyncTestCase.wait>`.  Arguments passed to ``self.stop`` are returned from
    +    ``self.wait``.  It is possible to have multiple ``wait``/``stop``
    +    cycles in the same test.
    +
    +    Example::
    +
    +        # This test uses coroutine style.
    +        class MyTestCase(AsyncTestCase):
    +            @tornado.testing.gen_test
    +            def test_http_fetch(self):
    +                client = AsyncHTTPClient()
    +                response = yield client.fetch("http://www.tornadoweb.org")
    +                # Test contents of response
    +                self.assertIn("FriendFeed", response.body)
    +
    +        # This test uses argument passing between self.stop and self.wait.
    +        class MyTestCase2(AsyncTestCase):
    +            def test_http_fetch(self):
    +                client = AsyncHTTPClient()
    +                client.fetch("http://www.tornadoweb.org/", self.stop)
    +                response = self.wait()
    +                # Test contents of response
    +                self.assertIn("FriendFeed", response.body)
    +    """
    +
    +    def __init__(self, methodName: str = "runTest") -> None:
    +        super(AsyncTestCase, self).__init__(methodName)
    +        self.__stopped = False
    +        self.__running = False
    +        self.__failure = None  # type: Optional[_ExcInfoTuple]
    +        self.__stop_args = None  # type: Any
    +        self.__timeout = None  # type: Optional[object]
    +
    +        # It's easy to forget the @gen_test decorator, but if you do
    +        # the test will silently be ignored because nothing will consume
    +        # the generator.  Replace the test method with a wrapper that will
    +        # make sure it's not an undecorated generator.
    +        setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
    +
    +        # Not used in this class itself, but used by @gen_test
    +        self._test_generator = None  # type: Optional[Union[Generator, Coroutine]]
    +
    +    def setUp(self) -> None:
    +        super(AsyncTestCase, self).setUp()
    +        self.io_loop = self.get_new_ioloop()
    +        self.io_loop.make_current()
    +
    +    def tearDown(self) -> None:
    +        # Native coroutines tend to produce warnings if they're not
    +        # allowed to run to completion. It's difficult to ensure that
    +        # this always happens in tests, so cancel any tasks that are
    +        # still pending by the time we get here.
    +        asyncio_loop = self.io_loop.asyncio_loop  # type: ignore
    +        if hasattr(asyncio, "all_tasks"):  # py37
    +            tasks = asyncio.all_tasks(asyncio_loop)  # type: ignore
    +        else:
    +            tasks = asyncio.Task.all_tasks(asyncio_loop)
    +        # Tasks that are done may still appear here and may contain
    +        # non-cancellation exceptions, so filter them out.
    +        tasks = [t for t in tasks if not t.done()]
    +        for t in tasks:
    +            t.cancel()
    +        # Allow the tasks to run and finalize themselves (which means
    +        # raising a CancelledError inside the coroutine). This may
    +        # just transform the "task was destroyed but it is pending"
    +        # warning into a "uncaught CancelledError" warning, but
    +        # catching CancelledErrors in coroutines that may leak is
    +        # simpler than ensuring that no coroutines leak.
    +        if tasks:
    +            done, pending = self.io_loop.run_sync(lambda: asyncio.wait(tasks))
    +            assert not pending
    +            # If any task failed with anything but a CancelledError, raise it.
    +            for f in done:
    +                try:
    +                    f.result()
    +                except asyncio.CancelledError:
    +                    pass
    +
    +        # Clean up Subprocess, so it can be used again with a new ioloop.
    +        Subprocess.uninitialize()
    +        self.io_loop.clear_current()
    +        if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
    +            # Try to clean up any file descriptors left open in the ioloop.
    +            # This avoids leaks, especially when tests are run repeatedly
    +            # in the same process with autoreload (because curl does not
    +            # set FD_CLOEXEC on its file descriptors)
    +            self.io_loop.close(all_fds=True)
    +        super(AsyncTestCase, self).tearDown()
    +        # In case an exception escaped or the StackContext caught an exception
    +        # when there wasn't a wait() to re-raise it, do so here.
    +        # This is our last chance to raise an exception in a way that the
    +        # unittest machinery understands.
    +        self.__rethrow()
    +
    +    def get_new_ioloop(self) -> IOLoop:
    +        """Returns the `.IOLoop` to use for this test.
    +
    +        By default, a new `.IOLoop` is created for each test.
    +        Subclasses may override this method to return
    +        `.IOLoop.current()` if it is not appropriate to use a new
+        `.IOLoop` in each test (for example, if there are global
    +        singletons using the default `.IOLoop`) or if a per-test event
    +        loop is being provided by another system (such as
    +        ``pytest-asyncio``).
    +        """
    +        return IOLoop()
    +
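+    # Sketch of a ``get_new_ioloop`` override for code that depends on a
+    # shared global loop (the subclass name is hypothetical):
+    #
+    #     class SingletonLoopTest(AsyncTestCase):
+    #         def get_new_ioloop(self):
+    #             return IOLoop.current()
+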
    +    def _handle_exception(
    +        self, typ: Type[Exception], value: Exception, tb: TracebackType
    +    ) -> bool:
    +        if self.__failure is None:
    +            self.__failure = (typ, value, tb)
    +        else:
    +            app_log.error(
    +                "multiple unhandled exceptions in test", exc_info=(typ, value, tb)
    +            )
    +        self.stop()
    +        return True
    +
    +    def __rethrow(self) -> None:
    +        if self.__failure is not None:
    +            failure = self.__failure
    +            self.__failure = None
    +            raise_exc_info(failure)
    +
+    def run(
+        self, result: Optional[unittest.TestResult] = None
+    ) -> Optional[unittest.TestResult]:
    +        ret = super(AsyncTestCase, self).run(result)
    +        # As a last resort, if an exception escaped super.run() and wasn't
    +        # re-raised in tearDown, raise it here.  This will cause the
    +        # unittest run to fail messily, but that's better than silently
    +        # ignoring an error.
    +        self.__rethrow()
    +        return ret
    +
    +    def stop(self, _arg: Any = None, **kwargs: Any) -> None:
    +        """Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
    +        to return.
    +
    +        Keyword arguments or a single positional argument passed to `stop()` are
    +        saved and will be returned by `wait()`.
    +
    +        .. deprecated:: 5.1
    +
    +           `stop` and `wait` are deprecated; use ``@gen_test`` instead.
    +        """
    +        assert _arg is None or not kwargs
    +        self.__stop_args = kwargs or _arg
    +        if self.__running:
    +            self.io_loop.stop()
    +            self.__running = False
    +        self.__stopped = True
    +
+    def wait(
+        self,
+        condition: Optional[Callable[..., bool]] = None,
+        timeout: Optional[float] = None,
+    ) -> Any:
    +        """Runs the `.IOLoop` until stop is called or timeout has passed.
    +
+        In the event of a timeout, an exception will be raised. The
    +        default timeout is 5 seconds; it may be overridden with a
    +        ``timeout`` keyword argument or globally with the
    +        ``ASYNC_TEST_TIMEOUT`` environment variable.
    +
    +        If ``condition`` is not ``None``, the `.IOLoop` will be restarted
    +        after `stop()` until ``condition()`` returns ``True``.
    +
    +        .. versionchanged:: 3.1
    +           Added the ``ASYNC_TEST_TIMEOUT`` environment variable.
    +
    +        .. deprecated:: 5.1
    +
    +           `stop` and `wait` are deprecated; use ``@gen_test`` instead.
    +        """
    +        if timeout is None:
    +            timeout = get_async_test_timeout()
    +
    +        if not self.__stopped:
    +            if timeout:
    +
    +                def timeout_func() -> None:
    +                    try:
    +                        raise self.failureException(
    +                            "Async operation timed out after %s seconds" % timeout
    +                        )
    +                    except Exception:
    +                        self.__failure = sys.exc_info()
    +                    self.stop()
    +
    +                self.__timeout = self.io_loop.add_timeout(
    +                    self.io_loop.time() + timeout, timeout_func
    +                )
    +            while True:
    +                self.__running = True
    +                self.io_loop.start()
    +                if self.__failure is not None or condition is None or condition():
    +                    break
    +            if self.__timeout is not None:
    +                self.io_loop.remove_timeout(self.__timeout)
    +                self.__timeout = None
    +        assert self.__stopped
    +        self.__stopped = False
    +        self.__rethrow()
    +        result = self.__stop_args
    +        self.__stop_args = None
    +        return result
    +
    +
    +class AsyncHTTPTestCase(AsyncTestCase):
    +    """A test case that starts up an HTTP server.
    +
    +    Subclasses must override `get_app()`, which returns the
    +    `tornado.web.Application` (or other `.HTTPServer` callback) to be tested.
    +    Tests will typically use the provided ``self.http_client`` to fetch
    +    URLs from this server.
    +
    +    Example, assuming the "Hello, world" example from the user guide is in
    +    ``hello.py``::
    +
    +        import hello
    +
    +        class TestHelloApp(AsyncHTTPTestCase):
    +            def get_app(self):
    +                return hello.make_app()
    +
    +            def test_homepage(self):
    +                response = self.fetch('/')
    +                self.assertEqual(response.code, 200)
+                self.assertEqual(response.body, b'Hello, world')
    +
    +    That call to ``self.fetch()`` is equivalent to ::
    +
    +        self.http_client.fetch(self.get_url('/'), self.stop)
    +        response = self.wait()
    +
    +    which illustrates how AsyncTestCase can turn an asynchronous operation,
    +    like ``http_client.fetch()``, into a synchronous operation. If you need
    +    to do other asynchronous operations in tests, you'll probably need to use
    +    ``stop()`` and ``wait()`` yourself.
    +    """
    +
    +    def setUp(self) -> None:
    +        super(AsyncHTTPTestCase, self).setUp()
    +        sock, port = bind_unused_port()
    +        self.__port = port
    +
    +        self.http_client = self.get_http_client()
    +        self._app = self.get_app()
    +        self.http_server = self.get_http_server()
    +        self.http_server.add_sockets([sock])
    +
    +    def get_http_client(self) -> AsyncHTTPClient:
    +        return AsyncHTTPClient()
    +
    +    def get_http_server(self) -> HTTPServer:
    +        return HTTPServer(self._app, **self.get_httpserver_options())
    +
    +    def get_app(self) -> Application:
    +        """Should be overridden by subclasses to return a
    +        `tornado.web.Application` or other `.HTTPServer` callback.
    +        """
    +        raise NotImplementedError()
    +
    +    def fetch(
    +        self, path: str, raise_error: bool = False, **kwargs: Any
    +    ) -> HTTPResponse:
    +        """Convenience method to synchronously fetch a URL.
    +
    +        The given path will be appended to the local server's host and
    +        port.  Any additional keyword arguments will be passed directly to
    +        `.AsyncHTTPClient.fetch` (and so could be used to pass
    +        ``method="POST"``, ``body="..."``, etc).
    +
    +        If the path begins with http:// or https://, it will be treated as a
    +        full URL and will be fetched as-is.
    +
    +        If ``raise_error`` is ``True``, a `tornado.httpclient.HTTPError` will
    +        be raised if the response code is not 200. This is the same behavior
    +        as the ``raise_error`` argument to `.AsyncHTTPClient.fetch`, but
    +        the default is ``False`` here (it's ``True`` in `.AsyncHTTPClient`)
    +        because tests often need to deal with non-200 response codes.
    +
    +        .. versionchanged:: 5.0
    +           Added support for absolute URLs.
    +
    +        .. versionchanged:: 5.1
    +
    +           Added the ``raise_error`` argument.
    +
    +        .. deprecated:: 5.1
    +
    +           This method currently turns any exception into an
    +           `.HTTPResponse` with status code 599. In Tornado 6.0,
    +           errors other than `tornado.httpclient.HTTPError` will be
    +           passed through, and ``raise_error=False`` will only
    +           suppress errors that would be raised due to non-200
    +           response codes.
    +
    +        """
    +        if path.lower().startswith(("http://", "https://")):
    +            url = path
    +        else:
    +            url = self.get_url(path)
    +        return self.io_loop.run_sync(
    +            lambda: self.http_client.fetch(url, raise_error=raise_error, **kwargs),
    +            timeout=get_async_test_timeout(),
    +        )
    +
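+    # How ``raise_error`` plays out in a test, sketched (the path and the
+    # handler behind it are hypothetical):
+    #
+    #     response = self.fetch("/missing")     # no exception on a 404;
+    #     self.assertEqual(response.code, 404)  # inspect the code instead
+    #     with self.assertRaises(tornado.httpclient.HTTPError):
+    #         self.fetch("/missing", raise_error=True)
+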
    +    def get_httpserver_options(self) -> Dict[str, Any]:
    +        """May be overridden by subclasses to return additional
    +        keyword arguments for the server.
    +        """
    +        return {}
    +
    +    def get_http_port(self) -> int:
    +        """Returns the port used by the server.
    +
    +        A new port is chosen for each test.
    +        """
    +        return self.__port
    +
    +    def get_protocol(self) -> str:
    +        return "http"
    +
    +    def get_url(self, path: str) -> str:
    +        """Returns an absolute url for the given path on the test server."""
    +        return "%s://127.0.0.1:%s%s" % (self.get_protocol(), self.get_http_port(), path)
    +
    +    def tearDown(self) -> None:
    +        self.http_server.stop()
    +        self.io_loop.run_sync(
    +            self.http_server.close_all_connections, timeout=get_async_test_timeout()
    +        )
    +        self.http_client.close()
    +        del self.http_server
    +        del self._app
    +        super(AsyncHTTPTestCase, self).tearDown()
    +
    +
    +class AsyncHTTPSTestCase(AsyncHTTPTestCase):
    +    """A test case that starts an HTTPS server.
    +
    +    Interface is generally the same as `AsyncHTTPTestCase`.
    +    """
    +
    +    def get_http_client(self) -> AsyncHTTPClient:
    +        return AsyncHTTPClient(force_instance=True, defaults=dict(validate_cert=False))
    +
    +    def get_httpserver_options(self) -> Dict[str, Any]:
    +        return dict(ssl_options=self.get_ssl_options())
    +
    +    def get_ssl_options(self) -> Dict[str, Any]:
    +        """May be overridden by subclasses to select SSL options.
    +
    +        By default includes a self-signed testing certificate.
    +        """
    +        return AsyncHTTPSTestCase.default_ssl_options()
    +
    +    @staticmethod
    +    def default_ssl_options() -> Dict[str, Any]:
    +        # Testing keys were generated with:
    +        # openssl req -new -keyout tornado/test/test.key \
    +        #                     -out tornado/test/test.crt -nodes -days 3650 -x509
    +        module_dir = os.path.dirname(__file__)
    +        return dict(
    +            certfile=os.path.join(module_dir, "test", "test.crt"),
    +            keyfile=os.path.join(module_dir, "test", "test.key"),
    +        )
    +
    +    def get_protocol(self) -> str:
    +        return "https"
    +
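+# Overriding ``get_ssl_options`` to test with a project-specific
+# certificate, sketched (the file paths are hypothetical):
+#
+#     class MyHTTPSTest(AsyncHTTPSTestCase):
+#         def get_ssl_options(self):
+#             return dict(certfile="certs/test.crt",
+#                         keyfile="certs/test.key")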
    +
    +@typing.overload
    +def gen_test(
+    *, timeout: Optional[float] = None
    +) -> Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]]:
    +    pass
    +
    +
    +@typing.overload  # noqa: F811
    +def gen_test(func: Callable[..., Union[Generator, "Coroutine"]]) -> Callable[..., None]:
    +    pass
    +
    +
    +def gen_test(  # noqa: F811
+    func: Optional[Callable[..., Union[Generator, "Coroutine"]]] = None,
+    timeout: Optional[float] = None,
    +) -> Union[
    +    Callable[..., None],
    +    Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]],
    +]:
    +    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.
    +
    +    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    +    already running.  ``@gen_test`` should be applied to test methods
    +    on subclasses of `AsyncTestCase`.
    +
    +    Example::
    +
    +        class MyTest(AsyncHTTPTestCase):
    +            @gen_test
    +            def test_something(self):
    +                response = yield self.http_client.fetch(self.get_url('/'))
    +
    +    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    +    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    +    or for each test with the ``timeout`` keyword argument::
    +
    +        class MyTest(AsyncHTTPTestCase):
    +            @gen_test(timeout=10)
    +            def test_something_slow(self):
    +                response = yield self.http_client.fetch(self.get_url('/'))
    +
    +    Note that ``@gen_test`` is incompatible with `AsyncTestCase.stop`,
+    `AsyncTestCase.wait`, and `AsyncHTTPTestCase.fetch`. Use ``yield
+    self.http_client.fetch(self.get_url('/'))`` as shown above instead.
    +
    +    .. versionadded:: 3.1
    +       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
    +       variable.
    +
    +    .. versionchanged:: 4.0
    +       The wrapper now passes along ``*args, **kwargs`` so it can be used
    +       on functions with arguments.
    +
    +    """
    +    if timeout is None:
    +        timeout = get_async_test_timeout()
    +
    +    def wrap(f: Callable[..., Union[Generator, "Coroutine"]]) -> Callable[..., None]:
    +        # Stack up several decorators to allow us to access the generator
    +        # object itself.  In the innermost wrapper, we capture the generator
    +        # and save it in an attribute of self.  Next, we run the wrapped
    +        # function through @gen.coroutine.  Finally, the coroutine is
    +        # wrapped again to make it synchronous with run_sync.
    +        #
    +        # This is a good case study arguing for either some sort of
    +        # extensibility in the gen decorators or cancellation support.
    +        @functools.wraps(f)
    +        def pre_coroutine(self, *args, **kwargs):
    +            # type: (AsyncTestCase, *Any, **Any) -> Union[Generator, Coroutine]
    +            # Type comments used to avoid pypy3 bug.
    +            result = f(self, *args, **kwargs)
    +            if isinstance(result, Generator) or inspect.iscoroutine(result):
    +                self._test_generator = result
    +            else:
    +                self._test_generator = None
    +            return result
    +
    +        if inspect.iscoroutinefunction(f):
    +            coro = pre_coroutine
    +        else:
    +            coro = gen.coroutine(pre_coroutine)
    +
    +        @functools.wraps(coro)
    +        def post_coroutine(self, *args, **kwargs):
    +            # type: (AsyncTestCase, *Any, **Any) -> None
    +            try:
    +                return self.io_loop.run_sync(
    +                    functools.partial(coro, self, *args, **kwargs), timeout=timeout
    +                )
    +            except TimeoutError as e:
    +                # run_sync raises an error with an unhelpful traceback.
    +                # If the underlying generator is still running, we can throw the
    +                # exception back into it so the stack trace is replaced by the
    +                # point where the test is stopped. The only reason the generator
    +                # would not be running would be if it were cancelled, which means
    +                # a native coroutine, so we can rely on the cr_running attribute.
    +                if self._test_generator is not None and getattr(
    +                    self._test_generator, "cr_running", True
    +                ):
    +                    self._test_generator.throw(type(e), e)
    +                    # In case the test contains an overly broad except
    +                    # clause, we may get back here.
    +                # Coroutine was stopped or didn't raise a useful stack trace,
    +                # so re-raise the original exception which is better than nothing.
    +                raise
    +
    +        return post_coroutine
    +
    +    if func is not None:
    +        # Used like:
    +        #     @gen_test
    +        #     def f(self):
    +        #         pass
    +        return wrap(func)
    +    else:
    +        # Used like @gen_test(timeout=10)
    +        return wrap
    +
    +
    +# Without this attribute, nosetests will try to run gen_test as a test
    +# anywhere it is imported.
    +gen_test.__test__ = False  # type: ignore
    +
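+# ``gen_test`` also accepts native coroutines; a sketch equivalent to the
+# generator-style examples in its docstring:
+#
+#     class MyAsyncTest(AsyncHTTPTestCase):
+#         @gen_test
+#         async def test_native(self):
+#             response = await self.http_client.fetch(self.get_url("/"))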
    +
    +class ExpectLog(logging.Filter):
    +    """Context manager to capture and suppress expected log output.
    +
    +    Useful to make tests of error conditions less noisy, while still
    +    leaving unexpected log entries visible.  *Not thread safe.*
    +
    +    The attribute ``logged_stack`` is set to ``True`` if any exception
    +    stack trace was logged.
    +
    +    Usage::
    +
    +        with ExpectLog('tornado.application', "Uncaught exception"):
    +            error_response = self.fetch("/some_page")
    +
    +    .. versionchanged:: 4.3
    +       Added the ``logged_stack`` attribute.
    +    """
    +
    +    def __init__(
    +        self,
    +        logger: Union[logging.Logger, basestring_type],
    +        regex: str,
    +        required: bool = True,
    +    ) -> None:
    +        """Constructs an ExpectLog context manager.
    +
    +        :param logger: Logger object (or name of logger) to watch.  Pass
    +            an empty string to watch the root logger.
    +        :param regex: Regular expression to match.  Any log entries on
    +            the specified logger that match this regex will be suppressed.
    +        :param required: If true, an exception will be raised if the end of
    +            the ``with`` statement is reached without matching any log entries.
    +        """
    +        if isinstance(logger, basestring_type):
    +            logger = logging.getLogger(logger)
    +        self.logger = logger
    +        self.regex = re.compile(regex)
    +        self.required = required
    +        self.matched = False
    +        self.logged_stack = False
    +
    +    def filter(self, record: logging.LogRecord) -> bool:
    +        if record.exc_info:
    +            self.logged_stack = True
    +        message = record.getMessage()
    +        if self.regex.match(message):
    +            self.matched = True
    +            return False
    +        return True
    +
    +    def __enter__(self) -> "ExpectLog":
    +        self.logger.addFilter(self)
    +        return self
    +
    +    def __exit__(
    +        self,
    +        typ: "Optional[Type[BaseException]]",
    +        value: Optional[BaseException],
    +        tb: Optional[TracebackType],
    +    ) -> None:
    +        self.logger.removeFilter(self)
    +        if not typ and self.required and not self.matched:
    +            raise Exception("did not get expected log message")
    +
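+# ``ExpectLog`` with ``required=False`` suppresses the message when it
+# appears but does not fail if it never does; a sketch (the fetched path
+# is hypothetical):
+#
+#     with ExpectLog("tornado.general", "Connect error", required=False):
+#         self.fetch("/flaky")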
    +
    +def main(**kwargs: Any) -> None:
    +    """A simple test runner.
    +
    +    This test runner is essentially equivalent to `unittest.main` from
    +    the standard library, but adds support for Tornado-style option
    +    parsing and log formatting. It is *not* necessary to use this
    +    `main` function to run tests using `AsyncTestCase`; these tests
    +    are self-contained and can run with any test runner.
    +
    +    The easiest way to run a test is via the command line::
    +
    +        python -m tornado.testing tornado.test.web_test
    +
    +    See the standard library ``unittest`` module for ways in which
    +    tests can be specified.
    +
    +    Projects with many tests may wish to define a test script like
    +    ``tornado/test/runtests.py``.  This script should define a method
    +    ``all()`` which returns a test suite and then call
    +    `tornado.testing.main()`.  Note that even when a test script is
    +    used, the ``all()`` test suite may be overridden by naming a
    +    single test on the command line::
    +
    +        # Runs all tests
    +        python -m tornado.test.runtests
    +        # Runs one test
    +        python -m tornado.test.runtests tornado.test.web_test
    +
    +    Additional keyword arguments are passed through to ``unittest.main()``.
    +    For example, use ``tornado.testing.main(verbosity=2)``
    +    to show many test details as they are run.
    +    See http://docs.python.org/library/unittest.html#unittest.main
    +    for full argument list.
    +
    +    .. versionchanged:: 5.0
    +
    +       This function produces no output of its own; only that produced
    +       by the `unittest` module (previously it would add a PASS or FAIL
    +       log message).
    +    """
    +    from tornado.options import define, options, parse_command_line
    +
    +    define(
    +        "exception_on_interrupt",
    +        type=bool,
    +        default=True,
    +        help=(
    +            "If true (default), ctrl-c raises a KeyboardInterrupt "
    +            "exception.  This prints a stack trace but cannot interrupt "
    +            "certain operations.  If false, the process is more reliably "
    +            "killed, but does not print a stack trace."
    +        ),
    +    )
    +
    +    # support the same options as unittest's command-line interface
    +    define("verbose", type=bool)
    +    define("quiet", type=bool)
    +    define("failfast", type=bool)
    +    define("catch", type=bool)
    +    define("buffer", type=bool)
    +
    +    argv = [sys.argv[0]] + parse_command_line(sys.argv)
    +
    +    if not options.exception_on_interrupt:
    +        signal.signal(signal.SIGINT, signal.SIG_DFL)
    +
    +    if options.verbose is not None:
    +        kwargs["verbosity"] = 2
    +    if options.quiet is not None:
    +        kwargs["verbosity"] = 0
    +    if options.failfast is not None:
    +        kwargs["failfast"] = True
    +    if options.catch is not None:
    +        kwargs["catchbreak"] = True
    +    if options.buffer is not None:
    +        kwargs["buffer"] = True
    +
    +    if __name__ == "__main__" and len(argv) == 1:
    +        print("No tests specified", file=sys.stderr)
    +        sys.exit(1)
    +    # In order to be able to run tests by their fully-qualified name
    +    # on the command line without importing all tests here,
    +    # module must be set to None.  Python 3.2's unittest.main ignores
    +    # defaultTest if no module is given (it tries to do its own
    +    # test discovery, which is incompatible with auto2to3), so don't
    +    # set module if we're not asking for a specific test.
    +    if len(argv) > 1:
    +        unittest.main(module=None, argv=argv, **kwargs)  # type: ignore
    +    else:
    +        unittest.main(defaultTest="all", argv=argv, **kwargs)
    +
    +
    +if __name__ == "__main__":
    +    main()
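    For reference, a ``runtests.py`` of the shape this module's ``main``
    docstring describes could look roughly like the sketch below; the module
    names are placeholders for a real project's test modules:

        # runtests.py -- defines all() and delegates to tornado.testing.main()
        import unittest
        import tornado.testing

        TEST_MODULES = [
            "myproject.test.handlers_test",  # placeholder module names
            "myproject.test.util_test",
        ]

        def all():
            return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)

        if __name__ == "__main__":
            tornado.testing.main()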
    diff --git a/server/www/packages/packages-linux/x64/tornado/util.py b/server/www/packages/packages-linux/x64/tornado/util.py
    index a42ebeb..0a97a04 100644
    --- a/server/www/packages/packages-linux/x64/tornado/util.py
    +++ b/server/www/packages/packages-linux/x64/tornado/util.py
    @@ -1,497 +1,472 @@
    -"""Miscellaneous utility functions and classes.
    -
    -This module is used internally by Tornado.  It is not necessarily expected
    -that the functions and classes defined here will be useful to other
    -applications, but they are documented here in case they are.
    -
    -The one public-facing part of this module is the `Configurable` class
    -and its `~Configurable.configure` method, which becomes a part of the
    -interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
    -and `.Resolver`.
    -"""
    -
    -from __future__ import absolute_import, division, print_function
    -
    -import array
    -import atexit
    -import os
    -import re
    -import sys
    -import zlib
    -
    -PY3 = sys.version_info >= (3,)
    -
    -if PY3:
    -    xrange = range
    -
    -# inspect.getargspec() raises DeprecationWarnings in Python 3.5.
    -# The two functions have compatible interfaces for the parts we need.
    -if PY3:
    -    from inspect import getfullargspec as getargspec
    -else:
    -    from inspect import getargspec
    -
    -# Aliases for types that are spelled differently in different Python
    -# versions. bytes_type is deprecated and no longer used in Tornado
    -# itself but is left in case anyone outside Tornado is using it.
    -bytes_type = bytes
    -if PY3:
    -    unicode_type = str
    -    basestring_type = str
    -else:
    -    # The names unicode and basestring don't exist in py3 so silence flake8.
    -    unicode_type = unicode  # noqa
    -    basestring_type = basestring  # noqa
    -
    -
    -try:
    -    import typing  # noqa
    -    from typing import cast
    -
    -    _ObjectDictBase = typing.Dict[str, typing.Any]
    -except ImportError:
    -    _ObjectDictBase = dict
    -
    -    def cast(typ, x):
    -        return x
    -else:
    -    # More imports that are only needed in type comments.
    -    import datetime  # noqa
    -    import types  # noqa
    -    from typing import Any, AnyStr, Union, Optional, Dict, Mapping  # noqa
    -    from typing import Tuple, Match, Callable  # noqa
    -
    -    if PY3:
    -        _BaseString = str
    -    else:
    -        _BaseString = Union[bytes, unicode_type]
    -
    -
    -try:
    -    from sys import is_finalizing
    -except ImportError:
    -    # Emulate it
    -    def _get_emulated_is_finalizing():
    -        L = []
    -        atexit.register(lambda: L.append(None))
    -
    -        def is_finalizing():
    -            # Not referencing any globals here
    -            return L != []
    -
    -        return is_finalizing
    -
    -    is_finalizing = _get_emulated_is_finalizing()
    -
    -
    -class TimeoutError(Exception):
    -    """Exception raised by `.with_timeout` and `.IOLoop.run_sync`.
    -
    -    .. versionchanged:: 5.0:
    -       Unified ``tornado.gen.TimeoutError`` and
    -       ``tornado.ioloop.TimeoutError`` as ``tornado.util.TimeoutError``.
    -       Both former names remain as aliases.
    -    """
    -
    -
    -class ObjectDict(_ObjectDictBase):
    -    """Makes a dictionary behave like an object, with attribute-style access.
    -    """
    -    def __getattr__(self, name):
    -        # type: (str) -> Any
    -        try:
    -            return self[name]
    -        except KeyError:
    -            raise AttributeError(name)
    -
    -    def __setattr__(self, name, value):
    -        # type: (str, Any) -> None
    -        self[name] = value
    -
    -
    -class GzipDecompressor(object):
    -    """Streaming gzip decompressor.
    -
    -    The interface is like that of `zlib.decompressobj` (without some of the
    -    optional arguments, but it understands gzip headers and checksums.
    -    """
    -    def __init__(self):
    -        # Magic parameter makes zlib module understand gzip header
    -        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
    -        # This works on cpython and pypy, but not jython.
    -        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
    -
    -    def decompress(self, value, max_length=None):
    -        # type: (bytes, Optional[int]) -> bytes
    -        """Decompress a chunk, returning newly-available data.
    -
    -        Some data may be buffered for later processing; `flush` must
    -        be called when there is no more input data to ensure that
    -        all data was processed.
    -
    -        If ``max_length`` is given, some input data may be left over
    -        in ``unconsumed_tail``; you must retrieve this value and pass
    -        it back to a future call to `decompress` if it is not empty.
    -        """
    -        return self.decompressobj.decompress(value, max_length)
    -
    -    @property
    -    def unconsumed_tail(self):
    -        # type: () -> bytes
    -        """Returns the unconsumed portion left over
    -        """
    -        return self.decompressobj.unconsumed_tail
    -
    -    def flush(self):
    -        # type: () -> bytes
    -        """Return any remaining buffered data not yet returned by decompress.
    -
    -        Also checks for errors such as truncated input.
    -        No other methods may be called on this object after `flush`.
    -        """
    -        return self.decompressobj.flush()
    -
    -
    -def import_object(name):
    -    # type: (_BaseString) -> Any
    -    """Imports an object by name.
    -
    -    import_object('x') is equivalent to 'import x'.
    -    import_object('x.y.z') is equivalent to 'from x.y import z'.
    -
    -    >>> import tornado.escape
    -    >>> import_object('tornado.escape') is tornado.escape
    -    True
    -    >>> import_object('tornado.escape.utf8') is tornado.escape.utf8
    -    True
    -    >>> import_object('tornado') is tornado
    -    True
    -    >>> import_object('tornado.missing_module')
    -    Traceback (most recent call last):
    -        ...
    -    ImportError: No module named missing_module
    -    """
    -    if not isinstance(name, str):
    -        # on python 2 a byte string is required.
    -        name = name.encode('utf-8')
    -    if name.count('.') == 0:
    -        return __import__(name, None, None)
    -
    -    parts = name.split('.')
    -    obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
    -    try:
    -        return getattr(obj, parts[-1])
    -    except AttributeError:
    -        raise ImportError("No module named %s" % parts[-1])
    -
    -
    -# Stubs to make mypy happy (and later for actual type-checking).
    -def raise_exc_info(exc_info):
    -    # type: (Tuple[type, BaseException, types.TracebackType]) -> None
    -    pass
    -
    -
    -def exec_in(code, glob, loc=None):
    -    # type: (Any, Dict[str, Any], Optional[Mapping[str, Any]]) -> Any
    -    if isinstance(code, basestring_type):
    -        # exec(string) inherits the caller's future imports; compile
    -        # the string first to prevent that.
    -        code = compile(code, '', 'exec', dont_inherit=True)
    -    exec(code, glob, loc)
    -
    -
    -if PY3:
    -    exec("""
    -def raise_exc_info(exc_info):
    -    try:
    -        raise exc_info[1].with_traceback(exc_info[2])
    -    finally:
    -        exc_info = None
    -
    -""")
    -else:
    -    exec("""
    -def raise_exc_info(exc_info):
    -    raise exc_info[0], exc_info[1], exc_info[2]
    -""")
    -
    -
    -def errno_from_exception(e):
    -    # type: (BaseException) -> Optional[int]
    -    """Provides the errno from an Exception object.
    -
    -    There are cases that the errno attribute was not set so we pull
    -    the errno out of the args but if someone instantiates an Exception
    -    without any args you will get a tuple error. So this function
    -    abstracts all that behavior to give you a safe way to get the
    -    errno.
    -    """
    -
    -    if hasattr(e, 'errno'):
    -        return e.errno  # type: ignore
    -    elif e.args:
    -        return e.args[0]
    -    else:
    -        return None
    -
    -
    -_alphanum = frozenset(
    -    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
    -
    -
    -def _re_unescape_replacement(match):
    -    # type: (Match[str]) -> str
    -    group = match.group(1)
    -    if group[0] in _alphanum:
    -        raise ValueError("cannot unescape '\\\\%s'" % group[0])
    -    return group
    -
    -
    -_re_unescape_pattern = re.compile(r'\\(.)', re.DOTALL)
    -
    -
    -def re_unescape(s):
    -    # type: (str) -> str
    -    """Unescape a string escaped by `re.escape`.
    -
    -    May raise ``ValueError`` for regular expressions which could not
    -    have been produced by `re.escape` (for example, strings containing
    -    ``\d`` cannot be unescaped).
    -
    -    .. versionadded:: 4.4
    -    """
    -    return _re_unescape_pattern.sub(_re_unescape_replacement, s)
    -
    -
    -class Configurable(object):
    -    """Base class for configurable interfaces.
    -
    -    A configurable interface is an (abstract) class whose constructor
    -    acts as a factory function for one of its implementation subclasses.
    -    The implementation subclass as well as optional keyword arguments to
    -    its initializer can be set globally at runtime with `configure`.
    -
    -    By using the constructor as the factory method, the interface
    -    looks like a normal class, `isinstance` works as usual, etc.  This
    -    pattern is most useful when the choice of implementation is likely
    -    to be a global decision (e.g. when `~select.epoll` is available,
    -    always use it instead of `~select.select`), or when a
    -    previously-monolithic class has been split into specialized
    -    subclasses.
    -
    -    Configurable subclasses must define the class methods
    -    `configurable_base` and `configurable_default`, and use the instance
    -    method `initialize` instead of ``__init__``.
    -
    -    .. versionchanged:: 5.0
    -
    -       It is now possible for configuration to be specified at
    -       multiple levels of a class hierarchy.
    -
    -    """
    -    __impl_class = None  # type: type
    -    __impl_kwargs = None  # type: Dict[str, Any]
    -
    -    def __new__(cls, *args, **kwargs):
    -        base = cls.configurable_base()
    -        init_kwargs = {}
    -        if cls is base:
    -            impl = cls.configured_class()
    -            if base.__impl_kwargs:
    -                init_kwargs.update(base.__impl_kwargs)
    -        else:
    -            impl = cls
    -        init_kwargs.update(kwargs)
    -        if impl.configurable_base() is not base:
    -            # The impl class is itself configurable, so recurse.
    -            return impl(*args, **init_kwargs)
    -        instance = super(Configurable, cls).__new__(impl)
    -        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
    -        # singleton magic.  If we get rid of that we can switch to __init__
    -        # here too.
    -        instance.initialize(*args, **init_kwargs)
    -        return instance
    -
    -    @classmethod
    -    def configurable_base(cls):
    -        # type: () -> Any
    -        # TODO: This class needs https://github.com/python/typing/issues/107
    -        # to be fully typeable.
    -        """Returns the base class of a configurable hierarchy.
    -
    -        This will normally return the class in which it is defined.
    -        (which is *not* necessarily the same as the cls classmethod parameter).
    -        """
    -        raise NotImplementedError()
    -
    -    @classmethod
    -    def configurable_default(cls):
    -        # type: () -> type
    -        """Returns the implementation class to be used if none is configured."""
    -        raise NotImplementedError()
    -
    -    def initialize(self):
    -        # type: () -> None
    -        """Initialize a `Configurable` subclass instance.
    -
    -        Configurable classes should use `initialize` instead of ``__init__``.
    -
    -        .. versionchanged:: 4.2
    -           Now accepts positional arguments in addition to keyword arguments.
    -        """
    -
    -    @classmethod
    -    def configure(cls, impl, **kwargs):
    -        # type: (Any, **Any) -> None
    -        """Sets the class to use when the base class is instantiated.
    -
    -        Keyword arguments will be saved and added to the arguments passed
    -        to the constructor.  This can be used to set global defaults for
    -        some parameters.
    -        """
    -        base = cls.configurable_base()
    -        if isinstance(impl, (str, unicode_type)):
    -            impl = import_object(impl)
    -        if impl is not None and not issubclass(impl, cls):
    -            raise ValueError("Invalid subclass of %s" % cls)
    -        base.__impl_class = impl
    -        base.__impl_kwargs = kwargs
    -
    -    @classmethod
    -    def configured_class(cls):
    -        # type: () -> type
    -        """Returns the currently configured class."""
    -        base = cls.configurable_base()
    -        # Manually mangle the private name to see whether this base
    -        # has been configured (and not another base higher in the
    -        # hierarchy).
    -        if base.__dict__.get('_Configurable__impl_class') is None:
    -            base.__impl_class = cls.configurable_default()
    -        return base.__impl_class
    -
    -    @classmethod
    -    def _save_configuration(cls):
    -        # type: () -> Tuple[type, Dict[str, Any]]
    -        base = cls.configurable_base()
    -        return (base.__impl_class, base.__impl_kwargs)
    -
    -    @classmethod
    -    def _restore_configuration(cls, saved):
    -        # type: (Tuple[type, Dict[str, Any]]) -> None
    -        base = cls.configurable_base()
    -        base.__impl_class = saved[0]
    -        base.__impl_kwargs = saved[1]
    -
    -
    -class ArgReplacer(object):
    -    """Replaces one value in an ``args, kwargs`` pair.
    -
    -    Inspects the function signature to find an argument by name
    -    whether it is passed by position or keyword.  For use in decorators
    -    and similar wrappers.
    -    """
    -    def __init__(self, func, name):
    -        # type: (Callable, str) -> None
    -        self.name = name
    -        try:
    -            self.arg_pos = self._getargnames(func).index(name)
    -        except ValueError:
    -            # Not a positional parameter
    -            self.arg_pos = None
    -
    -    def _getargnames(self, func):
    -        # type: (Callable) -> List[str]
    -        try:
    -            return getargspec(func).args
    -        except TypeError:
    -            if hasattr(func, 'func_code'):
    -                # Cython-generated code has all the attributes needed
    -                # by inspect.getargspec, but the inspect module only
    -                # works with ordinary functions. Inline the portion of
    -                # getargspec that we need here. Note that for static
    -                # functions the @cython.binding(True) decorator must
    -                # be used (for methods it works out of the box).
    -                code = func.func_code  # type: ignore
    -                return code.co_varnames[:code.co_argcount]
    -            raise
    -
    -    def get_old_value(self, args, kwargs, default=None):
    -        # type: (List[Any], Dict[str, Any], Any) -> Any
    -        """Returns the old value of the named argument without replacing it.
    -
    -        Returns ``default`` if the argument is not present.
    -        """
    -        if self.arg_pos is not None and len(args) > self.arg_pos:
    -            return args[self.arg_pos]
    -        else:
    -            return kwargs.get(self.name, default)
    -
    -    def replace(self, new_value, args, kwargs):
    -        # type: (Any, List[Any], Dict[str, Any]) -> Tuple[Any, List[Any], Dict[str, Any]]
    -        """Replace the named argument in ``args, kwargs`` with ``new_value``.
    -
    -        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
    -        ``kwargs`` objects may not be the same as the input objects, or
    -        the input objects may be mutated.
    -
    -        If the named argument was not found, ``new_value`` will be added
    -        to ``kwargs`` and None will be returned as ``old_value``.
    -        """
    -        if self.arg_pos is not None and len(args) > self.arg_pos:
    -            # The arg to replace is passed positionally
    -            old_value = args[self.arg_pos]
    -            args = list(args)  # *args is normally a tuple
    -            args[self.arg_pos] = new_value
    -        else:
    -            # The arg to replace is either omitted or passed by keyword.
    -            old_value = kwargs.get(self.name)
    -            kwargs[self.name] = new_value
    -        return old_value, args, kwargs
    -
    -
    -def timedelta_to_seconds(td):
    -    # type: (datetime.timedelta) -> float
    -    """Equivalent to td.total_seconds() (introduced in python 2.7)."""
    -    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    -
    -
    -def _websocket_mask_python(mask, data):
    -    # type: (bytes, bytes) -> bytes
    -    """Websocket masking function.
    -
    -    `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
    -    Returns a `bytes` object of the same length as `data` with the mask applied
    -    as specified in section 5.3 of RFC 6455.
    -
    -    This pure-python implementation may be replaced by an optimized version when available.
    -    """
    -    mask_arr = array.array("B", mask)
    -    unmasked_arr = array.array("B", data)
    -    for i in xrange(len(data)):
    -        unmasked_arr[i] = unmasked_arr[i] ^ mask_arr[i % 4]
    -    if PY3:
    -        # tostring was deprecated in py32.  It hasn't been removed,
    -        # but since we turn on deprecation warnings in our tests
    -        # we need to use the right one.
    -        return unmasked_arr.tobytes()
    -    else:
    -        return unmasked_arr.tostring()
    -
    -
    -if (os.environ.get('TORNADO_NO_EXTENSION') or
    -        os.environ.get('TORNADO_EXTENSION') == '0'):
    -    # These environment variables exist to make it easier to do performance
    -    # comparisons; they are not guaranteed to remain supported in the future.
    -    _websocket_mask = _websocket_mask_python
    -else:
    -    try:
    -        from tornado.speedups import websocket_mask as _websocket_mask
    -    except ImportError:
    -        if os.environ.get('TORNADO_EXTENSION') == '1':
    -            raise
    -        _websocket_mask = _websocket_mask_python
    -
    -
    -def doctests():
    -    import doctest
    -    return doctest.DocTestSuite()
    +"""Miscellaneous utility functions and classes.
    +
    +This module is used internally by Tornado.  It is not necessarily expected
    +that the functions and classes defined here will be useful to other
    +applications, but they are documented here in case they are.
    +
    +The one public-facing part of this module is the `Configurable` class
    +and its `~Configurable.configure` method, which becomes a part of the
    +interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
    +and `.Resolver`.
    +"""
    +
    +import array
    +import atexit
    +from inspect import getfullargspec
    +import os
    +import re
    +import typing
    +import zlib
    +
    +from typing import (
    +    Any,
    +    Optional,
    +    Dict,
    +    Mapping,
    +    List,
    +    Tuple,
    +    Match,
    +    Callable,
    +    Type,
    +    Sequence,
    +)
    +
    +if typing.TYPE_CHECKING:
    +    # Additional imports only used in type comments.
    +    # This lets us make these imports lazy.
    +    import datetime  # noqa: F401
    +    from types import TracebackType  # noqa: F401
    +    from typing import Union  # noqa: F401
    +    import unittest  # noqa: F401
    +
    +# Aliases for types that are spelled differently in different Python
    +# versions. bytes_type is deprecated and no longer used in Tornado
    +# itself but is left in case anyone outside Tornado is using it.
    +bytes_type = bytes
    +unicode_type = str
    +basestring_type = str
    +
    +try:
    +    from sys import is_finalizing
    +except ImportError:
    +    # Emulate it
    +    def _get_emulated_is_finalizing() -> Callable[[], bool]:
    +        L = []  # type: List[None]
    +        atexit.register(lambda: L.append(None))
    +
    +        def is_finalizing() -> bool:
    +            # Not referencing any globals here
    +            return L != []
    +
    +        return is_finalizing
    +
    +    is_finalizing = _get_emulated_is_finalizing()
    +
    +
    +class TimeoutError(Exception):
    +    """Exception raised by `.with_timeout` and `.IOLoop.run_sync`.
    +
    +    .. versionchanged:: 5.0
    +       Unified ``tornado.gen.TimeoutError`` and
    +       ``tornado.ioloop.TimeoutError`` as ``tornado.util.TimeoutError``.
    +       Both former names remain as aliases.
    +    """
    +
    +
    +class ObjectDict(Dict[str, Any]):
    +    """Makes a dictionary behave like an object, with attribute-style access.
    +    """
    +
    +    def __getattr__(self, name: str) -> Any:
    +        try:
    +            return self[name]
    +        except KeyError:
    +            raise AttributeError(name)
    +
    +    def __setattr__(self, name: str, value: Any) -> None:
    +        self[name] = value
    +
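    In short, attribute access maps straight onto item access; a tiny sketch
    (the keys are arbitrary):

        d = ObjectDict(host="127.0.0.1", port=8080)
        assert d.host == d["host"]
        d.port = 8081                    # same as d["port"] = 8081
        try:
            d.missing
        except AttributeError:           # KeyError becomes AttributeError
            pass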
    +
    +class GzipDecompressor(object):
    +    """Streaming gzip decompressor.
    +
    +    The interface is like that of `zlib.decompressobj` (without some of the
    +    optional arguments), but it understands gzip headers and checksums.
    +    """
    +
    +    def __init__(self) -> None:
    +        # Magic parameter makes zlib module understand gzip header
    +        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
    +        # This works on cpython and pypy, but not jython.
    +        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
    +
    +    def decompress(self, value: bytes, max_length: int = 0) -> bytes:
    +        """Decompress a chunk, returning newly-available data.
    +
    +        Some data may be buffered for later processing; `flush` must
    +        be called when there is no more input data to ensure that
    +        all data was processed.
    +
    +        If ``max_length`` is given, some input data may be left over
    +        in ``unconsumed_tail``; you must retrieve this value and pass
    +        it back to a future call to `decompress` if it is not empty.
    +        """
    +        return self.decompressobj.decompress(value, max_length)
    +
    +    @property
    +    def unconsumed_tail(self) -> bytes:
    +        """Returns the unconsumed portion left over
    +        """
    +        return self.decompressobj.unconsumed_tail
    +
    +    def flush(self) -> bytes:
    +        """Return any remaining buffered data not yet returned by decompress.
    +
    +        Also checks for errors such as truncated input.
    +        No other methods may be called on this object after `flush`.
    +        """
    +        return self.decompressobj.flush()
    +
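    A minimal streaming round trip, assuming nothing beyond the standard
    library's ``gzip`` module:

        import gzip

        blob = gzip.compress(b"hello tornado")
        d = GzipDecompressor()
        # feed arbitrary 4-byte chunks, then flush to validate the trailer
        out = b"".join(d.decompress(blob[i:i + 4])
                       for i in range(0, len(blob), 4))
        out += d.flush()
        assert out == b"hello tornado"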
    +
    +def import_object(name: str) -> Any:
    +    """Imports an object by name.
    +
    +    ``import_object('x')`` is equivalent to ``import x``.
    +    ``import_object('x.y.z')`` is equivalent to ``from x.y import z``.
    +
    +    >>> import tornado.escape
    +    >>> import_object('tornado.escape') is tornado.escape
    +    True
    +    >>> import_object('tornado.escape.utf8') is tornado.escape.utf8
    +    True
    +    >>> import_object('tornado') is tornado
    +    True
    +    >>> import_object('tornado.missing_module')
    +    Traceback (most recent call last):
    +        ...
    +    ImportError: No module named missing_module
    +    """
    +    if name.count(".") == 0:
    +        return __import__(name)
    +
    +    parts = name.split(".")
    +    obj = __import__(".".join(parts[:-1]), fromlist=[parts[-1]])
    +    try:
    +        return getattr(obj, parts[-1])
    +    except AttributeError:
    +        raise ImportError("No module named %s" % parts[-1])
    +
    +
    +def exec_in(
    +    code: Any, glob: Dict[str, Any], loc: Optional[Mapping[str, Any]] = None
    +) -> None:
    +    if isinstance(code, str):
    +        # exec(string) inherits the caller's future imports; compile
    +        # the string first to prevent that.
    +        code = compile(code, "", "exec", dont_inherit=True)
    +    exec(code, glob, loc)
    +
    +
    +def raise_exc_info(
    +    exc_info,  # type: Tuple[Optional[type], Optional[BaseException], Optional[TracebackType]]
    +):
    +    # type: (...) -> typing.NoReturn
    +    #
    +    # This function's type annotation must use comments instead of
    +    # real annotations because typing.NoReturn does not exist in
    +    # python 3.5's typing module. The formatting is funky because this
    +    # is apparently what flake8 wants.
    +    try:
    +        if exc_info[1] is not None:
    +            raise exc_info[1].with_traceback(exc_info[2])
    +        else:
    +            raise TypeError("raise_exc_info called with no exception")
    +    finally:
    +        # Clear the traceback reference from our stack frame to
    +        # minimize circular references that slow down GC.
    +        exc_info = (None, None, None)
    +
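    A sketch of the intended usage: capture ``sys.exc_info()`` in one place
    and re-raise later with the original traceback intact (``run_deferred``
    is a hypothetical helper, not part of this module):

        import sys

        def run_deferred(callback):
            exc_info = None
            try:
                callback()
            except Exception:
                exc_info = sys.exc_info()
            # ... unrelated cleanup could run here ...
            if exc_info is not None:
                raise_exc_info(exc_info)  # original traceback preserved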
    +
    +def errno_from_exception(e: BaseException) -> Optional[int]:
    +    """Provides the errno from an Exception object.
    +
    +    There are cases that the errno attribute was not set so we pull
    +    the errno out of the args but if someone instantiates an Exception
    +    without any args you will get a tuple error. So this function
    +    abstracts all that behavior to give you a safe way to get the
    +    errno.
    +    """
    +
    +    if hasattr(e, "errno"):
    +        return e.errno  # type: ignore
    +    elif e.args:
    +        return e.args[0]
    +    else:
    +        return None
    +
    +
    +_alphanum = frozenset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
    +
    +
    +def _re_unescape_replacement(match: Match[str]) -> str:
    +    group = match.group(1)
    +    if group[0] in _alphanum:
    +        raise ValueError("cannot unescape '\\\\%s'" % group[0])
    +    return group
    +
    +
    +_re_unescape_pattern = re.compile(r"\\(.)", re.DOTALL)
    +
    +
    +def re_unescape(s: str) -> str:
    +    r"""Unescape a string escaped by `re.escape`.
    +
    +    May raise ``ValueError`` for regular expressions which could not
    +    have been produced by `re.escape` (for example, strings containing
    +    ``\d`` cannot be unescaped).
    +
    +    .. versionadded:: 4.4
    +    """
    +    return _re_unescape_pattern.sub(_re_unescape_replacement, s)
    +
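    A quick round trip makes the contract concrete:

        import re

        s = "a.b*c"
        assert re_unescape(re.escape(s)) == s
        try:
            re_unescape("\\d")   # not producible by re.escape
        except ValueError:
            pass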
    +
    +class Configurable(object):
    +    """Base class for configurable interfaces.
    +
    +    A configurable interface is an (abstract) class whose constructor
    +    acts as a factory function for one of its implementation subclasses.
    +    The implementation subclass as well as optional keyword arguments to
    +    its initializer can be set globally at runtime with `configure`.
    +
    +    By using the constructor as the factory method, the interface
    +    looks like a normal class, `isinstance` works as usual, etc.  This
    +    pattern is most useful when the choice of implementation is likely
    +    to be a global decision (e.g. when `~select.epoll` is available,
    +    always use it instead of `~select.select`), or when a
    +    previously-monolithic class has been split into specialized
    +    subclasses.
    +
    +    Configurable subclasses must define the class methods
    +    `configurable_base` and `configurable_default`, and use the instance
    +    method `initialize` instead of ``__init__``.
    +
    +    .. versionchanged:: 5.0
    +
    +       It is now possible for configuration to be specified at
    +       multiple levels of a class hierarchy.
    +
    +    """
    +
    +    # Type annotations on this class are mostly done with comments
    +    # because they need to refer to Configurable, which isn't defined
    +    # until after the class definition block. These can use regular
    +    # annotations when our minimum python version is 3.7.
    +    #
    +    # There may be a clever way to use generics here to get more
    +    # precise types (i.e. for a particular Configurable subclass T,
    +    # all the types are subclasses of T, not just Configurable).
    +    __impl_class = None  # type: Optional[Type[Configurable]]
    +    __impl_kwargs = None  # type: Dict[str, Any]
    +
    +    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
    +        base = cls.configurable_base()
    +        init_kwargs = {}  # type: Dict[str, Any]
    +        if cls is base:
    +            impl = cls.configured_class()
    +            if base.__impl_kwargs:
    +                init_kwargs.update(base.__impl_kwargs)
    +        else:
    +            impl = cls
    +        init_kwargs.update(kwargs)
    +        if impl.configurable_base() is not base:
    +            # The impl class is itself configurable, so recurse.
    +            return impl(*args, **init_kwargs)
    +        instance = super(Configurable, cls).__new__(impl)
    +        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
    +        # singleton magic.  If we get rid of that we can switch to __init__
    +        # here too.
    +        instance.initialize(*args, **init_kwargs)
    +        return instance
    +
    +    @classmethod
    +    def configurable_base(cls):
    +        # type: () -> Type[Configurable]
    +        """Returns the base class of a configurable hierarchy.
    +
    +        This will normally return the class in which it is defined
    +        (which is *not* necessarily the same as the ``cls`` classmethod
    +        parameter).
    +
    +        """
    +        raise NotImplementedError()
    +
    +    @classmethod
    +    def configurable_default(cls):
    +        # type: () -> Type[Configurable]
    +        """Returns the implementation class to be used if none is configured."""
    +        raise NotImplementedError()
    +
    +    def _initialize(self) -> None:
    +        pass
    +
    +    initialize = _initialize  # type: Callable[..., None]
    +    """Initialize a `Configurable` subclass instance.
    +
    +    Configurable classes should use `initialize` instead of ``__init__``.
    +
    +    .. versionchanged:: 4.2
    +       Now accepts positional arguments in addition to keyword arguments.
    +    """
    +
    +    @classmethod
    +    def configure(cls, impl, **kwargs):
    +        # type: (Union[None, str, Type[Configurable]], Any) -> None
    +        """Sets the class to use when the base class is instantiated.
    +
    +        Keyword arguments will be saved and added to the arguments passed
    +        to the constructor.  This can be used to set global defaults for
    +        some parameters.
    +        """
    +        base = cls.configurable_base()
    +        if isinstance(impl, str):
    +            impl = typing.cast(Type[Configurable], import_object(impl))
    +        if impl is not None and not issubclass(impl, cls):
    +            raise ValueError("Invalid subclass of %s" % cls)
    +        base.__impl_class = impl
    +        base.__impl_kwargs = kwargs
    +
    +    @classmethod
    +    def configured_class(cls):
    +        # type: () -> Type[Configurable]
    +        """Returns the currently configured class."""
    +        base = cls.configurable_base()
    +        # Manually mangle the private name to see whether this base
    +        # has been configured (and not another base higher in the
    +        # hierarchy).
    +        if base.__dict__.get("_Configurable__impl_class") is None:
    +            base.__impl_class = cls.configurable_default()
    +        if base.__impl_class is not None:
    +            return base.__impl_class
    +        else:
    +            # Should be impossible, but mypy wants an explicit check.
    +            raise ValueError("configured class not found")
    +
    +    @classmethod
    +    def _save_configuration(cls):
    +        # type: () -> Tuple[Optional[Type[Configurable]], Dict[str, Any]]
    +        base = cls.configurable_base()
    +        return (base.__impl_class, base.__impl_kwargs)
    +
    +    @classmethod
    +    def _restore_configuration(cls, saved):
    +        # type: (Tuple[Optional[Type[Configurable]], Dict[str, Any]]) -> None
    +        base = cls.configurable_base()
    +        base.__impl_class = saved[0]
    +        base.__impl_kwargs = saved[1]
    +
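    A hedged sketch of the pattern with hypothetical classes (``Transport``
    as the configurable base and two implementations); the default is
    resolved lazily, so the forward reference in ``configurable_default``
    is safe:

        class Transport(Configurable):
            @classmethod
            def configurable_base(cls):
                return Transport

            @classmethod
            def configurable_default(cls):
                return TcpTransport      # resolved at call time

        class TcpTransport(Transport):
            def initialize(self, timeout=10):
                self.timeout = timeout

        class SslTransport(Transport):
            def initialize(self, timeout=10):
                self.timeout = timeout

        assert isinstance(Transport(), TcpTransport)   # default impl
        Transport.configure(SslTransport, timeout=30)  # swap globally
        t = Transport()
        assert isinstance(t, SslTransport) and t.timeout == 30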
    +
    +class ArgReplacer(object):
    +    """Replaces one value in an ``args, kwargs`` pair.
    +
    +    Inspects the function signature to find an argument by name
    +    whether it is passed by position or keyword.  For use in decorators
    +    and similar wrappers.
    +    """
    +
    +    def __init__(self, func: Callable, name: str) -> None:
    +        self.name = name
    +        try:
    +            self.arg_pos = self._getargnames(func).index(name)  # type: Optional[int]
    +        except ValueError:
    +            # Not a positional parameter
    +            self.arg_pos = None
    +
    +    def _getargnames(self, func: Callable) -> List[str]:
    +        try:
    +            return getfullargspec(func).args
    +        except TypeError:
    +            if hasattr(func, "func_code"):
    +                # Cython-generated code has all the attributes needed
    +                # by inspect.getfullargspec, but the inspect module only
    +                # works with ordinary functions. Inline the portion of
    +                # getfullargspec that we need here. Note that for static
    +                # functions the @cython.binding(True) decorator must
    +                # be used (for methods it works out of the box).
    +                code = func.func_code  # type: ignore
    +                return code.co_varnames[: code.co_argcount]
    +            raise
    +
    +    def get_old_value(
    +        self, args: Sequence[Any], kwargs: Dict[str, Any], default: Any = None
    +    ) -> Any:
    +        """Returns the old value of the named argument without replacing it.
    +
    +        Returns ``default`` if the argument is not present.
    +        """
    +        if self.arg_pos is not None and len(args) > self.arg_pos:
    +            return args[self.arg_pos]
    +        else:
    +            return kwargs.get(self.name, default)
    +
    +    def replace(
    +        self, new_value: Any, args: Sequence[Any], kwargs: Dict[str, Any]
    +    ) -> Tuple[Any, Sequence[Any], Dict[str, Any]]:
    +        """Replace the named argument in ``args, kwargs`` with ``new_value``.
    +
    +        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
    +        ``kwargs`` objects may not be the same as the input objects, or
    +        the input objects may be mutated.
    +
    +        If the named argument was not found, ``new_value`` will be added
    +        to ``kwargs`` and None will be returned as ``old_value``.
    +        """
    +        if self.arg_pos is not None and len(args) > self.arg_pos:
    +            # The arg to replace is passed positionally
    +            old_value = args[self.arg_pos]
    +            args = list(args)  # *args is normally a tuple
    +            args[self.arg_pos] = new_value
    +        else:
    +            # The arg to replace is either omitted or passed by keyword.
    +            old_value = kwargs.get(self.name)
    +            kwargs[self.name] = new_value
    +        return old_value, args, kwargs
    +
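    A sketch of the decorator use case (``cap_timeout`` and the ``timeout``
    parameter are hypothetical):

        import functools

        def cap_timeout(func):
            replacer = ArgReplacer(func, "timeout")

            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                # force timeout=5.0 whether passed by position or keyword
                old, args, kwargs = replacer.replace(5.0, list(args), kwargs)
                return func(*args, **kwargs)

            return wrapper

        @cap_timeout
        def fetch(url, timeout=60):
            return url, timeout

        assert fetch("http://example.com") == ("http://example.com", 5.0)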
    +
    +def timedelta_to_seconds(td):
    +    # type: (datetime.timedelta) -> float
    +    """Equivalent to ``td.total_seconds()`` (introduced in Python 2.7)."""
    +    return td.total_seconds()
    +
    +
    +def _websocket_mask_python(mask: bytes, data: bytes) -> bytes:
    +    """Websocket masking function.
    +
    +    `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
    +    Returns a `bytes` object of the same length as `data` with the mask applied
    +    as specified in section 5.3 of RFC 6455.
    +
    +    This pure-python implementation may be replaced by an optimized version when available.
    +    """
    +    mask_arr = array.array("B", mask)
    +    unmasked_arr = array.array("B", data)
    +    for i in range(len(data)):
    +        unmasked_arr[i] = unmasked_arr[i] ^ mask_arr[i % 4]
    +    return unmasked_arr.tobytes()
    +
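    Because masking is a plain XOR, applying the same 4-byte mask twice is an
    involution, which doubles as a quick self-check:

        mask = b"abcd"
        payload = b"hello websocket"
        masked = _websocket_mask_python(mask, payload)
        assert _websocket_mask_python(mask, masked) == payload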
    +
    +if os.environ.get("TORNADO_NO_EXTENSION") or os.environ.get("TORNADO_EXTENSION") == "0":
    +    # These environment variables exist to make it easier to do performance
    +    # comparisons; they are not guaranteed to remain supported in the future.
    +    _websocket_mask = _websocket_mask_python
    +else:
    +    try:
    +        from tornado.speedups import websocket_mask as _websocket_mask
    +    except ImportError:
    +        if os.environ.get("TORNADO_EXTENSION") == "1":
    +            raise
    +        _websocket_mask = _websocket_mask_python
    +
    +
    +def doctests():
    +    # type: () -> unittest.TestSuite
    +    import doctest
    +
    +    return doctest.DocTestSuite()
    diff --git a/server/www/packages/packages-linux/x64/tornado/web.py b/server/www/packages/packages-linux/x64/tornado/web.py
    index 6760b0b..08e63ec 100644
    --- a/server/www/packages/packages-linux/x64/tornado/web.py
    +++ b/server/www/packages/packages-linux/x64/tornado/web.py
    @@ -1,3394 +1,3593 @@
    -#
    -# Copyright 2009 Facebook
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License"); you may
    -# not use this file except in compliance with the License. You may obtain
    -# a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    -# License for the specific language governing permissions and limitations
    -# under the License.
    -
    -"""``tornado.web`` provides a simple web framework with asynchronous
    -features that allow it to scale to large numbers of open connections,
    -making it ideal for `long polling
    -<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
    -
    -Here is a simple "Hello, world" example app:
    -
    -.. testcode::
    -
    -    import tornado.ioloop
    -    import tornado.web
    -
    -    class MainHandler(tornado.web.RequestHandler):
    -        def get(self):
    -            self.write("Hello, world")
    -
    -    if __name__ == "__main__":
    -        application = tornado.web.Application([
    -            (r"/", MainHandler),
    -        ])
    -        application.listen(8888)
    -        tornado.ioloop.IOLoop.current().start()
    -
    -.. testoutput::
    -   :hide:
    -
    -
    -See the :doc:`guide` for additional information.
    -
    -Thread-safety notes
    --------------------
    -
    -In general, methods on `RequestHandler` and elsewhere in Tornado are
    -not thread-safe. In particular, methods such as
    -`~RequestHandler.write()`, `~RequestHandler.finish()`, and
    -`~RequestHandler.flush()` must only be called from the main thread. If
    -you use multiple threads it is important to use `.IOLoop.add_callback`
    -to transfer control back to the main thread before finishing the
    -request, or to limit your use of other threads to
    -`.IOLoop.run_in_executor` and ensure that your callbacks running in
    -the executor do not refer to Tornado objects.
    -
    -"""
    -
    -from __future__ import absolute_import, division, print_function
    -
    -import base64
    -import binascii
    -import datetime
    -import email.utils
    -import functools
    -import gzip
    -import hashlib
    -import hmac
    -import mimetypes
    -import numbers
    -import os.path
    -import re
    -import stat
    -import sys
    -import threading
    -import time
    -import tornado
    -import traceback
    -import types
    -import warnings
    -from inspect import isclass
    -from io import BytesIO
    -
    -from tornado.concurrent import Future, future_set_result_unless_cancelled
    -from tornado import escape
    -from tornado import gen
    -from tornado import httputil
    -from tornado import iostream
    -from tornado import locale
    -from tornado.log import access_log, app_log, gen_log
    -from tornado import stack_context
    -from tornado import template
    -from tornado.escape import utf8, _unicode
    -from tornado.routing import (AnyMatches, DefaultHostMatches, HostMatches,
    -                             ReversibleRouter, Rule, ReversibleRuleRouter,
    -                             URLSpec)
    -from tornado.util import (ObjectDict, raise_exc_info,
    -                          unicode_type, _websocket_mask, PY3)
    -
    -url = URLSpec
    -
    -if PY3:
    -    import http.cookies as Cookie
    -    import urllib.parse as urlparse
    -    from urllib.parse import urlencode
    -else:
    -    import Cookie
    -    import urlparse
    -    from urllib import urlencode
    -
    -try:
    -    import typing  # noqa
    -
    -    # The following types are accepted by RequestHandler.set_header
    -    # and related methods.
    -    _HeaderTypes = typing.Union[bytes, unicode_type,
    -                                numbers.Integral, datetime.datetime]
    -except ImportError:
    -    pass
    -
    -
    -MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
    -"""The oldest signed value version supported by this version of Tornado.
    -
    -Signed values older than this version cannot be decoded.
    -
    -.. versionadded:: 3.2.1
    -"""
    -
    -MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
    -"""The newest signed value version supported by this version of Tornado.
    -
    -Signed values newer than this version cannot be decoded.
    -
    -.. versionadded:: 3.2.1
    -"""
    -
    -DEFAULT_SIGNED_VALUE_VERSION = 2
    -"""The signed value version produced by `.RequestHandler.create_signed_value`.
    -
    -May be overridden by passing a ``version`` keyword argument.
    -
    -.. versionadded:: 3.2.1
    -"""
    -
    -DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
    -"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
    -
    -May be overridden by passing a ``min_version`` keyword argument.
    -
    -.. versionadded:: 3.2.1
    -"""
    -
    -
    -class RequestHandler(object):
    -    """Base class for HTTP request handlers.
    -
    -    Subclasses must define at least one of the methods defined in the
    -    "Entry points" section below.
    -    """
    -    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
    -                         "OPTIONS")
    -
    -    _template_loaders = {}  # type: typing.Dict[str, template.BaseLoader]
    -    _template_loader_lock = threading.Lock()
    -    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    -
    -    def __init__(self, application, request, **kwargs):
    -        super(RequestHandler, self).__init__()
    -
    -        self.application = application
    -        self.request = request
    -        self._headers_written = False
    -        self._finished = False
    -        self._auto_finish = True
    -        self._transforms = None  # will be set in _execute
    -        self._prepared_future = None
    -        self._headers = None  # type: httputil.HTTPHeaders
    -        self.path_args = None
    -        self.path_kwargs = None
    -        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
    -                             application.ui_methods.items())
    -        # UIModules are available as both `modules` and `_tt_modules` in the
    -        # template namespace.  Historically only `modules` was available
    -        # but could be clobbered by user additions to the namespace.
    -        # The template {% module %} directive looks in `_tt_modules` to avoid
    -        # possible conflicts.
    -        self.ui["_tt_modules"] = _UIModuleNamespace(self,
    -                                                    application.ui_modules)
    -        self.ui["modules"] = self.ui["_tt_modules"]
    -        self.clear()
    -        self.request.connection.set_close_callback(self.on_connection_close)
    -        self.initialize(**kwargs)
    -
    -    def initialize(self):
    -        """Hook for subclass initialization. Called for each request.
    -
    -        A dictionary passed as the third argument of a url spec will be
    -        supplied as keyword arguments to initialize().
    -
    -        Example::
    -
    -            class ProfileHandler(RequestHandler):
    -                def initialize(self, database):
    -                    self.database = database
    -
    -                def get(self, username):
    -                    ...
    -
    -            app = Application([
    -                (r'/user/(.*)', ProfileHandler, dict(database=database)),
    -                ])
    -        """
    -        pass
    -
    -    @property
    -    def settings(self):
    -        """An alias for `self.application.settings `."""
    -        return self.application.settings
    -
    -    def head(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def get(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def post(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def delete(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def patch(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def put(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def options(self, *args, **kwargs):
    -        raise HTTPError(405)
    -
    -    def prepare(self):
    -        """Called at the beginning of a request before  `get`/`post`/etc.
    -
    -        Override this method to perform common initialization regardless
    -        of the request method.
    -
    -        Asynchronous support: Decorate this method with `.gen.coroutine`
    -        or use ``async def`` to make it asynchronous (the
    -        `asynchronous` decorator cannot be used on `prepare`).
    -        If this method returns a `.Future` execution will not proceed
    -        until the `.Future` is done.
    -
    -        .. versionadded:: 3.1
    -           Asynchronous support.
    -        """
    -        pass
    -
    -    def on_finish(self):
    -        """Called after the end of a request.
    -
    -        Override this method to perform cleanup, logging, etc.
    -        This method is a counterpart to `prepare`.  ``on_finish`` may
    -        not produce any output, as it is called after the response
    -        has been sent to the client.
    -        """
    -        pass
    -
    -    def on_connection_close(self):
    -        """Called in async handlers if the client closed the connection.
    -
    -        Override this to clean up resources associated with
    -        long-lived connections.  Note that this method is called only if
    -        the connection was closed during asynchronous processing; if you
    -        need to do cleanup after every request override `on_finish`
    -        instead.
    -
    -        Proxies may keep a connection open for a time (perhaps
    -        indefinitely) after the client has gone away, so this method
    -        may not be called promptly after the end user closes their
    -        connection.
    -        """
    -        if _has_stream_request_body(self.__class__):
    -            if not self.request.body.done():
    -                self.request.body.set_exception(iostream.StreamClosedError())
    -                self.request.body.exception()
    -
    -    def clear(self):
    -        """Resets all headers and content for this response."""
    -        self._headers = httputil.HTTPHeaders({
    -            "Server": "TornadoServer/%s" % tornado.version,
    -            "Content-Type": "text/html; charset=UTF-8",
    -            "Date": httputil.format_timestamp(time.time()),
    -        })
    -        self.set_default_headers()
    -        self._write_buffer = []
    -        self._status_code = 200
    -        self._reason = httputil.responses[200]
    -
    -    def set_default_headers(self):
    -        """Override this to set HTTP headers at the beginning of the request.
    -
    -        For example, this is the place to set a custom ``Server`` header.
    -        Note that setting such headers in the normal flow of request
    -        processing may not do what you want, since headers may be reset
    -        during error handling.
    -        """
    -        pass
    -
    -    def set_status(self, status_code, reason=None):
    -        """Sets the status code for our response.
    -
    -        :arg int status_code: Response status code.
    -        :arg str reason: Human-readable reason phrase describing the status
    -            code. If ``None``, it will be filled in from
    -            `http.client.responses` or "Unknown".
    -
    -        .. versionchanged:: 5.0
    -
    -           No longer validates that the response code is in
    -           `http.client.responses`.
    -        """
    -        self._status_code = status_code
    -        if reason is not None:
    -            self._reason = escape.native_str(reason)
    -        else:
    -            self._reason = httputil.responses.get(status_code, "Unknown")
    -
    -    def get_status(self):
    -        """Returns the status code for our response."""
    -        return self._status_code
    -
    -    def set_header(self, name, value):
    -        # type: (str, _HeaderTypes) -> None
    -        """Sets the given response header name and value.
    -
    -        If a datetime is given, we automatically format it according to the
    -        HTTP specification. If the value is not a string, we convert it to
    -        a string. All header values are then encoded as UTF-8.
    -        """
    -        self._headers[name] = self._convert_header_value(value)
    -
    -    def add_header(self, name, value):
    -        # type: (str, _HeaderTypes) -> None
    -        """Adds the given response header and value.
    -
    -        Unlike `set_header`, `add_header` may be called multiple times
    -        to return multiple values for the same header.
    -        """
    -        self._headers.add(name, self._convert_header_value(value))
    -
    -    def clear_header(self, name):
    -        """Clears an outgoing header, undoing a previous `set_header` call.
    -
    -        Note that this method does not apply to multi-valued headers
    -        set by `add_header`.
    -        """
    -        if name in self._headers:
    -            del self._headers[name]
    -
    -    _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")
    -
    -    def _convert_header_value(self, value):
    -        # type: (_HeaderTypes) -> str
    -
    -        # Convert the input value to a str. This type check is a bit
    -        # subtle: The bytes case only executes on python 3, and the
    -        # unicode case only executes on python 2, because the other
    -        # cases are covered by the first match for str.
    -        if isinstance(value, str):
    -            retval = value
    -        elif isinstance(value, bytes):  # py3
    -            # Non-ascii characters in headers are not well supported,
    -            # but if you pass bytes, use latin1 so they pass through as-is.
    -            retval = value.decode('latin1')
    -        elif isinstance(value, unicode_type):  # py2
    -            # TODO: This is inconsistent with the use of latin1 above,
    -            # but it's been that way for a long time. Should it change?
    -            retval = escape.utf8(value)
    -        elif isinstance(value, numbers.Integral):
    -            # return immediately since we know the converted value will be safe
    -            return str(value)
    -        elif isinstance(value, datetime.datetime):
    -            return httputil.format_timestamp(value)
    -        else:
    -            raise TypeError("Unsupported header value %r" % value)
    -        # If \n is allowed into the header, it is possible to inject
    -        # additional headers or split the request.
    -        if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
    -            raise ValueError("Unsafe header value %r" % retval)
    -        return retval
    -
    -    _ARG_DEFAULT = object()
    -
    -    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
    -        """Returns the value of the argument with the given name.
    -
    -        If default is not provided, the argument is considered to be
    -        required, and we raise a `MissingArgumentError` if it is missing.
    -
    -        If the argument appears in the url more than once, we return the
    -        last value.
    -
    -        The returned value is always unicode.
    -        """
    -        return self._get_argument(name, default, self.request.arguments, strip)
    -
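    -    # A sketch of typical get_argument() use in a handler method (the
    -    # argument names are illustrative):
    -    #
    -    #     def get(self):
    -    #         q = self.get_argument("q")             # raises MissingArgumentError (400) if absent
    -    #         page = self.get_argument("page", "1")  # optional, defaults to "1"
    -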
    -    def get_arguments(self, name, strip=True):
    -        """Returns a list of the arguments with the given name.
    -
    -        If the argument is not present, returns an empty list.
    -
    -        The returned values are always unicode.
    -        """
    -
    -        # Make sure `get_arguments` isn't accidentally being called with a
    -        # positional argument that's assumed to be a default (like in
    -        # `get_argument`.)
    -        assert isinstance(strip, bool)
    -
    -        return self._get_arguments(name, self.request.arguments, strip)
    -
    -    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
    -        """Returns the value of the argument with the given name
    -        from the request body.
    -
    -        If default is not provided, the argument is considered to be
    -        required, and we raise a `MissingArgumentError` if it is missing.
    -
    -        If the argument appears in the url more than once, we return the
    -        last value.
    -
    -        The returned value is always unicode.
    -
    -        .. versionadded:: 3.2
    -        """
    -        return self._get_argument(name, default, self.request.body_arguments,
    -                                  strip)
    -
    -    def get_body_arguments(self, name, strip=True):
    -        """Returns a list of the body arguments with the given name.
    -
    -        If the argument is not present, returns an empty list.
    -
    -        The returned values are always unicode.
    -
    -        .. versionadded:: 3.2
    -        """
    -        return self._get_arguments(name, self.request.body_arguments, strip)
    -
    -    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
    -        """Returns the value of the argument with the given name
    -        from the request query string.
    -
    -        If default is not provided, the argument is considered to be
    -        required, and we raise a `MissingArgumentError` if it is missing.
    -
    -        If the argument appears in the url more than once, we return the
    -        last value.
    -
    -        The returned value is always unicode.
    -
    -        .. versionadded:: 3.2
    -        """
    -        return self._get_argument(name, default,
    -                                  self.request.query_arguments, strip)
    -
    -    def get_query_arguments(self, name, strip=True):
    -        """Returns a list of the query arguments with the given name.
    -
    -        If the argument is not present, returns an empty list.
    -
    -        The returned values are always unicode.
    -
    -        .. versionadded:: 3.2
    -        """
    -        return self._get_arguments(name, self.request.query_arguments, strip)
    -
    -    def _get_argument(self, name, default, source, strip=True):
    -        args = self._get_arguments(name, source, strip=strip)
    -        if not args:
    -            if default is self._ARG_DEFAULT:
    -                raise MissingArgumentError(name)
    -            return default
    -        return args[-1]
    -
    -    def _get_arguments(self, name, source, strip=True):
    -        values = []
    -        for v in source.get(name, []):
    -            v = self.decode_argument(v, name=name)
    -            if isinstance(v, unicode_type):
    -                # Get rid of any weird control chars (unless decoding gave
    -                # us bytes, in which case leave it alone)
    -                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
    -            if strip:
    -                v = v.strip()
    -            values.append(v)
    -        return values
    -
    -    def decode_argument(self, value, name=None):
    -        """Decodes an argument from the request.
    -
    -        The argument has been percent-decoded and is now a byte string.
    -        By default, this method decodes the argument as utf-8 and returns
    -        a unicode string, but this may be overridden in subclasses.
    -
    -        This method is used as a filter for both `get_argument()` and for
    -        values extracted from the url and passed to `get()`/`post()`/etc.
    -
    -        The name of the argument is provided if known, but may be None
    -        (e.g. for unnamed groups in the url regex).
    -        """
    -        try:
    -            return _unicode(value)
    -        except UnicodeDecodeError:
    -            raise HTTPError(400, "Invalid unicode in %s: %r" %
    -                            (name or "url", value[:40]))
    -
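    -    # A sketch of a decode_argument() override for clients that send a
    -    # non-UTF-8 encoding (the chosen codec is illustrative):
    -    #
    -    #     def decode_argument(self, value, name=None):
    -    #         return value.decode("latin1")
    -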
    -    @property
    -    def cookies(self):
    -        """An alias for
    -        `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
    -        return self.request.cookies
    -
    -    def get_cookie(self, name, default=None):
    -        """Returns the value of the request cookie with the given name.
    -
    -        If the named cookie is not present, returns ``default``.
    -
    -        This method only returns cookies that were present in the request.
    -        It does not see the outgoing cookies set by `set_cookie` in this
    -        handler.
    -        """
    -        if self.request.cookies is not None and name in self.request.cookies:
    -            return self.request.cookies[name].value
    -        return default
    -
    -    def set_cookie(self, name, value, domain=None, expires=None, path="/",
    -                   expires_days=None, **kwargs):
    -        """Sets an outgoing cookie name/value with the given options.
    -
    -        Newly-set cookies are not immediately visible via `get_cookie`;
    -        they are not present until the next request.
    -
    -        expires may be a numeric timestamp as returned by `time.time`,
    -        a time tuple as returned by `time.gmtime`, or a
    -        `datetime.datetime` object.
    -
    -        Additional keyword arguments are set on the cookies.Morsel
    -        directly.
    -        See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel
    -        for available attributes.
    -        """
    -        # The cookie library only accepts type str, in both python 2 and 3
    -        name = escape.native_str(name)
    -        value = escape.native_str(value)
    -        if re.search(r"[\x00-\x20]", name + value):
    -            # Don't let us accidentally inject bad stuff
    -            raise ValueError("Invalid cookie %r: %r" % (name, value))
    -        if not hasattr(self, "_new_cookie"):
    -            self._new_cookie = Cookie.SimpleCookie()
    -        if name in self._new_cookie:
    -            del self._new_cookie[name]
    -        self._new_cookie[name] = value
    -        morsel = self._new_cookie[name]
    -        if domain:
    -            morsel["domain"] = domain
    -        if expires_days is not None and not expires:
    -            expires = datetime.datetime.utcnow() + datetime.timedelta(
    -                days=expires_days)
    -        if expires:
    -            morsel["expires"] = httputil.format_timestamp(expires)
    -        if path:
    -            morsel["path"] = path
    -        for k, v in kwargs.items():
    -            if k == 'max_age':
    -                k = 'max-age'
    -
    -            # skip falsy values for httponly and secure flags because
    -            # SimpleCookie sets them regardless
    -            if k in ['httponly', 'secure'] and not v:
    -                continue
    -
    -            morsel[k] = v
    -
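    -    # A sketch of set_cookie() passing extra Morsel attributes as keyword
    -    # arguments (cookie name and values are illustrative):
    -    #
    -    #     self.set_cookie("theme", "dark", expires_days=7,
    -    #                     httponly=True, secure=True)
    -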
    -    def clear_cookie(self, name, path="/", domain=None):
    -        """Deletes the cookie with the given name.
    -
    -        Due to limitations of the cookie protocol, you must pass the same
    -        path and domain to clear a cookie as were used when that cookie
    -        was set (but there is no way to find out on the server side
    -        which values were used for a given cookie).
    -
    -        Similar to `set_cookie`, the effect of this method will not be
    -        seen until the following request.
    -        """
    -        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    -        self.set_cookie(name, value="", path=path, expires=expires,
    -                        domain=domain)
    -
    -    def clear_all_cookies(self, path="/", domain=None):
    -        """Deletes all the cookies the user sent with this request.
    -
    -        See `clear_cookie` for more information on the path and domain
    -        parameters.
    -
    -        Similar to `set_cookie`, the effect of this method will not be
    -        seen until the following request.
    -
    -        .. versionchanged:: 3.2
    -
    -           Added the ``path`` and ``domain`` parameters.
    -        """
    -        for name in self.request.cookies:
    -            self.clear_cookie(name, path=path, domain=domain)
    -
    -    def set_secure_cookie(self, name, value, expires_days=30, version=None,
    -                          **kwargs):
    -        """Signs and timestamps a cookie so it cannot be forged.
    -
    -        You must specify the ``cookie_secret`` setting in your Application
    -        to use this method. It should be a long, random sequence of bytes
    -        to be used as the HMAC secret for the signature.
    -
    -        To read a cookie set with this method, use `get_secure_cookie()`.
    -
    -        Note that the ``expires_days`` parameter sets the lifetime of the
    -        cookie in the browser, but is independent of the ``max_age_days``
    -        parameter to `get_secure_cookie`.
    -
    -        Secure cookies may contain arbitrary byte values, not just unicode
    -        strings (unlike regular cookies).
    -
    -        Similar to `set_cookie`, the effect of this method will not be
    -        seen until the following request.
    -
    -        .. versionchanged:: 3.2.1
    -
    -           Added the ``version`` argument.  Introduced cookie version 2
    -           and made it the default.
    -        """
    -        self.set_cookie(name, self.create_signed_value(name, value,
    -                                                       version=version),
    -                        expires_days=expires_days, **kwargs)
    -
    -    def create_signed_value(self, name, value, version=None):
    -        """Signs and timestamps a string so it cannot be forged.
    -
    -        Normally used via set_secure_cookie, but provided as a separate
    -        method for non-cookie uses.  To decode a value not stored
    -        as a cookie use the optional value argument to get_secure_cookie.
    -
    -        .. versionchanged:: 3.2.1
    -
    -           Added the ``version`` argument.  Introduced cookie version 2
    -           and made it the default.
    -        """
    -        self.require_setting("cookie_secret", "secure cookies")
    -        secret = self.application.settings["cookie_secret"]
    -        key_version = None
    -        if isinstance(secret, dict):
    -            if self.application.settings.get("key_version") is None:
    -                raise Exception("key_version setting must be used for secret_key dicts")
    -            key_version = self.application.settings["key_version"]
    -
    -        return create_signed_value(secret, name, value, version=version,
    -                                   key_version=key_version)
    -
    -    def get_secure_cookie(self, name, value=None, max_age_days=31,
    -                          min_version=None):
    -        """Returns the given signed cookie if it validates, or None.
    -
    -        The decoded cookie value is returned as a byte string (unlike
    -        `get_cookie`).
    -
    -        Similar to `get_cookie`, this method only returns cookies that
    -        were present in the request. It does not see outgoing cookies set by
    -        `set_secure_cookie` in this handler.
    -
    -        .. versionchanged:: 3.2.1
    -
    -           Added the ``min_version`` argument.  Introduced cookie version 2;
    -           both versions 1 and 2 are accepted by default.
    -        """
    -        self.require_setting("cookie_secret", "secure cookies")
    -        if value is None:
    -            value = self.get_cookie(name)
    -        return decode_signed_value(self.application.settings["cookie_secret"],
    -                                   name, value, max_age_days=max_age_days,
    -                                   min_version=min_version)
    -
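    -    # A sketch of the signed-cookie round trip; it assumes a
    -    # ``cookie_secret`` is configured in the Application settings and the
    -    # cookie name is illustrative:
    -    #
    -    #     self.set_secure_cookie("user", "alice")
    -    #     # ...on a later request...
    -    #     user = self.get_secure_cookie("user")  # b"alice", or None if invalid
    -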
    -    def get_secure_cookie_key_version(self, name, value=None):
    -        """Returns the signing key version of the secure cookie.
    -
    -        The version is returned as int.
    -        """
    -        self.require_setting("cookie_secret", "secure cookies")
    -        if value is None:
    -            value = self.get_cookie(name)
    -        return get_signature_key_version(value)
    -
    -    def redirect(self, url, permanent=False, status=None):
    -        """Sends a redirect to the given (optionally relative) URL.
    -
    -        If the ``status`` argument is specified, that value is used as the
    -        HTTP status code; otherwise either 301 (permanent) or 302
    -        (temporary) is chosen based on the ``permanent`` argument.
    -        The default is 302 (temporary).
    -        """
    -        if self._headers_written:
    -            raise Exception("Cannot redirect after headers have been written")
    -        if status is None:
    -            status = 301 if permanent else 302
    -        else:
    -            assert isinstance(status, int) and 300 <= status <= 399
    -        self.set_status(status)
    -        self.set_header("Location", utf8(url))
    -        self.finish()
    -
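    -    # A sketch of how redirect() chooses the status code (URLs are
    -    # illustrative):
    -    #
    -    #     self.redirect("/login")                     # 302 Found
    -    #     self.redirect("/new-home", permanent=True)  # 301 Moved Permanently
    -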
    -    def write(self, chunk):
    -        """Writes the given chunk to the output buffer.
    -
    -        To write the output to the network, use the flush() method below.
    -
    -        If the given chunk is a dictionary, we write it as JSON and set
    -        the Content-Type of the response to be ``application/json``.
    -        (if you want to send JSON as a different ``Content-Type``, call
    -        set_header *after* calling write()).
    -
    -        Note that lists are not converted to JSON because of a potential
    -        cross-site security vulnerability.  All JSON output should be
    -        wrapped in a dictionary.  More details at
    -        http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
    -        https://github.com/facebook/tornado/issues/1009
    -        """
    -        if self._finished:
    -            raise RuntimeError("Cannot write() after finish()")
    -        if not isinstance(chunk, (bytes, unicode_type, dict)):
    -            message = "write() only accepts bytes, unicode, and dict objects"
    -            if isinstance(chunk, list):
    -                message += ". Lists not accepted for security reasons; see " + \
    -                    "http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"
    -            raise TypeError(message)
    -        if isinstance(chunk, dict):
    -            chunk = escape.json_encode(chunk)
    -            self.set_header("Content-Type", "application/json; charset=UTF-8")
    -        chunk = utf8(chunk)
    -        self._write_buffer.append(chunk)
    -
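    -    # A sketch of write() with a dict, which is encoded as JSON and sets
    -    # the Content-Type header (the payload is illustrative):
    -    #
    -    #     self.write({"status": "ok", "items": [1, 2, 3]})
    -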
    -    def render(self, template_name, **kwargs):
    -        """Renders the template with the given arguments as the response.
    -
    -        ``render()`` calls ``finish()``, so no other output methods can be called
    -        after it.
    -
    -        Returns a `.Future` with the same semantics as the one returned by `finish`.
    -        Awaiting this `.Future` is optional.
    -
    -        .. versionchanged:: 5.1
    -
    -           Now returns a `.Future` instead of ``None``.
    -        """
    -        if self._finished:
    -            raise RuntimeError("Cannot render() after finish()")
    -        html = self.render_string(template_name, **kwargs)
    -
    -        # Insert the additional JS and CSS added by the modules on the page
    -        js_embed = []
    -        js_files = []
    -        css_embed = []
    -        css_files = []
    -        html_heads = []
    -        html_bodies = []
    -        for module in getattr(self, "_active_modules", {}).values():
    -            embed_part = module.embedded_javascript()
    -            if embed_part:
    -                js_embed.append(utf8(embed_part))
    -            file_part = module.javascript_files()
    -            if file_part:
    -                if isinstance(file_part, (unicode_type, bytes)):
    -                    js_files.append(file_part)
    -                else:
    -                    js_files.extend(file_part)
    -            embed_part = module.embedded_css()
    -            if embed_part:
    -                css_embed.append(utf8(embed_part))
    -            file_part = module.css_files()
    -            if file_part:
    -                if isinstance(file_part, (unicode_type, bytes)):
    -                    css_files.append(file_part)
    -                else:
    -                    css_files.extend(file_part)
    -            head_part = module.html_head()
    -            if head_part:
    -                html_heads.append(utf8(head_part))
    -            body_part = module.html_body()
    -            if body_part:
    -                html_bodies.append(utf8(body_part))
    -
    -        if js_files:
    -            # Maintain order of JavaScript files given by modules
    -            js = self.render_linked_js(js_files)
    -            sloc = html.rindex(b'</body>')
    -            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
    -        if js_embed:
    -            js = self.render_embed_js(js_embed)
    -            sloc = html.rindex(b'</body>')
    -            html = html[:sloc] + js + b'\n' + html[sloc:]
    -        if css_files:
    -            css = self.render_linked_css(css_files)
    -            hloc = html.index(b'</head>')
    -            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
    -        if css_embed:
    -            css = self.render_embed_css(css_embed)
    -            hloc = html.index(b'</head>')
    -            html = html[:hloc] + css + b'\n' + html[hloc:]
    -        if html_heads:
    -            hloc = html.index(b'</head>')
    -            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
    -        if html_bodies:
    -            hloc = html.index(b'</body>')
    -            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
    -        return self.finish(html)
    -
    -    def render_linked_js(self, js_files):
    -        """Default method used to render the final js links for the
    -        rendered webpage.
    -
    -        Override this method in a sub-classed controller to change the output.
    -        """
    -        paths = []
    -        unique_paths = set()
    -
    -        for path in js_files:
    -            if not is_absolute(path):
    -                path = self.static_url(path)
    -            if path not in unique_paths:
    -                paths.append(path)
    -                unique_paths.add(path)
    -
    -        return ''.join('<script src="' + escape.xhtml_escape(p) +
    -                       '" type="text/javascript"></script>'
    -                       for p in paths)
    -
    -    def render_embed_js(self, js_embed):
    -        """Default method used to render the final embedded js for the
    -        rendered webpage.
    -
    -        Override this method in a sub-classed controller to change the output.
    -        """
    -        return b'<script type="text/javascript">\n//<![CDATA[\n' + \
    -            b'\n'.join(js_embed) + b'\n//]]>\n</script>'
    -
    -    def render_linked_css(self, css_files):
    -        """Default method used to render the final css links for the
    -        rendered webpage.
    -
    -        Override this method in a sub-classed controller to change the output.
    -        """
    -        paths = []
    -        unique_paths = set()
    -
    -        for path in css_files:
    -            if not is_absolute(path):
    -                path = self.static_url(path)
    -            if path not in unique_paths:
    -                paths.append(path)
    -                unique_paths.add(path)
    -
    -        return ''.join('<link href="' + escape.xhtml_escape(p) + '" '
    -                       'type="text/css" rel="stylesheet"/>'
    -                       for p in paths)
    -
    -    def render_embed_css(self, css_embed):
    -        """Default method used to render the final embedded css for the
    -        rendered webpage.
    -
    -        Override this method in a sub-classed controller to change the output.
    -        """
    -        return b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
    -            b'\n</style>'
    -
    -    def render_string(self, template_name, **kwargs):
    -        """Generate the given template with the given arguments.
    -
    -        We return the generated byte string (in utf8). To generate and
    -        write a template as a response, use render() above.
    -        """
    -        # If no template_path is specified, use the path of the calling file
    -        template_path = self.get_template_path()
    -        if not template_path:
    -            frame = sys._getframe(0)
    -            web_file = frame.f_code.co_filename
    -            while frame.f_code.co_filename == web_file:
    -                frame = frame.f_back
    -            template_path = os.path.dirname(frame.f_code.co_filename)
    -        with RequestHandler._template_loader_lock:
    -            if template_path not in RequestHandler._template_loaders:
    -                loader = self.create_template_loader(template_path)
    -                RequestHandler._template_loaders[template_path] = loader
    -            else:
    -                loader = RequestHandler._template_loaders[template_path]
    -        t = loader.load(template_name)
    -        namespace = self.get_template_namespace()
    -        namespace.update(kwargs)
    -        return t.generate(**namespace)
    -
    -    def get_template_namespace(self):
    -        """Returns a dictionary to be used as the default template namespace.
    -
    -        May be overridden by subclasses to add or modify values.
    -
    -        The results of this method will be combined with additional
    -        defaults in the `tornado.template` module and keyword arguments
    -        to `render` or `render_string`.
    -        """
    -        namespace = dict(
    -            handler=self,
    -            request=self.request,
    -            current_user=self.current_user,
    -            locale=self.locale,
    -            _=self.locale.translate,
    -            pgettext=self.locale.pgettext,
    -            static_url=self.static_url,
    -            xsrf_form_html=self.xsrf_form_html,
    -            reverse_url=self.reverse_url
    -        )
    -        namespace.update(self.ui)
    -        return namespace
    -
    -    def create_template_loader(self, template_path):
    -        """Returns a new template loader for the given path.
    -
    -        May be overridden by subclasses.  By default returns a
    -        directory-based loader on the given path, using the
    -        ``autoescape`` and ``template_whitespace`` application
    -        settings.  If a ``template_loader`` application setting is
    -        supplied, uses that instead.
    -        """
    -        settings = self.application.settings
    -        if "template_loader" in settings:
    -            return settings["template_loader"]
    -        kwargs = {}
    -        if "autoescape" in settings:
    -            # autoescape=None means "no escaping", so we have to be sure
    -            # to only pass this kwarg if the user asked for it.
    -            kwargs["autoescape"] = settings["autoescape"]
    -        if "template_whitespace" in settings:
    -            kwargs["whitespace"] = settings["template_whitespace"]
    -        return template.Loader(template_path, **kwargs)
    -
    -    def flush(self, include_footers=False, callback=None):
    -        """Flushes the current output buffer to the network.
    -
    -        The ``callback`` argument, if given, can be used for flow control:
    -        it will be run when all flushed data has been written to the socket.
    -        Note that only one flush callback can be outstanding at a time;
    -        if another flush occurs before the previous flush's callback
    -        has been run, the previous callback will be discarded.
    -
    -        .. versionchanged:: 4.0
    -           Now returns a `.Future` if no callback is given.
    -
    -        .. deprecated:: 5.1
    -
    -           The ``callback`` argument is deprecated and will be removed in
    -           Tornado 6.0.
    -        """
    -        chunk = b"".join(self._write_buffer)
    -        self._write_buffer = []
    -        if not self._headers_written:
    -            self._headers_written = True
    -            for transform in self._transforms:
    -                self._status_code, self._headers, chunk = \
    -                    transform.transform_first_chunk(
    -                        self._status_code, self._headers,
    -                        chunk, include_footers)
    -            # Ignore the chunk and only write the headers for HEAD requests
    -            if self.request.method == "HEAD":
    -                chunk = None
    -
    -            # Finalize the cookie headers (which have been stored in a side
    -            # object so an outgoing cookie could be overwritten before it
    -            # is sent).
    -            if hasattr(self, "_new_cookie"):
    -                for cookie in self._new_cookie.values():
    -                    self.add_header("Set-Cookie", cookie.OutputString(None))
    -
    -            start_line = httputil.ResponseStartLine('',
    -                                                    self._status_code,
    -                                                    self._reason)
    -            return self.request.connection.write_headers(
    -                start_line, self._headers, chunk, callback=callback)
    -        else:
    -            for transform in self._transforms:
    -                chunk = transform.transform_chunk(chunk, include_footers)
    -            # Ignore the chunk and only write the headers for HEAD requests
    -            if self.request.method != "HEAD":
    -                return self.request.connection.write(chunk, callback=callback)
    -            else:
    -                future = Future()
    -                future.set_result(None)
    -                return future
    -
    -    def finish(self, chunk=None):
    -        """Finishes this response, ending the HTTP request.
    -
    -        Passing a ``chunk`` to ``finish()`` is equivalent to passing that
    -        chunk to ``write()`` and then calling ``finish()`` with no arguments.
    -
    -        Returns a `.Future` which may optionally be awaited to track the sending
    -        of the response to the client. This `.Future` resolves when all the response
    -        data has been sent, and raises an error if the connection is closed before all
    -        data can be sent.
    -
    -        .. versionchanged:: 5.1
    -
    -           Now returns a `.Future` instead of ``None``.
    -        """
    -        if self._finished:
    -            raise RuntimeError("finish() called twice")
    -
    -        if chunk is not None:
    -            self.write(chunk)
    -
    -        # Automatically support ETags and add the Content-Length header if
    -        # we have not flushed any content yet.
    -        if not self._headers_written:
    -            if (self._status_code == 200 and
    -                self.request.method in ("GET", "HEAD") and
    -                    "Etag" not in self._headers):
    -                self.set_etag_header()
    -                if self.check_etag_header():
    -                    self._write_buffer = []
    -                    self.set_status(304)
    -            if (self._status_code in (204, 304) or
    -                    (self._status_code >= 100 and self._status_code < 200)):
    -                assert not self._write_buffer, "Cannot send body with %s" % self._status_code
    -                self._clear_headers_for_304()
    -            elif "Content-Length" not in self._headers:
    -                content_length = sum(len(part) for part in self._write_buffer)
    -                self.set_header("Content-Length", content_length)
    -
    -        if hasattr(self.request, "connection"):
    -            # Now that the request is finished, clear the callback we
    -            # set on the HTTPConnection (which would otherwise prevent the
    -            # garbage collection of the RequestHandler when there
    -            # are keepalive connections)
    -            self.request.connection.set_close_callback(None)
    -
    -        future = self.flush(include_footers=True)
    -        self.request.connection.finish()
    -        self._log()
    -        self._finished = True
    -        self.on_finish()
    -        self._break_cycles()
    -        return future
    -
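    -    # A sketch of awaiting the Future returned by finish(), assuming a
    -    # Python 3 coroutine handler (the body is illustrative):
    -    #
    -    #     async def get(self):
    -    #         self.write("done")
    -    #         await self.finish()  # resolves once the response is sent
    -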
    -    def detach(self):
    -        """Take control of the underlying stream.
    -
    -        Returns the underlying `.IOStream` object and stops all
    -        further HTTP processing. Intended for implementing protocols
    -        like websockets that tunnel over an HTTP handshake.
    -
    -        This method is only supported when HTTP/1.1 is used.
    -
    -        .. versionadded:: 5.1
    -        """
    -        self._finished = True
    -        return self.request.connection.detach()
    -
    -    def _break_cycles(self):
    -        # Break up a reference cycle between this handler and the
    -        # _ui_module closures to allow for faster GC on CPython.
    -        self.ui = None
    -
    -    def send_error(self, status_code=500, **kwargs):
    -        """Sends the given HTTP error code to the browser.
    -
    -        If `flush()` has already been called, it is not possible to send
    -        an error, so this method will simply terminate the response.
    -        If output has been written but not yet flushed, it will be discarded
    -        and replaced with the error page.
    -
    -        Override `write_error()` to customize the error page that is returned.
    -        Additional keyword arguments are passed through to `write_error`.
    -        """
    -        if self._headers_written:
    -            gen_log.error("Cannot send error response after headers written")
    -            if not self._finished:
    -                # If we get an error between writing headers and finishing,
    -                # we are unlikely to be able to finish due to a
    -                # Content-Length mismatch. Try anyway to release the
    -                # socket.
    -                try:
    -                    self.finish()
    -                except Exception:
    -                    gen_log.error("Failed to flush partial response",
    -                                  exc_info=True)
    -            return
    -        self.clear()
    -
    -        reason = kwargs.get('reason')
    -        if 'exc_info' in kwargs:
    -            exception = kwargs['exc_info'][1]
    -            if isinstance(exception, HTTPError) and exception.reason:
    -                reason = exception.reason
    -        self.set_status(status_code, reason=reason)
    -        try:
    -            self.write_error(status_code, **kwargs)
    -        except Exception:
    -            app_log.error("Uncaught exception in write_error", exc_info=True)
    -        if not self._finished:
    -            self.finish()
    -
    -    def write_error(self, status_code, **kwargs):
    -        """Override to implement custom error pages.
    -
    -        ``write_error`` may call `write`, `render`, `set_header`, etc
    -        to produce output as usual.
    -
    -        If this error was caused by an uncaught exception (including
    -        HTTPError), an ``exc_info`` triple will be available as
    -        ``kwargs["exc_info"]``.  Note that this exception may not be
    -        the "current" exception for purposes of methods like
    -        ``sys.exc_info()`` or ``traceback.format_exc``.
    -        """
    -        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
    -            # in debug mode, try to send a traceback
    -            self.set_header('Content-Type', 'text/plain')
    -            for line in traceback.format_exception(*kwargs["exc_info"]):
    -                self.write(line)
    -            self.finish()
    -        else:
    -            self.finish("%(code)d: %(message)s"
    -                        "%(code)d: %(message)s" % {
    -                            "code": status_code,
    -                            "message": self._reason,
    -                        })
    -
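    -    # A sketch of a write_error() override for a JSON API (the payload
    -    # shape is illustrative):
    -    #
    -    #     def write_error(self, status_code, **kwargs):
    -    #         self.write({"code": status_code, "message": self._reason})
    -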
    -    @property
    -    def locale(self):
    -        """The locale for the current session.
    -
    -        Determined by either `get_user_locale`, which you can override to
    -        set the locale based on, e.g., a user preference stored in a
    -        database, or `get_browser_locale`, which uses the ``Accept-Language``
    -        header.
    -
    -        .. versionchanged:: 4.1
    -           Added a property setter.
    -        """
    -        if not hasattr(self, "_locale"):
    -            self._locale = self.get_user_locale()
    -            if not self._locale:
    -                self._locale = self.get_browser_locale()
    -                assert self._locale
    -        return self._locale
    -
    -    @locale.setter
    -    def locale(self, value):
    -        self._locale = value
    -
    -    def get_user_locale(self):
    -        """Override to determine the locale from the authenticated user.
    -
    -        If None is returned, we fall back to `get_browser_locale()`.
    -
    -        This method should return a `tornado.locale.Locale` object,
    -        most likely obtained via a call like ``tornado.locale.get("en")``
    -        """
    -        return None
    -
    -    def get_browser_locale(self, default="en_US"):
    -        """Determines the user's locale from ``Accept-Language`` header.
    -
    -        See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    -        """
    -        if "Accept-Language" in self.request.headers:
    -            languages = self.request.headers["Accept-Language"].split(",")
    -            locales = []
    -            for language in languages:
    -                parts = language.strip().split(";")
    -                if len(parts) > 1 and parts[1].startswith("q="):
    -                    try:
    -                        score = float(parts[1][2:])
    -                    except (ValueError, TypeError):
    -                        score = 0.0
    -                else:
    -                    score = 1.0
    -                locales.append((parts[0], score))
    -            if locales:
    -                locales.sort(key=lambda pair: pair[1], reverse=True)
    -                codes = [l[0] for l in locales]
    -                return locale.get(*codes)
    -        return locale.get(default)
    -
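    -    # A sketch of how an Accept-Language header is ranked by the code
    -    # above (the header value is illustrative):
    -    #
    -    #     Accept-Language: fr;q=0.6, en-US, en;q=0.8
    -    #     -> tried in order: en-US (1.0), en (0.8), fr (0.6)
    -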
    -    @property
    -    def current_user(self):
    -        """The authenticated user for this request.
    -
    -        This is set in one of two ways:
    -
    -        * A subclass may override `get_current_user()`, which will be called
    -          automatically the first time ``self.current_user`` is accessed.
    -          `get_current_user()` will only be called once per request,
    -          and is cached for future access::
    -
    -              def get_current_user(self):
    -                  user_cookie = self.get_secure_cookie("user")
    -                  if user_cookie:
    -                      return json.loads(user_cookie)
    -                  return None
    -
    -        * It may be set as a normal variable, typically from an overridden
    -          `prepare()`::
    -
    -              @gen.coroutine
    -              def prepare(self):
    -                  user_id_cookie = self.get_secure_cookie("user_id")
    -                  if user_id_cookie:
    -                      self.current_user = yield load_user(user_id_cookie)
    -
    -        Note that `prepare()` may be a coroutine while `get_current_user()`
    -        may not, so the latter form is necessary if loading the user requires
    -        asynchronous operations.
    -
    -        The user object may be any type of the application's choosing.
    -        """
    -        if not hasattr(self, "_current_user"):
    -            self._current_user = self.get_current_user()
    -        return self._current_user
    -
    -    @current_user.setter
    -    def current_user(self, value):
    -        self._current_user = value
    -
    -    def get_current_user(self):
    -        """Override to determine the current user from, e.g., a cookie.
    -
    -        This method may not be a coroutine.
    -        """
    -        return None
    -
    -    def get_login_url(self):
    -        """Override to customize the login URL based on the request.
    -
    -        By default, we use the ``login_url`` application setting.
    -        """
    -        self.require_setting("login_url", "@tornado.web.authenticated")
    -        return self.application.settings["login_url"]
    -
    -    def get_template_path(self):
    -        """Override to customize template path for each handler.
    -
    -        By default, we use the ``template_path`` application setting.
    -        Return None to load templates relative to the calling file.
    -        """
    -        return self.application.settings.get("template_path")
    -
    -    @property
    -    def xsrf_token(self):
    -        """The XSRF-prevention token for the current user/session.
    -
    -        To prevent cross-site request forgery, we set an '_xsrf' cookie
    -        and include the same '_xsrf' value as an argument with all POST
    -        requests. If the two do not match, we reject the form submission
    -        as a potential forgery.
    -
    -        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    -
    -        This property is of type `bytes`, but it contains only ASCII
    -        characters. If a character string is required, there is no
    -        need to base64-encode it; just decode the byte string as
    -        UTF-8.
    -
    -        .. versionchanged:: 3.2.2
    -           The xsrf token will now have a random mask applied in every
    -           request, which makes it safe to include the token in pages
    -           that are compressed.  See http://breachattack.com for more
    -           information on the issue fixed by this change.  Old (version 1)
    -           cookies will be converted to version 2 when this method is called
    -           unless the ``xsrf_cookie_version`` `Application` setting is
    -           set to 1.
    -
    -        .. versionchanged:: 4.3
    -           The ``xsrf_cookie_kwargs`` `Application` setting may be
    -           used to supply additional cookie options (which will be
    -           passed directly to `set_cookie`). For example,
    -           ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
    -           will set the ``secure`` and ``httponly`` flags on the
    -           ``_xsrf`` cookie.
    -        """
    -        if not hasattr(self, "_xsrf_token"):
    -            version, token, timestamp = self._get_raw_xsrf_token()
    -            output_version = self.settings.get("xsrf_cookie_version", 2)
    -            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
    -            if output_version == 1:
    -                self._xsrf_token = binascii.b2a_hex(token)
    -            elif output_version == 2:
    -                mask = os.urandom(4)
    -                self._xsrf_token = b"|".join([
    -                    b"2",
    -                    binascii.b2a_hex(mask),
    -                    binascii.b2a_hex(_websocket_mask(mask, token)),
    -                    utf8(str(int(timestamp)))])
    -            else:
    -                raise ValueError("unknown xsrf cookie version %d",
    -                                 output_version)
    -            if version is None:
    -                expires_days = 30 if self.current_user else None
    -                self.set_cookie("_xsrf", self._xsrf_token,
    -                                expires_days=expires_days,
    -                                **cookie_kwargs)
    -        return self._xsrf_token
    -
    -    def _get_raw_xsrf_token(self):
    -        """Read or generate the xsrf token in its raw form.
    -
    -        The raw_xsrf_token is a tuple containing:
    -
    -        * version: the version of the cookie from which this token was read,
    -          or None if we generated a new token in this request.
    -        * token: the raw token data; random (non-ascii) bytes.
    -        * timestamp: the time this token was generated (will not be accurate
    -          for version 1 cookies)
    -        """
    -        if not hasattr(self, '_raw_xsrf_token'):
    -            cookie = self.get_cookie("_xsrf")
    -            if cookie:
    -                version, token, timestamp = self._decode_xsrf_token(cookie)
    -            else:
    -                version, token, timestamp = None, None, None
    -            if token is None:
    -                version = None
    -                token = os.urandom(16)
    -                timestamp = time.time()
    -            self._raw_xsrf_token = (version, token, timestamp)
    -        return self._raw_xsrf_token
    -
    -    def _decode_xsrf_token(self, cookie):
    -        """Convert a cookie string into a the tuple form returned by
    -        _get_raw_xsrf_token.
    -        """
    -
    -        try:
    -            m = _signed_value_version_re.match(utf8(cookie))
    -
    -            if m:
    -                version = int(m.group(1))
    -                if version == 2:
    -                    _, mask, masked_token, timestamp = cookie.split("|")
    -
    -                    mask = binascii.a2b_hex(utf8(mask))
    -                    token = _websocket_mask(
    -                        mask, binascii.a2b_hex(utf8(masked_token)))
    -                    timestamp = int(timestamp)
    -                    return version, token, timestamp
    -                else:
    -                    # Treat unknown versions as not present instead of failing.
    -                    raise Exception("Unknown xsrf cookie version")
    -            else:
    -                version = 1
    -                try:
    -                    token = binascii.a2b_hex(utf8(cookie))
    -                except (binascii.Error, TypeError):
    -                    token = utf8(cookie)
    -                # We don't have a usable timestamp in older versions.
    -                timestamp = int(time.time())
    -                return (version, token, timestamp)
    -        except Exception:
    -            # Catch exceptions and return nothing instead of failing.
    -            gen_log.debug("Uncaught exception in _decode_xsrf_token",
    -                          exc_info=True)
    -            return None, None, None
    -
    -    def check_xsrf_cookie(self):
    -        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
    -
    -        To prevent cross-site request forgery, we set an ``_xsrf``
    -        cookie and include the same value as a non-cookie
    -        field with all ``POST`` requests. If the two do not match, we
    -        reject the form submission as a potential forgery.
    -
    -        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
    -        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
    -        (the latter is accepted for compatibility with Django).
    -
    -        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    -
    -        Prior to release 1.1.1, this check was ignored if the HTTP header
    -        ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    -        has been shown to be insecure and has been removed.  For more
    -        information please see
    -        http://www.djangoproject.com/weblog/2011/feb/08/security/
    -        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    -
    -        .. versionchanged:: 3.2.2
    -           Added support for cookie version 2.  Both versions 1 and 2 are
    -           supported.
    -        """
    -        token = (self.get_argument("_xsrf", None) or
    -                 self.request.headers.get("X-Xsrftoken") or
    -                 self.request.headers.get("X-Csrftoken"))
    -        if not token:
    -            raise HTTPError(403, "'_xsrf' argument missing from POST")
    -        _, token, _ = self._decode_xsrf_token(token)
    -        _, expected_token, _ = self._get_raw_xsrf_token()
    -        if not token:
    -            raise HTTPError(403, "'_xsrf' argument has invalid format")
    -        if not _time_independent_equals(utf8(token), utf8(expected_token)):
    -            raise HTTPError(403, "XSRF cookie does not match POST argument")
    -
    -    def xsrf_form_html(self):
    -        """An HTML ```` element to be included with all POST forms.
    -
    -        It defines the ``_xsrf`` input value, which we check on all POST
    -        requests to prevent cross-site request forgery. If you have set
    -        the ``xsrf_cookies`` application setting, you must include this
    -        HTML within all of your HTML forms.
    -
    -        In a template, this method should be called with ``{% module
    -        xsrf_form_html() %}``
    -
    -        See `check_xsrf_cookie()` above for more information.
    -        """
    -        return '<input type="hidden" name="_xsrf" value="' + \
    -            escape.xhtml_escape(self.xsrf_token) + '"/>'
    -
    -    def static_url(self, path, include_host=None, **kwargs):
    -        """Returns a static URL for the given relative static file path.
    -
    -        This method requires you set the ``static_path`` setting in your
    -        application (which specifies the root directory of your static
    -        files).
    -
    -        This method returns a versioned url (by default appending
    -        ``?v=<signature>``), which allows the static files to be
    -        cached indefinitely.  This can be disabled by passing
    -        ``include_version=False`` (in the default implementation;
    -        other static file implementations are not required to support
    -        this, but they may support other options).
    -
    -        By default this method returns URLs relative to the current
    -        host, but if ``include_host`` is true the URL returned will be
    -        absolute.  If this handler has an ``include_host`` attribute,
    -        that value will be used as the default for all `static_url`
    -        calls that do not pass ``include_host`` as a keyword argument.
    -
    -        """
    -        self.require_setting("static_path", "static_url")
    -        get_url = self.settings.get("static_handler_class",
    -                                    StaticFileHandler).make_static_url
    -
    -        if include_host is None:
    -            include_host = getattr(self, "include_host", False)
    -
    -        if include_host:
    -            base = self.request.protocol + "://" + self.request.host
    -        else:
    -            base = ""
    -
    -        return base + get_url(self.settings, path, **kwargs)
    -
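    -    # A sketch of static_url() output under the default StaticFileHandler
    -    # (the path and version hash are illustrative):
    -    #
    -    #     self.static_url("css/site.css")
    -    #     -> "/static/css/site.css?v=<hash>"
    -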
    -    def require_setting(self, name, feature="this feature"):
    -        """Raises an exception if the given app setting is not defined."""
    -        if not self.application.settings.get(name):
    -            raise Exception("You must define the '%s' setting in your "
    -                            "application to use %s" % (name, feature))
    -
    -    def reverse_url(self, name, *args):
    -        """Alias for `Application.reverse_url`."""
    -        return self.application.reverse_url(name, *args)
    -
    -    def compute_etag(self):
    -        """Computes the etag header to be used for this request.
    -
    -        By default uses a hash of the content written so far.
    -
    -        May be overridden to provide custom etag implementations,
    -        or may return None to disable tornado's default etag support.
    -        """
    -        hasher = hashlib.sha1()
    -        for part in self._write_buffer:
    -            hasher.update(part)
    -        return '"%s"' % hasher.hexdigest()
    -
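    -    # A sketch of a compute_etag() override that disables Tornado's
    -    # default ETag support for this handler:
    -    #
    -    #     def compute_etag(self):
    -    #         return None
    -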
    -    def set_etag_header(self):
    -        """Sets the response's Etag header using ``self.compute_etag()``.
    -
    -        Note: no header will be set if ``compute_etag()`` returns ``None``.
    -
    -        This method is called automatically when the request is finished.
    -        """
    -        etag = self.compute_etag()
    -        if etag is not None:
    -            self.set_header("Etag", etag)
    -
    -    def check_etag_header(self):
    -        """Checks the ``Etag`` header against requests's ``If-None-Match``.
    -
    -        Returns ``True`` if the request's Etag matches and a 304 should be
    -        returned. For example::
    -
    -            self.set_etag_header()
    -            if self.check_etag_header():
    -                self.set_status(304)
    -                return
    -
    -        This method is called automatically when the request is finished,
    -        but may be called earlier for applications that override
    -        `compute_etag` and want to do an early check for ``If-None-Match``
    -        before completing the request.  The ``Etag`` header should be set
    -        (perhaps with `set_etag_header`) before calling this method.
    -        """
    -        computed_etag = utf8(self._headers.get("Etag", ""))
    -        # Find all weak and strong etag values from If-None-Match header
    -        # because RFC 7232 allows multiple etag values in a single header.
    -        etags = re.findall(
    -            br'\*|(?:W/)?"[^"]*"',
    -            utf8(self.request.headers.get("If-None-Match", ""))
    -        )
    -        if not computed_etag or not etags:
    -            return False
    -
    -        match = False
    -        if etags[0] == b'*':
    -            match = True
    -        else:
    -            # Use a weak comparison when comparing entity-tags.
    -            def val(x):
    -                return x[2:] if x.startswith(b'W/') else x
    -
    -            for etag in etags:
    -                if val(etag) == val(computed_etag):
    -                    match = True
    -                    break
    -        return match
    -
    -    def _stack_context_handle_exception(self, type, value, traceback):
    -        try:
    -            # For historical reasons _handle_request_exception only takes
    -            # the exception value instead of the full triple,
    -            # so re-raise the exception to ensure that it's in
    -            # sys.exc_info()
    -            raise_exc_info((type, value, traceback))
    -        except Exception:
    -            self._handle_request_exception(value)
    -        return True
    -
    -    @gen.coroutine
    -    def _execute(self, transforms, *args, **kwargs):
    -        """Executes this request with the given output transforms."""
    -        self._transforms = transforms
    -        try:
    -            if self.request.method not in self.SUPPORTED_METHODS:
    -                raise HTTPError(405)
    -            self.path_args = [self.decode_argument(arg) for arg in args]
    -            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
    -                                    for (k, v) in kwargs.items())
    -            # If XSRF cookies are turned on, reject form submissions without
    -            # the proper cookie
    -            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
    -                    self.application.settings.get("xsrf_cookies"):
    -                self.check_xsrf_cookie()
    -
    -            result = self.prepare()
    -            if result is not None:
    -                result = yield result
    -            if self._prepared_future is not None:
    -                # Tell the Application we've finished with prepare()
    -                # and are ready for the body to arrive.
    -                future_set_result_unless_cancelled(self._prepared_future, None)
    -            if self._finished:
    -                return
    -
    -            if _has_stream_request_body(self.__class__):
    -                # In streaming mode request.body is a Future that signals
    -                # the body has been completely received.  The Future has no
    -                # result; the data has been passed to self.data_received
    -                # instead.
    -                try:
    -                    yield self.request.body
    -                except iostream.StreamClosedError:
    -                    return
    -
    -            method = getattr(self, self.request.method.lower())
    -            result = method(*self.path_args, **self.path_kwargs)
    -            if result is not None:
    -                result = yield result
    -            if self._auto_finish and not self._finished:
    -                self.finish()
    -        except Exception as e:
    -            try:
    -                self._handle_request_exception(e)
    -            except Exception:
    -                app_log.error("Exception in exception handler", exc_info=True)
    -            finally:
    -                # Unset result to avoid circular references
    -                result = None
    -            if (self._prepared_future is not None and
    -                    not self._prepared_future.done()):
    -                # In case we failed before setting _prepared_future, do it
    -                # now (to unblock the HTTP server).  Note that this is not
    -                # in a finally block to avoid GC issues prior to Python 3.4.
    -                self._prepared_future.set_result(None)
    -
    -    def data_received(self, chunk):
    -        """Implement this method to handle streamed request data.
    -
    -        Requires the `.stream_request_body` decorator.
    -        """
    -        raise NotImplementedError()
    -
    -    def _log(self):
    -        """Logs the current request.
    -
    -        Sort of deprecated since this functionality was moved to the
    -        Application, but left in place for the benefit of existing apps
    -        that have overridden this method.
    -        """
    -        self.application.log_request(self)
    -
    -    def _request_summary(self):
    -        return "%s %s (%s)" % (self.request.method, self.request.uri,
    -                               self.request.remote_ip)
    -
    -    def _handle_request_exception(self, e):
    -        if isinstance(e, Finish):
    -            # Not an error; just finish the request without logging.
    -            if not self._finished:
    -                self.finish(*e.args)
    -            return
    -        try:
    -            self.log_exception(*sys.exc_info())
    -        except Exception:
    -            # An error here should still get a best-effort send_error()
    -            # to avoid leaking the connection.
    -            app_log.error("Error in exception logger", exc_info=True)
    -        if self._finished:
    -            # Extra errors after the request has been finished should
    -            # be logged, but there is no reason to continue to try and
    -            # send a response.
    -            return
    -        if isinstance(e, HTTPError):
    -            self.send_error(e.status_code, exc_info=sys.exc_info())
    -        else:
    -            self.send_error(500, exc_info=sys.exc_info())
    -
    -    def log_exception(self, typ, value, tb):
    -        """Override to customize logging of uncaught exceptions.
    -
    -        By default logs instances of `HTTPError` as warnings without
    -        stack traces (on the ``tornado.general`` logger), and all
    -        other exceptions as errors with stack traces (on the
    -        ``tornado.application`` logger).
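    -
    -        A minimal override sketch (``QuietHandler`` is a made-up name;
    -        it silences expected client errors and defers to the default
    -        behavior otherwise)::
    -
    -            class QuietHandler(RequestHandler):
    -                def log_exception(self, typ, value, tb):
    -                    if isinstance(value, HTTPError) and value.status_code < 500:
    -                        return
    -                    super(QuietHandler, self).log_exception(typ, value, tb)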
    -
    -        .. versionadded:: 3.1
    -        """
    -        if isinstance(value, HTTPError):
    -            if value.log_message:
    -                format = "%d %s: " + value.log_message
    -                args = ([value.status_code, self._request_summary()] +
    -                        list(value.args))
    -                gen_log.warning(format, *args)
    -        else:
    -            app_log.error("Uncaught exception %s\n%r", self._request_summary(),
    -                          self.request, exc_info=(typ, value, tb))
    -
    -    def _ui_module(self, name, module):
    -        def render(*args, **kwargs):
    -            if not hasattr(self, "_active_modules"):
    -                self._active_modules = {}
    -            if name not in self._active_modules:
    -                self._active_modules[name] = module(self)
    -            rendered = self._active_modules[name].render(*args, **kwargs)
    -            return rendered
    -        return render
    -
    -    def _ui_method(self, method):
    -        return lambda *args, **kwargs: method(self, *args, **kwargs)
    -
    -    def _clear_headers_for_304(self):
    -        # 304 responses should not contain entity headers (defined in
    -        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
    -        # not explicitly allowed by
    -        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
    -        headers = ["Allow", "Content-Encoding", "Content-Language",
    -                   "Content-Length", "Content-MD5", "Content-Range",
    -                   "Content-Type", "Last-Modified"]
    -        for h in headers:
    -            self.clear_header(h)
    -
    -
    -def asynchronous(method):
    -    """Wrap request handler methods with this if they are asynchronous.
    -
    -    This decorator is for callback-style asynchronous methods; for
    -    coroutines, use the ``@gen.coroutine`` decorator without
    -    ``@asynchronous``. (It is legal for legacy reasons to use the two
    -    decorators together provided ``@asynchronous`` is first, but
    -    ``@asynchronous`` will be ignored in this case)
    -
    -    This decorator should only be applied to the :ref:`HTTP verb
    -    methods <verbs>`; its behavior is undefined for any other method.
    -    This decorator does not *make* a method asynchronous; it tells
    -    the framework that the method *is* asynchronous.  For this decorator
    -    to be useful the method must (at least sometimes) do something
    -    asynchronous.
    -
    -    If this decorator is given, the response is not finished when the
    -    method returns. It is up to the request handler to call
    -    `self.finish() <RequestHandler.finish>` to finish the HTTP
    -    request. Without this decorator, the request is automatically
    -    finished when the ``get()`` or ``post()`` method returns. Example:
    -
    -    .. testcode::
    -
    -       class MyRequestHandler(RequestHandler):
    -           @asynchronous
    -           def get(self):
    -              http = httpclient.AsyncHTTPClient()
    -              http.fetch("http://friendfeed.com/", self._on_download)
    -
    -           def _on_download(self, response):
    -              self.write("Downloaded!")
    -              self.finish()
    -
    -    .. testoutput::
    -       :hide:
    -
    -    .. versionchanged:: 3.1
    -       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    -
    -    .. versionchanged:: 4.3 Returning anything but ``None`` or a
    -       yieldable object from a method decorated with ``@asynchronous``
    -       is an error. Such return values were previously ignored silently.
    -
    -    .. deprecated:: 5.1
    -
    -       This decorator is deprecated and will be removed in Tornado 6.0.
    -       Use coroutines instead.
    -    """
    -    warnings.warn("@asynchronous is deprecated, use coroutines instead",
    -                  DeprecationWarning)
    -    # Delay the IOLoop import because it's not available on app engine.
    -    from tornado.ioloop import IOLoop
    -
    -    @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    -        self._auto_finish = False
    -        with stack_context.ExceptionStackContext(
    -                self._stack_context_handle_exception, delay_warning=True):
    -            result = method(self, *args, **kwargs)
    -            if result is not None:
    -                result = gen.convert_yielded(result)
    -
    -                # If @asynchronous is used with @gen.coroutine, (but
    -                # not @gen.engine), we can automatically finish the
    -                # request when the future resolves.  Additionally,
    -                # the Future will swallow any exceptions so we need
    -                # to throw them back out to the stack context to finish
    -                # the request.
    -                def future_complete(f):
    -                    f.result()
    -                    if not self._finished:
    -                        self.finish()
    -                IOLoop.current().add_future(result, future_complete)
    -                # Once we have done this, hide the Future from our
    -                # caller (i.e. RequestHandler._when_complete), which
    -                # would otherwise set up its own callback and
    -                # exception handler (resulting in exceptions being
    -                # logged twice).
    -                return None
    -            return result
    -    return wrapper
    -
    -
    -def stream_request_body(cls):
    -    """Apply to `RequestHandler` subclasses to enable streaming body support.
    -
    -    This decorator implies the following changes:
    -
    -    * `.HTTPServerRequest.body` is undefined, and body arguments will not
    -      be included in `RequestHandler.get_argument`.
    -    * `RequestHandler.prepare` is called when the request headers have been
    -      read instead of after the entire body has been read.
    -    * The subclass must define a method ``data_received(self, data):``, which
    -      will be called zero or more times as data is available.  Note that
    -      if the request has an empty body, ``data_received`` may not be called.
    -    * ``prepare`` and ``data_received`` may return Futures (such as via
    -      ``@gen.coroutine``), in which case the next method will not be called
    -      until those futures have completed.
    -    * The regular HTTP method (``post``, ``put``, etc) will be called after
    -      the entire body has been read.
    -
    -    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
    -    for example usage.
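    -
    -    A minimal sketch of a streaming handler (``UploadHandler`` is a
    -    made-up name; it simply buffers the body in memory, which a real
    -    handler would usually avoid)::
    -
    -        @stream_request_body
    -        class UploadHandler(RequestHandler):
    -            def prepare(self):
    -                self.chunks = []
    -
    -            def data_received(self, chunk):
    -                self.chunks.append(chunk)
    -
    -            def put(self):
    -                self.write("received %d bytes" % len(b"".join(self.chunks)))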
    -    """  # noqa: E501
    -    if not issubclass(cls, RequestHandler):
    -        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    -    cls._stream_request_body = True
    -    return cls
    -
    -
    -def _has_stream_request_body(cls):
    -    if not issubclass(cls, RequestHandler):
    -        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    -    return getattr(cls, '_stream_request_body', False)
    -
    -
    -def removeslash(method):
    -    """Use this decorator to remove trailing slashes from the request path.
    -
    -    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    -    decorator. Your request handler mapping should use a regular expression
    -    like ``r'/foo/*'`` in conjunction with using the decorator.
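    -
    -    A short sketch of typical use (``FooHandler`` is illustrative)::
    -
    -        class FooHandler(RequestHandler):
    -            @removeslash
    -            def get(self):
    -                self.write("canonical path, no trailing slash")
    -
    -        app = Application([(r"/foo/*", FooHandler)])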
    -    """
    -    @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    -        if self.request.path.endswith("/"):
    -            if self.request.method in ("GET", "HEAD"):
    -                uri = self.request.path.rstrip("/")
    -                if uri:  # don't try to redirect '/' to ''
    -                    if self.request.query:
    -                        uri += "?" + self.request.query
    -                    self.redirect(uri, permanent=True)
    -                    return
    -            else:
    -                raise HTTPError(404)
    -        return method(self, *args, **kwargs)
    -    return wrapper
    -
    -
    -def addslash(method):
    -    """Use this decorator to add a missing trailing slash to the request path.
    -
    -    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    -    decorator. Your request handler mapping should use a regular expression
    -    like ``r'/foo/?'`` in conjunction with using the decorator.
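    -
    -    A short sketch of typical use (``BarHandler`` is illustrative)::
    -
    -        class BarHandler(RequestHandler):
    -            @addslash
    -            def get(self):
    -                self.write("canonical path, with trailing slash")
    -
    -        app = Application([(r"/bar/?", BarHandler)])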
    -    """
    -    @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    -        if not self.request.path.endswith("/"):
    -            if self.request.method in ("GET", "HEAD"):
    -                uri = self.request.path + "/"
    -                if self.request.query:
    -                    uri += "?" + self.request.query
    -                self.redirect(uri, permanent=True)
    -                return
    -            raise HTTPError(404)
    -        return method(self, *args, **kwargs)
    -    return wrapper
    -
    -
    -class _ApplicationRouter(ReversibleRuleRouter):
    -    """Routing implementation used internally by `Application`.
    -
    -    Provides a binding between `Application` and `RequestHandler`.
    -    This implementation extends `~.routing.ReversibleRuleRouter` in a couple of ways:
    -        * it allows `RequestHandler` subclasses to be used as a `~.routing.Rule` target, and
    -        * it allows a list/tuple of rules to be used as a `~.routing.Rule` target;
    -          the ``process_rule`` implementation will substitute such a list with an
    -          appropriate `_ApplicationRouter` instance.
    -    """
    -
    -    def __init__(self, application, rules=None):
    -        assert isinstance(application, Application)
    -        self.application = application
    -        super(_ApplicationRouter, self).__init__(rules)
    -
    -    def process_rule(self, rule):
    -        rule = super(_ApplicationRouter, self).process_rule(rule)
    -
    -        if isinstance(rule.target, (list, tuple)):
    -            rule.target = _ApplicationRouter(self.application, rule.target)
    -
    -        return rule
    -
    -    def get_target_delegate(self, target, request, **target_params):
    -        if isclass(target) and issubclass(target, RequestHandler):
    -            return self.application.get_handler_delegate(request, target, **target_params)
    -
    -        return super(_ApplicationRouter, self).get_target_delegate(target, request, **target_params)
    -
    -
    -class Application(ReversibleRouter):
    -    """A collection of request handlers that make up a web application.
    -
    -    Instances of this class are callable and can be passed directly to
    -    HTTPServer to serve the application::
    -
    -        application = web.Application([
    -            (r"/", MainPageHandler),
    -        ])
    -        http_server = httpserver.HTTPServer(application)
    -        http_server.listen(8080)
    -        ioloop.IOLoop.current().start()
    -
    -    The constructor for this class takes in a list of `~.routing.Rule`
    -    objects or tuples of values corresponding to the arguments of
    -    `~.routing.Rule` constructor: ``(matcher, target, [target_kwargs], [name])``,
    -    the values in square brackets being optional. The default matcher is
    -    `~.routing.PathMatches`, so ``(regexp, target)`` tuples can also be used
    -    instead of ``(PathMatches(regexp), target)``.
    -
    -    A common routing target is a `RequestHandler` subclass, but you can also
    -    use lists of rules as a target, which create a nested routing configuration::
    -
    -        application = web.Application([
    -            (HostMatches("example.com"), [
    -                (r"/", MainPageHandler),
    -                (r"/feed", FeedHandler),
    -            ]),
    -        ])
    -
    -    In addition to this you can use nested `~.routing.Router` instances,
    -    `~.httputil.HTTPMessageDelegate` subclasses and callables as routing targets
    -    (see `~.routing` module docs for more information).
    -
    -    When we receive requests, we iterate over the list in order and
    -    instantiate an instance of the first request class whose regexp
    -    matches the request path. The request class can be specified as
    -    either a class object or a (fully-qualified) name.
    -
    -    A dictionary may be passed as the third element (``target_kwargs``)
    -    of the tuple, which will be used as keyword arguments to the handler's
    -    constructor and `~RequestHandler.initialize` method. This pattern
    -    is used for the `StaticFileHandler` in this example (note that a
    -    `StaticFileHandler` can be installed automatically with the
    -    static_path setting described below)::
    -
    -        application = web.Application([
    -            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    -        ])
    -
    -    We support virtual hosts with the `add_handlers` method, which takes in
    -    a host regular expression as the first argument::
    -
    -        application.add_handlers(r"www\.myhost\.com", [
    -            (r"/article/([0-9]+)", ArticleHandler),
    -        ])
    -
    -    If there's no match for the current request's host, then the ``default_host``
    -    parameter value is matched against host regular expressions.
    -
    -
    -    .. warning::
    -
    -       Applications that do not use TLS may be vulnerable to :ref:`DNS
    -       rebinding <dnsrebinding>` attacks. This attack is especially
    -       relevant to applications that only listen on ``127.0.0.1`` or
    -       other private networks. Appropriate host patterns must be used
    -       (instead of the default of ``r'.*'``) to prevent this risk. The
    -       ``default_host`` argument must not be used in applications that
    -       may be vulnerable to DNS rebinding.
    -
    -    You can serve static files by sending the ``static_path`` setting
    -    as a keyword argument. We will serve those files from the
    -    ``/static/`` URI (this is configurable with the
    -    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    -    and ``/robots.txt`` from the same directory.  A custom subclass of
    -    `StaticFileHandler` can be specified with the
    -    ``static_handler_class`` setting.
    -
    -    .. versionchanged:: 4.5
    -       Integration with the new `tornado.routing` module.
    -
    -    """
    -    def __init__(self, handlers=None, default_host=None, transforms=None,
    -                 **settings):
    -        if transforms is None:
    -            self.transforms = []
    -            if settings.get("compress_response") or settings.get("gzip"):
    -                self.transforms.append(GZipContentEncoding)
    -        else:
    -            self.transforms = transforms
    -        self.default_host = default_host
    -        self.settings = settings
    -        self.ui_modules = {'linkify': _linkify,
    -                           'xsrf_form_html': _xsrf_form_html,
    -                           'Template': TemplateModule,
    -                           }
    -        self.ui_methods = {}
    -        self._load_ui_modules(settings.get("ui_modules", {}))
    -        self._load_ui_methods(settings.get("ui_methods", {}))
    -        if self.settings.get("static_path"):
    -            path = self.settings["static_path"]
    -            handlers = list(handlers or [])
    -            static_url_prefix = settings.get("static_url_prefix",
    -                                             "/static/")
    -            static_handler_class = settings.get("static_handler_class",
    -                                                StaticFileHandler)
    -            static_handler_args = settings.get("static_handler_args", {})
    -            static_handler_args['path'] = path
    -            for pattern in [re.escape(static_url_prefix) + r"(.*)",
    -                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
    -                handlers.insert(0, (pattern, static_handler_class,
    -                                    static_handler_args))
    -
    -        if self.settings.get('debug'):
    -            self.settings.setdefault('autoreload', True)
    -            self.settings.setdefault('compiled_template_cache', False)
    -            self.settings.setdefault('static_hash_cache', False)
    -            self.settings.setdefault('serve_traceback', True)
    -
    -        self.wildcard_router = _ApplicationRouter(self, handlers)
    -        self.default_router = _ApplicationRouter(self, [
    -            Rule(AnyMatches(), self.wildcard_router)
    -        ])
    -
    -        # Automatically reload modified modules
    -        if self.settings.get('autoreload'):
    -            from tornado import autoreload
    -            autoreload.start()
    -
    -    def listen(self, port, address="", **kwargs):
    -        """Starts an HTTP server for this application on the given port.
    -
    -        This is a convenience alias for creating an `.HTTPServer`
    -        object and calling its listen method.  Keyword arguments not
    -        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
    -        `.HTTPServer` constructor.  For advanced uses
    -        (e.g. multi-process mode), do not use this method; create an
    -        `.HTTPServer` and call its
    -        `.TCPServer.bind`/`.TCPServer.start` methods directly.
    -
    -        Note that after calling this method you still need to call
    -        ``IOLoop.current().start()`` to start the server.
    -
    -        Returns the `.HTTPServer` object.
    -
    -        .. versionchanged:: 4.3
    -           Now returns the `.HTTPServer` object.
    -        """
    -        # import is here rather than top level because HTTPServer
    -        # is not importable on appengine
    -        from tornado.httpserver import HTTPServer
    -        server = HTTPServer(self, **kwargs)
    -        server.listen(port, address)
    -        return server
    -
    -    def add_handlers(self, host_pattern, host_handlers):
    -        """Appends the given handlers to our handler list.
    -
    -        Host patterns are processed sequentially in the order they were
    -        added. All matching patterns will be considered.
    -        """
    -        host_matcher = HostMatches(host_pattern)
    -        rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers))
    -
    -        self.default_router.rules.insert(-1, rule)
    -
    -        if self.default_host is not None:
    -            self.wildcard_router.add_rules([(
    -                DefaultHostMatches(self, host_matcher.host_pattern),
    -                host_handlers
    -            )])
    -
    -    def add_transform(self, transform_class):
    -        self.transforms.append(transform_class)
    -
    -    def _load_ui_methods(self, methods):
    -        if isinstance(methods, types.ModuleType):
    -            self._load_ui_methods(dict((n, getattr(methods, n))
    -                                       for n in dir(methods)))
    -        elif isinstance(methods, list):
    -            for m in methods:
    -                self._load_ui_methods(m)
    -        else:
    -            for name, fn in methods.items():
    -                if not name.startswith("_") and hasattr(fn, "__call__") \
    -                        and name[0].lower() == name[0]:
    -                    self.ui_methods[name] = fn
    -
    -    def _load_ui_modules(self, modules):
    -        if isinstance(modules, types.ModuleType):
    -            self._load_ui_modules(dict((n, getattr(modules, n))
    -                                       for n in dir(modules)))
    -        elif isinstance(modules, list):
    -            for m in modules:
    -                self._load_ui_modules(m)
    -        else:
    -            assert isinstance(modules, dict)
    -            for name, cls in modules.items():
    -                try:
    -                    if issubclass(cls, UIModule):
    -                        self.ui_modules[name] = cls
    -                except TypeError:
    -                    pass
    -
    -    def __call__(self, request):
    -        # Legacy HTTPServer interface
    -        dispatcher = self.find_handler(request)
    -        return dispatcher.execute()
    -
    -    def find_handler(self, request, **kwargs):
    -        route = self.default_router.find_handler(request)
    -        if route is not None:
    -            return route
    -
    -        if self.settings.get('default_handler_class'):
    -            return self.get_handler_delegate(
    -                request,
    -                self.settings['default_handler_class'],
    -                self.settings.get('default_handler_args', {}))
    -
    -        return self.get_handler_delegate(
    -            request, ErrorHandler, {'status_code': 404})
    -
    -    def get_handler_delegate(self, request, target_class, target_kwargs=None,
    -                             path_args=None, path_kwargs=None):
    -        """Returns `~.httputil.HTTPMessageDelegate` that can serve a request
    -        for application and `RequestHandler` subclass.
    -
    -        :arg httputil.HTTPServerRequest request: current HTTP request.
    -        :arg RequestHandler target_class: a `RequestHandler` class.
    -        :arg dict target_kwargs: keyword arguments for ``target_class`` constructor.
    -        :arg list path_args: positional arguments for ``target_class`` HTTP method that
    -            will be executed while handling a request (``get``, ``post`` or any other).
    -        :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method.
    -        """
    -        return _HandlerDelegate(
    -            self, request, target_class, target_kwargs, path_args, path_kwargs)
    -
    -    def reverse_url(self, name, *args):
    -        """Returns a URL path for handler named ``name``
    -
    -        The handler must be added to the application as a named `URLSpec`.
    -
    -        Args will be substituted for capturing groups in the `URLSpec` regex.
    -        They will be converted to strings if necessary, encoded as utf8,
    -        and url-escaped.
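    -
    -        A small sketch, assuming a handler registered with
    -        `tornado.web.url` under the name ``"article"``::
    -
    -            app = Application([
    -                url(r"/article/([0-9]+)", ArticleHandler, name="article"),
    -            ])
    -            app.reverse_url("article", 42)  # -> "/article/42"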
    -        """
    -        reversed_url = self.default_router.reverse_url(name, *args)
    -        if reversed_url is not None:
    -            return reversed_url
    -
    -        raise KeyError("%s not found in named urls" % name)
    -
    -    def log_request(self, handler):
    -        """Writes a completed HTTP request to the logs.
    -
    -        By default writes to the python root logger.  To change
    -        this behavior either subclass Application and override this method,
    -        or pass a function in the application settings dictionary as
    -        ``log_function``.
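    -
    -        A sketch of a hypothetical ``log_function`` that only records
    -        server errors (``access_log`` is ``tornado.log.access_log``;
    -        ``handlers`` stands in for your rule list)::
    -
    -            def quiet_log(handler):
    -                if handler.get_status() >= 500:
    -                    access_log.error("%d %s", handler.get_status(),
    -                                     handler._request_summary())
    -
    -            app = Application(handlers, log_function=quiet_log)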
    -        """
    -        if "log_function" in self.settings:
    -            self.settings["log_function"](handler)
    -            return
    -        if handler.get_status() < 400:
    -            log_method = access_log.info
    -        elif handler.get_status() < 500:
    -            log_method = access_log.warning
    -        else:
    -            log_method = access_log.error
    -        request_time = 1000.0 * handler.request.request_time()
    -        log_method("%d %s %.2fms", handler.get_status(),
    -                   handler._request_summary(), request_time)
    -
    -
    -class _HandlerDelegate(httputil.HTTPMessageDelegate):
    -    def __init__(self, application, request, handler_class, handler_kwargs,
    -                 path_args, path_kwargs):
    -        self.application = application
    -        self.connection = request.connection
    -        self.request = request
    -        self.handler_class = handler_class
    -        self.handler_kwargs = handler_kwargs or {}
    -        self.path_args = path_args or []
    -        self.path_kwargs = path_kwargs or {}
    -        self.chunks = []
    -        self.stream_request_body = _has_stream_request_body(self.handler_class)
    -
    -    def headers_received(self, start_line, headers):
    -        if self.stream_request_body:
    -            self.request.body = Future()
    -            return self.execute()
    -
    -    def data_received(self, data):
    -        if self.stream_request_body:
    -            return self.handler.data_received(data)
    -        else:
    -            self.chunks.append(data)
    -
    -    def finish(self):
    -        if self.stream_request_body:
    -            future_set_result_unless_cancelled(self.request.body, None)
    -        else:
    -            self.request.body = b''.join(self.chunks)
    -            self.request._parse_body()
    -            self.execute()
    -
    -    def on_connection_close(self):
    -        if self.stream_request_body:
    -            self.handler.on_connection_close()
    -        else:
    -            self.chunks = None
    -
    -    def execute(self):
    -        # If template cache is disabled (usually in the debug mode),
    -        # re-compile templates and reload static files on every
    -        # request so you don't need to restart to see changes
    -        if not self.application.settings.get("compiled_template_cache", True):
    -            with RequestHandler._template_loader_lock:
    -                for loader in RequestHandler._template_loaders.values():
    -                    loader.reset()
    -        if not self.application.settings.get('static_hash_cache', True):
    -            StaticFileHandler.reset()
    -
    -        self.handler = self.handler_class(self.application, self.request,
    -                                          **self.handler_kwargs)
    -        transforms = [t(self.request) for t in self.application.transforms]
    -
    -        if self.stream_request_body:
    -            self.handler._prepared_future = Future()
    -        # Note that if an exception escapes handler._execute it will be
    -        # trapped in the Future it returns (which we are ignoring here,
    -        # leaving it to be logged when the Future is GC'd).
    -        # However, that shouldn't happen because _execute has a blanket
    -        # except handler, and we cannot easily access the IOLoop here to
    -        # call add_future (because of the requirement to remain compatible
    -        # with WSGI)
    -        self.handler._execute(transforms, *self.path_args,
    -                              **self.path_kwargs)
    -        # If we are streaming the request body, then execute() is finished
    -        # when the handler has prepared to receive the body.  If not,
    -        # it doesn't matter when execute() finishes (so we return None)
    -        return self.handler._prepared_future
    -
    -
    -class HTTPError(Exception):
    -    """An exception that will turn into an HTTP error response.
    -
    -    Raising an `HTTPError` is a convenient alternative to calling
    -    `RequestHandler.send_error` since it automatically ends the
    -    current function.
    -
    -    To customize the response sent with an `HTTPError`, override
    -    `RequestHandler.write_error`.
    -
    -    :arg int status_code: HTTP status code.  Must be listed in
    -        `httplib.responses <http://docs.python.org/2/library/httplib.html#httplib.responses>` unless the ``reason``
    -        keyword argument is given.
    -    :arg str log_message: Message to be written to the log for this error
    -        (will not be shown to the user unless the `Application` is in debug
    -        mode).  May contain ``%s``-style placeholders, which will be filled
    -        in with remaining positional parameters.
    -    :arg str reason: Keyword-only argument.  The HTTP "reason" phrase
    -        to pass in the status line along with ``status_code``.  Normally
    -        determined automatically from ``status_code``, but can be set
    -        explicitly when a non-standard numeric code is used.
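    -
    -    A small usage sketch; the ``%s`` placeholder is filled from the
    -    extra positional argument and appears only in the server log::
    -
    -        if not self.get_argument("token", None):
    -            raise HTTPError(403, "missing token from %s",
    -                            self.request.remote_ip)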
    -    """
    -    def __init__(self, status_code=500, log_message=None, *args, **kwargs):
    -        self.status_code = status_code
    -        self.log_message = log_message
    -        self.args = args
    -        self.reason = kwargs.get('reason', None)
    -        if log_message and not args:
    -            self.log_message = log_message.replace('%', '%%')
    -
    -    def __str__(self):
    -        message = "HTTP %d: %s" % (
    -            self.status_code,
    -            self.reason or httputil.responses.get(self.status_code, 'Unknown'))
    -        if self.log_message:
    -            return message + " (" + (self.log_message % self.args) + ")"
    -        else:
    -            return message
    -
    -
    -class Finish(Exception):
    -    """An exception that ends the request without producing an error response.
    -
    -    When `Finish` is raised in a `RequestHandler`, the request will
    -    end (calling `RequestHandler.finish` if it hasn't already been
    -    called), but the error-handling methods (including
    -    `RequestHandler.write_error`) will not be called.
    -
    -    If `Finish()` was created with no arguments, the pending response
    -    will be sent as-is. If `Finish()` was given an argument, that
    -    argument will be passed to `RequestHandler.finish()`.
    -
    -    This can be a more convenient way to implement custom error pages
    -    than overriding ``write_error`` (especially in library code)::
    -
    -        if self.current_user is None:
    -            self.set_status(401)
    -            self.set_header('WWW-Authenticate', 'Basic realm="something"')
    -            raise Finish()
    -
    -    .. versionchanged:: 4.3
    -       Arguments passed to ``Finish()`` will be passed on to
    -       `RequestHandler.finish`.
    -    """
    -    pass
    -
    -
    -class MissingArgumentError(HTTPError):
    -    """Exception raised by `RequestHandler.get_argument`.
    -
    -    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    -    code will be used instead of 500 (and a stack trace will not be logged).
    -
    -    .. versionadded:: 3.1
    -    """
    -    def __init__(self, arg_name):
    -        super(MissingArgumentError, self).__init__(
    -            400, 'Missing argument %s' % arg_name)
    -        self.arg_name = arg_name
    -
    -
    -class ErrorHandler(RequestHandler):
    -    """Generates an error response with ``status_code`` for all requests."""
    -    def initialize(self, status_code):
    -        self.set_status(status_code)
    -
    -    def prepare(self):
    -        raise HTTPError(self._status_code)
    -
    -    def check_xsrf_cookie(self):
    -        # POSTs to an ErrorHandler don't actually have side effects,
    -        # so we don't need to check the xsrf token.  This allows POSTs
    -        # to the wrong url to return a 404 instead of 403.
    -        pass
    -
    -
    -class RedirectHandler(RequestHandler):
    -    """Redirects the client to the given URL for all GET requests.
    -
    -    You should provide the keyword argument ``url`` to the handler, e.g.::
    -
    -        application = web.Application([
    -            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
    -        ])
    -
    -    `RedirectHandler` supports regular expression substitutions. E.g., to
    -    swap the first and second parts of a path while preserving the remainder::
    -
    -        application = web.Application([
    -            (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}),
    -        ])
    -
    -    The final URL is formatted with `str.format` and the substrings that match
    -    the capturing groups. In the above example, a request to "/a/b/c" would be
    -    formatted like::
    -
    -        str.format("/{1}/{0}/{2}", "a", "b", "c")  # -> "/b/a/c"
    -
    -    Use Python's :ref:`format string syntax <formatstrings>` to customize how
    -    values are substituted.
    -
    -    .. versionchanged:: 4.5
    -       Added support for substitutions into the destination URL.
    -
    -    .. versionchanged:: 5.0
    -       If any query arguments are present, they will be copied to the
    -       destination URL.
    -    """
    -    def initialize(self, url, permanent=True):
    -        self._url = url
    -        self._permanent = permanent
    -
    -    def get(self, *args):
    -        to_url = self._url.format(*args)
    -        if self.request.query_arguments:
    -            to_url = httputil.url_concat(
    -                to_url, list(httputil.qs_to_qsl(self.request.query_arguments)))
    -        self.redirect(to_url, permanent=self._permanent)
    -
    -
    -class StaticFileHandler(RequestHandler):
    -    """A simple handler that can serve static content from a directory.
    -
    -    A `StaticFileHandler` is configured automatically if you pass the
    -    ``static_path`` keyword argument to `Application`.  This handler
    -    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    -    and ``static_handler_args`` settings.
    -
    -    To map an additional path to this handler for a static data directory
    -    you would add a line to your application like::
    -
    -        application = web.Application([
    -            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    -        ])
    -
    -    The handler constructor requires a ``path`` argument, which specifies the
    -    local root directory of the content to be served.
    -
    -    Note that a capture group in the regex is required to parse the value for
    -    the ``path`` argument to the get() method (different from the constructor
    -    argument above); see `URLSpec` for details.
    -
    -    To serve a file like ``index.html`` automatically when a directory is
    -    requested, set ``static_handler_args=dict(default_filename="index.html")``
    -    in your application settings, or add ``default_filename`` as an initializer
    -    argument for your ``StaticFileHandler``.
    -
    -    To maximize the effectiveness of browser caching, this class supports
    -    versioned urls (by default using the argument ``?v=``).  If a version
    -    is given, we instruct the browser to cache this file indefinitely.
    -    `make_static_url` (also available as `RequestHandler.static_url`) can
    -    be used to construct a versioned url.
    -
    -    This handler is intended primarily for use in development and light-duty
    -    file serving; for heavy traffic it will be more efficient to use
    -    a dedicated static file server (such as nginx or Apache).  We support
    -    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    -    some browsers require this functionality to be present to seek in
    -    HTML5 audio or video).
    -
    -    **Subclassing notes**
    -
    -    This class is designed to be extensible by subclassing, but because
    -    of the way static urls are generated with class methods rather than
    -    instance methods, the inheritance patterns are somewhat unusual.
    -    Be sure to use the ``@classmethod`` decorator when overriding a
    -    class method.  Instance methods may use the attributes ``self.path``,
    -    ``self.absolute_path``, and ``self.modified``.
    -
    -    Subclasses should only override methods discussed in this section;
    -    overriding other methods is error-prone.  Overriding
    -    ``StaticFileHandler.get`` is particularly problematic due to the
    -    tight coupling with ``compute_etag`` and other methods.
    -
    -    To change the way static urls are generated (e.g. to match the behavior
    -    of another server or CDN), override `make_static_url`, `parse_url_path`,
    -    `get_cache_time`, and/or `get_version`.
    -
    -    To replace all interaction with the filesystem (e.g. to serve
    -    static content from a database), override `get_content`,
    -    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    -    `validate_absolute_path`.
    -
    -    .. versionchanged:: 3.1
    -       Many of the methods for subclasses were added in Tornado 3.1.
    -    """
    -    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
    -
    -    _static_hashes = {}  # type: typing.Dict
    -    _lock = threading.Lock()  # protects _static_hashes
    -
    -    def initialize(self, path, default_filename=None):
    -        self.root = path
    -        self.default_filename = default_filename
    -
    -    @classmethod
    -    def reset(cls):
    -        with cls._lock:
    -            cls._static_hashes = {}
    -
    -    def head(self, path):
    -        return self.get(path, include_body=False)
    -
    -    @gen.coroutine
    -    def get(self, path, include_body=True):
    -        # Set up our path instance variables.
    -        self.path = self.parse_url_path(path)
    -        del path  # make sure we don't refer to path instead of self.path again
    -        absolute_path = self.get_absolute_path(self.root, self.path)
    -        self.absolute_path = self.validate_absolute_path(
    -            self.root, absolute_path)
    -        if self.absolute_path is None:
    -            return
    -
    -        self.modified = self.get_modified_time()
    -        self.set_headers()
    -
    -        if self.should_return_304():
    -            self.set_status(304)
    -            return
    -
    -        request_range = None
    -        range_header = self.request.headers.get("Range")
    -        if range_header:
    -            # As per RFC 2616 14.16, if an invalid Range header is specified,
    -            # the request will be treated as if the header didn't exist.
    -            request_range = httputil._parse_request_range(range_header)
    -
    -        size = self.get_content_size()
    -        if request_range:
    -            start, end = request_range
    -            if (start is not None and start >= size) or end == 0:
    -                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
    -                # the first requested byte is equal to or greater than the
    -                # content, or when a suffix with length 0 is specified
    -                self.set_status(416)  # Range Not Satisfiable
    -                self.set_header("Content-Type", "text/plain")
    -                self.set_header("Content-Range", "bytes */%s" % (size, ))
    -                return
    -            if start is not None and start < 0:
    -                start += size
    -            if end is not None and end > size:
    -                # Clients sometimes blindly use a large range to limit their
    -                # download size; cap the endpoint at the actual file size.
    -                end = size
    -            # Note: only return HTTP 206 if less than the entire range has been
    -            # requested. Not only is this semantically correct, but Chrome
    -            # refuses to play audio if it gets an HTTP 206 in response to
    -            # ``Range: bytes=0-``.
    -            if size != (end or size) - (start or 0):
    -                self.set_status(206)  # Partial Content
    -                self.set_header("Content-Range",
    -                                httputil._get_content_range(start, end, size))
    -        else:
    -            start = end = None
    -
    -        if start is not None and end is not None:
    -            content_length = end - start
    -        elif end is not None:
    -            content_length = end
    -        elif start is not None:
    -            content_length = size - start
    -        else:
    -            content_length = size
    -        self.set_header("Content-Length", content_length)
    -
    -        if include_body:
    -            content = self.get_content(self.absolute_path, start, end)
    -            if isinstance(content, bytes):
    -                content = [content]
    -            for chunk in content:
    -                try:
    -                    self.write(chunk)
    -                    yield self.flush()
    -                except iostream.StreamClosedError:
    -                    return
    -        else:
    -            assert self.request.method == "HEAD"
    -
    -    def compute_etag(self):
    -        """Sets the ``Etag`` header based on static url version.
    -
    -        This allows efficient ``If-None-Match`` checks against cached
    -        versions, and sends the correct ``Etag`` for a partial response
    -        (i.e. the same ``Etag`` as the full file).
    -
    -        .. versionadded:: 3.1
    -        """
    -        version_hash = self._get_cached_version(self.absolute_path)
    -        if not version_hash:
    -            return None
    -        return '"%s"' % (version_hash, )
    -
    -    def set_headers(self):
    -        """Sets the content and caching headers on the response.
    -
    -        .. versionadded:: 3.1
    -        """
    -        self.set_header("Accept-Ranges", "bytes")
    -        self.set_etag_header()
    -
    -        if self.modified is not None:
    -            self.set_header("Last-Modified", self.modified)
    -
    -        content_type = self.get_content_type()
    -        if content_type:
    -            self.set_header("Content-Type", content_type)
    -
    -        cache_time = self.get_cache_time(self.path, self.modified,
    -                                         content_type)
    -        if cache_time > 0:
    -            self.set_header("Expires", datetime.datetime.utcnow() +
    -                            datetime.timedelta(seconds=cache_time))
    -            self.set_header("Cache-Control", "max-age=" + str(cache_time))
    -
    -        self.set_extra_headers(self.path)
    -
    -    def should_return_304(self):
    -        """Returns True if the headers indicate that we should return 304.
    -
    -        .. versionadded:: 3.1
    -        """
    -        # If client sent If-None-Match, use it, ignore If-Modified-Since
    -        if self.request.headers.get('If-None-Match'):
    -            return self.check_etag_header()
    -
    -        # Check the If-Modified-Since, and don't send the result if the
    -        # content has not been modified
    -        ims_value = self.request.headers.get("If-Modified-Since")
    -        if ims_value is not None:
    -            date_tuple = email.utils.parsedate(ims_value)
    -            if date_tuple is not None:
    -                if_since = datetime.datetime(*date_tuple[:6])
    -                if if_since >= self.modified:
    -                    return True
    -
    -        return False
    -
    -    @classmethod
    -    def get_absolute_path(cls, root, path):
    -        """Returns the absolute location of ``path`` relative to ``root``.
    -
    -        ``root`` is the path configured for this `StaticFileHandler`
    -        (in most cases the ``static_path`` `Application` setting).
    -
    -        This class method may be overridden in subclasses.  By default
    -        it returns a filesystem path, but other strings may be used
    -        as long as they are unique and understood by the subclass's
    -        overridden `get_content`.
    -
    -        .. versionadded:: 3.1
    -        """
    -        abspath = os.path.abspath(os.path.join(root, path))
    -        return abspath
    -
    -    def validate_absolute_path(self, root, absolute_path):
    -        """Validate and return the absolute path.
    -
    -        ``root`` is the configured path for the `StaticFileHandler`,
    -        and ``path`` is the result of `get_absolute_path`
    -
    -        This is an instance method called during request processing,
    -        so it may raise `HTTPError` or use methods like
    -        `RequestHandler.redirect` (return None after redirecting to
    -        halt further processing).  This is where 404 errors for missing files
    -        are generated.
    -
    -        This method may modify the path before returning it, but note that
    -        any such modifications will not be understood by `make_static_url`.
    -
    -        In instance methods, this method's result is available as
    -        ``self.absolute_path``.
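    -
    -        A sketch of an override that refuses to serve dot-files
    -        (illustrative only)::
    -
    -            class NoDotfilesHandler(StaticFileHandler):
    -                def validate_absolute_path(self, root, absolute_path):
    -                    if os.path.basename(absolute_path).startswith("."):
    -                        raise HTTPError(403)
    -                    return super(NoDotfilesHandler, self).validate_absolute_path(
    -                        root, absolute_path)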
    -
    -        .. versionadded:: 3.1
    -        """
    -        # os.path.abspath strips a trailing /.
    -        # We must add it back to `root` so that we only match files
    -        # in a directory named `root` instead of files starting with
    -        # that prefix.
    -        root = os.path.abspath(root)
    -        if not root.endswith(os.path.sep):
    -            # abspath always removes a trailing slash, except when
    -            # root is '/'. This is an unusual case, but several projects
    -            # have independently discovered this technique to disable
    -            # Tornado's path validation and (hopefully) do their own,
    -            # so we need to support it.
    -            root += os.path.sep
    -        # The trailing slash also needs to be temporarily added back
    -        # the requested path so a request to root/ will match.
    -        if not (absolute_path + os.path.sep).startswith(root):
    -            raise HTTPError(403, "%s is not in root static directory",
    -                            self.path)
    -        if (os.path.isdir(absolute_path) and
    -                self.default_filename is not None):
    -            # need to look at the request.path here for when path is empty
    -            # but there is some prefix to the path that was already
    -            # trimmed by the routing
    -            if not self.request.path.endswith("/"):
    -                self.redirect(self.request.path + "/", permanent=True)
    -                return
    -            absolute_path = os.path.join(absolute_path, self.default_filename)
    -        if not os.path.exists(absolute_path):
    -            raise HTTPError(404)
    -        if not os.path.isfile(absolute_path):
    -            raise HTTPError(403, "%s is not a file", self.path)
    -        return absolute_path
    -
    -    @classmethod
    -    def get_content(cls, abspath, start=None, end=None):
    -        """Retrieve the content of the requested resource which is located
    -        at the given absolute path.
    -
    -        This class method may be overridden by subclasses.  Note that its
    -        signature is different from other overridable class methods
    -        (no ``settings`` argument); this is deliberate to ensure that
    -        ``abspath`` is able to stand on its own as a cache key.
    -
    -        This method should either return a byte string or an iterator
    -        of byte strings.  The latter is preferred for large files
    -        as it helps reduce memory fragmentation.
    -
    -        .. versionadded:: 3.1
    -        """
    -        with open(abspath, "rb") as file:
    -            if start is not None:
    -                file.seek(start)
    -            if end is not None:
    -                remaining = end - (start or 0)
    -            else:
    -                remaining = None
    -            while True:
    -                chunk_size = 64 * 1024
    -                if remaining is not None and remaining < chunk_size:
    -                    chunk_size = remaining
    -                chunk = file.read(chunk_size)
    -                if chunk:
    -                    if remaining is not None:
    -                        remaining -= len(chunk)
    -                    yield chunk
    -                else:
    -                    if remaining is not None:
    -                        assert remaining == 0
    -                    return
    -
    -    @classmethod
    -    def get_content_version(cls, abspath):
    -        """Returns a version string for the resource at the given path.
    -
    -        This class method may be overridden by subclasses.  The
    -        default implementation is a hash of the file's contents.
    -
    -        .. versionadded:: 3.1
    -        """
    -        data = cls.get_content(abspath)
    -        hasher = hashlib.md5()
    -        if isinstance(data, bytes):
    -            hasher.update(data)
    -        else:
    -            for chunk in data:
    -                hasher.update(chunk)
    -        return hasher.hexdigest()
    -
    -    def _stat(self):
    -        if not hasattr(self, '_stat_result'):
    -            self._stat_result = os.stat(self.absolute_path)
    -        return self._stat_result
    -
    -    def get_content_size(self):
    -        """Retrieve the total size of the resource at the given path.
    -
    -        This method may be overridden by subclasses.
    -
    -        .. versionadded:: 3.1
    -
    -        .. versionchanged:: 4.0
    -           This method is now always called, instead of only when
    -           partial results are requested.
    -        """
    -        stat_result = self._stat()
    -        return stat_result[stat.ST_SIZE]
    -
    -    def get_modified_time(self):
    -        """Returns the time that ``self.absolute_path`` was last modified.
    -
    -        May be overridden in subclasses.  Should return a `~datetime.datetime`
    -        object or None.
    -
    -        .. versionadded:: 3.1
    -        """
    -        stat_result = self._stat()
    -        modified = datetime.datetime.utcfromtimestamp(
    -            stat_result[stat.ST_MTIME])
    -        return modified
    -
    -    def get_content_type(self):
    -        """Returns the ``Content-Type`` header to be used for this request.
    -
    -        .. versionadded:: 3.1
    -        """
    -        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
    -        # per RFC 6713, use the appropriate type for a gzip compressed file
    -        if encoding == "gzip":
    -            return "application/gzip"
    -        # As of 2015-07-21 there is no bzip2 encoding defined at
    -        # http://www.iana.org/assignments/media-types/media-types.xhtml
    -        # So for that (and any other encoding), use octet-stream.
    -        elif encoding is not None:
    -            return "application/octet-stream"
    -        elif mime_type is not None:
    -            return mime_type
    -        # if mime_type not detected, use application/octet-stream
    -        else:
    -            return "application/octet-stream"
    -
    -    def set_extra_headers(self, path):
    -        """For subclass to add extra headers to the response"""
    -        pass
    -
    -    def get_cache_time(self, path, modified, mime_type):
    -        """Override to customize cache control behavior.
    -
    -        Return a positive number of seconds to make the result
    -        cacheable for that amount of time or 0 to mark resource as
    -        cacheable for an unspecified amount of time (subject to
    -        browser heuristics).
    -
    -        By default returns cache expiry of 10 years for resources requested
    -        with ``v`` argument.
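    -
    -        A sketch of an override that additionally caches images for an
    -        hour (``MyStaticHandler`` is a made-up name)::
    -
    -            class MyStaticHandler(StaticFileHandler):
    -                def get_cache_time(self, path, modified, mime_type):
    -                    if mime_type and mime_type.startswith("image/"):
    -                        return 3600
    -                    return super(MyStaticHandler, self).get_cache_time(
    -                        path, modified, mime_type)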
    -        """
    -        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
    -
    -    @classmethod
    -    def make_static_url(cls, settings, path, include_version=True):
    -        """Constructs a versioned url for the given path.
    -
    -        This method may be overridden in subclasses (but note that it
    -        is a class method rather than an instance method).  Subclasses
    -        are only required to implement the signature
    -        ``make_static_url(cls, settings, path)``; other keyword
    -        arguments may be passed through `~RequestHandler.static_url`
    -        but are not standard.
    -
    -        ``settings`` is the `Application.settings` dictionary.  ``path``
    -        is the static path being requested.  The url returned should be
    -        relative to the current host.
    -
    -        ``include_version`` determines whether the generated URL should
    -        include the query string containing the version hash of the
    -        file corresponding to the given ``path``.
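    -
    -        For example (assuming ``app.settings`` includes ``static_path``;
    -        the version hash shown is fabricated)::
    -
    -            StaticFileHandler.make_static_url(app.settings, "style.css")
    -            # -> "/static/style.css?v=abcdef0123"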
    -
    -        """
    -        url = settings.get('static_url_prefix', '/static/') + path
    -        if not include_version:
    -            return url
    -
    -        version_hash = cls.get_version(settings, path)
    -        if not version_hash:
    -            return url
    -
    -        return '%s?v=%s' % (url, version_hash)
    -
    -    def parse_url_path(self, url_path):
    -        """Converts a static URL path into a filesystem path.
    -
    -        ``url_path`` is the path component of the URL with
    -        ``static_url_prefix`` removed.  The return value should be
    -        a filesystem path relative to ``static_path``.
    -
    -        This is the inverse of `make_static_url`.
    -        """
    -        if os.path.sep != "/":
    -            url_path = url_path.replace("/", os.path.sep)
    -        return url_path
    -
    -    @classmethod
    -    def get_version(cls, settings, path):
    -        """Generate the version string to be used in static URLs.
    -
    -        ``settings`` is the `Application.settings` dictionary and ``path``
    -        is the relative location of the requested asset on the filesystem.
    -        The returned value should be a string, or ``None`` if no version
    -        could be determined.
    -
    -        .. versionchanged:: 3.1
    -           This method was previously recommended for subclasses to override;
    -           `get_content_version` is now preferred as it allows the base
    -           class to handle caching of the result.
    -        """
    -        abs_path = cls.get_absolute_path(settings['static_path'], path)
    -        return cls._get_cached_version(abs_path)
    -
    -    @classmethod
    -    def _get_cached_version(cls, abs_path):
    -        with cls._lock:
    -            hashes = cls._static_hashes
    -            if abs_path not in hashes:
    -                try:
    -                    hashes[abs_path] = cls.get_content_version(abs_path)
    -                except Exception:
    -                    gen_log.error("Could not open static file %r", abs_path)
    -                    hashes[abs_path] = None
    -            hsh = hashes.get(abs_path)
    -            if hsh:
    -                return hsh
    -        return None
    -
    -
    -class FallbackHandler(RequestHandler):
    -    """A `RequestHandler` that wraps another HTTP server callback.
    -
    -    The fallback is a callable object that accepts an
    -    `~.httputil.HTTPServerRequest`, such as an `Application` or
    -    `tornado.wsgi.WSGIContainer`.  This is most useful for serving both
    -    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    -    usage::
    -
    -        wsgi_app = tornado.wsgi.WSGIContainer(
    -            django.core.handlers.wsgi.WSGIHandler())
    -        application = tornado.web.Application([
    -            (r"/foo", FooHandler),
    -            (r".*", FallbackHandler, dict(fallback=wsgi_app),
    -        ])
    -    """
    -    def initialize(self, fallback):
    -        self.fallback = fallback
    -
    -    def prepare(self):
    -        self.fallback(self.request)
    -        self._finished = True
    -        self.on_finish()
    -
    -
    -class OutputTransform(object):
    -    """A transform modifies the result of an HTTP request (e.g., GZip encoding)
    -
    -    Applications are not expected to create their own OutputTransforms
    -    or interact with them directly; the framework chooses which transforms
    -    (if any) to apply.
    -    """
    -    def __init__(self, request):
    -        pass
    -
    -    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    -        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] # noqa: E501
    -        return status_code, headers, chunk
    -
    -    def transform_chunk(self, chunk, finishing):
    -        return chunk
    -
    -
    -class GZipContentEncoding(OutputTransform):
    -    """Applies the gzip content encoding to the response.
    -
    -    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    -
    -    .. versionchanged:: 4.0
    -        Now compresses all mime types beginning with ``text/``, instead
    -        of just a whitelist. (the whitelist is still used for certain
    -        non-text mime types).
    -    """
    -    # Whitelist of compressible mime types (in addition to any types
    -    # beginning with "text/").
    -    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
    -                         "application/xml", "application/atom+xml",
    -                         "application/json", "application/xhtml+xml",
    -                         "image/svg+xml"])
    -    # Python's GzipFile defaults to level 9, while most other gzip
    -    # tools (including gzip itself) default to 6, which is probably a
    -    # better CPU/size tradeoff.
    -    GZIP_LEVEL = 6
    -    # Responses that are too short are unlikely to benefit from gzipping
    -    # after considering the "Content-Encoding: gzip" header and the header
    -    # inside the gzip encoding.
    -    # Note that responses written in multiple chunks will be compressed
    -    # regardless of size.
    -    MIN_LENGTH = 1024
    -
    -    def __init__(self, request):
    -        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
    -
    -    def _compressible_type(self, ctype):
    -        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES
    -
    -    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    -        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] # noqa: E501
    -        # TODO: can/should this type be inherited from the superclass?
    -        if 'Vary' in headers:
    -            headers['Vary'] += ', Accept-Encoding'
    -        else:
    -            headers['Vary'] = 'Accept-Encoding'
    -        if self._gzipping:
    -            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
    -            self._gzipping = self._compressible_type(ctype) and \
    -                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
    -                ("Content-Encoding" not in headers)
    -        if self._gzipping:
    -            headers["Content-Encoding"] = "gzip"
    -            self._gzip_value = BytesIO()
    -            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value,
    -                                            compresslevel=self.GZIP_LEVEL)
    -            chunk = self.transform_chunk(chunk, finishing)
    -            if "Content-Length" in headers:
    -                # The original content length is no longer correct.
    -                # If this is the last (and only) chunk, we can set the new
    -                # content-length; otherwise we remove it and fall back to
    -                # chunked encoding.
    -                if finishing:
    -                    headers["Content-Length"] = str(len(chunk))
    -                else:
    -                    del headers["Content-Length"]
    -        return status_code, headers, chunk
    -
    -    def transform_chunk(self, chunk, finishing):
    -        if self._gzipping:
    -            self._gzip_file.write(chunk)
    -            if finishing:
    -                self._gzip_file.close()
    -            else:
    -                self._gzip_file.flush()
    -            chunk = self._gzip_value.getvalue()
    -            self._gzip_value.truncate(0)
    -            self._gzip_value.seek(0)
    -        return chunk
    -
    -
    -def authenticated(method):
    -    """Decorate methods with this to require that the user be logged in.
    -
    -    If the user is not logged in, they will be redirected to the configured
    -    `login url <RequestHandler.get_login_url>`.
    -
    -    If you configure a login url with a query parameter, Tornado will
    -    assume you know what you're doing and use it as-is.  If not, it
    -    will add a `next` parameter so the login page knows where to send
    -    you once you're logged in.
    -    """
    -    @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    -        if not self.current_user:
    -            if self.request.method in ("GET", "HEAD"):
    -                url = self.get_login_url()
    -                if "?" not in url:
    -                    if urlparse.urlsplit(url).scheme:
    -                        # if login url is absolute, make next absolute too
    -                        next_url = self.request.full_url()
    -                    else:
    -                        next_url = self.request.uri
    -                    url += "?" + urlencode(dict(next=next_url))
    -                self.redirect(url)
    -                return
    -            raise HTTPError(403)
    -        return method(self, *args, **kwargs)
    -    return wrapper
    -
    -
    -class UIModule(object):
    -    """A re-usable, modular UI unit on a page.
    -
    -    UI modules often execute additional queries, and they can include
    -    additional CSS and JavaScript that will be included in the output
    -    page, which is automatically inserted on page render.
    -
    -    Subclasses of UIModule must override the `render` method.
    -    """
    -    def __init__(self, handler):
    -        self.handler = handler
    -        self.request = handler.request
    -        self.ui = handler.ui
    -        self.locale = handler.locale
    -
    -    @property
    -    def current_user(self):
    -        return self.handler.current_user
    -
    -    def render(self, *args, **kwargs):
    -        """Override in subclasses to return this module's output."""
    -        raise NotImplementedError()
    -
    -    def embedded_javascript(self):
    -        """Override to return a JavaScript string
    -        to be embedded in the page."""
    -        return None
    -
    -    def javascript_files(self):
    -        """Override to return a list of JavaScript files needed by this module.
    -
    -        If the return values are relative paths, they will be passed to
    -        `RequestHandler.static_url`; otherwise they will be used as-is.
    -        """
    -        return None
    -
    -    def embedded_css(self):
    -        """Override to return a CSS string
    -        that will be embedded in the page."""
    -        return None
    -
    -    def css_files(self):
    -        """Override to returns a list of CSS files required by this module.
    -
    -        If the return values are relative paths, they will be passed to
    -        `RequestHandler.static_url`; otherwise they will be used as-is.
    -        """
    -        return None
    -
    -    def html_head(self):
    -        """Override to return an HTML string that will be put in the 
    -        element.
    -        """
    -        return None
    -
    -    def html_body(self):
    -        """Override to return an HTML string that will be put at the end of
    -        the <body/> element.
    -        """
    -        return None
    -
    -    def render_string(self, path, **kwargs):
    -        """Renders a template and returns it as a string."""
    -        return self.handler.render_string(path, **kwargs)
    -
    -
    -class _linkify(UIModule):
    -    def render(self, text, **kwargs):
    -        return escape.linkify(text, **kwargs)
    -
    -
    -class _xsrf_form_html(UIModule):
    -    def render(self):
    -        return self.handler.xsrf_form_html()
    -
    -
    -class TemplateModule(UIModule):
    -    """UIModule that simply renders the given template.
    -
    -    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    -    but the module version gets its own namespace (with kwargs passed to
    -    Template()) instead of inheriting the outer template's namespace.
    -
    -    Templates rendered through this module also get access to UIModule's
    -    automatic javascript/css features.  Simply call set_resources
    -    inside the template and give it keyword arguments corresponding to
    -    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    -    Note that these resources are output once per template file, not once
    -    per instantiation of the template, so they must not depend on
    -    any arguments to the template.
    -    """
    -    def __init__(self, handler):
    -        super(TemplateModule, self).__init__(handler)
    -        # keep resources in both a list and a dict to preserve order
    -        self._resource_list = []
    -        self._resource_dict = {}
    -
    -    def render(self, path, **kwargs):
    -        def set_resources(**kwargs):
    -            if path not in self._resource_dict:
    -                self._resource_list.append(kwargs)
    -                self._resource_dict[path] = kwargs
    -            else:
    -                if self._resource_dict[path] != kwargs:
    -                    raise ValueError("set_resources called with different "
    -                                     "resources for the same template")
    -            return ""
    -        return self.render_string(path, set_resources=set_resources,
    -                                  **kwargs)
    -
    -    def _get_resources(self, key):
    -        return (r[key] for r in self._resource_list if key in r)
    -
    -    def embedded_javascript(self):
    -        return "\n".join(self._get_resources("embedded_javascript"))
    -
    -    def javascript_files(self):
    -        result = []
    -        for f in self._get_resources("javascript_files"):
    -            if isinstance(f, (unicode_type, bytes)):
    -                result.append(f)
    -            else:
    -                result.extend(f)
    -        return result
    -
    -    def embedded_css(self):
    -        return "\n".join(self._get_resources("embedded_css"))
    -
    -    def css_files(self):
    -        result = []
    -        for f in self._get_resources("css_files"):
    -            if isinstance(f, (unicode_type, bytes)):
    -                result.append(f)
    -            else:
    -                result.extend(f)
    -        return result
    -
    -    def html_head(self):
    -        return "".join(self._get_resources("html_head"))
    -
    -    def html_body(self):
    -        return "".join(self._get_resources("html_body"))
    -
    -
    -class _UIModuleNamespace(object):
    -    """Lazy namespace which creates UIModule proxies bound to a handler."""
    -    def __init__(self, handler, ui_modules):
    -        self.handler = handler
    -        self.ui_modules = ui_modules
    -
    -    def __getitem__(self, key):
    -        return self.handler._ui_module(key, self.ui_modules[key])
    -
    -    def __getattr__(self, key):
    -        try:
    -            return self[key]
    -        except KeyError as e:
    -            raise AttributeError(str(e))
    -
    -
    -if hasattr(hmac, 'compare_digest'):  # python 3.3
    -    _time_independent_equals = hmac.compare_digest
    -else:
    -    def _time_independent_equals(a, b):
    -        if len(a) != len(b):
    -            return False
    -        result = 0
    -        if isinstance(a[0], int):  # python3 byte strings
    -            for x, y in zip(a, b):
    -                result |= x ^ y
    -        else:  # python2
    -            for x, y in zip(a, b):
    -                result |= ord(x) ^ ord(y)
    -        return result == 0
    -
    -
    -def create_signed_value(secret, name, value, version=None, clock=None,
    -                        key_version=None):
    -    if version is None:
    -        version = DEFAULT_SIGNED_VALUE_VERSION
    -    if clock is None:
    -        clock = time.time
    -
    -    timestamp = utf8(str(int(clock())))
    -    value = base64.b64encode(utf8(value))
    -    if version == 1:
    -        signature = _create_signature_v1(secret, name, value, timestamp)
    -        value = b"|".join([value, timestamp, signature])
    -        return value
    -    elif version == 2:
    -        # The v2 format consists of a version number and a series of
    -        # length-prefixed fields "%d:%s", the last of which is a
    -        # signature, all separated by pipes.  All numbers are in
    -        # decimal format with no leading zeros.  The signature is an
    -        # HMAC-SHA256 of the whole string up to that point, including
    -        # the final pipe.
    -        #
    -        # The fields are:
    -        # - format version (i.e. 2; no length prefix)
    -        # - key version (integer, default is 0)
    -        # - timestamp (integer seconds since epoch)
    -        # - name (not encoded; assumed to be ~alphanumeric)
    -        # - value (base64-encoded)
    -        # - signature (hex-encoded; no length prefix)
    -        def format_field(s):
    -            return utf8("%d:" % len(s)) + utf8(s)
    -        to_sign = b"|".join([
    -            b"2",
    -            format_field(str(key_version or 0)),
    -            format_field(timestamp),
    -            format_field(name),
    -            format_field(value),
    -            b''])
    -
    -        if isinstance(secret, dict):
    -            assert key_version is not None, 'Key version must be set when sign key dict is used'
    -            assert version >= 2, 'Version must be at least 2 for key version support'
    -            secret = secret[key_version]
    -
    -        signature = _create_signature_v2(secret, to_sign)
    -        return to_sign + signature
    -    else:
    -        raise ValueError("Unsupported version %d" % version)
    -
    -
    -# A leading version number in decimal
    -# with no leading zeros, followed by a pipe.
    -_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
    -
    -
    -def _get_version(value):
    -    # Figures out what version value is.  Version 1 did not include an
    -    # explicit version field and started with arbitrary base64 data,
    -    # which makes this tricky.
    -    m = _signed_value_version_re.match(value)
    -    if m is None:
    -        version = 1
    -    else:
    -        try:
    -            version = int(m.group(1))
    -            if version > 999:
    -                # Certain payloads from the version-less v1 format may
    -                # be parsed as valid integers.  Due to base64 padding
    -                # restrictions, this can only happen for numbers whose
    -                # length is a multiple of 4, so we can treat all
    -                # numbers up to 999 as versions, and for the rest we
    -                # fall back to v1 format.
    -                version = 1
    -        except ValueError:
    -            version = 1
    -    return version
    -
    -
    -def decode_signed_value(secret, name, value, max_age_days=31,
    -                        clock=None, min_version=None):
    -    if clock is None:
    -        clock = time.time
    -    if min_version is None:
    -        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    -    if min_version > 2:
    -        raise ValueError("Unsupported min_version %d" % min_version)
    -    if not value:
    -        return None
    -
    -    value = utf8(value)
    -    version = _get_version(value)
    -
    -    if version < min_version:
    -        return None
    -    if version == 1:
    -        return _decode_signed_value_v1(secret, name, value,
    -                                       max_age_days, clock)
    -    elif version == 2:
    -        return _decode_signed_value_v2(secret, name, value,
    -                                       max_age_days, clock)
    -    else:
    -        return None
    -
    -
    -def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    -    parts = utf8(value).split(b"|")
    -    if len(parts) != 3:
    -        return None
    -    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    -    if not _time_independent_equals(parts[2], signature):
    -        gen_log.warning("Invalid cookie signature %r", value)
    -        return None
    -    timestamp = int(parts[1])
    -    if timestamp < clock() - max_age_days * 86400:
    -        gen_log.warning("Expired cookie %r", value)
    -        return None
    -    if timestamp > clock() + 31 * 86400:
    -        # _cookie_signature does not hash a delimiter between the
    -        # parts of the cookie, so an attacker could transfer trailing
    -        # digits from the payload to the timestamp without altering the
    -        # signature.  For backwards compatibility, sanity-check timestamp
    -        # here instead of modifying _cookie_signature.
    -        gen_log.warning("Cookie timestamp in future; possible tampering %r",
    -                        value)
    -        return None
    -    if parts[1].startswith(b"0"):
    -        gen_log.warning("Tampered cookie %r", value)
    -        return None
    -    try:
    -        return base64.b64decode(parts[0])
    -    except Exception:
    -        return None
    -
    -
    -def _decode_fields_v2(value):
    -    def _consume_field(s):
    -        length, _, rest = s.partition(b':')
    -        n = int(length)
    -        field_value = rest[:n]
    -        # In python 3, indexing bytes returns small integers; we must
    -        # use a slice to get a byte string as in python 2.
    -        if rest[n:n + 1] != b'|':
    -            raise ValueError("malformed v2 signed value field")
    -        rest = rest[n + 1:]
    -        return field_value, rest
    -
    -    rest = value[2:]  # remove version number
    -    key_version, rest = _consume_field(rest)
    -    timestamp, rest = _consume_field(rest)
    -    name_field, rest = _consume_field(rest)
    -    value_field, passed_sig = _consume_field(rest)
    -    return int(key_version), timestamp, name_field, value_field, passed_sig
    -
    -
    -def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    -    try:
    -        key_version, timestamp, name_field, value_field, passed_sig = _decode_fields_v2(value)
    -    except ValueError:
    -        return None
    -    signed_string = value[:-len(passed_sig)]
    -
    -    if isinstance(secret, dict):
    -        try:
    -            secret = secret[key_version]
    -        except KeyError:
    -            return None
    -
    -    expected_sig = _create_signature_v2(secret, signed_string)
    -    if not _time_independent_equals(passed_sig, expected_sig):
    -        return None
    -    if name_field != utf8(name):
    -        return None
    -    timestamp = int(timestamp)
    -    if timestamp < clock() - max_age_days * 86400:
    -        # The signature has expired.
    -        return None
    -    try:
    -        return base64.b64decode(value_field)
    -    except Exception:
    -        return None
    -
    -
    -def get_signature_key_version(value):
    -    value = utf8(value)
    -    version = _get_version(value)
    -    if version < 2:
    -        return None
    -    try:
    -        key_version, _, _, _, _ = _decode_fields_v2(value)
    -    except ValueError:
    -        return None
    -
    -    return key_version
    -
    -
    -def _create_signature_v1(secret, *parts):
    -    hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    -    for part in parts:
    -        hash.update(utf8(part))
    -    return utf8(hash.hexdigest())
    -
    -
    -def _create_signature_v2(secret, s):
    -    hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    -    hash.update(utf8(s))
    -    return utf8(hash.hexdigest())
    -
    -
    -def is_absolute(path):
    -    return any(path.startswith(x) for x in ["/", "http:", "https:"])
    +#
    +# Copyright 2009 Facebook
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License"); you may
    +# not use this file except in compliance with the License. You may obtain
    +# a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    +# License for the specific language governing permissions and limitations
    +# under the License.
    +
    +"""``tornado.web`` provides a simple web framework with asynchronous
    +features that allow it to scale to large numbers of open connections,
    +making it ideal for `long polling
    +<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
    +
    +Here is a simple "Hello, world" example app:
    +
    +.. testcode::
    +
    +    import tornado.ioloop
    +    import tornado.web
    +
    +    class MainHandler(tornado.web.RequestHandler):
    +        def get(self):
    +            self.write("Hello, world")
    +
    +    if __name__ == "__main__":
    +        application = tornado.web.Application([
    +            (r"/", MainHandler),
    +        ])
    +        application.listen(8888)
    +        tornado.ioloop.IOLoop.current().start()
    +
    +.. testoutput::
    +   :hide:
    +
    +
    +See the :doc:`guide` for additional information.
    +
    +Thread-safety notes
    +-------------------
    +
    +In general, methods on `RequestHandler` and elsewhere in Tornado are
    +not thread-safe. In particular, methods such as
    +`~RequestHandler.write()`, `~RequestHandler.finish()`, and
    +`~RequestHandler.flush()` must only be called from the main thread. If
    +you use multiple threads it is important to use `.IOLoop.add_callback`
    +to transfer control back to the main thread before finishing the
    +request, or to limit your use of other threads to
    +`.IOLoop.run_in_executor` and ensure that your callbacks running in
    +the executor do not refer to Tornado objects.
    +
    +"""
    +
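    +# Illustrative sketch (not part of this module) of the thread-safety notes
    +# above: run blocking work on a worker thread via IOLoop.run_in_executor and
    +# touch Tornado objects only after control returns to the IOLoop thread.
    +# ``blocking_work`` is a hypothetical function returning a dict.
    +#
    +#     class WorkHandler(RequestHandler):
    +#         async def get(self):
    +#             loop = tornado.ioloop.IOLoop.current()
    +#             result = await loop.run_in_executor(None, blocking_work)
    +#             self.write(result)  # back on the main thread
    +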
    +import base64
    +import binascii
    +import datetime
    +import email.utils
    +import functools
    +import gzip
    +import hashlib
    +import hmac
    +import http.cookies
    +from inspect import isclass
    +from io import BytesIO
    +import mimetypes
    +import numbers
    +import os.path
    +import re
    +import sys
    +import threading
    +import time
    +import tornado
    +import traceback
    +import types
    +import urllib.parse
    +from urllib.parse import urlencode
    +
    +from tornado.concurrent import Future, future_set_result_unless_cancelled
    +from tornado import escape
    +from tornado import gen
    +from tornado.httpserver import HTTPServer
    +from tornado import httputil
    +from tornado import iostream
    +import tornado.locale
    +from tornado import locale
    +from tornado.log import access_log, app_log, gen_log
    +from tornado import template
    +from tornado.escape import utf8, _unicode
    +from tornado.routing import (
    +    AnyMatches,
    +    DefaultHostMatches,
    +    HostMatches,
    +    ReversibleRouter,
    +    Rule,
    +    ReversibleRuleRouter,
    +    URLSpec,
    +    _RuleList,
    +)
    +from tornado.util import ObjectDict, unicode_type, _websocket_mask
    +
    +url = URLSpec
    +
    +from typing import (
    +    Dict,
    +    Any,
    +    Union,
    +    Optional,
    +    Awaitable,
    +    Tuple,
    +    List,
    +    Callable,
    +    Iterable,
    +    Generator,
    +    Type,
    +    cast,
    +    overload,
    +)
    +from types import TracebackType
    +import typing
    +
    +if typing.TYPE_CHECKING:
    +    from typing import Set  # noqa: F401
    +
    +
    +# The following types are accepted by RequestHandler.set_header
    +# and related methods.
    +_HeaderTypes = Union[bytes, unicode_type, int, numbers.Integral, datetime.datetime]
    +
    +_CookieSecretTypes = Union[str, bytes, Dict[int, str], Dict[int, bytes]]
    +
    +
    +MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
    +"""The oldest signed value version supported by this version of Tornado.
    +
    +Signed values older than this version cannot be decoded.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
    +"""The newest signed value version supported by this version of Tornado.
    +
    +Signed values newer than this version cannot be decoded.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +DEFAULT_SIGNED_VALUE_VERSION = 2
    +"""The signed value version produced by `.RequestHandler.create_signed_value`.
    +
    +May be overridden by passing a ``version`` keyword argument.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
    +"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
    +
    +May be overridden by passing a ``min_version`` keyword argument.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +
    +class _ArgDefaultMarker:
    +    pass
    +
    +
    +_ARG_DEFAULT = _ArgDefaultMarker()
    +
    +
    +class RequestHandler(object):
    +    """Base class for HTTP request handlers.
    +
    +    Subclasses must define at least one of the methods defined in the
    +    "Entry points" section below.
    +
    +    Applications should not construct `RequestHandler` objects
    +    directly and subclasses should not override ``__init__`` (override
    +    `~RequestHandler.initialize` instead).
    +
    +    """
    +
    +    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", "OPTIONS")
    +
    +    _template_loaders = {}  # type: Dict[str, template.BaseLoader]
    +    _template_loader_lock = threading.Lock()
    +    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    +
    +    _stream_request_body = False
    +
    +    # Will be set in _execute.
    +    _transforms = None  # type: List[OutputTransform]
    +    path_args = None  # type: List[str]
    +    path_kwargs = None  # type: Dict[str, str]
    +
    +    def __init__(
    +        self,
    +        application: "Application",
    +        request: httputil.HTTPServerRequest,
    +        **kwargs: Any
    +    ) -> None:
    +        super(RequestHandler, self).__init__()
    +
    +        self.application = application
    +        self.request = request
    +        self._headers_written = False
    +        self._finished = False
    +        self._auto_finish = True
    +        self._prepared_future = None
    +        self.ui = ObjectDict(
    +            (n, self._ui_method(m)) for n, m in application.ui_methods.items()
    +        )
    +        # UIModules are available as both `modules` and `_tt_modules` in the
    +        # template namespace.  Historically only `modules` was available
    +        # but could be clobbered by user additions to the namespace.
    +        # The template {% module %} directive looks in `_tt_modules` to avoid
    +        # possible conflicts.
    +        self.ui["_tt_modules"] = _UIModuleNamespace(self, application.ui_modules)
    +        self.ui["modules"] = self.ui["_tt_modules"]
    +        self.clear()
    +        assert self.request.connection is not None
    +        # TODO: need to add set_close_callback to HTTPConnection interface
    +        self.request.connection.set_close_callback(  # type: ignore
    +            self.on_connection_close
    +        )
    +        self.initialize(**kwargs)  # type: ignore
    +
    +    def _initialize(self) -> None:
    +        pass
    +
    +    initialize = _initialize  # type: Callable[..., None]
    +    """Hook for subclass initialization. Called for each request.
    +
    +    A dictionary passed as the third argument of a ``URLSpec`` will be
    +    supplied as keyword arguments to ``initialize()``.
    +
    +    Example::
    +
    +        class ProfileHandler(RequestHandler):
    +            def initialize(self, database):
    +                self.database = database
    +
    +            def get(self, username):
    +                ...
    +
    +        app = Application([
    +            (r'/user/(.*)', ProfileHandler, dict(database=database)),
    +            ])
    +    """
    +
    +    @property
    +    def settings(self) -> Dict[str, Any]:
    +        """An alias for `self.application.settings `."""
    +        return self.application.settings
    +
    +    def _unimplemented_method(self, *args: str, **kwargs: str) -> None:
    +        raise HTTPError(405)
    +
    +    head = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    get = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    post = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    delete = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    patch = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    put = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +    options = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    +
    +    def prepare(self) -> Optional[Awaitable[None]]:
    +        """Called at the beginning of a request before  `get`/`post`/etc.
    +
    +        Override this method to perform common initialization regardless
    +        of the request method.
    +
    +        Asynchronous support: Use ``async def`` or decorate this method with
    +        `.gen.coroutine` to make it asynchronous.
    +        If this method returns an  ``Awaitable`` execution will not proceed
    +        until the ``Awaitable`` is done.
    +
    +        .. versionadded:: 3.1
    +           Asynchronous support.
    +        """
    +        pass
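    +
    +    # Illustrative sketch (not part of the base class): a subclass that
    +    # uses ``prepare`` for per-request setup shared by every HTTP method.
    +    # ``self.db`` and ``open_session`` are hypothetical names.
    +    #
    +    #     class BaseHandler(RequestHandler):
    +    #         async def prepare(self):
    +    #             # Runs before get/post/etc.; execution waits for the
    +    #             # awaitable to complete.
    +    #             self.db = await open_session()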
    +
    +    def on_finish(self) -> None:
    +        """Called after the end of a request.
    +
    +        Override this method to perform cleanup, logging, etc.
    +        This method is a counterpart to `prepare`.  ``on_finish`` may
    +        not produce any output, as it is called after the response
    +        has been sent to the client.
    +        """
    +        pass
    +
    +    def on_connection_close(self) -> None:
    +        """Called in async handlers if the client closed the connection.
    +
    +        Override this to clean up resources associated with
    +        long-lived connections.  Note that this method is called only if
    +        the connection was closed during asynchronous processing; if you
    +        need to do cleanup after every request override `on_finish`
    +        instead.
    +
    +        Proxies may keep a connection open for a time (perhaps
    +        indefinitely) after the client has gone away, so this method
    +        may not be called promptly after the end user closes their
    +        connection.
    +        """
    +        if _has_stream_request_body(self.__class__):
    +            if not self.request._body_future.done():
    +                self.request._body_future.set_exception(iostream.StreamClosedError())
    +                self.request._body_future.exception()
    +
    +    def clear(self) -> None:
    +        """Resets all headers and content for this response."""
    +        self._headers = httputil.HTTPHeaders(
    +            {
    +                "Server": "TornadoServer/%s" % tornado.version,
    +                "Content-Type": "text/html; charset=UTF-8",
    +                "Date": httputil.format_timestamp(time.time()),
    +            }
    +        )
    +        self.set_default_headers()
    +        self._write_buffer = []  # type: List[bytes]
    +        self._status_code = 200
    +        self._reason = httputil.responses[200]
    +
    +    def set_default_headers(self) -> None:
    +        """Override this to set HTTP headers at the beginning of the request.
    +
    +        For example, this is the place to set a custom ``Server`` header.
    +        Note that setting such headers in the normal flow of request
    +        processing may not do what you want, since headers may be reset
    +        during error handling.
    +        """
    +        pass
    +
    +    def set_status(self, status_code: int, reason: str = None) -> None:
    +        """Sets the status code for our response.
    +
    +        :arg int status_code: Response status code.
    +        :arg str reason: Human-readable reason phrase describing the status
    +            code. If ``None``, it will be filled in from
    +            `http.client.responses` or "Unknown".
    +
    +        .. versionchanged:: 5.0
    +
    +           No longer validates that the response code is in
    +           `http.client.responses`.
    +        """
    +        self._status_code = status_code
    +        if reason is not None:
    +            self._reason = escape.native_str(reason)
    +        else:
    +            self._reason = httputil.responses.get(status_code, "Unknown")
    +
    +    def get_status(self) -> int:
    +        """Returns the status code for our response."""
    +        return self._status_code
    +
    +    def set_header(self, name: str, value: _HeaderTypes) -> None:
    +        """Sets the given response header name and value.
    +
    +        All header values are converted to strings (`datetime` objects
    +        are formatted according to the HTTP specification for the
    +        ``Date`` header).
    +
    +        """
    +        self._headers[name] = self._convert_header_value(value)
    +
    +    def add_header(self, name: str, value: _HeaderTypes) -> None:
    +        """Adds the given response header and value.
    +
    +        Unlike `set_header`, `add_header` may be called multiple times
    +        to return multiple values for the same header.
    +        """
    +        self._headers.add(name, self._convert_header_value(value))
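    +
    +    # Illustrative sketch (not part of the base class): ``set_header``
    +    # replaces a value while ``add_header`` appends another one, and
    +    # `datetime` values are formatted per the HTTP ``Date`` rules.
    +    #
    +    #     self.set_header("Cache-Control", "no-store")
    +    #     self.set_header("Expires", datetime.datetime.utcnow())
    +    #     self.add_header("Link", "</a.css>; rel=preload")
    +    #     self.add_header("Link", "</b.css>; rel=preload")  # both are sent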
    +
    +    def clear_header(self, name: str) -> None:
    +        """Clears an outgoing header, undoing a previous `set_header` call.
    +
    +        Note that this method does not apply to multi-valued headers
    +        set by `add_header`.
    +        """
    +        if name in self._headers:
    +            del self._headers[name]
    +
    +    _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")
    +
    +    def _convert_header_value(self, value: _HeaderTypes) -> str:
    +        # Convert the input value to a str. This type check is a bit
    +        # subtle: The bytes case only executes on python 3, and the
    +        # unicode case only executes on python 2, because the other
    +        # cases are covered by the first match for str.
    +        if isinstance(value, str):
    +            retval = value
    +        elif isinstance(value, bytes):  # py3
    +            # Non-ascii characters in headers are not well supported,
    +            # but if you pass bytes, use latin1 so they pass through as-is.
    +            retval = value.decode("latin1")
    +        elif isinstance(value, unicode_type):  # py2
    +            # TODO: This is inconsistent with the use of latin1 above,
    +            # but it's been that way for a long time. Should it change?
    +            retval = escape.utf8(value)
    +        elif isinstance(value, numbers.Integral):
    +            # return immediately since we know the converted value will be safe
    +            return str(value)
    +        elif isinstance(value, datetime.datetime):
    +            return httputil.format_timestamp(value)
    +        else:
    +            raise TypeError("Unsupported header value %r" % value)
    +        # If \n is allowed into the header, it is possible to inject
    +        # additional headers or split the request.
    +        if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
    +            raise ValueError("Unsafe header value %r", retval)
    +        return retval
    +
    +    @overload
    +    def get_argument(self, name: str, default: str, strip: bool = True) -> str:
    +        pass
    +
    +    @overload  # noqa: F811
    +    def get_argument(
    +        self, name: str, default: _ArgDefaultMarker = _ARG_DEFAULT, strip: bool = True
    +    ) -> str:
    +        pass
    +
    +    @overload  # noqa: F811
    +    def get_argument(
    +        self, name: str, default: None, strip: bool = True
    +    ) -> Optional[str]:
    +        pass
    +
    +    def get_argument(  # noqa: F811
    +        self,
    +        name: str,
    +        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
    +        strip: bool = True,
    +    ) -> Optional[str]:
    +        """Returns the value of the argument with the given name.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the request more than once, we return the
    +        last value.
    +
    +        This method searches both the query and body arguments.
    +        """
    +        return self._get_argument(name, default, self.request.arguments, strip)
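    +
    +    # Illustrative sketch (not part of the base class): with a default the
    +    # argument is optional; without one, a missing argument raises
    +    # MissingArgumentError, which is rendered as a 400 response.
    +    #
    +    #     q = self.get_argument("q", default="")   # "" when absent
    +    #     user = self.get_argument("user")         # 400 if absent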
    +
    +    def get_arguments(self, name: str, strip: bool = True) -> List[str]:
    +        """Returns a list of the arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        This method searches both the query and body arguments.
    +        """
    +
    +        # Make sure `get_arguments` isn't accidentally being called with a
    +        # positional argument that's assumed to be a default (like in
    +        # `get_argument`.)
    +        assert isinstance(strip, bool)
    +
    +        return self._get_arguments(name, self.request.arguments, strip)
    +
    +    def get_body_argument(
    +        self,
    +        name: str,
    +        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
    +        strip: bool = True,
    +    ) -> Optional[str]:
    +        """Returns the value of the argument with the given name
    +        from the request body.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the url more than once, we return the
    +        last value.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_argument(name, default, self.request.body_arguments, strip)
    +
    +    def get_body_arguments(self, name: str, strip: bool = True) -> List[str]:
    +        """Returns a list of the body arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_arguments(name, self.request.body_arguments, strip)
    +
    +    def get_query_argument(
    +        self,
    +        name: str,
    +        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
    +        strip: bool = True,
    +    ) -> Optional[str]:
    +        """Returns the value of the argument with the given name
    +        from the request query string.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the url more than once, we return the
    +        last value.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_argument(name, default, self.request.query_arguments, strip)
    +
    +    def get_query_arguments(self, name: str, strip: bool = True) -> List[str]:
    +        """Returns a list of the query arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_arguments(name, self.request.query_arguments, strip)
    +
    +    def _get_argument(
    +        self,
    +        name: str,
    +        default: Union[None, str, _ArgDefaultMarker],
    +        source: Dict[str, List[bytes]],
    +        strip: bool = True,
    +    ) -> Optional[str]:
    +        args = self._get_arguments(name, source, strip=strip)
    +        if not args:
    +            if isinstance(default, _ArgDefaultMarker):
    +                raise MissingArgumentError(name)
    +            return default
    +        return args[-1]
    +
    +    def _get_arguments(
    +        self, name: str, source: Dict[str, List[bytes]], strip: bool = True
    +    ) -> List[str]:
    +        values = []
    +        for v in source.get(name, []):
    +            s = self.decode_argument(v, name=name)
    +            if isinstance(s, unicode_type):
    +                # Get rid of any weird control chars (unless decoding gave
    +                # us bytes, in which case leave it alone)
    +                s = RequestHandler._remove_control_chars_regex.sub(" ", s)
    +            if strip:
    +                s = s.strip()
    +            values.append(s)
    +        return values
    +
    +    def decode_argument(self, value: bytes, name: str = None) -> str:
    +        """Decodes an argument from the request.
    +
    +        The argument has been percent-decoded and is now a byte string.
    +        By default, this method decodes the argument as utf-8 and returns
    +        a unicode string, but this may be overridden in subclasses.
    +
    +        This method is used as a filter for both `get_argument()` and for
    +        values extracted from the url and passed to `get()`/`post()`/etc.
    +
    +        The name of the argument is provided if known, but may be None
    +        (e.g. for unnamed groups in the url regex).
    +        """
    +        try:
    +            return _unicode(value)
    +        except UnicodeDecodeError:
    +            raise HTTPError(
    +                400, "Invalid unicode in %s: %r" % (name or "url", value[:40])
    +            )
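    +
    +    # Illustrative sketch (not part of the base class): a subclass could
    +    # relax the utf-8 assumption, e.g. for legacy clients.
    +    #
    +    #     class Latin1Handler(RequestHandler):  # hypothetical
    +    #         def decode_argument(self, value, name=None):
    +    #             return value.decode("latin1")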
    +
    +    @property
    +    def cookies(self) -> Dict[str, http.cookies.Morsel]:
    +        """An alias for
    +        `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
    +        return self.request.cookies
    +
    +    def get_cookie(self, name: str, default: str = None) -> Optional[str]:
    +        """Returns the value of the request cookie with the given name.
    +
    +        If the named cookie is not present, returns ``default``.
    +
    +        This method only returns cookies that were present in the request.
    +        It does not see the outgoing cookies set by `set_cookie` in this
    +        handler.
    +        """
    +        if self.request.cookies is not None and name in self.request.cookies:
    +            return self.request.cookies[name].value
    +        return default
    +
    +    def set_cookie(
    +        self,
    +        name: str,
    +        value: Union[str, bytes],
    +        domain: str = None,
    +        expires: Union[float, Tuple, datetime.datetime] = None,
    +        path: str = "/",
    +        expires_days: int = None,
    +        **kwargs: Any
    +    ) -> None:
    +        """Sets an outgoing cookie name/value with the given options.
    +
    +        Newly-set cookies are not immediately visible via `get_cookie`;
    +        they are not present until the next request.
    +
    +        expires may be a numeric timestamp as returned by `time.time`,
    +        a time tuple as returned by `time.gmtime`, or a
    +        `datetime.datetime` object.
    +
    +        Additional keyword arguments are set on the cookies.Morsel
    +        directly.
    +        See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel
    +        for available attributes.
    +        """
    +        # The cookie library only accepts type str, in both python 2 and 3
    +        name = escape.native_str(name)
    +        value = escape.native_str(value)
    +        if re.search(r"[\x00-\x20]", name + value):
    +            # Don't let us accidentally inject bad stuff
    +            raise ValueError("Invalid cookie %r: %r" % (name, value))
    +        if not hasattr(self, "_new_cookie"):
    +            self._new_cookie = http.cookies.SimpleCookie()
    +        if name in self._new_cookie:
    +            del self._new_cookie[name]
    +        self._new_cookie[name] = value
    +        morsel = self._new_cookie[name]
    +        if domain:
    +            morsel["domain"] = domain
    +        if expires_days is not None and not expires:
    +            expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days)
    +        if expires:
    +            morsel["expires"] = httputil.format_timestamp(expires)
    +        if path:
    +            morsel["path"] = path
    +        for k, v in kwargs.items():
    +            if k == "max_age":
    +                k = "max-age"
    +
    +            # skip falsy values for httponly and secure flags because
    +            # SimpleCookie sets them regardless
    +            if k in ["httponly", "secure"] and not v:
    +                continue
    +
    +            morsel[k] = v
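    +
    +    # Illustrative sketch (not part of the base class): extra keyword
    +    # arguments land on the Morsel, so standard cookie attributes work
    +    # directly (``sid`` here is a hypothetical value).
    +    #
    +    #     self.set_cookie("session", sid, expires_days=7,
    +    #                     httponly=True, secure=True)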
    +
    +    def clear_cookie(self, name: str, path: str = "/", domain: str = None) -> None:
    +        """Deletes the cookie with the given name.
    +
    +        Due to limitations of the cookie protocol, you must pass the same
    +        path and domain to clear a cookie as were used when that cookie
    +        was set (but there is no way to find out on the server side
    +        which values were used for a given cookie).
    +
    +        Similar to `set_cookie`, the effect of this method will not be
    +        seen until the following request.
    +        """
    +        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    +        self.set_cookie(name, value="", path=path, expires=expires, domain=domain)
    +
    +    def clear_all_cookies(self, path: str = "/", domain: str = None) -> None:
    +        """Deletes all the cookies the user sent with this request.
    +
    +        See `clear_cookie` for more information on the path and domain
    +        parameters.
    +
    +        Similar to `set_cookie`, the effect of this method will not be
    +        seen until the following request.
    +
    +        .. versionchanged:: 3.2
    +
    +           Added the ``path`` and ``domain`` parameters.
    +        """
    +        for name in self.request.cookies:
    +            self.clear_cookie(name, path=path, domain=domain)
    +
    +    def set_secure_cookie(
    +        self,
    +        name: str,
    +        value: Union[str, bytes],
    +        expires_days: int = 30,
    +        version: int = None,
    +        **kwargs: Any
    +    ) -> None:
    +        """Signs and timestamps a cookie so it cannot be forged.
    +
    +        You must specify the ``cookie_secret`` setting in your Application
    +        to use this method. It should be a long, random sequence of bytes
    +        to be used as the HMAC secret for the signature.
    +
    +        To read a cookie set with this method, use `get_secure_cookie()`.
    +
    +        Note that the ``expires_days`` parameter sets the lifetime of the
    +        cookie in the browser, but is independent of the ``max_age_days``
    +        parameter to `get_secure_cookie`.
    +
    +        Secure cookies may contain arbitrary byte values, not just unicode
    +        strings (unlike regular cookies)
    +
    +        Similar to `set_cookie`, the effect of this method will not be
    +        seen until the following request.
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``version`` argument.  Introduced cookie version 2
    +           and made it the default.
    +        """
    +        self.set_cookie(
    +            name,
    +            self.create_signed_value(name, value, version=version),
    +            expires_days=expires_days,
    +            **kwargs
    +        )
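    +
    +    # Illustrative sketch (not part of the base class): requires a
    +    # ``cookie_secret`` in the Application settings.
    +    #
    +    #     app = Application(handlers, cookie_secret="...long random string...")
    +    #     # later, inside a handler:
    +    #     self.set_secure_cookie("user", "alice")
    +    #     user = self.get_secure_cookie("user")  # b"alice", or None if invalid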
    +
    +    def create_signed_value(
    +        self, name: str, value: Union[str, bytes], version: int = None
    +    ) -> bytes:
    +        """Signs and timestamps a string so it cannot be forged.
    +
    +        Normally used via set_secure_cookie, but provided as a separate
    +        method for non-cookie uses.  To decode a value not stored
    +        as a cookie use the optional value argument to get_secure_cookie.
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``version`` argument.  Introduced cookie version 2
    +           and made it the default.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        secret = self.application.settings["cookie_secret"]
    +        key_version = None
    +        if isinstance(secret, dict):
    +            if self.application.settings.get("key_version") is None:
    +                raise Exception("key_version setting must be used for secret_key dicts")
    +            key_version = self.application.settings["key_version"]
    +
    +        return create_signed_value(
    +            secret, name, value, version=version, key_version=key_version
    +        )
    +
    +    def get_secure_cookie(
    +        self,
    +        name: str,
    +        value: str = None,
    +        max_age_days: int = 31,
    +        min_version: int = None,
    +    ) -> Optional[bytes]:
    +        """Returns the given signed cookie if it validates, or None.
    +
    +        The decoded cookie value is returned as a byte string (unlike
    +        `get_cookie`).
    +
    +        Similar to `get_cookie`, this method only returns cookies that
    +        were present in the request. It does not see outgoing cookies set by
    +        `set_secure_cookie` in this handler.
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``min_version`` argument.  Introduced cookie version 2;
    +           both versions 1 and 2 are accepted by default.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        if value is None:
    +            value = self.get_cookie(name)
    +        return decode_signed_value(
    +            self.application.settings["cookie_secret"],
    +            name,
    +            value,
    +            max_age_days=max_age_days,
    +            min_version=min_version,
    +        )
    +
    +    def get_secure_cookie_key_version(
    +        self, name: str, value: str = None
    +    ) -> Optional[int]:
    +        """Returns the signing key version of the secure cookie.
    +
    +        The version is returned as int.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        if value is None:
    +            value = self.get_cookie(name)
    +        if value is None:
    +            return None
    +        return get_signature_key_version(value)
    +
    +    def redirect(self, url: str, permanent: bool = False, status: int = None) -> None:
    +        """Sends a redirect to the given (optionally relative) URL.
    +
    +        If the ``status`` argument is specified, that value is used as the
    +        HTTP status code; otherwise either 301 (permanent) or 302
    +        (temporary) is chosen based on the ``permanent`` argument.
    +        The default is 302 (temporary).
    +        """
    +        if self._headers_written:
    +            raise Exception("Cannot redirect after headers have been written")
    +        if status is None:
    +            status = 301 if permanent else 302
    +        else:
    +            assert isinstance(status, int) and 300 <= status <= 399
    +        self.set_status(status)
    +        self.set_header("Location", utf8(url))
    +        self.finish()
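    +
    +    # Illustrative sketch (not part of the base class); a handler issues at
    +    # most one redirect per request, since ``redirect`` calls ``finish``.
    +    #
    +    #     self.redirect("/login")                  # 302 Found
    +    #     self.redirect("/moved", permanent=True)  # 301 Moved Permanently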
    +
    +    def write(self, chunk: Union[str, bytes, dict]) -> None:
    +        """Writes the given chunk to the output buffer.
    +
    +        To write the output to the network, use the `flush()` method below.
    +
    +        If the given chunk is a dictionary, we write it as JSON and set
    +        the Content-Type of the response to be ``application/json``.
    +        (if you want to send JSON as a different ``Content-Type``, call
    +        ``set_header`` *after* calling ``write()``).
    +
    +        Note that lists are not converted to JSON because of a potential
    +        cross-site security vulnerability.  All JSON output should be
    +        wrapped in a dictionary.  More details at
    +        http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
    +        https://github.com/facebook/tornado/issues/1009
    +        """
    +        if self._finished:
    +            raise RuntimeError("Cannot write() after finish()")
    +        if not isinstance(chunk, (bytes, unicode_type, dict)):
    +            message = "write() only accepts bytes, unicode, and dict objects"
    +            if isinstance(chunk, list):
    +                message += (
    +                    ". Lists not accepted for security reasons; see "
    +                    + "http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"  # noqa: E501
    +                )
    +            raise TypeError(message)
    +        if isinstance(chunk, dict):
    +            chunk = escape.json_encode(chunk)
    +            self.set_header("Content-Type", "application/json; charset=UTF-8")
    +        chunk = utf8(chunk)
    +        self._write_buffer.append(chunk)
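    +
    +    # Illustrative sketch (not part of the base class): a dict is encoded
    +    # as JSON and the Content-Type becomes application/json.
    +    #
    +    #     self.write({"status": "ok"})
    +    #     # To serve JSON under another type, set the header afterwards:
    +    #     self.set_header("Content-Type", "text/plain; charset=UTF-8")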
    +
    +    def render(self, template_name: str, **kwargs: Any) -> "Future[None]":
    +        """Renders the template with the given arguments as the response.
    +
    +        ``render()`` calls ``finish()``, so no other output methods can be called
    +        after it.
    +
    +        Returns a `.Future` with the same semantics as the one returned by `finish`.
    +        Awaiting this `.Future` is optional.
    +
    +        .. versionchanged:: 5.1
    +
    +           Now returns a `.Future` instead of ``None``.
    +        """
    +        if self._finished:
    +            raise RuntimeError("Cannot render() after finish()")
    +        html = self.render_string(template_name, **kwargs)
    +
    +        # Insert the additional JS and CSS added by the modules on the page
    +        js_embed = []
    +        js_files = []
    +        css_embed = []
    +        css_files = []
    +        html_heads = []
    +        html_bodies = []
    +        for module in getattr(self, "_active_modules", {}).values():
    +            embed_part = module.embedded_javascript()
    +            if embed_part:
    +                js_embed.append(utf8(embed_part))
    +            file_part = module.javascript_files()
    +            if file_part:
    +                if isinstance(file_part, (unicode_type, bytes)):
    +                    js_files.append(_unicode(file_part))
    +                else:
    +                    js_files.extend(file_part)
    +            embed_part = module.embedded_css()
    +            if embed_part:
    +                css_embed.append(utf8(embed_part))
    +            file_part = module.css_files()
    +            if file_part:
    +                if isinstance(file_part, (unicode_type, bytes)):
    +                    css_files.append(_unicode(file_part))
    +                else:
    +                    css_files.extend(file_part)
    +            head_part = module.html_head()
    +            if head_part:
    +                html_heads.append(utf8(head_part))
    +            body_part = module.html_body()
    +            if body_part:
    +                html_bodies.append(utf8(body_part))
    +
    +        if js_files:
    +            # Maintain order of JavaScript files given by modules
    +            js = self.render_linked_js(js_files)
    +            sloc = html.rindex(b"</body>")
    +            html = html[:sloc] + utf8(js) + b"\n" + html[sloc:]
    +        if js_embed:
    +            js_bytes = self.render_embed_js(js_embed)
    +            sloc = html.rindex(b"</body>")
    +            html = html[:sloc] + js_bytes + b"\n" + html[sloc:]
    +        if css_files:
    +            css = self.render_linked_css(css_files)
    +            hloc = html.index(b"</head>")
    +            html = html[:hloc] + utf8(css) + b"\n" + html[hloc:]
    +        if css_embed:
    +            css_bytes = self.render_embed_css(css_embed)
    +            hloc = html.index(b"</head>")
    +            html = html[:hloc] + css_bytes + b"\n" + html[hloc:]
    +        if html_heads:
    +            hloc = html.index(b"</head>")
    +            html = html[:hloc] + b"".join(html_heads) + b"\n" + html[hloc:]
    +        if html_bodies:
    +            hloc = html.index(b"</body>")
    +            html = html[:hloc] + b"".join(html_bodies) + b"\n" + html[hloc:]
    +        return self.finish(html)
    +
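    +# A minimal sketch of render(); the template name and variables are
    +# illustrative, and render() finishes the response, so nothing may be
    +# written afterwards.
    +from tornado.web import RequestHandler
    +
    +class ProfileHandler(RequestHandler):
    +    def get(self) -> None:
    +        # Resolved against the template_path application setting.
    +        self.render("profile.html", username="alice")
    +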
    +    def render_linked_js(self, js_files: Iterable[str]) -> str:
    +        """Default method used to render the final js links for the
    +        rendered webpage.
    +
    +        Override this method in a sub-classed controller to change the output.
    +        """
    +        paths = []
    +        unique_paths = set()  # type: Set[str]
    +
    +        for path in js_files:
    +            if not is_absolute(path):
    +                path = self.static_url(path)
    +            if path not in unique_paths:
    +                paths.append(path)
    +                unique_paths.add(path)
    +
    +        return "".join(
    +            '<script src="'
    +            + escape.xhtml_escape(p)
    +            + '" type="text/javascript"></script>'
    +            for p in paths
    +        )
    +
    +    def render_embed_js(self, js_embed: Iterable[bytes]) -> bytes:
    +        """Default method used to render the final embedded js for the
    +        rendered webpage.
    +
    +        Override this method in a sub-classed controller to change the output.
    +        """
    +        return (
    +            b'<script type="text/javascript">\n//<![CDATA[\n'
    +            + b"\n".join(js_embed)
    +            + b"\n//]]>\n</script>"
    +        )
    +
    +    def render_linked_css(self, css_files: Iterable[str]) -> str:
    +        """Default method used to render the final css links for the
    +        rendered webpage.
    +
    +        Override this method in a sub-classed controller to change the output.
    +        """
    +        paths = []
    +        unique_paths = set()  # type: Set[str]
    +
    +        for path in css_files:
    +            if not is_absolute(path):
    +                path = self.static_url(path)
    +            if path not in unique_paths:
    +                paths.append(path)
    +                unique_paths.add(path)
    +
    +        return "".join(
    +            '<link href="'
    +            + escape.xhtml_escape(p)
    +            + '" type="text/css" rel="stylesheet"/>'
    +            for p in paths
    +        )
    +
    +    def render_embed_css(self, css_embed: Iterable[bytes]) -> bytes:
    +        """Default method used to render the final embedded css for the
    +        rendered webpage.
    +
    +        Override this method in a sub-classed controller to change the output.
    +        """
    +        return b"<style type='text/css'>\n" + b"\n".join(css_embed) + b"\n</style>"
    +
    +    def render_string(self, template_name: str, **kwargs: Any) -> bytes:
    +        """Generate the given template with the given arguments.
    +
    +        We return the generated byte string (in utf8). To generate and
    +        write a template as a response, use render() above.
    +        """
    +        # If no template_path is specified, use the path of the calling file
    +        template_path = self.get_template_path()
    +        if not template_path:
    +            frame = sys._getframe(0)
    +            web_file = frame.f_code.co_filename
    +            while frame.f_code.co_filename == web_file:
    +                frame = frame.f_back
    +            assert frame.f_code.co_filename is not None
    +            template_path = os.path.dirname(frame.f_code.co_filename)
    +        with RequestHandler._template_loader_lock:
    +            if template_path not in RequestHandler._template_loaders:
    +                loader = self.create_template_loader(template_path)
    +                RequestHandler._template_loaders[template_path] = loader
    +            else:
    +                loader = RequestHandler._template_loaders[template_path]
    +        t = loader.load(template_name)
    +        namespace = self.get_template_namespace()
    +        namespace.update(kwargs)
    +        return t.generate(**namespace)
    +
    +    def get_template_namespace(self) -> Dict[str, Any]:
    +        """Returns a dictionary to be used as the default template namespace.
    +
    +        May be overridden by subclasses to add or modify values.
    +
    +        The results of this method will be combined with additional
    +        defaults in the `tornado.template` module and keyword arguments
    +        to `render` or `render_string`.
    +        """
    +        namespace = dict(
    +            handler=self,
    +            request=self.request,
    +            current_user=self.current_user,
    +            locale=self.locale,
    +            _=self.locale.translate,
    +            pgettext=self.locale.pgettext,
    +            static_url=self.static_url,
    +            xsrf_form_html=self.xsrf_form_html,
    +            reverse_url=self.reverse_url,
    +        )
    +        namespace.update(self.ui)
    +        return namespace
    +
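    +# A sketch of extending the default template namespace; the extra
    +# variable below is an illustrative assumption.
    +from tornado.web import RequestHandler
    +
    +class BaseHandler(RequestHandler):
    +    def get_template_namespace(self) -> dict:
    +        namespace = super().get_template_namespace()
    +        # Templates rendered by subclasses can now use {{ app_version }}.
    +        namespace["app_version"] = "1.0.0"
    +        return namespace
    +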
    +    def create_template_loader(self, template_path: str) -> template.BaseLoader:
    +        """Returns a new template loader for the given path.
    +
    +        May be overridden by subclasses.  By default returns a
    +        directory-based loader on the given path, using the
    +        ``autoescape`` and ``template_whitespace`` application
    +        settings.  If a ``template_loader`` application setting is
    +        supplied, uses that instead.
    +        """
    +        settings = self.application.settings
    +        if "template_loader" in settings:
    +            return settings["template_loader"]
    +        kwargs = {}
    +        if "autoescape" in settings:
    +            # autoescape=None means "no escaping", so we have to be sure
    +            # to only pass this kwarg if the user asked for it.
    +            kwargs["autoescape"] = settings["autoescape"]
    +        if "template_whitespace" in settings:
    +            kwargs["whitespace"] = settings["template_whitespace"]
    +        return template.Loader(template_path, **kwargs)
    +
    +    def flush(self, include_footers: bool = False) -> "Future[None]":
    +        """Flushes the current output buffer to the network.
    +
    +        The ``callback`` argument, if given, can be used for flow control:
    +        it will be run when all flushed data has been written to the socket.
    +        Note that only one flush callback can be outstanding at a time;
    +        if another flush occurs before the previous flush's callback
    +        has been run, the previous callback will be discarded.
    +
    +        .. versionchanged:: 4.0
    +           Now returns a `.Future` if no callback is given.
    +
    +        .. versionchanged:: 6.0
    +
    +           The ``callback`` argument was removed.
    +        """
    +        assert self.request.connection is not None
    +        chunk = b"".join(self._write_buffer)
    +        self._write_buffer = []
    +        if not self._headers_written:
    +            self._headers_written = True
    +            for transform in self._transforms:
    +                assert chunk is not None
    +                (
    +                    self._status_code,
    +                    self._headers,
    +                    chunk,
    +                ) = transform.transform_first_chunk(
    +                    self._status_code, self._headers, chunk, include_footers
    +                )
    +            # Ignore the chunk and only write the headers for HEAD requests
    +            if self.request.method == "HEAD":
    +                chunk = b""
    +
    +            # Finalize the cookie headers (which have been stored in a side
    +            # object so an outgoing cookie could be overwritten before it
    +            # is sent).
    +            if hasattr(self, "_new_cookie"):
    +                for cookie in self._new_cookie.values():
    +                    self.add_header("Set-Cookie", cookie.OutputString(None))
    +
    +            start_line = httputil.ResponseStartLine("", self._status_code, self._reason)
    +            return self.request.connection.write_headers(
    +                start_line, self._headers, chunk
    +            )
    +        else:
    +            for transform in self._transforms:
    +                chunk = transform.transform_chunk(chunk, include_footers)
    +            # Ignore the chunk and only write the headers for HEAD requests
    +            if self.request.method != "HEAD":
    +                return self.request.connection.write(chunk)
    +            else:
    +                future = Future()  # type: Future[None]
    +                future.set_result(None)
    +                return future
    +
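    +# A sketch of incremental output with flush(); awaiting the returned
    +# Future gives simple flow control. The chunk contents are illustrative.
    +from tornado.web import RequestHandler
    +
    +class StreamingHandler(RequestHandler):
    +    async def get(self) -> None:
    +        for i in range(3):
    +            self.write("chunk %d\n" % i)
    +            # Push buffered data to the client before producing more.
    +            await self.flush()
    +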
    +    def finish(self, chunk: Union[str, bytes, dict] = None) -> "Future[None]":
    +        """Finishes this response, ending the HTTP request.
    +
    +        Passing a ``chunk`` to ``finish()`` is equivalent to passing that
    +        chunk to ``write()`` and then calling ``finish()`` with no arguments.
    +
    +        Returns a `.Future` which may optionally be awaited to track the sending
    +        of the response to the client. This `.Future` resolves when all the response
    +        data has been sent, and raises an error if the connection is closed before all
    +        data can be sent.
    +
    +        .. versionchanged:: 5.1
    +
    +           Now returns a `.Future` instead of ``None``.
    +        """
    +        if self._finished:
    +            raise RuntimeError("finish() called twice")
    +
    +        if chunk is not None:
    +            self.write(chunk)
    +
    +        # Automatically support ETags and add the Content-Length header if
    +        # we have not flushed any content yet.
    +        if not self._headers_written:
    +            if (
    +                self._status_code == 200
    +                and self.request.method in ("GET", "HEAD")
    +                and "Etag" not in self._headers
    +            ):
    +                self.set_etag_header()
    +                if self.check_etag_header():
    +                    self._write_buffer = []
    +                    self.set_status(304)
    +            if self._status_code in (204, 304) or (
    +                self._status_code >= 100 and self._status_code < 200
    +            ):
    +                assert not self._write_buffer, (
    +                    "Cannot send body with %s" % self._status_code
    +                )
    +                self._clear_headers_for_304()
    +            elif "Content-Length" not in self._headers:
    +                content_length = sum(len(part) for part in self._write_buffer)
    +                self.set_header("Content-Length", content_length)
    +
    +        assert self.request.connection is not None
    +        # Now that the request is finished, clear the callback we
    +        # set on the HTTPConnection (which would otherwise prevent the
    +        # garbage collection of the RequestHandler when there
    +        # are keepalive connections)
    +        self.request.connection.set_close_callback(None)  # type: ignore
    +
    +        future = self.flush(include_footers=True)
    +        self.request.connection.finish()
    +        self._log()
    +        self._finished = True
    +        self.on_finish()
    +        self._break_cycles()
    +        return future
    +
    +    def detach(self) -> iostream.IOStream:
    +        """Take control of the underlying stream.
    +
    +        Returns the underlying `.IOStream` object and stops all
    +        further HTTP processing. Intended for implementing protocols
    +        like websockets that tunnel over an HTTP handshake.
    +
    +        This method is only supported when HTTP/1.1 is used.
    +
    +        .. versionadded:: 5.1
    +        """
    +        self._finished = True
    +        # TODO: add detach to HTTPConnection?
    +        return self.request.connection.detach()  # type: ignore
    +
    +    def _break_cycles(self) -> None:
    +        # Break up a reference cycle between this handler and the
    +        # _ui_module closures to allow for faster GC on CPython.
    +        self.ui = None  # type: ignore
    +
    +    def send_error(self, status_code: int = 500, **kwargs: Any) -> None:
    +        """Sends the given HTTP error code to the browser.
    +
    +        If `flush()` has already been called, it is not possible to send
    +        an error, so this method will simply terminate the response.
    +        If output has been written but not yet flushed, it will be discarded
    +        and replaced with the error page.
    +
    +        Override `write_error()` to customize the error page that is returned.
    +        Additional keyword arguments are passed through to `write_error`.
    +        """
    +        if self._headers_written:
    +            gen_log.error("Cannot send error response after headers written")
    +            if not self._finished:
    +                # If we get an error between writing headers and finishing,
    +                # we are unlikely to be able to finish due to a
    +                # Content-Length mismatch. Try anyway to release the
    +                # socket.
    +                try:
    +                    self.finish()
    +                except Exception:
    +                    gen_log.error("Failed to flush partial response", exc_info=True)
    +            return
    +        self.clear()
    +
    +        reason = kwargs.get("reason")
    +        if "exc_info" in kwargs:
    +            exception = kwargs["exc_info"][1]
    +            if isinstance(exception, HTTPError) and exception.reason:
    +                reason = exception.reason
    +        self.set_status(status_code, reason=reason)
    +        try:
    +            self.write_error(status_code, **kwargs)
    +        except Exception:
    +            app_log.error("Uncaught exception in write_error", exc_info=True)
    +        if not self._finished:
    +            self.finish()
    +
    +    def write_error(self, status_code: int, **kwargs: Any) -> None:
    +        """Override to implement custom error pages.
    +
    +        ``write_error`` may call `write`, `render`, `set_header`, etc
    +        to produce output as usual.
    +
    +        If this error was caused by an uncaught exception (including
    +        HTTPError), an ``exc_info`` triple will be available as
    +        ``kwargs["exc_info"]``.  Note that this exception may not be
    +        the "current" exception for purposes of methods like
    +        ``sys.exc_info()`` or ``traceback.format_exc``.
    +        """
    +        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
    +            # in debug mode, try to send a traceback
    +            self.set_header("Content-Type", "text/plain")
    +            for line in traceback.format_exception(*kwargs["exc_info"]):
    +                self.write(line)
    +            self.finish()
    +        else:
    +            self.finish(
    +                "<html><title>%(code)d: %(message)s</title>"
    +                "<body>%(code)d: %(message)s</body></html>"
    +                % {"code": status_code, "message": self._reason}
    +            )
    +
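    +# A sketch of a custom error page via a write_error() override; the JSON
    +# shape is an illustrative assumption for an API-style handler.
    +from tornado.web import RequestHandler
    +
    +class APIHandler(RequestHandler):
    +    def write_error(self, status_code: int, **kwargs) -> None:
    +        # Return a machine-readable error instead of the default HTML page.
    +        self.finish({"error": status_code, "reason": self._reason})
    +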
    +    @property
    +    def locale(self) -> tornado.locale.Locale:
    +        """The locale for the current session.
    +
    +        Determined by either `get_user_locale`, which you can override to
    +        set the locale based on, e.g., a user preference stored in a
    +        database, or `get_browser_locale`, which uses the ``Accept-Language``
    +        header.
    +
    +        .. versionchanged:: 4.1
    +           Added a property setter.
    +        """
    +        if not hasattr(self, "_locale"):
    +            loc = self.get_user_locale()
    +            if loc is not None:
    +                self._locale = loc
    +            else:
    +                self._locale = self.get_browser_locale()
    +                assert self._locale
    +        return self._locale
    +
    +    @locale.setter
    +    def locale(self, value: tornado.locale.Locale) -> None:
    +        self._locale = value
    +
    +    def get_user_locale(self) -> Optional[tornado.locale.Locale]:
    +        """Override to determine the locale from the authenticated user.
    +
    +        If None is returned, we fall back to `get_browser_locale()`.
    +
    +        This method should return a `tornado.locale.Locale` object,
    +        most likely obtained via a call like ``tornado.locale.get("en")``
    +        """
    +        return None
    +
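    +# A sketch of overriding get_user_locale(); reading the preference from
    +# a cookie is an illustrative assumption.
    +import tornado.locale
    +from tornado.web import RequestHandler
    +
    +class LocalizedHandler(RequestHandler):
    +    def get_user_locale(self):
    +        lang = self.get_cookie("preferred_lang")
    +        # Returning None falls back to get_browser_locale().
    +        return tornado.locale.get(lang) if lang else None
    +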
    +    def get_browser_locale(self, default: str = "en_US") -> tornado.locale.Locale:
    +        """Determines the user's locale from ``Accept-Language`` header.
    +
    +        See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    +        """
    +        if "Accept-Language" in self.request.headers:
    +            languages = self.request.headers["Accept-Language"].split(",")
    +            locales = []
    +            for language in languages:
    +                parts = language.strip().split(";")
    +                if len(parts) > 1 and parts[1].startswith("q="):
    +                    try:
    +                        score = float(parts[1][2:])
    +                    except (ValueError, TypeError):
    +                        score = 0.0
    +                else:
    +                    score = 1.0
    +                locales.append((parts[0], score))
    +            if locales:
    +                locales.sort(key=lambda pair: pair[1], reverse=True)
    +                codes = [l[0] for l in locales]
    +                return locale.get(*codes)
    +        return locale.get(default)
    +
    +    @property
    +    def current_user(self) -> Any:
    +        """The authenticated user for this request.
    +
    +        This is set in one of two ways:
    +
    +        * A subclass may override `get_current_user()`, which will be called
    +          automatically the first time ``self.current_user`` is accessed.
    +          `get_current_user()` will only be called once per request,
    +          and is cached for future access::
    +
    +              def get_current_user(self):
    +                  user_cookie = self.get_secure_cookie("user")
    +                  if user_cookie:
    +                      return json.loads(user_cookie)
    +                  return None
    +
    +        * It may be set as a normal variable, typically from an overridden
    +          `prepare()`::
    +
    +              @gen.coroutine
    +              def prepare(self):
    +                  user_id_cookie = self.get_secure_cookie("user_id")
    +                  if user_id_cookie:
    +                      self.current_user = yield load_user(user_id_cookie)
    +
    +        Note that `prepare()` may be a coroutine while `get_current_user()`
    +        may not, so the latter form is necessary if loading the user requires
    +        asynchronous operations.
    +
    +        The user object may be any type of the application's choosing.
    +        """
    +        if not hasattr(self, "_current_user"):
    +            self._current_user = self.get_current_user()
    +        return self._current_user
    +
    +    @current_user.setter
    +    def current_user(self, value: Any) -> None:
    +        self._current_user = value
    +
    +    def get_current_user(self) -> Any:
    +        """Override to determine the current user from, e.g., a cookie.
    +
    +        This method may not be a coroutine.
    +        """
    +        return None
    +
    +    def get_login_url(self) -> str:
    +        """Override to customize the login URL based on the request.
    +
    +        By default, we use the ``login_url`` application setting.
    +        """
    +        self.require_setting("login_url", "@tornado.web.authenticated")
    +        return self.application.settings["login_url"]
    +
    +    def get_template_path(self) -> Optional[str]:
    +        """Override to customize template path for each handler.
    +
    +        By default, we use the ``template_path`` application setting.
    +        Return None to load templates relative to the calling file.
    +        """
    +        return self.application.settings.get("template_path")
    +
    +    @property
    +    def xsrf_token(self) -> bytes:
    +        """The XSRF-prevention token for the current user/session.
    +
    +        To prevent cross-site request forgery, we set an '_xsrf' cookie
    +        and include the same '_xsrf' value as an argument with all POST
    +        requests. If the two do not match, we reject the form submission
    +        as a potential forgery.
    +
    +        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    +
    +        This property is of type `bytes`, but it contains only ASCII
    +        characters. If a character string is required, there is no
    +        need to base64-encode it; just decode the byte string as
    +        UTF-8.
    +
    +        .. versionchanged:: 3.2.2
    +           The xsrf token will now have a random mask applied in every
    +           request, which makes it safe to include the token in pages
    +           that are compressed.  See http://breachattack.com for more
    +           information on the issue fixed by this change.  Old (version 1)
    +           cookies will be converted to version 2 when this method is called
    +           unless the ``xsrf_cookie_version`` `Application` setting is
    +           set to 1.
    +
    +        .. versionchanged:: 4.3
    +           The ``xsrf_cookie_kwargs`` `Application` setting may be
    +           used to supply additional cookie options (which will be
    +           passed directly to `set_cookie`). For example,
    +           ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
    +           will set the ``secure`` and ``httponly`` flags on the
    +           ``_xsrf`` cookie.
    +        """
    +        if not hasattr(self, "_xsrf_token"):
    +            version, token, timestamp = self._get_raw_xsrf_token()
    +            output_version = self.settings.get("xsrf_cookie_version", 2)
    +            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
    +            if output_version == 1:
    +                self._xsrf_token = binascii.b2a_hex(token)
    +            elif output_version == 2:
    +                mask = os.urandom(4)
    +                self._xsrf_token = b"|".join(
    +                    [
    +                        b"2",
    +                        binascii.b2a_hex(mask),
    +                        binascii.b2a_hex(_websocket_mask(mask, token)),
    +                        utf8(str(int(timestamp))),
    +                    ]
    +                )
    +            else:
    +                raise ValueError("unknown xsrf cookie version %d" % output_version)
    +            if version is None:
    +                if self.current_user and "expires_days" not in cookie_kwargs:
    +                    cookie_kwargs["expires_days"] = 30
    +                self.set_cookie("_xsrf", self._xsrf_token, **cookie_kwargs)
    +        return self._xsrf_token
    +
    +    def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
    +        """Read or generate the xsrf token in its raw form.
    +
    +        The raw_xsrf_token is a tuple containing:
    +
    +        * version: the version of the cookie from which this token was read,
    +          or None if we generated a new token in this request.
    +        * token: the raw token data; random (non-ascii) bytes.
    +        * timestamp: the time this token was generated (will not be accurate
    +          for version 1 cookies)
    +        """
    +        if not hasattr(self, "_raw_xsrf_token"):
    +            cookie = self.get_cookie("_xsrf")
    +            if cookie:
    +                version, token, timestamp = self._decode_xsrf_token(cookie)
    +            else:
    +                version, token, timestamp = None, None, None
    +            if token is None:
    +                version = None
    +                token = os.urandom(16)
    +                timestamp = time.time()
    +            assert token is not None
    +            assert timestamp is not None
    +            self._raw_xsrf_token = (version, token, timestamp)
    +        return self._raw_xsrf_token
    +
    +    def _decode_xsrf_token(
    +        self, cookie: str
    +    ) -> Tuple[Optional[int], Optional[bytes], Optional[float]]:
    +        """Convert a cookie string into the tuple form returned by
    +        _get_raw_xsrf_token.
    +        """
    +
    +        try:
    +            m = _signed_value_version_re.match(utf8(cookie))
    +
    +            if m:
    +                version = int(m.group(1))
    +                if version == 2:
    +                    _, mask_str, masked_token, timestamp_str = cookie.split("|")
    +
    +                    mask = binascii.a2b_hex(utf8(mask_str))
    +                    token = _websocket_mask(mask, binascii.a2b_hex(utf8(masked_token)))
    +                    timestamp = int(timestamp_str)
    +                    return version, token, timestamp
    +                else:
    +                    # Treat unknown versions as not present instead of failing.
    +                    raise Exception("Unknown xsrf cookie version")
    +            else:
    +                version = 1
    +                try:
    +                    token = binascii.a2b_hex(utf8(cookie))
    +                except (binascii.Error, TypeError):
    +                    token = utf8(cookie)
    +                # We don't have a usable timestamp in older versions.
    +                timestamp = int(time.time())
    +                return (version, token, timestamp)
    +        except Exception:
    +            # Catch exceptions and return nothing instead of failing.
    +            gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
    +            return None, None, None
    +
    +    def check_xsrf_cookie(self) -> None:
    +        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
    +
    +        To prevent cross-site request forgery, we set an ``_xsrf``
    +        cookie and include the same value as a non-cookie
    +        field with all ``POST`` requests. If the two do not match, we
    +        reject the form submission as a potential forgery.
    +
    +        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
    +        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
    +        (the latter is accepted for compatibility with Django).
    +
    +        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    +
    +        .. versionchanged:: 3.2.2
    +           Added support for cookie version 2.  Both versions 1 and 2 are
    +           supported.
    +        """
    +        # Prior to release 1.1.1, this check was ignored if the HTTP header
    +        # ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    +        # has been shown to be insecure and has been removed.  For more
    +        # information please see
    +        # http://www.djangoproject.com/weblog/2011/feb/08/security/
    +        # http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    +        token = (
    +            self.get_argument("_xsrf", None)
    +            or self.request.headers.get("X-Xsrftoken")
    +            or self.request.headers.get("X-Csrftoken")
    +        )
    +        if not token:
    +            raise HTTPError(403, "'_xsrf' argument missing from POST")
    +        _, token, _ = self._decode_xsrf_token(token)
    +        _, expected_token, _ = self._get_raw_xsrf_token()
    +        if not token:
    +            raise HTTPError(403, "'_xsrf' argument has invalid format")
    +        if not hmac.compare_digest(utf8(token), utf8(expected_token)):
    +            raise HTTPError(403, "XSRF cookie does not match POST argument")
    +
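    +# A sketch of enabling XSRF protection application-wide; the empty
    +# handler list and the template line are illustrative assumptions.
    +import tornado.web
    +
    +app = tornado.web.Application(
    +    [],  # add handler rules here
    +    xsrf_cookies=True,  # check_xsrf_cookie() now runs for unsafe methods
    +)
    +# In each POST form template, include: {% module xsrf_form_html() %}
    +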
    +    def xsrf_form_html(self) -> str:
    +        """An HTML ``<input/>`` element to be included with all POST forms.
    +
    +        It defines the ``_xsrf`` input value, which we check on all POST
    +        requests to prevent cross-site request forgery. If you have set
    +        the ``xsrf_cookies`` application setting, you must include this
    +        HTML within all of your HTML forms.
    +
    +        In a template, this method should be called with ``{% module
    +        xsrf_form_html() %}``
    +
    +        See `check_xsrf_cookie()` above for more information.
    +        """
    +        return (
    +            '<input type="hidden" name="_xsrf" value="'
    +            + escape.xhtml_escape(self.xsrf_token)
    +            + '"/>'
    +        )
    +
    +    def static_url(self, path: str, include_host: bool = None, **kwargs: Any) -> str:
    +        """Returns a static URL for the given relative static file path.
    +
    +        This method requires you set the ``static_path`` setting in your
    +        application (which specifies the root directory of your static
    +        files).
    +
    +        This method returns a versioned url (by default appending
    +        ``?v=<signature>``), which allows the static files to be
    +        cached indefinitely.  This can be disabled by passing
    +        ``include_version=False`` (in the default implementation;
    +        other static file implementations are not required to support
    +        this, but they may support other options).
    +
    +        By default this method returns URLs relative to the current
    +        host, but if ``include_host`` is true the URL returned will be
    +        absolute.  If this handler has an ``include_host`` attribute,
    +        that value will be used as the default for all `static_url`
    +        calls that do not pass ``include_host`` as a keyword argument.
    +
    +        """
    +        self.require_setting("static_path", "static_url")
    +        get_url = self.settings.get(
    +            "static_handler_class", StaticFileHandler
    +        ).make_static_url
    +
    +        if include_host is None:
    +            include_host = getattr(self, "include_host", False)
    +
    +        if include_host:
    +            base = self.request.protocol + "://" + self.request.host
    +        else:
    +            base = ""
    +
    +        return base + get_url(self.settings, path, **kwargs)
    +
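    +# A sketch of static_url(); it assumes the static_path setting is
    +# configured, and the file name is illustrative.
    +from tornado.web import RequestHandler
    +
    +class PageHandler(RequestHandler):
    +    def get(self) -> None:
    +        # Produces something like /static/app.css?v=<hash>, cacheable
    +        # indefinitely because the version changes with the content.
    +        self.write(self.static_url("app.css"))
    +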
    +    def require_setting(self, name: str, feature: str = "this feature") -> None:
    +        """Raises an exception if the given app setting is not defined."""
    +        if not self.application.settings.get(name):
    +            raise Exception(
    +                "You must define the '%s' setting in your "
    +                "application to use %s" % (name, feature)
    +            )
    +
    +    def reverse_url(self, name: str, *args: Any) -> str:
    +        """Alias for `Application.reverse_url`."""
    +        return self.application.reverse_url(name, *args)
    +
    +    def compute_etag(self) -> Optional[str]:
    +        """Computes the etag header to be used for this request.
    +
    +        By default uses a hash of the content written so far.
    +
    +        May be overridden to provide custom etag implementations,
    +        or may return None to disable tornado's default etag support.
    +        """
    +        hasher = hashlib.sha1()
    +        for part in self._write_buffer:
    +            hasher.update(part)
    +        return '"%s"' % hasher.hexdigest()
    +
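    +# A sketch of opting out of ETag support by overriding compute_etag()
    +# to return None, as the docstring above allows.
    +from tornado.web import RequestHandler
    +
    +class NoEtagHandler(RequestHandler):
    +    def compute_etag(self):
    +        # Responses that differ on every request gain nothing from ETags.
    +        return None
    +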
    +    def set_etag_header(self) -> None:
    +        """Sets the response's Etag header using ``self.compute_etag()``.
    +
    +        Note: no header will be set if ``compute_etag()`` returns ``None``.
    +
    +        This method is called automatically when the request is finished.
    +        """
    +        etag = self.compute_etag()
    +        if etag is not None:
    +            self.set_header("Etag", etag)
    +
    +    def check_etag_header(self) -> bool:
    +        """Checks the ``Etag`` header against requests's ``If-None-Match``.
    +
    +        Returns ``True`` if the request's Etag matches and a 304 should be
    +        returned. For example::
    +
    +            self.set_etag_header()
    +            if self.check_etag_header():
    +                self.set_status(304)
    +                return
    +
    +        This method is called automatically when the request is finished,
    +        but may be called earlier for applications that override
    +        `compute_etag` and want to do an early check for ``If-None-Match``
    +        before completing the request.  The ``Etag`` header should be set
    +        (perhaps with `set_etag_header`) before calling this method.
    +        """
    +        computed_etag = utf8(self._headers.get("Etag", ""))
    +        # Find all weak and strong etag values from If-None-Match header
    +        # because RFC 7232 allows multiple etag values in a single header.
    +        etags = re.findall(
    +            br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
    +        )
    +        if not computed_etag or not etags:
    +            return False
    +
    +        match = False
    +        if etags[0] == b"*":
    +            match = True
    +        else:
    +            # Use a weak comparison when comparing entity-tags.
    +            def val(x: bytes) -> bytes:
    +                return x[2:] if x.startswith(b"W/") else x
    +
    +            for etag in etags:
    +                if val(etag) == val(computed_etag):
    +                    match = True
    +                    break
    +        return match
    +
    +    async def _execute(
    +        self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes
    +    ) -> None:
    +        """Executes this request with the given output transforms."""
    +        self._transforms = transforms
    +        try:
    +            if self.request.method not in self.SUPPORTED_METHODS:
    +                raise HTTPError(405)
    +            self.path_args = [self.decode_argument(arg) for arg in args]
    +            self.path_kwargs = dict(
    +                (k, self.decode_argument(v, name=k)) for (k, v) in kwargs.items()
    +            )
    +            # If XSRF cookies are turned on, reject form submissions without
    +            # the proper cookie
    +            if self.request.method not in (
    +                "GET",
    +                "HEAD",
    +                "OPTIONS",
    +            ) and self.application.settings.get("xsrf_cookies"):
    +                self.check_xsrf_cookie()
    +
    +            result = self.prepare()
    +            if result is not None:
    +                result = await result
    +            if self._prepared_future is not None:
    +                # Tell the Application we've finished with prepare()
    +                # and are ready for the body to arrive.
    +                future_set_result_unless_cancelled(self._prepared_future, None)
    +            if self._finished:
    +                return
    +
    +            if _has_stream_request_body(self.__class__):
    +                # In streaming mode request.body is a Future that signals
    +                # the body has been completely received.  The Future has no
    +                # result; the data has been passed to self.data_received
    +                # instead.
    +                try:
    +                    await self.request._body_future
    +                except iostream.StreamClosedError:
    +                    return
    +
    +            method = getattr(self, self.request.method.lower())
    +            result = method(*self.path_args, **self.path_kwargs)
    +            if result is not None:
    +                result = await result
    +            if self._auto_finish and not self._finished:
    +                self.finish()
    +        except Exception as e:
    +            try:
    +                self._handle_request_exception(e)
    +            except Exception:
    +                app_log.error("Exception in exception handler", exc_info=True)
    +            finally:
    +                # Unset result to avoid circular references
    +                result = None
    +            if self._prepared_future is not None and not self._prepared_future.done():
    +                # In case we failed before setting _prepared_future, do it
    +                # now (to unblock the HTTP server).  Note that this is not
    +                # in a finally block to avoid GC issues prior to Python 3.4.
    +                self._prepared_future.set_result(None)
    +
    +    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
    +        """Implement this method to handle streamed request data.
    +
    +        Requires the `.stream_request_body` decorator.
    +
    +        May be a coroutine for flow control.
    +        """
    +        raise NotImplementedError()
    +
    +    def _log(self) -> None:
    +        """Logs the current request.
    +
    +        Sort of deprecated since this functionality was moved to the
    +        Application, but left in place for the benefit of existing apps
    +        that have overridden this method.
    +        """
    +        self.application.log_request(self)
    +
    +    def _request_summary(self) -> str:
    +        return "%s %s (%s)" % (
    +            self.request.method,
    +            self.request.uri,
    +            self.request.remote_ip,
    +        )
    +
    +    def _handle_request_exception(self, e: BaseException) -> None:
    +        if isinstance(e, Finish):
    +            # Not an error; just finish the request without logging.
    +            if not self._finished:
    +                self.finish(*e.args)
    +            return
    +        try:
    +            self.log_exception(*sys.exc_info())
    +        except Exception:
    +            # An error here should still get a best-effort send_error()
    +            # to avoid leaking the connection.
    +            app_log.error("Error in exception logger", exc_info=True)
    +        if self._finished:
    +            # Extra errors after the request has been finished should
    +            # be logged, but there is no reason to continue to try and
    +            # send a response.
    +            return
    +        if isinstance(e, HTTPError):
    +            self.send_error(e.status_code, exc_info=sys.exc_info())
    +        else:
    +            self.send_error(500, exc_info=sys.exc_info())
    +
    +    def log_exception(
    +        self,
    +        typ: "Optional[Type[BaseException]]",
    +        value: Optional[BaseException],
    +        tb: Optional[TracebackType],
    +    ) -> None:
    +        """Override to customize logging of uncaught exceptions.
    +
    +        By default logs instances of `HTTPError` as warnings without
    +        stack traces (on the ``tornado.general`` logger), and all
    +        other exceptions as errors with stack traces (on the
    +        ``tornado.application`` logger).
    +
    +        .. versionadded:: 3.1
    +        """
    +        if isinstance(value, HTTPError):
    +            if value.log_message:
    +                format = "%d %s: " + value.log_message
    +                args = [value.status_code, self._request_summary()] + list(value.args)
    +                gen_log.warning(format, *args)
    +        else:
    +            app_log.error(  # type: ignore
    +                "Uncaught exception %s\n%r",
    +                self._request_summary(),
    +                self.request,
    +                exc_info=(typ, value, tb),
    +            )
    +
    +    def _ui_module(self, name: str, module: Type["UIModule"]) -> Callable[..., str]:
    +        def render(*args, **kwargs) -> str:  # type: ignore
    +            if not hasattr(self, "_active_modules"):
    +                self._active_modules = {}  # type: Dict[str, UIModule]
    +            if name not in self._active_modules:
    +                self._active_modules[name] = module(self)
    +            rendered = self._active_modules[name].render(*args, **kwargs)
    +            return rendered
    +
    +        return render
    +
    +    def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
    +        return lambda *args, **kwargs: method(self, *args, **kwargs)
    +
    +    def _clear_headers_for_304(self) -> None:
    +        # 304 responses should not contain entity headers (defined in
    +        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
    +        # not explicitly allowed by
    +        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
    +        headers = [
    +            "Allow",
    +            "Content-Encoding",
    +            "Content-Language",
    +            "Content-Length",
    +            "Content-MD5",
    +            "Content-Range",
    +            "Content-Type",
    +            "Last-Modified",
    +        ]
    +        for h in headers:
    +            self.clear_header(h)
    +
    +
    +def stream_request_body(cls: Type[RequestHandler]) -> Type[RequestHandler]:
    +    """Apply to `RequestHandler` subclasses to enable streaming body support.
    +
    +    This decorator implies the following changes:
    +
    +    * `.HTTPServerRequest.body` is undefined, and body arguments will not
    +      be included in `RequestHandler.get_argument`.
    +    * `RequestHandler.prepare` is called when the request headers have been
    +      read instead of after the entire body has been read.
    +    * The subclass must define a method ``data_received(self, data):``, which
    +      will be called zero or more times as data is available.  Note that
    +      if the request has an empty body, ``data_received`` may not be called.
    +    * ``prepare`` and ``data_received`` may return Futures (such as via
    +      ``@gen.coroutine``, in which case the next method will not be called
    +      until those futures have completed.
    +    * The regular HTTP method (``post``, ``put``, etc) will be called after
    +      the entire body has been read.
    +
    +    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
    +    for example usage.
    +    """  # noqa: E501
    +    if not issubclass(cls, RequestHandler):
    +        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    +    cls._stream_request_body = True
    +    return cls
    +
    +
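    +# A sketch of a streaming upload handler; the byte counting is an
    +# illustrative assumption of what data_received() might do.
    +from tornado.web import RequestHandler, stream_request_body
    +
    +@stream_request_body
    +class UploadHandler(RequestHandler):
    +    def prepare(self) -> None:
    +        # Runs once the headers arrive, before any body data.
    +        self.bytes_read = 0
    +
    +    def data_received(self, chunk: bytes) -> None:
    +        # Called zero or more times as body chunks arrive.
    +        self.bytes_read += len(chunk)
    +
    +    def post(self) -> None:
    +        # Runs after the entire body has been streamed in.
    +        self.write({"received": self.bytes_read})
    +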
    +def _has_stream_request_body(cls: Type[RequestHandler]) -> bool:
    +    if not issubclass(cls, RequestHandler):
    +        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    +    return cls._stream_request_body
    +
    +
    +def removeslash(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
    +    """Use this decorator to remove trailing slashes from the request path.
    +
    +    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    +    decorator. Your request handler mapping should use a regular expression
    +    like ``r'/foo/*'`` in conjunction with using the decorator.
    +    """
    +
    +    @functools.wraps(method)
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
    +        if self.request.path.endswith("/"):
    +            if self.request.method in ("GET", "HEAD"):
    +                uri = self.request.path.rstrip("/")
    +                if uri:  # don't try to redirect '/' to ''
    +                    if self.request.query:
    +                        uri += "?" + self.request.query
    +                    self.redirect(uri, permanent=True)
    +                    return None
    +            else:
    +                raise HTTPError(404)
    +        return method(self, *args, **kwargs)
    +
    +    return wrapper
    +
    +
    +def addslash(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
    +    """Use this decorator to add a missing trailing slash to the request path.
    +
    +    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    +    decorator. Your request handler mapping should use a regular expression
    +    like ``r'/foo/?'`` in conjunction with using the decorator.
    +    """
    +
    +    @functools.wraps(method)
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
    +        if not self.request.path.endswith("/"):
    +            if self.request.method in ("GET", "HEAD"):
    +                uri = self.request.path + "/"
    +                if self.request.query:
    +                    uri += "?" + self.request.query
    +                self.redirect(uri, permanent=True)
    +                return None
    +            raise HTTPError(404)
    +        return method(self, *args, **kwargs)
    +
    +    return wrapper
    +
    +
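    +# A sketch pairing addslash with a tolerant route pattern, per the
    +# docstring above; the path and handler name are illustrative.
    +from tornado.web import RequestHandler, addslash
    +
    +class DirHandler(RequestHandler):
    +    @addslash
    +    def get(self) -> None:
    +        self.write("canonical path with trailing slash")
    +
    +# Map it with an optional trailing slash so both forms reach the handler,
    +# e.g. (r"/docs/?", DirHandler).
    +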
    +class _ApplicationRouter(ReversibleRuleRouter):
    +    """Routing implementation used internally by `Application`.
    +
    +    Provides a binding between `Application` and `RequestHandler`.
    +    This implementation extends `~.routing.ReversibleRuleRouter` in a couple of ways:
    +        * it allows `RequestHandler` subclasses to be used as a `~.routing.Rule` target and
    +        * it allows a list/tuple of rules to be used as a `~.routing.Rule` target.
    +        The ``process_rule`` implementation will substitute such a list with an
    +        appropriate `_ApplicationRouter` instance.
    +    """
    +
    +    def __init__(self, application: "Application", rules: _RuleList = None) -> None:
    +        assert isinstance(application, Application)
    +        self.application = application
    +        super(_ApplicationRouter, self).__init__(rules)
    +
    +    def process_rule(self, rule: Rule) -> Rule:
    +        rule = super(_ApplicationRouter, self).process_rule(rule)
    +
    +        if isinstance(rule.target, (list, tuple)):
    +            rule.target = _ApplicationRouter(  # type: ignore
    +                self.application, rule.target
    +            )
    +
    +        return rule
    +
    +    def get_target_delegate(
    +        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
    +    ) -> Optional[httputil.HTTPMessageDelegate]:
    +        if isclass(target) and issubclass(target, RequestHandler):
    +            return self.application.get_handler_delegate(
    +                request, target, **target_params
    +            )
    +
    +        return super(_ApplicationRouter, self).get_target_delegate(
    +            target, request, **target_params
    +        )
    +
    +
    +class Application(ReversibleRouter):
    +    r"""A collection of request handlers that make up a web application.
    +
    +    Instances of this class are callable and can be passed directly to
    +    HTTPServer to serve the application::
    +
    +        application = web.Application([
    +            (r"/", MainPageHandler),
    +        ])
    +        http_server = httpserver.HTTPServer(application)
    +        http_server.listen(8080)
    +        ioloop.IOLoop.current().start()
    +
    +    The constructor for this class takes in a list of `~.routing.Rule`
    +    objects or tuples of values corresponding to the arguments of
    +    `~.routing.Rule` constructor: ``(matcher, target, [target_kwargs], [name])``,
    +    the values in square brackets being optional. The default matcher is
    +    `~.routing.PathMatches`, so ``(regexp, target)`` tuples can also be used
    +    instead of ``(PathMatches(regexp), target)``.
    +
    +    A common routing target is a `RequestHandler` subclass, but you can also
    +    use lists of rules as a target, which create a nested routing configuration::
    +
    +        application = web.Application([
    +            (HostMatches("example.com"), [
    +                (r"/", MainPageHandler),
    +                (r"/feed", FeedHandler),
    +            ]),
    +        ])
    +
    +    In addition to this you can use nested `~.routing.Router` instances,
    +    `~.httputil.HTTPMessageDelegate` subclasses and callables as routing targets
    +    (see `~.routing` module docs for more information).
    +
    +    When we receive requests, we iterate over the list in order and
    +    instantiate an instance of the first request class whose regexp
    +    matches the request path. The request class can be specified as
    +    either a class object or a (fully-qualified) name.
    +
    +    A dictionary may be passed as the third element (``target_kwargs``)
    +    of the tuple, which will be used as keyword arguments to the handler's
    +    constructor and `~RequestHandler.initialize` method. This pattern
    +    is used for the `StaticFileHandler` in this example (note that a
    +    `StaticFileHandler` can be installed automatically with the
    +    static_path setting described below)::
    +
    +        application = web.Application([
    +            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    +        ])
    +
    +    We support virtual hosts with the `add_handlers` method, which takes in
    +    a host regular expression as the first argument::
    +
    +        application.add_handlers(r"www\.myhost\.com", [
    +            (r"/article/([0-9]+)", ArticleHandler),
    +        ])
    +
    +    If there's no match for the current request's host, then ``default_host``
    +    parameter value is matched against host regular expressions.
    +
    +
    +    .. warning::
    +
    +       Applications that do not use TLS may be vulnerable to :ref:`DNS
    +       rebinding <dnsrebinding>` attacks. This attack is especially
    +       relevant to applications that only listen on ``127.0.0.1`` or
    +       other private networks. Appropriate host patterns must be used
    +       (instead of the default of ``r'.*'``) to prevent this risk. The
    +       ``default_host`` argument must not be used in applications that
    +       may be vulnerable to DNS rebinding.
    +
    +    You can serve static files by sending the ``static_path`` setting
    +    as a keyword argument. We will serve those files from the
    +    ``/static/`` URI (this is configurable with the
    +    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    +    and ``/robots.txt`` from the same directory.  A custom subclass of
    +    `StaticFileHandler` can be specified with the
    +    ``static_handler_class`` setting.
    +
    +    .. versionchanged:: 4.5
    +       Integration with the new `tornado.routing` module.
    +
    +    """
    +
    +    def __init__(
    +        self,
    +        handlers: _RuleList = None,
    +        default_host: str = None,
    +        transforms: List[Type["OutputTransform"]] = None,
    +        **settings: Any
    +    ) -> None:
    +        if transforms is None:
    +            self.transforms = []  # type: List[Type[OutputTransform]]
    +            if settings.get("compress_response") or settings.get("gzip"):
    +                self.transforms.append(GZipContentEncoding)
    +        else:
    +            self.transforms = transforms
    +        self.default_host = default_host
    +        self.settings = settings
    +        self.ui_modules = {
    +            "linkify": _linkify,
    +            "xsrf_form_html": _xsrf_form_html,
    +            "Template": TemplateModule,
    +        }
    +        self.ui_methods = {}  # type: Dict[str, Callable[..., str]]
    +        self._load_ui_modules(settings.get("ui_modules", {}))
    +        self._load_ui_methods(settings.get("ui_methods", {}))
    +        if self.settings.get("static_path"):
    +            path = self.settings["static_path"]
    +            handlers = list(handlers or [])
    +            static_url_prefix = settings.get("static_url_prefix", "/static/")
    +            static_handler_class = settings.get(
    +                "static_handler_class", StaticFileHandler
    +            )
    +            static_handler_args = settings.get("static_handler_args", {})
    +            static_handler_args["path"] = path
    +            for pattern in [
    +                re.escape(static_url_prefix) + r"(.*)",
    +                r"/(favicon\.ico)",
    +                r"/(robots\.txt)",
    +            ]:
    +                handlers.insert(0, (pattern, static_handler_class, static_handler_args))
    +
    +        if self.settings.get("debug"):
    +            self.settings.setdefault("autoreload", True)
    +            self.settings.setdefault("compiled_template_cache", False)
    +            self.settings.setdefault("static_hash_cache", False)
    +            self.settings.setdefault("serve_traceback", True)
    +
    +        self.wildcard_router = _ApplicationRouter(self, handlers)
    +        self.default_router = _ApplicationRouter(
    +            self, [Rule(AnyMatches(), self.wildcard_router)]
    +        )
    +
    +        # Automatically reload modified modules
    +        if self.settings.get("autoreload"):
    +            from tornado import autoreload
    +
    +            autoreload.start()
    +
    +    def listen(self, port: int, address: str = "", **kwargs: Any) -> HTTPServer:
    +        """Starts an HTTP server for this application on the given port.
    +
    +        This is a convenience alias for creating an `.HTTPServer`
    +        object and calling its listen method.  Keyword arguments not
    +        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
    +        `.HTTPServer` constructor.  For advanced uses
    +        (e.g. multi-process mode), do not use this method; create an
    +        `.HTTPServer` and call its
    +        `.TCPServer.bind`/`.TCPServer.start` methods directly.
    +
    +        Note that after calling this method you still need to call
    +        ``IOLoop.current().start()`` to start the server.
    +
    +        Returns the `.HTTPServer` object.
    +
    +        .. versionchanged:: 4.3
    +           Now returns the `.HTTPServer` object.
    +        """
    +        server = HTTPServer(self, **kwargs)
    +        server.listen(port, address)
    +        return server
    +
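    +# A sketch tying listen() to the event loop: listen() only binds the
    +# socket, so the IOLoop must still be started. The port is an assumption.
    +import tornado.ioloop
    +import tornado.web
    +
    +app = tornado.web.Application([])  # add handler rules here
    +app.listen(8888)
    +tornado.ioloop.IOLoop.current().start()
    +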
    +    def add_handlers(self, host_pattern: str, host_handlers: _RuleList) -> None:
    +        """Appends the given handlers to our handler list.
    +
    +        Host patterns are processed sequentially in the order they were
    +        added. All matching patterns will be considered.
    +        """
    +        host_matcher = HostMatches(host_pattern)
    +        rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers))
    +
    +        self.default_router.rules.insert(-1, rule)
    +
    +        if self.default_host is not None:
    +            self.wildcard_router.add_rules(
    +                [(DefaultHostMatches(self, host_matcher.host_pattern), host_handlers)]
    +            )
    +
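+    # Hedged example of host-based routing; ``ApiHandler`` and ``WebHandler``
+    # are hypothetical. Patterns are matched in the order they were added:
+    #
+    #     app.add_handlers(r"api\.example\.com", [(r"/v1/.*", ApiHandler)])
+    #     app.add_handlers(r".*\.example\.com", [(r"/.*", WebHandler)])
+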
    +    def add_transform(self, transform_class: Type["OutputTransform"]) -> None:
    +        self.transforms.append(transform_class)
    +
    +    def _load_ui_methods(self, methods: Any) -> None:
    +        if isinstance(methods, types.ModuleType):
    +            self._load_ui_methods(dict((n, getattr(methods, n)) for n in dir(methods)))
    +        elif isinstance(methods, list):
    +            for m in methods:
    +                self._load_ui_methods(m)
    +        else:
    +            for name, fn in methods.items():
    +                if (
    +                    not name.startswith("_")
    +                    and hasattr(fn, "__call__")
    +                    and name[0].lower() == name[0]
    +                ):
    +                    self.ui_methods[name] = fn
    +
    +    def _load_ui_modules(self, modules: Any) -> None:
    +        if isinstance(modules, types.ModuleType):
    +            self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules)))
    +        elif isinstance(modules, list):
    +            for m in modules:
    +                self._load_ui_modules(m)
    +        else:
    +            assert isinstance(modules, dict)
    +            for name, cls in modules.items():
    +                try:
    +                    if issubclass(cls, UIModule):
    +                        self.ui_modules[name] = cls
    +                except TypeError:
    +                    pass
    +
    +    def __call__(
    +        self, request: httputil.HTTPServerRequest
    +    ) -> Optional[Awaitable[None]]:
    +        # Legacy HTTPServer interface
    +        dispatcher = self.find_handler(request)
    +        return dispatcher.execute()
    +
    +    def find_handler(
    +        self, request: httputil.HTTPServerRequest, **kwargs: Any
    +    ) -> "_HandlerDelegate":
    +        route = self.default_router.find_handler(request)
    +        if route is not None:
    +            return cast("_HandlerDelegate", route)
    +
    +        if self.settings.get("default_handler_class"):
    +            return self.get_handler_delegate(
    +                request,
    +                self.settings["default_handler_class"],
    +                self.settings.get("default_handler_args", {}),
    +            )
    +
    +        return self.get_handler_delegate(request, ErrorHandler, {"status_code": 404})
    +
    +    def get_handler_delegate(
    +        self,
    +        request: httputil.HTTPServerRequest,
    +        target_class: Type[RequestHandler],
    +        target_kwargs: Dict[str, Any] = None,
    +        path_args: List[bytes] = None,
    +        path_kwargs: Dict[str, bytes] = None,
    +    ) -> "_HandlerDelegate":
    +        """Returns `~.httputil.HTTPMessageDelegate` that can serve a request
    +        for application and `RequestHandler` subclass.
    +
    +        :arg httputil.HTTPServerRequest request: current HTTP request.
    +        :arg RequestHandler target_class: a `RequestHandler` class.
    +        :arg dict target_kwargs: keyword arguments for ``target_class`` constructor.
    +        :arg list path_args: positional arguments for ``target_class`` HTTP method that
    +            will be executed while handling a request (``get``, ``post`` or any other).
    +        :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method.
    +        """
    +        return _HandlerDelegate(
    +            self, request, target_class, target_kwargs, path_args, path_kwargs
    +        )
    +
    +    def reverse_url(self, name: str, *args: Any) -> str:
    +        """Returns a URL path for handler named ``name``
    +
    +        The handler must be added to the application as a named `URLSpec`.
    +
    +        Args will be substituted for capturing groups in the `URLSpec` regex.
    +        They will be converted to strings if necessary, encoded as utf8,
    +        and url-escaped.
    +        """
    +        reversed_url = self.default_router.reverse_url(name, *args)
    +        if reversed_url is not None:
    +            return reversed_url
    +
    +        raise KeyError("%s not found in named urls" % name)
    +
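+    # Sketch of reverse_url with a named URLSpec (``UserHandler`` is
+    # hypothetical; ``url`` is the URLSpec alias defined in this module):
+    #
+    #     app = Application([url(r"/user/([0-9]+)", UserHandler, name="user")])
+    #     app.reverse_url("user", 42)  # -> "/user/42"
+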
    +    def log_request(self, handler: RequestHandler) -> None:
    +        """Writes a completed HTTP request to the logs.
    +
    +        By default writes to the python root logger.  To change
    +        this behavior either subclass Application and override this method,
    +        or pass a function in the application settings dictionary as
    +        ``log_function``.
    +        """
    +        if "log_function" in self.settings:
    +            self.settings["log_function"](handler)
    +            return
    +        if handler.get_status() < 400:
    +            log_method = access_log.info
    +        elif handler.get_status() < 500:
    +            log_method = access_log.warning
    +        else:
    +            log_method = access_log.error
    +        request_time = 1000.0 * handler.request.request_time()
    +        log_method(
    +            "%d %s %.2fms",
    +            handler.get_status(),
    +            handler._request_summary(),
    +            request_time,
    +        )
    +
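+    # Hedged sketch of the ``log_function`` setting described above (the
+    # function below is hypothetical):
+    #
+    #     def quiet_log(handler):
+    #         if handler.get_status() >= 400:
+    #             access_log.warning("%d %s", handler.get_status(),
+    #                                handler._request_summary())
+    #
+    #     app = Application(handlers, log_function=quiet_log)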
    +
    +class _HandlerDelegate(httputil.HTTPMessageDelegate):
    +    def __init__(
    +        self,
    +        application: Application,
    +        request: httputil.HTTPServerRequest,
    +        handler_class: Type[RequestHandler],
    +        handler_kwargs: Optional[Dict[str, Any]],
    +        path_args: Optional[List[bytes]],
    +        path_kwargs: Optional[Dict[str, bytes]],
    +    ) -> None:
    +        self.application = application
    +        self.connection = request.connection
    +        self.request = request
    +        self.handler_class = handler_class
    +        self.handler_kwargs = handler_kwargs or {}
    +        self.path_args = path_args or []
    +        self.path_kwargs = path_kwargs or {}
    +        self.chunks = []  # type: List[bytes]
    +        self.stream_request_body = _has_stream_request_body(self.handler_class)
    +
    +    def headers_received(
    +        self,
    +        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
    +        headers: httputil.HTTPHeaders,
    +    ) -> Optional[Awaitable[None]]:
    +        if self.stream_request_body:
    +            self.request._body_future = Future()
    +            return self.execute()
    +        return None
    +
    +    def data_received(self, data: bytes) -> Optional[Awaitable[None]]:
    +        if self.stream_request_body:
    +            return self.handler.data_received(data)
    +        else:
    +            self.chunks.append(data)
    +            return None
    +
    +    def finish(self) -> None:
    +        if self.stream_request_body:
    +            future_set_result_unless_cancelled(self.request._body_future, None)
    +        else:
    +            self.request.body = b"".join(self.chunks)
    +            self.request._parse_body()
    +            self.execute()
    +
    +    def on_connection_close(self) -> None:
    +        if self.stream_request_body:
    +            self.handler.on_connection_close()
    +        else:
    +            self.chunks = None  # type: ignore
    +
    +    def execute(self) -> Optional[Awaitable[None]]:
+        # If the template cache is disabled (usually in debug mode),
    +        # re-compile templates and reload static files on every
    +        # request so you don't need to restart to see changes
    +        if not self.application.settings.get("compiled_template_cache", True):
    +            with RequestHandler._template_loader_lock:
    +                for loader in RequestHandler._template_loaders.values():
    +                    loader.reset()
    +        if not self.application.settings.get("static_hash_cache", True):
    +            StaticFileHandler.reset()
    +
    +        self.handler = self.handler_class(
    +            self.application, self.request, **self.handler_kwargs
    +        )
    +        transforms = [t(self.request) for t in self.application.transforms]
    +
    +        if self.stream_request_body:
    +            self.handler._prepared_future = Future()
    +        # Note that if an exception escapes handler._execute it will be
    +        # trapped in the Future it returns (which we are ignoring here,
    +        # leaving it to be logged when the Future is GC'd).
    +        # However, that shouldn't happen because _execute has a blanket
    +        # except handler, and we cannot easily access the IOLoop here to
    +        # call add_future (because of the requirement to remain compatible
    +        # with WSGI)
    +        fut = gen.convert_yielded(
    +            self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
    +        )
    +        fut.add_done_callback(lambda f: f.result())
    +        # If we are streaming the request body, then execute() is finished
    +        # when the handler has prepared to receive the body.  If not,
    +        # it doesn't matter when execute() finishes (so we return None)
    +        return self.handler._prepared_future
    +
    +
    +class HTTPError(Exception):
    +    """An exception that will turn into an HTTP error response.
    +
    +    Raising an `HTTPError` is a convenient alternative to calling
    +    `RequestHandler.send_error` since it automatically ends the
    +    current function.
    +
    +    To customize the response sent with an `HTTPError`, override
    +    `RequestHandler.write_error`.
    +
    +    :arg int status_code: HTTP status code.  Must be listed in
+        `httplib.responses <http.client.responses>` unless the ``reason``
    +        keyword argument is given.
    +    :arg str log_message: Message to be written to the log for this error
    +        (will not be shown to the user unless the `Application` is in debug
    +        mode).  May contain ``%s``-style placeholders, which will be filled
    +        in with remaining positional parameters.
    +    :arg str reason: Keyword-only argument.  The HTTP "reason" phrase
    +        to pass in the status line along with ``status_code``.  Normally
+        determined automatically from ``status_code``, but can be set
+        explicitly when using a non-standard numeric code.
    +    """
    +
    +    def __init__(
    +        self, status_code: int = 500, log_message: str = None, *args: Any, **kwargs: Any
    +    ) -> None:
    +        self.status_code = status_code
    +        self.log_message = log_message
    +        self.args = args
    +        self.reason = kwargs.get("reason", None)
    +        if log_message and not args:
    +            self.log_message = log_message.replace("%", "%%")
    +
    +    def __str__(self) -> str:
    +        message = "HTTP %d: %s" % (
    +            self.status_code,
    +            self.reason or httputil.responses.get(self.status_code, "Unknown"),
    +        )
    +        if self.log_message:
    +            return message + " (" + (self.log_message % self.args) + ")"
    +        else:
    +            return message
    +
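+# Hedged usage sketch for HTTPError (``name`` is a hypothetical variable):
+#
+#     raise HTTPError(404)
+#     raise HTTPError(400, "missing argument %s", name)  # log_message template
+#     raise HTTPError(599, reason="Network Timeout")     # non-standard code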
    +
    +class Finish(Exception):
    +    """An exception that ends the request without producing an error response.
    +
    +    When `Finish` is raised in a `RequestHandler`, the request will
    +    end (calling `RequestHandler.finish` if it hasn't already been
    +    called), but the error-handling methods (including
    +    `RequestHandler.write_error`) will not be called.
    +
    +    If `Finish()` was created with no arguments, the pending response
    +    will be sent as-is. If `Finish()` was given an argument, that
    +    argument will be passed to `RequestHandler.finish()`.
    +
    +    This can be a more convenient way to implement custom error pages
    +    than overriding ``write_error`` (especially in library code)::
    +
    +        if self.current_user is None:
    +            self.set_status(401)
    +            self.set_header('WWW-Authenticate', 'Basic realm="something"')
    +            raise Finish()
    +
    +    .. versionchanged:: 4.3
    +       Arguments passed to ``Finish()`` will be passed on to
    +       `RequestHandler.finish`.
    +    """
    +
    +    pass
    +
    +
    +class MissingArgumentError(HTTPError):
    +    """Exception raised by `RequestHandler.get_argument`.
    +
    +    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    +    code will be used instead of 500 (and a stack trace will not be logged).
    +
    +    .. versionadded:: 3.1
    +    """
    +
    +    def __init__(self, arg_name: str) -> None:
    +        super(MissingArgumentError, self).__init__(
    +            400, "Missing argument %s" % arg_name
    +        )
    +        self.arg_name = arg_name
    +
    +
    +class ErrorHandler(RequestHandler):
    +    """Generates an error response with ``status_code`` for all requests."""
    +
    +    def initialize(self, status_code: int) -> None:
    +        self.set_status(status_code)
    +
    +    def prepare(self) -> None:
    +        raise HTTPError(self._status_code)
    +
    +    def check_xsrf_cookie(self) -> None:
    +        # POSTs to an ErrorHandler don't actually have side effects,
    +        # so we don't need to check the xsrf token.  This allows POSTs
    +        # to the wrong url to return a 404 instead of 403.
    +        pass
    +
    +
    +class RedirectHandler(RequestHandler):
    +    """Redirects the client to the given URL for all GET requests.
    +
    +    You should provide the keyword argument ``url`` to the handler, e.g.::
    +
    +        application = web.Application([
    +            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
    +        ])
    +
    +    `RedirectHandler` supports regular expression substitutions. E.g., to
    +    swap the first and second parts of a path while preserving the remainder::
    +
    +        application = web.Application([
    +            (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}),
    +        ])
    +
    +    The final URL is formatted with `str.format` and the substrings that match
    +    the capturing groups. In the above example, a request to "/a/b/c" would be
    +    formatted like::
    +
    +        str.format("/{1}/{0}/{2}", "a", "b", "c")  # -> "/b/a/c"
    +
+    Use Python's :ref:`format string syntax <formatstrings>` to customize how
    +    values are substituted.
    +
    +    .. versionchanged:: 4.5
    +       Added support for substitutions into the destination URL.
    +
    +    .. versionchanged:: 5.0
    +       If any query arguments are present, they will be copied to the
    +       destination URL.
    +    """
    +
    +    def initialize(self, url: str, permanent: bool = True) -> None:
    +        self._url = url
    +        self._permanent = permanent
    +
    +    def get(self, *args: Any) -> None:
    +        to_url = self._url.format(*args)
    +        if self.request.query_arguments:
    +            # TODO: figure out typing for the next line.
    +            to_url = httputil.url_concat(
    +                to_url,
    +                list(httputil.qs_to_qsl(self.request.query_arguments)),  # type: ignore
    +            )
    +        self.redirect(to_url, permanent=self._permanent)
    +
    +
    +class StaticFileHandler(RequestHandler):
    +    """A simple handler that can serve static content from a directory.
    +
    +    A `StaticFileHandler` is configured automatically if you pass the
    +    ``static_path`` keyword argument to `Application`.  This handler
    +    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    +    and ``static_handler_args`` settings.
    +
    +    To map an additional path to this handler for a static data directory
    +    you would add a line to your application like::
    +
    +        application = web.Application([
    +            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    +        ])
    +
    +    The handler constructor requires a ``path`` argument, which specifies the
    +    local root directory of the content to be served.
    +
    +    Note that a capture group in the regex is required to parse the value for
    +    the ``path`` argument to the get() method (different than the constructor
    +    argument above); see `URLSpec` for details.
    +
    +    To serve a file like ``index.html`` automatically when a directory is
    +    requested, set ``static_handler_args=dict(default_filename="index.html")``
    +    in your application settings, or add ``default_filename`` as an initializer
    +    argument for your ``StaticFileHandler``.
    +
    +    To maximize the effectiveness of browser caching, this class supports
    +    versioned urls (by default using the argument ``?v=``).  If a version
    +    is given, we instruct the browser to cache this file indefinitely.
    +    `make_static_url` (also available as `RequestHandler.static_url`) can
    +    be used to construct a versioned url.
    +
    +    This handler is intended primarily for use in development and light-duty
    +    file serving; for heavy traffic it will be more efficient to use
    +    a dedicated static file server (such as nginx or Apache).  We support
    +    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    +    some browsers require this functionality to be present to seek in
    +    HTML5 audio or video).
    +
    +    **Subclassing notes**
    +
    +    This class is designed to be extensible by subclassing, but because
    +    of the way static urls are generated with class methods rather than
    +    instance methods, the inheritance patterns are somewhat unusual.
    +    Be sure to use the ``@classmethod`` decorator when overriding a
+    class method.  Instance methods may use the attributes ``self.path``,
    +    ``self.absolute_path``, and ``self.modified``.
    +
    +    Subclasses should only override methods discussed in this section;
    +    overriding other methods is error-prone.  Overriding
    +    ``StaticFileHandler.get`` is particularly problematic due to the
    +    tight coupling with ``compute_etag`` and other methods.
    +
    +    To change the way static urls are generated (e.g. to match the behavior
    +    of another server or CDN), override `make_static_url`, `parse_url_path`,
    +    `get_cache_time`, and/or `get_version`.
    +
    +    To replace all interaction with the filesystem (e.g. to serve
    +    static content from a database), override `get_content`,
    +    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    +    `validate_absolute_path`.
    +
    +    .. versionchanged:: 3.1
    +       Many of the methods for subclasses were added in Tornado 3.1.
    +    """
    +
    +    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
    +
    +    _static_hashes = {}  # type: Dict[str, Optional[str]]
    +    _lock = threading.Lock()  # protects _static_hashes
    +
    +    def initialize(self, path: str, default_filename: str = None) -> None:
    +        self.root = path
    +        self.default_filename = default_filename
    +
    +    @classmethod
    +    def reset(cls) -> None:
    +        with cls._lock:
    +            cls._static_hashes = {}
    +
    +    def head(self, path: str) -> Awaitable[None]:
    +        return self.get(path, include_body=False)
    +
    +    async def get(self, path: str, include_body: bool = True) -> None:
    +        # Set up our path instance variables.
    +        self.path = self.parse_url_path(path)
    +        del path  # make sure we don't refer to path instead of self.path again
    +        absolute_path = self.get_absolute_path(self.root, self.path)
    +        self.absolute_path = self.validate_absolute_path(self.root, absolute_path)
    +        if self.absolute_path is None:
    +            return
    +
    +        self.modified = self.get_modified_time()
    +        self.set_headers()
    +
    +        if self.should_return_304():
    +            self.set_status(304)
    +            return
    +
    +        request_range = None
    +        range_header = self.request.headers.get("Range")
    +        if range_header:
    +            # As per RFC 2616 14.16, if an invalid Range header is specified,
    +            # the request will be treated as if the header didn't exist.
    +            request_range = httputil._parse_request_range(range_header)
    +
    +        size = self.get_content_size()
    +        if request_range:
    +            start, end = request_range
    +            if start is not None and start < 0:
    +                start += size
    +                if start < 0:
    +                    start = 0
    +            if (
    +                start is not None
    +                and (start >= size or (end is not None and start >= end))
    +            ) or end == 0:
+                # As per RFC 2616 14.35.1, a range is unsatisfiable only if
+                # the first requested byte is equal to or greater than the
+                # content length, or when a suffix with length 0 is specified.
    +                # https://tools.ietf.org/html/rfc7233#section-2.1
    +                # A byte-range-spec is invalid if the last-byte-pos value is present
    +                # and less than the first-byte-pos.
    +                self.set_status(416)  # Range Not Satisfiable
    +                self.set_header("Content-Type", "text/plain")
    +                self.set_header("Content-Range", "bytes */%s" % (size,))
    +                return
    +            if end is not None and end > size:
    +                # Clients sometimes blindly use a large range to limit their
    +                # download size; cap the endpoint at the actual file size.
    +                end = size
    +            # Note: only return HTTP 206 if less than the entire range has been
    +            # requested. Not only is this semantically correct, but Chrome
    +            # refuses to play audio if it gets an HTTP 206 in response to
    +            # ``Range: bytes=0-``.
    +            if size != (end or size) - (start or 0):
    +                self.set_status(206)  # Partial Content
    +                self.set_header(
    +                    "Content-Range", httputil._get_content_range(start, end, size)
    +                )
    +        else:
    +            start = end = None
    +
    +        if start is not None and end is not None:
    +            content_length = end - start
    +        elif end is not None:
    +            content_length = end
    +        elif start is not None:
    +            content_length = size - start
    +        else:
    +            content_length = size
    +        self.set_header("Content-Length", content_length)
    +
    +        if include_body:
    +            content = self.get_content(self.absolute_path, start, end)
    +            if isinstance(content, bytes):
    +                content = [content]
    +            for chunk in content:
    +                try:
    +                    self.write(chunk)
    +                    await self.flush()
    +                except iostream.StreamClosedError:
    +                    return
    +        else:
    +            assert self.request.method == "HEAD"
    +
    +    def compute_etag(self) -> Optional[str]:
    +        """Sets the ``Etag`` header based on static url version.
    +
    +        This allows efficient ``If-None-Match`` checks against cached
    +        versions, and sends the correct ``Etag`` for a partial response
    +        (i.e. the same ``Etag`` as the full file).
    +
    +        .. versionadded:: 3.1
    +        """
    +        assert self.absolute_path is not None
    +        version_hash = self._get_cached_version(self.absolute_path)
    +        if not version_hash:
    +            return None
    +        return '"%s"' % (version_hash,)
    +
    +    def set_headers(self) -> None:
    +        """Sets the content and caching headers on the response.
    +
    +        .. versionadded:: 3.1
    +        """
    +        self.set_header("Accept-Ranges", "bytes")
    +        self.set_etag_header()
    +
    +        if self.modified is not None:
    +            self.set_header("Last-Modified", self.modified)
    +
    +        content_type = self.get_content_type()
    +        if content_type:
    +            self.set_header("Content-Type", content_type)
    +
    +        cache_time = self.get_cache_time(self.path, self.modified, content_type)
    +        if cache_time > 0:
    +            self.set_header(
    +                "Expires",
    +                datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_time),
    +            )
    +            self.set_header("Cache-Control", "max-age=" + str(cache_time))
    +
    +        self.set_extra_headers(self.path)
    +
    +    def should_return_304(self) -> bool:
    +        """Returns True if the headers indicate that we should return 304.
    +
    +        .. versionadded:: 3.1
    +        """
    +        # If client sent If-None-Match, use it, ignore If-Modified-Since
    +        if self.request.headers.get("If-None-Match"):
    +            return self.check_etag_header()
    +
    +        # Check the If-Modified-Since, and don't send the result if the
    +        # content has not been modified
    +        ims_value = self.request.headers.get("If-Modified-Since")
    +        if ims_value is not None:
    +            date_tuple = email.utils.parsedate(ims_value)
    +            if date_tuple is not None:
    +                if_since = datetime.datetime(*date_tuple[:6])
    +                assert self.modified is not None
    +                if if_since >= self.modified:
    +                    return True
    +
    +        return False
    +
    +    @classmethod
    +    def get_absolute_path(cls, root: str, path: str) -> str:
    +        """Returns the absolute location of ``path`` relative to ``root``.
    +
    +        ``root`` is the path configured for this `StaticFileHandler`
    +        (in most cases the ``static_path`` `Application` setting).
    +
    +        This class method may be overridden in subclasses.  By default
    +        it returns a filesystem path, but other strings may be used
    +        as long as they are unique and understood by the subclass's
    +        overridden `get_content`.
    +
    +        .. versionadded:: 3.1
    +        """
    +        abspath = os.path.abspath(os.path.join(root, path))
    +        return abspath
    +
    +    def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
    +        """Validate and return the absolute path.
    +
    +        ``root`` is the configured path for the `StaticFileHandler`,
+        and ``path`` is the result of `get_absolute_path`.
    +
    +        This is an instance method called during request processing,
    +        so it may raise `HTTPError` or use methods like
    +        `RequestHandler.redirect` (return None after redirecting to
    +        halt further processing).  This is where 404 errors for missing files
    +        are generated.
    +
    +        This method may modify the path before returning it, but note that
    +        any such modifications will not be understood by `make_static_url`.
    +
    +        In instance methods, this method's result is available as
    +        ``self.absolute_path``.
    +
    +        .. versionadded:: 3.1
    +        """
    +        # os.path.abspath strips a trailing /.
    +        # We must add it back to `root` so that we only match files
    +        # in a directory named `root` instead of files starting with
    +        # that prefix.
    +        root = os.path.abspath(root)
    +        if not root.endswith(os.path.sep):
    +            # abspath always removes a trailing slash, except when
    +            # root is '/'. This is an unusual case, but several projects
    +            # have independently discovered this technique to disable
    +            # Tornado's path validation and (hopefully) do their own,
    +            # so we need to support it.
    +            root += os.path.sep
    +        # The trailing slash also needs to be temporarily added back
+        # to the requested path so a request to root/ will match.
    +        if not (absolute_path + os.path.sep).startswith(root):
    +            raise HTTPError(403, "%s is not in root static directory", self.path)
    +        if os.path.isdir(absolute_path) and self.default_filename is not None:
    +            # need to look at the request.path here for when path is empty
    +            # but there is some prefix to the path that was already
    +            # trimmed by the routing
    +            if not self.request.path.endswith("/"):
    +                self.redirect(self.request.path + "/", permanent=True)
    +                return None
    +            absolute_path = os.path.join(absolute_path, self.default_filename)
    +        if not os.path.exists(absolute_path):
    +            raise HTTPError(404)
    +        if not os.path.isfile(absolute_path):
    +            raise HTTPError(403, "%s is not a file", self.path)
    +        return absolute_path
    +
    +    @classmethod
    +    def get_content(
    +        cls, abspath: str, start: int = None, end: int = None
    +    ) -> Generator[bytes, None, None]:
    +        """Retrieve the content of the requested resource which is located
    +        at the given absolute path.
    +
    +        This class method may be overridden by subclasses.  Note that its
    +        signature is different from other overridable class methods
    +        (no ``settings`` argument); this is deliberate to ensure that
    +        ``abspath`` is able to stand on its own as a cache key.
    +
    +        This method should either return a byte string or an iterator
    +        of byte strings.  The latter is preferred for large files
    +        as it helps reduce memory fragmentation.
    +
    +        .. versionadded:: 3.1
    +        """
    +        with open(abspath, "rb") as file:
    +            if start is not None:
    +                file.seek(start)
    +            if end is not None:
    +                remaining = end - (start or 0)  # type: Optional[int]
    +            else:
    +                remaining = None
    +            while True:
    +                chunk_size = 64 * 1024
    +                if remaining is not None and remaining < chunk_size:
    +                    chunk_size = remaining
    +                chunk = file.read(chunk_size)
    +                if chunk:
    +                    if remaining is not None:
    +                        remaining -= len(chunk)
    +                    yield chunk
    +                else:
    +                    if remaining is not None:
    +                        assert remaining == 0
    +                    return
    +
    +    @classmethod
    +    def get_content_version(cls, abspath: str) -> str:
    +        """Returns a version string for the resource at the given path.
    +
    +        This class method may be overridden by subclasses.  The
    +        default implementation is a hash of the file's contents.
    +
    +        .. versionadded:: 3.1
    +        """
    +        data = cls.get_content(abspath)
    +        hasher = hashlib.md5()
    +        if isinstance(data, bytes):
    +            hasher.update(data)
    +        else:
    +            for chunk in data:
    +                hasher.update(chunk)
    +        return hasher.hexdigest()
    +
    +    def _stat(self) -> os.stat_result:
    +        assert self.absolute_path is not None
    +        if not hasattr(self, "_stat_result"):
    +            self._stat_result = os.stat(self.absolute_path)
    +        return self._stat_result
    +
    +    def get_content_size(self) -> int:
    +        """Retrieve the total size of the resource at the given path.
    +
    +        This method may be overridden by subclasses.
    +
    +        .. versionadded:: 3.1
    +
    +        .. versionchanged:: 4.0
    +           This method is now always called, instead of only when
    +           partial results are requested.
    +        """
    +        stat_result = self._stat()
    +        return stat_result.st_size
    +
    +    def get_modified_time(self) -> Optional[datetime.datetime]:
    +        """Returns the time that ``self.absolute_path`` was last modified.
    +
    +        May be overridden in subclasses.  Should return a `~datetime.datetime`
    +        object or None.
    +
    +        .. versionadded:: 3.1
    +        """
    +        stat_result = self._stat()
    +        # NOTE: Historically, this used stat_result[stat.ST_MTIME],
    +        # which truncates the fractional portion of the timestamp. It
    +        # was changed from that form to stat_result.st_mtime to
    +        # satisfy mypy (which disallows the bracket operator), but the
    +        # latter form returns a float instead of an int. For
    +        # consistency with the past (and because we have a unit test
    +        # that relies on this), we truncate the float here, although
    +        # I'm not sure that's the right thing to do.
    +        modified = datetime.datetime.utcfromtimestamp(int(stat_result.st_mtime))
    +        return modified
    +
    +    def get_content_type(self) -> str:
    +        """Returns the ``Content-Type`` header to be used for this request.
    +
    +        .. versionadded:: 3.1
    +        """
    +        assert self.absolute_path is not None
    +        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
    +        # per RFC 6713, use the appropriate type for a gzip compressed file
    +        if encoding == "gzip":
    +            return "application/gzip"
    +        # As of 2015-07-21 there is no bzip2 encoding defined at
    +        # http://www.iana.org/assignments/media-types/media-types.xhtml
    +        # So for that (and any other encoding), use octet-stream.
    +        elif encoding is not None:
    +            return "application/octet-stream"
    +        elif mime_type is not None:
    +            return mime_type
    +        # if mime_type not detected, use application/octet-stream
    +        else:
    +            return "application/octet-stream"
    +
    +    def set_extra_headers(self, path: str) -> None:
    +        """For subclass to add extra headers to the response"""
    +        pass
    +
    +    def get_cache_time(
    +        self, path: str, modified: Optional[datetime.datetime], mime_type: str
    +    ) -> int:
    +        """Override to customize cache control behavior.
    +
    +        Return a positive number of seconds to make the result
    +        cacheable for that amount of time or 0 to mark resource as
    +        cacheable for an unspecified amount of time (subject to
    +        browser heuristics).
    +
    +        By default returns cache expiry of 10 years for resources requested
    +        with ``v`` argument.
    +        """
    +        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
    +
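+    # Hedged override sketch: cache unversioned responses for one hour instead
+    # of leaving the lifetime to browser heuristics (class name hypothetical):
+    #
+    #     class HourCachedFiles(StaticFileHandler):
+    #         def get_cache_time(self, path, modified, mime_type):
+    #             return super().get_cache_time(path, modified, mime_type) or 3600
+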
    +    @classmethod
    +    def make_static_url(
    +        cls, settings: Dict[str, Any], path: str, include_version: bool = True
    +    ) -> str:
    +        """Constructs a versioned url for the given path.
    +
    +        This method may be overridden in subclasses (but note that it
    +        is a class method rather than an instance method).  Subclasses
    +        are only required to implement the signature
    +        ``make_static_url(cls, settings, path)``; other keyword
    +        arguments may be passed through `~RequestHandler.static_url`
    +        but are not standard.
    +
    +        ``settings`` is the `Application.settings` dictionary.  ``path``
    +        is the static path being requested.  The url returned should be
    +        relative to the current host.
    +
    +        ``include_version`` determines whether the generated URL should
    +        include the query string containing the version hash of the
    +        file corresponding to the given ``path``.
    +
    +        """
    +        url = settings.get("static_url_prefix", "/static/") + path
    +        if not include_version:
    +            return url
    +
    +        version_hash = cls.get_version(settings, path)
    +        if not version_hash:
    +            return url
    +
    +        return "%s?v=%s" % (url, version_hash)
    +
    +    def parse_url_path(self, url_path: str) -> str:
    +        """Converts a static URL path into a filesystem path.
    +
    +        ``url_path`` is the path component of the URL with
+        ``static_url_prefix`` removed.  The return value should be a
+        filesystem path relative to ``static_path``.
    +
    +        This is the inverse of `make_static_url`.
    +        """
    +        if os.path.sep != "/":
    +            url_path = url_path.replace("/", os.path.sep)
    +        return url_path
    +
    +    @classmethod
    +    def get_version(cls, settings: Dict[str, Any], path: str) -> Optional[str]:
    +        """Generate the version string to be used in static URLs.
    +
    +        ``settings`` is the `Application.settings` dictionary and ``path``
    +        is the relative location of the requested asset on the filesystem.
    +        The returned value should be a string, or ``None`` if no version
    +        could be determined.
    +
    +        .. versionchanged:: 3.1
    +           This method was previously recommended for subclasses to override;
    +           `get_content_version` is now preferred as it allows the base
    +           class to handle caching of the result.
    +        """
    +        abs_path = cls.get_absolute_path(settings["static_path"], path)
    +        return cls._get_cached_version(abs_path)
    +
    +    @classmethod
    +    def _get_cached_version(cls, abs_path: str) -> Optional[str]:
    +        with cls._lock:
    +            hashes = cls._static_hashes
    +            if abs_path not in hashes:
    +                try:
    +                    hashes[abs_path] = cls.get_content_version(abs_path)
    +                except Exception:
    +                    gen_log.error("Could not open static file %r", abs_path)
    +                    hashes[abs_path] = None
    +            hsh = hashes.get(abs_path)
    +            if hsh:
    +                return hsh
    +        return None
    +
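+# Hedged sketch of the "replace all interaction with the filesystem"
+# subclassing note above: serving from an in-memory dict (``BLOBS`` and the
+# class are hypothetical):
+#
+#     BLOBS = {"hello.txt": b"hello world"}
+#
+#     class MemoryFileHandler(StaticFileHandler):
+#         @classmethod
+#         def get_absolute_path(cls, root, path):
+#             return path  # any unique string can serve as the "absolute path"
+#
+#         def validate_absolute_path(self, root, absolute_path):
+#             if absolute_path not in BLOBS:
+#                 raise HTTPError(404)
+#             return absolute_path
+#
+#         @classmethod
+#         def get_content(cls, abspath, start=None, end=None):
+#             return BLOBS[abspath][start:end]  # end is exclusive, as in get()
+#
+#         def get_content_size(self):
+#             return len(BLOBS[self.absolute_path])
+#
+#         def get_modified_time(self):
+#             return None  # no Last-Modified header for in-memory content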
    +
    +class FallbackHandler(RequestHandler):
    +    """A `RequestHandler` that wraps another HTTP server callback.
    +
    +    The fallback is a callable object that accepts an
    +    `~.httputil.HTTPServerRequest`, such as an `Application` or
    +    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    +    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    +    usage::
    +
    +        wsgi_app = tornado.wsgi.WSGIContainer(
    +            django.core.handlers.wsgi.WSGIHandler())
    +        application = tornado.web.Application([
    +            (r"/foo", FooHandler),
    +            (r".*", FallbackHandler, dict(fallback=wsgi_app),
    +        ])
    +    """
    +
    +    def initialize(
    +        self, fallback: Callable[[httputil.HTTPServerRequest], None]
    +    ) -> None:
    +        self.fallback = fallback
    +
    +    def prepare(self) -> None:
    +        self.fallback(self.request)
    +        self._finished = True
    +        self.on_finish()
    +
    +
    +class OutputTransform(object):
    +    """A transform modifies the result of an HTTP request (e.g., GZip encoding)
    +
    +    Applications are not expected to create their own OutputTransforms
    +    or interact with them directly; the framework chooses which transforms
    +    (if any) to apply.
    +    """
    +
    +    def __init__(self, request: httputil.HTTPServerRequest) -> None:
    +        pass
    +
    +    def transform_first_chunk(
    +        self,
    +        status_code: int,
    +        headers: httputil.HTTPHeaders,
    +        chunk: bytes,
    +        finishing: bool,
    +    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
    +        return status_code, headers, chunk
    +
    +    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
    +        return chunk
    +
    +
    +class GZipContentEncoding(OutputTransform):
    +    """Applies the gzip content encoding to the response.
    +
    +    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    +
    +    .. versionchanged:: 4.0
    +        Now compresses all mime types beginning with ``text/``, instead
    +        of just a whitelist. (the whitelist is still used for certain
    +        non-text mime types).
    +    """
    +
    +    # Whitelist of compressible mime types (in addition to any types
    +    # beginning with "text/").
    +    CONTENT_TYPES = set(
    +        [
    +            "application/javascript",
    +            "application/x-javascript",
    +            "application/xml",
    +            "application/atom+xml",
    +            "application/json",
    +            "application/xhtml+xml",
    +            "image/svg+xml",
    +        ]
    +    )
    +    # Python's GzipFile defaults to level 9, while most other gzip
    +    # tools (including gzip itself) default to 6, which is probably a
    +    # better CPU/size tradeoff.
    +    GZIP_LEVEL = 6
    +    # Responses that are too short are unlikely to benefit from gzipping
    +    # after considering the "Content-Encoding: gzip" header and the header
    +    # inside the gzip encoding.
    +    # Note that responses written in multiple chunks will be compressed
    +    # regardless of size.
    +    MIN_LENGTH = 1024
    +
    +    def __init__(self, request: httputil.HTTPServerRequest) -> None:
    +        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
    +
    +    def _compressible_type(self, ctype: str) -> bool:
    +        return ctype.startswith("text/") or ctype in self.CONTENT_TYPES
    +
    +    def transform_first_chunk(
    +        self,
    +        status_code: int,
    +        headers: httputil.HTTPHeaders,
    +        chunk: bytes,
    +        finishing: bool,
    +    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
    +        # TODO: can/should this type be inherited from the superclass?
    +        if "Vary" in headers:
    +            headers["Vary"] += ", Accept-Encoding"
    +        else:
    +            headers["Vary"] = "Accept-Encoding"
    +        if self._gzipping:
    +            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
    +            self._gzipping = (
    +                self._compressible_type(ctype)
    +                and (not finishing or len(chunk) >= self.MIN_LENGTH)
    +                and ("Content-Encoding" not in headers)
    +            )
    +        if self._gzipping:
    +            headers["Content-Encoding"] = "gzip"
    +            self._gzip_value = BytesIO()
    +            self._gzip_file = gzip.GzipFile(
    +                mode="w", fileobj=self._gzip_value, compresslevel=self.GZIP_LEVEL
    +            )
    +            chunk = self.transform_chunk(chunk, finishing)
    +            if "Content-Length" in headers:
    +                # The original content length is no longer correct.
    +                # If this is the last (and only) chunk, we can set the new
    +                # content-length; otherwise we remove it and fall back to
    +                # chunked encoding.
    +                if finishing:
    +                    headers["Content-Length"] = str(len(chunk))
    +                else:
    +                    del headers["Content-Length"]
    +        return status_code, headers, chunk
    +
    +    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
    +        if self._gzipping:
    +            self._gzip_file.write(chunk)
    +            if finishing:
    +                self._gzip_file.close()
    +            else:
    +                self._gzip_file.flush()
    +            chunk = self._gzip_value.getvalue()
    +            self._gzip_value.truncate(0)
    +            self._gzip_value.seek(0)
    +        return chunk
    +
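+# Note: this transform is enabled via the ``compress_response`` application
+# setting; a minimal sketch (``handlers`` is hypothetical):
+#
+#     app = Application(handlers, compress_response=True)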
    +
    +def authenticated(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
    +    """Decorate methods with this to require that the user be logged in.
    +
    +    If the user is not logged in, they will be redirected to the configured
+    `login url <RequestHandler.get_login_url>`.
    +
    +    If you configure a login url with a query parameter, Tornado will
    +    assume you know what you're doing and use it as-is.  If not, it
    +    will add a `next` parameter so the login page knows where to send
    +    you once you're logged in.
    +    """
    +
    +    @functools.wraps(method)
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
    +        if not self.current_user:
    +            if self.request.method in ("GET", "HEAD"):
    +                url = self.get_login_url()
    +                if "?" not in url:
    +                    if urllib.parse.urlsplit(url).scheme:
    +                        # if login url is absolute, make next absolute too
    +                        next_url = self.request.full_url()
    +                    else:
    +                        assert self.request.uri is not None
    +                        next_url = self.request.uri
    +                    url += "?" + urlencode(dict(next=next_url))
    +                self.redirect(url)
    +                return None
    +            raise HTTPError(403)
    +        return method(self, *args, **kwargs)
    +
    +    return wrapper
    +
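+# Hedged usage sketch for @authenticated (``ProfileHandler`` is hypothetical).
+# Unauthenticated GET/HEAD requests are redirected to the login url with a
+# ``next`` parameter; other methods receive a 403:
+#
+#     class ProfileHandler(RequestHandler):
+#         @authenticated
+#         def get(self):
+#             self.write("hello, %s" % self.current_user)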
    +
    +class UIModule(object):
    +    """A re-usable, modular UI unit on a page.
    +
    +    UI modules often execute additional queries, and they can include
    +    additional CSS and JavaScript that will be included in the output
    +    page, which is automatically inserted on page render.
    +
    +    Subclasses of UIModule must override the `render` method.
    +    """
    +
    +    def __init__(self, handler: RequestHandler) -> None:
    +        self.handler = handler
    +        self.request = handler.request
    +        self.ui = handler.ui
    +        self.locale = handler.locale
    +
    +    @property
    +    def current_user(self) -> Any:
    +        return self.handler.current_user
    +
    +    def render(self, *args: Any, **kwargs: Any) -> str:
    +        """Override in subclasses to return this module's output."""
    +        raise NotImplementedError()
    +
    +    def embedded_javascript(self) -> Optional[str]:
    +        """Override to return a JavaScript string
    +        to be embedded in the page."""
    +        return None
    +
    +    def javascript_files(self) -> Optional[Iterable[str]]:
    +        """Override to return a list of JavaScript files needed by this module.
    +
    +        If the return values are relative paths, they will be passed to
    +        `RequestHandler.static_url`; otherwise they will be used as-is.
    +        """
    +        return None
    +
    +    def embedded_css(self) -> Optional[str]:
    +        """Override to return a CSS string
    +        that will be embedded in the page."""
    +        return None
    +
    +    def css_files(self) -> Optional[Iterable[str]]:
    +        """Override to returns a list of CSS files required by this module.
    +
    +        If the return values are relative paths, they will be passed to
    +        `RequestHandler.static_url`; otherwise they will be used as-is.
    +        """
    +        return None
    +
    +    def html_head(self) -> Optional[str]:
    +        """Override to return an HTML string that will be put in the 
    +        element.
    +        """
    +        return None
    +
    +    def html_body(self) -> Optional[str]:
    +        """Override to return an HTML string that will be put at the end of
+        the ``<body>`` element.
    +        """
    +        return None
    +
    +    def render_string(self, path: str, **kwargs: Any) -> bytes:
    +        """Renders a template and returns it as a string."""
    +        return self.handler.render_string(path, **kwargs)
    +
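+# Hedged sketch of a UIModule subclass (module name and template are
+# hypothetical); in a template it would be invoked as {% module Entry(entry) %}:
+#
+#     class Entry(UIModule):
+#         def render(self, entry, show_comments=False):
+#             return self.render_string(
+#                 "module-entry.html", entry=entry, show_comments=show_comments)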
    +
    +class _linkify(UIModule):
    +    def render(self, text: str, **kwargs: Any) -> str:  # type: ignore
    +        return escape.linkify(text, **kwargs)
    +
    +
    +class _xsrf_form_html(UIModule):
    +    def render(self) -> str:  # type: ignore
    +        return self.handler.xsrf_form_html()
    +
    +
    +class TemplateModule(UIModule):
    +    """UIModule that simply renders the given template.
    +
    +    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    +    but the module version gets its own namespace (with kwargs passed to
    +    Template()) instead of inheriting the outer template's namespace.
    +
    +    Templates rendered through this module also get access to UIModule's
    +    automatic javascript/css features.  Simply call set_resources
    +    inside the template and give it keyword arguments corresponding to
    +    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    +    Note that these resources are output once per template file, not once
    +    per instantiation of the template, so they must not depend on
    +    any arguments to the template.
    +    """
    +
    +    def __init__(self, handler: RequestHandler) -> None:
    +        super(TemplateModule, self).__init__(handler)
    +        # keep resources in both a list and a dict to preserve order
    +        self._resource_list = []  # type: List[Dict[str, Any]]
    +        self._resource_dict = {}  # type: Dict[str, Dict[str, Any]]
    +
    +    def render(self, path: str, **kwargs: Any) -> bytes:  # type: ignore
    +        def set_resources(**kwargs) -> str:  # type: ignore
    +            if path not in self._resource_dict:
    +                self._resource_list.append(kwargs)
    +                self._resource_dict[path] = kwargs
    +            else:
    +                if self._resource_dict[path] != kwargs:
    +                    raise ValueError(
    +                        "set_resources called with different "
    +                        "resources for the same template"
    +                    )
    +            return ""
    +
    +        return self.render_string(path, set_resources=set_resources, **kwargs)
    +
    +    def _get_resources(self, key: str) -> Iterable[str]:
    +        return (r[key] for r in self._resource_list if key in r)
    +
    +    def embedded_javascript(self) -> str:
    +        return "\n".join(self._get_resources("embedded_javascript"))
    +
    +    def javascript_files(self) -> Iterable[str]:
    +        result = []
    +        for f in self._get_resources("javascript_files"):
    +            if isinstance(f, (unicode_type, bytes)):
    +                result.append(f)
    +            else:
    +                result.extend(f)
    +        return result
    +
    +    def embedded_css(self) -> str:
    +        return "\n".join(self._get_resources("embedded_css"))
    +
    +    def css_files(self) -> Iterable[str]:
    +        result = []
    +        for f in self._get_resources("css_files"):
    +            if isinstance(f, (unicode_type, bytes)):
    +                result.append(f)
    +            else:
    +                result.extend(f)
    +        return result
    +
    +    def html_head(self) -> str:
    +        return "".join(self._get_resources("html_head"))
    +
    +    def html_body(self) -> str:
    +        return "".join(self._get_resources("html_body"))
    +
    +
    +class _UIModuleNamespace(object):
    +    """Lazy namespace which creates UIModule proxies bound to a handler."""
    +
    +    def __init__(
    +        self, handler: RequestHandler, ui_modules: Dict[str, Type[UIModule]]
    +    ) -> None:
    +        self.handler = handler
    +        self.ui_modules = ui_modules
    +
    +    def __getitem__(self, key: str) -> Callable[..., str]:
    +        return self.handler._ui_module(key, self.ui_modules[key])
    +
    +    def __getattr__(self, key: str) -> Callable[..., str]:
    +        try:
    +            return self[key]
    +        except KeyError as e:
    +            raise AttributeError(str(e))
    +
    +
    +def create_signed_value(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: Union[str, bytes],
    +    version: int = None,
    +    clock: Callable[[], float] = None,
    +    key_version: int = None,
    +) -> bytes:
    +    if version is None:
    +        version = DEFAULT_SIGNED_VALUE_VERSION
    +    if clock is None:
    +        clock = time.time
    +
    +    timestamp = utf8(str(int(clock())))
    +    value = base64.b64encode(utf8(value))
    +    if version == 1:
    +        assert not isinstance(secret, dict)
    +        signature = _create_signature_v1(secret, name, value, timestamp)
    +        value = b"|".join([value, timestamp, signature])
    +        return value
    +    elif version == 2:
    +        # The v2 format consists of a version number and a series of
    +        # length-prefixed fields "%d:%s", the last of which is a
    +        # signature, all separated by pipes.  All numbers are in
    +        # decimal format with no leading zeros.  The signature is an
    +        # HMAC-SHA256 of the whole string up to that point, including
    +        # the final pipe.
    +        #
    +        # The fields are:
    +        # - format version (i.e. 2; no length prefix)
    +        # - key version (integer, default is 0)
    +        # - timestamp (integer seconds since epoch)
    +        # - name (not encoded; assumed to be ~alphanumeric)
    +        # - value (base64-encoded)
    +        # - signature (hex-encoded; no length prefix)
    +        def format_field(s: Union[str, bytes]) -> bytes:
    +            return utf8("%d:" % len(s)) + utf8(s)
    +
    +        to_sign = b"|".join(
    +            [
    +                b"2",
    +                format_field(str(key_version or 0)),
    +                format_field(timestamp),
    +                format_field(name),
    +                format_field(value),
    +                b"",
    +            ]
    +        )
    +
    +        if isinstance(secret, dict):
    +            assert (
    +                key_version is not None
    +            ), "Key version must be set when sign key dict is used"
    +            assert version >= 2, "Version must be at least 2 for key version support"
    +            secret = secret[key_version]
    +
    +        signature = _create_signature_v2(secret, to_sign)
    +        return to_sign + signature
    +    else:
    +        raise ValueError("Unsupported version %d" % version)
    +
    +
    +# A leading version number in decimal
    +# with no leading zeros, followed by a pipe.
    +_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
    +
    +
    +def _get_version(value: bytes) -> int:
    +    # Figures out what version value is.  Version 1 did not include an
    +    # explicit version field and started with arbitrary base64 data,
    +    # which makes this tricky.
    +    m = _signed_value_version_re.match(value)
    +    if m is None:
    +        version = 1
    +    else:
    +        try:
    +            version = int(m.group(1))
    +            if version > 999:
    +                # Certain payloads from the version-less v1 format may
    +                # be parsed as valid integers.  Due to base64 padding
    +                # restrictions, this can only happen for numbers whose
    +                # length is a multiple of 4, so we can treat all
    +                # numbers up to 999 as versions, and for the rest we
    +                # fall back to v1 format.
    +                version = 1
    +        except ValueError:
    +            version = 1
    +    return version
    +
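+
+# Editor's sketch (illustrative payloads, not real signed values): a v2 value
+# carries an explicit "N|" version prefix, while version-less v1 data starts
+# with arbitrary base64 and falls through to 1.
+def _demo_get_version() -> None:
+    assert _get_version(b"2|1:0|10:1300000000|3:uid|4:NDI=|ff") == 2
+    assert _get_version(b"NDI=|1300000000|ff") == 1
+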
    +
    +def decode_signed_value(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: Union[None, str, bytes],
    +    max_age_days: int = 31,
+    clock: Optional[Callable[[], float]] = None,
+    min_version: Optional[int] = None,
    +) -> Optional[bytes]:
    +    if clock is None:
    +        clock = time.time
    +    if min_version is None:
    +        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    +    if min_version > 2:
    +        raise ValueError("Unsupported min_version %d" % min_version)
    +    if not value:
    +        return None
    +
    +    value = utf8(value)
    +    version = _get_version(value)
    +
    +    if version < min_version:
    +        return None
    +    if version == 1:
    +        assert not isinstance(secret, dict)
    +        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
    +    elif version == 2:
    +        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
    +    else:
    +        return None
    +
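+
+# Editor's sketch: raising ``min_version`` is how a deployment drops support
+# for the weaker v1 scheme; values below the floor simply decode to None.
+# Secret and name here are made up.
+def _demo_min_version() -> None:
+    signed_v1 = create_signed_value("change-me", "uid", "42", version=1)
+    assert decode_signed_value("change-me", "uid", signed_v1) == b"42"
+    assert decode_signed_value("change-me", "uid", signed_v1, min_version=2) is None
+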
    +
    +def _decode_signed_value_v1(
    +    secret: Union[str, bytes],
    +    name: str,
    +    value: bytes,
    +    max_age_days: int,
    +    clock: Callable[[], float],
    +) -> Optional[bytes]:
    +    parts = utf8(value).split(b"|")
    +    if len(parts) != 3:
    +        return None
    +    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    +    if not hmac.compare_digest(parts[2], signature):
    +        gen_log.warning("Invalid cookie signature %r", value)
    +        return None
    +    timestamp = int(parts[1])
    +    if timestamp < clock() - max_age_days * 86400:
    +        gen_log.warning("Expired cookie %r", value)
    +        return None
    +    if timestamp > clock() + 31 * 86400:
    +        # _cookie_signature does not hash a delimiter between the
    +        # parts of the cookie, so an attacker could transfer trailing
    +        # digits from the payload to the timestamp without altering the
    +        # signature.  For backwards compatibility, sanity-check timestamp
    +        # here instead of modifying _cookie_signature.
    +        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
    +        return None
    +    if parts[1].startswith(b"0"):
    +        gen_log.warning("Tampered cookie %r", value)
    +        return None
    +    try:
    +        return base64.b64decode(parts[0])
    +    except Exception:
    +        return None
    +
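+
+# Editor's sketch of why the extra v1 timestamp checks above exist: the v1
+# signature hashes its fields back-to-back with no delimiter, so bytes can
+# migrate between the value and the timestamp without changing the HMAC.
+def _demo_v1_no_delimiter() -> None:
+    sig_a = _create_signature_v1("change-me", "uid", b"NDI=1", b"234")
+    sig_b = _create_signature_v1("change-me", "uid", b"NDI=", b"1234")
+    assert sig_a == sig_b
+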
    +
    +def _decode_fields_v2(value: bytes) -> Tuple[int, bytes, bytes, bytes, bytes]:
    +    def _consume_field(s: bytes) -> Tuple[bytes, bytes]:
    +        length, _, rest = s.partition(b":")
    +        n = int(length)
    +        field_value = rest[:n]
    +        # In python 3, indexing bytes returns small integers; we must
    +        # use a slice to get a byte string as in python 2.
    +        if rest[n : n + 1] != b"|":
    +            raise ValueError("malformed v2 signed value field")
    +        rest = rest[n + 1 :]
    +        return field_value, rest
    +
    +    rest = value[2:]  # remove version number
    +    key_version, rest = _consume_field(rest)
    +    timestamp, rest = _consume_field(rest)
    +    name_field, rest = _consume_field(rest)
    +    value_field, passed_sig = _consume_field(rest)
    +    return int(key_version), timestamp, name_field, value_field, passed_sig
    +
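+
+# Editor's sketch: parsing a hand-built v2 payload (the trailing "ff" stands
+# in for the real hex signature).
+def _demo_decode_fields_v2() -> None:
+    raw = b"2|1:0|10:1300000000|3:uid|4:NDI=|ff"
+    assert _decode_fields_v2(raw) == (0, b"1300000000", b"uid", b"NDI=", b"ff")
+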
    +
    +def _decode_signed_value_v2(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: bytes,
    +    max_age_days: int,
    +    clock: Callable[[], float],
    +) -> Optional[bytes]:
    +    try:
    +        (
    +            key_version,
    +            timestamp_bytes,
    +            name_field,
    +            value_field,
    +            passed_sig,
    +        ) = _decode_fields_v2(value)
    +    except ValueError:
    +        return None
    +    signed_string = value[: -len(passed_sig)]
    +
    +    if isinstance(secret, dict):
    +        try:
    +            secret = secret[key_version]
    +        except KeyError:
    +            return None
    +
    +    expected_sig = _create_signature_v2(secret, signed_string)
    +    if not hmac.compare_digest(passed_sig, expected_sig):
    +        return None
    +    if name_field != utf8(name):
    +        return None
    +    timestamp = int(timestamp_bytes)
    +    if timestamp < clock() - max_age_days * 86400:
    +        # The signature has expired.
    +        return None
    +    try:
    +        return base64.b64decode(value_field)
    +    except Exception:
    +        return None
    +
    +
    +def get_signature_key_version(value: Union[str, bytes]) -> Optional[int]:
    +    value = utf8(value)
    +    version = _get_version(value)
    +    if version < 2:
    +        return None
    +    try:
    +        key_version, _, _, _, _ = _decode_fields_v2(value)
    +    except ValueError:
    +        return None
    +
    +    return key_version
    +
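+
+# Editor's sketch of key rotation with a dict secret (all values made up):
+# the key version travels inside the signed value, so old cookies can be
+# verified with their original key and re-signed with the newest one.
+def _demo_key_rotation() -> None:
+    keys = {0: "old-secret", 1: "new-secret"}
+    signed = create_signed_value(keys, "uid", "42", version=2, key_version=1)
+    assert get_signature_key_version(signed) == 1
+    assert decode_signed_value(keys, "uid", signed) == b"42"
+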
    +
    +def _create_signature_v1(secret: Union[str, bytes], *parts: Union[str, bytes]) -> bytes:
    +    hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    +    for part in parts:
    +        hash.update(utf8(part))
    +    return utf8(hash.hexdigest())
    +
    +
    +def _create_signature_v2(secret: Union[str, bytes], s: bytes) -> bytes:
    +    hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    +    hash.update(utf8(s))
    +    return utf8(hash.hexdigest())
    +
    +
    +def is_absolute(path: str) -> bool:
    +    return any(path.startswith(x) for x in ["/", "http:", "https:"])
    diff --git a/server/www/packages/packages-linux/x64/tornado/websocket.py b/server/www/packages/packages-linux/x64/tornado/websocket.py
    index 0b994fc..0c6969b 100644
    --- a/server/www/packages/packages-linux/x64/tornado/websocket.py
    +++ b/server/www/packages/packages-linux/x64/tornado/websocket.py
    @@ -1,1342 +1,1663 @@
    -"""Implementation of the WebSocket protocol.
    -
-`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
    -communication between the browser and server.
    -
    -WebSockets are supported in the current versions of all major browsers,
    -although older versions that do not support WebSockets are still in use
    -(refer to http://caniuse.com/websockets for details).
    -
    -This module implements the final version of the WebSocket protocol as
-defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_.  Certain
    -browser versions (notably Safari 5.x) implemented an earlier draft of
    -the protocol (known as "draft 76") and are not compatible with this module.
    -
    -.. versionchanged:: 4.0
    -   Removed support for the draft 76 protocol version.
    -"""
    -
    -from __future__ import absolute_import, division, print_function
    -
    -import base64
    -import hashlib
    -import os
    -import sys
    -import struct
    -import tornado.escape
    -import tornado.web
    -import zlib
    -
    -from tornado.concurrent import Future, future_set_result_unless_cancelled
    -from tornado.escape import utf8, native_str, to_unicode
    -from tornado import gen, httpclient, httputil
    -from tornado.ioloop import IOLoop, PeriodicCallback
    -from tornado.iostream import StreamClosedError
    -from tornado.log import gen_log
    -from tornado import simple_httpclient
    -from tornado.queues import Queue
    -from tornado.tcpclient import TCPClient
    -from tornado.util import _websocket_mask, PY3
    -
    -if PY3:
-    from urllib.parse import urlparse  # py3
-    xrange = range
-else:
-    from urlparse import urlparse  # py2
    -
    -_default_max_message_size = 10 * 1024 * 1024
    -
    -
    -class WebSocketError(Exception):
    -    pass
    -
    -
    -class WebSocketClosedError(WebSocketError):
    -    """Raised by operations on a closed connection.
    -
    -    .. versionadded:: 3.2
    -    """
    -    pass
    -
    -
    -class _DecompressTooLargeError(Exception):
    -    pass
    -
    -
    -class WebSocketHandler(tornado.web.RequestHandler):
    -    """Subclass this class to create a basic WebSocket handler.
    -
    -    Override `on_message` to handle incoming messages, and use
    -    `write_message` to send messages to the client. You can also
    -    override `open` and `on_close` to handle opened and closed
    -    connections.
    -
    -    Custom upgrade response headers can be sent by overriding
    -    `~tornado.web.RequestHandler.set_default_headers` or
    -    `~tornado.web.RequestHandler.prepare`.
    -
    -    See http://dev.w3.org/html5/websockets/ for details on the
    -    JavaScript interface.  The protocol is specified at
    -    http://tools.ietf.org/html/rfc6455.
    -
-    Here is an example WebSocket handler that echoes all received messages
-    back to the client:
    -
    -    .. testcode::
    -
    -      class EchoWebSocket(tornado.websocket.WebSocketHandler):
    -          def open(self):
    -              print("WebSocket opened")
    -
    -          def on_message(self, message):
    -              self.write_message(u"You said: " + message)
    -
    -          def on_close(self):
    -              print("WebSocket closed")
    -
    -    .. testoutput::
    -       :hide:
    -
    -    WebSockets are not standard HTTP connections. The "handshake" is
    -    HTTP, but after the handshake, the protocol is
    -    message-based. Consequently, most of the Tornado HTTP facilities
    -    are not available in handlers of this type. The only communication
    -    methods available to you are `write_message()`, `ping()`, and
-    `close()`. Likewise, your request handler class should implement the
-    `open()` method rather than ``get()`` or ``post()``.
    -
    -    If you map the handler above to ``/websocket`` in your application, you can
    -    invoke it in JavaScript with::
    -
    -      var ws = new WebSocket("ws://localhost:8888/websocket");
    -      ws.onopen = function() {
    -         ws.send("Hello, world");
    -      };
    -      ws.onmessage = function (evt) {
    -         alert(evt.data);
    -      };
    -
    -    This script pops up an alert box that says "You said: Hello, world".
    -
    -    Web browsers allow any site to open a websocket connection to any other,
    -    instead of using the same-origin policy that governs other network
    -    access from javascript.  This can be surprising and is a potential
    -    security hole, so since Tornado 4.0 `WebSocketHandler` requires
    -    applications that wish to receive cross-origin websockets to opt in
    -    by overriding the `~WebSocketHandler.check_origin` method (see that
    -    method's docs for details).  Failure to do so is the most likely
    -    cause of 403 errors when making a websocket connection.
    -
    -    When using a secure websocket connection (``wss://``) with a self-signed
    -    certificate, the connection from a browser may fail because it wants
    -    to show the "accept this certificate" dialog but has nowhere to show it.
    -    You must first visit a regular HTML page using the same certificate
    -    to accept it before the websocket connection will succeed.
    -
    -    If the application setting ``websocket_ping_interval`` has a non-zero
    -    value, a ping will be sent periodically, and the connection will be
    -    closed if a response is not received before the ``websocket_ping_timeout``.
    -
    -    Messages larger than the ``websocket_max_message_size`` application setting
    -    (default 10MiB) will not be accepted.
    -
    -    .. versionchanged:: 4.5
    -       Added ``websocket_ping_interval``, ``websocket_ping_timeout``, and
    -       ``websocket_max_message_size``.
    -    """
    -    def __init__(self, application, request, **kwargs):
    -        super(WebSocketHandler, self).__init__(application, request, **kwargs)
    -        self.ws_connection = None
    -        self.close_code = None
    -        self.close_reason = None
    -        self.stream = None
    -        self._on_close_called = False
    -
    -    def get(self, *args, **kwargs):
    -        self.open_args = args
    -        self.open_kwargs = kwargs
    -
    -        # Upgrade header should be present and should be equal to WebSocket
    -        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
    -            self.set_status(400)
    -            log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
    -            self.finish(log_msg)
    -            gen_log.debug(log_msg)
    -            return
    -
    -        # Connection header should be upgrade.
    -        # Some proxy servers/load balancers
    -        # might mess with it.
    -        headers = self.request.headers
    -        connection = map(lambda s: s.strip().lower(),
    -                         headers.get("Connection", "").split(","))
    -        if 'upgrade' not in connection:
    -            self.set_status(400)
    -            log_msg = "\"Connection\" must be \"Upgrade\"."
    -            self.finish(log_msg)
    -            gen_log.debug(log_msg)
    -            return
    -
    -        # Handle WebSocket Origin naming convention differences
    -        # The difference between version 8 and 13 is that in 8 the
    -        # client sends a "Sec-Websocket-Origin" header and in 13 it's
    -        # simply "Origin".
    -        if "Origin" in self.request.headers:
    -            origin = self.request.headers.get("Origin")
    -        else:
    -            origin = self.request.headers.get("Sec-Websocket-Origin", None)
    -
    -        # If there was an origin header, check to make sure it matches
    -        # according to check_origin. When the origin is None, we assume it
    -        # did not come from a browser and that it can be passed on.
    -        if origin is not None and not self.check_origin(origin):
    -            self.set_status(403)
    -            log_msg = "Cross origin websockets not allowed"
    -            self.finish(log_msg)
    -            gen_log.debug(log_msg)
    -            return
    -
    -        self.ws_connection = self.get_websocket_protocol()
    -        if self.ws_connection:
    -            self.ws_connection.accept_connection()
    -        else:
    -            self.set_status(426, "Upgrade Required")
    -            self.set_header("Sec-WebSocket-Version", "7, 8, 13")
    -            self.finish()
    -
    -    stream = None
    -
    -    @property
    -    def ping_interval(self):
    -        """The interval for websocket keep-alive pings.
    -
    -        Set websocket_ping_interval = 0 to disable pings.
    -        """
    -        return self.settings.get('websocket_ping_interval', None)
    -
    -    @property
    -    def ping_timeout(self):
    -        """If no ping is received in this many seconds,
    -        close the websocket connection (VPNs, etc. can fail to cleanly close ws connections).
    -        Default is max of 3 pings or 30 seconds.
    -        """
    -        return self.settings.get('websocket_ping_timeout', None)
    -
    -    @property
    -    def max_message_size(self):
    -        """Maximum allowed message size.
    -
    -        If the remote peer sends a message larger than this, the connection
    -        will be closed.
    -
    -        Default is 10MiB.
    -        """
    -        return self.settings.get('websocket_max_message_size', _default_max_message_size)
    -
    -    def write_message(self, message, binary=False):
    -        """Sends the given message to the client of this Web Socket.
    -
    -        The message may be either a string or a dict (which will be
    -        encoded as json).  If the ``binary`` argument is false, the
    -        message will be sent as utf8; in binary mode any byte string
    -        is allowed.
    -
    -        If the connection is already closed, raises `WebSocketClosedError`.
    -        Returns a `.Future` which can be used for flow control.
    -
    -        .. versionchanged:: 3.2
    -           `WebSocketClosedError` was added (previously a closed connection
    -           would raise an `AttributeError`)
    -
    -        .. versionchanged:: 4.3
    -           Returns a `.Future` which can be used for flow control.
    -
    -        .. versionchanged:: 5.0
    -           Consistently raises `WebSocketClosedError`. Previously could
    -           sometimes raise `.StreamClosedError`.
    -        """
    -        if self.ws_connection is None:
    -            raise WebSocketClosedError()
    -        if isinstance(message, dict):
    -            message = tornado.escape.json_encode(message)
    -        return self.ws_connection.write_message(message, binary=binary)
    -
    -    def select_subprotocol(self, subprotocols):
    -        """Override to implement subprotocol negotiation.
    -
    -        ``subprotocols`` is a list of strings identifying the
    -        subprotocols proposed by the client.  This method may be
    -        overridden to return one of those strings to select it, or
    -        ``None`` to not select a subprotocol.
    -
    -        Failure to select a subprotocol does not automatically abort
    -        the connection, although clients may close the connection if
    -        none of their proposed subprotocols was selected.
    -
    -        The list may be empty, in which case this method must return
    -        None. This method is always called exactly once even if no
    -        subprotocols were proposed so that the handler can be advised
    -        of this fact.
    -
    -        .. versionchanged:: 5.1
    -
    -           Previously, this method was called with a list containing
    -           an empty string instead of an empty list if no subprotocols
    -           were proposed by the client.
    -        """
    -        return None
    -
    -    @property
    -    def selected_subprotocol(self):
    -        """The subprotocol returned by `select_subprotocol`.
    -
    -        .. versionadded:: 5.1
    -        """
    -        return self.ws_connection.selected_subprotocol
    -
    -    def get_compression_options(self):
    -        """Override to return compression options for the connection.
    -
    -        If this method returns None (the default), compression will
    -        be disabled.  If it returns a dict (even an empty one), it
    -        will be enabled.  The contents of the dict may be used to
    -        control the following compression options:
    -
    -        ``compression_level`` specifies the compression level.
    -
    -        ``mem_level`` specifies the amount of memory used for the internal compression state.
    -
-        These parameters are documented in detail here:
-        https://docs.python.org/3.6/library/zlib.html#zlib.compressobj
    -
    -        .. versionadded:: 4.1
    -
    -        .. versionchanged:: 4.5
    -
    -           Added ``compression_level`` and ``mem_level``.
    -        """
    -        # TODO: Add wbits option.
    -        return None
    -
    -    def open(self, *args, **kwargs):
    -        """Invoked when a new WebSocket is opened.
    -
    -        The arguments to `open` are extracted from the `tornado.web.URLSpec`
    -        regular expression, just like the arguments to
    -        `tornado.web.RequestHandler.get`.
    -
    -        `open` may be a coroutine. `on_message` will not be called until
    -        `open` has returned.
    -
    -        .. versionchanged:: 5.1
    -
    -           ``open`` may be a coroutine.
    -        """
    -        pass
    -
    -    def on_message(self, message):
    -        """Handle incoming messages on the WebSocket
    -
    -        This method must be overridden.
    -
    -        .. versionchanged:: 4.5
    -
    -           ``on_message`` can be a coroutine.
    -        """
    -        raise NotImplementedError
    -
    -    def ping(self, data=b''):
    -        """Send ping frame to the remote end.
    -
    -        The data argument allows a small amount of data (up to 125
    -        bytes) to be sent as a part of the ping message. Note that not
    -        all websocket implementations expose this data to
    -        applications.
    -
    -        Consider using the ``websocket_ping_interval`` application
    -        setting instead of sending pings manually.
    -
    -        .. versionchanged:: 5.1
    -
    -           The data argument is now optional.
    -
    -        """
    -        data = utf8(data)
    -        if self.ws_connection is None:
    -            raise WebSocketClosedError()
    -        self.ws_connection.write_ping(data)
    -
    -    def on_pong(self, data):
    -        """Invoked when the response to a ping frame is received."""
    -        pass
    -
    -    def on_ping(self, data):
    -        """Invoked when the a ping frame is received."""
    -        pass
    -
    -    def on_close(self):
    -        """Invoked when the WebSocket is closed.
    -
    -        If the connection was closed cleanly and a status code or reason
    -        phrase was supplied, these values will be available as the attributes
    -        ``self.close_code`` and ``self.close_reason``.
    -
    -        .. versionchanged:: 4.0
    -
    -           Added ``close_code`` and ``close_reason`` attributes.
    -        """
    -        pass
    -
    -    def close(self, code=None, reason=None):
    -        """Closes this Web Socket.
    -
    -        Once the close handshake is successful the socket will be closed.
    -
    -        ``code`` may be a numeric status code, taken from the values
-        defined in `RFC 6455 section 7.4.1
-        <http://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
    -        ``reason`` may be a textual message about why the connection is
    -        closing.  These values are made available to the client, but are
    -        not otherwise interpreted by the websocket protocol.
    -
    -        .. versionchanged:: 4.0
    -
    -           Added the ``code`` and ``reason`` arguments.
    -        """
    -        if self.ws_connection:
    -            self.ws_connection.close(code, reason)
    -            self.ws_connection = None
    -
    -    def check_origin(self, origin):
    -        """Override to enable support for allowing alternate origins.
    -
    -        The ``origin`` argument is the value of the ``Origin`` HTTP
    -        header, the url responsible for initiating this request.  This
    -        method is not called for clients that do not send this header;
    -        such requests are always allowed (because all browsers that
    -        implement WebSockets support this header, and non-browser
    -        clients do not have the same cross-site security concerns).
    -
    -        Should return True to accept the request or False to reject it.
    -        By default, rejects all requests with an origin on a host other
    -        than this one.
    -
    -        This is a security protection against cross site scripting attacks on
    -        browsers, since WebSockets are allowed to bypass the usual same-origin
    -        policies and don't use CORS headers.
    -
    -        .. warning::
    -
    -           This is an important security measure; don't disable it
    -           without understanding the security implications. In
    -           particular, if your authentication is cookie-based, you
    -           must either restrict the origins allowed by
    -           ``check_origin()`` or implement your own XSRF-like
-           protection for websocket connections. See `these
-           <https://www.christian-schneider.net/CrossSiteWebSocketHijacking.html>`_
-           `articles
-           <https://devcenter.heroku.com/articles/websocket-security>`_
-           for more.
    -
    -        To accept all cross-origin traffic (which was the default prior to
    -        Tornado 4.0), simply override this method to always return true::
    -
    -            def check_origin(self, origin):
    -                return True
    -
    -        To allow connections from any subdomain of your site, you might
    -        do something like::
    -
    -            def check_origin(self, origin):
    -                parsed_origin = urllib.parse.urlparse(origin)
    -                return parsed_origin.netloc.endswith(".mydomain.com")
    -
    -        .. versionadded:: 4.0
    -
    -        """
    -        parsed_origin = urlparse(origin)
    -        origin = parsed_origin.netloc
    -        origin = origin.lower()
    -
    -        host = self.request.headers.get("Host")
    -
    -        # Check to see that origin matches host directly, including ports
    -        return origin == host
    -
    -    def set_nodelay(self, value):
    -        """Set the no-delay flag for this stream.
    -
    -        By default, small messages may be delayed and/or combined to minimize
    -        the number of packets sent.  This can sometimes cause 200-500ms delays
    -        due to the interaction between Nagle's algorithm and TCP delayed
    -        ACKs.  To reduce this delay (at the expense of possibly increasing
    -        bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
    -        connection is established.
    -
    -        See `.BaseIOStream.set_nodelay` for additional details.
    -
    -        .. versionadded:: 3.1
    -        """
    -        self.stream.set_nodelay(value)
    -
    -    def on_connection_close(self):
    -        if self.ws_connection:
    -            self.ws_connection.on_connection_close()
    -            self.ws_connection = None
    -        if not self._on_close_called:
    -            self._on_close_called = True
    -            self.on_close()
    -            self._break_cycles()
    -
    -    def _break_cycles(self):
    -        # WebSocketHandlers call finish() early, but we don't want to
    -        # break up reference cycles (which makes it impossible to call
    -        # self.render_string) until after we've really closed the
    -        # connection (if it was established in the first place,
    -        # indicated by status code 101).
    -        if self.get_status() != 101 or self._on_close_called:
    -            super(WebSocketHandler, self)._break_cycles()
    -
    -    def send_error(self, *args, **kwargs):
    -        if self.stream is None:
    -            super(WebSocketHandler, self).send_error(*args, **kwargs)
    -        else:
    -            # If we get an uncaught exception during the handshake,
    -            # we have no choice but to abruptly close the connection.
    -            # TODO: for uncaught exceptions after the handshake,
    -            # we can close the connection more gracefully.
    -            self.stream.close()
    -
    -    def get_websocket_protocol(self):
    -        websocket_version = self.request.headers.get("Sec-WebSocket-Version")
    -        if websocket_version in ("7", "8", "13"):
    -            return WebSocketProtocol13(
    -                self, compression_options=self.get_compression_options())
    -
    -    def _attach_stream(self):
    -        self.stream = self.detach()
    -        self.stream.set_close_callback(self.on_connection_close)
    -        # disable non-WS methods
    -        for method in ["write", "redirect", "set_header", "set_cookie",
    -                       "set_status", "flush", "finish"]:
    -            setattr(self, method, _raise_not_supported_for_websockets)
    -
    -
    -def _raise_not_supported_for_websockets(*args, **kwargs):
    -    raise RuntimeError("Method not supported for Web Sockets")
    -
    -
    -class WebSocketProtocol(object):
    -    """Base class for WebSocket protocol versions.
    -    """
    -    def __init__(self, handler):
    -        self.handler = handler
    -        self.request = handler.request
    -        self.stream = handler.stream
    -        self.client_terminated = False
    -        self.server_terminated = False
    -
    -    def _run_callback(self, callback, *args, **kwargs):
    -        """Runs the given callback with exception handling.
    -
    -        If the callback is a coroutine, returns its Future. On error, aborts the
    -        websocket connection and returns None.
    -        """
    -        try:
    -            result = callback(*args, **kwargs)
    -        except Exception:
    -            self.handler.log_exception(*sys.exc_info())
    -            self._abort()
    -        else:
    -            if result is not None:
    -                result = gen.convert_yielded(result)
    -                self.stream.io_loop.add_future(result, lambda f: f.result())
    -            return result
    -
    -    def on_connection_close(self):
    -        self._abort()
    -
    -    def _abort(self):
    -        """Instantly aborts the WebSocket connection by closing the socket"""
    -        self.client_terminated = True
    -        self.server_terminated = True
    -        self.stream.close()  # forcibly tear down the connection
    -        self.close()  # let the subclass cleanup
    -
    -
    -class _PerMessageDeflateCompressor(object):
    -    def __init__(self, persistent, max_wbits, compression_options=None):
    -        if max_wbits is None:
    -            max_wbits = zlib.MAX_WBITS
    -        # There is no symbolic constant for the minimum wbits value.
    -        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    -            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    -                             max_wbits, zlib.MAX_WBITS)
    -        self._max_wbits = max_wbits
    -
    -        if compression_options is None or 'compression_level' not in compression_options:
    -            self._compression_level = tornado.web.GZipContentEncoding.GZIP_LEVEL
    -        else:
    -            self._compression_level = compression_options['compression_level']
    -
    -        if compression_options is None or 'mem_level' not in compression_options:
    -            self._mem_level = 8
    -        else:
    -            self._mem_level = compression_options['mem_level']
    -
    -        if persistent:
    -            self._compressor = self._create_compressor()
    -        else:
    -            self._compressor = None
    -
    -    def _create_compressor(self):
    -        return zlib.compressobj(self._compression_level,
    -                                zlib.DEFLATED, -self._max_wbits, self._mem_level)
    -
    -    def compress(self, data):
    -        compressor = self._compressor or self._create_compressor()
    -        data = (compressor.compress(data) +
    -                compressor.flush(zlib.Z_SYNC_FLUSH))
    -        assert data.endswith(b'\x00\x00\xff\xff')
    -        return data[:-4]
    -
    -
    -class _PerMessageDeflateDecompressor(object):
    -    def __init__(self, persistent, max_wbits, max_message_size, compression_options=None):
    -        self._max_message_size = max_message_size
    -        if max_wbits is None:
    -            max_wbits = zlib.MAX_WBITS
    -        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    -            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    -                             max_wbits, zlib.MAX_WBITS)
    -        self._max_wbits = max_wbits
    -        if persistent:
    -            self._decompressor = self._create_decompressor()
    -        else:
    -            self._decompressor = None
    -
    -    def _create_decompressor(self):
    -        return zlib.decompressobj(-self._max_wbits)
    -
    -    def decompress(self, data):
    -        decompressor = self._decompressor or self._create_decompressor()
    -        result = decompressor.decompress(data + b'\x00\x00\xff\xff', self._max_message_size)
    -        if decompressor.unconsumed_tail:
    -            raise _DecompressTooLargeError()
    -        return result
    -
    -
    -class WebSocketProtocol13(WebSocketProtocol):
    -    """Implementation of the WebSocket protocol from RFC 6455.
    -
    -    This class supports versions 7 and 8 of the protocol in addition to the
    -    final version 13.
    -    """
    -    # Bit masks for the first byte of a frame.
    -    FIN = 0x80
    -    RSV1 = 0x40
    -    RSV2 = 0x20
    -    RSV3 = 0x10
    -    RSV_MASK = RSV1 | RSV2 | RSV3
    -    OPCODE_MASK = 0x0f
    -
    -    def __init__(self, handler, mask_outgoing=False,
    -                 compression_options=None):
    -        WebSocketProtocol.__init__(self, handler)
    -        self.mask_outgoing = mask_outgoing
    -        self._final_frame = False
    -        self._frame_opcode = None
    -        self._masked_frame = None
    -        self._frame_mask = None
    -        self._frame_length = None
    -        self._fragmented_message_buffer = None
    -        self._fragmented_message_opcode = None
    -        self._waiting = None
    -        self._compression_options = compression_options
    -        self._decompressor = None
    -        self._compressor = None
    -        self._frame_compressed = None
    -        # The total uncompressed size of all messages received or sent.
    -        # Unicode messages are encoded to utf8.
    -        # Only for testing; subject to change.
    -        self._message_bytes_in = 0
    -        self._message_bytes_out = 0
    -        # The total size of all packets received or sent.  Includes
    -        # the effect of compression, frame overhead, and control frames.
    -        self._wire_bytes_in = 0
    -        self._wire_bytes_out = 0
    -        self.ping_callback = None
    -        self.last_ping = 0
    -        self.last_pong = 0
    -
    -    def accept_connection(self):
    -        try:
    -            self._handle_websocket_headers()
    -        except ValueError:
    -            self.handler.set_status(400)
    -            log_msg = "Missing/Invalid WebSocket headers"
    -            self.handler.finish(log_msg)
    -            gen_log.debug(log_msg)
    -            return
    -
    -        try:
    -            self._accept_connection()
    -        except ValueError:
    -            gen_log.debug("Malformed WebSocket request received",
    -                          exc_info=True)
    -            self._abort()
    -            return
    -
    -    def _handle_websocket_headers(self):
    -        """Verifies all invariant- and required headers
    -
    -        If a header is missing or have an incorrect value ValueError will be
    -        raised
    -        """
    -        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
    -        if not all(map(lambda f: self.request.headers.get(f), fields)):
    -            raise ValueError("Missing/Invalid WebSocket headers")
    -
    -    @staticmethod
    -    def compute_accept_value(key):
    -        """Computes the value for the Sec-WebSocket-Accept header,
    -        given the value for Sec-WebSocket-Key.
    -        """
    -        sha1 = hashlib.sha1()
    -        sha1.update(utf8(key))
    -        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
    -        return native_str(base64.b64encode(sha1.digest()))
    -
    -    def _challenge_response(self):
    -        return WebSocketProtocol13.compute_accept_value(
    -            self.request.headers.get("Sec-Websocket-Key"))
    -
    -    @gen.coroutine
    -    def _accept_connection(self):
    -        subprotocol_header = self.request.headers.get("Sec-WebSocket-Protocol")
    -        if subprotocol_header:
    -            subprotocols = [s.strip() for s in subprotocol_header.split(',')]
    -        else:
    -            subprotocols = []
    -        self.selected_subprotocol = self.handler.select_subprotocol(subprotocols)
    -        if self.selected_subprotocol:
    -            assert self.selected_subprotocol in subprotocols
    -            self.handler.set_header("Sec-WebSocket-Protocol", self.selected_subprotocol)
    -
    -        extensions = self._parse_extensions_header(self.request.headers)
    -        for ext in extensions:
    -            if (ext[0] == 'permessage-deflate' and
    -                    self._compression_options is not None):
    -                # TODO: negotiate parameters if compression_options
    -                # specifies limits.
    -                self._create_compressors('server', ext[1], self._compression_options)
    -                if ('client_max_window_bits' in ext[1] and
    -                        ext[1]['client_max_window_bits'] is None):
    -                    # Don't echo an offered client_max_window_bits
    -                    # parameter with no value.
    -                    del ext[1]['client_max_window_bits']
    -                self.handler.set_header("Sec-WebSocket-Extensions",
    -                                        httputil._encode_header(
    -                                            'permessage-deflate', ext[1]))
    -                break
    -
    -        self.handler.clear_header("Content-Type")
    -        self.handler.set_status(101)
    -        self.handler.set_header("Upgrade", "websocket")
    -        self.handler.set_header("Connection", "Upgrade")
    -        self.handler.set_header("Sec-WebSocket-Accept", self._challenge_response())
    -        self.handler.finish()
    -
    -        self.handler._attach_stream()
    -        self.stream = self.handler.stream
    -
    -        self.start_pinging()
    -        open_result = self._run_callback(self.handler.open, *self.handler.open_args,
    -                                         **self.handler.open_kwargs)
    -        if open_result is not None:
    -            yield open_result
    -        yield self._receive_frame_loop()
    -
    -    def _parse_extensions_header(self, headers):
    -        extensions = headers.get("Sec-WebSocket-Extensions", '')
    -        if extensions:
    -            return [httputil._parse_header(e.strip())
    -                    for e in extensions.split(',')]
    -        return []
    -
    -    def _process_server_headers(self, key, headers):
    -        """Process the headers sent by the server to this client connection.
    -
    -        'key' is the websocket handshake challenge/response key.
    -        """
    -        assert headers['Upgrade'].lower() == 'websocket'
    -        assert headers['Connection'].lower() == 'upgrade'
    -        accept = self.compute_accept_value(key)
    -        assert headers['Sec-Websocket-Accept'] == accept
    -
    -        extensions = self._parse_extensions_header(headers)
    -        for ext in extensions:
    -            if (ext[0] == 'permessage-deflate' and
    -                    self._compression_options is not None):
    -                self._create_compressors('client', ext[1])
    -            else:
    -                raise ValueError("unsupported extension %r", ext)
    -
    -        self.selected_subprotocol = headers.get('Sec-WebSocket-Protocol', None)
    -
    -    def _get_compressor_options(self, side, agreed_parameters, compression_options=None):
    -        """Converts a websocket agreed_parameters set to keyword arguments
    -        for our compressor objects.
    -        """
    -        options = dict(
    -            persistent=(side + '_no_context_takeover') not in agreed_parameters)
    -        wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
    -        if wbits_header is None:
    -            options['max_wbits'] = zlib.MAX_WBITS
    -        else:
    -            options['max_wbits'] = int(wbits_header)
    -        options['compression_options'] = compression_options
    -        return options
    -
    -    def _create_compressors(self, side, agreed_parameters, compression_options=None):
    -        # TODO: handle invalid parameters gracefully
    -        allowed_keys = set(['server_no_context_takeover',
    -                            'client_no_context_takeover',
    -                            'server_max_window_bits',
    -                            'client_max_window_bits'])
    -        for key in agreed_parameters:
    -            if key not in allowed_keys:
    -                raise ValueError("unsupported compression parameter %r" % key)
    -        other_side = 'client' if (side == 'server') else 'server'
    -        self._compressor = _PerMessageDeflateCompressor(
    -            **self._get_compressor_options(side, agreed_parameters, compression_options))
    -        self._decompressor = _PerMessageDeflateDecompressor(
    -            max_message_size=self.handler.max_message_size,
    -            **self._get_compressor_options(other_side, agreed_parameters, compression_options))
    -
    -    def _write_frame(self, fin, opcode, data, flags=0):
    -        data_len = len(data)
    -        if opcode & 0x8:
    -            # All control frames MUST have a payload length of 125
    -            # bytes or less and MUST NOT be fragmented.
    -            if not fin:
    -                raise ValueError("control frames may not be fragmented")
    -            if data_len > 125:
    -                raise ValueError("control frame payloads may not exceed 125 bytes")
    -        if fin:
    -            finbit = self.FIN
    -        else:
    -            finbit = 0
    -        frame = struct.pack("B", finbit | opcode | flags)
    -        if self.mask_outgoing:
    -            mask_bit = 0x80
    -        else:
    -            mask_bit = 0
    -        if data_len < 126:
    -            frame += struct.pack("B", data_len | mask_bit)
    -        elif data_len <= 0xFFFF:
    -            frame += struct.pack("!BH", 126 | mask_bit, data_len)
    -        else:
    -            frame += struct.pack("!BQ", 127 | mask_bit, data_len)
    -        if self.mask_outgoing:
    -            mask = os.urandom(4)
    -            data = mask + _websocket_mask(mask, data)
    -        frame += data
    -        self._wire_bytes_out += len(frame)
    -        return self.stream.write(frame)
    -
    -    def write_message(self, message, binary=False):
    -        """Sends the given message to the client of this Web Socket."""
    -        if binary:
    -            opcode = 0x2
    -        else:
    -            opcode = 0x1
    -        message = tornado.escape.utf8(message)
    -        assert isinstance(message, bytes)
    -        self._message_bytes_out += len(message)
    -        flags = 0
    -        if self._compressor:
    -            message = self._compressor.compress(message)
    -            flags |= self.RSV1
    -        # For historical reasons, write methods in Tornado operate in a semi-synchronous
    -        # mode in which awaiting the Future they return is optional (But errors can
    -        # still be raised). This requires us to go through an awkward dance here
    -        # to transform the errors that may be returned while presenting the same
    -        # semi-synchronous interface.
    -        try:
    -            fut = self._write_frame(True, opcode, message, flags=flags)
    -        except StreamClosedError:
    -            raise WebSocketClosedError()
    -
    -        @gen.coroutine
    -        def wrapper():
    -            try:
    -                yield fut
    -            except StreamClosedError:
    -                raise WebSocketClosedError()
    -        return wrapper()
    -
    -    def write_ping(self, data):
    -        """Send ping frame."""
    -        assert isinstance(data, bytes)
    -        self._write_frame(True, 0x9, data)
    -
    -    @gen.coroutine
    -    def _receive_frame_loop(self):
    -        try:
    -            while not self.client_terminated:
    -                yield self._receive_frame()
    -        except StreamClosedError:
    -            self._abort()
    -
    -    def _read_bytes(self, n):
    -        self._wire_bytes_in += n
    -        return self.stream.read_bytes(n)
    -
    -    @gen.coroutine
    -    def _receive_frame(self):
    -        # Read the frame header.
    -        data = yield self._read_bytes(2)
    -        header, mask_payloadlen = struct.unpack("BB", data)
    -        is_final_frame = header & self.FIN
    -        reserved_bits = header & self.RSV_MASK
    -        opcode = header & self.OPCODE_MASK
    -        opcode_is_control = opcode & 0x8
    -        if self._decompressor is not None and opcode != 0:
    -            # Compression flag is present in the first frame's header,
    -            # but we can't decompress until we have all the frames of
    -            # the message.
    -            self._frame_compressed = bool(reserved_bits & self.RSV1)
    -            reserved_bits &= ~self.RSV1
    -        if reserved_bits:
    -            # client is using as-yet-undefined extensions; abort
    -            self._abort()
    -            return
    -        is_masked = bool(mask_payloadlen & 0x80)
    -        payloadlen = mask_payloadlen & 0x7f
    -
    -        # Parse and validate the length.
    -        if opcode_is_control and payloadlen >= 126:
    -            # control frames must have payload < 126
    -            self._abort()
    -            return
    -        if payloadlen < 126:
    -            self._frame_length = payloadlen
    -        elif payloadlen == 126:
    -            data = yield self._read_bytes(2)
    -            payloadlen = struct.unpack("!H", data)[0]
    -        elif payloadlen == 127:
    -            data = yield self._read_bytes(8)
    -            payloadlen = struct.unpack("!Q", data)[0]
    -        new_len = payloadlen
    -        if self._fragmented_message_buffer is not None:
    -            new_len += len(self._fragmented_message_buffer)
    -        if new_len > self.handler.max_message_size:
    -            self.close(1009, "message too big")
    -            self._abort()
    -            return
    -
    -        # Read the payload, unmasking if necessary.
    -        if is_masked:
    -            self._frame_mask = yield self._read_bytes(4)
    -        data = yield self._read_bytes(payloadlen)
    -        if is_masked:
    -            data = _websocket_mask(self._frame_mask, data)
    -
    -        # Decide what to do with this frame.
    -        if opcode_is_control:
    -            # control frames may be interleaved with a series of fragmented
    -            # data frames, so control frames must not interact with
    -            # self._fragmented_*
    -            if not is_final_frame:
    -                # control frames must not be fragmented
    -                self._abort()
    -                return
    -        elif opcode == 0:  # continuation frame
    -            if self._fragmented_message_buffer is None:
    -                # nothing to continue
    -                self._abort()
    -                return
    -            self._fragmented_message_buffer += data
    -            if is_final_frame:
    -                opcode = self._fragmented_message_opcode
    -                data = self._fragmented_message_buffer
    -                self._fragmented_message_buffer = None
    -        else:  # start of new data message
    -            if self._fragmented_message_buffer is not None:
    -                # can't start new message until the old one is finished
    -                self._abort()
    -                return
    -            if not is_final_frame:
    -                self._fragmented_message_opcode = opcode
    -                self._fragmented_message_buffer = data
    -
    -        if is_final_frame:
    -            handled_future = self._handle_message(opcode, data)
    -            if handled_future is not None:
    -                yield handled_future
    -
    -    def _handle_message(self, opcode, data):
    -        """Execute on_message, returning its Future if it is a coroutine."""
    -        if self.client_terminated:
    -            return
    -
    -        if self._frame_compressed:
    -            try:
    -                data = self._decompressor.decompress(data)
    -            except _DecompressTooLargeError:
    -                self.close(1009, "message too big after decompression")
    -                self._abort()
    -                return
    -
    -        if opcode == 0x1:
    -            # UTF-8 data
    -            self._message_bytes_in += len(data)
    -            try:
    -                decoded = data.decode("utf-8")
    -            except UnicodeDecodeError:
    -                self._abort()
    -                return
    -            return self._run_callback(self.handler.on_message, decoded)
    -        elif opcode == 0x2:
    -            # Binary data
    -            self._message_bytes_in += len(data)
    -            return self._run_callback(self.handler.on_message, data)
    -        elif opcode == 0x8:
    -            # Close
    -            self.client_terminated = True
    -            if len(data) >= 2:
    -                self.handler.close_code = struct.unpack('>H', data[:2])[0]
    -            if len(data) > 2:
    -                self.handler.close_reason = to_unicode(data[2:])
    -            # Echo the received close code, if any (RFC 6455 section 5.5.1).
    -            self.close(self.handler.close_code)
    -        elif opcode == 0x9:
    -            # Ping
    -            try:
    -                self._write_frame(True, 0xA, data)
    -            except StreamClosedError:
    -                self._abort()
    -            self._run_callback(self.handler.on_ping, data)
    -        elif opcode == 0xA:
    -            # Pong
    -            self.last_pong = IOLoop.current().time()
    -            return self._run_callback(self.handler.on_pong, data)
    -        else:
    -            self._abort()
    -
    -    def close(self, code=None, reason=None):
    -        """Closes the WebSocket connection."""
    -        if not self.server_terminated:
    -            if not self.stream.closed():
    -                if code is None and reason is not None:
    -                    code = 1000  # "normal closure" status code
    -                if code is None:
    -                    close_data = b''
    -                else:
    -                    close_data = struct.pack('>H', code)
    -                if reason is not None:
    -                    close_data += utf8(reason)
    -                try:
    -                    self._write_frame(True, 0x8, close_data)
    -                except StreamClosedError:
    -                    self._abort()
    -            self.server_terminated = True
    -        if self.client_terminated:
    -            if self._waiting is not None:
    -                self.stream.io_loop.remove_timeout(self._waiting)
    -                self._waiting = None
    -            self.stream.close()
    -        elif self._waiting is None:
    -            # Give the client a few seconds to complete a clean shutdown,
    -            # otherwise just close the connection.
    -            self._waiting = self.stream.io_loop.add_timeout(
    -                self.stream.io_loop.time() + 5, self._abort)
    -
    -    @property
    -    def ping_interval(self):
    -        interval = self.handler.ping_interval
    -        if interval is not None:
    -            return interval
    -        return 0
    -
    -    @property
    -    def ping_timeout(self):
    -        timeout = self.handler.ping_timeout
    -        if timeout is not None:
    -            return timeout
    -        return max(3 * self.ping_interval, 30)
    -
    -    def start_pinging(self):
    -        """Start sending periodic pings to keep the connection alive"""
    -        if self.ping_interval > 0:
    -            self.last_ping = self.last_pong = IOLoop.current().time()
    -            self.ping_callback = PeriodicCallback(
    -                self.periodic_ping, self.ping_interval * 1000)
    -            self.ping_callback.start()
    -
    -    def periodic_ping(self):
    -        """Send a ping to keep the websocket alive
    -
    -        Called periodically if the websocket_ping_interval is set and non-zero.
    -        """
    -        if self.stream.closed() and self.ping_callback is not None:
    -            self.ping_callback.stop()
    -            return
    -
    -        # Check for timeout on pong. Make sure that we really have
    -        # sent a recent ping in case the machine with both server and
    -        # client has been suspended since the last ping.
    -        now = IOLoop.current().time()
    -        since_last_pong = now - self.last_pong
    -        since_last_ping = now - self.last_ping
    -        if (since_last_ping < 2 * self.ping_interval and
    -                since_last_pong > self.ping_timeout):
    -            self.close()
    -            return
    -
    -        self.write_ping(b'')
    -        self.last_ping = now
    -
    -
    -class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    -    """WebSocket client connection.
    -
    -    This class should not be instantiated directly; use the
    -    `websocket_connect` function instead.
    -    """
    -    def __init__(self, request, on_message_callback=None,
    -                 compression_options=None, ping_interval=None, ping_timeout=None,
    -                 max_message_size=None, subprotocols=[]):
    -        self.compression_options = compression_options
    -        self.connect_future = Future()
    -        self.protocol = None
    -        self.read_queue = Queue(1)
    -        self.key = base64.b64encode(os.urandom(16))
    -        self._on_message_callback = on_message_callback
    -        self.close_code = self.close_reason = None
    -        self.ping_interval = ping_interval
    -        self.ping_timeout = ping_timeout
    -        self.max_message_size = max_message_size
    -
    -        scheme, sep, rest = request.url.partition(':')
    -        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
    -        request.url = scheme + sep + rest
    -        request.headers.update({
    -            'Upgrade': 'websocket',
    -            'Connection': 'Upgrade',
    -            'Sec-WebSocket-Key': self.key,
    -            'Sec-WebSocket-Version': '13',
    -        })
    -        if subprotocols is not None:
    -            request.headers['Sec-WebSocket-Protocol'] = ','.join(subprotocols)
    -        if self.compression_options is not None:
    -            # Always offer to let the server set our max_wbits (and even though
    -            # we don't offer it, we will accept a client_no_context_takeover
    -            # from the server).
    -            # TODO: set server parameters for deflate extension
    -            # if requested in self.compression_options.
    -            request.headers['Sec-WebSocket-Extensions'] = (
    -                'permessage-deflate; client_max_window_bits')
    -
    -        self.tcp_client = TCPClient()
    -        super(WebSocketClientConnection, self).__init__(
    -            None, request, lambda: None, self._on_http_response,
    -            104857600, self.tcp_client, 65536, 104857600)
    -
    -    def close(self, code=None, reason=None):
    -        """Closes the websocket connection.
    -
    -        ``code`` and ``reason`` are documented under
    -        `WebSocketHandler.close`.
    -
    -        .. versionadded:: 3.2
    -
    -        .. versionchanged:: 4.0
    -
    -           Added the ``code`` and ``reason`` arguments.
    -        """
    -        if self.protocol is not None:
    -            self.protocol.close(code, reason)
    -            self.protocol = None
    -
    -    def on_connection_close(self):
    -        if not self.connect_future.done():
    -            self.connect_future.set_exception(StreamClosedError())
    -        self.on_message(None)
    -        self.tcp_client.close()
    -        super(WebSocketClientConnection, self).on_connection_close()
    -
    -    def _on_http_response(self, response):
    -        if not self.connect_future.done():
    -            if response.error:
    -                self.connect_future.set_exception(response.error)
    -            else:
    -                self.connect_future.set_exception(WebSocketError(
    -                    "Non-websocket response"))
    -
    -    def headers_received(self, start_line, headers):
    -        if start_line.code != 101:
    -            return super(WebSocketClientConnection, self).headers_received(
    -                start_line, headers)
    -
    -        self.headers = headers
    -        self.protocol = self.get_websocket_protocol()
    -        self.protocol._process_server_headers(self.key, self.headers)
    -        self.protocol.start_pinging()
    -        IOLoop.current().add_callback(self.protocol._receive_frame_loop)
    -
    -        if self._timeout is not None:
    -            self.io_loop.remove_timeout(self._timeout)
    -            self._timeout = None
    -
    -        self.stream = self.connection.detach()
    -        self.stream.set_close_callback(self.on_connection_close)
    -        # Once we've taken over the connection, clear the final callback
    -        # we set on the http request.  This deactivates the error handling
    -        # in simple_httpclient that would otherwise interfere with our
    -        # ability to see exceptions.
    -        self.final_callback = None
    -
    -        future_set_result_unless_cancelled(self.connect_future, self)
    -
    -    def write_message(self, message, binary=False):
    -        """Sends a message to the WebSocket server.
    -
    -        If the stream is closed, raises `WebSocketClosedError`.
    -        Returns a `.Future` which can be used for flow control.
    -
    -        .. versionchanged:: 5.0
    -           Exception raised on a closed stream changed from `.StreamClosedError`
    -           to `WebSocketClosedError`.
    -        """
    -        return self.protocol.write_message(message, binary=binary)
    -
    -    def read_message(self, callback=None):
    -        """Reads a message from the WebSocket server.
    -
    -        If on_message_callback was specified at WebSocket
    -        initialization, this function will never return messages
    -
    -        Returns a future whose result is the message, or None
    -        if the connection is closed.  If a callback argument
    -        is given it will be called with the future when it is
    -        ready.
    -        """
    -
    -        future = self.read_queue.get()
    -        if callback is not None:
    -            self.io_loop.add_future(future, callback)
    -        return future
    -
    -    def on_message(self, message):
    -        if self._on_message_callback:
    -            self._on_message_callback(message)
    -        else:
    -            return self.read_queue.put(message)
    -
    -    def ping(self, data=b''):
    -        """Send ping frame to the remote end.
    -
    -        The data argument allows a small amount of data (up to 125
    -        bytes) to be sent as a part of the ping message. Note that not
    -        all websocket implementations expose this data to
    -        applications.
    -
    -        Consider using the ``ping_interval`` argument to
    -        `websocket_connect` instead of sending pings manually.
    -
    -        .. versionadded:: 5.1
    -
    -        """
    -        data = utf8(data)
    -        if self.protocol is None:
    -            raise WebSocketClosedError()
    -        self.protocol.write_ping(data)
    -
    -    def on_pong(self, data):
    -        pass
    -
    -    def on_ping(self, data):
    -        pass
    -
    -    def get_websocket_protocol(self):
    -        return WebSocketProtocol13(self, mask_outgoing=True,
    -                                   compression_options=self.compression_options)
    -
    -    @property
    -    def selected_subprotocol(self):
    -        """The subprotocol selected by the server.
    -
    -        .. versionadded:: 5.1
    -        """
    -        return self.protocol.selected_subprotocol
    -
    -
    -def websocket_connect(url, callback=None, connect_timeout=None,
    -                      on_message_callback=None, compression_options=None,
    -                      ping_interval=None, ping_timeout=None,
    -                      max_message_size=_default_max_message_size, subprotocols=None):
    -    """Client-side websocket support.
    -
    -    Takes a url and returns a Future whose result is a
    -    `WebSocketClientConnection`.
    -
    -    ``compression_options`` is interpreted in the same way as the
    -    return value of `.WebSocketHandler.get_compression_options`.
    -
    -    The connection supports two styles of operation. In the coroutine
    -    style, the application typically calls
    -    `~.WebSocketClientConnection.read_message` in a loop::
    -
    -        conn = yield websocket_connect(url)
    -        while True:
    -            msg = yield conn.read_message()
    -            if msg is None: break
    -            # Do something with msg
    -
    -    In the callback style, pass an ``on_message_callback`` to
    -    ``websocket_connect``. In both styles, a message of ``None``
    -    indicates that the connection has been closed.
    -
    -    ``subprotocols`` may be a list of strings specifying proposed
    -    subprotocols. The selected protocol may be found on the
    -    ``selected_subprotocol`` attribute of the connection object
    -    when the connection is complete.
    -
    -    .. versionchanged:: 3.2
    -       Also accepts ``HTTPRequest`` objects in place of urls.
    -
    -    .. versionchanged:: 4.1
    -       Added ``compression_options`` and ``on_message_callback``.
    -
    -    .. versionchanged:: 4.5
    -       Added the ``ping_interval``, ``ping_timeout``, and ``max_message_size``
    -       arguments, which have the same meaning as in `WebSocketHandler`.
    -
    -    .. versionchanged:: 5.0
    -       The ``io_loop`` argument (deprecated since version 4.1) has been removed.
    -
    -    .. versionchanged:: 5.1
    -       Added the ``subprotocols`` argument.
    -    """
    -    if isinstance(url, httpclient.HTTPRequest):
    -        assert connect_timeout is None
    -        request = url
    -        # Copy and convert the headers dict/object (see comments in
    -        # AsyncHTTPClient.fetch)
    -        request.headers = httputil.HTTPHeaders(request.headers)
    -    else:
    -        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    -    request = httpclient._RequestProxy(
    -        request, httpclient.HTTPRequest._DEFAULTS)
    -    conn = WebSocketClientConnection(request,
    -                                     on_message_callback=on_message_callback,
    -                                     compression_options=compression_options,
    -                                     ping_interval=ping_interval,
    -                                     ping_timeout=ping_timeout,
    -                                     max_message_size=max_message_size,
    -                                     subprotocols=subprotocols)
    -    if callback is not None:
    -        IOLoop.current().add_future(conn.connect_future, callback)
    -    return conn.connect_future
    +"""Implementation of the WebSocket protocol.
    +
    +`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
    +communication between the browser and server.
    +
    +WebSockets are supported in the current versions of all major browsers,
    +although older versions that do not support WebSockets are still in use
    +(refer to http://caniuse.com/websockets for details).
    +
    +This module implements the final version of the WebSocket protocol as
    +defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_.  Certain
    +browser versions (notably Safari 5.x) implemented an earlier draft of
    +the protocol (known as "draft 76") and are not compatible with this module.
    +
    +.. versionchanged:: 4.0
    +   Removed support for the draft 76 protocol version.
    +"""
    +
    +import abc
    +import asyncio
    +import base64
    +import hashlib
    +import os
    +import sys
    +import struct
    +import tornado.escape
    +import tornado.web
    +from urllib.parse import urlparse
    +import zlib
    +
    +from tornado.concurrent import Future, future_set_result_unless_cancelled
    +from tornado.escape import utf8, native_str, to_unicode
    +from tornado import gen, httpclient, httputil
    +from tornado.ioloop import IOLoop, PeriodicCallback
    +from tornado.iostream import StreamClosedError, IOStream
    +from tornado.log import gen_log, app_log
    +from tornado import simple_httpclient
    +from tornado.queues import Queue
    +from tornado.tcpclient import TCPClient
    +from tornado.util import _websocket_mask
    +
    +from typing import (
    +    TYPE_CHECKING,
    +    cast,
    +    Any,
    +    Optional,
    +    Dict,
    +    Union,
    +    List,
    +    Awaitable,
    +    Callable,
    +    Tuple,
    +    Type,
    +)
    +from types import TracebackType
    +
    +if TYPE_CHECKING:
    +    from typing_extensions import Protocol
    +
    +    # The zlib compressor types aren't actually exposed anywhere
    +    # publicly, so declare protocols for the portions we use.
    +    class _Compressor(Protocol):
    +        def compress(self, data: bytes) -> bytes:
    +            pass
    +
    +        def flush(self, mode: int) -> bytes:
    +            pass
    +
    +    class _Decompressor(Protocol):
    +        unconsumed_tail = b""  # type: bytes
    +
    +        def decompress(self, data: bytes, max_length: int) -> bytes:
    +            pass
    +
    +    class _WebSocketDelegate(Protocol):
    +        # The common base interface implemented by WebSocketHandler on
    +        # the server side and WebSocketClientConnection on the client
    +        # side.
    +        def on_ws_connection_close(
    +            self, close_code: int = None, close_reason: str = None
    +        ) -> None:
    +            pass
    +
    +        def on_message(self, message: Union[str, bytes]) -> Optional["Awaitable[None]"]:
    +            pass
    +
    +        def on_ping(self, data: bytes) -> None:
    +            pass
    +
    +        def on_pong(self, data: bytes) -> None:
    +            pass
    +
    +        def log_exception(
    +            self,
    +            typ: Optional[Type[BaseException]],
    +            value: Optional[BaseException],
    +            tb: Optional[TracebackType],
    +        ) -> None:
    +            pass
    +
    +
    +_default_max_message_size = 10 * 1024 * 1024
    +
    +
    +class WebSocketError(Exception):
    +    pass
    +
    +
    +class WebSocketClosedError(WebSocketError):
    +    """Raised by operations on a closed connection.
    +
    +    .. versionadded:: 3.2
    +    """
    +
    +    pass
    +
    +
    +class _DecompressTooLargeError(Exception):
    +    pass
    +
    +
    +class _WebSocketParams(object):
    +    def __init__(
    +        self,
    +        ping_interval: float = None,
    +        ping_timeout: float = None,
    +        max_message_size: int = _default_max_message_size,
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
    +        self.ping_interval = ping_interval
    +        self.ping_timeout = ping_timeout
    +        self.max_message_size = max_message_size
    +        self.compression_options = compression_options
    +
    +
    +class WebSocketHandler(tornado.web.RequestHandler):
    +    """Subclass this class to create a basic WebSocket handler.
    +
    +    Override `on_message` to handle incoming messages, and use
    +    `write_message` to send messages to the client. You can also
    +    override `open` and `on_close` to handle opened and closed
    +    connections.
    +
    +    Custom upgrade response headers can be sent by overriding
    +    `~tornado.web.RequestHandler.set_default_headers` or
    +    `~tornado.web.RequestHandler.prepare`.
    +
    +    See http://dev.w3.org/html5/websockets/ for details on the
    +    JavaScript interface.  The protocol is specified at
    +    http://tools.ietf.org/html/rfc6455.
    +
    +    Here is an example WebSocket handler that echoes all received messages
    +    back to the client:
    +
    +    .. testcode::
    +
    +      class EchoWebSocket(tornado.websocket.WebSocketHandler):
    +          def open(self):
    +              print("WebSocket opened")
    +
    +          def on_message(self, message):
    +              self.write_message(u"You said: " + message)
    +
    +          def on_close(self):
    +              print("WebSocket closed")
    +
    +    .. testoutput::
    +       :hide:
    +
    +    WebSockets are not standard HTTP connections. The "handshake" is
    +    HTTP, but after the handshake, the protocol is
    +    message-based. Consequently, most of the Tornado HTTP facilities
    +    are not available in handlers of this type. The only communication
    +    methods available to you are `write_message()`, `ping()`, and
    +    `close()`. Likewise, your request handler class should implement the
    +    `open()` method rather than ``get()`` or ``post()``.
    +
    +    If you map the handler above to ``/websocket`` in your application, you can
    +    invoke it in JavaScript with::
    +
    +      var ws = new WebSocket("ws://localhost:8888/websocket");
    +      ws.onopen = function() {
    +         ws.send("Hello, world");
    +      };
    +      ws.onmessage = function (evt) {
    +         alert(evt.data);
    +      };
    +
    +    This script pops up an alert box that says "You said: Hello, world".
    +
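    +    The mapping itself is an ordinary application route (a sketch; the
    +    URL pattern and port are illustrative)::
    +
    +        app = tornado.web.Application([(r"/websocket", EchoWebSocket)])
    +        app.listen(8888)
    +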
    +    Web browsers allow any site to open a websocket connection to any other,
    +    instead of using the same-origin policy that governs other network
    +    access from javascript.  This can be surprising and is a potential
    +    security hole, so since Tornado 4.0 `WebSocketHandler` requires
    +    applications that wish to receive cross-origin websockets to opt in
    +    by overriding the `~WebSocketHandler.check_origin` method (see that
    +    method's docs for details).  Failure to do so is the most likely
    +    cause of 403 errors when making a websocket connection.
    +
    +    When using a secure websocket connection (``wss://``) with a self-signed
    +    certificate, the connection from a browser may fail because it wants
    +    to show the "accept this certificate" dialog but has nowhere to show it.
    +    You must first visit a regular HTML page using the same certificate
    +    to accept it before the websocket connection will succeed.
    +
    +    If the application setting ``websocket_ping_interval`` has a non-zero
    +    value, a ping will be sent periodically, and the connection will be
    +    closed if a response is not received before the ``websocket_ping_timeout``.
    +
    +    Messages larger than the ``websocket_max_message_size`` application setting
    +    (default 10MiB) will not be accepted.
    +
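    +    These settings are passed as keyword arguments to
    +    `tornado.web.Application` (a sketch; ``handlers`` and the values are
    +    illustrative)::
    +
    +        app = tornado.web.Application(handlers,
    +                                      websocket_ping_interval=10,
    +                                      websocket_ping_timeout=30)
    +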
    +    .. versionchanged:: 4.5
    +       Added ``websocket_ping_interval``, ``websocket_ping_timeout``, and
    +       ``websocket_max_message_size``.
    +    """
    +
    +    def __init__(
    +        self,
    +        application: tornado.web.Application,
    +        request: httputil.HTTPServerRequest,
    +        **kwargs: Any
    +    ) -> None:
    +        super(WebSocketHandler, self).__init__(application, request, **kwargs)
    +        self.ws_connection = None  # type: Optional[WebSocketProtocol]
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
    +        self.stream = None  # type: Optional[IOStream]
    +        self._on_close_called = False
    +
    +    async def get(self, *args: Any, **kwargs: Any) -> None:
    +        self.open_args = args
    +        self.open_kwargs = kwargs
    +
    +        # Upgrade header should be present and should be equal to WebSocket
    +        if self.request.headers.get("Upgrade", "").lower() != "websocket":
    +            self.set_status(400)
    +            log_msg = 'Can "Upgrade" only to "WebSocket".'
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        # Connection header should be upgrade.
    +        # Some proxy servers/load balancers
    +        # might mess with it.
    +        headers = self.request.headers
    +        connection = map(
    +            lambda s: s.strip().lower(), headers.get("Connection", "").split(",")
    +        )
    +        if "upgrade" not in connection:
    +            self.set_status(400)
    +            log_msg = '"Connection" must be "Upgrade".'
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        # Handle WebSocket Origin naming convention differences
    +        # The difference between version 8 and 13 is that in 8 the
    +        # client sends a "Sec-Websocket-Origin" header and in 13 it's
    +        # simply "Origin".
    +        if "Origin" in self.request.headers:
    +            origin = self.request.headers.get("Origin")
    +        else:
    +            origin = self.request.headers.get("Sec-Websocket-Origin", None)
    +
    +        # If there was an origin header, check to make sure it matches
    +        # according to check_origin. When the origin is None, we assume it
    +        # did not come from a browser and that it can be passed on.
    +        if origin is not None and not self.check_origin(origin):
    +            self.set_status(403)
    +            log_msg = "Cross origin websockets not allowed"
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        self.ws_connection = self.get_websocket_protocol()
    +        if self.ws_connection:
    +            await self.ws_connection.accept_connection(self)
    +        else:
    +            self.set_status(426, "Upgrade Required")
    +            self.set_header("Sec-WebSocket-Version", "7, 8, 13")
    +
    +    stream = None
    +
    +    @property
    +    def ping_interval(self) -> Optional[float]:
    +        """The interval for websocket keep-alive pings.
    +
    +        Set websocket_ping_interval = 0 to disable pings.
    +        """
    +        return self.settings.get("websocket_ping_interval", None)
    +
    +    @property
    +    def ping_timeout(self) -> Optional[float]:
    +        """If no pong is received within this many seconds of sending a ping,
    +        close the websocket connection (VPNs, etc. can fail to cleanly close ws connections).
    +        Default is the larger of 3 ping intervals or 30 seconds.
    +        """
    +        return self.settings.get("websocket_ping_timeout", None)
    +
    +    @property
    +    def max_message_size(self) -> int:
    +        """Maximum allowed message size.
    +
    +        If the remote peer sends a message larger than this, the connection
    +        will be closed.
    +
    +        Default is 10MiB.
    +        """
    +        return self.settings.get(
    +            "websocket_max_message_size", _default_max_message_size
    +        )
    +
    +    def write_message(
    +        self, message: Union[bytes, str, Dict[str, Any]], binary: bool = False
    +    ) -> "Future[None]":
    +        """Sends the given message to the client of this Web Socket.
    +
    +        The message may be either a string or a dict (which will be
    +        encoded as json).  If the ``binary`` argument is false, the
    +        message will be sent as utf8; in binary mode any byte string
    +        is allowed.
    +
    +        If the connection is already closed, raises `WebSocketClosedError`.
    +        Returns a `.Future` which can be used for flow control.
    +
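    +        For example, from within a handler method (the payload is
    +        illustrative)::
    +
    +            self.write_message({"kind": "status", "ok": True})
    +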
    +        .. versionchanged:: 3.2
    +           `WebSocketClosedError` was added (previously a closed connection
    +           would raise an `AttributeError`)
    +
    +        .. versionchanged:: 4.3
    +           Returns a `.Future` which can be used for flow control.
    +
    +        .. versionchanged:: 5.0
    +           Consistently raises `WebSocketClosedError`. Previously could
    +           sometimes raise `.StreamClosedError`.
    +        """
    +        if self.ws_connection is None or self.ws_connection.is_closing():
    +            raise WebSocketClosedError()
    +        if isinstance(message, dict):
    +            message = tornado.escape.json_encode(message)
    +        return self.ws_connection.write_message(message, binary=binary)
    +
    +    def select_subprotocol(self, subprotocols: List[str]) -> Optional[str]:
    +        """Override to implement subprotocol negotiation.
    +
    +        ``subprotocols`` is a list of strings identifying the
    +        subprotocols proposed by the client.  This method may be
    +        overridden to return one of those strings to select it, or
    +        ``None`` to not select a subprotocol.
    +
    +        Failure to select a subprotocol does not automatically abort
    +        the connection, although clients may close the connection if
    +        none of their proposed subprotocols was selected.
    +
    +        The list may be empty, in which case this method must return
    +        None. This method is always called exactly once even if no
    +        subprotocols were proposed so that the handler can be advised
    +        of this fact.
    +
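    +        For example, a handler that prefers a hypothetical ``"json"``
    +        subprotocol whenever the client offers it might use::
    +
    +            def select_subprotocol(self, subprotocols):
    +                return "json" if "json" in subprotocols else None
    +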
    +        .. versionchanged:: 5.1
    +
    +           Previously, this method was called with a list containing
    +           an empty string instead of an empty list if no subprotocols
    +           were proposed by the client.
    +        """
    +        return None
    +
    +    @property
    +    def selected_subprotocol(self) -> Optional[str]:
    +        """The subprotocol returned by `select_subprotocol`.
    +
    +        .. versionadded:: 5.1
    +        """
    +        assert self.ws_connection is not None
    +        return self.ws_connection.selected_subprotocol
    +
    +    def get_compression_options(self) -> Optional[Dict[str, Any]]:
    +        """Override to return compression options for the connection.
    +
    +        If this method returns None (the default), compression will
    +        be disabled.  If it returns a dict (even an empty one), it
    +        will be enabled.  The contents of the dict may be used to
    +        control the following compression options:
    +
    +        ``compression_level`` specifies the compression level.
    +
    +        ``mem_level`` specifies the amount of memory used for the internal compression state.
    +
    +        These parameters are documented in detail here:
    +        https://docs.python.org/3.6/library/zlib.html#zlib.compressobj
    +
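    +        For example (illustrative values; both keys are optional)::
    +
    +            def get_compression_options(self):
    +                return {"compression_level": 6, "mem_level": 8}
    +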
    +        .. versionadded:: 4.1
    +
    +        .. versionchanged:: 4.5
    +
    +           Added ``compression_level`` and ``mem_level``.
    +        """
    +        # TODO: Add wbits option.
    +        return None
    +
    +    def open(self, *args: str, **kwargs: str) -> Optional[Awaitable[None]]:
    +        """Invoked when a new WebSocket is opened.
    +
    +        The arguments to `open` are extracted from the `tornado.web.URLSpec`
    +        regular expression, just like the arguments to
    +        `tornado.web.RequestHandler.get`.
    +
    +        `open` may be a coroutine. `on_message` will not be called until
    +        `open` has returned.
    +
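    +        For example, given a hypothetical route
    +        ``(r"/chat/(\w+)", ChatHandler)``, the captured group becomes
    +        the argument to `open`::
    +
    +            def open(self, room):
    +                print("joined room", room)
    +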
    +        .. versionchanged:: 5.1
    +
    +           ``open`` may be a coroutine.
    +        """
    +        pass
    +
    +    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
    +        """Handle incoming messages on the WebSocket
    +
    +        This method must be overridden.
    +
    +        .. versionchanged:: 4.5
    +
    +           ``on_message`` can be a coroutine.
    +        """
    +        raise NotImplementedError
    +
    +    def ping(self, data: Union[str, bytes] = b"") -> None:
    +        """Send ping frame to the remote end.
    +
    +        The data argument allows a small amount of data (up to 125
    +        bytes) to be sent as a part of the ping message. Note that not
    +        all websocket implementations expose this data to
    +        applications.
    +
    +        Consider using the ``websocket_ping_interval`` application
    +        setting instead of sending pings manually.
    +
    +        .. versionchanged:: 5.1
    +
    +           The data argument is now optional.
    +
    +        """
    +        data = utf8(data)
    +        if self.ws_connection is None or self.ws_connection.is_closing():
    +            raise WebSocketClosedError()
    +        self.ws_connection.write_ping(data)
    +
    +    def on_pong(self, data: bytes) -> None:
    +        """Invoked when the response to a ping frame is received."""
    +        pass
    +
    +    def on_ping(self, data: bytes) -> None:
    +        """Invoked when a ping frame is received."""
    +        pass
    +
    +    def on_close(self) -> None:
    +        """Invoked when the WebSocket is closed.
    +
    +        If the connection was closed cleanly and a status code or reason
    +        phrase was supplied, these values will be available as the attributes
    +        ``self.close_code`` and ``self.close_reason``.
    +
    +        .. versionchanged:: 4.0
    +
    +           Added ``close_code`` and ``close_reason`` attributes.
    +        """
    +        pass
    +
    +    def close(self, code: int = None, reason: str = None) -> None:
    +        """Closes this Web Socket.
    +
    +        Once the close handshake is successful the socket will be closed.
    +
    +        ``code`` may be a numeric status code, taken from the values
    +        defined in `RFC 6455 section 7.4.1
    +        <https://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
    +        ``reason`` may be a textual message about why the connection is
    +        closing.  These values are made available to the client, but are
    +        not otherwise interpreted by the websocket protocol.
    +
    +        .. versionchanged:: 4.0
    +
    +           Added the ``code`` and ``reason`` arguments.
    +        """
    +        if self.ws_connection:
    +            self.ws_connection.close(code, reason)
    +            self.ws_connection = None
    +
    +    def check_origin(self, origin: str) -> bool:
    +        """Override to enable support for allowing alternate origins.
    +
    +        The ``origin`` argument is the value of the ``Origin`` HTTP
    +        header, the url responsible for initiating this request.  This
    +        method is not called for clients that do not send this header;
    +        such requests are always allowed (because all browsers that
    +        implement WebSockets support this header, and non-browser
    +        clients do not have the same cross-site security concerns).
    +
    +        Should return ``True`` to accept the request or ``False`` to
    +        reject it. By default, rejects all requests with an origin on
    +        a host other than this one.
    +
    +        This is a security protection against cross site scripting attacks on
    +        browsers, since WebSockets are allowed to bypass the usual same-origin
    +        policies and don't use CORS headers.
    +
    +        .. warning::
    +
    +           This is an important security measure; don't disable it
    +           without understanding the security implications. In
    +           particular, if your authentication is cookie-based, you
    +           must either restrict the origins allowed by
    +           ``check_origin()`` or implement your own XSRF-like
    +           protection for websocket connections. See `these
    +           <https://www.christian-schneider.net/CrossSiteWebSocketHijacking.html>`_
    +           `articles
    +           <https://devcenter.heroku.com/articles/websocket-security>`_
    +           for more.
    +
    +        To accept all cross-origin traffic (which was the default prior to
    +        Tornado 4.0), simply override this method to always return ``True``::
    +
    +            def check_origin(self, origin):
    +                return True
    +
    +        To allow connections from any subdomain of your site, you might
    +        do something like::
    +
    +            def check_origin(self, origin):
    +                parsed_origin = urllib.parse.urlparse(origin)
    +                return parsed_origin.netloc.endswith(".mydomain.com")
    +
    +        .. versionadded:: 4.0
    +
    +        """
    +        parsed_origin = urlparse(origin)
    +        origin = parsed_origin.netloc
    +        origin = origin.lower()
    +
    +        host = self.request.headers.get("Host")
    +
    +        # Check to see that origin matches host directly, including ports
    +        return origin == host
    +
    +    def set_nodelay(self, value: bool) -> None:
    +        """Set the no-delay flag for this stream.
    +
    +        By default, small messages may be delayed and/or combined to minimize
    +        the number of packets sent.  This can sometimes cause 200-500ms delays
    +        due to the interaction between Nagle's algorithm and TCP delayed
    +        ACKs.  To reduce this delay (at the expense of possibly increasing
    +        bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
    +        connection is established.
    +
    +        See `.BaseIOStream.set_nodelay` for additional details.
    +
    +        .. versionadded:: 3.1
    +        """
    +        assert self.ws_connection is not None
    +        self.ws_connection.set_nodelay(value)
    +
    +    def on_connection_close(self) -> None:
    +        if self.ws_connection:
    +            self.ws_connection.on_connection_close()
    +            self.ws_connection = None
    +        if not self._on_close_called:
    +            self._on_close_called = True
    +            self.on_close()
    +            self._break_cycles()
    +
    +    def on_ws_connection_close(
    +        self, close_code: int = None, close_reason: str = None
    +    ) -> None:
    +        self.close_code = close_code
    +        self.close_reason = close_reason
    +        self.on_connection_close()
    +
    +    def _break_cycles(self) -> None:
    +        # WebSocketHandlers call finish() early, but we don't want to
    +        # break up reference cycles (which makes it impossible to call
    +        # self.render_string) until after we've really closed the
    +        # connection (if it was established in the first place,
    +        # indicated by status code 101).
    +        if self.get_status() != 101 or self._on_close_called:
    +            super(WebSocketHandler, self)._break_cycles()
    +
    +    def send_error(self, *args: Any, **kwargs: Any) -> None:
    +        if self.stream is None:
    +            super(WebSocketHandler, self).send_error(*args, **kwargs)
    +        else:
    +            # If we get an uncaught exception during the handshake,
    +            # we have no choice but to abruptly close the connection.
    +            # TODO: for uncaught exceptions after the handshake,
    +            # we can close the connection more gracefully.
    +            self.stream.close()
    +
    +    def get_websocket_protocol(self) -> Optional["WebSocketProtocol"]:
    +        websocket_version = self.request.headers.get("Sec-WebSocket-Version")
    +        if websocket_version in ("7", "8", "13"):
    +            params = _WebSocketParams(
    +                ping_interval=self.ping_interval,
    +                ping_timeout=self.ping_timeout,
    +                max_message_size=self.max_message_size,
    +                compression_options=self.get_compression_options(),
    +            )
    +            return WebSocketProtocol13(self, False, params)
    +        return None
    +
    +    def _detach_stream(self) -> IOStream:
    +        # disable non-WS methods
    +        for method in [
    +            "write",
    +            "redirect",
    +            "set_header",
    +            "set_cookie",
    +            "set_status",
    +            "flush",
    +            "finish",
    +        ]:
    +            setattr(self, method, _raise_not_supported_for_websockets)
    +        return self.detach()
    +
    +
    +def _raise_not_supported_for_websockets(*args: Any, **kwargs: Any) -> None:
    +    raise RuntimeError("Method not supported for Web Sockets")
    +
    +
    +class WebSocketProtocol(abc.ABC):
    +    """Base class for WebSocket protocol versions."""
    +
    +    def __init__(self, handler: "_WebSocketDelegate") -> None:
    +        self.handler = handler
    +        self.stream = None  # type: Optional[IOStream]
    +        self.client_terminated = False
    +        self.server_terminated = False
    +
    +    def _run_callback(
    +        self, callback: Callable, *args: Any, **kwargs: Any
    +    ) -> "Optional[Future[Any]]":
    +        """Runs the given callback with exception handling.
    +
    +        If the callback is a coroutine, returns its Future. On error, aborts the
    +        websocket connection and returns None.
    +        """
    +        try:
    +            result = callback(*args, **kwargs)
    +        except Exception:
    +            self.handler.log_exception(*sys.exc_info())
    +            self._abort()
    +            return None
    +        else:
    +            if result is not None:
    +                result = gen.convert_yielded(result)
    +                assert self.stream is not None
    +                self.stream.io_loop.add_future(result, lambda f: f.result())
    +            return result
    +
    +    def on_connection_close(self) -> None:
    +        self._abort()
    +
    +    def _abort(self) -> None:
    +        """Instantly aborts the WebSocket connection by closing the socket"""
    +        self.client_terminated = True
    +        self.server_terminated = True
    +        if self.stream is not None:
    +            self.stream.close()  # forcibly tear down the connection
    +        self.close()  # let the subclass cleanup
    +
    +    @abc.abstractmethod
    +    def close(self, code: int = None, reason: str = None) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def is_closing(self) -> bool:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    async def accept_connection(self, handler: WebSocketHandler) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
    +        raise NotImplementedError()
    +
    +    @property
    +    @abc.abstractmethod
    +    def selected_subprotocol(self) -> Optional[str]:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def write_ping(self, data: bytes) -> None:
    +        raise NotImplementedError()
    +
    +    # The entry points below are used by WebSocketClientConnection,
    +    # which was introduced after we only supported a single version of
    +    # WebSocketProtocol. The WebSocketProtocol/WebSocketProtocol13
    +    # boundary is currently pretty ad-hoc.
    +    @abc.abstractmethod
    +    def _process_server_headers(
    +        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
    +    ) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def start_pinging(self) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    async def _receive_frame_loop(self) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def set_nodelay(self, x: bool) -> None:
    +        raise NotImplementedError()
    +
    +
    +class _PerMessageDeflateCompressor(object):
    +    def __init__(
    +        self,
    +        persistent: bool,
    +        max_wbits: Optional[int],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
    +        if max_wbits is None:
    +            max_wbits = zlib.MAX_WBITS
    +        # There is no symbolic constant for the minimum wbits value.
    +        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    +            raise ValueError(
    +                "Invalid max_wbits value %r; allowed range 8-%d"
    +                % (max_wbits, zlib.MAX_WBITS)
    +            )
    +        self._max_wbits = max_wbits
    +
    +        if (
    +            compression_options is None
    +            or "compression_level" not in compression_options
    +        ):
    +            self._compression_level = tornado.web.GZipContentEncoding.GZIP_LEVEL
    +        else:
    +            self._compression_level = compression_options["compression_level"]
    +
    +        if compression_options is None or "mem_level" not in compression_options:
    +            self._mem_level = 8
    +        else:
    +            self._mem_level = compression_options["mem_level"]
    +
    +        if persistent:
    +            self._compressor = self._create_compressor()  # type: Optional[_Compressor]
    +        else:
    +            self._compressor = None
    +
    +    def _create_compressor(self) -> "_Compressor":
    +        return zlib.compressobj(
    +            self._compression_level, zlib.DEFLATED, -self._max_wbits, self._mem_level
    +        )
    +
    +    def compress(self, data: bytes) -> bytes:
    +        compressor = self._compressor or self._create_compressor()
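    +        # Z_SYNC_FLUSH always terminates the output with an empty stored
    +        # block whose last four bytes are 00 00 ff ff; RFC 7692 requires
    +        # that trailer to be stripped from each compressed message.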
    +        data = compressor.compress(data) + compressor.flush(zlib.Z_SYNC_FLUSH)
    +        assert data.endswith(b"\x00\x00\xff\xff")
    +        return data[:-4]
    +
    +
    +class _PerMessageDeflateDecompressor(object):
    +    def __init__(
    +        self,
    +        persistent: bool,
    +        max_wbits: Optional[int],
    +        max_message_size: int,
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
    +        self._max_message_size = max_message_size
    +        if max_wbits is None:
    +            max_wbits = zlib.MAX_WBITS
    +        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    +            raise ValueError(
    +                "Invalid max_wbits value %r; allowed range 8-%d"
    +                % (max_wbits, zlib.MAX_WBITS)
    +            )
    +        self._max_wbits = max_wbits
    +        if persistent:
    +            self._decompressor = (
    +                self._create_decompressor()
    +            )  # type: Optional[_Decompressor]
    +        else:
    +            self._decompressor = None
    +
    +    def _create_decompressor(self) -> "_Decompressor":
    +        return zlib.decompressobj(-self._max_wbits)
    +
    +    def decompress(self, data: bytes) -> bytes:
    +        decompressor = self._decompressor or self._create_decompressor()
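    +        # Re-append the 00 00 ff ff trailer the sender stripped (RFC 7692)
    +        # so zlib sees a complete deflate block. max_length caps the output;
    +        # anything left in unconsumed_tail means the message was too large.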
    +        result = decompressor.decompress(
    +            data + b"\x00\x00\xff\xff", self._max_message_size
    +        )
    +        if decompressor.unconsumed_tail:
    +            raise _DecompressTooLargeError()
    +        return result
    +
    +
    +class WebSocketProtocol13(WebSocketProtocol):
    +    """Implementation of the WebSocket protocol from RFC 6455.
    +
    +    This class supports versions 7 and 8 of the protocol in addition to the
    +    final version 13.
    +    """
    +
    +    # Bit masks for the first byte of a frame.
    +    FIN = 0x80
    +    RSV1 = 0x40
    +    RSV2 = 0x20
    +    RSV3 = 0x10
    +    RSV_MASK = RSV1 | RSV2 | RSV3
    +    OPCODE_MASK = 0x0F
    +
    +    stream = None  # type: IOStream
    +
    +    def __init__(
    +        self,
    +        handler: "_WebSocketDelegate",
    +        mask_outgoing: bool,
    +        params: _WebSocketParams,
    +    ) -> None:
    +        WebSocketProtocol.__init__(self, handler)
    +        self.mask_outgoing = mask_outgoing
    +        self.params = params
    +        self._final_frame = False
    +        self._frame_opcode = None
    +        self._masked_frame = None
    +        self._frame_mask = None  # type: Optional[bytes]
    +        self._frame_length = None
    +        self._fragmented_message_buffer = None  # type: Optional[bytes]
    +        self._fragmented_message_opcode = None
    +        self._waiting = None  # type: object
    +        self._compression_options = params.compression_options
    +        self._decompressor = None  # type: Optional[_PerMessageDeflateDecompressor]
    +        self._compressor = None  # type: Optional[_PerMessageDeflateCompressor]
    +        self._frame_compressed = None  # type: Optional[bool]
    +        # The total uncompressed size of all messages received or sent.
    +        # Unicode messages are encoded to utf8.
    +        # Only for testing; subject to change.
    +        self._message_bytes_in = 0
    +        self._message_bytes_out = 0
    +        # The total size of all packets received or sent.  Includes
    +        # the effect of compression, frame overhead, and control frames.
    +        self._wire_bytes_in = 0
    +        self._wire_bytes_out = 0
    +        self.ping_callback = None  # type: Optional[PeriodicCallback]
    +        self.last_ping = 0.0
    +        self.last_pong = 0.0
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
    +
    +    # Use a property for this to satisfy the abc.
    +    @property
    +    def selected_subprotocol(self) -> Optional[str]:
    +        return self._selected_subprotocol
    +
    +    @selected_subprotocol.setter
    +    def selected_subprotocol(self, value: Optional[str]) -> None:
    +        self._selected_subprotocol = value
    +
    +    async def accept_connection(self, handler: WebSocketHandler) -> None:
    +        try:
    +            self._handle_websocket_headers(handler)
    +        except ValueError:
    +            handler.set_status(400)
    +            log_msg = "Missing/Invalid WebSocket headers"
    +            handler.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        try:
    +            await self._accept_connection(handler)
    +        except asyncio.CancelledError:
    +            self._abort()
    +            return
    +        except ValueError:
    +            gen_log.debug("Malformed WebSocket request received", exc_info=True)
    +            self._abort()
    +            return
    +
    +    def _handle_websocket_headers(self, handler: WebSocketHandler) -> None:
    +        """Verifies all invariant and required headers.
    +
    +        Raises ValueError if a header is missing or has an incorrect value.
    +        """
    +        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
    +        if not all(map(lambda f: handler.request.headers.get(f), fields)):
    +            raise ValueError("Missing/Invalid WebSocket headers")
    +
    +    @staticmethod
    +    def compute_accept_value(key: Union[str, bytes]) -> str:
    +        """Computes the value for the Sec-WebSocket-Accept header,
    +        given the value for Sec-WebSocket-Key.
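    +
    +        For example, the sample handshake in RFC 6455 section 1.2 maps
    +        the key ``"dGhlIHNhbXBsZSBub25jZQ=="`` to the accept value
    +        ``"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="``.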
    +        """
    +        sha1 = hashlib.sha1()
    +        sha1.update(utf8(key))
    +        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
    +        return native_str(base64.b64encode(sha1.digest()))
    +
    +    def _challenge_response(self, handler: WebSocketHandler) -> str:
    +        return WebSocketProtocol13.compute_accept_value(
    +            cast(str, handler.request.headers.get("Sec-Websocket-Key"))
    +        )
    +
    +    async def _accept_connection(self, handler: WebSocketHandler) -> None:
    +        subprotocol_header = handler.request.headers.get("Sec-WebSocket-Protocol")
    +        if subprotocol_header:
    +            subprotocols = [s.strip() for s in subprotocol_header.split(",")]
    +        else:
    +            subprotocols = []
    +        self.selected_subprotocol = handler.select_subprotocol(subprotocols)
    +        if self.selected_subprotocol:
    +            assert self.selected_subprotocol in subprotocols
    +            handler.set_header("Sec-WebSocket-Protocol", self.selected_subprotocol)
    +
    +        extensions = self._parse_extensions_header(handler.request.headers)
    +        for ext in extensions:
    +            if ext[0] == "permessage-deflate" and self._compression_options is not None:
    +                # TODO: negotiate parameters if compression_options
    +                # specifies limits.
    +                self._create_compressors("server", ext[1], self._compression_options)
    +                if (
    +                    "client_max_window_bits" in ext[1]
    +                    and ext[1]["client_max_window_bits"] is None
    +                ):
    +                    # Don't echo an offered client_max_window_bits
    +                    # parameter with no value.
    +                    del ext[1]["client_max_window_bits"]
    +                handler.set_header(
    +                    "Sec-WebSocket-Extensions",
    +                    httputil._encode_header("permessage-deflate", ext[1]),
    +                )
    +                break
    +
    +        handler.clear_header("Content-Type")
    +        handler.set_status(101)
    +        handler.set_header("Upgrade", "websocket")
    +        handler.set_header("Connection", "Upgrade")
    +        handler.set_header("Sec-WebSocket-Accept", self._challenge_response(handler))
    +        handler.finish()
    +
    +        self.stream = handler._detach_stream()
    +
    +        self.start_pinging()
    +        try:
    +            open_result = handler.open(*handler.open_args, **handler.open_kwargs)
    +            if open_result is not None:
    +                await open_result
    +        except Exception:
    +            handler.log_exception(*sys.exc_info())
    +            self._abort()
    +            return
    +
    +        await self._receive_frame_loop()
    +
    +    def _parse_extensions_header(
    +        self, headers: httputil.HTTPHeaders
    +    ) -> List[Tuple[str, Dict[str, str]]]:
    +        extensions = headers.get("Sec-WebSocket-Extensions", "")
    +        if extensions:
    +            return [httputil._parse_header(e.strip()) for e in extensions.split(",")]
    +        return []
    +
    +    def _process_server_headers(
    +        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
    +    ) -> None:
    +        """Process the headers sent by the server to this client connection.
    +
    +        'key' is the websocket handshake challenge/response key.
    +        """
    +        assert headers["Upgrade"].lower() == "websocket"
    +        assert headers["Connection"].lower() == "upgrade"
    +        accept = self.compute_accept_value(key)
    +        assert headers["Sec-Websocket-Accept"] == accept
    +
    +        extensions = self._parse_extensions_header(headers)
    +        for ext in extensions:
    +            if ext[0] == "permessage-deflate" and self._compression_options is not None:
    +                self._create_compressors("client", ext[1])
    +            else:
    +                raise ValueError("unsupported extension %r" % (ext,))
    +
    +        self.selected_subprotocol = headers.get("Sec-WebSocket-Protocol", None)
    +
    +    def _get_compressor_options(
    +        self,
    +        side: str,
    +        agreed_parameters: Dict[str, Any],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> Dict[str, Any]:
    +        """Converts a websocket agreed_parameters set to keyword arguments
    +        for our compressor objects.
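    +
    +        For example (illustrative values), ``side="client"`` with agreed
    +        parameters ``{"client_max_window_bits": "12"}`` yields
    +        ``{"persistent": True, "max_wbits": 12, "compression_options": None}``.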
    +        """
    +        options = dict(
    +            persistent=(side + "_no_context_takeover") not in agreed_parameters
    +        )  # type: Dict[str, Any]
    +        wbits_header = agreed_parameters.get(side + "_max_window_bits", None)
    +        if wbits_header is None:
    +            options["max_wbits"] = zlib.MAX_WBITS
    +        else:
    +            options["max_wbits"] = int(wbits_header)
    +        options["compression_options"] = compression_options
    +        return options
    +
    +    def _create_compressors(
    +        self,
    +        side: str,
    +        agreed_parameters: Dict[str, Any],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
    +        # TODO: handle invalid parameters gracefully
    +        allowed_keys = set(
    +            [
    +                "server_no_context_takeover",
    +                "client_no_context_takeover",
    +                "server_max_window_bits",
    +                "client_max_window_bits",
    +            ]
    +        )
    +        for key in agreed_parameters:
    +            if key not in allowed_keys:
    +                raise ValueError("unsupported compression parameter %r" % key)
    +        other_side = "client" if (side == "server") else "server"
    +        self._compressor = _PerMessageDeflateCompressor(
    +            **self._get_compressor_options(side, agreed_parameters, compression_options)
    +        )
    +        self._decompressor = _PerMessageDeflateDecompressor(
    +            max_message_size=self.params.max_message_size,
    +            **self._get_compressor_options(
    +                other_side, agreed_parameters, compression_options
    +            )
    +        )
    +
    +    def _write_frame(
    +        self, fin: bool, opcode: int, data: bytes, flags: int = 0
    +    ) -> "Future[None]":
    +        data_len = len(data)
    +        if opcode & 0x8:
    +            # All control frames MUST have a payload length of 125
    +            # bytes or less and MUST NOT be fragmented.
    +            if not fin:
    +                raise ValueError("control frames may not be fragmented")
    +            if data_len > 125:
    +                raise ValueError("control frame payloads may not exceed 125 bytes")
    +        if fin:
    +            finbit = self.FIN
    +        else:
    +            finbit = 0
    +        frame = struct.pack("B", finbit | opcode | flags)
    +        if self.mask_outgoing:
    +            mask_bit = 0x80
    +        else:
    +            mask_bit = 0
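    +        # Payload-length encoding per RFC 6455 section 5.2: lengths under
    +        # 126 fit in the second byte, 126 flags a 16-bit extended length,
    +        # and 127 flags a 64-bit extended length.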
    +        if data_len < 126:
    +            frame += struct.pack("B", data_len | mask_bit)
    +        elif data_len <= 0xFFFF:
    +            frame += struct.pack("!BH", 126 | mask_bit, data_len)
    +        else:
    +            frame += struct.pack("!BQ", 127 | mask_bit, data_len)
    +        if self.mask_outgoing:
    +            mask = os.urandom(4)
    +            data = mask + _websocket_mask(mask, data)
    +        frame += data
    +        self._wire_bytes_out += len(frame)
    +        return self.stream.write(frame)
    +
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
    +        """Sends the given message to the client of this Web Socket."""
    +        if binary:
    +            opcode = 0x2
    +        else:
    +            opcode = 0x1
    +        message = tornado.escape.utf8(message)
    +        assert isinstance(message, bytes)
    +        self._message_bytes_out += len(message)
    +        flags = 0
    +        if self._compressor:
    +            message = self._compressor.compress(message)
    +            flags |= self.RSV1
    +        # For historical reasons, write methods in Tornado operate in a semi-synchronous
    +        # mode in which awaiting the Future they return is optional (but errors can
    +        # still be raised). This requires us to go through an awkward dance here
    +        # to transform the errors that may be returned while presenting the same
    +        # semi-synchronous interface.
    +        try:
    +            fut = self._write_frame(True, opcode, message, flags=flags)
    +        except StreamClosedError:
    +            raise WebSocketClosedError()
    +
    +        async def wrapper() -> None:
    +            try:
    +                await fut
    +            except StreamClosedError:
    +                raise WebSocketClosedError()
    +
    +        return asyncio.ensure_future(wrapper())
    +
    +    def write_ping(self, data: bytes) -> None:
    +        """Send ping frame."""
    +        assert isinstance(data, bytes)
    +        self._write_frame(True, 0x9, data)
    +
    +    async def _receive_frame_loop(self) -> None:
    +        try:
    +            while not self.client_terminated:
    +                await self._receive_frame()
    +        except StreamClosedError:
    +            self._abort()
    +        self.handler.on_ws_connection_close(self.close_code, self.close_reason)
    +
    +    async def _read_bytes(self, n: int) -> bytes:
    +        data = await self.stream.read_bytes(n)
    +        self._wire_bytes_in += n
    +        return data
    +
    +    async def _receive_frame(self) -> None:
    +        # Read the frame header.
    +        data = await self._read_bytes(2)
    +        header, mask_payloadlen = struct.unpack("BB", data)
    +        is_final_frame = header & self.FIN
    +        reserved_bits = header & self.RSV_MASK
    +        opcode = header & self.OPCODE_MASK
    +        opcode_is_control = opcode & 0x8
    +        if self._decompressor is not None and opcode != 0:
    +            # Compression flag is present in the first frame's header,
    +            # but we can't decompress until we have all the frames of
    +            # the message.
    +            self._frame_compressed = bool(reserved_bits & self.RSV1)
    +            reserved_bits &= ~self.RSV1
    +        if reserved_bits:
    +            # client is using as-yet-undefined extensions; abort
    +            self._abort()
    +            return
    +        is_masked = bool(mask_payloadlen & 0x80)
    +        payloadlen = mask_payloadlen & 0x7F
    +
    +        # Parse and validate the length.
    +        if opcode_is_control and payloadlen >= 126:
    +            # control frames must have payload < 126
    +            self._abort()
    +            return
    +        if payloadlen < 126:
    +            self._frame_length = payloadlen
    +        elif payloadlen == 126:
    +            data = await self._read_bytes(2)
    +            payloadlen = struct.unpack("!H", data)[0]
    +        elif payloadlen == 127:
    +            data = await self._read_bytes(8)
    +            payloadlen = struct.unpack("!Q", data)[0]
    +        new_len = payloadlen
    +        if self._fragmented_message_buffer is not None:
    +            new_len += len(self._fragmented_message_buffer)
    +        if new_len > self.params.max_message_size:
    +            self.close(1009, "message too big")
    +            self._abort()
    +            return
    +
    +        # Read the payload, unmasking if necessary.
    +        if is_masked:
    +            self._frame_mask = await self._read_bytes(4)
    +        data = await self._read_bytes(payloadlen)
    +        if is_masked:
    +            assert self._frame_mask is not None
    +            data = _websocket_mask(self._frame_mask, data)
    +
    +        # Decide what to do with this frame.
    +        if opcode_is_control:
    +            # control frames may be interleaved with a series of fragmented
    +            # data frames, so control frames must not interact with
    +            # self._fragmented_*
    +            if not is_final_frame:
    +                # control frames must not be fragmented
    +                self._abort()
    +                return
    +        elif opcode == 0:  # continuation frame
    +            if self._fragmented_message_buffer is None:
    +                # nothing to continue
    +                self._abort()
    +                return
    +            self._fragmented_message_buffer += data
    +            if is_final_frame:
    +                opcode = self._fragmented_message_opcode
    +                data = self._fragmented_message_buffer
    +                self._fragmented_message_buffer = None
    +        else:  # start of new data message
    +            if self._fragmented_message_buffer is not None:
    +                # can't start new message until the old one is finished
    +                self._abort()
    +                return
    +            if not is_final_frame:
    +                self._fragmented_message_opcode = opcode
    +                self._fragmented_message_buffer = data
    +
    +        if is_final_frame:
    +            handled_future = self._handle_message(opcode, data)
    +            if handled_future is not None:
    +                await handled_future
    +
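The two-byte header decode at the top of `_receive_frame` is the core of RFC 6455 framing. As a standalone illustration (the function name and dict layout are ours, not Tornado's), the same bit tests look like this:

    import struct

    FIN, RSV1, OPCODE_MASK = 0x80, 0x40, 0x0F

    def parse_frame_header(two_bytes: bytes) -> dict:
        # Decode the fixed two-byte prefix of a WebSocket frame (RFC 6455, sec. 5.2).
        header, mask_payloadlen = struct.unpack("BB", two_bytes)
        opcode = header & OPCODE_MASK
        return {
            "fin": bool(header & FIN),              # final fragment of a message?
            "rsv1": bool(header & RSV1),            # permessage-deflate compression bit
            "opcode": opcode,                       # 0x1 text, 0x2 binary, 0x8 close, 0x9 ping, 0xA pong
            "is_control": bool(opcode & 0x8),       # control frames have the high opcode bit set
            "masked": bool(mask_payloadlen & 0x80),
            "payload_len": mask_payloadlen & 0x7F,  # 126 and 127 signal extended lengths
        }

    # A masked, final text frame with a 5-byte payload:
    # parse_frame_header(b"\x81\x85") -> fin=True, opcode=0x1, masked=True, payload_len=5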
    +    def _handle_message(self, opcode: int, data: bytes) -> "Optional[Future[None]]":
    +        """Execute on_message, returning its Future if it is a coroutine."""
    +        if self.client_terminated:
    +            return None
    +
    +        if self._frame_compressed:
    +            assert self._decompressor is not None
    +            try:
    +                data = self._decompressor.decompress(data)
    +            except _DecompressTooLargeError:
    +                self.close(1009, "message too big after decompression")
    +                self._abort()
    +                return None
    +
    +        if opcode == 0x1:
    +            # UTF-8 data
    +            self._message_bytes_in += len(data)
    +            try:
    +                decoded = data.decode("utf-8")
    +            except UnicodeDecodeError:
    +                self._abort()
    +                return None
    +            return self._run_callback(self.handler.on_message, decoded)
    +        elif opcode == 0x2:
    +            # Binary data
    +            self._message_bytes_in += len(data)
    +            return self._run_callback(self.handler.on_message, data)
    +        elif opcode == 0x8:
    +            # Close
    +            self.client_terminated = True
    +            if len(data) >= 2:
    +                self.close_code = struct.unpack(">H", data[:2])[0]
    +            if len(data) > 2:
    +                self.close_reason = to_unicode(data[2:])
    +            # Echo the received close code, if any (RFC 6455 section 5.5.1).
    +            self.close(self.close_code)
    +        elif opcode == 0x9:
    +            # Ping
    +            try:
    +                self._write_frame(True, 0xA, data)
    +            except StreamClosedError:
    +                self._abort()
    +            self._run_callback(self.handler.on_ping, data)
    +        elif opcode == 0xA:
    +            # Pong
    +            self.last_pong = IOLoop.current().time()
    +            return self._run_callback(self.handler.on_pong, data)
    +        else:
    +            self._abort()
    +        return None
    +
    +    def close(self, code: int = None, reason: str = None) -> None:
    +        """Closes the WebSocket connection."""
    +        if not self.server_terminated:
    +            if not self.stream.closed():
    +                if code is None and reason is not None:
    +                    code = 1000  # "normal closure" status code
    +                if code is None:
    +                    close_data = b""
    +                else:
    +                    close_data = struct.pack(">H", code)
    +                if reason is not None:
    +                    close_data += utf8(reason)
    +                try:
    +                    self._write_frame(True, 0x8, close_data)
    +                except StreamClosedError:
    +                    self._abort()
    +            self.server_terminated = True
    +        if self.client_terminated:
    +            if self._waiting is not None:
    +                self.stream.io_loop.remove_timeout(self._waiting)
    +                self._waiting = None
    +            self.stream.close()
    +        elif self._waiting is None:
    +            # Give the client a few seconds to complete a clean shutdown,
    +            # otherwise just close the connection.
    +            self._waiting = self.stream.io_loop.add_timeout(
    +                self.stream.io_loop.time() + 5, self._abort
    +            )
    +
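Per RFC 6455 section 5.5.1, the close payload assembled in `close()` above is just an optional big-endian status code followed by an optional UTF-8 reason. A minimal illustration (the values are examples):

    import struct

    code, reason = 1000, "bye"                       # 1000 == "normal closure"
    close_data = struct.pack(">H", code) + reason.encode("utf-8")
    assert close_data == b"\x03\xe8bye"              # 0x03E8 == 1000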
    +    def is_closing(self) -> bool:
    +        """Return ``True`` if this connection is closing.
    +
    +        The connection is considered closing if either side has
    +        initiated its closing handshake or if the stream has been
    +        shut down uncleanly.
    +        """
    +        return self.stream.closed() or self.client_terminated or self.server_terminated
    +
    +    @property
    +    def ping_interval(self) -> Optional[float]:
    +        interval = self.params.ping_interval
    +        if interval is not None:
    +            return interval
    +        return 0
    +
    +    @property
    +    def ping_timeout(self) -> Optional[float]:
    +        timeout = self.params.ping_timeout
    +        if timeout is not None:
    +            return timeout
    +        assert self.ping_interval is not None
    +        return max(3 * self.ping_interval, 30)
    +
    +    def start_pinging(self) -> None:
    +        """Start sending periodic pings to keep the connection alive"""
    +        assert self.ping_interval is not None
    +        if self.ping_interval > 0:
    +            self.last_ping = self.last_pong = IOLoop.current().time()
    +            self.ping_callback = PeriodicCallback(
    +                self.periodic_ping, self.ping_interval * 1000
    +            )
    +            self.ping_callback.start()
    +
    +    def periodic_ping(self) -> None:
    +        """Send a ping to keep the websocket alive
    +
    +        Called periodically if the websocket_ping_interval is set and non-zero.
    +        """
    +        if self.is_closing() and self.ping_callback is not None:
    +            self.ping_callback.stop()
    +            return
    +
    +        # Check for timeout on pong. Make sure that we really have
    +        # sent a recent ping in case the machine with both server and
    +        # client has been suspended since the last ping.
    +        now = IOLoop.current().time()
    +        since_last_pong = now - self.last_pong
    +        since_last_ping = now - self.last_ping
    +        assert self.ping_interval is not None
    +        assert self.ping_timeout is not None
    +        if (
    +            since_last_ping < 2 * self.ping_interval
    +            and since_last_pong > self.ping_timeout
    +        ):
    +            self.close()
    +            return
    +
    +        self.write_ping(b"")
    +        self.last_ping = now
    +
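The guard in `periodic_ping` is easy to misread: a missing pong only closes the connection if a ping was actually sent recently, which avoids false positives when the whole machine was suspended between pings. A small mirror of the check (the helper name and the numbers are ours, not Tornado's):

    def should_close(since_last_ping, since_last_pong, interval, timeout):
        # Only treat a missing pong as fatal if we know a recent ping went out.
        return since_last_ping < 2 * interval and since_last_pong > timeout

    assert should_close(10, 45, interval=10, timeout=30)      # pong overdue -> close
    assert not should_close(45, 45, interval=10, timeout=30)  # machine slept -> just ping again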
    +    def set_nodelay(self, x: bool) -> None:
    +        self.stream.set_nodelay(x)
    +
    +
    +class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    +    """WebSocket client connection.
    +
    +    This class should not be instantiated directly; use the
    +    `websocket_connect` function instead.
    +    """
    +
    +    protocol = None  # type: WebSocketProtocol
    +
    +    def __init__(
    +        self,
    +        request: httpclient.HTTPRequest,
    +        on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
    +        compression_options: Dict[str, Any] = None,
    +        ping_interval: float = None,
    +        ping_timeout: float = None,
    +        max_message_size: int = _default_max_message_size,
    +        subprotocols: Optional[List[str]] = [],
    +    ) -> None:
    +        self.connect_future = Future()  # type: Future[WebSocketClientConnection]
    +        self.read_queue = Queue(1)  # type: Queue[Union[None, str, bytes]]
    +        self.key = base64.b64encode(os.urandom(16))
    +        self._on_message_callback = on_message_callback
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
    +        self.params = _WebSocketParams(
    +            ping_interval=ping_interval,
    +            ping_timeout=ping_timeout,
    +            max_message_size=max_message_size,
    +            compression_options=compression_options,
    +        )
    +
    +        scheme, sep, rest = request.url.partition(":")
    +        scheme = {"ws": "http", "wss": "https"}[scheme]
    +        request.url = scheme + sep + rest
    +        request.headers.update(
    +            {
    +                "Upgrade": "websocket",
    +                "Connection": "Upgrade",
    +                "Sec-WebSocket-Key": self.key,
    +                "Sec-WebSocket-Version": "13",
    +            }
    +        )
    +        if subprotocols is not None:
    +            request.headers["Sec-WebSocket-Protocol"] = ",".join(subprotocols)
    +        if compression_options is not None:
    +            # Always offer to let the server set our max_wbits (and even though
    +            # we don't offer it, we will accept a client_no_context_takeover
    +            # from the server).
    +            # TODO: set server parameters for deflate extension
    +            # if requested in self.compression_options.
    +            request.headers[
    +                "Sec-WebSocket-Extensions"
    +            ] = "permessage-deflate; client_max_window_bits"
    +
    +        self.tcp_client = TCPClient()
    +        super(WebSocketClientConnection, self).__init__(
    +            None,
    +            request,
    +            lambda: None,
    +            self._on_http_response,
    +            104857600,
    +            self.tcp_client,
    +            65536,
    +            104857600,
    +        )
    +
    +    def close(self, code: int = None, reason: str = None) -> None:
    +        """Closes the websocket connection.
    +
    +        ``code`` and ``reason`` are documented under
    +        `WebSocketHandler.close`.
    +
    +        .. versionadded:: 3.2
    +
    +        .. versionchanged:: 4.0
    +
    +           Added the ``code`` and ``reason`` arguments.
    +        """
    +        if self.protocol is not None:
    +            self.protocol.close(code, reason)
    +            self.protocol = None  # type: ignore
    +
    +    def on_connection_close(self) -> None:
    +        if not self.connect_future.done():
    +            self.connect_future.set_exception(StreamClosedError())
    +        self._on_message(None)
    +        self.tcp_client.close()
    +        super(WebSocketClientConnection, self).on_connection_close()
    +
    +    def on_ws_connection_close(
    +        self, close_code: int = None, close_reason: str = None
    +    ) -> None:
    +        self.close_code = close_code
    +        self.close_reason = close_reason
    +        self.on_connection_close()
    +
    +    def _on_http_response(self, response: httpclient.HTTPResponse) -> None:
    +        if not self.connect_future.done():
    +            if response.error:
    +                self.connect_future.set_exception(response.error)
    +            else:
    +                self.connect_future.set_exception(
    +                    WebSocketError("Non-websocket response")
    +                )
    +
    +    async def headers_received(
    +        self,
    +        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
    +        headers: httputil.HTTPHeaders,
    +    ) -> None:
    +        assert isinstance(start_line, httputil.ResponseStartLine)
    +        if start_line.code != 101:
    +            await super(WebSocketClientConnection, self).headers_received(
    +                start_line, headers
    +            )
    +            return
    +
    +        if self._timeout is not None:
    +            self.io_loop.remove_timeout(self._timeout)
    +            self._timeout = None
    +
    +        self.headers = headers
    +        self.protocol = self.get_websocket_protocol()
    +        self.protocol._process_server_headers(self.key, self.headers)
    +        self.protocol.stream = self.connection.detach()
    +
    +        IOLoop.current().add_callback(self.protocol._receive_frame_loop)
    +        self.protocol.start_pinging()
    +
    +        # Once we've taken over the connection, clear the final callback
    +        # we set on the http request.  This deactivates the error handling
    +        # in simple_httpclient that would otherwise interfere with our
    +        # ability to see exceptions.
    +        self.final_callback = None  # type: ignore
    +
    +        future_set_result_unless_cancelled(self.connect_future, self)
    +
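`_process_server_headers`, called from `headers_received` above, verifies (among other things) that the server echoed the correct `Sec-WebSocket-Accept` value. RFC 6455 derives that value from the random `Sec-WebSocket-Key` generated in `__init__`; a standalone sketch of the computation (not Tornado's own code):

    import base64
    import hashlib

    # Fixed GUID from RFC 6455, section 1.3.
    WS_GUID = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

    def expected_accept(sec_websocket_key: bytes) -> bytes:
        # The server must return base64(SHA1(key + GUID)) or the handshake fails.
        return base64.b64encode(hashlib.sha1(sec_websocket_key + WS_GUID).digest())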
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
    +        """Sends a message to the WebSocket server.
    +
    +        If the stream is closed, raises `WebSocketClosedError`.
    +        Returns a `.Future` which can be used for flow control.
    +
    +        .. versionchanged:: 5.0
    +           Exception raised on a closed stream changed from `.StreamClosedError`
    +           to `WebSocketClosedError`.
    +        """
    +        return self.protocol.write_message(message, binary=binary)
    +
    +    def read_message(
    +        self, callback: Callable[["Future[Union[None, str, bytes]]"], None] = None
    +    ) -> Awaitable[Union[None, str, bytes]]:
    +        """Reads a message from the WebSocket server.
    +
    +        If on_message_callback was specified at WebSocket
    +        initialization, this function will never return messages
    +
    +        Returns a future whose result is the message, or None
    +        if the connection is closed.  If a callback argument
    +        is given it will be called with the future when it is
    +        ready.
    +        """
    +
    +        awaitable = self.read_queue.get()
    +        if callback is not None:
    +            self.io_loop.add_future(asyncio.ensure_future(awaitable), callback)
    +        return awaitable
    +
    +    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
    +        return self._on_message(message)
    +
    +    def _on_message(
    +        self, message: Union[None, str, bytes]
    +    ) -> Optional[Awaitable[None]]:
    +        if self._on_message_callback:
    +            self._on_message_callback(message)
    +            return None
    +        else:
    +            return self.read_queue.put(message)
    +
    +    def ping(self, data: bytes = b"") -> None:
    +        """Send ping frame to the remote end.
    +
    +        The data argument allows a small amount of data (up to 125
    +        bytes) to be sent as a part of the ping message. Note that not
    +        all websocket implementations expose this data to
    +        applications.
    +
    +        Consider using the ``ping_interval`` argument to
    +        `websocket_connect` instead of sending pings manually.
    +
    +        .. versionadded:: 5.1
    +
    +        """
    +        data = utf8(data)
    +        if self.protocol is None:
    +            raise WebSocketClosedError()
    +        self.protocol.write_ping(data)
    +
    +    def on_pong(self, data: bytes) -> None:
    +        pass
    +
    +    def on_ping(self, data: bytes) -> None:
    +        pass
    +
    +    def get_websocket_protocol(self) -> WebSocketProtocol:
    +        return WebSocketProtocol13(self, mask_outgoing=True, params=self.params)
    +
    +    @property
    +    def selected_subprotocol(self) -> Optional[str]:
    +        """The subprotocol selected by the server.
    +
    +        .. versionadded:: 5.1
    +        """
    +        return self.protocol.selected_subprotocol
    +
    +    def log_exception(
    +        self,
    +        typ: "Optional[Type[BaseException]]",
    +        value: Optional[BaseException],
    +        tb: Optional[TracebackType],
    +    ) -> None:
    +        assert typ is not None
    +        assert value is not None
    +        app_log.error("Uncaught exception %s", value, exc_info=(typ, value, tb))
    +
    +
    +def websocket_connect(
    +    url: Union[str, httpclient.HTTPRequest],
    +    callback: Callable[["Future[WebSocketClientConnection]"], None] = None,
    +    connect_timeout: float = None,
    +    on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
    +    compression_options: Dict[str, Any] = None,
    +    ping_interval: float = None,
    +    ping_timeout: float = None,
    +    max_message_size: int = _default_max_message_size,
    +    subprotocols: List[str] = None,
    +) -> "Awaitable[WebSocketClientConnection]":
    +    """Client-side websocket support.
    +
    +    Takes a url and returns a Future whose result is a
    +    `WebSocketClientConnection`.
    +
    +    ``compression_options`` is interpreted in the same way as the
    +    return value of `.WebSocketHandler.get_compression_options`.
    +
    +    The connection supports two styles of operation. In the coroutine
    +    style, the application typically calls
    +    `~.WebSocketClientConnection.read_message` in a loop::
    +
    +        conn = yield websocket_connect(url)
    +        while True:
    +            msg = yield conn.read_message()
    +            if msg is None: break
    +            # Do something with msg
    +
    +    In the callback style, pass an ``on_message_callback`` to
    +    ``websocket_connect``. In both styles, a message of ``None``
    +    indicates that the connection has been closed.
    +
    +    ``subprotocols`` may be a list of strings specifying proposed
    +    subprotocols. The selected protocol may be found on the
    +    ``selected_subprotocol`` attribute of the connection object
    +    when the connection is complete.
    +
    +    .. versionchanged:: 3.2
    +       Also accepts ``HTTPRequest`` objects in place of urls.
    +
    +    .. versionchanged:: 4.1
    +       Added ``compression_options`` and ``on_message_callback``.
    +
    +    .. versionchanged:: 4.5
    +       Added the ``ping_interval``, ``ping_timeout``, and ``max_message_size``
    +       arguments, which have the same meaning as in `WebSocketHandler`.
    +
    +    .. versionchanged:: 5.0
    +       The ``io_loop`` argument (deprecated since version 4.1) has been removed.
    +
    +    .. versionchanged:: 5.1
    +       Added the ``subprotocols`` argument.
    +    """
    +    if isinstance(url, httpclient.HTTPRequest):
    +        assert connect_timeout is None
    +        request = url
    +        # Copy and convert the headers dict/object (see comments in
    +        # AsyncHTTPClient.fetch)
    +        request.headers = httputil.HTTPHeaders(request.headers)
    +    else:
    +        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    +    request = cast(
    +        httpclient.HTTPRequest,
    +        httpclient._RequestProxy(request, httpclient.HTTPRequest._DEFAULTS),
    +    )
    +    conn = WebSocketClientConnection(
    +        request,
    +        on_message_callback=on_message_callback,
    +        compression_options=compression_options,
    +        ping_interval=ping_interval,
    +        ping_timeout=ping_timeout,
    +        max_message_size=max_message_size,
    +        subprotocols=subprotocols,
    +    )
    +    if callback is not None:
    +        IOLoop.current().add_future(conn.connect_future, callback)
    +    return conn.connect_future
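The docstring's example uses the older yield style; under Tornado 5+ the same loop reads naturally with async/await. A minimal usage sketch (the URL, port, and subprotocol are placeholders):

    import asyncio
    from tornado import websocket

    async def main():
        conn = await websocket.websocket_connect(
            "ws://localhost:8888/ws", ping_interval=10, subprotocols=["chat"]
        )
        await conn.write_message("hello")
        while True:
            msg = await conn.read_message()
            if msg is None:  # None means the connection was closed
                break
            print(msg)

    asyncio.run(main())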
    diff --git a/server/www/packages/packages-linux/x64/tornado/wsgi.py b/server/www/packages/packages-linux/x64/tornado/wsgi.py
    index e1230da..b88bad6 100644
    --- a/server/www/packages/packages-linux/x64/tornado/wsgi.py
    +++ b/server/www/packages/packages-linux/x64/tornado/wsgi.py
    @@ -1,377 +1,199 @@
    -#
    -# Copyright 2009 Facebook
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License"); you may
    -# not use this file except in compliance with the License. You may obtain
    -# a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    -# License for the specific language governing permissions and limitations
    -# under the License.
    -
    -"""WSGI support for the Tornado web framework.
    -
    -WSGI is the Python standard for web servers, and allows for interoperability
    -between Tornado and other Python web frameworks and servers.  This module
    -provides WSGI support in two ways:
    -
    -* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application
    -  interface.  This is useful for running a Tornado app on another
    -  HTTP server, such as Google App Engine.  See the `WSGIAdapter` class
    -  documentation for limitations that apply.
    -* `WSGIContainer` lets you run other WSGI applications and frameworks on the
    -  Tornado HTTP server.  For example, with this class you can mix Django
    -  and Tornado handlers in a single server.
    -"""
    -
    -from __future__ import absolute_import, division, print_function
    -
    -import sys
    -from io import BytesIO
    -import tornado
    -import warnings
    -
    -from tornado.concurrent import Future
    -from tornado import escape
    -from tornado import httputil
    -from tornado.log import access_log
    -from tornado import web
    -from tornado.escape import native_str
    -from tornado.util import unicode_type, PY3
    -
    -
    -if PY3:
    -    import urllib.parse as urllib_parse  # py3
    -else:
    -    import urllib as urllib_parse
    -
    -# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
    -# that are smuggled inside objects of type unicode (via the latin1 encoding).
    -# These functions are like those in the tornado.escape module, but defined
    -# here to minimize the temptation to use them in non-wsgi contexts.
    -if str is unicode_type:
    -    def to_wsgi_str(s):
    -        assert isinstance(s, bytes)
    -        return s.decode('latin1')
    -
    -    def from_wsgi_str(s):
    -        assert isinstance(s, str)
    -        return s.encode('latin1')
    -else:
    -    def to_wsgi_str(s):
    -        assert isinstance(s, bytes)
    -        return s
    -
    -    def from_wsgi_str(s):
    -        assert isinstance(s, str)
    -        return s
    -
    -
    -class WSGIApplication(web.Application):
    -    """A WSGI equivalent of `tornado.web.Application`.
    -
    -    .. deprecated:: 4.0
    -
    -       Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
    -       This class will be removed in Tornado 6.0.
    -    """
    -    def __call__(self, environ, start_response):
    -        return WSGIAdapter(self)(environ, start_response)
    -
    -
    -# WSGI has no facilities for flow control, so just return an already-done
    -# Future when the interface requires it.
    -def _dummy_future():
    -    f = Future()
    -    f.set_result(None)
    -    return f
    -
    -
    -class _WSGIConnection(httputil.HTTPConnection):
    -    def __init__(self, method, start_response, context):
    -        self.method = method
    -        self.start_response = start_response
    -        self.context = context
    -        self._write_buffer = []
    -        self._finished = False
    -        self._expected_content_remaining = None
    -        self._error = None
    -
    -    def set_close_callback(self, callback):
    -        # WSGI has no facility for detecting a closed connection mid-request,
    -        # so we can simply ignore the callback.
    -        pass
    -
    -    def write_headers(self, start_line, headers, chunk=None, callback=None):
    -        if self.method == 'HEAD':
    -            self._expected_content_remaining = 0
    -        elif 'Content-Length' in headers:
    -            self._expected_content_remaining = int(headers['Content-Length'])
    -        else:
    -            self._expected_content_remaining = None
    -        self.start_response(
    -            '%s %s' % (start_line.code, start_line.reason),
    -            [(native_str(k), native_str(v)) for (k, v) in headers.get_all()])
    -        if chunk is not None:
    -            self.write(chunk, callback)
    -        elif callback is not None:
    -            callback()
    -        return _dummy_future()
    -
    -    def write(self, chunk, callback=None):
    -        if self._expected_content_remaining is not None:
    -            self._expected_content_remaining -= len(chunk)
    -            if self._expected_content_remaining < 0:
    -                self._error = httputil.HTTPOutputError(
    -                    "Tried to write more data than Content-Length")
    -                raise self._error
    -        self._write_buffer.append(chunk)
    -        if callback is not None:
    -            callback()
    -        return _dummy_future()
    -
    -    def finish(self):
    -        if (self._expected_content_remaining is not None and
    -                self._expected_content_remaining != 0):
    -            self._error = httputil.HTTPOutputError(
    -                "Tried to write %d bytes less than Content-Length" %
    -                self._expected_content_remaining)
    -            raise self._error
    -        self._finished = True
    -
    -
    -class _WSGIRequestContext(object):
    -    def __init__(self, remote_ip, protocol):
    -        self.remote_ip = remote_ip
    -        self.protocol = protocol
    -
    -    def __str__(self):
    -        return self.remote_ip
    -
    -
    -class WSGIAdapter(object):
    -    """Converts a `tornado.web.Application` instance into a WSGI application.
    -
    -    Example usage::
    -
    -        import tornado.web
    -        import tornado.wsgi
    -        import wsgiref.simple_server
    -
    -        class MainHandler(tornado.web.RequestHandler):
    -            def get(self):
    -                self.write("Hello, world")
    -
    -        if __name__ == "__main__":
    -            application = tornado.web.Application([
    -                (r"/", MainHandler),
    -            ])
    -            wsgi_app = tornado.wsgi.WSGIAdapter(application)
    -            server = wsgiref.simple_server.make_server('', 8888, wsgi_app)
    -            server.serve_forever()
    -
-    See the `appengine demo
-    <https://github.com/tornadoweb/tornado/tree/stable/demos/appengine>`_
    -    for an example of using this module to run a Tornado app on Google
    -    App Engine.
    -
    -    In WSGI mode asynchronous methods are not supported.  This means
    -    that it is not possible to use `.AsyncHTTPClient`, or the
    -    `tornado.auth` or `tornado.websocket` modules.
    -
    -    In multithreaded WSGI servers on Python 3, it may be necessary to
    -    permit `asyncio` to create event loops on any thread. Run the
    -    following at startup (typically import time for WSGI
    -    applications)::
    -
    -        import asyncio
    -        from tornado.platform.asyncio import AnyThreadEventLoopPolicy
    -        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
    -
    -    .. versionadded:: 4.0
    -
    -    .. deprecated:: 5.1
    -
    -       This class is deprecated and will be removed in Tornado 6.0.
    -       Use Tornado's `.HTTPServer` instead of a WSGI container.
    -    """
    -    def __init__(self, application):
    -        warnings.warn("WSGIAdapter is deprecated, use Tornado's HTTPServer instead",
    -                      DeprecationWarning)
    -        if isinstance(application, WSGIApplication):
    -            self.application = lambda request: web.Application.__call__(
    -                application, request)
    -        else:
    -            self.application = application
    -
    -    def __call__(self, environ, start_response):
    -        method = environ["REQUEST_METHOD"]
    -        uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    -        uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
    -        if environ.get("QUERY_STRING"):
    -            uri += "?" + environ["QUERY_STRING"]
    -        headers = httputil.HTTPHeaders()
    -        if environ.get("CONTENT_TYPE"):
    -            headers["Content-Type"] = environ["CONTENT_TYPE"]
    -        if environ.get("CONTENT_LENGTH"):
    -            headers["Content-Length"] = environ["CONTENT_LENGTH"]
    -        for key in environ:
    -            if key.startswith("HTTP_"):
    -                headers[key[5:].replace("_", "-")] = environ[key]
    -        if headers.get("Content-Length"):
    -            body = environ["wsgi.input"].read(
    -                int(headers["Content-Length"]))
    -        else:
    -            body = b""
    -        protocol = environ["wsgi.url_scheme"]
    -        remote_ip = environ.get("REMOTE_ADDR", "")
    -        if environ.get("HTTP_HOST"):
    -            host = environ["HTTP_HOST"]
    -        else:
    -            host = environ["SERVER_NAME"]
    -        connection = _WSGIConnection(method, start_response,
    -                                     _WSGIRequestContext(remote_ip, protocol))
    -        request = httputil.HTTPServerRequest(
    -            method, uri, "HTTP/1.1", headers=headers, body=body,
    -            host=host, connection=connection)
    -        request._parse_body()
    -        self.application(request)
    -        if connection._error:
    -            raise connection._error
    -        if not connection._finished:
    -            raise Exception("request did not finish synchronously")
    -        return connection._write_buffer
    -
    -
    -class WSGIContainer(object):
    -    r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
    -
    -    .. warning::
    -
    -       WSGI is a *synchronous* interface, while Tornado's concurrency model
    -       is based on single-threaded asynchronous execution.  This means that
    -       running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
    -       than running the same app in a multi-threaded WSGI server like
    -       ``gunicorn`` or ``uwsgi``.  Use `WSGIContainer` only when there are
    -       benefits to combining Tornado and WSGI in the same process that
    -       outweigh the reduced scalability.
    -
    -    Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
    -    run it. For example::
    -
    -        def simple_app(environ, start_response):
    -            status = "200 OK"
    -            response_headers = [("Content-type", "text/plain")]
    -            start_response(status, response_headers)
    -            return ["Hello world!\n"]
    -
    -        container = tornado.wsgi.WSGIContainer(simple_app)
    -        http_server = tornado.httpserver.HTTPServer(container)
    -        http_server.listen(8888)
    -        tornado.ioloop.IOLoop.current().start()
    -
    -    This class is intended to let other frameworks (Django, web.py, etc)
    -    run on the Tornado HTTP server and I/O loop.
    -
    -    The `tornado.web.FallbackHandler` class is often useful for mixing
    -    Tornado and WSGI apps in the same server.  See
    -    https://github.com/bdarnell/django-tornado-demo for a complete example.
    -    """
    -    def __init__(self, wsgi_application):
    -        self.wsgi_application = wsgi_application
    -
    -    def __call__(self, request):
    -        data = {}
    -        response = []
    -
    -        def start_response(status, response_headers, exc_info=None):
    -            data["status"] = status
    -            data["headers"] = response_headers
    -            return response.append
    -        app_response = self.wsgi_application(
    -            WSGIContainer.environ(request), start_response)
    -        try:
    -            response.extend(app_response)
    -            body = b"".join(response)
    -        finally:
    -            if hasattr(app_response, "close"):
    -                app_response.close()
    -        if not data:
    -            raise Exception("WSGI app did not call start_response")
    -
    -        status_code, reason = data["status"].split(' ', 1)
    -        status_code = int(status_code)
    -        headers = data["headers"]
    -        header_set = set(k.lower() for (k, v) in headers)
    -        body = escape.utf8(body)
    -        if status_code != 304:
    -            if "content-length" not in header_set:
    -                headers.append(("Content-Length", str(len(body))))
    -            if "content-type" not in header_set:
    -                headers.append(("Content-Type", "text/html; charset=UTF-8"))
    -        if "server" not in header_set:
    -            headers.append(("Server", "TornadoServer/%s" % tornado.version))
    -
    -        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
    -        header_obj = httputil.HTTPHeaders()
    -        for key, value in headers:
    -            header_obj.add(key, value)
    -        request.connection.write_headers(start_line, header_obj, chunk=body)
    -        request.connection.finish()
    -        self._log(status_code, request)
    -
    -    @staticmethod
    -    def environ(request):
    -        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
    -        """
    -        hostport = request.host.split(":")
    -        if len(hostport) == 2:
    -            host = hostport[0]
    -            port = int(hostport[1])
    -        else:
    -            host = request.host
    -            port = 443 if request.protocol == "https" else 80
    -        environ = {
    -            "REQUEST_METHOD": request.method,
    -            "SCRIPT_NAME": "",
    -            "PATH_INFO": to_wsgi_str(escape.url_unescape(
    -                request.path, encoding=None, plus=False)),
    -            "QUERY_STRING": request.query,
    -            "REMOTE_ADDR": request.remote_ip,
    -            "SERVER_NAME": host,
    -            "SERVER_PORT": str(port),
    -            "SERVER_PROTOCOL": request.version,
    -            "wsgi.version": (1, 0),
    -            "wsgi.url_scheme": request.protocol,
    -            "wsgi.input": BytesIO(escape.utf8(request.body)),
    -            "wsgi.errors": sys.stderr,
    -            "wsgi.multithread": False,
    -            "wsgi.multiprocess": True,
    -            "wsgi.run_once": False,
    -        }
    -        if "Content-Type" in request.headers:
    -            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
    -        if "Content-Length" in request.headers:
    -            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
    -        for key, value in request.headers.items():
    -            environ["HTTP_" + key.replace("-", "_").upper()] = value
    -        return environ
    -
    -    def _log(self, status_code, request):
    -        if status_code < 400:
    -            log_method = access_log.info
    -        elif status_code < 500:
    -            log_method = access_log.warning
    -        else:
    -            log_method = access_log.error
    -        request_time = 1000.0 * request.request_time()
    -        summary = request.method + " " + request.uri + " (" + \
    -            request.remote_ip + ")"
    -        log_method("%d %s %.2fms", status_code, summary, request_time)
    -
    -
    -HTTPRequest = httputil.HTTPServerRequest
    +#
    +# Copyright 2009 Facebook
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License"); you may
    +# not use this file except in compliance with the License. You may obtain
    +# a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    +# License for the specific language governing permissions and limitations
    +# under the License.
    +
    +"""WSGI support for the Tornado web framework.
    +
    +WSGI is the Python standard for web servers, and allows for interoperability
    +between Tornado and other Python web frameworks and servers.
    +
    +This module provides WSGI support via the `WSGIContainer` class, which
    +makes it possible to run applications using other WSGI frameworks on
    +the Tornado HTTP server. The reverse is not supported; the Tornado
    +`.Application` and `.RequestHandler` classes are designed for use with
    +the Tornado `.HTTPServer` and cannot be used in a generic WSGI
    +container.
    +
    +"""
    +
    +import sys
    +from io import BytesIO
    +import tornado
    +
    +from tornado import escape
    +from tornado import httputil
    +from tornado.log import access_log
    +
    +from typing import List, Tuple, Optional, Callable, Any, Dict, Text
    +from types import TracebackType
    +import typing
    +
    +if typing.TYPE_CHECKING:
    +    from typing import Type  # noqa: F401
    +    from wsgiref.types import WSGIApplication as WSGIAppType  # noqa: F401
    +
    +
    +# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
    +# that are smuggled inside objects of type unicode (via the latin1 encoding).
    +# This function is like those in the tornado.escape module, but defined
    +# here to minimize the temptation to use it in non-wsgi contexts.
    +def to_wsgi_str(s: bytes) -> str:
    +    assert isinstance(s, bytes)
    +    return s.decode("latin1")
    +
    +
    +class WSGIContainer(object):
    +    r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
    +
    +    .. warning::
    +
    +       WSGI is a *synchronous* interface, while Tornado's concurrency model
    +       is based on single-threaded asynchronous execution.  This means that
    +       running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
    +       than running the same app in a multi-threaded WSGI server like
    +       ``gunicorn`` or ``uwsgi``.  Use `WSGIContainer` only when there are
    +       benefits to combining Tornado and WSGI in the same process that
    +       outweigh the reduced scalability.
    +
    +    Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
    +    run it. For example::
    +
    +        def simple_app(environ, start_response):
    +            status = "200 OK"
    +            response_headers = [("Content-type", "text/plain")]
    +            start_response(status, response_headers)
    +            return ["Hello world!\n"]
    +
    +        container = tornado.wsgi.WSGIContainer(simple_app)
    +        http_server = tornado.httpserver.HTTPServer(container)
    +        http_server.listen(8888)
    +        tornado.ioloop.IOLoop.current().start()
    +
    +    This class is intended to let other frameworks (Django, web.py, etc)
    +    run on the Tornado HTTP server and I/O loop.
    +
    +    The `tornado.web.FallbackHandler` class is often useful for mixing
    +    Tornado and WSGI apps in the same server.  See
    +    https://github.com/bdarnell/django-tornado-demo for a complete example.
    +    """
    +
    +    def __init__(self, wsgi_application: "WSGIAppType") -> None:
    +        self.wsgi_application = wsgi_application
    +
    +    def __call__(self, request: httputil.HTTPServerRequest) -> None:
    +        data = {}  # type: Dict[str, Any]
    +        response = []  # type: List[bytes]
    +
    +        def start_response(
    +            status: str,
    +            headers: List[Tuple[str, str]],
    +            exc_info: Optional[
    +                Tuple[
    +                    "Optional[Type[BaseException]]",
    +                    Optional[BaseException],
    +                    Optional[TracebackType],
    +                ]
    +            ] = None,
    +        ) -> Callable[[bytes], Any]:
    +            data["status"] = status
    +            data["headers"] = headers
    +            return response.append
    +
    +        app_response = self.wsgi_application(
    +            WSGIContainer.environ(request), start_response
    +        )
    +        try:
    +            response.extend(app_response)
    +            body = b"".join(response)
    +        finally:
    +            if hasattr(app_response, "close"):
    +                app_response.close()  # type: ignore
    +        if not data:
    +            raise Exception("WSGI app did not call start_response")
    +
    +        status_code_str, reason = data["status"].split(" ", 1)
    +        status_code = int(status_code_str)
    +        headers = data["headers"]  # type: List[Tuple[str, str]]
    +        header_set = set(k.lower() for (k, v) in headers)
    +        body = escape.utf8(body)
    +        if status_code != 304:
    +            if "content-length" not in header_set:
    +                headers.append(("Content-Length", str(len(body))))
    +            if "content-type" not in header_set:
    +                headers.append(("Content-Type", "text/html; charset=UTF-8"))
    +        if "server" not in header_set:
    +            headers.append(("Server", "TornadoServer/%s" % tornado.version))
    +
    +        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
    +        header_obj = httputil.HTTPHeaders()
    +        for key, value in headers:
    +            header_obj.add(key, value)
    +        assert request.connection is not None
    +        request.connection.write_headers(start_line, header_obj, chunk=body)
    +        request.connection.finish()
    +        self._log(status_code, request)
    +
    +    @staticmethod
    +    def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
    +        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
    +        """
    +        hostport = request.host.split(":")
    +        if len(hostport) == 2:
    +            host = hostport[0]
    +            port = int(hostport[1])
    +        else:
    +            host = request.host
    +            port = 443 if request.protocol == "https" else 80
    +        environ = {
    +            "REQUEST_METHOD": request.method,
    +            "SCRIPT_NAME": "",
    +            "PATH_INFO": to_wsgi_str(
    +                escape.url_unescape(request.path, encoding=None, plus=False)
    +            ),
    +            "QUERY_STRING": request.query,
    +            "REMOTE_ADDR": request.remote_ip,
    +            "SERVER_NAME": host,
    +            "SERVER_PORT": str(port),
    +            "SERVER_PROTOCOL": request.version,
    +            "wsgi.version": (1, 0),
    +            "wsgi.url_scheme": request.protocol,
    +            "wsgi.input": BytesIO(escape.utf8(request.body)),
    +            "wsgi.errors": sys.stderr,
    +            "wsgi.multithread": False,
    +            "wsgi.multiprocess": True,
    +            "wsgi.run_once": False,
    +        }
    +        if "Content-Type" in request.headers:
    +            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
    +        if "Content-Length" in request.headers:
    +            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
    +        for key, value in request.headers.items():
    +            environ["HTTP_" + key.replace("-", "_").upper()] = value
    +        return environ
    +
    +    def _log(self, status_code: int, request: httputil.HTTPServerRequest) -> None:
    +        if status_code < 400:
    +            log_method = access_log.info
    +        elif status_code < 500:
    +            log_method = access_log.warning
    +        else:
    +            log_method = access_log.error
    +        request_time = 1000.0 * request.request_time()
    +        assert request.method is not None
    +        assert request.uri is not None
    +        summary = request.method + " " + request.uri + " (" + request.remote_ip + ")"
    +        log_method("%d %s %.2fms", status_code, summary, request_time)
    +
    +
    +HTTPRequest = httputil.HTTPServerRequest
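Note that the rewrite drops `WSGIApplication` and `WSGIAdapter` entirely: as of Tornado 6 this module only runs WSGI apps under Tornado, not the other way around. The header translation at the end of `environ()` follows PEP 3333; a small mirror of that rule (the helper name is ours):

    def header_to_environ_key(name: str) -> str:
        # Content-Type/Content-Length are special-cased without the HTTP_ prefix;
        # every other header is upper-cased with hyphens turned into underscores.
        key = name.replace("-", "_").upper()
        return key if key in ("CONTENT_TYPE", "CONTENT_LENGTH") else "HTTP_" + key

    assert header_to_environ_key("X-Forwarded-For") == "HTTP_X_FORWARDED_FOR"
    assert header_to_environ_key("Content-Type") == "CONTENT_TYPE"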
    diff --git a/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py b/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py
    index c8bc604..7a485cf 100644
    --- a/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py
    @@ -17,10 +17,8 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from __future__ import print_function
    -
    -from . import Image, FontFile
     
    +from . import FontFile, Image
     
     # --------------------------------------------------------------------
     # parse X Bitmap Distribution Format (BDF)
    @@ -32,14 +30,10 @@ bdf_slant = {
         "O": "Oblique",
         "RI": "Reverse Italic",
         "RO": "Reverse Oblique",
    -    "OT": "Other"
    +    "OT": "Other",
     }
     
    -bdf_spacing = {
    -    "P": "Proportional",
    -    "M": "Monospaced",
    -    "C": "Cell"
    -}
    +bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"}
     
     
     def bdf_char(f):
    @@ -50,7 +44,7 @@ def bdf_char(f):
                 return None
             if s[:9] == b"STARTCHAR":
                 break
    -    id = s[9:].strip().decode('ascii')
    +    id = s[9:].strip().decode("ascii")
     
         # load symbol properties
         props = {}
    @@ -59,7 +53,7 @@ def bdf_char(f):
             if not s or s[:6] == b"BITMAP":
                 break
             i = s.find(b" ")
    -        props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii')
    +        props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
     
         # load bitmap
         bitmap = []
    @@ -73,7 +67,7 @@ def bdf_char(f):
         [x, y, l, d] = [int(p) for p in props["BBX"].split()]
         [dx, dy] = [int(p) for p in props["DWIDTH"].split()]
     
    -    bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y)
    +    bbox = (dx, dy), (l, -d - y, x + l, -d), (0, 0, x, y)
     
         try:
             im = Image.frombytes("1", (x, y), bitmap, "hex", "1")
    @@ -87,11 +81,10 @@ def bdf_char(f):
     ##
     # Font file plugin for the X11 BDF format.
     
    +
     class BdfFontFile(FontFile.FontFile):
    -
         def __init__(self, fp):
    -
    -        FontFile.FontFile.__init__(self)
    +        super().__init__()
     
             s = fp.readline()
             if s[:13] != b"STARTFONT 2.1":
    @@ -105,24 +98,10 @@ class BdfFontFile(FontFile.FontFile):
                 if not s or s[:13] == b"ENDPROPERTIES":
                     break
                 i = s.find(b" ")
    -            props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii')
    +            props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
                 if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
                     if s.find(b"LogicalFontDescription") < 0:
    -                    comments.append(s[i+1:-1].decode('ascii'))
    -
    -        # font = props["FONT"].split("-")
    -
    -        # font[4] = bdf_slant[font[4].upper()]
    -        # font[11] = bdf_spacing[font[11].upper()]
    -
    -        # ascent = int(props["FONT_ASCENT"])
    -        # descent = int(props["FONT_DESCENT"])
    -
    -        # fontname = ";".join(font[1:])
    -
    -        # print("#", fontname)
    -        # for i in comments:
    -        #       print("#", i)
    +                    comments.append(s[i + 1 : -1].decode("ascii"))
     
             while True:
                 c = bdf_char(fp)
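Both the glyph and font property loops above split each BDF line at the first space and strip the trailing newline with the `[:-1]` slice. A tiny standalone version of that parse (the function name is ours):

    def parse_bdf_prop(line: bytes) -> tuple:
        # b"FONT_ASCENT 14\n" -> ("FONT_ASCENT", "14")
        i = line.find(b" ")
        return line[:i].decode("ascii"), line[i + 1 : -1].decode("ascii")

    assert parse_bdf_prop(b"FONT_ASCENT 14\n") == ("FONT_ASCENT", "14")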
    diff --git a/server/www/packages/packages-windows/x86/PIL/BlpImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/BlpImagePlugin.py
    index 9b1a99a..5ccba37 100644
    --- a/server/www/packages/packages-windows/x86/PIL/BlpImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/BlpImagePlugin.py
    @@ -34,7 +34,6 @@ from io import BytesIO
     
     from . import Image, ImageFile
     
    -
     BLP_FORMAT_JPEG = 0
     
     BLP_ENCODING_UNCOMPRESSED = 1
    @@ -47,11 +46,7 @@ BLP_ALPHA_ENCODING_DXT5 = 7
     
     
     def unpack_565(i):
    -    return (
    -        ((i >> 11) & 0x1f) << 3,
    -        ((i >> 5) & 0x3f) << 2,
    -        (i & 0x1f) << 3
    -    )
    +    return (((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3)
     
     
     def decode_dxt1(data, alpha=False):
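`unpack_565` expands a 16-bit RGB565 pixel into 8-bit channels; the `<< 3` and `<< 2` shifts scale the 5- and 6-bit fields up, so full intensity comes out as 248 or 252 rather than 255. A quick check (re-declaring the helper for illustration):

    def unpack_565(i):
        return (((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3)

    assert unpack_565(0xF800) == (248, 0, 0)  # pure red: top 5 bits
    assert unpack_565(0x07E0) == (0, 252, 0)  # pure green: middle 6 bits
    assert unpack_565(0x001F) == (0, 0, 248)  # pure blue: low 5 bits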
    @@ -119,12 +114,12 @@ def decode_dxt3(data):
     
         for block in range(blocks):
             idx = block * 16
    -        block = data[idx:idx + 16]
    +        block = data[idx : idx + 16]
             # Decode next 16-byte block.
             bits = struct.unpack_from("<8B", block)
         color0, color1 = struct.unpack_from("<HH", block, 8)
 
-        code, = struct.unpack_from("<I", block, 12)
+        (code,) = struct.unpack_from("<I", block, 12)
 
         r0, g0, b0 = unpack_565(color0)
         r1, g1, b1 = unpack_565(color1)
@@ -139,7 +134,7 @@ def decode_dxt3(data):
                     a >>= 4
                     else:
                         high = True
    -                    a &= 0xf
    +                    a &= 0xF
                     a *= 17  # We get a value between 0 and 15
     
                     color_code = (code >> 2 * (4 * j + i)) & 0x03
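Each alpha byte in a DXT3 block packs two 4-bit values, which is what the high/low toggle above walks through; multiplying by 17 maps the 4-bit range 0..15 exactly onto 0..255 (15 * 17 == 255). For a single example byte:

    a_byte = 0x9F                       # example value holding two alpha nibbles
    low = (a_byte & 0xF) * 17           # low nibble first: 15 -> 255
    high = (a_byte >> 4) * 17           # then the high nibble: 9 -> 153
    assert (low, high) == (255, 153)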
    @@ -172,19 +167,17 @@ def decode_dxt5(data):
     
         for block in range(blocks):
             idx = block * 16
    -        block = data[idx:idx + 16]
    +        block = data[idx : idx + 16]
         # Decode next 16-byte block.
         a0, a1 = struct.unpack_from("<BB", block)
diff --git a/server/www/packages/packages-windows/x86/PIL/BmpImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/BmpImagePlugin.py
-            elif file_info['header_size'] >= 40:  # v3 and OS/2
    -                file_info['y_flip'] = i8(header_data[7]) == 0xff
    -                file_info['direction'] = 1 if file_info['y_flip'] else -1
    -                file_info['width'] = i32(header_data[0:4])
    -                file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8])
    -                file_info['planes'] = i16(header_data[8:10])
    -                file_info['bits'] = i16(header_data[10:12])
    -                file_info['compression'] = i32(header_data[12:16])
    -                file_info['data_size'] = i32(header_data[16:20])  # byte size of pixel data
    -                file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28]))
    -                file_info['colors'] = i32(header_data[28:32])
    -                file_info['palette_padding'] = 4
    -                self.info["dpi"] = tuple(
    -                    map(lambda x: int(math.ceil(x / 39.3701)),
    -                        file_info['pixels_per_meter']))
    -                if file_info['compression'] == self.BITFIELDS:
    -                    if len(header_data) >= 52:
    -                        for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']):
    -                            file_info[mask] = i32(header_data[36+idx*4:40+idx*4])
    -                    else:
    -                        # 40 byte headers only have the three components in the bitfields masks,
    -                        # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
    -                        # See also https://github.com/python-pillow/Pillow/issues/1293
    -                        # There is a 4th component in the RGBQuad, in the alpha location, but it
    -                        # is listed as a reserved component, and it is not generally an alpha channel
    -                        file_info['a_mask'] = 0x0
    -                        for mask in ['r_mask', 'g_mask', 'b_mask']:
    -                            file_info[mask] = i32(read(4))
    -                    file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'])
    -                    file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask'])
    +        # read bmp header size @offset 14 (this is part of the header size)
    +        file_info["header_size"] = i32(read(4))
    +        file_info["direction"] = -1
    +
    +        # -------------------- If requested, read header at a specific position
    +        # read the rest of the bmp header, without its size
    +        header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)
    +
    +        # -------------------------------------------------- IBM OS/2 Bitmap v1
    +        # ----- This format has different offsets because of width/height types
    +        if file_info["header_size"] == 12:
    +            file_info["width"] = i16(header_data[0:2])
    +            file_info["height"] = i16(header_data[2:4])
    +            file_info["planes"] = i16(header_data[4:6])
    +            file_info["bits"] = i16(header_data[6:8])
    +            file_info["compression"] = self.RAW
    +            file_info["palette_padding"] = 3
    +
    +        # --------------------------------------------- Windows Bitmap v2 to v5
    +        # v3, OS/2 v2, v4, v5
    +        elif file_info["header_size"] in (40, 64, 108, 124):
    +            file_info["y_flip"] = i8(header_data[7]) == 0xFF
    +            file_info["direction"] = 1 if file_info["y_flip"] else -1
    +            file_info["width"] = i32(header_data[0:4])
    +            file_info["height"] = (
    +                i32(header_data[4:8])
    +                if not file_info["y_flip"]
    +                else 2 ** 32 - i32(header_data[4:8])
    +            )
    +            file_info["planes"] = i16(header_data[8:10])
    +            file_info["bits"] = i16(header_data[10:12])
    +            file_info["compression"] = i32(header_data[12:16])
    +            # byte size of pixel data
    +            file_info["data_size"] = i32(header_data[16:20])
    +            file_info["pixels_per_meter"] = (
    +                i32(header_data[20:24]),
    +                i32(header_data[24:28]),
    +            )
    +            file_info["colors"] = i32(header_data[28:32])
    +            file_info["palette_padding"] = 4
    +            self.info["dpi"] = tuple(
    +                int(x / 39.3701 + 0.5) for x in file_info["pixels_per_meter"]
    +            )
    +            if file_info["compression"] == self.BITFIELDS:
    +                if len(header_data) >= 52:
    +                    for idx, mask in enumerate(
    +                        ["r_mask", "g_mask", "b_mask", "a_mask"]
    +                    ):
    +                        file_info[mask] = i32(header_data[36 + idx * 4 : 40 + idx * 4])
    +                else:
    +                    # 40 byte headers only have the three components in the
    +                    # bitfields masks, ref:
    +                    # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
    +                    # See also
    +                    # https://github.com/python-pillow/Pillow/issues/1293
    +                    # There is a 4th component in the RGBQuad, in the alpha
    +                    # location, but it is listed as a reserved component,
    +                    # and it is not generally an alpha channel
    +                    file_info["a_mask"] = 0x0
    +                    for mask in ["r_mask", "g_mask", "b_mask"]:
    +                        file_info[mask] = i32(read(4))
    +                file_info["rgb_mask"] = (
    +                    file_info["r_mask"],
    +                    file_info["g_mask"],
    +                    file_info["b_mask"],
    +                )
    +                file_info["rgba_mask"] = (
    +                    file_info["r_mask"],
    +                    file_info["g_mask"],
    +                    file_info["b_mask"],
    +                    file_info["a_mask"],
    +                )
             else:
    -            raise IOError("Unsupported BMP header type (%d)" % file_info['header_size'])
    +            raise OSError("Unsupported BMP header type (%d)" % file_info["header_size"])
    +
             # ------------------ Special case : header is reported 40, which
             # ---------------------- is shorter than real size for bpp >= 16
    -        self.size = file_info['width'], file_info['height']
    -        # -------- If color count was not found in the header, compute from bits
    -        file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits'])
    -        # -------------------------------- Check abnormal values for DOS attacks
    -        if file_info['width'] * file_info['height'] > 2**31:
    -            raise IOError("Unsupported BMP Size: (%dx%d)" % self.size)
    -        # ----------------------- Check bit depth for unusual unsupported values
    -        self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None))
    +        self._size = file_info["width"], file_info["height"]
    +
    +        # ------- If color count was not found in the header, compute from bits
    +        file_info["colors"] = (
    +            file_info["colors"]
    +            if file_info.get("colors", 0)
    +            else (1 << file_info["bits"])
    +        )
    +
    +        # ------------------------------- Check abnormal values for DOS attacks
    +        if file_info["width"] * file_info["height"] > 2 ** 31:
    +            raise OSError("Unsupported BMP Size: (%dx%d)" % self.size)
    +
    +        # ---------------------- Check bit depth for unusual unsupported values
    +        self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None))
             if self.mode is None:
    -            raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits'])
    -        # ----------------- Process BMP with Bitfields compression (not palette)
    -        if file_info['compression'] == self.BITFIELDS:
    +            raise OSError("Unsupported BMP pixel depth (%d)" % file_info["bits"])
    +
    +        # ---------------- Process BMP with Bitfields compression (not palette)
    +        if file_info["compression"] == self.BITFIELDS:
                 SUPPORTED = {
    -                32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0), (0xff000000, 0xff0000, 0xff00, 0x0)],
    -                24: [(0xff0000, 0xff00, 0xff)],
    -                16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)]
    +                32: [
    +                    (0xFF0000, 0xFF00, 0xFF, 0x0),
    +                    (0xFF0000, 0xFF00, 0xFF, 0xFF000000),
    +                    (0xFF, 0xFF00, 0xFF0000, 0xFF000000),
    +                    (0x0, 0x0, 0x0, 0x0),
    +                    (0xFF000000, 0xFF0000, 0xFF00, 0x0),
    +                ],
    +                24: [(0xFF0000, 0xFF00, 0xFF)],
    +                16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
                 }
                 MASK_MODES = {
    -                (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX",
    -                (32, (0xff000000, 0xff0000, 0xff00, 0x0)): "XBGR",
    -                (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA",
    +                (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
    +                (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
    +                (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
    +                (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
                     (32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
    -                (24, (0xff0000, 0xff00, 0xff)): "BGR",
    -                (16, (0xf800, 0x7e0, 0x1f)): "BGR;16",
    -                (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15"
    +                (24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
    +                (16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
    +                (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
                 }
    -            if file_info['bits'] in SUPPORTED:
    -                if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]:
    -                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])]
    -                    self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode
    -                elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]:
    -                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])]
    +            if file_info["bits"] in SUPPORTED:
    +                if (
    +                    file_info["bits"] == 32
    +                    and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
    +                ):
    +                    raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
    +                    self.mode = "RGBA" if "A" in raw_mode else self.mode
    +                elif (
    +                    file_info["bits"] in (24, 16)
    +                    and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
    +                ):
    +                    raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
                     else:
    -                    raise IOError("Unsupported BMP bitfields layout")
    +                    raise OSError("Unsupported BMP bitfields layout")
                 else:
    -                raise IOError("Unsupported BMP bitfields layout")
    -        elif file_info['compression'] == self.RAW:
    -            if file_info['bits'] == 32 and header == 22:  # 32-bit .cur offset
    +                raise OSError("Unsupported BMP bitfields layout")
    +        elif file_info["compression"] == self.RAW:
    +            if file_info["bits"] == 32 and header == 22:  # 32-bit .cur offset
                     raw_mode, self.mode = "BGRA", "RGBA"
             else:
    -            raise IOError("Unsupported BMP compression (%d)" % file_info['compression'])
    -        # ---------------- Once the header is processed, process the palette/LUT
    +            raise OSError("Unsupported BMP compression (%d)" % file_info["compression"])
    +
    +        # --------------- Once the header is processed, process the palette/LUT
             if self.mode == "P":  # Paletted for 1, 4 and 8 bit images
    -            # ----------------------------------------------------- 1-bit images
    -            if not (0 < file_info['colors'] <= 65536):
    -                raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors'])
    +
    +            # ---------------------------------------------------- 1-bit images
    +            if not (0 < file_info["colors"] <= 65536):
    +                raise OSError("Unsupported BMP Palette size (%d)" % file_info["colors"])
                 else:
    -                padding = file_info['palette_padding']
    -                palette = read(padding * file_info['colors'])
    +                padding = file_info["palette_padding"]
    +                palette = read(padding * file_info["colors"])
                     greyscale = True
    -                indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors']))
    -                # ------------------ Check if greyscale and ignore palette if so
    +                indices = (
    +                    (0, 255)
    +                    if file_info["colors"] == 2
    +                    else list(range(file_info["colors"]))
    +                )
    +
    +                # ----------------- Check if greyscale and ignore palette if so
                     for ind, val in enumerate(indices):
    -                    rgb = palette[ind*padding:ind*padding + 3]
    +                    rgb = palette[ind * padding : ind * padding + 3]
                         if rgb != o8(val) * 3:
                             greyscale = False
    -                # -------- If all colors are grey, white or black, ditch palette
    +
    +                # ------- If all colors are grey, white or black, ditch palette
                     if greyscale:
    -                    self.mode = "1" if file_info['colors'] == 2 else "L"
    +                    self.mode = "1" if file_info["colors"] == 2 else "L"
                         raw_mode = self.mode
                     else:
                         self.mode = "P"
    -                    self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette)
    +                    self.palette = ImagePalette.raw(
    +                        "BGRX" if padding == 4 else "BGR", palette
    +                    )
     
    -        # ----------------------------- Finally set the tile data for the plugin
    -        self.info['compression'] = file_info['compression']
    -        self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(),
    -                      (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction'])
    -                      )]
    +        # ---------------------------- Finally set the tile data for the plugin
    +        self.info["compression"] = file_info["compression"]
    +        self.tile = [
    +            (
    +                "raw",
    +                (0, 0, file_info["width"], file_info["height"]),
    +                offset or self.fp.tell(),
    +                (
    +                    raw_mode,
    +                    ((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3),
    +                    file_info["direction"],
    +                ),
    +            )
    +        ]
     
         def _open(self):
             """ Open file, check magic number and read header """
    @@ -201,9 +271,9 @@ class BmpImageFile(ImageFile.ImageFile):
             self._bitmap(offset=offset)
     
     
    -# ==============================================================================
    +# =============================================================================
     # Image plugin for the DIB format (BMP alias)
    -# ==============================================================================
    +# =============================================================================
     class DibImageFile(BmpImageFile):
     
         format = "DIB"
    @@ -212,6 +282,7 @@ class DibImageFile(BmpImageFile):
         def _open(self):
             self._bitmap()
     
    +
     #
     # --------------------------------------------------------------------
     # Write BMP file
    @@ -226,43 +297,56 @@ SAVE = {
     }
     
     
    -def _save(im, fp, filename):
    +def _dib_save(im, fp, filename):
    +    _save(im, fp, filename, False)
    +
    +
    +def _save(im, fp, filename, bitmap_header=True):
         try:
             rawmode, bits, colors = SAVE[im.mode]
         except KeyError:
    -        raise IOError("cannot write mode %s as BMP" % im.mode)
    +        raise OSError("cannot write mode %s as BMP" % im.mode)
     
         info = im.encoderinfo
     
         dpi = info.get("dpi", (96, 96))
     
         # 1 meter == 39.3701 inches
    -    ppm = tuple(map(lambda x: int(x * 39.3701), dpi))
    +    ppm = tuple(map(lambda x: int(x * 39.3701 + 0.5), dpi))
     
    -    stride = ((im.size[0]*bits+7)//8+3) & (~3)
    +    stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3)
         header = 40  # or 64 for OS/2 version 2
    -    offset = 14 + header + colors * 4
         image = stride * im.size[1]
     
         # bitmap header
    -    fp.write(b"BM" +                      # file type (magic)
    -             o32(offset+image) +          # file size
    -             o32(0) +                     # reserved
    -             o32(offset))                 # image data offset
    +    if bitmap_header:
    +        offset = 14 + header + colors * 4
    +        file_size = offset + image
    +        if file_size > 2 ** 32 - 1:
    +            raise ValueError("File size is too large for the BMP format")
    +        fp.write(
    +            b"BM"  # file type (magic)
    +            + o32(file_size)  # file size
    +            + o32(0)  # reserved
    +            + o32(offset)  # image data offset
    +        )
     
         # bitmap info header
    -    fp.write(o32(header) +                # info header size
    -             o32(im.size[0]) +            # width
    -             o32(im.size[1]) +            # height
    -             o16(1) +                     # planes
    -             o16(bits) +                  # depth
    -             o32(0) +                     # compression (0=uncompressed)
    -             o32(image) +                 # size of bitmap
    -             o32(ppm[0]) + o32(ppm[1]) +  # resolution
    -             o32(colors) +                # colors used
    -             o32(colors))                 # colors important
    +    fp.write(
    +        o32(header)  # info header size
    +        + o32(im.size[0])  # width
    +        + o32(im.size[1])  # height
    +        + o16(1)  # planes
    +        + o16(bits)  # depth
    +        + o32(0)  # compression (0=uncompressed)
    +        + o32(image)  # size of bitmap
    +        + o32(ppm[0])  # resolution
    +        + o32(ppm[1])  # resolution
    +        + o32(colors)  # colors used
    +        + o32(colors)  # colors important
    +    )
     
    -    fp.write(b"\0" * (header - 40))       # padding (for OS/2 format)
    +    fp.write(b"\0" * (header - 40))  # padding (for OS/2 format)
     
         if im.mode == "1":
             for i in (0, 255):
    @@ -273,8 +357,8 @@ def _save(im, fp, filename):
         elif im.mode == "P":
             fp.write(im.im.getpalette("RGB", "BGRX"))
     
    -    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0,
    -                    (rawmode, stride, -1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))])
    +
     
     #
     # --------------------------------------------------------------------
    @@ -287,3 +371,10 @@ Image.register_save(BmpImageFile.format, _save)
     Image.register_extension(BmpImageFile.format, ".bmp")
     
     Image.register_mime(BmpImageFile.format, "image/bmp")
    +
    +Image.register_open(DibImageFile.format, DibImageFile, _dib_accept)
    +Image.register_save(DibImageFile.format, _dib_save)
    +
    +Image.register_extension(DibImageFile.format, ".dib")
    +
    +Image.register_mime(DibImageFile.format, "image/bmp")
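Two pieces of arithmetic recur in the BmpImagePlugin hunks above: the DPI conversion now rounds instead of truncating (`int(x * 39.3701 + 0.5)`), and rows are padded to 4-byte boundaries via `((width * bits + 31) >> 3) & (~3)`. A standalone sketch of both calculations (plain Python, not part of the patch):

    def bmp_row_stride(width, bits_per_pixel):
        # BMP rows are stored padded to a multiple of 4 bytes.
        return ((width * bits_per_pixel + 31) >> 3) & ~3

    def dpi_to_ppm(dpi):
        # 1 meter == 39.3701 inches; the + 0.5 rounds to the nearest integer.
        return int(dpi * 39.3701 + 0.5)

    def ppm_to_dpi(ppm):
        return int(ppm / 39.3701 + 0.5)

    assert bmp_row_stride(1, 24) == 4        # 3 bytes of pixels padded to 4
    assert bmp_row_stride(100, 1) == 16      # 13 bytes of bits padded to 16
    assert ppm_to_dpi(dpi_to_ppm(96)) == 96  # rounding keeps 96 dpi stable

The rounding matters because plain truncation in both directions could drift a saved-and-reloaded DPI value by one.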
    diff --git a/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py
    index 16d83c7..48f21e1 100644
    --- a/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py
    @@ -27,6 +27,7 @@ def register_handler(handler):
     # --------------------------------------------------------------------
     # Image adapter
     
    +
     def _accept(prefix):
         return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
     
    @@ -47,7 +48,7 @@ class BufrStubImageFile(ImageFile.StubImageFile):
     
             # make something up
             self.mode = "F"
    -        self.size = 1, 1
    +        self._size = 1, 1
     
             loader = self._load()
             if loader:
    @@ -59,7 +60,7 @@ class BufrStubImageFile(ImageFile.StubImageFile):
     
     def _save(im, fp, filename):
         if _handler is None or not hasattr("_handler", "save"):
    -        raise IOError("BUFR save handler not installed")
    +        raise OSError("BUFR save handler not installed")
         _handler.save(im, fp, filename)
     
     
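BufrStubImagePlugin (like the FITS stub later in this patch) only identifies the format; real decoding is delegated to an application-installed handler via `register_handler`. A minimal sketch of the handler protocol, with a made-up handler class:

    from PIL import BufrStubImagePlugin

    class MyBufrHandler:
        # The stub calls open()/load()/save() on whatever object is registered.
        def open(self, im):
            pass  # a real handler would set im.mode and size from the data

        def load(self, im):
            raise NotImplementedError("decode with a real BUFR library here")

        def save(self, im, fp, filename):
            raise NotImplementedError

    BufrStubImagePlugin.register_handler(MyBufrHandler())

The `_save` hunk above only changes the error type raised when no save-capable handler is installed.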
    diff --git a/server/www/packages/packages-windows/x86/PIL/ContainerIO.py b/server/www/packages/packages-windows/x86/PIL/ContainerIO.py
    index 496ed68..5bb0086 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ContainerIO.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ContainerIO.py
    @@ -18,9 +18,10 @@
     # A file object that provides read access to a part of an existing
     # file (for example a TAR file).
     
    +import io
     
    -class ContainerIO(object):
     
    +class ContainerIO:
         def __init__(self, file, offset, length):
             """
             Create file object.
    @@ -39,9 +40,9 @@ class ContainerIO(object):
         # Always false.
     
         def isatty(self):
    -        return 0
    +        return False
     
    -    def seek(self, offset, mode=0):
    +    def seek(self, offset, mode=io.SEEK_SET):
             """
             Move file pointer.
     
    @@ -81,7 +82,7 @@ class ContainerIO(object):
             else:
                 n = self.length - self.pos
             if not n:  # EOF
    -            return ""
    +            return b"" if "b" in self.fh.mode else ""
             self.pos = self.pos + n
             return self.fh.read(n)
     
    @@ -91,13 +92,14 @@ class ContainerIO(object):
     
             :returns: An 8-bit string.
             """
    -        s = ""
    +        s = b"" if "b" in self.fh.mode else ""
    +        newline_character = b"\n" if "b" in self.fh.mode else "\n"
             while True:
                 c = self.read(1)
                 if not c:
                     break
                 s = s + c
    -            if c == "\n":
    +            if c == newline_character:
                     break
             return s
     
    @@ -107,10 +109,10 @@ class ContainerIO(object):
     
             :returns: A list of 8-bit strings.
             """
    -        l = []
    +        lines = []
             while True:
                 s = self.readline()
                 if not s:
                     break
    -            l.append(s)
    -        return l
    +            lines.append(s)
    +        return lines
    diff --git a/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py
    index e4257cd..3a1b6d2 100644
    --- a/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py
    @@ -15,14 +15,9 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import print_function
    -
    -from . import Image, BmpImagePlugin
    +from . import BmpImagePlugin, Image
     from ._binary import i8, i16le as i16, i32le as i32
     
    -__version__ = "0.1"
    -
     #
     # --------------------------------------------------------------------
     
    @@ -34,6 +29,7 @@ def _accept(prefix):
     ##
     # Image plugin for Windows Cursor files.
     
    +
     class CurImageFile(BmpImagePlugin.BmpImageFile):
     
         format = "CUR"
    @@ -56,14 +52,6 @@ class CurImageFile(BmpImagePlugin.BmpImageFile):
                     m = s
                 elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]):
                     m = s
    -            # print("width", i8(s[0]))
    -            # print("height", i8(s[1]))
    -            # print("colors", i8(s[2]))
    -            # print("reserved", i8(s[3]))
    -            # print("hotspot x", i16(s[4:]))
    -            # print("hotspot y", i16(s[6:]))
    -            # print("bytes", i32(s[8:]))
    -            # print("offset", i32(s[12:]))
             if not m:
                 raise TypeError("No cursors were found")
     
    @@ -71,9 +59,9 @@ class CurImageFile(BmpImagePlugin.BmpImageFile):
             self._bitmap(i32(m[12:]) + offset)
     
             # patch up the bitmap height
    -        self.size = self.size[0], self.size[1]//2
    +        self._size = self.size[0], self.size[1] // 2
             d, e, o, a = self.tile[0]
    -        self.tile[0] = d, (0, 0)+self.size, o, a
    +        self.tile[0] = d, (0, 0) + self.size, o, a
     
             return
     
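Background for the height patch-up in CurImageFile._open above: a cursor bitmap stores the XOR (color) image and the AND (transparency) mask stacked vertically, so the height recorded in the embedded BMP header is twice the visible height. In sketch form:

    def visible_cursor_size(bmp_width, bmp_height):
        # The embedded BMP counts XOR image + AND mask stacked vertically,
        # so only half of the header height is visible cursor.
        return bmp_width, bmp_height // 2

    assert visible_cursor_size(32, 64) == (32, 32)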
    diff --git a/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py
    index 2045927..7d2aff3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py
    @@ -25,8 +25,6 @@ from . import Image
     from ._binary import i32le as i32
     from .PcxImagePlugin import PcxImageFile
     
    -__version__ = "0.2"
    -
     MAGIC = 0x3ADE68B1  # QUIZ: what's this value, then?
     
     
    @@ -37,6 +35,7 @@ def _accept(prefix):
     ##
     # Image plugin for the Intel DCX format.
     
    +
     class DcxImageFile(PcxImageFile):
     
         format = "DCX"
    @@ -81,6 +80,15 @@ class DcxImageFile(PcxImageFile):
         def tell(self):
             return self.frame
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     Image.register_open(DcxImageFile.format, DcxImageFile, _accept)
     
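The `_close__fp` helper added here (and again in FliImagePlugin below) is duplicated per class on purpose: name mangling turns `self.__fp` into `self._DcxImageFile__fp`, so a shared base-class implementation would not see each plugin's private handle. A distilled sketch of the pattern (illustrative class, not Pillow code):

    class MultiFrameFile:
        def __init__(self, fp):
            self.fp = fp    # current handle, may be replaced while seeking
            self.__fp = fp  # private handle kept for rewinding to frame 0

        def _close__fp(self):
            try:
                if self.__fp != self.fp:  # avoid closing a shared handle
                    self.__fp.close()
            except AttributeError:
                pass  # __fp may already have been cleared
            finally:
                self.__fp = None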
    diff --git a/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py
    index e755f94..9ba6e0f 100644
    --- a/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py
    @@ -12,8 +12,8 @@ Full text of the CC0 license:
     
     import struct
     from io import BytesIO
    -from . import Image, ImageFile
     
    +from . import Image, ImageFile
     
     # Magic ("DDS ")
     DDS_MAGIC = 0x20534444
    @@ -61,8 +61,7 @@ DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS
     DDS_ALPHA = DDPF_ALPHA
     DDS_PAL8 = DDPF_PALETTEINDEXED8
     
    -DDS_HEADER_FLAGS_TEXTURE = (DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH |
    -                            DDSD_PIXELFORMAT)
    +DDS_HEADER_FLAGS_TEXTURE = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT
     DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT
     DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH
     DDS_HEADER_FLAGS_PITCH = DDSD_PITCH
    @@ -107,58 +106,65 @@ class DdsImageFile(ImageFile.ImageFile):
         def _open(self):
             magic, header_size = struct.unpack(" 0:
    -                s = fp.read(min(lengthfile, 100*1024))
    +                s = fp.read(min(lengthfile, 100 * 1024))
                     if not s:
                         break
                     lengthfile -= len(s)
                     f.write(s)
     
    -    # Build ghostscript command
    -    command = ["gs",
    -               "-q",                         # quiet mode
    -               "-g%dx%d" % size,             # set output geometry (pixels)
    -               "-r%fx%f" % res,              # set input DPI (dots per inch)
    -               "-dBATCH",                    # exit after processing
    -               "-dNOPAUSE",                  # don't pause between pages,
    -               "-dSAFER",                    # safe mode
    -               "-sDEVICE=ppmraw",            # ppm driver
    -               "-sOutputFile=%s" % outfile,  # output file
    -               "-c", "%d %d translate" % (-bbox[0], -bbox[1]),
    -                                             # adjust for image origin
    -               "-f", infile,                 # input file
    -               "-c", "showpage",             # showpage (see: https://bugs.ghostscript.com/show_bug.cgi?id=698272)
    -               ]
    +    # Build Ghostscript command
    +    command = [
    +        "gs",
    +        "-q",  # quiet mode
    +        "-g%dx%d" % size,  # set output geometry (pixels)
    +        "-r%fx%f" % res,  # set input DPI (dots per inch)
    +        "-dBATCH",  # exit after processing
    +        "-dNOPAUSE",  # don't pause between pages
    +        "-dSAFER",  # safe mode
    +        "-sDEVICE=ppmraw",  # ppm driver
    +        "-sOutputFile=%s" % outfile,  # output file
    +        # adjust for image origin
    +        "-c",
    +        "%d %d translate" % (-bbox[0], -bbox[1]),
    +        "-f",
    +        infile,  # input file
    +        # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
    +        "-c",
    +        "showpage",
    +    ]
     
         if gs_windows_binary is not None:
             if not gs_windows_binary:
    -            raise WindowsError('Unable to locate Ghostscript on paths')
    +            raise OSError("Unable to locate Ghostscript on paths")
             command[0] = gs_windows_binary
     
    -    # push data through ghostscript
    +    # push data through Ghostscript
         try:
    -        with open(os.devnull, 'w+b') as devnull:
    -            subprocess.check_call(command, stdin=devnull, stdout=devnull)
    -        im = Image.open(outfile)
    -        im.load()
    +        startupinfo = None
    +        if sys.platform.startswith("win"):
    +            startupinfo = subprocess.STARTUPINFO()
    +            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    +        subprocess.check_call(command, startupinfo=startupinfo)
    +        out_im = Image.open(outfile)
    +        out_im.load()
         finally:
             try:
                 os.unlink(outfile)
    @@ -152,18 +151,21 @@ def Ghostscript(tile, size, fp, scale=1):
             except OSError:
                 pass
     
    -    return im.im.copy()
    +    im = out_im.im.copy()
    +    out_im.close()
    +    return im
     
     
    -class PSFile(object):
    +class PSFile:
         """
         Wrapper for bytesio object that treats either CR or LF as end of line.
         """
    +
         def __init__(self, fp):
             self.fp = fp
             self.char = None
     
    -    def seek(self, offset, whence=0):
    +    def seek(self, offset, whence=io.SEEK_SET):
             self.char = None
             self.fp.seek(offset, whence)
     
    @@ -181,12 +183,12 @@ class PSFile(object):
             if self.char in b"\r\n":
                 self.char = None
     
    -        return s.decode('latin-1')
    +        return s.decode("latin-1")
     
     
     def _accept(prefix):
    -    return prefix[:4] == b"%!PS" or \
    -           (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5)
    +    return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5)
    +
     
     ##
     # Image plugin for Encapsulated Postscript.  This plugin supports only
    @@ -206,16 +208,7 @@ class EpsImageFile(ImageFile.ImageFile):
     
             # Rewrap the open file pointer in something that will
             # convert line endings and decode to latin-1.
    -        try:
    -            if py3:
    -                # Python3, can use bare open command.
    -                fp = open(self.fp.name, "Ur", encoding='latin-1')
    -            else:
    -                # Python2, no encoding conversion necessary
    -                fp = open(self.fp.name, "Ur")
    -        except:
    -            # Expect this for bytesio/stringio
    -            fp = PSFile(self.fp)
    +        fp = PSFile(self.fp)
     
             # go to offset - start of "%!PS"
             fp.seek(offset)
    @@ -223,13 +216,13 @@ class EpsImageFile(ImageFile.ImageFile):
             box = None
     
             self.mode = "RGB"
    -        self.size = 1, 1  # FIXME: huh?
    +        self._size = 1, 1  # FIXME: huh?
     
             #
             # Load EPS header
     
             s_raw = fp.readline()
    -        s = s_raw.strip('\r\n')
    +        s = s_raw.strip("\r\n")
     
             while s_raw:
                 if s:
    @@ -238,7 +231,7 @@ class EpsImageFile(ImageFile.ImageFile):
     
                     try:
                         m = split.match(s)
    -                except re.error as v:
    +                except re.error:
                         raise SyntaxError("not an EPS file")
     
                     if m:
    @@ -250,10 +243,11 @@ class EpsImageFile(ImageFile.ImageFile):
                                 # fields should be integers, but some drivers
                                 # put floating point values there anyway.
                                 box = [int(float(i)) for i in v.split()]
    -                            self.size = box[2] - box[0], box[3] - box[1]
    -                            self.tile = [("eps", (0, 0) + self.size, offset,
    -                                          (length, box))]
    -                        except:
    +                            self._size = box[2] - box[0], box[3] - box[1]
    +                            self.tile = [
    +                                ("eps", (0, 0) + self.size, offset, (length, box))
    +                            ]
    +                        except Exception:
                                 pass
     
                     else:
    @@ -267,15 +261,15 @@ class EpsImageFile(ImageFile.ImageFile):
                                 self.info[k[:8]] = k[9:]
                             else:
                                 self.info[k] = ""
    -                    elif s[0] == '%':
    +                    elif s[0] == "%":
                             # handle non-DSC Postscript comments that some
                             # tools mistakenly put in the Comments section
                             pass
                         else:
    -                        raise IOError("bad EPS header")
    +                        raise OSError("bad EPS header")
     
                 s_raw = fp.readline()
    -            s = s_raw.strip('\r\n')
    +            s = s_raw.strip("\r\n")
     
                 if s and s[:1] != "%":
                     break
    @@ -299,15 +293,15 @@ class EpsImageFile(ImageFile.ImageFile):
                     except ValueError:
                         break
     
    -                self.size = int(x), int(y)
    +                self._size = int(x), int(y)
                     return
     
    -            s = fp.readline().strip('\r\n')
    +            s = fp.readline().strip("\r\n")
                 if not s:
                     break
     
             if not box:
    -            raise IOError("cannot determine EPS bounding box")
    +            raise OSError("cannot determine EPS bounding box")
     
         def _find_offset(self, fp):
     
    @@ -315,7 +309,7 @@ class EpsImageFile(ImageFile.ImageFile):
     
             if s[:4] == b"%!PS":
                 # for HEAD without binary preview
    -            fp.seek(0, 2)
    +            fp.seek(0, io.SEEK_END)
                 length = fp.tell()
                 offset = 0
             elif i32(s[0:4]) == 0xC6D3D0C5:
    @@ -337,7 +331,7 @@ class EpsImageFile(ImageFile.ImageFile):
                 return
             self.im = Ghostscript(self.tile, self.size, self.fp, scale)
             self.mode = self.im.mode
    -        self.size = self.im.size
    +        self._size = self.im.size
             self.tile = []
     
         def load_seek(self, *args, **kwargs):
    @@ -349,6 +343,7 @@ class EpsImageFile(ImageFile.ImageFile):
     #
     # --------------------------------------------------------------------
     
    +
     def _save(im, fp, filename, eps=1):
         """EPS Writer for the Python Imaging Library."""
     
    @@ -367,54 +362,49 @@ def _save(im, fp, filename, eps=1):
         else:
             raise ValueError("image mode is not supported")
     
    -    class NoCloseStream(object):
    -        def __init__(self, fp):
    -            self.fp = fp
    -
    -        def __getattr__(self, name):
    -            return getattr(self.fp, name)
    -
    -        def close(self):
    -            pass
    -
         base_fp = fp
    +    wrapped_fp = False
         if fp != sys.stdout:
    -        fp = NoCloseStream(fp)
    -        if sys.version_info.major > 2:
    -            fp = io.TextIOWrapper(fp, encoding='latin-1')
    +        fp = io.TextIOWrapper(fp, encoding="latin-1")
    +        wrapped_fp = True
    +
    +    try:
    +        if eps:
    +            #
    +            # write EPS header
    +            fp.write("%!PS-Adobe-3.0 EPSF-3.0\n")
    +            fp.write("%%Creator: PIL 0.1 EpsEncode\n")
    +            # fp.write("%%CreationDate: %s"...)
    +            fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size)
    +            fp.write("%%Pages: 1\n")
    +            fp.write("%%EndComments\n")
    +            fp.write("%%Page: 1 1\n")
    +            fp.write("%%ImageData: %d %d " % im.size)
    +            fp.write('%d %d 0 1 1 "%s"\n' % operator)
     
    -    if eps:
             #
    -        # write EPS header
    -        fp.write("%!PS-Adobe-3.0 EPSF-3.0\n")
    -        fp.write("%%Creator: PIL 0.1 EpsEncode\n")
    -        # fp.write("%%CreationDate: %s"...)
    -        fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size)
    -        fp.write("%%Pages: 1\n")
    -        fp.write("%%EndComments\n")
    -        fp.write("%%Page: 1 1\n")
    -        fp.write("%%ImageData: %d %d " % im.size)
    -        fp.write("%d %d 0 1 1 \"%s\"\n" % operator)
    +        # image header
    +        fp.write("gsave\n")
    +        fp.write("10 dict begin\n")
    +        fp.write("/buf %d string def\n" % (im.size[0] * operator[1]))
    +        fp.write("%d %d scale\n" % im.size)
    +        fp.write("%d %d 8\n" % im.size)  # <= bits
    +        fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    +        fp.write("{ currentfile buf readhexstring pop } bind\n")
    +        fp.write(operator[2] + "\n")
    +        if hasattr(fp, "flush"):
    +            fp.flush()
     
    -    #
    -    # image header
    -    fp.write("gsave\n")
    -    fp.write("10 dict begin\n")
    -    fp.write("/buf %d string def\n" % (im.size[0] * operator[1]))
    -    fp.write("%d %d scale\n" % im.size)
    -    fp.write("%d %d 8\n" % im.size)  # <= bits
    -    fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    -    fp.write("{ currentfile buf readhexstring pop } bind\n")
    -    fp.write(operator[2] + "\n")
    -    if hasattr(fp, "flush"):
    -        fp.flush()
    +        ImageFile._save(im, base_fp, [("eps", (0, 0) + im.size, 0, None)])
     
    -    ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)])
    +        fp.write("\n%%%%EndBinary\n")
    +        fp.write("grestore end\n")
    +        if hasattr(fp, "flush"):
    +            fp.flush()
    +    finally:
    +        if wrapped_fp:
    +            fp.detach()
     
    -    fp.write("\n%%%%EndBinary\n")
    -    fp.write("grestore end\n")
    -    if hasattr(fp, "flush"):
    -        fp.flush()
     
     #
     # --------------------------------------------------------------------
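For context on the `-g%dx%d` (pixels) and `-r%fx%f` (dpi) options assembled in the Ghostscript hunk above: Pillow derives them from the EPS bounding box (in points, 1/72 inch) and the requested scale. A simplified mirror of that arithmetic (standalone sketch, not the exact Pillow code):

    def gs_geometry(bbox, size, scale=1):
        # bbox: EPS BoundingBox in points; size: target size in pixels.
        scale = int(scale) or 1
        size = (size[0] * scale, size[1] * scale)
        res = (
            72.0 * size[0] / (bbox[2] - bbox[0]),  # dots per inch, x
            72.0 * size[1] / (bbox[3] - bbox[1]),  # dots per inch, y
        )
        return size, res

    # A 612x792 pt (US letter) box rendered at 2x comes out at 144 dpi:
    print(gs_geometry((0, 0, 612, 792), (612, 792), scale=2))
    # -> ((1224, 1584), (144.0, 144.0))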
    diff --git a/server/www/packages/packages-windows/x86/PIL/ExifTags.py b/server/www/packages/packages-windows/x86/PIL/ExifTags.py
    index a8ad26b..cecc3f2 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ExifTags.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ExifTags.py
    @@ -18,11 +18,10 @@
     # Maps EXIF tags to tag names.
     
     TAGS = {
    -
         # possibly incomplete
    -    0x000b: "ProcessingSoftware",
    -    0x00fe: "NewSubfileType",
    -    0x00ff: "SubfileType",
    +    0x000B: "ProcessingSoftware",
    +    0x00FE: "NewSubfileType",
    +    0x00FF: "SubfileType",
         0x0100: "ImageWidth",
         0x0101: "ImageLength",
         0x0102: "BitsPerSample",
    @@ -31,10 +30,10 @@ TAGS = {
         0x0107: "Thresholding",
         0x0108: "CellWidth",
         0x0109: "CellLength",
    -    0x010a: "FillOrder",
    -    0x010d: "DocumentName",
    -    0x010e: "ImageDescription",
    -    0x010f: "Make",
    +    0x010A: "FillOrder",
    +    0x010D: "DocumentName",
    +    0x010E: "ImageDescription",
    +    0x010F: "Make",
         0x0110: "Model",
         0x0111: "StripOffsets",
         0x0112: "Orientation",
    @@ -43,10 +42,10 @@ TAGS = {
         0x0117: "StripByteCounts",
         0x0118: "MinSampleValue",
         0x0119: "MaxSampleValue",
    -    0x011a: "XResolution",
    -    0x011b: "YResolution",
    -    0x011c: "PlanarConfiguration",
    -    0x011d: "PageName",
    +    0x011A: "XResolution",
    +    0x011B: "YResolution",
    +    0x011C: "PlanarConfiguration",
    +    0x011D: "PageName",
         0x0120: "FreeOffsets",
         0x0121: "FreeByteCounts",
         0x0122: "GrayResponseUnit",
    @@ -55,24 +54,24 @@ TAGS = {
         0x0125: "T6Options",
         0x0128: "ResolutionUnit",
         0x0129: "PageNumber",
    -    0x012d: "TransferFunction",
    +    0x012D: "TransferFunction",
         0x0131: "Software",
         0x0132: "DateTime",
    -    0x013b: "Artist",
    -    0x013c: "HostComputer",
    -    0x013d: "Predictor",
    -    0x013e: "WhitePoint",
    -    0x013f: "PrimaryChromaticities",
    +    0x013B: "Artist",
    +    0x013C: "HostComputer",
    +    0x013D: "Predictor",
    +    0x013E: "WhitePoint",
    +    0x013F: "PrimaryChromaticities",
         0x0140: "ColorMap",
         0x0141: "HalftoneHints",
         0x0142: "TileWidth",
         0x0143: "TileLength",
         0x0144: "TileOffsets",
         0x0145: "TileByteCounts",
    -    0x014a: "SubIFDs",
    -    0x014c: "InkSet",
    -    0x014d: "InkNames",
    -    0x014e: "NumberOfInks",
    +    0x014A: "SubIFDs",
    +    0x014C: "InkSet",
    +    0x014D: "InkNames",
    +    0x014E: "NumberOfInks",
         0x0150: "DotRange",
         0x0151: "TargetPrinter",
         0x0152: "ExtraSamples",
    @@ -83,9 +82,9 @@ TAGS = {
         0x0157: "ClipPath",
         0x0158: "XClipPathUnits",
         0x0159: "YClipPathUnits",
    -    0x015a: "Indexed",
    -    0x015b: "JPEGTables",
    -    0x015f: "OPIProxy",
    +    0x015A: "Indexed",
    +    0x015B: "JPEGTables",
    +    0x015F: "OPIProxy",
         0x0200: "JPEGProc",
         0x0201: "JpegIFOffset",
         0x0202: "JpegIFByteCount",
    @@ -99,20 +98,20 @@ TAGS = {
         0x0212: "YCbCrSubSampling",
         0x0213: "YCbCrPositioning",
         0x0214: "ReferenceBlackWhite",
    -    0x02bc: "XMLPacket",
    +    0x02BC: "XMLPacket",
         0x1000: "RelatedImageFileFormat",
         0x1001: "RelatedImageWidth",
         0x1002: "RelatedImageLength",
         0x4746: "Rating",
         0x4749: "RatingPercent",
    -    0x800d: "ImageID",
    -    0x828d: "CFARepeatPatternDim",
    -    0x828e: "CFAPattern",
    -    0x828f: "BatteryLevel",
    +    0x800D: "ImageID",
    +    0x828D: "CFARepeatPatternDim",
    +    0x828E: "CFAPattern",
    +    0x828F: "BatteryLevel",
         0x8298: "Copyright",
    -    0x829a: "ExposureTime",
    -    0x829d: "FNumber",
    -    0x83bb: "IPTCNAA",
    +    0x829A: "ExposureTime",
    +    0x829D: "FNumber",
    +    0x83BB: "IPTCNAA",
         0x8649: "ImageResources",
         0x8769: "ExifOffset",
         0x8773: "InterColorProfile",
    @@ -122,8 +121,8 @@ TAGS = {
         0x8827: "ISOSpeedRatings",
         0x8828: "OECF",
         0x8829: "Interlace",
    -    0x882a: "TimeZoneOffset",
    -    0x882b: "SelfTimerMode",
    +    0x882A: "TimeZoneOffset",
    +    0x882B: "SelfTimerMode",
         0x9000: "ExifVersion",
         0x9003: "DateTimeOriginal",
         0x9004: "DateTimeDigitized",
    @@ -138,142 +137,148 @@ TAGS = {
         0x9207: "MeteringMode",
         0x9208: "LightSource",
         0x9209: "Flash",
    -    0x920a: "FocalLength",
    -    0x920b: "FlashEnergy",
    -    0x920c: "SpatialFrequencyResponse",
    -    0x920d: "Noise",
    +    0x920A: "FocalLength",
    +    0x920B: "FlashEnergy",
    +    0x920C: "SpatialFrequencyResponse",
    +    0x920D: "Noise",
         0x9211: "ImageNumber",
         0x9212: "SecurityClassification",
         0x9213: "ImageHistory",
         0x9214: "SubjectLocation",
         0x9215: "ExposureIndex",
         0x9216: "TIFF/EPStandardID",
    -    0x927c: "MakerNote",
    +    0x927C: "MakerNote",
         0x9286: "UserComment",
         0x9290: "SubsecTime",
         0x9291: "SubsecTimeOriginal",
         0x9292: "SubsecTimeDigitized",
    -    0x9c9b: "XPTitle",
    -    0x9c9c: "XPComment",
    -    0x9c9d: "XPAuthor",
    -    0x9c9e: "XPKeywords",
    -    0x9c9f: "XPSubject",
    -    0xa000: "FlashPixVersion",
    -    0xa001: "ColorSpace",
    -    0xa002: "ExifImageWidth",
    -    0xa003: "ExifImageHeight",
    -    0xa004: "RelatedSoundFile",
    -    0xa005: "ExifInteroperabilityOffset",
    -    0xa20b: "FlashEnergy",
    -    0xa20c: "SpatialFrequencyResponse",
    -    0xa20e: "FocalPlaneXResolution",
    -    0xa20f: "FocalPlaneYResolution",
    -    0xa210: "FocalPlaneResolutionUnit",
    -    0xa214: "SubjectLocation",
    -    0xa215: "ExposureIndex",
    -    0xa217: "SensingMethod",
    -    0xa300: "FileSource",
    -    0xa301: "SceneType",
    -    0xa302: "CFAPattern",
    -    0xa401: "CustomRendered",
    -    0xa402: "ExposureMode",
    -    0xa403: "WhiteBalance",
    -    0xa404: "DigitalZoomRatio",
    -    0xa405: "FocalLengthIn35mmFilm",
    -    0xa406: "SceneCaptureType",
    -    0xa407: "GainControl",
    -    0xa408: "Contrast",
    -    0xa409: "Saturation",
    -    0xa40a: "Sharpness",
    -    0xa40b: "DeviceSettingDescription",
    -    0xa40c: "SubjectDistanceRange",
    -    0xa420: "ImageUniqueID",
    -    0xa430: "CameraOwnerName",
    -    0xa431: "BodySerialNumber",
    -    0xa432: "LensSpecification",
    -    0xa433: "LensMake",
    -    0xa434: "LensModel",
    -    0xa435: "LensSerialNumber",
    -    0xa500: "Gamma",
    -    0xc4a5: "PrintImageMatching",
    -    0xc612: "DNGVersion",
    -    0xc613: "DNGBackwardVersion",
    -    0xc614: "UniqueCameraModel",
    -    0xc615: "LocalizedCameraModel",
    -    0xc616: "CFAPlaneColor",
    -    0xc617: "CFALayout",
    -    0xc618: "LinearizationTable",
    -    0xc619: "BlackLevelRepeatDim",
    -    0xc61a: "BlackLevel",
    -    0xc61b: "BlackLevelDeltaH",
    -    0xc61c: "BlackLevelDeltaV",
    -    0xc61d: "WhiteLevel",
    -    0xc61e: "DefaultScale",
    -    0xc61f: "DefaultCropOrigin",
    -    0xc620: "DefaultCropSize",
    -    0xc621: "ColorMatrix1",
    -    0xc622: "ColorMatrix2",
    -    0xc623: "CameraCalibration1",
    -    0xc624: "CameraCalibration2",
    -    0xc625: "ReductionMatrix1",
    -    0xc626: "ReductionMatrix2",
    -    0xc627: "AnalogBalance",
    -    0xc628: "AsShotNeutral",
    -    0xc629: "AsShotWhiteXY",
    -    0xc62a: "BaselineExposure",
    -    0xc62b: "BaselineNoise",
    -    0xc62c: "BaselineSharpness",
    -    0xc62d: "BayerGreenSplit",
    -    0xc62e: "LinearResponseLimit",
    -    0xc62f: "CameraSerialNumber",
    -    0xc630: "LensInfo",
    -    0xc631: "ChromaBlurRadius",
    -    0xc632: "AntiAliasStrength",
    -    0xc633: "ShadowScale",
    -    0xc634: "DNGPrivateData",
    -    0xc635: "MakerNoteSafety",
    -    0xc65a: "CalibrationIlluminant1",
    -    0xc65b: "CalibrationIlluminant2",
    -    0xc65c: "BestQualityScale",
    -    0xc65d: "RawDataUniqueID",
    -    0xc68b: "OriginalRawFileName",
    -    0xc68c: "OriginalRawFileData",
    -    0xc68d: "ActiveArea",
    -    0xc68e: "MaskedAreas",
    -    0xc68f: "AsShotICCProfile",
    -    0xc690: "AsShotPreProfileMatrix",
    -    0xc691: "CurrentICCProfile",
    -    0xc692: "CurrentPreProfileMatrix",
    -    0xc6bf: "ColorimetricReference",
    -    0xc6f3: "CameraCalibrationSignature",
    -    0xc6f4: "ProfileCalibrationSignature",
    -    0xc6f6: "AsShotProfileName",
    -    0xc6f7: "NoiseReductionApplied",
    -    0xc6f8: "ProfileName",
    -    0xc6f9: "ProfileHueSatMapDims",
    -    0xc6fa: "ProfileHueSatMapData1",
    -    0xc6fb: "ProfileHueSatMapData2",
    -    0xc6fc: "ProfileToneCurve",
    -    0xc6fd: "ProfileEmbedPolicy",
    -    0xc6fe: "ProfileCopyright",
    -    0xc714: "ForwardMatrix1",
    -    0xc715: "ForwardMatrix2",
    -    0xc716: "PreviewApplicationName",
    -    0xc717: "PreviewApplicationVersion",
    -    0xc718: "PreviewSettingsName",
    -    0xc719: "PreviewSettingsDigest",
    -    0xc71a: "PreviewColorSpace",
    -    0xc71b: "PreviewDateTime",
    -    0xc71c: "RawImageDigest",
    -    0xc71d: "OriginalRawFileDigest",
    -    0xc71e: "SubTileBlockSize",
    -    0xc71f: "RowInterleaveFactor",
    -    0xc725: "ProfileLookTableDims",
    -    0xc726: "ProfileLookTableData",
    -    0xc740: "OpcodeList1",
    -    0xc741: "OpcodeList2",
    -    0xc74e: "OpcodeList3",
    -    0xc761: "NoiseProfile"
    +    0x9400: "AmbientTemperature",
    +    0x9401: "Humidity",
    +    0x9402: "Pressure",
    +    0x9403: "WaterDepth",
    +    0x9404: "Acceleration",
    +    0x9405: "CameraElevationAngle",
    +    0x9C9B: "XPTitle",
    +    0x9C9C: "XPComment",
    +    0x9C9D: "XPAuthor",
    +    0x9C9E: "XPKeywords",
    +    0x9C9F: "XPSubject",
    +    0xA000: "FlashPixVersion",
    +    0xA001: "ColorSpace",
    +    0xA002: "ExifImageWidth",
    +    0xA003: "ExifImageHeight",
    +    0xA004: "RelatedSoundFile",
    +    0xA005: "ExifInteroperabilityOffset",
    +    0xA20B: "FlashEnergy",
    +    0xA20C: "SpatialFrequencyResponse",
    +    0xA20E: "FocalPlaneXResolution",
    +    0xA20F: "FocalPlaneYResolution",
    +    0xA210: "FocalPlaneResolutionUnit",
    +    0xA214: "SubjectLocation",
    +    0xA215: "ExposureIndex",
    +    0xA217: "SensingMethod",
    +    0xA300: "FileSource",
    +    0xA301: "SceneType",
    +    0xA302: "CFAPattern",
    +    0xA401: "CustomRendered",
    +    0xA402: "ExposureMode",
    +    0xA403: "WhiteBalance",
    +    0xA404: "DigitalZoomRatio",
    +    0xA405: "FocalLengthIn35mmFilm",
    +    0xA406: "SceneCaptureType",
    +    0xA407: "GainControl",
    +    0xA408: "Contrast",
    +    0xA409: "Saturation",
    +    0xA40A: "Sharpness",
    +    0xA40B: "DeviceSettingDescription",
    +    0xA40C: "SubjectDistanceRange",
    +    0xA420: "ImageUniqueID",
    +    0xA430: "CameraOwnerName",
    +    0xA431: "BodySerialNumber",
    +    0xA432: "LensSpecification",
    +    0xA433: "LensMake",
    +    0xA434: "LensModel",
    +    0xA435: "LensSerialNumber",
    +    0xA500: "Gamma",
    +    0xC4A5: "PrintImageMatching",
    +    0xC612: "DNGVersion",
    +    0xC613: "DNGBackwardVersion",
    +    0xC614: "UniqueCameraModel",
    +    0xC615: "LocalizedCameraModel",
    +    0xC616: "CFAPlaneColor",
    +    0xC617: "CFALayout",
    +    0xC618: "LinearizationTable",
    +    0xC619: "BlackLevelRepeatDim",
    +    0xC61A: "BlackLevel",
    +    0xC61B: "BlackLevelDeltaH",
    +    0xC61C: "BlackLevelDeltaV",
    +    0xC61D: "WhiteLevel",
    +    0xC61E: "DefaultScale",
    +    0xC61F: "DefaultCropOrigin",
    +    0xC620: "DefaultCropSize",
    +    0xC621: "ColorMatrix1",
    +    0xC622: "ColorMatrix2",
    +    0xC623: "CameraCalibration1",
    +    0xC624: "CameraCalibration2",
    +    0xC625: "ReductionMatrix1",
    +    0xC626: "ReductionMatrix2",
    +    0xC627: "AnalogBalance",
    +    0xC628: "AsShotNeutral",
    +    0xC629: "AsShotWhiteXY",
    +    0xC62A: "BaselineExposure",
    +    0xC62B: "BaselineNoise",
    +    0xC62C: "BaselineSharpness",
    +    0xC62D: "BayerGreenSplit",
    +    0xC62E: "LinearResponseLimit",
    +    0xC62F: "CameraSerialNumber",
    +    0xC630: "LensInfo",
    +    0xC631: "ChromaBlurRadius",
    +    0xC632: "AntiAliasStrength",
    +    0xC633: "ShadowScale",
    +    0xC634: "DNGPrivateData",
    +    0xC635: "MakerNoteSafety",
    +    0xC65A: "CalibrationIlluminant1",
    +    0xC65B: "CalibrationIlluminant2",
    +    0xC65C: "BestQualityScale",
    +    0xC65D: "RawDataUniqueID",
    +    0xC68B: "OriginalRawFileName",
    +    0xC68C: "OriginalRawFileData",
    +    0xC68D: "ActiveArea",
    +    0xC68E: "MaskedAreas",
    +    0xC68F: "AsShotICCProfile",
    +    0xC690: "AsShotPreProfileMatrix",
    +    0xC691: "CurrentICCProfile",
    +    0xC692: "CurrentPreProfileMatrix",
    +    0xC6BF: "ColorimetricReference",
    +    0xC6F3: "CameraCalibrationSignature",
    +    0xC6F4: "ProfileCalibrationSignature",
    +    0xC6F6: "AsShotProfileName",
    +    0xC6F7: "NoiseReductionApplied",
    +    0xC6F8: "ProfileName",
    +    0xC6F9: "ProfileHueSatMapDims",
    +    0xC6FA: "ProfileHueSatMapData1",
    +    0xC6FB: "ProfileHueSatMapData2",
    +    0xC6FC: "ProfileToneCurve",
    +    0xC6FD: "ProfileEmbedPolicy",
    +    0xC6FE: "ProfileCopyright",
    +    0xC714: "ForwardMatrix1",
    +    0xC715: "ForwardMatrix2",
    +    0xC716: "PreviewApplicationName",
    +    0xC717: "PreviewApplicationVersion",
    +    0xC718: "PreviewSettingsName",
    +    0xC719: "PreviewSettingsDigest",
    +    0xC71A: "PreviewColorSpace",
    +    0xC71B: "PreviewDateTime",
    +    0xC71C: "RawImageDigest",
    +    0xC71D: "OriginalRawFileDigest",
    +    0xC71E: "SubTileBlockSize",
    +    0xC71F: "RowInterleaveFactor",
    +    0xC725: "ProfileLookTableDims",
    +    0xC726: "ProfileLookTableData",
    +    0xC740: "OpcodeList1",
    +    0xC741: "OpcodeList2",
    +    0xC74E: "OpcodeList3",
    +    0xC761: "NoiseProfile",
     }
     
     ##
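The ExifTags hunk is almost entirely a hex-literal case normalization; the substantive change is the new 0x9400-0x9405 block (AmbientTemperature through CameraElevationAngle). The mapping is consumed by translating numeric tag ids to names, e.g. (sketch; "photo.jpg" is a placeholder path):

    from PIL import Image
    from PIL.ExifTags import TAGS

    with Image.open("photo.jpg") as im:  # placeholder file
        exif = im.getexif()

    # Translate numeric EXIF tag ids into the names defined in TAGS.
    for tag_id, value in exif.items():
        print(TAGS.get(tag_id, hex(tag_id)), value)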
    diff --git a/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py
    index be926ca..c2ce865 100644
    --- a/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py
    @@ -23,6 +23,7 @@ def register_handler(handler):
         global _handler
         _handler = handler
     
    +
     # --------------------------------------------------------------------
     # Image adapter
     
    @@ -50,7 +51,7 @@ class FITSStubImageFile(ImageFile.StubImageFile):
     
             # make something up
             self.mode = "F"
    -        self.size = 1, 1
    +        self._size = 1, 1
     
             loader = self._load()
             if loader:
    @@ -62,7 +63,7 @@ class FITSStubImageFile(ImageFile.StubImageFile):
     
     def _save(im, fp, filename):
         if _handler is None or not hasattr("_handler", "save"):
    -        raise IOError("FITS save handler not installed")
    +        raise OSError("FITS save handler not installed")
         _handler.save(im, fp, filename)
     
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py
    index 2c190b6..9bf7d74 100644
    --- a/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py
    @@ -19,12 +19,10 @@
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, i16le as i16, i32le as i32, o8
     
    -__version__ = "0.2"
    -
    -
     #
     # decoder
     
    +
     def _accept(prefix):
         return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12]
     
    @@ -33,6 +31,7 @@ def _accept(prefix):
     # Image plugin for the FLI/FLC animation format.  Use the seek
     # method to load individual frames.
     
    +
     class FliImageFile(ImageFile.ImageFile):
     
         format = "FLI"
    @@ -44,9 +43,11 @@ class FliImageFile(ImageFile.ImageFile):
             # HEAD
             s = self.fp.read(128)
             magic = i16(s[4:6])
    -        if not (magic in [0xAF11, 0xAF12] and
    -                i16(s[14:16]) in [0, 3] and  # flags
    -                s[20:22] == b"\x00\x00"):  # reserved
    +        if not (
    +            magic in [0xAF11, 0xAF12]
    +            and i16(s[14:16]) in [0, 3]  # flags
    +            and s[20:22] == b"\x00\x00"  # reserved
    +        ):
                 raise SyntaxError("not an FLI/FLC file")
     
             # frames
    @@ -54,7 +55,7 @@ class FliImageFile(ImageFile.ImageFile):
     
             # image characteristics
             self.mode = "P"
    -        self.size = i16(s[8:10]), i16(s[10:12])
    +        self._size = i16(s[8:10]), i16(s[10:12])
     
             # animation speed
             duration = i32(s[16:20])
    @@ -82,7 +83,7 @@ class FliImageFile(ImageFile.ImageFile):
                 elif i16(s[4:6]) == 4:
                     self._palette(palette, 0)
     
    -        palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette]
    +        palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette]
             self.palette = ImagePalette.raw("RGB", b"".join(palette))
     
             # set things up to decode first frame
    @@ -104,8 +105,8 @@ class FliImageFile(ImageFile.ImageFile):
                 s = self.fp.read(n * 3)
                 for n in range(0, len(s), 3):
                     r = i8(s[n]) << shift
    -                g = i8(s[n+1]) << shift
    -                b = i8(s[n+2]) << shift
    +                g = i8(s[n + 1]) << shift
    +                b = i8(s[n + 2]) << shift
                     palette[i] = (r, g, b)
                     i += 1
     
    @@ -131,6 +132,9 @@ class FliImageFile(ImageFile.ImageFile):
                 self.__frame = -1
                 self.__fp.seek(self.__rewind)
                 self.__offset = 128
    +        else:
    +            # ensure that the previous frame was loaded
    +            self.load()
     
             if frame != self.__frame + 1:
                 raise ValueError("cannot seek to frame %d" % frame)
    @@ -147,13 +151,22 @@ class FliImageFile(ImageFile.ImageFile):
             framesize = i32(s)
     
             self.decodermaxblock = framesize
    -        self.tile = [("fli", (0, 0)+self.size, self.__offset, None)]
    +        self.tile = [("fli", (0, 0) + self.size, self.__offset, None)]
     
             self.__offset += framesize
     
         def tell(self):
             return self.__frame
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     #
     # registry
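The new `else: self.load()` branch in FliImageFile.seek matters because FLI/FLC frames are deltas: frame N must be decoded before frame N+1's delta can be applied on top of it. Normal frame iteration is unchanged (sketch; "anim.flc" is a placeholder path):

    from PIL import Image, ImageSequence

    with Image.open("anim.flc") as im:  # placeholder file
        for i, frame in enumerate(ImageSequence.Iterator(im)):
            # each frame is decoded on top of the previous one
            frame.convert("RGB").save("frame%03d.png" % i)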
    diff --git a/server/www/packages/packages-windows/x86/PIL/FontFile.py b/server/www/packages/packages-windows/x86/PIL/FontFile.py
    index 46e49bc..979a1e3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/FontFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/FontFile.py
    @@ -14,9 +14,9 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from __future__ import print_function
     
     import os
    +
     from . import Image, _binary
     
     WIDTH = 800
    @@ -33,7 +33,8 @@ def puti16(fp, values):
     ##
     # Base class for raster font file handlers.
     
    -class FontFile(object):
    +
    +class FontFile:
     
         bitmap = None
     
    @@ -46,7 +47,7 @@ class FontFile(object):
             return self.glyph[ix]
     
         def compile(self):
    -        "Create metrics and bitmap"
    +        """Create metrics and bitmap"""
     
             if self.bitmap:
                 return
    @@ -61,7 +62,7 @@ class FontFile(object):
                     w = w + (src[2] - src[0])
                     if w > WIDTH:
                         lines += 1
    -                    w = (src[2] - src[0])
    +                    w = src[2] - src[0]
                     maxwidth = max(maxwidth, w)
     
             xsize = maxwidth
    @@ -90,11 +91,10 @@ class FontFile(object):
                         x = xx
                     s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
                     self.bitmap.paste(im.crop(src), s)
    -                # print(chr(i), dst, s)
                     self.metrics[i] = d, dst, s
     
         def save(self, filename):
    -        "Save font"
    +        """Save font"""
     
             self.compile()
     
    @@ -104,7 +104,7 @@ class FontFile(object):
             # font metrics
             with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp:
                 fp.write(b"PILfont\n")
    -            fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii'))  # HACK!!!
    +            fp.write((";;;;;;%d;\n" % self.ysize).encode("ascii"))  # HACK!!!
                 fp.write(b"DATA\n")
                 for id in range(256):
                     m = self.metrics[id]
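The width accounting in FontFile.compile (partly visible in the hunk above) wraps glyphs into rows of at most WIDTH (800) pixels to size the atlas bitmap. The wrapping logic in isolation (standalone sketch, hypothetical glyph widths):

    WIDTH = 800

    def count_rows(glyph_widths):
        # Start a new row whenever the running width would exceed WIDTH,
        # mirroring FontFile.compile's accounting.
        lines, w, maxwidth = 1, 0, 0
        for gw in glyph_widths:
            w += gw
            if w > WIDTH:
                lines += 1
                w = gw
            maxwidth = max(maxwidth, w)
        return lines, maxwidth

    print(count_rows([300, 300, 300]))  # (2, 600): the third glyph wraps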
    diff --git a/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py
    index d7bba42..8d252c7 100644
    --- a/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py
    @@ -14,35 +14,31 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import print_function
    -
    -from . import Image, ImageFile
    -from ._binary import i32le as i32, i8
    -
     import olefile
     
    -__version__ = "0.1"
    +from . import Image, ImageFile
    +from ._binary import i8, i32le as i32
     
     # we map from colour field tuples to (mode, rawmode) descriptors
     MODES = {
         # opacity
    -    (0x00007ffe): ("A", "L"),
    +    (0x00007FFE): ("A", "L"),
         # monochrome
         (0x00010000,): ("L", "L"),
    -    (0x00018000, 0x00017ffe): ("RGBA", "LA"),
    +    (0x00018000, 0x00017FFE): ("RGBA", "LA"),
         # photo YCC
         (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
    -    (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"),
    +    (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"),
         # standard RGB (NIFRGB)
         (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
    -    (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"),
    +    (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"),
     }
     
     
     #
     # --------------------------------------------------------------------
     
    +
     def _accept(prefix):
         return prefix[:8] == olefile.MAGIC
     
    @@ -50,6 +46,7 @@ def _accept(prefix):
     ##
     # Image plugin for the FlashPix images.
     
    +
     class FpxImageFile(ImageFile.ImageFile):
     
         format = "FPX"
    @@ -62,7 +59,7 @@ class FpxImageFile(ImageFile.ImageFile):
     
             try:
                 self.ole = olefile.OleFileIO(self.fp)
    -        except IOError:
    +        except OSError:
                 raise SyntaxError("not an FPX file; invalid OLE file")
     
             if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
    @@ -74,14 +71,13 @@ class FpxImageFile(ImageFile.ImageFile):
             #
             # get the Image Contents Property Set
     
    -        prop = self.ole.getproperties([
    -            "Data Object Store %06d" % index,
    -            "\005Image Contents"
    -        ])
    +        prop = self.ole.getproperties(
    +            ["Data Object Store %06d" % index, "\005Image Contents"]
    +        )
     
             # size (highest resolution)
     
    -        self.size = prop[0x1000002], prop[0x1000003]
    +        self._size = prop[0x1000002], prop[0x1000003]
     
             size = max(self.size)
             i = 1
    @@ -101,9 +97,12 @@ class FpxImageFile(ImageFile.ImageFile):
             s = prop[0x2000002 | id]
     
             colors = []
    -        for i in range(i32(s, 4)):
    +        bands = i32(s, 4)
    +        if bands > 4:
    +            raise IOError("Invalid number of bands")
    +        for i in range(bands):
                 # note: for now, we ignore the "uncalibrated" flag
    -            colors.append(i32(s, 8+i*4) & 0x7fffffff)
    +            colors.append(i32(s, 8 + i * 4) & 0x7FFFFFFF)
     
             self.mode, self.rawmode = MODES[tuple(colors)]
     
    @@ -114,8 +113,6 @@ class FpxImageFile(ImageFile.ImageFile):
                 if id in prop:
                     self.jpeg[i] = prop[id]
     
    -        # print(len(self.jpeg), "tables loaded")
    -
             self._open_subimage(1, self.maxid)
     
         def _open_subimage(self, index=1, subimage=0):
    @@ -125,7 +122,7 @@ class FpxImageFile(ImageFile.ImageFile):
             stream = [
                 "Data Object Store %06d" % index,
                 "Resolution %04d" % subimage,
    -            "Subimage 0000 Header"
    +            "Subimage 0000 Header",
             ]
     
             fp = self.ole.openstream(stream)
    @@ -143,10 +140,8 @@ class FpxImageFile(ImageFile.ImageFile):
             offset = i32(s, 28)
             length = i32(s, 32)
     
    -        # print(size, self.mode, self.rawmode)
    -
             if size != self.size:
    -            raise IOError("subimage mismatch")
    +            raise OSError("subimage mismatch")
     
             # get tile descriptors
             fp.seek(28 + offset)
    @@ -159,17 +154,29 @@ class FpxImageFile(ImageFile.ImageFile):
     
             for i in range(0, len(s), length):
     
    -            compression = i32(s, i+8)
    +            compression = i32(s, i + 8)
     
                 if compression == 0:
    -                self.tile.append(("raw", (x, y, x+xtile, y+ytile),
    -                                 i32(s, i) + 28, (self.rawmode)))
    +                self.tile.append(
    +                    (
    +                        "raw",
    +                        (x, y, x + xtile, y + ytile),
    +                        i32(s, i) + 28,
    +                        (self.rawmode),
    +                    )
    +                )
     
                 elif compression == 1:
     
                     # FIXME: the fill decoder is not implemented
    -                self.tile.append(("fill", (x, y, x+xtile, y+ytile),
    -                                 i32(s, i) + 28, (self.rawmode, s[12:16])))
    +                self.tile.append(
    +                    (
    +                        "fill",
    +                        (x, y, x + xtile, y + ytile),
    +                        i32(s, i) + 28,
    +                        (self.rawmode, s[12:16]),
    +                    )
    +                )
     
                 elif compression == 2:
     
    @@ -191,8 +198,14 @@ class FpxImageFile(ImageFile.ImageFile):
                         # The image is stored as defined by rawmode
                         jpegmode = rawmode
     
    -                self.tile.append(("jpeg", (x, y, x+xtile, y+ytile),
    -                                 i32(s, i) + 28, (rawmode, jpegmode)))
    +                self.tile.append(
    +                    (
    +                        "jpeg",
    +                        (x, y, x + xtile, y + ytile),
    +                        i32(s, i) + 28,
    +                        (rawmode, jpegmode),
    +                    )
    +                )
     
                     # FIXME: jpeg tables are tile dependent; the prefix
                     # data must be placed in the tile descriptor itself!
    @@ -201,7 +214,7 @@ class FpxImageFile(ImageFile.ImageFile):
                         self.tile_prefix = self.jpeg[jpeg_tables]
     
                 else:
    -                raise IOError("unknown/invalid compression")
    +                raise OSError("unknown/invalid compression")
     
                 x = x + xtile
                 if x >= xsize:
    @@ -215,11 +228,11 @@ class FpxImageFile(ImageFile.ImageFile):
         def load(self):
     
             if not self.fp:
    -            self.fp = self.ole.openstream(self.stream[:2] +
    -                                          ["Subimage 0000 Data"])
    +            self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"])
     
             return ImageFile.ImageFile.load(self)
     
    +
     #
     # --------------------------------------------------------------------
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py
    index 9b98090..096ccac 100644
    --- a/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py
    @@ -9,7 +9,8 @@ Full text of the CC0 license:
     Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
     
     The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
    -packed custom format called FTEX. This file format uses file extensions FTC and FTU.
    +packed custom format called FTEX. This file format uses file extensions FTC
    +and FTU.
     * FTC files are compressed textures (using standard texture compression).
     * FTU files are not compressed.
     Texture File Format
    @@ -19,31 +20,41 @@ has the following structure:
     {format_directory}
     {data}
     Where:
    -{header} = { u32:magic, u32:version, u32:width, u32:height, u32:mipmap_count, u32:format_count }
    +{header} = {
    +    u32:magic,
    +    u32:version,
    +    u32:width,
    +    u32:height,
    +    u32:mipmap_count,
    +    u32:format_count
    +}
     
     * The "magic" number is "FTEX".
     * "width" and "height" are the dimensions of the texture.
     * "mipmap_count" is the number of mipmaps in the texture.
    -* "format_count" is the number of texture formats (different versions of the same texture) in this file.
    +* "format_count" is the number of texture formats (different versions of the
    +same texture) in this file.
     
     {format_directory} = format_count * { u32:format, u32:where }
     
    -The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB uncompressed textures.
    +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
    +uncompressed textures.
     The texture data for a format starts at the position "where" in the file.
     
     Each set of texture data in the file has the following structure:
     {data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
    -* "mipmap_size" is the number of bytes in that mip level. For compressed textures this is the
    -size of the texture data compressed with DXT1. For 24 bit uncompressed textures, this is 3 * width * height.
    -Following this are the image bytes for that mipmap level.
    +* "mipmap_size" is the number of bytes in that mip level. For compressed
    +textures this is the size of the texture data compressed with DXT1. For 24 bit
    +uncompressed textures, this is 3 * width * height. Following this are the image
    +bytes for that mipmap level.
     
     Note: All data is stored in little-Endian (Intel) byte order.
     """
     
     import struct
     from io import BytesIO
    -from . import Image, ImageFile
     
    +from . import Image, ImageFile
     
     MAGIC = b"FTEX"
     FORMAT_DXT1 = 0
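
A sketch of reading the header and format directory laid out in the docstring above; the function name and error handling are assumptions, not the plugin's own code:

    import struct

    def read_ftex_header(fp):
        # {header} = {u32:magic, u32:version, u32:width, u32:height,
        #             u32:mipmap_count, u32:format_count}, little-endian
        magic, version, width, height, mipmaps, formats = struct.unpack(
            "<4s5I", fp.read(24)
        )
        if magic != b"FTEX":
            raise ValueError("not an FTEX file")
        # {format_directory} = format_count * {u32:format, u32:where}
        directory = [struct.unpack("<2I", fp.read(8)) for _ in range(formats)]
        return (width, height), mipmaps, directory
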
    @@ -55,19 +66,20 @@ class FtexImageFile(ImageFile.ImageFile):
         format_description = "Texture File Format (IW2:EOC)"
     
         def _open(self):
-        magic = struct.unpack("<I", self.fp.read(4))
diff --git a/server/www/packages/packages-windows/x86/PIL/GdImageFile.py b/server/www/packages/packages-windows/x86/PIL/GdImageFile.py
--- a/server/www/packages/packages-windows/x86/PIL/GdImageFile.py
+++ b/server/www/packages/packages-windows/x86/PIL/GdImageFile.py
 # Image plugin for the GD uncompressed format.  Note that this format
 # is not supported by the standard Image.open function.  To use
     # this plugin, you have to import the GdImageFile module and
     # use the GdImageFile.open function.
     
    +
     class GdImageFile(ImageFile.ImageFile):
     
         format = "GD"
    @@ -49,19 +47,23 @@ class GdImageFile(ImageFile.ImageFile):
                 raise SyntaxError("Not a valid GD 2.x .gd file")
     
             self.mode = "L"  # FIXME: "P"
    -        self.size = i16(s[2:4]), i16(s[4:6])
    +        self._size = i16(s[2:4]), i16(s[4:6])
     
             trueColor = i8(s[6])
             trueColorOffset = 2 if trueColor else 0
     
             # transparency index
    -        tindex = i32(s[7+trueColorOffset:7+trueColorOffset+4])
    +        tindex = i32(s[7 + trueColorOffset : 7 + trueColorOffset + 4])
             if tindex < 256:
                 self.info["transparency"] = tindex
     
    -        self.palette = ImagePalette.raw("XBGR", s[7+trueColorOffset+4:7+trueColorOffset+4+256*4])
    +        self.palette = ImagePalette.raw(
    +            "XBGR", s[7 + trueColorOffset + 4 : 7 + trueColorOffset + 4 + 256 * 4]
    +        )
     
    -        self.tile = [("raw", (0, 0)+self.size, 7+trueColorOffset+4+256*4, ("L", 0, 1))]
    +        self.tile = [
    +            ("raw", (0, 0) + self.size, 7 + trueColorOffset + 4 + 256 * 4, ("L", 0, 1))
    +        ]
     
     
     def open(fp, mode="r"):
    @@ -80,4 +82,4 @@ def open(fp, mode="r"):
         try:
             return GdImageFile(fp)
         except SyntaxError:
    -        raise IOError("cannot identify this image file")
    +        raise UnidentifiedImageError("cannot identify this image file")
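
As the header comment above notes, .gd files carry no signature that Image.open() can recognise, so the module-level open() is called directly; a usage sketch with an illustrative path:

    from PIL import GdImageFile

    im = GdImageFile.open("chart.gd")  # hypothetical file
    print(im.mode, im.size)
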
    diff --git a/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py
    index 1bfbb5f..1d94fc7 100644
    --- a/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py
    @@ -24,17 +24,18 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image, ImageFile, ImagePalette, ImageChops, ImageSequence
    -from ._binary import i8, i16le as i16, o8, o16le as o16
    -
     import itertools
    +import math
    +import os
    +import subprocess
     
    -__version__ = "0.9"
    -
    +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence
    +from ._binary import i8, i16le as i16, o8, o16le as o16
     
     # --------------------------------------------------------------------
     # Identify/read GIF files
     
    +
     def _accept(prefix):
         return prefix[:6] in [b"GIF87a", b"GIF89a"]
     
    @@ -43,6 +44,7 @@ def _accept(prefix):
     # Image plugin for GIF images.  This plugin supports both GIF87 and
     # GIF89 images.
     
    +
     class GifImageFile(ImageFile.ImageFile):
     
         format = "GIF"
    @@ -65,7 +67,7 @@ class GifImageFile(ImageFile.ImageFile):
                 raise SyntaxError("not a GIF file")
     
             self.info["version"] = s[:6]
    -        self.size = i16(s[6:]), i16(s[8:])
    +        self._size = i16(s[6:]), i16(s[8:])
             self.tile = []
             flags = i8(s[10])
             bits = (flags & 7) + 1
    @@ -76,7 +78,7 @@ class GifImageFile(ImageFile.ImageFile):
                 # check if palette contains colour indices
                 p = self.fp.read(3 << bits)
                 for i in range(0, len(p), 3):
    -                if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])):
    +                if not (i // 3 == i8(p[i]) == i8(p[i + 1]) == i8(p[i + 2])):
                         p = ImagePalette.raw("RGB", p)
                         self.global_palette = self.palette = p
                         break
    @@ -120,6 +122,8 @@ class GifImageFile(ImageFile.ImageFile):
             if not self._seek_check(frame):
                 return
             if frame < self.__frame:
    +            if frame != 0:
    +                self.im = None
                 self._seek(0)
     
             last_frame = self.__frame
    @@ -164,8 +168,10 @@ class GifImageFile(ImageFile.ImageFile):
                 self.im.paste(self.dispose, self.dispose_extent)
     
             from copy import copy
    +
             self.palette = copy(self.global_palette)
     
    +        info = {}
             while True:
     
                 s = self.fp.read(1)
    @@ -184,8 +190,8 @@ class GifImageFile(ImageFile.ImageFile):
                         #
                         flags = i8(block[0])
                         if flags & 1:
    -                        self.info["transparency"] = i8(block[3])
    -                    self.info["duration"] = i16(block[1:3]) * 10
    +                        info["transparency"] = i8(block[3])
    +                    info["duration"] = i16(block[1:3]) * 10
     
                         # disposal method - find the value of bits 4 - 6
                         dispose_bits = 0b00011100 & flags
    @@ -200,16 +206,22 @@ class GifImageFile(ImageFile.ImageFile):
                         #
                         # comment extension
                         #
    -                    self.info["comment"] = block
    +                    while block:
    +                        if "comment" in info:
    +                            info["comment"] += block
    +                        else:
    +                            info["comment"] = block
    +                        block = self.data()
    +                    continue
                     elif i8(s) == 255:
                         #
                         # application extension
                         #
    -                    self.info["extension"] = block, self.fp.tell()
    +                    info["extension"] = block, self.fp.tell()
                         if block[:11] == b"NETSCAPE2.0":
                             block = self.data()
                             if len(block) >= 3 and i8(block[0]) == 1:
    -                            self.info["loop"] = i16(block[1:3])
    +                            info["loop"] = i16(block[1:3])
                     while self.data():
                         pass
     
    @@ -222,6 +234,8 @@ class GifImageFile(ImageFile.ImageFile):
                     # extent
                     x0, y0 = i16(s[0:]), i16(s[2:])
                     x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:])
    +                if x1 > self.size[0] or y1 > self.size[1]:
    +                    self._size = max(x1, self.size[0]), max(y1, self.size[1])
                     self.dispose_extent = x0, y0, x1, y1
                     flags = i8(s[8])
     
    @@ -229,16 +243,14 @@ class GifImageFile(ImageFile.ImageFile):
     
                     if flags & 128:
                         bits = (flags & 7) + 1
    -                    self.palette =\
    -                        ImagePalette.raw("RGB", self.fp.read(3 << bits))
    +                    self.palette = ImagePalette.raw("RGB", self.fp.read(3 << bits))
     
                     # image data
                     bits = i8(self.fp.read(1))
                     self.__offset = self.fp.tell()
    -                self.tile = [("gif",
    -                             (x0, y0, x1, y1),
    -                             self.__offset,
    -                             (bits, interlace))]
    +                self.tile = [
    +                    ("gif", (x0, y0, x1, y1), self.__offset, (bits, interlace))
    +                ]
                     break
     
                 else:
    @@ -251,8 +263,8 @@ class GifImageFile(ImageFile.ImageFile):
                     self.dispose = None
                 elif self.disposal_method == 2:
                     # replace with background colour
    -                self.dispose = Image.core.fill("P", self.size,
    -                                               self.info["background"])
    +                Image._decompression_bomb_check(self.size)
    +                self.dispose = Image.core.fill("P", self.size, self.info["background"])
                 else:
                     # replace with previous contents
                     if self.im:
    @@ -268,6 +280,12 @@ class GifImageFile(ImageFile.ImageFile):
                 # self.__fp = None
                 raise EOFError
     
    +        for k in ["transparency", "duration", "comment", "extension", "loop"]:
    +            if k in info:
    +                self.info[k] = info[k]
    +            elif k in self.info:
    +                del self.info[k]
    +
             self.mode = "L"
             if self.palette:
                 self.mode = "P"
    @@ -284,20 +302,25 @@ class GifImageFile(ImageFile.ImageFile):
                 # we do this by pasting the updated area onto the previous
                 # frame which we then use as the current image content
                 updated = self._crop(self.im, self.dispose_extent)
    -            self._prev_im.paste(updated, self.dispose_extent,
    -                                updated.convert('RGBA'))
    +            self._prev_im.paste(updated, self.dispose_extent, updated.convert("RGBA"))
                 self.im = self._prev_im
             self._prev_im = self.im.copy()
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
    +
     # --------------------------------------------------------------------
     # Write GIF files
     
     
    -RAWMODE = {
    -    "1": "L",
    -    "L": "L",
    -    "P": "P"
    -}
    +RAWMODE = {"1": "L", "L": "L", "P": "P"}
     
     
     def _normalize_mode(im, initial_call=False):
    @@ -348,19 +371,23 @@ def _normalize_palette(im, palette, info):
             if isinstance(palette, (bytes, bytearray, list)):
                 source_palette = bytearray(palette[:768])
             if isinstance(palette, ImagePalette.ImagePalette):
    -            source_palette = bytearray(itertools.chain.from_iterable(
    -                                zip(palette.palette[:256],
    -                                    palette.palette[256:512],
    -                                    palette.palette[512:768])))
    +            source_palette = bytearray(
    +                itertools.chain.from_iterable(
    +                    zip(
    +                        palette.palette[:256],
    +                        palette.palette[256:512],
    +                        palette.palette[512:768],
    +                    )
    +                )
    +            )
     
         if im.mode == "P":
             if not source_palette:
                 source_palette = im.im.getpalette("RGB")[:768]
         else:  # L-mode
             if not source_palette:
    -            source_palette = bytearray(i//3 for i in range(768))
    -        im.palette = ImagePalette.ImagePalette("RGB",
    -                                               palette=source_palette)
    +            source_palette = bytearray(i // 3 for i in range(768))
    +        im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette)
     
         used_palette_colors = _get_optimize(im, info)
         if used_palette_colors is not None:
    @@ -372,6 +399,8 @@ def _normalize_palette(im, palette, info):
     
     def _write_single_frame(im, fp, palette):
         im_out = _normalize_mode(im, True)
    +    for k, v in im_out.info.items():
    +        im.encoderinfo.setdefault(k, v)
         im_out = _normalize_palette(im_out, palette, im.encoderinfo)
     
         for s in _get_global_header(im_out, im.encoderinfo):
    @@ -384,28 +413,31 @@ def _write_single_frame(im, fp, palette):
         _write_local_header(fp, im, (0, 0), flags)
     
         im_out.encoderconfig = (8, get_interlace(im))
    -    ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0,
    -                                  RAWMODE[im_out.mode])])
    +    ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])])
     
         fp.write(b"\0")  # end of image data
     
     
     def _write_multiple_frames(im, fp, palette):
     
    -    duration = im.encoderinfo.get("duration", None)
    -    disposal = im.encoderinfo.get('disposal', None)
    +    duration = im.encoderinfo.get("duration", im.info.get("duration"))
    +    disposal = im.encoderinfo.get("disposal", im.info.get("disposal"))
     
         im_frames = []
         frame_count = 0
    +    background_im = None
         for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])):
             for im_frame in ImageSequence.Iterator(imSequence):
                 # a copy is required here since seek can still mutate the image
                 im_frame = _normalize_mode(im_frame.copy())
    +            if frame_count == 0:
    +                for k, v in im_frame.info.items():
    +                    im.encoderinfo.setdefault(k, v)
                 im_frame = _normalize_palette(im_frame, palette, im.encoderinfo)
     
                 encoderinfo = im.encoderinfo.copy()
                 if isinstance(duration, (list, tuple)):
    -                encoderinfo['duration'] = duration[frame_count]
    +                encoderinfo["duration"] = duration[frame_count]
                 if isinstance(disposal, (list, tuple)):
                     encoderinfo["disposal"] = disposal[frame_count]
                 frame_count += 1
    @@ -413,43 +445,54 @@ def _write_multiple_frames(im, fp, palette):
                 if im_frames:
                     # delta frame
                     previous = im_frames[-1]
    -                if _get_palette_bytes(im_frame) == _get_palette_bytes(previous['im']):
    -                    delta = ImageChops.subtract_modulo(im_frame,
    -                                                       previous['im'])
    +                if encoderinfo.get("disposal") == 2:
    +                    if background_im is None:
    +                        background = _get_background(
    +                            im,
    +                            im.encoderinfo.get("background", im.info.get("background")),
    +                        )
    +                        background_im = Image.new("P", im_frame.size, background)
    +                        background_im.putpalette(im_frames[0]["im"].palette)
    +                    base_im = background_im
                     else:
    -                    delta = ImageChops.subtract_modulo(im_frame.convert('RGB'),
    -                                                       previous['im'].convert('RGB'))
    +                    base_im = previous["im"]
    +                if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im):
    +                    delta = ImageChops.subtract_modulo(im_frame, base_im)
    +                else:
    +                    delta = ImageChops.subtract_modulo(
    +                        im_frame.convert("RGB"), base_im.convert("RGB")
    +                    )
                     bbox = delta.getbbox()
                     if not bbox:
                         # This frame is identical to the previous frame
                         if duration:
    -                        previous['encoderinfo']['duration'] += encoderinfo['duration']
    +                        previous["encoderinfo"]["duration"] += encoderinfo["duration"]
                         continue
                 else:
                     bbox = None
    -            im_frames.append({
    -                'im': im_frame,
    -                'bbox': bbox,
    -                'encoderinfo': encoderinfo
    -            })
    +            im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
     
         if len(im_frames) > 1:
             for frame_data in im_frames:
    -            im_frame = frame_data['im']
    -            if not frame_data['bbox']:
    +            im_frame = frame_data["im"]
    +            if not frame_data["bbox"]:
                     # global header
    -                for s in _get_global_header(im_frame,
    -                                            frame_data['encoderinfo']):
    +                for s in _get_global_header(im_frame, frame_data["encoderinfo"]):
                         fp.write(s)
                     offset = (0, 0)
                 else:
                     # compress difference
    -                frame_data['encoderinfo']['include_color_table'] = True
    +                frame_data["encoderinfo"]["include_color_table"] = True
     
    -                im_frame = im_frame.crop(frame_data['bbox'])
    -                offset = frame_data['bbox'][:2]
    -            _write_frame_data(fp, im_frame, offset, frame_data['encoderinfo'])
    +                im_frame = im_frame.crop(frame_data["bbox"])
    +                offset = frame_data["bbox"][:2]
    +            _write_frame_data(fp, im_frame, offset, frame_data["encoderinfo"])
             return True
    +    elif "duration" in im.encoderinfo and isinstance(
    +        im.encoderinfo["duration"], (list, tuple)
    +    ):
    +        # Since multiple frames will not be written, add together the frame durations
    +        im.encoderinfo["duration"] = sum(im.encoderinfo["duration"])
     
     
     def _save_all(im, fp, filename):
    @@ -457,12 +500,10 @@ def _save_all(im, fp, filename):
     
     
     def _save(im, fp, filename, save_all=False):
    -    for k, v in im.info.items():
    -        im.encoderinfo.setdefault(k, v)
         # header
    -    try:
    -        palette = im.encoderinfo["palette"]
    -    except KeyError:
    +    if "palette" in im.encoderinfo or "palette" in im.info:
    +        palette = im.encoderinfo.get("palette", im.info.get("palette"))
    +    else:
             palette = None
             im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True)
     
    @@ -509,7 +550,7 @@ def _write_local_header(fp, im, offset, flags):
         else:
             duration = 0
     
    -    disposal = int(im.encoderinfo.get('disposal', 0))
    +    disposal = int(im.encoderinfo.get("disposal", 0))
     
         if transparent_color_exists or duration != 0 or disposal:
             packed_flag = 1 if transparent_color_exists else 0
    @@ -517,48 +558,56 @@ def _write_local_header(fp, im, offset, flags):
             if not transparent_color_exists:
                 transparency = 0
     
    -        fp.write(b"!" +
    -                 o8(249) +                # extension intro
    -                 o8(4) +                  # length
    -                 o8(packed_flag) +        # packed fields
    -                 o16(duration) +          # duration
    -                 o8(transparency) +       # transparency index
    -                 o8(0))
    +        fp.write(
    +            b"!"
    +            + o8(249)  # extension intro
    +            + o8(4)  # length
    +            + o8(packed_flag)  # packed fields
    +            + o16(duration)  # duration
    +            + o8(transparency)  # transparency index
    +            + o8(0)
    +        )
     
    -    if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]) <= 255:
    -        fp.write(b"!" +
    -                 o8(254) +                # extension intro
    -                 o8(len(im.encoderinfo["comment"])) +
    -                 im.encoderinfo["comment"] +
    -                 o8(0))
    +    if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]):
    +        fp.write(b"!" + o8(254))  # extension intro
    +        comment = im.encoderinfo["comment"]
    +        if isinstance(comment, str):
    +            comment = comment.encode()
    +        for i in range(0, len(comment), 255):
    +            subblock = comment[i : i + 255]
    +            fp.write(o8(len(subblock)) + subblock)
    +        fp.write(o8(0))
         if "loop" in im.encoderinfo:
             number_of_loops = im.encoderinfo["loop"]
    -        fp.write(b"!" +
    -                 o8(255) +                # extension intro
    -                 o8(11) +
    -                 b"NETSCAPE2.0" +
    -                 o8(3) +
    -                 o8(1) +
    -                 o16(number_of_loops) +   # number of loops
    -                 o8(0))
    -    include_color_table = im.encoderinfo.get('include_color_table')
    +        fp.write(
    +            b"!"
    +            + o8(255)  # extension intro
    +            + o8(11)
    +            + b"NETSCAPE2.0"
    +            + o8(3)
    +            + o8(1)
    +            + o16(number_of_loops)  # number of loops
    +            + o8(0)
    +        )
    +    include_color_table = im.encoderinfo.get("include_color_table")
         if include_color_table:
    -        palette = im.encoderinfo.get("palette", None)
             palette_bytes = _get_palette_bytes(im)
             color_table_size = _get_color_table_size(palette_bytes)
             if color_table_size:
    -            flags = flags | 128               # local color table flag
    +            flags = flags | 128  # local color table flag
                 flags = flags | color_table_size
     
    -    fp.write(b"," +
    -             o16(offset[0]) +             # offset
    -             o16(offset[1]) +
    -             o16(im.size[0]) +            # size
    -             o16(im.size[1]) +
    -             o8(flags))                   # flags
    +    fp.write(
    +        b","
    +        + o16(offset[0])  # offset
    +        + o16(offset[1])
    +        + o16(im.size[0])  # size
    +        + o16(im.size[1])
    +        + o8(flags)  # flags
    +    )
         if include_color_table and color_table_size:
             fp.write(_get_header_palette(palette_bytes))
    -    fp.write(o8(8))                       # bits
    +    fp.write(o8(8))  # bits
     
     
     def _save_netpbm(im, fp, filename):
    @@ -569,40 +618,44 @@ def _save_netpbm(im, fp, filename):
         # If you need real GIF compression and/or RGB quantization, you
         # can use the external NETPBM/PBMPLUS utilities.  See comments
         # below for information on how to enable this.
    -
    -    import os
    -    from subprocess import Popen, check_call, PIPE, CalledProcessError
    -    file = im._dump()
    -
    -    with open(filename, 'wb') as f:
    -        if im.mode != "RGB":
    -            with open(os.devnull, 'wb') as devnull:
    -                check_call(["ppmtogif", file], stdout=f, stderr=devnull)
    -        else:
    -            # Pipe ppmquant output into ppmtogif
    -            # "ppmquant 256 %s | ppmtogif > %s" % (file, filename)
    -            quant_cmd = ["ppmquant", "256", file]
    -            togif_cmd = ["ppmtogif"]
    -            with open(os.devnull, 'wb') as devnull:
    -                quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=devnull)
    -                togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout,
    -                                   stdout=f, stderr=devnull)
    -
    -            # Allow ppmquant to receive SIGPIPE if ppmtogif exits
    -            quant_proc.stdout.close()
    -
    -            retcode = quant_proc.wait()
    -            if retcode:
    -                raise CalledProcessError(retcode, quant_cmd)
    -
    -            retcode = togif_proc.wait()
    -            if retcode:
    -                raise CalledProcessError(retcode, togif_cmd)
    +    tempfile = im._dump()
     
         try:
    -        os.unlink(file)
    -    except OSError:
    -        pass
    +        with open(filename, "wb") as f:
    +            if im.mode != "RGB":
    +                subprocess.check_call(
    +                    ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL
    +                )
    +            else:
    +                # Pipe ppmquant output into ppmtogif
    +                # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename)
    +                quant_cmd = ["ppmquant", "256", tempfile]
    +                togif_cmd = ["ppmtogif"]
    +                quant_proc = subprocess.Popen(
    +                    quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
    +                )
    +                togif_proc = subprocess.Popen(
    +                    togif_cmd,
    +                    stdin=quant_proc.stdout,
    +                    stdout=f,
    +                    stderr=subprocess.DEVNULL,
    +                )
    +
    +                # Allow ppmquant to receive SIGPIPE if ppmtogif exits
    +                quant_proc.stdout.close()
    +
    +                retcode = quant_proc.wait()
    +                if retcode:
    +                    raise subprocess.CalledProcessError(retcode, quant_cmd)
    +
    +                retcode = togif_proc.wait()
    +                if retcode:
    +                    raise subprocess.CalledProcessError(retcode, togif_cmd)
    +    finally:
    +        try:
    +            os.unlink(tempfile)
    +        except OSError:
    +            pass
     
     
     # Force optimization so that we can test performance against
    @@ -632,7 +685,7 @@ def _get_optimize(im, info):
             # * If we have a 'large' image, the palette is in the noise.
     
             # create the new palette if not every color is used
    -        optimise = _FORCE_OPTIMIZE or im.mode == 'L'
    +        optimise = _FORCE_OPTIMIZE or im.mode == "L"
             if optimise or im.width * im.height < 512 * 512:
                 # check which colors are used
                 used_palette_colors = []
    @@ -640,18 +693,21 @@ def _get_optimize(im, info):
                     if count:
                         used_palette_colors.append(i)
     
    -            if optimise or (len(used_palette_colors) <= 128 and
    -               max(used_palette_colors) > len(used_palette_colors)):
    +            if optimise or (
    +                len(used_palette_colors) <= 128
    +                and max(used_palette_colors) > len(used_palette_colors)
    +            ):
                     return used_palette_colors
     
     
     def _get_color_table_size(palette_bytes):
         # calculate the palette size for the header
    -    import math
    -    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
    -    if color_table_size < 0:
    -        color_table_size = 0
    -    return color_table_size
    +    if not palette_bytes:
    +        return 0
    +    elif len(palette_bytes) < 9:
    +        return 1
    +    else:
    +        return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
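
A worked check of the rewritten _get_color_table_size: a full 256-colour RGB palette is 768 bytes, so the packed-field value comes out as 7, which encodes 2 << 7 == 256 entries:

    import math

    palette_bytes = b"\x00" * 768  # 256 RGB triples
    size = math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
    assert size == 7 and (2 << size) == 256
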
     
     
     def _get_header_palette(palette_bytes):
    @@ -666,7 +722,7 @@ def _get_header_palette(palette_bytes):
     
         # add the missing amount of bytes
     # the palette has to be 2<<n in size
     actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
     if actual_target_size_diff > 0:
             palette_bytes += o8(0) * 3 * actual_target_size_diff
         return palette_bytes
    @@ -682,6 +738,18 @@ def _get_palette_bytes(im):
         return im.palette.palette
     
     
    +def _get_background(im, infoBackground):
    +    background = 0
    +    if infoBackground:
    +        background = infoBackground
    +        if isinstance(background, tuple):
    +            # WebPImagePlugin stores an RGBA value in info["background"]
    +            # So it must be converted to the same format as GifImagePlugin's
    +            # info["background"] - a global color table index
    +            background = im.palette.getcolor(background)
    +    return background
    +
    +
     def _get_global_header(im, info):
         """Return a list of strings representing a GIF header"""
     
    @@ -691,8 +759,9 @@ def _get_global_header(im, info):
         version = b"87a"
         for extensionKey in ["transparency", "duration", "loop", "comment"]:
             if info and extensionKey in info:
    -            if ((extensionKey == "duration" and info[extensionKey] == 0) or
    -               (extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255))):
    +            if (extensionKey == "duration" and info[extensionKey] == 0) or (
    +                extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255)
    +            ):
                     continue
                 version = b"89a"
                 break
    @@ -700,24 +769,23 @@ def _get_global_header(im, info):
             if im.info.get("version") == b"89a":
                 version = b"89a"
     
    +    background = _get_background(im, info.get("background"))
    +
         palette_bytes = _get_palette_bytes(im)
         color_table_size = _get_color_table_size(palette_bytes)
     
    -    background = info["background"] if "background" in info else 0
    -
         return [
    -        b"GIF"+version +               # signature + version
    -        o16(im.size[0]) +              # canvas width
    -        o16(im.size[1]),               # canvas height
    -
    +        b"GIF"  # signature
    +        + version  # version
    +        + o16(im.size[0])  # canvas width
    +        + o16(im.size[1]),  # canvas height
             # Logical Screen Descriptor
             # size of global color table + global color table flag
    -        o8(color_table_size + 128),   # packed fields
    +        o8(color_table_size + 128),  # packed fields
             # background + reserved/aspect
             o8(background) + o8(0),
    -
             # Global Color Table
    -        _get_header_palette(palette_bytes)
    +        _get_header_palette(palette_bytes),
         ]
     
     
    @@ -728,13 +796,15 @@ def _write_frame_data(fp, im_frame, offset, params):
             # local image header
             _write_local_header(fp, im_frame, offset, 0)
     
    -        ImageFile._save(im_frame, fp, [("gif", (0, 0)+im_frame.size, 0,
    -                                        RAWMODE[im_frame.mode])])
    +        ImageFile._save(
    +            im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])]
    +        )
     
             fp.write(b"\0")  # end of image data
         finally:
             del im_frame.encoderinfo
     
    +
     # --------------------------------------------------------------------
     # Legacy GIF utilities
     
    @@ -783,7 +853,8 @@ def getdata(im, offset=(0, 0), **params):
         :returns: List of Bytes containing gif encoded frame data
     
         """
    -    class Collector(object):
    +
    +    class Collector:
             data = []
     
             def write(self, data):
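
The reworked comment writer above emits GIF data sub-blocks: a length byte (1-255), the payload, then a zero terminator. A standalone sketch of that chunking (the helper name is invented):

    def comment_subblocks(comment: bytes):
        for i in range(0, len(comment), 255):
            block = comment[i : i + 255]
            yield bytes([len(block)]) + block  # length byte + payload
        yield b"\x00"  # sub-block terminator
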
    diff --git a/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py b/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py
    index 10593da..1cacf57 100644
    --- a/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py
    @@ -13,7 +13,8 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from math import pi, log, sin, sqrt
    +from math import log, pi, sin, sqrt
    +
     from ._binary import o8
     
     # --------------------------------------------------------------------
    @@ -59,7 +60,7 @@ def sphere_decreasing(middle, pos):
     SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]
     
     
    -class GradientFile(object):
    +class GradientFile:
     
         gradient = None
     
    @@ -72,7 +73,7 @@ class GradientFile(object):
     
             for i in range(entries):
     
    -            x = i / float(entries-1)
    +            x = i / (entries - 1)
     
                 while x1 < x:
                     ix += 1
    @@ -100,8 +101,8 @@ class GradientFile(object):
     ##
     # File handler for GIMP's gradient format.
     
    -class GimpGradientFile(GradientFile):
     
    +class GimpGradientFile(GradientFile):
         def __init__(self, fp):
     
             if fp.readline()[:13] != b"GIMP Gradient":
    @@ -131,7 +132,7 @@ class GimpGradientFile(GradientFile):
                 cspace = int(s[12])
     
                 if cspace != 0:
    -                raise IOError("cannot handle HSV colour space")
    +                raise OSError("cannot handle HSV colour space")
     
                 gradient.append((x0, x1, xm, rgb0, rgb1, segment))
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py b/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py
    index 6eef6a2..e3060ab 100644
    --- a/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py
    @@ -15,31 +15,30 @@
     #
     
     import re
    -from ._binary import o8
     
    +from ._binary import o8
     
     ##
     # File handler for GIMP's palette format.
     
    -class GimpPaletteFile(object):
    +
    +class GimpPaletteFile:
     
         rawmode = "RGB"
     
         def __init__(self, fp):
     
    -        self.palette = [o8(i)*3 for i in range(256)]
    +        self.palette = [o8(i) * 3 for i in range(256)]
     
             if fp.readline()[:12] != b"GIMP Palette":
                 raise SyntaxError("not a GIMP palette file")
     
    -        i = 0
    -
    -        while i <= 255:
    +        for i in range(256):
     
                 s = fp.readline()
    -
                 if not s:
                     break
    +
                 # skip fields and comment lines
                 if re.match(br"\w+:|#", s):
                     continue
    @@ -50,10 +49,7 @@ class GimpPaletteFile(object):
                 if len(v) != 3:
                     raise ValueError("bad palette entry")
     
    -            if 0 <= i <= 255:
    -                self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])
    -
    -            i += 1
    +            self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])
     
             self.palette = b"".join(self.palette)
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py
    index 33c8291..515c272 100644
    --- a/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py
    @@ -28,6 +28,7 @@ def register_handler(handler):
     # --------------------------------------------------------------------
     # Image adapter
     
    +
     def _accept(prefix):
         return prefix[0:4] == b"GRIB" and i8(prefix[7]) == 1
     
    @@ -48,7 +49,7 @@ class GribStubImageFile(ImageFile.StubImageFile):
     
             # make something up
             self.mode = "F"
    -        self.size = 1, 1
    +        self._size = 1, 1
     
             loader = self._load()
             if loader:
    @@ -60,7 +61,7 @@ class GribStubImageFile(ImageFile.StubImageFile):
     
     def _save(im, fp, filename):
         if _handler is None or not hasattr("_handler", "save"):
    -        raise IOError("GRIB save handler not installed")
    +        raise OSError("GRIB save handler not installed")
         _handler.save(im, fp, filename)
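
The stub only becomes usable once an application registers a handler object; a hedged sketch of the open/load/save protocol implied by StubImageFile and _save above (the class and method bodies are illustrative). The same hook exists for Hdf5StubImagePlugin below:

    from PIL import GribStubImagePlugin

    class GribHandler:
        def open(self, im):
            pass  # set im.mode / size from real GRIB metadata

        def load(self, im):
            pass  # decode and return the image data for im

        def save(self, im, fp, filename):
            raise NotImplementedError  # writing GRIB is out of scope here

    GribStubImagePlugin.register_handler(GribHandler())
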
     
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py
    index de4d5bb..362f2d3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py
    @@ -27,6 +27,7 @@ def register_handler(handler):
     # --------------------------------------------------------------------
     # Image adapter
     
    +
     def _accept(prefix):
         return prefix[:8] == b"\x89HDF\r\n\x1a\n"
     
    @@ -47,7 +48,7 @@ class HDF5StubImageFile(ImageFile.StubImageFile):
     
             # make something up
             self.mode = "F"
    -        self.size = 1, 1
    +        self._size = 1, 1
     
             loader = self._load()
             if loader:
    @@ -59,7 +60,7 @@ class HDF5StubImageFile(ImageFile.StubImageFile):
     
     def _save(im, fp, filename):
         if _handler is None or not hasattr("_handler", "save"):
    -        raise IOError("HDF5 save handler not installed")
    +        raise OSError("HDF5 save handler not installed")
         _handler.save(im, fp, filename)
     
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py
    index dc93f6a..c003926 100644
    --- a/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py
    @@ -15,16 +15,18 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from PIL import Image, ImageFile, PngImagePlugin
    -from PIL._binary import i8
     import io
     import os
     import shutil
     import struct
    +import subprocess
     import sys
     import tempfile
     
    -enable_jpeg2k = hasattr(Image.core, 'jp2klib_version')
    +from PIL import Image, ImageFile, PngImagePlugin
    +from PIL._binary import i8
    +
    +enable_jpeg2k = hasattr(Image.core, "jp2klib_version")
     if enable_jpeg2k:
         from PIL import Jpeg2KImagePlugin
     
    @@ -32,7 +34,7 @@ HEADERSIZE = 8
     
     
     def nextheader(fobj):
    -    return struct.unpack('>4sI', fobj.read(HEADERSIZE))
    +    return struct.unpack(">4sI", fobj.read(HEADERSIZE))
     
     
     def read_32t(fobj, start_length, size):
    @@ -40,8 +42,8 @@ def read_32t(fobj, start_length, size):
         (start, length) = start_length
         fobj.seek(start)
         sig = fobj.read(4)
    -    if sig != b'\x00\x00\x00\x00':
    -        raise SyntaxError('Unknown signature, expecting 0x00000000')
    +    if sig != b"\x00\x00\x00\x00":
    +        raise SyntaxError("Unknown signature, expecting 0x00000000")
         return read_32(fobj, (start + 4, length - 4), size)
     
     
    @@ -81,12 +83,8 @@ def read_32(fobj, start_length, size):
                     if bytesleft <= 0:
                         break
                 if bytesleft != 0:
    -                raise SyntaxError(
    -                    "Error reading channel [%r left]" % bytesleft
    -                    )
    -            band = Image.frombuffer(
    -                "L", pixel_size, b"".join(data), "raw", "L", 0, 1
    -                )
    +                raise SyntaxError("Error reading channel [%r left]" % bytesleft)
    +            band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1)
                 im.im.putband(band.im, band_ix)
         return {"RGB": im}
     
    @@ -97,9 +95,7 @@ def read_mk(fobj, start_length, size):
         fobj.seek(start)
         pixel_size = (size[0] * size[2], size[1] * size[2])
         sizesq = pixel_size[0] * pixel_size[1]
    -    band = Image.frombuffer(
    -        "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1
    -        )
    +    band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1)
         return {"A": band}
     
     
    @@ -107,73 +103,58 @@ def read_png_or_jpeg2000(fobj, start_length, size):
         (start, length) = start_length
         fobj.seek(start)
         sig = fobj.read(12)
    -    if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a':
    +    if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a":
             fobj.seek(start)
             im = PngImagePlugin.PngImageFile(fobj)
             return {"RGBA": im}
    -    elif sig[:4] == b'\xff\x4f\xff\x51' \
    -            or sig[:4] == b'\x0d\x0a\x87\x0a' \
    -            or sig == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a':
    +    elif (
    +        sig[:4] == b"\xff\x4f\xff\x51"
    +        or sig[:4] == b"\x0d\x0a\x87\x0a"
    +        or sig == b"\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a"
    +    ):
             if not enable_jpeg2k:
    -            raise ValueError('Unsupported icon subimage format (rebuild PIL '
    -                             'with JPEG 2000 support to fix this)')
    +            raise ValueError(
    +                "Unsupported icon subimage format (rebuild PIL "
    +                "with JPEG 2000 support to fix this)"
    +            )
             # j2k, jpc or j2c
             fobj.seek(start)
             jp2kstream = fobj.read(length)
             f = io.BytesIO(jp2kstream)
             im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
    -        if im.mode != 'RGBA':
    -            im = im.convert('RGBA')
    +        if im.mode != "RGBA":
    +            im = im.convert("RGBA")
             return {"RGBA": im}
         else:
    -        raise ValueError('Unsupported icon subimage format')
    +        raise ValueError("Unsupported icon subimage format")
     
     
    -class IcnsFile(object):
    +class IcnsFile:
     
         SIZES = {
    -        (512, 512, 2): [
    -            (b'ic10', read_png_or_jpeg2000),
    -        ],
    -        (512, 512, 1): [
    -            (b'ic09', read_png_or_jpeg2000),
    -        ],
    -        (256, 256, 2): [
    -            (b'ic14', read_png_or_jpeg2000),
    -        ],
    -        (256, 256, 1): [
    -            (b'ic08', read_png_or_jpeg2000),
    -        ],
    -        (128, 128, 2): [
    -            (b'ic13', read_png_or_jpeg2000),
    -        ],
    +        (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)],
    +        (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)],
    +        (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)],
    +        (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)],
    +        (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)],
             (128, 128, 1): [
    -            (b'ic07', read_png_or_jpeg2000),
    -            (b'it32', read_32t),
    -            (b't8mk', read_mk),
    -        ],
    -        (64, 64, 1): [
    -            (b'icp6', read_png_or_jpeg2000),
    -        ],
    -        (32, 32, 2): [
    -            (b'ic12', read_png_or_jpeg2000),
    -        ],
    -        (48, 48, 1): [
    -            (b'ih32', read_32),
    -            (b'h8mk', read_mk),
    +            (b"ic07", read_png_or_jpeg2000),
    +            (b"it32", read_32t),
    +            (b"t8mk", read_mk),
             ],
    +        (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)],
    +        (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)],
    +        (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)],
             (32, 32, 1): [
    -            (b'icp5', read_png_or_jpeg2000),
    -            (b'il32', read_32),
    -            (b'l8mk', read_mk),
    -        ],
    -        (16, 16, 2): [
    -            (b'ic11', read_png_or_jpeg2000),
    +            (b"icp5", read_png_or_jpeg2000),
    +            (b"il32", read_32),
    +            (b"l8mk", read_mk),
             ],
    +        (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)],
             (16, 16, 1): [
    -            (b'icp4', read_png_or_jpeg2000),
    -            (b'is32', read_32),
    -            (b's8mk', read_mk),
    +            (b"icp4", read_png_or_jpeg2000),
    +            (b"is32", read_32),
    +            (b"s8mk", read_mk),
             ],
         }
     
    @@ -185,17 +166,17 @@ class IcnsFile(object):
             self.dct = dct = {}
             self.fobj = fobj
             sig, filesize = nextheader(fobj)
    -        if sig != b'icns':
    -            raise SyntaxError('not an icns file')
    +        if sig != b"icns":
    +            raise SyntaxError("not an icns file")
             i = HEADERSIZE
             while i < filesize:
                 sig, blocksize = nextheader(fobj)
                 if blocksize <= 0:
    -                raise SyntaxError('invalid block header')
    +                raise SyntaxError("invalid block header")
                 i += HEADERSIZE
                 blocksize -= HEADERSIZE
                 dct[sig] = (i, blocksize)
    -            fobj.seek(blocksize, 1)
    +            fobj.seek(blocksize, io.SEEK_CUR)
                 i += blocksize
     
         def itersizes(self):
    @@ -233,7 +214,7 @@ class IcnsFile(object):
                 size = (size[0], size[1], 1)
             channels = self.dataforsize(size)
     
    -        im = channels.get('RGBA', None)
    +        im = channels.get("RGBA", None)
             if im:
                 return im
     
    @@ -248,6 +229,7 @@ class IcnsFile(object):
     ##
     # Image plugin for Mac OS icons.
     
    +
     class IcnsImageFile(ImageFile.ImageFile):
         """
         PIL image support for Mac OS .icns files.
    @@ -264,22 +246,48 @@ class IcnsImageFile(ImageFile.ImageFile):
     
         def _open(self):
             self.icns = IcnsFile(self.fp)
    -        self.mode = 'RGBA'
    +        self.mode = "RGBA"
    +        self.info["sizes"] = self.icns.itersizes()
             self.best_size = self.icns.bestsize()
    -        self.size = (self.best_size[0] * self.best_size[2],
    -                     self.best_size[1] * self.best_size[2])
    -        self.info['sizes'] = self.icns.itersizes()
    -        # Just use this to see if it's loaded or not yet.
    -        self.tile = ('',)
    +        self.size = (
    +            self.best_size[0] * self.best_size[2],
    +            self.best_size[1] * self.best_size[2],
    +        )
    +
    +    @property
    +    def size(self):
    +        return self._size
    +
    +    @size.setter
    +    def size(self, value):
    +        info_size = value
    +        if info_size not in self.info["sizes"] and len(info_size) == 2:
    +            info_size = (info_size[0], info_size[1], 1)
    +        if (
    +            info_size not in self.info["sizes"]
    +            and len(info_size) == 3
    +            and info_size[2] == 1
    +        ):
    +            simple_sizes = [
    +                (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"]
    +            ]
    +            if value in simple_sizes:
    +                info_size = self.info["sizes"][simple_sizes.index(value)]
    +        if info_size not in self.info["sizes"]:
    +            raise ValueError("This is not one of the allowed sizes of this image")
    +        self._size = value
     
         def load(self):
             if len(self.size) == 3:
                 self.best_size = self.size
    -            self.size = (self.best_size[0] * self.best_size[2],
    -                         self.best_size[1] * self.best_size[2])
    +            self.size = (
    +                self.best_size[0] * self.best_size[2],
    +                self.best_size[1] * self.best_size[2],
    +            )
     
             Image.Image.load(self)
    -        if not self.tile:
    +        if self.im and self.im.size == self.size:
    +            # Already loaded
                 return
             self.load_prepare()
             # This is likely NOT the best way to do it, but whatever.
    @@ -291,9 +299,6 @@ class IcnsImageFile(ImageFile.ImageFile):
             self.im = im.im
             self.mode = im.mode
             self.size = im.size
    -        self.fp = None
    -        self.icns = None
    -        self.tile = ()
             self.load_end()
     
     
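The size property introduced above validates every assignment against info["sizes"] and resolves a plain (w, h) pair to its (w, h, scale) entry when one exists. A minimal usage sketch, assuming a hypothetical multi-resolution icon at "app.icns":

    from PIL import Image

    with Image.open("app.icns") as im:
        print(im.info["sizes"])  # e.g. {(16, 16, 1), (256, 256, 2), ...}
        im.size = (256, 256, 2)  # a (w, h, scale) entry; a plain (w, h) also resolves
        im.load()                # decodes the 512x512-pixel variant
        im.save("icon-512.png")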
    @@ -309,66 +314,63 @@ def _save(im, fp, filename):
             fp.flush()
     
         # create the temporary set of pngs
    -    iconset = tempfile.mkdtemp('.iconset')
    -    provided_images = {im.width: im
    -                       for im in im.encoderinfo.get("append_images", [])}
    -    last_w = None
    -    for w in [16, 32, 128, 256, 512]:
    -        prefix = 'icon_{}x{}'.format(w, w)
    +    with tempfile.TemporaryDirectory(".iconset") as iconset:
    +        provided_images = {
    +            im.width: im for im in im.encoderinfo.get("append_images", [])
    +        }
    +        last_w = None
    +        second_path = None
    +        for w in [16, 32, 128, 256, 512]:
    +            prefix = "icon_{}x{}".format(w, w)
     
    -        first_path = os.path.join(iconset, prefix+'.png')
    -        if last_w == w:
    -            shutil.copyfile(second_path, first_path)
    -        else:
    -            im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS))
    -            im_w.save(first_path)
    +            first_path = os.path.join(iconset, prefix + ".png")
    +            if last_w == w:
    +                shutil.copyfile(second_path, first_path)
    +            else:
    +                im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS))
    +                im_w.save(first_path)
     
    -        second_path = os.path.join(iconset, prefix+'@2x.png')
    -        im_w2 = provided_images.get(w*2, im.resize((w*2, w*2), Image.LANCZOS))
    -        im_w2.save(second_path)
    -        last_w = w*2
    +            second_path = os.path.join(iconset, prefix + "@2x.png")
    +            im_w2 = provided_images.get(w * 2, im.resize((w * 2, w * 2), Image.LANCZOS))
    +            im_w2.save(second_path)
    +            last_w = w * 2
     
    -    # iconutil -c icns -o {} {}
    -    from subprocess import Popen, PIPE, CalledProcessError
    +        # iconutil -c icns -o {} {}
     
    -    convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset]
    -    with open(os.devnull, 'wb') as devnull:
    -        convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=devnull)
    +        convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset]
    +        convert_proc = subprocess.Popen(
    +            convert_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
    +        )
     
    -    convert_proc.stdout.close()
    +        convert_proc.stdout.close()
     
    -    retcode = convert_proc.wait()
    +        retcode = convert_proc.wait()
     
    -    # remove the temporary files
    -    shutil.rmtree(iconset)
    -
    -    if retcode:
    -        raise CalledProcessError(retcode, convert_cmd)
    +        if retcode:
    +            raise subprocess.CalledProcessError(retcode, convert_cmd)
     
     
    -Image.register_open(IcnsImageFile.format, IcnsImageFile,
    -                    lambda x: x[:4] == b'icns')
    -Image.register_extension(IcnsImageFile.format, '.icns')
    +Image.register_open(IcnsImageFile.format, IcnsImageFile, lambda x: x[:4] == b"icns")
    +Image.register_extension(IcnsImageFile.format, ".icns")
     
    -if sys.platform == 'darwin':
    +if sys.platform == "darwin":
         Image.register_save(IcnsImageFile.format, _save)
     
         Image.register_mime(IcnsImageFile.format, "image/icns")
     
     
    -if __name__ == '__main__':
    +if __name__ == "__main__":
     
         if len(sys.argv) < 2:
             print("Syntax: python IcnsImagePlugin.py [file]")
             sys.exit()
     
    -    imf = IcnsImageFile(open(sys.argv[1], 'rb'))
    -    for size in imf.info['sizes']:
    -        imf.size = size
    -        imf.load()
    -        im = imf.im
    -        im.save('out-%s-%s-%s.png' % size)
    -    im = Image.open(sys.argv[1])
    -    im.save("out.png")
    -    if sys.platform == 'windows':
    -        os.startfile("out.png")
    +    with open(sys.argv[1], "rb") as fp:
    +        imf = IcnsImageFile(fp)
    +        for size in imf.info["sizes"]:
    +            imf.size = size
    +            imf.save("out-%s-%s-%s.png" % size)
    +        with Image.open(sys.argv[1]) as im:
    +            im.save("out.png")
    +        if sys.platform == "windows":
    +            os.startfile("out.png")
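The rewritten _save() above replaces the manual mkdtemp()/rmtree() pair with tempfile.TemporaryDirectory, so the .iconset staging directory is cleaned up even when iconutil fails. A standalone sketch of the same pattern, with the output path "out.icns" purely illustrative (iconutil itself exists only on macOS):

    import subprocess
    import tempfile

    with tempfile.TemporaryDirectory(".iconset") as iconset:
        # ... write the icon_{w}x{w}.png variants into iconset here ...
        cmd = ["iconutil", "-c", "icns", "-o", "out.icns", iconset]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
        proc.stdout.close()
        retcode = proc.wait()
        if retcode:
            raise subprocess.CalledProcessError(retcode, cmd)
    # the staging directory is gone here, on success or failure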
    diff --git a/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py
    index 428fdd4..e4a7432 100644
    --- a/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py
    @@ -23,13 +23,12 @@
     
     
     import struct
    +import warnings
     from io import BytesIO
    +from math import ceil, log
     
    -from . import Image, ImageFile, BmpImagePlugin, PngImagePlugin
    +from . import BmpImagePlugin, Image, ImageFile, PngImagePlugin
     from ._binary import i8, i16le as i16, i32le as i32
    -from math import log, ceil
    -
    -__version__ = "0.1"
     
     #
     # --------------------------------------------------------------------
    @@ -39,16 +38,20 @@ _MAGIC = b"\0\0\1\0"
     
     def _save(im, fp, filename):
         fp.write(_MAGIC)  # (2+2)
    -    sizes = im.encoderinfo.get("sizes",
    -                               [(16, 16), (24, 24), (32, 32), (48, 48),
    -                                (64, 64), (128, 128), (256, 256)])
    +    sizes = im.encoderinfo.get(
    +        "sizes",
    +        [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)],
    +    )
         width, height = im.size
    -    sizes = filter(lambda x: False if (x[0] > width or x[1] > height or
    -                                       x[0] > 256 or x[1] > 256) else True,
    -                   sizes)
    +    sizes = filter(
    +        lambda x: False
    +        if (x[0] > width or x[1] > height or x[0] > 256 or x[1] > 256)
    +        else True,
    +        sizes,
    +    )
         sizes = list(sizes)
    fp.write(struct.pack("<H", len(sizes)))  # idCount(2)
-                'width': i8(s[0]),
-                'height': i8(s[1]),
-                'nb_color': i8(s[2]),  # No. of colors in image (0 if >=8bpp)
    -                'reserved': i8(s[3]),
    -                'planes': i16(s[4:]),
    -                'bpp': i16(s[6:]),
    -                'size': i32(s[8:]),
    -                'offset': i32(s[12:])
    +                "width": i8(s[0]),
    +                "height": i8(s[1]),
    +                "nb_color": i8(s[2]),  # No. of colors in image (0 if >=8bpp)
    +                "reserved": i8(s[3]),
    +                "planes": i16(s[4:]),
    +                "bpp": i16(s[6:]),
    +                "size": i32(s[8:]),
    +                "offset": i32(s[12:]),
                 }
     
                 # See Wikipedia
    -            for j in ('width', 'height'):
    +            for j in ("width", "height"):
                     if not icon_header[j]:
                         icon_header[j] = 256
     
                 # See Wikipedia notes about color depth.
                 # We need this just to differ images with equal sizes
    -            icon_header['color_depth'] = (icon_header['bpp'] or
    -                                          (icon_header['nb_color'] != 0 and
    -                                           ceil(log(icon_header['nb_color'],
    -                                                    2))) or 256)
    +            icon_header["color_depth"] = (
    +                icon_header["bpp"]
    +                or (
    +                    icon_header["nb_color"] != 0
    +                    and ceil(log(icon_header["nb_color"], 2))
    +                )
    +                or 256
    +            )
     
    -            icon_header['dim'] = (icon_header['width'], icon_header['height'])
    -            icon_header['square'] = (icon_header['width'] *
    -                                     icon_header['height'])
    +            icon_header["dim"] = (icon_header["width"], icon_header["height"])
    +            icon_header["square"] = icon_header["width"] * icon_header["height"]
     
                 self.entry.append(icon_header)
     
    -        self.entry = sorted(self.entry, key=lambda x: x['color_depth'])
    +        self.entry = sorted(self.entry, key=lambda x: x["color_depth"])
             # ICO images are usually squares
             # self.entry = sorted(self.entry, key=lambda x: x['width'])
    -        self.entry = sorted(self.entry, key=lambda x: x['square'])
    +        self.entry = sorted(self.entry, key=lambda x: x["square"])
             self.entry.reverse()
     
         def sizes(self):
             """
             Get a list of all available icon sizes and color depths.
             """
    -        return {(h['width'], h['height']) for h in self.entry}
    +        return {(h["width"], h["height"]) for h in self.entry}
    +
    +    def getentryindex(self, size, bpp=False):
    +        for (i, h) in enumerate(self.entry):
    +            if size == h["dim"] and (bpp is False or bpp == h["color_depth"]):
    +                return i
    +        return 0
     
         def getimage(self, size, bpp=False):
             """
             Get an image from the icon
             """
    -        for (i, h) in enumerate(self.entry):
    -            if size == h['dim'] and (bpp is False or bpp == h['color_depth']):
    -                return self.frame(i)
    -        return self.frame(0)
    +        return self.frame(self.getentryindex(size, bpp))
     
         def frame(self, idx):
             """
    @@ -157,9 +167,9 @@ class IcoFile(object):
     
             header = self.entry[idx]
     
    -        self.buf.seek(header['offset'])
    +        self.buf.seek(header["offset"])
             data = self.buf.read(8)
    -        self.buf.seek(header['offset'])
    +        self.buf.seek(header["offset"])
     
             if data[:8] == PngImagePlugin._MAGIC:
                 # png frame
    @@ -167,9 +177,10 @@ class IcoFile(object):
             else:
                 # XOR + AND mask bmp frame
                 im = BmpImagePlugin.DibImageFile(self.buf)
    +            Image._decompression_bomb_check(im.size)
     
                 # change tile dimension to only encompass XOR image
    -            im.size = (im.size[0], int(im.size[1] / 2))
    +            im._size = (im.size[0], int(im.size[1] / 2))
                 d, e, o, a = im.tile[0]
                 im.tile[0] = d, (0, 0) + im.size, o, a
     
    @@ -194,11 +205,11 @@ class IcoFile(object):
     
                     # convert to an 8bpp grayscale image
                     mask = Image.frombuffer(
    -                    'L',            # 8bpp
    -                    im.size,        # (w, h)
    -                    alpha_bytes,    # source chars
    -                    'raw',          # raw decoder
    -                    ('L', 0, -1)    # 8bpp inverted, unpadded, reversed
    +                    "L",  # 8bpp
    +                    im.size,  # (w, h)
    +                    alpha_bytes,  # source chars
    +                    "raw",  # raw decoder
    +                    ("L", 0, -1),  # 8bpp inverted, unpadded, reversed
                     )
                 else:
                     # get AND image from end of bitmap
    @@ -210,8 +221,7 @@ class IcoFile(object):
                     # the total mask data is
                     # padded row size * height / bits per char
     
    -                and_mask_offset = o + int(im.size[0] * im.size[1] *
    -                                          (bpp / 8.0))
    +                and_mask_offset = o + int(im.size[0] * im.size[1] * (bpp / 8.0))
                     total_bytes = int((w * im.size[1]) / 8)
     
                     self.buf.seek(and_mask_offset)
    @@ -219,17 +229,17 @@ class IcoFile(object):
     
                     # convert raw data to image
                     mask = Image.frombuffer(
    -                    '1',            # 1 bpp
    -                    im.size,        # (w, h)
    -                    mask_data,      # source chars
    -                    'raw',          # raw decoder
    -                    ('1;I', int(w/8), -1)  # 1bpp inverted, padded, reversed
    +                    "1",  # 1 bpp
    +                    im.size,  # (w, h)
    +                    mask_data,  # source chars
    +                    "raw",  # raw decoder
    +                    ("1;I", int(w / 8), -1),  # 1bpp inverted, padded, reversed
                     )
     
                     # now we have two images, im is XOR image and mask is AND image
     
                 # apply mask image as alpha channel
    -            im = im.convert('RGBA')
    +            im = im.convert("RGBA")
                 im.putalpha(mask)
     
             return im
    @@ -238,6 +248,7 @@ class IcoFile(object):
     ##
     # Image plugin for Windows Icon files.
     
    +
     class IcoImageFile(ImageFile.ImageFile):
         """
         PIL read-only image support for Microsoft Windows .ico files.
    @@ -250,31 +261,58 @@ class IcoImageFile(ImageFile.ImageFile):
     
         Handles classic, XP and Vista icon formats.
     
    +    When saving, PNG compression is used. Support for this was only added in
    +    Windows Vista.
    +
         This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
    <casadebender@gmail.com>.
         https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
         """
    +
         format = "ICO"
         format_description = "Windows Icon"
     
         def _open(self):
             self.ico = IcoFile(self.fp)
    -        self.info['sizes'] = self.ico.sizes()
    -        self.size = self.ico.entry[0]['dim']
    +        self.info["sizes"] = self.ico.sizes()
    +        self.size = self.ico.entry[0]["dim"]
             self.load()
     
    +    @property
    +    def size(self):
    +        return self._size
    +
    +    @size.setter
    +    def size(self, value):
    +        if value not in self.info["sizes"]:
    +            raise ValueError("This is not one of the allowed sizes of this image")
    +        self._size = value
    +
         def load(self):
    +        if self.im and self.im.size == self.size:
    +            # Already loaded
    +            return
             im = self.ico.getimage(self.size)
             # if tile is PNG, it won't really be loaded yet
             im.load()
             self.im = im.im
             self.mode = im.mode
    -        self.size = im.size
    +        if im.size != self.size:
    +            warnings.warn("Image was not the expected size")
    +
    +            index = self.ico.getentryindex(self.size)
    +            sizes = list(self.info["sizes"])
    +            sizes[index] = im.size
    +            self.info["sizes"] = set(sizes)
    +
    +            self.size = im.size
     
         def load_seek(self):
             # Flag the ImageFile.Parser so that it
             # just does all the decode at the end.
             pass
    +
    +
     #
     # --------------------------------------------------------------------
     
    @@ -282,3 +320,5 @@ class IcoImageFile(ImageFile.ImageFile):
     Image.register_open(IcoImageFile.format, IcoImageFile, _accept)
     Image.register_save(IcoImageFile.format, _save)
     Image.register_extension(IcoImageFile.format, ".ico")
    +
    +Image.register_mime(IcoImageFile.format, "image/x-icon")
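Together, the new size setter and the load() size check above make requesting a specific resolution explicit. A hedged sketch, assuming a hypothetical "app.ico" that contains several sizes:

    from PIL import Image

    with Image.open("app.ico") as im:
        print(sorted(im.info["sizes"]))  # e.g. [(16, 16), (32, 32), (256, 256)]
        im.size = (32, 32)               # any size not in info["sizes"] raises ValueError
        im.load()                        # warns if the stored frame size disagrees
        im.save("icon-32.png")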
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py
    index b87fa90..8b03f35 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py
    @@ -26,13 +26,12 @@
     #
     
     
    +import os
     import re
    +
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8
     
    -__version__ = "0.7"
    -
    -
     # --------------------------------------------------------------------
     # Standard tags
     
    @@ -46,8 +45,17 @@ SCALE = "Scale (x,y)"
     SIZE = "Image size (x*y)"
     MODE = "Image type"
     
    -TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0,
    -        SCALE: 0, SIZE: 0, MODE: 0}
    +TAGS = {
    +    COMMENT: 0,
    +    DATE: 0,
    +    EQUIPMENT: 0,
    +    FRAMES: 0,
    +    LUT: 0,
    +    NAME: 0,
    +    SCALE: 0,
    +    SIZE: 0,
    +    MODE: 0,
    +}
     
     OPEN = {
         # ifunc93/p3cfunc formats
    @@ -69,6 +77,7 @@ OPEN = {
         "RYB3 image": ("RGB", "RYB;T"),
         # extensions
         "LA image": ("LA", "LA;L"),
    +    "PA image": ("LA", "PA;L"),
         "RGBA image": ("RGBA", "RGBA;L"),
         "RGBX image": ("RGBX", "RGBX;L"),
         "CMYK image": ("CMYK", "CMYK;L"),
    @@ -105,6 +114,7 @@ def number(s):
     ##
     # Image plugin for the IFUNC IM file format.
     
    +
     class ImImageFile(ImageFile.ImageFile):
     
         format = "IM"
    @@ -137,7 +147,7 @@ class ImImageFile(ImageFile.ImageFile):
                 if s == b"\r":
                     continue
     
    -            if not s or s == b'\0' or s == b'\x1A':
    +            if not s or s == b"\0" or s == b"\x1A":
                     break
     
                 # FIXME: this may read whole file if not a text file
    @@ -146,14 +156,14 @@ class ImImageFile(ImageFile.ImageFile):
                 if len(s) > 100:
                     raise SyntaxError("not an IM file")
     
    -            if s[-2:] == b'\r\n':
    +            if s[-2:] == b"\r\n":
                     s = s[:-2]
    -            elif s[-1:] == b'\n':
    +            elif s[-1:] == b"\n":
                     s = s[:-1]
     
                 try:
                     m = split.match(s)
    -            except re.error as v:
    +            except re.error:
                     raise SyntaxError("not an IM file")
     
                 if m:
    @@ -162,8 +172,8 @@ class ImImageFile(ImageFile.ImageFile):
     
                     # Don't know if this is the correct encoding,
                     # but a decent guess (I guess)
    -                k = k.decode('latin-1', 'replace')
    -                v = v.decode('latin-1', 'replace')
    +                k = k.decode("latin-1", "replace")
    +                v = v.decode("latin-1", "replace")
     
                     # Convert value as appropriate
                     if k in [FRAMES, SCALE, SIZE]:
    @@ -189,18 +199,19 @@ class ImImageFile(ImageFile.ImageFile):
     
                 else:
     
    -                raise SyntaxError("Syntax error in IM header: " +
    -                                  s.decode('ascii', 'replace'))
    +                raise SyntaxError(
    +                    "Syntax error in IM header: " + s.decode("ascii", "replace")
    +                )
     
             if not n:
                 raise SyntaxError("Not an IM file")
     
             # Basic attributes
    -        self.size = self.info[SIZE]
    +        self._size = self.info[SIZE]
             self.mode = self.info[MODE]
     
             # Skip forward to start of image data
    -        while s and s[0:1] != b'\x1A':
    +        while s and s[0:1] != b"\x1A":
                 s = self.fp.read(1)
             if not s:
                 raise SyntaxError("File truncated")
    @@ -211,20 +222,21 @@ class ImImageFile(ImageFile.ImageFile):
                 greyscale = 1  # greyscale palette
                 linear = 1  # linear greyscale palette
                 for i in range(256):
    -                if palette[i] == palette[i+256] == palette[i+512]:
    +                if palette[i] == palette[i + 256] == palette[i + 512]:
                         if i8(palette[i]) != i:
                             linear = 0
                     else:
                         greyscale = 0
    -            if self.mode == "L" or self.mode == "LA":
    +            if self.mode in ["L", "LA", "P", "PA"]:
                     if greyscale:
                         if not linear:
                             self.lut = [i8(c) for c in palette[:256]]
                     else:
    -                    if self.mode == "L":
    +                    if self.mode in ["L", "P"]:
                             self.mode = self.rawmode = "P"
    -                    elif self.mode == "LA":
    -                        self.mode = self.rawmode = "PA"
    +                    elif self.mode in ["LA", "PA"]:
    +                        self.mode = "PA"
    +                        self.rawmode = "PA;L"
                         self.palette = ImagePalette.raw("RGB;L", palette)
                 elif self.mode == "RGB":
                     if not greyscale or not linear:
    @@ -243,8 +255,7 @@ class ImImageFile(ImageFile.ImageFile):
                     # use bit decoder (if necessary)
                     bits = int(self.rawmode[2:])
                     if bits not in [8, 16, 32]:
    -                    self.tile = [("bit", (0, 0)+self.size, offs,
    -                                 (bits, 8, 3, 0, -1))]
    +                    self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))]
                         return
                 except ValueError:
                     pass
    @@ -253,13 +264,14 @@ class ImImageFile(ImageFile.ImageFile):
                 # Old LabEye/3PC files.  Would be very surprised if anyone
                 # ever stumbled upon such a file ;-)
                 size = self.size[0] * self.size[1]
    -            self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)),
    -                         ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)),
    -                         ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))]
    +            self.tile = [
    +                ("raw", (0, 0) + self.size, offs, ("G", 0, -1)),
    +                ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)),
    +                ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)),
    +            ]
             else:
                 # LabEye/IFUNC files
    -            self.tile = [("raw", (0, 0)+self.size, offs,
    -                         (self.rawmode, 0, -1))]
    +            self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]
     
         @property
         def n_frames(self):
    @@ -285,11 +297,21 @@ class ImImageFile(ImageFile.ImageFile):
     
             self.fp = self.__fp
     
    -        self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))]
    +        self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]
     
         def tell(self):
             return self.frame
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
    +
     #
     # --------------------------------------------------------------------
     # Save IM files
    @@ -311,7 +333,7 @@ SAVE = {
         "RGBA": ("RGBA", "RGBA;L"),
         "RGBX": ("RGBX", "RGBX;L"),
         "CMYK": ("CMYK", "CMYK;L"),
    -    "YCbCr": ("YCC", "YCbCr;L")
    +    "YCbCr": ("YCC", "YCbCr;L"),
     }
     
     
    @@ -324,17 +346,25 @@ def _save(im, fp, filename):
     
         frames = im.encoderinfo.get("frames", 1)
     
    -    fp.write(("Image type: %s image\r\n" % image_type).encode('ascii'))
    +    fp.write(("Image type: %s image\r\n" % image_type).encode("ascii"))
         if filename:
    -        fp.write(("Name: %s\r\n" % filename).encode('ascii'))
    -    fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii'))
    -    fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii'))
    -    if im.mode == "P":
    +        # Each line must be 100 characters or less,
    +        # or: SyntaxError("not an IM file")
    +        # 8 characters are used for "Name: " and "\r\n"
    +        # Keep just the filename, ditch the potentially overlong path
    +        name, ext = os.path.splitext(os.path.basename(filename))
    +        name = "".join([name[: 92 - len(ext)], ext])
    +
    +        fp.write(("Name: %s\r\n" % name).encode("ascii"))
    +    fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii"))
    +    fp.write(("File size (no of images): %d\r\n" % frames).encode("ascii"))
    +    if im.mode in ["P", "PA"]:
             fp.write(b"Lut: 1\r\n")
    -    fp.write(b"\000" * (511-fp.tell()) + b"\032")
    -    if im.mode == "P":
    +    fp.write(b"\000" * (511 - fp.tell()) + b"\032")
    +    if im.mode in ["P", "PA"]:
             fp.write(im.im.getpalette("RGB", "RGB;L"))  # 768 bytes
    -    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))])
    +
     
     #
     # --------------------------------------------------------------------
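The filename handling added to _save() above encodes a small arithmetic rule: an IM header line may be at most 100 characters, "Name: " plus "\r\n" consume 8 of them, so the basename is clamped to 92 characters while the extension is preserved. A worked check of that arithmetic (clamp_im_name is a hypothetical helper mirroring the diff):

    import os

    def clamp_im_name(filename):
        name, ext = os.path.splitext(os.path.basename(filename))
        return "".join([name[: 92 - len(ext)], ext])

    line = "Name: %s\r\n" % clamp_im_name("x" * 200 + ".tif")
    assert len(line) <= 100  # 6 + 88 + 4 + 2 = 100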
    diff --git a/server/www/packages/packages-windows/x86/PIL/Image.py b/server/www/packages/packages-windows/x86/PIL/Image.py
    index c589526..3ced965 100644
    --- a/server/www/packages/packages-windows/x86/PIL/Image.py
    +++ b/server/www/packages/packages-windows/x86/PIL/Image.py
    @@ -24,15 +24,50 @@
     # See the README file for information on usage and redistribution.
     #
     
    -# VERSION is deprecated and will be removed in Pillow 6.0.0.
    -# PILLOW_VERSION is deprecated and will be removed after that.
    -# Use __version__ instead.
    -from . import VERSION, PILLOW_VERSION, __version__, _plugins
    -from ._util import py3
    -
    +import atexit
    +import builtins
    +import io
     import logging
    -import warnings
     import math
    +import numbers
    +import os
    +import struct
    +import sys
    +import tempfile
    +import warnings
    +from collections.abc import Callable, MutableMapping
    +from pathlib import Path
    +
    +# VERSION was removed in Pillow 6.0.0.
    +# PILLOW_VERSION is deprecated and will be removed in a future release.
    +# Use __version__ instead.
    +from . import (
    +    ImageMode,
    +    TiffTags,
    +    UnidentifiedImageError,
    +    __version__,
    +    _plugins,
    +    _raise_version_warning,
    +)
    +from ._binary import i8, i32le
    +from ._util import deferred_error, isPath
    +
    +if sys.version_info >= (3, 7):
    +
    +    def __getattr__(name):
    +        if name == "PILLOW_VERSION":
    +            _raise_version_warning()
    +            return __version__
    +        raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name))
    +
    +
    +else:
    +
    +    from . import PILLOW_VERSION
    +
    +    # Silence warning
    +    assert PILLOW_VERSION
    +
     
     logger = logging.getLogger(__name__)
     
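The version-gated branch above relies on PEP 562 (Python 3.7+): a module-level __getattr__ is consulted for names the module itself does not define, which lets PILLOW_VERSION keep resolving while emitting a deprecation warning. A generic sketch of the pattern, with OLD_NAME and its value hypothetical:

    import warnings

    def __getattr__(name):  # module level, only honored on Python 3.7+
        if name == "OLD_NAME":
            warnings.warn("OLD_NAME is deprecated", DeprecationWarning)
            return 42
        raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name))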
    @@ -45,12 +80,6 @@ class DecompressionBombError(Exception):
         pass
     
     
    -class _imaging_not_installed(object):
    -    # module placeholder
    -    def __getattr__(self, id):
    -        raise ImportError("The _imaging C module is not installed")
    -
    -
     # Limit to around a quarter gigabyte for a 24 bit (3 bpp) image
     MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3)
     
    @@ -62,91 +91,38 @@ try:
         # Also note that Image.core is not a publicly documented interface,
         # and should be considered private and subject to change.
         from . import _imaging as core
    -    if __version__ != getattr(core, 'PILLOW_VERSION', None):
    -        raise ImportError("The _imaging extension was built for another "
    -                          "version of Pillow or PIL:\n"
    -                          "Core version: %s\n"
    -                          "Pillow version: %s" %
    -                          (getattr(core, 'PILLOW_VERSION', None),
    -                           __version__))
    +
    +    if __version__ != getattr(core, "PILLOW_VERSION", None):
    +        raise ImportError(
    +            "The _imaging extension was built for another version of Pillow or PIL:\n"
    +            "Core version: %s\n"
    +            "Pillow version: %s" % (getattr(core, "PILLOW_VERSION", None), __version__)
    +        )
     
     except ImportError as v:
    -    core = _imaging_not_installed()
    +    core = deferred_error(ImportError("The _imaging C module is not installed."))
         # Explanations for ways that we know we might have an import error
         if str(v).startswith("Module use of python"):
             # The _imaging C module is present, but not compiled for
             # the right version (windows only).  Print a warning, if
             # possible.
             warnings.warn(
    -            "The _imaging extension was built for another version "
    -            "of Python.",
    -            RuntimeWarning
    -            )
    +            "The _imaging extension was built for another version of Python.",
    +            RuntimeWarning,
    +        )
         elif str(v).startswith("The _imaging extension"):
             warnings.warn(str(v), RuntimeWarning)
    -    elif "Symbol not found: _PyUnicodeUCS2_" in str(v):
    -        # should match _PyUnicodeUCS2_FromString and
    -        # _PyUnicodeUCS2_AsLatin1String
    -        warnings.warn(
    -            "The _imaging extension was built for Python with UCS2 support; "
    -            "recompile Pillow or build Python --without-wide-unicode. ",
    -            RuntimeWarning
    -            )
    -    elif "Symbol not found: _PyUnicodeUCS4_" in str(v):
    -        # should match _PyUnicodeUCS4_FromString and
    -        # _PyUnicodeUCS4_AsLatin1String
    -        warnings.warn(
    -            "The _imaging extension was built for Python with UCS4 support; "
    -            "recompile Pillow or build Python --with-wide-unicode. ",
    -            RuntimeWarning
    -            )
         # Fail here anyway. Don't let people run with a mostly broken Pillow.
         # see docs/porting.rst
         raise
     
    -try:
    -    import builtins
    -except ImportError:
    -    import __builtin__
    -    builtins = __builtin__
    -
    -from . import ImageMode
    -from ._binary import i8
    -from ._util import isPath, isStringType, deferred_error
    -
    -import os
    -import sys
    -import io
    -import struct
    -import atexit
    -
    -# type stuff
    -import numbers
    -try:
    -    # Python 3
    -    from collections.abc import Callable
    -except ImportError:
    -    # Python 2.7
    -    from collections import Callable
    -
     
     # works everywhere, win for pypy, not cpython
    -USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info')
    +USE_CFFI_ACCESS = hasattr(sys, "pypy_version_info")
     try:
         import cffi
    -    HAS_CFFI = True
     except ImportError:
    -    HAS_CFFI = False
    -
    -try:
    -    from pathlib import Path
    -    HAS_PATHLIB = True
    -except ImportError:
    -    try:
    -        from pathlib2 import Path
    -        HAS_PATHLIB = True
    -    except ImportError:
    -        HAS_PATHLIB = False
    +    cffi = None
     
     
     def isImageType(t):
    @@ -164,7 +140,7 @@ def isImageType(t):
     
     
     #
    -# Constants (also defined in _imagingmodule.c!)
    +# Constants
     
     NONE = 0
     
    @@ -177,14 +153,14 @@ ROTATE_270 = 4
     TRANSPOSE = 5
     TRANSVERSE = 6
     
    -# transforms
    +# transforms (also defined in Imaging.h)
     AFFINE = 0
     EXTENT = 1
     PERSPECTIVE = 2
     QUAD = 3
     MESH = 4
     
    -# resampling filters
    +# resampling filters (also defined in Imaging.h)
     NEAREST = NONE = 0
     BOX = 4
     BILINEAR = LINEAR = 2
    @@ -192,6 +168,9 @@ HAMMING = 5
     BICUBIC = CUBIC = 3
     LANCZOS = ANTIALIAS = 1
     
    +_filters_support = {BOX: 0.5, BILINEAR: 1.0, HAMMING: 1.0, BICUBIC: 2.0, LANCZOS: 3.0}
    +
    +
     # dithers
     NEAREST = NONE = 0
     ORDERED = 1  # Not yet implemented
    @@ -212,7 +191,7 @@ NORMAL = 0
     SEQUENCE = 1
     CONTAINER = 2
     
    -if hasattr(core, 'DEFAULT_STRATEGY'):
    +if hasattr(core, "DEFAULT_STRATEGY"):
         DEFAULT_STRATEGY = core.DEFAULT_STRATEGY
         FILTERED = core.FILTERED
         HUFFMAN_ONLY = core.HUFFMAN_ONLY
    @@ -238,13 +217,12 @@ ENCODERS = {}
     _MODEINFO = {
         # NOTE: this table will be removed in future versions.  use
         # getmode* functions or ImageMode descriptors instead.
    -
         # official modes
         "1": ("L", "L", ("1",)),
         "L": ("L", "L", ("L",)),
         "I": ("L", "I", ("I",)),
         "F": ("L", "F", ("F",)),
    -    "P": ("RGB", "L", ("P",)),
    +    "P": ("P", "L", ("P",)),
         "RGB": ("RGB", "L", ("R", "G", "B")),
         "RGBX": ("RGB", "L", ("R", "G", "B", "X")),
         "RGBA": ("RGB", "L", ("R", "G", "B", "A")),
    @@ -252,46 +230,44 @@ _MODEINFO = {
         "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")),
         "LAB": ("RGB", "L", ("L", "A", "B")),
         "HSV": ("RGB", "L", ("H", "S", "V")),
    -
         # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and
         # BGR;24.  Use these modes only if you know exactly what you're
         # doing...
    -
     }
     
    -if sys.byteorder == 'little':
    -    _ENDIAN = '<'
    +if sys.byteorder == "little":
    +    _ENDIAN = "<"
     else:
    -    _ENDIAN = '>'
    +    _ENDIAN = ">"
     
     _MODE_CONV = {
         # official modes
    -    "1": ('|b1', None),  # Bits need to be extended to bytes
    -    "L": ('|u1', None),
    -    "LA": ('|u1', 2),
    -    "I": (_ENDIAN + 'i4', None),
    -    "F": (_ENDIAN + 'f4', None),
    -    "P": ('|u1', None),
    -    "RGB": ('|u1', 3),
    -    "RGBX": ('|u1', 4),
    -    "RGBA": ('|u1', 4),
    -    "CMYK": ('|u1', 4),
    -    "YCbCr": ('|u1', 3),
    -    "LAB": ('|u1', 3),  # UNDONE - unsigned |u1i1i1
    -    "HSV": ('|u1', 3),
    +    "1": ("|b1", None),  # Bits need to be extended to bytes
    +    "L": ("|u1", None),
    +    "LA": ("|u1", 2),
    +    "I": (_ENDIAN + "i4", None),
    +    "F": (_ENDIAN + "f4", None),
    +    "P": ("|u1", None),
    +    "RGB": ("|u1", 3),
    +    "RGBX": ("|u1", 4),
    +    "RGBA": ("|u1", 4),
    +    "CMYK": ("|u1", 4),
    +    "YCbCr": ("|u1", 3),
    +    "LAB": ("|u1", 3),  # UNDONE - unsigned |u1i1i1
    +    "HSV": ("|u1", 3),
         # I;16 == I;16L, and I;32 == I;32L
-    "I;16": ('<u2', None),
-    "I;16B": ('>u2', None),
-    "I;16L": ('<u2', None),
-    "I;16S": ('<i2', None),
-    "I;16BS": ('>i2', None),
-    "I;16LS": ('<i2', None),
-    "I;32": ('<u4', None),
-    "I;32B": ('>u4', None),
-    "I;32L": ('<u4', None),
-    "I;32S": ('<i4', None),
-    "I;32BS": ('>i4', None),
-    "I;32LS": ('<i4', None),
+    "I;16": ("<u2", None),
+    "I;16B": (">u2", None),
+    "I;16L": ("<u2", None),
+    "I;16S": ("<i2", None),
+    "I;16BS": (">i2", None),
+    "I;16LS": ("<i2", None),
+    "I;32": ("<u4", None),
+    "I;32B": (">u4", None),
+    "I;32L": ("<u4", None),
+    "I;32S": ("<i4", None),
+    "I;32BS": (">i4", None),
+    "I;32LS": ("<i4", None),
-    if sys.version_info.major >= 3:
    -        def __del__(self):
    -            if (hasattr(self, 'fp') and hasattr(self, '_exclusive_fp')
    -               and self.fp and self._exclusive_fp):
    -                self.fp.close()
    -            self.fp = None
    -
         def _copy(self):
             self.load()
             self.im = self.im.copy()
    @@ -615,11 +619,9 @@ class Image(object):
                 self.load()
     
         def _dump(self, file=None, format=None, **options):
    -        import tempfile
    -
    -        suffix = ''
    +        suffix = ""
             if format:
    -            suffix = '.'+format
    +            suffix = "." + format
     
             if not file:
                 f, filename = tempfile.mkstemp(suffix)
    @@ -639,35 +641,34 @@ class Image(object):
             return filename
     
         def __eq__(self, other):
    -        return (isinstance(other, Image) and
    -                self.__class__.__name__ == other.__class__.__name__ and
    -                self.mode == other.mode and
    -                self.size == other.size and
    -                self.info == other.info and
    -                self.category == other.category and
    -                self.readonly == other.readonly and
    -                self.getpalette() == other.getpalette() and
    -                self.tobytes() == other.tobytes())
    -
    -    def __ne__(self, other):
    -        eq = (self == other)
    -        return not eq
    +        return (
    +            self.__class__ is other.__class__
    +            and self.mode == other.mode
    +            and self.size == other.size
    +            and self.info == other.info
    +            and self.category == other.category
    +            and self.readonly == other.readonly
    +            and self.getpalette() == other.getpalette()
    +            and self.tobytes() == other.tobytes()
    +        )
     
         def __repr__(self):
             return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % (
    -            self.__class__.__module__, self.__class__.__name__,
    -            self.mode, self.size[0], self.size[1],
    -            id(self)
    -            )
    +            self.__class__.__module__,
    +            self.__class__.__name__,
    +            self.mode,
    +            self.size[0],
    +            self.size[1],
    +            id(self),
    +        )
     
         def _repr_png_(self):
             """ iPython display hook support
     
             :returns: png version of the image as bytes
             """
    -        from io import BytesIO
    -        b = BytesIO()
    -        self.save(b, 'PNG')
    +        b = io.BytesIO()
    +        self.save(b, "PNG")
             return b.getvalue()
     
         @property
    @@ -675,24 +676,19 @@ class Image(object):
             # numpy array interface support
             new = {}
             shape, typestr = _conv_type_shape(self)
    -        new['shape'] = shape
    -        new['typestr'] = typestr
    -        new['version'] = 3
    -        if self.mode == '1':
    +        new["shape"] = shape
    +        new["typestr"] = typestr
    +        new["version"] = 3
    +        if self.mode == "1":
                 # Binary images need to be extended from bits to bytes
                 # See: https://github.com/python-pillow/Pillow/issues/350
    -            new['data'] = self.tobytes('raw', 'L')
    +            new["data"] = self.tobytes("raw", "L")
             else:
    -            new['data'] = self.tobytes()
    +            new["data"] = self.tobytes()
             return new
     
         def __getstate__(self):
    -        return [
    -            self.info,
    -            self.mode,
    -            self.size,
    -            self.getpalette(),
    -            self.tobytes()]
    +        return [self.info, self.mode, self.size, self.getpalette(), self.tobytes()]
     
         def __setstate__(self, state):
             Image.__init__(self)
    @@ -700,9 +696,9 @@ class Image(object):
             info, mode, size, palette, data = state
             self.info = info
             self.mode = mode
    -        self.size = size
    +        self._size = size
             self.im = core.new(mode, size)
    -        if mode in ("L", "P") and palette:
    +        if mode in ("L", "LA", "P", "PA") and palette:
                 self.putpalette(palette)
             self.frombytes(data)
     
    @@ -750,8 +746,9 @@ class Image(object):
             return b"".join(data)
     
         def tostring(self, *args, **kw):
    -        raise NotImplementedError("tostring() has been removed. "
    -                                  "Please call tobytes() instead.")
    +        raise NotImplementedError(
    +            "tostring() has been removed. Please call tobytes() instead."
    +        )
     
         def tobitmap(self, name="image"):
             """
    @@ -768,11 +765,15 @@ class Image(object):
             if self.mode != "1":
                 raise ValueError("not a bitmap")
             data = self.tobytes("xbm")
    -        return b"".join([
    -            ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'),
    -            ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'),
    -            ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};"
    -            ])
    +        return b"".join(
    +            [
    +                ("#define %s_width %d\n" % (name, self.size[0])).encode("ascii"),
    +                ("#define %s_height %d\n" % (name, self.size[1])).encode("ascii"),
    +                ("static char %s_bits[] = {\n" % name).encode("ascii"),
    +                data,
    +                b"};",
    +            ]
    +        )
     
         def frombytes(self, data, decoder_name="raw", *args):
             """
    @@ -801,8 +802,9 @@ class Image(object):
                 raise ValueError("cannot decode image data")
     
         def fromstring(self, *args, **kw):
    -        raise NotImplementedError("fromstring() has been removed. "
    -                                  "Please call frombytes() instead.")
    +        raise NotImplementedError(
    +            "fromstring() has been removed. Please call frombytes() instead."
    +        )
     
         def load(self):
             """
    @@ -811,8 +813,10 @@ class Image(object):
             Image class automatically loads an opened image when it is
             accessed for the first time.
     
    -        This method will close the file associated with the image. See
    -        :ref:`file-handling` for more information.
    +        If the file associated with the image was opened by Pillow, then this
    +        method will close it. The exception to this is if the image has
    +        multiple frames, in which case the file will be left open for seek
    +        operations. See :ref:`file-handling` for more information.
     
             :returns: An image access object.
             :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess`
    @@ -831,10 +835,11 @@ class Image(object):
                     self.palette.mode = "RGBA"
     
             if self.im:
    -            if HAS_CFFI and USE_CFFI_ACCESS:
    +            if cffi and USE_CFFI_ACCESS:
                     if self.pyaccess:
                         return self.pyaccess
                     from . import PyAccess
    +
                     self.pyaccess = PyAccess.new(self, self.readonly)
                     if self.pyaccess:
                         return self.pyaccess
    @@ -851,8 +856,7 @@ class Image(object):
             """
             pass
     
    -    def convert(self, mode=None, matrix=None, dither=None,
    -                palette=WEB, colors=256):
    +    def convert(self, mode=None, matrix=None, dither=None, palette=WEB, colors=256):
             """
             Returns a converted copy of this image. For the "P" mode, this
             method translates pixels through the palette.  If mode is
    @@ -863,7 +867,7 @@ class Image(object):
             "L", "RGB" and "CMYK." The **matrix** argument only supports "L"
             and "RGB".
     
    -        When translating a color image to black and white (mode "L"),
    +        When translating a color image to greyscale (mode "L"),
             the library uses the ITU-R 601-2 luma transform::
     
                 L = R * 299/1000 + G * 587/1000 + B * 114/1000
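A worked instance of that transform: a pure-red pixel maps to L = 255 * 299/1000 ≈ 76, which convert("L") reproduces:

    from PIL import Image

    im = Image.new("RGB", (1, 1), (255, 0, 0))
    assert im.convert("L").getpixel((0, 0)) == 76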
    @@ -871,9 +875,13 @@ class Image(object):
             The default method of converting a greyscale ("L") or "RGB"
             image into a bilevel (mode "1") image uses Floyd-Steinberg
             dither to approximate the original image luminosity levels. If
    -        dither is NONE, all non-zero values are set to 255 (white). To
    -        use other thresholds, use the :py:meth:`~PIL.Image.Image.point`
    -        method.
    +        dither is NONE, all values larger than 128 are set to 255 (white),
    +        all other values to 0 (black). To use other thresholds, use the
    +        :py:meth:`~PIL.Image.Image.point` method.
    +
    +        When converting from "RGBA" to "P" without a **matrix** argument,
    +        this passes the operation to :py:meth:`~PIL.Image.Image.quantize`,
    +        and **dither** and **palette** are ignored.
     
             :param mode: The requested mode. See: :ref:`concept-modes`.
             :param matrix: An optional conversion matrix.  If given, this
    @@ -881,6 +889,7 @@ class Image(object):
             :param dither: Dithering method, used when converting from
                mode "RGB" to "P" or from "RGB" or "L" to "1".
                Available methods are NONE or FLOYDSTEINBERG (default).
    +           Note that this is not used when **matrix** is supplied.
             :param palette: Palette to use when converting from mode "RGB"
                to "P".  Available palettes are WEB or ADAPTIVE.
             :param colors: Number of colors to use for the ADAPTIVE palette.
    @@ -900,12 +909,33 @@ class Image(object):
             if not mode or (mode == self.mode and not matrix):
                 return self.copy()
     
    +        has_transparency = self.info.get("transparency") is not None
             if matrix:
                 # matrix conversion
                 if mode not in ("L", "RGB"):
                     raise ValueError("illegal conversion")
                 im = self.im.convert_matrix(mode, matrix)
    -            return self._new(im)
    +            new = self._new(im)
    +            if has_transparency and self.im.bands == 3:
    +                transparency = new.info["transparency"]
    +
    +                def convert_transparency(m, v):
    +                    v = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5
    +                    return max(0, min(255, int(v)))
    +
    +                if mode == "L":
    +                    transparency = convert_transparency(matrix, transparency)
    +                elif len(mode) == 3:
    +                    transparency = tuple(
    +                        [
    +                            convert_transparency(
    +                                matrix[i * 4 : i * 4 + 4], transparency
    +                            )
    +                            for i in range(0, len(transparency))
    +                        ]
    +                    )
    +                new.info["transparency"] = transparency
    +            return new
     
             if mode == "P" and self.mode == "RGBA":
                 return self.quantize(colors)
    @@ -913,47 +943,49 @@ class Image(object):
             trns = None
             delete_trns = False
             # transparency handling
    -        if "transparency" in self.info and \
    -                self.info['transparency'] is not None:
    -            if self.mode in ('L', 'RGB') and mode == 'RGBA':
    +        if has_transparency:
    +            if self.mode in ("1", "L", "I", "RGB") and mode == "RGBA":
                     # Use transparent conversion to promote from transparent
                     # color to an alpha channel.
    -                new_im = self._new(self.im.convert_transparent(
    -                    mode, self.info['transparency']))
    -                del(new_im.info['transparency'])
    +                new_im = self._new(
    +                    self.im.convert_transparent(mode, self.info["transparency"])
    +                )
    +                del new_im.info["transparency"]
                     return new_im
    -            elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'):
    -                t = self.info['transparency']
    +            elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"):
    +                t = self.info["transparency"]
                     if isinstance(t, bytes):
                         # Dragons. This can't be represented by a single color
    -                    warnings.warn('Palette images with Transparency  ' +
    -                                  ' expressed in bytes should be converted ' +
    -                                  'to RGBA images')
    +                    warnings.warn(
    +                        "Palette images with Transparency expressed in bytes should be "
    +                        "converted to RGBA images"
    +                    )
                         delete_trns = True
                     else:
                         # get the new transparency color.
                         # use existing conversions
                         trns_im = Image()._new(core.new(self.mode, (1, 1)))
    -                    if self.mode == 'P':
    +                    if self.mode == "P":
                             trns_im.putpalette(self.palette)
                             if isinstance(t, tuple):
                                 try:
                                     t = trns_im.palette.getcolor(t)
    -                            except:
    -                                raise ValueError("Couldn't allocate a palette "
    -                                                 "color for transparency")
    +                            except Exception:
    +                                raise ValueError(
    +                                    "Couldn't allocate a palette color for transparency"
    +                                )
                         trns_im.putpixel((0, 0), t)
     
    -                    if mode in ('L', 'RGB'):
    +                    if mode in ("L", "RGB"):
                             trns_im = trns_im.convert(mode)
                         else:
                             # can't just retrieve the palette number, got to do it
                             # after quantization.
    -                        trns_im = trns_im.convert('RGB')
    +                        trns_im = trns_im.convert("RGB")
                         trns = trns_im.getpixel((0, 0))
     
    -            elif self.mode == 'P' and mode == 'RGBA':
    -                t = self.info['transparency']
    +            elif self.mode == "P" and mode == "RGBA":
    +                t = self.info["transparency"]
                     delete_trns = True
     
                     if isinstance(t, bytes):
    @@ -961,27 +993,26 @@ class Image(object):
                     elif isinstance(t, int):
                         self.im.putpalettealpha(t, 0)
                     else:
    -                    raise ValueError("Transparency for P mode should" +
    -                                     " be bytes or int")
    +                    raise ValueError("Transparency for P mode should be bytes or int")
     
             if mode == "P" and palette == ADAPTIVE:
                 im = self.im.quantize(colors)
                 new = self._new(im)
                 from . import ImagePalette
    +
                 new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB"))
                 if delete_trns:
                     # This could possibly happen if we requantize to fewer colors.
                     # The transparency would be totally off in that case.
    -                del(new.info['transparency'])
    +                del new.info["transparency"]
                 if trns is not None:
                     try:
    -                    new.info['transparency'] = new.palette.getcolor(trns)
    -                except:
    +                    new.info["transparency"] = new.palette.getcolor(trns)
    +                except Exception:
                         # if we can't make a transparent color, don't leave the old
                         # transparency hanging around to mess us up.
    -                    del(new.info['transparency'])
    -                    warnings.warn("Couldn't allocate palette entry " +
    -                                  "for transparency")
    +                    del new.info["transparency"]
    +                    warnings.warn("Couldn't allocate palette entry for transparency")
                 return new
     
             # colorspace conversion
    @@ -1001,20 +1032,19 @@ class Image(object):
             new_im = self._new(im)
             if delete_trns:
                 # crash fail if we leave a bytes transparency in an rgb/l mode.
    -            del(new_im.info['transparency'])
    +            del new_im.info["transparency"]
             if trns is not None:
    -            if new_im.mode == 'P':
    +            if new_im.mode == "P":
                     try:
    -                    new_im.info['transparency'] = new_im.palette.getcolor(trns)
    -                except:
    -                    del(new_im.info['transparency'])
    -                    warnings.warn("Couldn't allocate palette entry " +
    -                                  "for transparency")
    +                    new_im.info["transparency"] = new_im.palette.getcolor(trns)
    +                except Exception:
    +                    del new_im.info["transparency"]
    +                    warnings.warn("Couldn't allocate palette entry for transparency")
                 else:
    -                new_im.info['transparency'] = trns
    +                new_im.info["transparency"] = trns
             return new_im
     
    -    def quantize(self, colors=256, method=None, kmeans=0, palette=None):
    +    def quantize(self, colors=256, method=None, kmeans=0, palette=None, dither=1):
             """
             Convert the image to 'P' mode with the specified number
             of colors.
    @@ -1025,7 +1055,12 @@ class Image(object):
                            2 = fast octree
                            3 = libimagequant
             :param kmeans: Integer
    -        :param palette: Quantize to the palette of given :py:class:`PIL.Image.Image`.
    +        :param palette: Quantize to the palette of given
    +                        :py:class:`PIL.Image.Image`.
    +        :param dither: Dithering method, used when converting from
    +           mode "RGB" to "P" or from "RGB" or "L" to "1".
    +           Available methods are NONE or FLOYDSTEINBERG (default).
    +           Default: 1 (legacy setting)
             :returns: A new image
     
             """
    @@ -1035,14 +1070,15 @@ class Image(object):
             if method is None:
                 # defaults:
                 method = 0
    -            if self.mode == 'RGBA':
    +            if self.mode == "RGBA":
                     method = 2
     
    -        if self.mode == 'RGBA' and method not in (2, 3):
    +        if self.mode == "RGBA" and method not in (2, 3):
                 # Caller specified an invalid mode.
                 raise ValueError(
    -                'Fast Octree (method == 2) and libimagequant (method == 3) ' +
    -                'are the only valid methods for quantizing RGBA images')
    +                "Fast Octree (method == 2) and libimagequant (method == 3) "
    +                "are the only valid methods for quantizing RGBA images"
    +            )
     
             if palette:
                 # use palette from reference image
    @@ -1052,11 +1088,18 @@ class Image(object):
                 if self.mode != "RGB" and self.mode != "L":
                     raise ValueError(
                         "only RGB or L mode images can be quantized to a palette"
    -                    )
    -            im = self.im.convert("P", 1, palette.im)
    +                )
    +            im = self.im.convert("P", dither, palette.im)
                 return self._new(im)
     
    -        return self._new(self.im.quantize(colors, method, kmeans))
    +        im = self._new(self.im.quantize(colors, method, kmeans))
    +
    +        from . import ImagePalette
    +
    +        mode = im.im.getpalettemode()
    +        im.palette = ImagePalette.ImagePalette(mode, im.im.getpalette(mode, mode))
    +
    +        return im
     
         def copy(self):
             """
    @@ -1104,12 +1147,9 @@ class Image(object):
     
             x0, y0, x1, y1 = map(int, map(round, box))
     
    -        if x1 < x0:
    -            x1 = x0
    -        if y1 < y0:
    -            y1 = y0
    +        absolute_values = (abs(x1 - x0), abs(y1 - y0))
     
    -        _decompression_bomb_check((x1, y1))
    +        _decompression_bomb_check(absolute_values)
     
             return im.crop((x0, y0, x1, y1))
     
    @@ -1117,16 +1157,18 @@ class Image(object):
             """
             Configures the image file loader so it returns a version of the
             image that as closely as possible matches the given mode and
    -        size.  For example, you can use this method to convert a color
    -        JPEG to greyscale while loading it, or to extract a 128x192
    -        version from a PCD file.
    +        size. For example, you can use this method to convert a color
    +        JPEG to greyscale while loading it.
    +
    +        If any changes are made, returns a tuple with the chosen ``mode`` and
    +        ``box`` with coordinates of the original image within the altered one.
     
             Note that this method modifies the :py:class:`~PIL.Image.Image` object
    -        in place.  If the image has already been loaded, this method has no
    +        in place. If the image has already been loaded, this method has no
             effect.
     
             Note: This method is not implemented for most images. It is
    -        currently implemented only for JPEG and PCD images.
    +        currently implemented only for JPEG and MPO images.
     
             :param mode: The requested mode.
             :param size: The requested size.
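
A hedged sketch of the updated ``draft()`` contract, assuming a hypothetical
``photo.jpg``::

    from PIL import Image

    im = Image.open("photo.jpg")
    # Ask the JPEG loader for a greyscale image at roughly half size.
    # Per the docstring above, if anything changed the method returns
    # the chosen mode and a box locating the original image within
    # the altered one.
    res = im.draft("L", (im.width // 2, im.height // 2))
    if res is not None:
        mode, box = res
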
    @@ -1154,8 +1196,9 @@ class Image(object):
             if isinstance(filter, Callable):
                 filter = filter()
             if not hasattr(filter, "filter"):
    -            raise TypeError("filter argument should be ImageFilter.Filter " +
    -                            "instance or class")
    +            raise TypeError(
    +                "filter argument should be ImageFilter.Filter instance or class"
    +            )
     
             multiband = isinstance(filter, ImageFilter.MultibandFilter)
             if self.im.bands == 1 or multiband:
    @@ -1254,6 +1297,12 @@ class Image(object):
                 return tuple(extrema)
             return self.im.getextrema()
     
    +    def getexif(self):
    +        if self._exif is None:
    +            self._exif = Exif()
    +        self._exif.load(self.info.get("exif"))
    +        return self._exif
    +
         def getim(self):
             """
             Returns a capsule that points to the internal image memory.
    @@ -1274,10 +1323,7 @@ class Image(object):
     
             self.load()
             try:
    -            if py3:
    -                return list(self.im.getpalette())
    -            else:
    -                return [i8(c) for c in self.im.getpalette()]
    +            return list(self.im.getpalette())
             except ValueError:
                 return None  # no palette
     
    @@ -1325,6 +1371,7 @@ class Image(object):
             bi-level image (mode "1") or a greyscale image ("L").
     
             :param mask: An optional mask.
    +        :param extrema: An optional tuple of manually-specified extrema.
             :returns: A list containing pixel counts.
             """
             self.load()
    @@ -1337,9 +1384,36 @@ class Image(object):
                 return self.im.histogram(extrema)
             return self.im.histogram()
     
    +    def entropy(self, mask=None, extrema=None):
    +        """
    +        Calculates and returns the entropy for the image.
    +
    +        A bilevel image (mode "1") is treated as a greyscale ("L")
    +        image by this method.
    +
    +        If a mask is provided, the method employs the histogram for
    +        those parts of the image where the mask image is non-zero.
    +        The mask image must have the same size as the image, and be
    +        either a bi-level image (mode "1") or a greyscale image ("L").
    +
    +        :param mask: An optional mask.
    +        :param extrema: An optional tuple of manually-specified extrema.
    +        :returns: A float value representing the image entropy
    +        """
    +        self.load()
    +        if mask:
    +            mask.load()
    +            return self.im.entropy((0, 0), mask.im)
    +        if self.mode in ("I", "F"):
    +            if extrema is None:
    +                extrema = self.getextrema()
    +            return self.im.entropy(extrema)
    +        return self.im.entropy()
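
A short sketch of the new ``entropy()`` helper, assuming a hypothetical
``hopper.png``::

    from PIL import Image

    im = Image.open("hopper.png").convert("L")
    print(im.entropy())  # a float, e.g. around 7 for a detailed photo

    # Restrict the calculation to a region via a mode "1" mask
    mask = Image.new("1", im.size, 0)
    mask.paste(1, (0, 0, im.width // 2, im.height))
    print(im.entropy(mask=mask))
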
    +
         def offset(self, xoffset, yoffset=None):
    -        raise NotImplementedError("offset() has been removed. "
    -                                  "Please call ImageChops.offset() instead.")
    +        raise NotImplementedError(
    +            "offset() has been removed. Please call ImageChops.offset() instead."
    +        )
     
         def paste(self, im, box=None, mask=None):
             """
    @@ -1397,13 +1471,12 @@ class Image(object):
                     size = mask.size
                 else:
                     # FIXME: use self.size here?
    -                raise ValueError(
    -                    "cannot determine region size; use 4-item box"
    -                    )
    -            box += (box[0]+size[0], box[1]+size[1])
    +                raise ValueError("cannot determine region size; use 4-item box")
    +            box += (box[0] + size[0], box[1] + size[1])
     
    -        if isStringType(im):
    +        if isinstance(im, str):
                 from . import ImageColor
    +
                 im = ImageColor.getcolor(im, self.mode)
     
             elif isImageType(im):
    @@ -1522,7 +1595,7 @@ class Image(object):
     
             self._ensure_mutable()
     
    -        if self.mode not in ("LA", "RGBA"):
    +        if self.mode not in ("LA", "PA", "RGBA"):
                 # attempt to promote self to a matching alpha mode
                 try:
                     mode = getmodebase(self.mode) + "A"
    @@ -1531,7 +1604,7 @@ class Image(object):
                     except (AttributeError, ValueError):
                         # do things the hard way
                         im = self.im.convert(mode)
    -                    if im.mode not in ("LA", "RGBA"):
    +                    if im.mode not in ("LA", "PA", "RGBA"):
                             raise ValueError  # sanity check
                         self.im = im
                     self.pyaccess = None
    @@ -1539,7 +1612,7 @@ class Image(object):
                 except (KeyError, ValueError):
                     raise ValueError("illegal image mode")
     
    -        if self.mode == "LA":
    +        if self.mode in ("LA", "PA"):
                 band = 1
             else:
                 band = 3
    @@ -1582,10 +1655,10 @@ class Image(object):
     
         def putpalette(self, data, rawmode="RGB"):
             """
    -        Attaches a palette to this image.  The image must be a "P" or
    -        "L" image, and the palette sequence must contain 768 integer
    -        values, where each group of three values represent the red,
    -        green, and blue values for the corresponding pixel
    +        Attaches a palette to this image.  The image must be a "P",
    +        "PA", "L" or "LA" image, and the palette sequence must contain
    +        768 integer values, where each group of three values represent
    +        the red, green, and blue values for the corresponding pixel
             index. Instead of an integer sequence, you can use an 8-bit
             string.
     
    @@ -1594,19 +1667,16 @@ class Image(object):
             """
             from . import ImagePalette
     
    -        if self.mode not in ("L", "P"):
    +        if self.mode not in ("L", "LA", "P", "PA"):
                 raise ValueError("illegal image mode")
             self.load()
             if isinstance(data, ImagePalette.ImagePalette):
                 palette = ImagePalette.raw(data.rawmode, data.palette)
             else:
                 if not isinstance(data, bytes):
    -                if py3:
    -                    data = bytes(data)
    -                else:
    -                    data = "".join(chr(x) for x in data)
    +                data = bytes(data)
                 palette = ImagePalette.raw(rawmode, data)
    -        self.mode = "P"
    +        self.mode = "PA" if "A" in self.mode else "P"
             self.palette = palette
             self.palette.mode = "RGB"
             self.load()  # install new palette
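
A sketch of attaching a palette, which the change above also allows for
"LA"/"PA" images::

    from PIL import Image

    im = Image.new("P", (8, 8))
    # 768 integers: 256 RGB triplets; here a simple greyscale ramp.
    im.putpalette([v for v in range(256) for _ in range(3)])

    la = Image.new("LA", (8, 8))
    la.putpalette([v for v in range(256) for _ in range(3)])
    print(la.mode)  # "PA" per the mode handling above
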
    @@ -1615,7 +1685,8 @@ class Image(object):
             """
             Modifies the pixel at the given position. The color is given as
             a single numerical value for single-band images, and a tuple for
    -        multi-band images.
    +        multi-band images. In addition to this, RGB and RGBA tuples are
    +        accepted for P images.
     
             Note that this method is relatively slow.  For more extensive changes,
             use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw`
    @@ -1638,6 +1709,14 @@ class Image(object):
     
             if self.pyaccess:
                 return self.pyaccess.putpixel(xy, value)
    +
    +        if (
    +            self.mode == "P"
    +            and isinstance(value, (list, tuple))
    +            and len(value) in [3, 4]
    +        ):
    +            # RGB or RGBA value for a P image
    +            value = self.palette.getcolor(value)
             return self.im.putpixel(xy, value)
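
A sketch of the P-mode convenience added to ``putpixel()`` above::

    from PIL import Image

    im = Image.new("P", (8, 8))
    # An RGB tuple is now looked up (or allocated) in the palette
    # instead of failing; the stored value is the palette index.
    im.putpixel((0, 0), (255, 0, 0))
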
     
         def remap_palette(self, dest_map, source_palette=None):
    @@ -1645,7 +1724,7 @@ class Image(object):
             Rewrites the image to reorder the palette.
     
             :param dest_map: A list of indexes into the original palette.
    -           e.g. [1,0] would swap a two item palette, and list(range(255))
    +           e.g. [1,0] would swap a two item palette, and list(range(256))
                is the identity transform.
             :param source_palette: Bytes or None.
             :returns:  An :py:class:`~PIL.Image.Image` object.
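
A sketch of the corrected identity example, assuming a hypothetical "P" mode
``palette_image.png``::

    from PIL import Image

    im = Image.open("palette_image.png")
    im.load()  # remap_palette reads the already-loaded palette
    same = im.remap_palette(list(range(256)))  # identity transform
    swapped = im.remap_palette([1, 0] + list(range(2, 256)))
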
    @@ -1660,16 +1739,16 @@ class Image(object):
                 if self.mode == "P":
                     real_source_palette = self.im.getpalette("RGB")[:768]
                 else:  # L-mode
    -                real_source_palette = bytearray(i//3 for i in range(768))
    +                real_source_palette = bytearray(i // 3 for i in range(768))
             else:
                 real_source_palette = source_palette
     
             palette_bytes = b""
    -        new_positions = [0]*256
    +        new_positions = [0] * 256
     
             # pick only the used colors from the palette
             for i, oldPosition in enumerate(dest_map):
    -            palette_bytes += real_source_palette[oldPosition*3:oldPosition*3+3]
    +            palette_bytes += real_source_palette[oldPosition * 3 : oldPosition * 3 + 3]
                 new_positions[oldPosition] = i
     
             # replace the palette color id of all pixel with the new id
    @@ -1693,30 +1772,46 @@ class Image(object):
             mapping_palette = bytearray(new_positions)
     
             m_im = self.copy()
    -        m_im.mode = 'P'
    +        m_im.mode = "P"
     
    -        m_im.palette = ImagePalette.ImagePalette("RGB",
    -                                                 palette=mapping_palette*3,
    -                                                 size=768)
    +        m_im.palette = ImagePalette.ImagePalette(
    +            "RGB", palette=mapping_palette * 3, size=768
    +        )
             # possibly set palette dirty, then
             # m_im.putpalette(mapping_palette, 'L')  # converts to 'P'
             # or just force it.
             # UNDONE -- this is part of the general issue with palettes
             m_im.im.putpalette(*m_im.palette.getdata())
     
    -        m_im = m_im.convert('L')
    +        m_im = m_im.convert("L")
     
             # Internally, we require 768 bytes for a palette.
    -        new_palette_bytes = (palette_bytes +
    -                             (768 - len(palette_bytes)) * b'\x00')
    +        new_palette_bytes = palette_bytes + (768 - len(palette_bytes)) * b"\x00"
             m_im.putpalette(new_palette_bytes)
    -        m_im.palette = ImagePalette.ImagePalette("RGB",
    -                                                 palette=palette_bytes,
    -                                                 size=len(palette_bytes))
    +        m_im.palette = ImagePalette.ImagePalette(
    +            "RGB", palette=palette_bytes, size=len(palette_bytes)
    +        )
     
             return m_im
     
    -    def resize(self, size, resample=NEAREST, box=None):
    +    def _get_safe_box(self, size, resample, box):
    +        """Expands the box so it includes adjacent pixels
    +        that may be used by resampling with the given resampling filter.
    +        """
    +        filter_support = _filters_support[resample] - 0.5
    +        scale_x = (box[2] - box[0]) / size[0]
    +        scale_y = (box[3] - box[1]) / size[1]
    +        support_x = filter_support * scale_x
    +        support_y = filter_support * scale_y
    +
    +        return (
    +            max(0, int(box[0] - support_x)),
    +            max(0, int(box[1] - support_y)),
    +            min(self.size[0], math.ceil(box[2] + support_x)),
    +            min(self.size[1], math.ceil(box[3] + support_y)),
    +        )
    +
    +    def resize(self, size, resample=BICUBIC, box=None, reducing_gap=None):
             """
             Returns a resized copy of this image.
     
    @@ -1726,20 +1821,49 @@ class Image(object):
                one of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BOX`,
                :py:attr:`PIL.Image.BILINEAR`, :py:attr:`PIL.Image.HAMMING`,
                :py:attr:`PIL.Image.BICUBIC` or :py:attr:`PIL.Image.LANCZOS`.
    -           If omitted, or if the image has mode "1" or "P", it is
    -           set :py:attr:`PIL.Image.NEAREST`.
    +           Default filter is :py:attr:`PIL.Image.BICUBIC`.
    +           If the image has mode "1" or "P", it is
    +           always set to :py:attr:`PIL.Image.NEAREST`.
                See: :ref:`concept-filters`.
    -        :param box: An optional 4-tuple of floats giving the region
    -           of the source image which should be scaled.
    -           The values should be within (0, 0, width, height) rectangle.
    +        :param box: An optional 4-tuple of floats providing
    +           the source image region to be scaled.
    +           The values must be within (0, 0, width, height) rectangle.
                If omitted or None, the entire source is used.
+        :param reducing_gap: Apply optimization by resizing the image
+           in two steps. First, reduce the image by an integer factor
+           using :py:meth:`~PIL.Image.Image.reduce`.
+           Second, resize using regular resampling. The last step
+           changes size by no less than a factor of ``reducing_gap``.
+           ``reducing_gap`` may be None (the first step is skipped)
+           or a value greater than 1.0. The bigger ``reducing_gap``,
+           the closer the result is to fair resampling;
+           the smaller ``reducing_gap``, the faster the resize.
+           With ``reducing_gap`` greater than or equal to 3.0, the
+           result is indistinguishable from fair resampling in most
+           cases. The default value is None (no optimization).
             :returns: An :py:class:`~PIL.Image.Image` object.
             """
     
    -        if resample not in (
    -                NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING,
    -        ):
    -            raise ValueError("unknown resampling filter")
    +        if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING):
    +            message = "Unknown resampling filter ({}).".format(resample)
    +
    +            filters = [
    +                "{} ({})".format(filter[1], filter[0])
    +                for filter in (
    +                    (NEAREST, "Image.NEAREST"),
    +                    (LANCZOS, "Image.LANCZOS"),
    +                    (BILINEAR, "Image.BILINEAR"),
    +                    (BICUBIC, "Image.BICUBIC"),
    +                    (BOX, "Image.BOX"),
    +                    (HAMMING, "Image.HAMMING"),
    +                )
    +            ]
    +            raise ValueError(
    +                message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1]
    +            )
    +
    +        if reducing_gap is not None and reducing_gap < 1.0:
    +            raise ValueError("reducing_gap must be 1.0 or greater")
     
             size = tuple(size)
     
    @@ -1754,18 +1878,74 @@ class Image(object):
             if self.mode in ("1", "P"):
                 resample = NEAREST
     
    -        if self.mode == 'LA':
    -            return self.convert('La').resize(size, resample, box).convert('LA')
    -
    -        if self.mode == 'RGBA':
    -            return self.convert('RGBa').resize(size, resample, box).convert('RGBA')
    +        if self.mode in ["LA", "RGBA"]:
    +            im = self.convert(self.mode[:-1] + "a")
    +            im = im.resize(size, resample, box)
    +            return im.convert(self.mode)
     
             self.load()
     
    +        if reducing_gap is not None and resample != NEAREST:
    +            factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1
    +            factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1
    +            if factor_x > 1 or factor_y > 1:
    +                reduce_box = self._get_safe_box(size, resample, box)
    +                factor = (factor_x, factor_y)
    +                if callable(self.reduce):
    +                    self = self.reduce(factor, box=reduce_box)
    +                else:
    +                    self = Image.reduce(self, factor, box=reduce_box)
    +                box = (
    +                    (box[0] - reduce_box[0]) / factor_x,
    +                    (box[1] - reduce_box[1]) / factor_y,
    +                    (box[2] - reduce_box[0]) / factor_x,
    +                    (box[3] - reduce_box[1]) / factor_y,
    +                )
    +
             return self._new(self.im.resize(size, resample, box))
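
A hedged sketch of two-step resizing with the new ``reducing_gap``
parameter, assuming a hypothetical large ``photo.jpg``::

    from PIL import Image

    im = Image.open("photo.jpg")  # e.g. 4000x3000
    # reduce() first shrinks by an integer factor, then regular
    # resampling finishes the job; larger gaps trade speed for
    # fidelity, per the docstring above.
    fast = im.resize((400, 300), Image.BILINEAR, reducing_gap=2.0)
    fair = im.resize((400, 300), Image.BICUBIC)  # single-step reference
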
     
    -    def rotate(self, angle, resample=NEAREST, expand=0, center=None,
    -               translate=None, fillcolor=None):
    +    def reduce(self, factor, box=None):
    +        """
+        Returns a copy of the image reduced `factor` times in each
+        dimension. If the size of the image is not divisible by
+        `factor`, the resulting size is rounded up.
    +
    +        :param factor: A greater than 0 integer or tuple of two integers
    +           for width and height separately.
    +        :param box: An optional 4-tuple of ints providing
    +           the source image region to be reduced.
    +           The values must be within (0, 0, width, height) rectangle.
    +           If omitted or None, the entire source is used.
    +        """
    +        if not isinstance(factor, (list, tuple)):
    +            factor = (factor, factor)
    +
    +        if box is None:
    +            box = (0, 0) + self.size
    +        else:
    +            box = tuple(box)
    +
    +        if factor == (1, 1) and box == (0, 0) + self.size:
    +            return self.copy()
    +
    +        if self.mode in ["LA", "RGBA"]:
    +            im = self.convert(self.mode[:-1] + "a")
    +            im = im.reduce(factor, box)
    +            return im.convert(self.mode)
    +
    +        self.load()
    +
    +        return self._new(self.im.reduce(factor, box))
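
A sketch of the new ``reduce()`` method, assuming a hypothetical 1000x600
``photo.jpg``::

    from PIL import Image

    im = Image.open("photo.jpg")
    half = im.reduce(2)         # 500x300
    narrow = im.reduce((4, 2))  # 250x300; per-axis factors
    # An optional box limits the region being reduced:
    corner = im.reduce(2, box=(0, 0, 500, 300))
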
    +
    +    def rotate(
    +        self,
    +        angle,
    +        resample=NEAREST,
    +        expand=0,
    +        center=None,
    +        translate=None,
    +        fillcolor=None,
    +    ):
             """
             Returns a rotated copy of this image.  This method returns a
             copy of this image, rotated the given number of degrees counter
    @@ -1778,7 +1958,7 @@ class Image(object):
                environment), or :py:attr:`PIL.Image.BICUBIC`
                (cubic spline interpolation in a 4x4 environment).
                If omitted, or if the image has mode "1" or "P", it is
    -           set :py:attr:`PIL.Image.NEAREST`. See :ref:`concept-filters`.
    +           set to :py:attr:`PIL.Image.NEAREST`. See :ref:`concept-filters`.
             :param expand: Optional expansion flag.  If true, expands the output
                image to make it large enough to hold the entire rotated image.
                If false or omitted, make the output image the same size as the
    @@ -1830,22 +2010,28 @@ class Image(object):
             else:
                 post_trans = translate
             if center is None:
    -            rotn_center = (w / 2.0, h / 2.0)  # FIXME These should be rounded to ints?
    +            # FIXME These should be rounded to ints?
    +            rotn_center = (w / 2.0, h / 2.0)
             else:
                 rotn_center = center
     
    -        angle = - math.radians(angle)
    +        angle = -math.radians(angle)
             matrix = [
    -            round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0,
    -            round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0
    +            round(math.cos(angle), 15),
    +            round(math.sin(angle), 15),
    +            0.0,
    +            round(-math.sin(angle), 15),
    +            round(math.cos(angle), 15),
    +            0.0,
             ]
     
             def transform(x, y, matrix):
                 (a, b, c, d, e, f) = matrix
    -            return a*x + b*y + c, d*x + e*y + f
    +            return a * x + b * y + c, d * x + e * y + f
     
    -        matrix[2], matrix[5] = transform(-rotn_center[0] - post_trans[0],
    -                                         -rotn_center[1] - post_trans[1], matrix)
    +        matrix[2], matrix[5] = transform(
    +            -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix
    +        )
             matrix[2] += rotn_center[0]
             matrix[5] += rotn_center[1]
     
    @@ -1857,15 +2043,13 @@ class Image(object):
                     x, y = transform(x, y, matrix)
                     xx.append(x)
                     yy.append(y)
    -            nw = int(math.ceil(max(xx)) - math.floor(min(xx)))
    -            nh = int(math.ceil(max(yy)) - math.floor(min(yy)))
    +            nw = math.ceil(max(xx)) - math.floor(min(xx))
    +            nh = math.ceil(max(yy)) - math.floor(min(yy))
     
                 # We multiply a translation matrix from the right.  Because of its
                 # special form, this is the same as taking the image of the
                 # translation vector as new translation vector.
    -            matrix[2], matrix[5] = transform(-(nw - w) / 2.0,
    -                                             -(nh - h) / 2.0,
    -                                             matrix)
    +            matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix)
                 w, h = nw, nh
     
             return self.transform((w, h), AFFINE, matrix, resample, fillcolor=fillcolor)
    @@ -1894,7 +2078,7 @@ class Image(object):
                parameter should always be used.
             :param params: Extra parameters to the image writer.
             :returns: None
    -        :exception KeyError: If the output format could not be determined
    +        :exception ValueError: If the output format could not be determined
                from the file name.  Use the format option to solve this.
             :exception IOError: If the file could not be written.  The file
                may have been created, and may contain partial data.
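
With the documented exception type corrected above, a sketch of handling
it::

    from PIL import Image

    im = Image.new("RGB", (4, 4))
    try:
        im.save("snapshot.unknown")  # extension is not registered
    except ValueError:
        im.save("snapshot.bin", format="PNG")  # name the format explicitly
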
    @@ -1905,7 +2089,7 @@ class Image(object):
             if isPath(fp):
                 filename = fp
                 open_fp = True
    -        elif HAS_PATHLIB and isinstance(fp, Path):
    +        elif isinstance(fp, Path):
                 filename = str(fp)
                 open_fp = True
             if not filename and hasattr(fp, "name") and isPath(fp.name):
    @@ -1913,9 +2097,9 @@ class Image(object):
                 filename = fp.name
     
             # may mutate self!
    -        self.load()
    +        self._ensure_mutable()
     
    -        save_all = params.pop('save_all', False)
    +        save_all = params.pop("save_all", False)
             self.encoderinfo = params
             self.encoderconfig = ()
     
    @@ -1929,7 +2113,7 @@ class Image(object):
                 try:
                     format = EXTENSION[ext]
                 except KeyError:
    -                raise ValueError('unknown file extension: {}'.format(ext))
    +                raise ValueError("unknown file extension: {}".format(ext))
     
             if format.upper() not in SAVE:
                 init()
    @@ -1939,11 +2123,11 @@ class Image(object):
                 save_handler = SAVE[format.upper()]
     
             if open_fp:
    -            if params.get('append', False):
    -                fp = builtins.open(filename, "r+b")
    -            else:
    +            if params.get("append", False):
                     # Open also for reading ("+"), because TIFF save_all
                     # writer needs to go back and edit the written data.
    +                fp = builtins.open(filename, "r+b")
    +            else:
                     fp = builtins.open(filename, "w+b")
     
             try:
    @@ -1960,9 +2144,6 @@ class Image(object):
             **EOFError** exception. When a sequence file is opened, the
             library automatically seeks to frame 0.
     
    -        Note that in the current version of the library, most sequence
    -        formats only allows you to seek to the next frame.
    -
             See :py:meth:`~PIL.Image.Image.tell`.
     
             :param frame: Frame number, starting at 0.
    @@ -1979,15 +2160,15 @@ class Image(object):
             Displays this image. This method is mainly intended for
             debugging purposes.
     
    -        On Unix platforms, this method saves the image to a temporary
    -        PPM file, and calls either the **xv** utility or the **display**
    -        utility, depending on which one can be found.
    +        The image is first saved to a temporary file. By default, it will be in
    +        PNG format.
     
    -        On macOS, this method saves the image to a temporary BMP file, and
    -        opens it with the native Preview application.
    +        On Unix, the image is then opened using the **display**, **eog** or
    +        **xv** utility, depending on which one can be found.
     
    -        On Windows, it saves the image to a temporary BMP file, and uses
    -        the standard BMP display utility to show it (usually Paint).
    +        On macOS, the image is opened with the native Preview application.
    +
    +        On Windows, the image is opened with the standard PNG display utility.
     
             :param title: Optional title to use for the image window,
                where possible.
    @@ -2030,12 +2211,11 @@ class Image(object):
             """
             self.load()
     
    -        if isStringType(channel):
    +        if isinstance(channel, str):
                 try:
                     channel = self.getbands().index(channel)
                 except ValueError:
    -                raise ValueError(
    -                    'The image has no channel "{}"'.format(channel))
    +                raise ValueError('The image has no channel "{}"'.format(channel))
     
             return self._new(self.im.getband(channel))
     
    @@ -2047,7 +2227,7 @@ class Image(object):
             """
             return 0
     
    -    def thumbnail(self, size, resample=BICUBIC):
    +    def thumbnail(self, size, resample=BICUBIC, reducing_gap=2.0):
             """
             Make this image into a thumbnail.  This method modifies the
             image to contain a thumbnail version of itself, no larger than
    @@ -2066,38 +2246,60 @@ class Image(object):
                of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`,
                :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`.
                If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`.
    -           (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0)
    +           (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0).
+        :param reducing_gap: Apply optimization by resizing the image
+           in two steps. First, reduce the image by an integer factor
+           using :py:meth:`~PIL.Image.Image.reduce` or
+           :py:meth:`~PIL.Image.Image.draft` for JPEG images.
+           Second, resize using regular resampling. The last step
+           changes size by no less than a factor of ``reducing_gap``.
+           ``reducing_gap`` may be None (the first step is skipped)
+           or a value greater than 1.0. The bigger ``reducing_gap``,
+           the closer the result is to fair resampling;
+           the smaller ``reducing_gap``, the faster the resize.
+           With ``reducing_gap`` greater than or equal to 3.0, the
+           result is indistinguishable from fair resampling in most
+           cases. The default value is 2.0 (very close to fair
+           resampling while still being faster in many cases).
             :returns: None
             """
     
    -        # preserve aspect ratio
    -        x, y = self.size
    -        if x > size[0]:
    -            y = int(max(y * size[0] / x, 1))
    -            x = int(size[0])
    -        if y > size[1]:
    -            x = int(max(x * size[1] / y, 1))
    -            y = int(size[1])
    -        size = x, y
    -
    -        if size == self.size:
    +        x, y = map(math.floor, size)
    +        if x >= self.width and y >= self.height:
                 return
     
    -        self.draft(None, size)
    +        def round_aspect(number, key):
    +            return max(min(math.floor(number), math.ceil(number), key=key), 1)
     
    -        im = self.resize(size, resample)
    +        # preserve aspect ratio
    +        aspect = self.width / self.height
    +        if x / y >= aspect:
    +            x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y))
    +        else:
    +            y = round_aspect(x / aspect, key=lambda n: abs(aspect - x / n))
    +        size = (x, y)
     
    -        self.im = im.im
    -        self.mode = im.mode
    -        self.size = size
    +        box = None
    +        if reducing_gap is not None:
    +            res = self.draft(None, (size[0] * reducing_gap, size[1] * reducing_gap))
    +            if res is not None:
    +                box = res[1]
    +
    +        if self.size != size:
    +            im = self.resize(size, resample, box=box, reducing_gap=reducing_gap)
    +
    +            self.im = im.im
    +            self._size = size
    +            self.mode = self.im.mode
     
             self.readonly = 0
             self.pyaccess = None
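
A sketch of ``thumbnail()`` with the new ``reducing_gap`` parameter,
assuming a hypothetical ``photo.jpg``::

    from PIL import Image

    im = Image.open("photo.jpg")
    # Modifies the image in place, preserving aspect ratio. The default
    # reducing_gap of 2.0 enables the fast two-step path (draft/reduce,
    # then resampling); pass reducing_gap=None for single-step resizing.
    im.thumbnail((128, 128))
    im.save("photo.thumb.jpg")
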
     
         # FIXME: the different transform methods need further explanation
         # instead of bloating the method docs, add a separate chapter.
    -    def transform(self, size, method, data=None, resample=NEAREST,
    -                  fill=1, fillcolor=None):
    +    def transform(
    +        self, size, method, data=None, resample=NEAREST, fill=1, fillcolor=None
    +    ):
             """
             Transforms this image.  This method creates a new image with the
             given size, and the same mode as the original, and copies data
    @@ -2114,12 +2316,14 @@ class Image(object):
     
               It may also be an :py:class:`~PIL.Image.ImageTransformHandler`
               object::
    +
                 class Example(Image.ImageTransformHandler):
                     def transform(size, method, data, resample, fill=1):
                         # Return result
     
               It may also be an object with a :py:meth:`~method.getdata` method
               that returns a tuple supplying new **method** and **data** values::
    +
                 class Example(object):
                     def getdata(self):
                         method = Image.EXTENT
    @@ -2135,18 +2339,24 @@ class Image(object):
             :param fill: If **method** is an
               :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of
               the arguments passed to it. Otherwise, it is unused.
    -        :param fillcolor: Optional fill color for the area outside the transform
    -           in the output image.
    +        :param fillcolor: Optional fill color for the area outside the
    +           transform in the output image.
             :returns: An :py:class:`~PIL.Image.Image` object.
             """
     
    -        if self.mode == 'LA':
    -            return self.convert('La').transform(
    -                size, method, data, resample, fill, fillcolor).convert('LA')
    +        if self.mode == "LA":
    +            return (
    +                self.convert("La")
    +                .transform(size, method, data, resample, fill, fillcolor)
    +                .convert("LA")
    +            )
     
    -        if self.mode == 'RGBA':
    -            return self.convert('RGBa').transform(
    -                size, method, data, resample, fill, fillcolor).convert('RGBA')
    +        if self.mode == "RGBA":
    +            return (
    +                self.convert("RGBa")
    +                .transform(size, method, data, resample, fill, fillcolor)
    +                .convert("RGBA")
    +            )
     
             if isinstance(method, ImageTransformHandler):
                 return method.transform(size, self, resample=resample, fill=fill)
    @@ -2159,19 +2369,19 @@ class Image(object):
                 raise ValueError("missing method data")
     
             im = new(self.mode, size, fillcolor)
    +        im.info = self.info.copy()
             if method == MESH:
                 # list of quads
                 for box, quad in data:
    -                im.__transformer(box, self, QUAD, quad, resample,
    -                                 fillcolor is None)
    +                im.__transformer(box, self, QUAD, quad, resample, fillcolor is None)
             else:
    -            im.__transformer((0, 0)+size, self, method, data,
    -                             resample, fillcolor is None)
    +            im.__transformer(
    +                (0, 0) + size, self, method, data, resample, fillcolor is None
    +            )
     
             return im
     
    -    def __transformer(self, box, image, method, data,
    -                      resample=NEAREST, fill=1):
    +    def __transformer(self, box, image, method, data, resample=NEAREST, fill=1):
             w = box[2] - box[0]
             h = box[3] - box[1]
     
    @@ -2181,8 +2391,8 @@ class Image(object):
             elif method == EXTENT:
                 # convert extent to an affine transform
                 x0, y0, x1, y1 = data
    -            xs = float(x1 - x0) / w
    -            ys = float(y1 - y0) / h
    +            xs = (x1 - x0) / w
    +            ys = (y1 - y0) / h
                 method = AFFINE
                 data = (xs, 0, x0, 0, ys, y0)
     
    @@ -2199,16 +2409,41 @@ class Image(object):
                 x0, y0 = nw
                 As = 1.0 / w
                 At = 1.0 / h
    -            data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At,
    -                    (se[0]-sw[0]-ne[0]+x0)*As*At,
    -                    y0, (ne[1]-y0)*As, (sw[1]-y0)*At,
    -                    (se[1]-sw[1]-ne[1]+y0)*As*At)
    +            data = (
    +                x0,
    +                (ne[0] - x0) * As,
    +                (sw[0] - x0) * At,
    +                (se[0] - sw[0] - ne[0] + x0) * As * At,
    +                y0,
    +                (ne[1] - y0) * As,
    +                (sw[1] - y0) * At,
    +                (se[1] - sw[1] - ne[1] + y0) * As * At,
    +            )
     
             else:
                 raise ValueError("unknown transformation method")
     
             if resample not in (NEAREST, BILINEAR, BICUBIC):
    -            raise ValueError("unknown resampling filter")
    +            if resample in (BOX, HAMMING, LANCZOS):
    +                message = {
    +                    BOX: "Image.BOX",
    +                    HAMMING: "Image.HAMMING",
    +                    LANCZOS: "Image.LANCZOS/Image.ANTIALIAS",
    +                }[resample] + " ({}) cannot be used.".format(resample)
    +            else:
    +                message = "Unknown resampling filter ({}).".format(resample)
    +
    +            filters = [
    +                "{} ({})".format(filter[1], filter[0])
    +                for filter in (
    +                    (NEAREST, "Image.NEAREST"),
    +                    (BILINEAR, "Image.BILINEAR"),
    +                    (BICUBIC, "Image.BICUBIC"),
    +                )
    +            ]
    +            raise ValueError(
    +                message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1]
    +            )
     
             image.load()
     
    @@ -2245,6 +2480,7 @@ class Image(object):
         def toqimage(self):
             """Returns a QImage copy of this image"""
             from . import ImageQt
    +
             if not ImageQt.qt_is_installed:
                 raise ImportError("Qt bindings are not installed")
             return ImageQt.toqimage(self)
    @@ -2252,6 +2488,7 @@ class Image(object):
         def toqpixmap(self):
             """Returns a QPixmap copy of this image"""
             from . import ImageQt
    +
             if not ImageQt.qt_is_installed:
                 raise ImportError("Qt bindings are not installed")
             return ImageQt.toqpixmap(self)
    @@ -2260,12 +2497,13 @@ class Image(object):
     # --------------------------------------------------------------------
     # Abstract handlers.
     
    -class ImagePointHandler(object):
    +
    +class ImagePointHandler:
         # used as a mixin by point transforms (for use with im.point)
         pass
     
     
    -class ImageTransformHandler(object):
    +class ImageTransformHandler:
         # used as a mixin by geometry transforms (for use with im.transform)
         pass
     
    @@ -2276,6 +2514,7 @@ class ImageTransformHandler(object):
     #
     # Debugging
     
    +
     def _wedge():
         """Create greyscale wedge (for debugging only)"""
     
    @@ -2322,13 +2561,21 @@ def new(mode, size, color=0):
             # don't initialize
             return Image()._new(core.new(mode, size))
     
    -    if isStringType(color):
    +    if isinstance(color, str):
             # css3-style specifier
     
             from . import ImageColor
    +
             color = ImageColor.getcolor(color, mode)
     
    -    return Image()._new(core.fill(mode, size, color))
    +    im = Image()
    +    if mode == "P" and isinstance(color, (list, tuple)) and len(color) in [3, 4]:
    +        # RGB or RGBA value for a P image
    +        from . import ImagePalette
    +
    +        im.palette = ImagePalette.ImagePalette()
    +        color = im.palette.getcolor(color)
    +    return im._new(core.fill(mode, size, color))
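
A sketch of the new "P"-mode color handling in ``new()`` above::

    from PIL import Image

    # The RGB tuple is allocated in a fresh palette and the image is
    # filled with the resulting palette index.
    im = Image.new("P", (16, 16), (0, 128, 255))
    print(im.getpixel((0, 0)))  # a palette index; 0 for the first color
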
     
     
     def frombytes(mode, size, data, decoder_name="raw", *args):
    @@ -2370,8 +2617,9 @@ def frombytes(mode, size, data, decoder_name="raw", *args):
     
     
     def fromstring(*args, **kw):
    -    raise NotImplementedError("fromstring() has been removed. " +
    -                              "Please call frombytes() instead.")
    +    raise NotImplementedError(
    +        "fromstring() has been removed. Please call frombytes() instead."
    +    )
     
     
     def frombuffer(mode, size, data, decoder_name="raw", *args):
    @@ -2417,18 +2665,10 @@ def frombuffer(mode, size, data, decoder_name="raw", *args):
     
         if decoder_name == "raw":
             if args == ():
    -            warnings.warn(
    -                "the frombuffer defaults may change in a future release; "
    -                "for portability, change the call to read:\n"
    -                "  frombuffer(mode, size, data, 'raw', mode, 0, 1)",
    -                RuntimeWarning, stacklevel=2
    -            )
    -            args = mode, 0, -1  # may change to (mode, 0, 1) post-1.1.6
    +            args = mode, 0, 1
             if args[0] in _MAPMODES:
                 im = new(mode, (1, 1))
    -            im = im._new(
    -                core.map_buffer(data, size, decoder_name, None, 0, args)
    -                )
    +            im = im._new(core.map_buffer(data, size, decoder_name, 0, args))
                 im.readonly = 1
                 return im
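
With the 1.1.6 portability warning gone, the ``raw`` defaults above are now
simply ``(mode, 0, 1)``; a short sketch::

    from PIL import Image

    data = bytes(range(256)) * 4  # 1024 greyscale bytes
    im = Image.frombuffer("L", (32, 32), data)  # same as ("raw", "L", 0, 1)
    print(im.readonly)  # 1; mapped modes share memory with `data`
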
     
    @@ -2440,9 +2680,20 @@ def fromarray(obj, mode=None):
         Creates an image memory from an object exporting the array interface
         (using the buffer protocol).
     
    -    If obj is not contiguous, then the tobytes method is called
    +    If **obj** is not contiguous, then the tobytes method is called
         and :py:func:`~PIL.Image.frombuffer` is used.
     
    +    If you have an image in NumPy::
    +
    +      from PIL import Image
    +      import numpy as np
    +      im = Image.open('hopper.jpg')
    +      a = np.asarray(im)
    +
    +    Then this can be used to convert it to a Pillow image::
    +
    +      im = Image.fromarray(a)
    +
         :param obj: Object with array interface
         :param mode: Mode to use (will be determined from type if None)
           See: :ref:`concept-modes`.
    @@ -2451,16 +2702,18 @@ def fromarray(obj, mode=None):
         .. versionadded:: 1.1.6
         """
         arr = obj.__array_interface__
    -    shape = arr['shape']
    +    shape = arr["shape"]
         ndim = len(shape)
    -    strides = arr.get('strides', None)
    +    strides = arr.get("strides", None)
         if mode is None:
             try:
    -            typekey = (1, 1) + shape[2:], arr['typestr']
    +            typekey = (1, 1) + shape[2:], arr["typestr"]
    +        except KeyError:
    +            raise TypeError("Cannot handle this data type")
    +        try:
                 mode, rawmode = _fromarray_typemap[typekey]
             except KeyError:
    -            # print(typekey)
    -            raise TypeError("Cannot handle this data type")
    +            raise TypeError("Cannot handle this data type: %s, %s" % typekey)
         else:
             rawmode = mode
         if mode in ["1", "L", "I", "P", "F"]:
    @@ -2474,7 +2727,7 @@ def fromarray(obj, mode=None):
     
         size = shape[1], shape[0]
         if strides is not None:
    -        if hasattr(obj, 'tobytes'):
    +        if hasattr(obj, "tobytes"):
                 obj = obj.tobytes()
             else:
                 obj = obj.tostring()
    @@ -2485,6 +2738,7 @@ def fromarray(obj, mode=None):
     def fromqimage(im):
         """Creates an image instance from a QImage image"""
         from . import ImageQt
    +
         if not ImageQt.qt_is_installed:
             raise ImportError("Qt bindings are not installed")
         return ImageQt.fromqimage(im)
    @@ -2493,6 +2747,7 @@ def fromqimage(im):
     def fromqpixmap(im):
         """Creates an image instance from a QPixmap image"""
         from . import ImageQt
    +
         if not ImageQt.qt_is_installed:
             raise ImportError("Qt bindings are not installed")
         return ImageQt.fromqpixmap(im)
    @@ -2519,7 +2774,7 @@ _fromarray_typemap = {
         ((1, 1, 2), "|u1"): ("LA", "LA"),
         ((1, 1, 3), "|u1"): ("RGB", "RGB"),
         ((1, 1, 4), "|u1"): ("RGBA", "RGBA"),
    -    }
    +}
     
     # shortcuts
     _fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I")
    @@ -2535,15 +2790,15 @@ def _decompression_bomb_check(size):
         if pixels > 2 * MAX_IMAGE_PIXELS:
             raise DecompressionBombError(
                 "Image size (%d pixels) exceeds limit of %d pixels, "
    -            "could be decompression bomb DOS attack." %
    -            (pixels, 2 * MAX_IMAGE_PIXELS))
    +            "could be decompression bomb DOS attack." % (pixels, 2 * MAX_IMAGE_PIXELS)
    +        )
     
         if pixels > MAX_IMAGE_PIXELS:
             warnings.warn(
                 "Image size (%d pixels) exceeds limit of %d pixels, "
    -            "could be decompression bomb DOS attack." %
    -            (pixels, MAX_IMAGE_PIXELS),
    -            DecompressionBombWarning)
    +            "could be decompression bomb DOS attack." % (pixels, MAX_IMAGE_PIXELS),
    +            DecompressionBombWarning,
    +        )
     
     
     def open(fp, mode="r"):
    @@ -2562,19 +2817,27 @@ def open(fp, mode="r"):
            and be opened in binary mode.
         :param mode: The mode.  If given, this argument must be "r".
         :returns: An :py:class:`~PIL.Image.Image` object.
    -    :exception IOError: If the file cannot be found, or the image cannot be
    -       opened and identified.
    +    :exception FileNotFoundError: If the file cannot be found.
    +    :exception PIL.UnidentifiedImageError: If the image cannot be opened and
    +       identified.
    +    :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO``
    +       instance is used for ``fp``.
         """
     
         if mode != "r":
             raise ValueError("bad mode %r" % mode)
    +    elif isinstance(fp, io.StringIO):
    +        raise ValueError(
    +            "StringIO cannot be used to open an image. "
    +            "Binary data must be used instead."
    +        )
     
         exclusive_fp = False
         filename = ""
    -    if isPath(fp):
    -        filename = fp
    -    elif HAS_PATHLIB and isinstance(fp, Path):
    +    if isinstance(fp, Path):
             filename = str(fp.resolve())
    +    elif isPath(fp):
    +        filename = fp
     
         if filename:
             fp = builtins.open(filename, "rb")
    @@ -2590,11 +2853,16 @@ def open(fp, mode="r"):
     
         preinit()
     
    +    accept_warnings = []
    +
         def _open_core(fp, filename, prefix):
             for i in ID:
                 try:
                     factory, accept = OPEN[i]
    -                if not accept or accept(prefix):
    +                result = not accept or accept(prefix)
    +                if type(result) in [str, bytes]:
    +                    accept_warnings.append(result)
    +                elif result:
                         fp.seek(0)
                         im = factory(fp, filename)
                         _decompression_bomb_check(im.size)
    @@ -2604,6 +2872,10 @@ def open(fp, mode="r"):
                     # opening failures that are entirely expected.
                     # logger.debug("", exc_info=True)
                     continue
    +            except BaseException:
    +                if exclusive_fp:
    +                    fp.close()
    +                raise
             return None
     
         im = _open_core(fp, filename, prefix)
    @@ -2618,8 +2890,12 @@ def open(fp, mode="r"):
     
         if exclusive_fp:
             fp.close()
    -    raise IOError("cannot identify image file %r"
    -                  % (filename if filename else fp))
    +    for message in accept_warnings:
    +        warnings.warn(message)
    +    raise UnidentifiedImageError(
    +        "cannot identify image file %r" % (filename if filename else fp)
    +    )
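
A sketch of the sharper exception contract documented above, assuming a
hypothetical ``maybe_image.dat``::

    from PIL import Image, UnidentifiedImageError

    try:
        im = Image.open("maybe_image.dat")
    except FileNotFoundError:
        print("no such file")
    except UnidentifiedImageError:
        print("exists, but not a recognizable image format")
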
    +
     
     #
     # Image processing.
    @@ -2723,6 +2999,7 @@ def merge(mode, bands):
     # --------------------------------------------------------------------
     # Plugin registry
     
    +
     def register_open(id, factory, accept=None):
         """
         Register an image file plugin.  This function should not be used
    @@ -2836,6 +3113,7 @@ def register_encoder(name, encoder):
     # --------------------------------------------------------------------
     # Simple display support.  User code may override this.
     
    +
     def _show(image, **options):
         # override me, as necessary
         _showxv(image, **options)
    @@ -2843,12 +3121,14 @@ def _show(image, **options):
     
     def _showxv(image, title=None, **options):
         from . import ImageShow
    +
         ImageShow.show(image, title, **options)
     
     
     # --------------------------------------------------------------------
     # Effects
     
    +
     def effect_mandelbrot(size, extent, quality):
         """
         Generate a Mandelbrot set covering the given extent.
    @@ -2894,14 +3174,15 @@ def radial_gradient(mode):
     # --------------------------------------------------------------------
     # Resources
     
    +
     def _apply_env_variables(env=None):
         if env is None:
             env = os.environ
     
         for var_name, setter in [
    -        ('PILLOW_ALIGNMENT', core.set_alignment),
    -        ('PILLOW_BLOCK_SIZE', core.set_block_size),
    -        ('PILLOW_BLOCKS_MAX', core.set_blocks_max),
    +        ("PILLOW_ALIGNMENT", core.set_alignment),
    +        ("PILLOW_BLOCK_SIZE", core.set_block_size),
    +        ("PILLOW_BLOCKS_MAX", core.set_blocks_max),
         ]:
             if var_name not in env:
                 continue
    @@ -2909,22 +3190,228 @@ def _apply_env_variables(env=None):
             var = env[var_name].lower()
     
             units = 1
    -        for postfix, mul in [('k', 1024), ('m', 1024*1024)]:
    +        for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]:
                 if var.endswith(postfix):
                     units = mul
    -                var = var[:-len(postfix)]
    +                var = var[: -len(postfix)]
     
             try:
                 var = int(var) * units
             except ValueError:
    -            warnings.warn("{0} is not int".format(var_name))
    +            warnings.warn("{} is not int".format(var_name))
                 continue
     
             try:
                 setter(var)
             except ValueError as e:
    -            warnings.warn("{0}: {1}".format(var_name, e))
    +            warnings.warn("{}: {}".format(var_name, e))
     
     
     _apply_env_variables()
     atexit.register(core.clear_cache)
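
A sketch of the environment knobs parsed above (``PILLOW_ALIGNMENT``,
``PILLOW_BLOCK_SIZE``, ``PILLOW_BLOCKS_MAX``, with optional ``k``/``m``
suffixes), assuming the variable is set before the first import::

    import os

    # "1m" is parsed as 1024 * 1024; must be set before PIL.Image loads.
    os.environ["PILLOW_BLOCK_SIZE"] = "1m"

    from PIL import Image  # _apply_env_variables() runs at import time
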
    +
    +
    +class Exif(MutableMapping):
    +    endian = "<"
    +
    +    def __init__(self):
    +        self._data = {}
    +        self._ifds = {}
    +        self._info = None
    +        self._loaded_exif = None
    +
    +    def _fixup(self, value):
    +        try:
    +            if len(value) == 1 and not isinstance(value, dict):
    +                return value[0]
    +        except Exception:
    +            pass
    +        return value
    +
    +    def _fixup_dict(self, src_dict):
    +        # Helper function for _getexif()
    +        # returns a dict with any single item tuples/lists as individual values
    +        return {k: self._fixup(v) for k, v in src_dict.items()}
    +
    +    def _get_ifd_dict(self, tag):
    +        try:
    +            # an offset pointer to the location of the nested embedded IFD.
    +            # It should be a long, but may be corrupted.
    +            self.fp.seek(self[tag])
    +        except (KeyError, TypeError):
    +            pass
    +        else:
    +            from . import TiffImagePlugin
    +
    +            info = TiffImagePlugin.ImageFileDirectory_v1(self.head)
    +            info.load(self.fp)
    +            return self._fixup_dict(info)
    +
    +    def load(self, data):
    +        # Extract EXIF information.  This is highly experimental,
    +        # and is likely to be replaced with something better in a future
    +        # version.
    +
    +        # The EXIF record consists of a TIFF file embedded in a JPEG
    +        # application marker (!).
    +        if data == self._loaded_exif:
    +            return
    +        self._loaded_exif = data
    +        self._data.clear()
    +        self._ifds.clear()
    +        self._info = None
    +        if not data:
    +            return
    +
    +        self.fp = io.BytesIO(data[6:])
    +        self.head = self.fp.read(8)
    +        # process dictionary
    +        from . import TiffImagePlugin
    +
    +        self._info = TiffImagePlugin.ImageFileDirectory_v1(self.head)
    +        self.endian = self._info._endian
    +        self.fp.seek(self._info.next)
    +        self._info.load(self.fp)
    +
    +        # get EXIF extension
    +        ifd = self._get_ifd_dict(0x8769)
    +        if ifd:
    +            self._data.update(ifd)
    +            self._ifds[0x8769] = ifd
    +
    +    def tobytes(self, offset=0):
    +        from . import TiffImagePlugin
    +
    +        if self.endian == "<":
    +            head = b"II\x2A\x00\x08\x00\x00\x00"
    +        else:
    +            head = b"MM\x00\x2A\x00\x00\x00\x08"
    +        ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
    +        for tag, value in self.items():
    +            ifd[tag] = value
    +        return b"Exif\x00\x00" + head + ifd.tobytes(offset)
    +
    +    def get_ifd(self, tag):
    +        if tag not in self._ifds and tag in self:
    +            if tag in [0x8825, 0xA005]:
    +                # gpsinfo, interop
    +                self._ifds[tag] = self._get_ifd_dict(tag)
    +            elif tag == 0x927C:  # makernote
    +                from .TiffImagePlugin import ImageFileDirectory_v2
    +
    +                if self[0x927C][:8] == b"FUJIFILM":
    +                    exif_data = self[0x927C]
    +                    ifd_offset = i32le(exif_data[8:12])
    +                    ifd_data = exif_data[ifd_offset:]
    +
    +                    makernote = {}
+                    for i in range(0, struct.unpack("<H", ifd_data[:2])[0]):
+                        ifd_tag, typ, count, data = struct.unpack(
+                            "<HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2]
+                        )
+                        try:
+                            (
+                                unit_size,
+                                handler,
+                            ) = ImageFileDirectory_v2._load_dispatch[typ]
+                        except KeyError:
+                            continue
+                        size = count * unit_size
+                        if size > 4:
+                            (offset,) = struct.unpack("<L", data)
+                            data = ifd_data[offset - 12 : offset + size - 12]
+                        else:
+                            data = data[:size]
+
+                        if len(data) != size:
+                            warnings.warn(
+                                "Possibly corrupt EXIF MakerNote data.  "
+                                "Expecting to read %d bytes but only got %d."
+                                " Skipping tag %s" % (size, len(data), ifd_tag)
+                            )
+                            continue
+
+                        if not data:
+                            continue
+
+                        makernote[ifd_tag] = handler(
+                            ImageFileDirectory_v2(), data, False
+                        )
+                    self._ifds[0x927C] = dict(self._fixup_dict(makernote))
+                elif self.get(0x010F) == "Nintendo":
+                    ifd_data = self[0x927C]
+
+                    makernote = {}
+                    for i in range(0, struct.unpack(">H", ifd_data[:2])[0]):
    +                        ifd_tag, typ, count, data = struct.unpack(
    +                            ">HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2]
    +                        )
    +                        if ifd_tag == 0x1101:
    +                            # CameraInfo
    +                            (offset,) = struct.unpack(">L", data)
    +                            self.fp.seek(offset)
    +
    +                            camerainfo = {"ModelID": self.fp.read(4)}
    +
    +                            self.fp.read(4)
    +                            # Seconds since 2000
    +                            camerainfo["TimeStamp"] = i32le(self.fp.read(12))
    +
    +                            self.fp.read(4)
    +                            camerainfo["InternalSerialNumber"] = self.fp.read(4)
    +
    +                            self.fp.read(12)
    +                            parallax = self.fp.read(4)
    +                            handler = ImageFileDirectory_v2._load_dispatch[
    +                                TiffTags.FLOAT
    +                            ][1]
    +                            camerainfo["Parallax"] = handler(
    +                                ImageFileDirectory_v2(), parallax, False
    +                            )
    +
    +                            self.fp.read(4)
    +                            camerainfo["Category"] = self.fp.read(2)
    +
    +                            makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
    +                    self._ifds[0x927C] = makernote
    +        return self._ifds.get(tag, {})
    +
    +    def __str__(self):
    +        if self._info is not None:
    +            # Load all keys into self._data
    +            for tag in self._info.keys():
    +                self[tag]
    +
    +        return str(self._data)
    +
    +    def __len__(self):
    +        keys = set(self._data)
    +        if self._info is not None:
    +            keys.update(self._info)
    +        return len(keys)
    +
    +    def __getitem__(self, tag):
    +        if self._info is not None and tag not in self._data and tag in self._info:
    +            self._data[tag] = self._fixup(self._info[tag])
    +            if tag == 0x8825:
    +                self._data[tag] = self.get_ifd(tag)
    +            del self._info[tag]
    +        return self._data[tag]
    +
    +    def __contains__(self, tag):
    +        return tag in self._data or (self._info is not None and tag in self._info)
    +
    +    def __setitem__(self, tag, value):
    +        if self._info is not None and tag in self._info:
    +            del self._info[tag]
    +        self._data[tag] = value
    +
    +    def __delitem__(self, tag):
    +        if self._info is not None and tag in self._info:
    +            del self._info[tag]
    +        del self._data[tag]
    +
    +    def __iter__(self):
    +        keys = set(self._data)
    +        if self._info is not None:
    +            keys.update(self._info)
    +        return iter(keys)
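
    A note on the Exif class completed above: it is a lazy mapping, so tag values are only decoded out of self._info on first access, and get_ifd() resolves nested IFDs (GPS at 0x8825, interop at 0xA005, MakerNote at 0x927C) on demand. A minimal usage sketch against that API (the input filename is hypothetical):

        from PIL import Image

        im = Image.open("photo.jpg")  # hypothetical input file
        exif = im.getexif()           # the Exif mapping defined above

        # Plain dict-style access; 0x0110 is the camera model tag.
        print(exif.get(0x0110))

        # Nested IFDs are decoded on demand; 0x8825 is the GPS IFD.
        print(exif.get_ifd(0x8825))
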
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageChops.py b/server/www/packages/packages-windows/x86/PIL/ImageChops.py
    index 8901673..2d13b52 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageChops.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageChops.py
    @@ -54,7 +54,7 @@ def invert(image):
     def lighter(image1, image2):
         """
         Compares the two images, pixel by pixel, and returns a new image containing
    -    the lighter values.
    +    the lighter values. At least one of the images must have mode "1".
     
         .. code-block:: python
     
    @@ -70,8 +70,8 @@ def lighter(image1, image2):
     
     def darker(image1, image2):
         """
    -    Compares the two images, pixel by pixel, and returns a new image
    -    containing the darker values.
    +    Compares the two images, pixel by pixel, and returns a new image containing
    +    the darker values. At least one of the images must have mode "1".
     
         .. code-block:: python
     
    @@ -88,7 +88,7 @@ def darker(image1, image2):
     def difference(image1, image2):
         """
         Returns the absolute value of the pixel-by-pixel difference between the two
    -    images.
    +    images. At least one of the images must have mode "1".
     
         .. code-block:: python
     
    @@ -107,7 +107,8 @@ def multiply(image1, image2):
         Superimposes two images on top of each other.
     
         If you multiply an image with a solid black image, the result is black. If
    -    you multiply with a solid white image, the image is unaffected.
    +    you multiply with a solid white image, the image is unaffected. At least
    +    one of the images must have mode "1".
     
         .. code-block:: python
     
    @@ -123,7 +124,8 @@ def multiply(image1, image2):
     
     def screen(image1, image2):
         """
    -    Superimposes two inverted images on top of each other.
    +    Superimposes two inverted images on top of each other. At least one of the
    +    images must have mode "1".
     
         .. code-block:: python
     
    @@ -137,10 +139,47 @@ def screen(image1, image2):
         return image1._new(image1.im.chop_screen(image2.im))
     
     
    +def soft_light(image1, image2):
    +    """
    +    Superimposes two images on top of each other using the Soft Light algorithm.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_soft_light(image2.im))
    +
    +
    +def hard_light(image1, image2):
    +    """
    +    Superimposes two images on top of each other using the Hard Light algorithm.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_hard_light(image2.im))
    +
    +
    +def overlay(image1, image2):
    +    """
    +    Superimposes two images on top of each other using the Overlay algorithm.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_overlay(image2.im))
    +
    +
     def add(image1, image2, scale=1.0, offset=0):
         """
         Adds two images, dividing the result by scale and adding the
         offset. If omitted, scale defaults to 1.0, and offset to 0.0.
    +    At least one of the images must have mode "1".
     
         .. code-block:: python
     
    @@ -156,8 +195,9 @@ def add(image1, image2, scale=1.0, offset=0):
     
     def subtract(image1, image2, scale=1.0, offset=0):
         """
    -    Subtracts two images, dividing the result by scale and adding the
    -    offset. If omitted, scale defaults to 1.0, and offset to 0.0.
    +    Subtracts two images, dividing the result by scale and adding the offset.
    +    If omitted, scale defaults to 1.0, and offset to 0.0. At least one of the
    +    images must have mode "1".
     
         .. code-block:: python
     
    @@ -172,7 +212,8 @@ def subtract(image1, image2, scale=1.0, offset=0):
     
     
     def add_modulo(image1, image2):
    -    """Add two images, without clipping the result.
    +    """Add two images, without clipping the result. At least one of the images
    +    must have mode "1".
     
         .. code-block:: python
     
    @@ -187,7 +228,8 @@ def add_modulo(image1, image2):
     
     
     def subtract_modulo(image1, image2):
    -    """Subtract two images, without clipping the result.
    +    """Subtract two images, without clipping the result. At least one of the
    +    images must have mode "1".
     
         .. code-block:: python
     
    @@ -202,7 +244,8 @@ def subtract_modulo(image1, image2):
     
     
     def logical_and(image1, image2):
    -    """Logical AND between two images.
    +    """Logical AND between two images. At least one of the images must have
    +    mode "1".
     
         .. code-block:: python
     
    @@ -217,7 +260,8 @@ def logical_and(image1, image2):
     
     
     def logical_or(image1, image2):
    -    """Logical OR between two images.
    +    """Logical OR between two images. At least one of the images must have
    +    mode "1".
     
         .. code-block:: python
     
    @@ -232,7 +276,8 @@ def logical_or(image1, image2):
     
     
     def logical_xor(image1, image2):
    -    """Logical XOR between two images.
    +    """Logical XOR between two images. At least one of the images must have
    +    mode "1".
     
         .. code-block:: python
     
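    The three new blend functions added above (soft_light, hard_light, overlay) all follow the existing ImageChops pattern: load both images, call the corresponding C-level chop_* routine, and wrap the result with image1._new(). A short sketch of calling them; both images must share mode and size, and the filenames are hypothetical:

        from PIL import Image, ImageChops

        base = Image.open("base.png").convert("RGB")    # hypothetical inputs
        layer = Image.open("layer.png").convert("RGB")

        soft = ImageChops.soft_light(base, layer)
        hard = ImageChops.hard_light(base, layer)
        over = ImageChops.overlay(base, layer)
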
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageCms.py b/server/www/packages/packages-windows/x86/PIL/ImageCms.py
    index d82e30e..661c3f3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageCms.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageCms.py
    @@ -15,18 +15,18 @@
     # See the README file for information on usage and redistribution.  See
     # below for the original description.
     
    -from __future__ import print_function
     import sys
     
     from PIL import Image
    +
     try:
         from PIL import _imagingcms
     except ImportError as ex:
         # Allow error import for doc purposes, but error out when accessing
         # anything in core.
    -    from _util import deferred_error
    +    from ._util import deferred_error
    +
         _imagingcms = deferred_error(ex)
    -from PIL._util import isStringType
     
     DESCRIPTION = """
     pyCMS
    @@ -132,7 +132,7 @@ FLAGS = {
         "SOFTPROOFING": 16384,  # Do softproofing
         "PRESERVEBLACK": 32768,  # Black preservation
         "NODEFAULTRESOURCEDEF": 16777216,  # CRD special
    -    "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16  # Gridpoints
    +    "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16,  # Gridpoints
     }
     
     _MAX_FLAG = 0
    @@ -148,8 +148,8 @@ for flag in FLAGS.values():
     ##
     # Profile.
     
    -class ImageCmsProfile(object):
     
    +class ImageCmsProfile:
         def __init__(self, profile):
             """
             :param profile: Either a string representing a filename,
    @@ -158,7 +158,7 @@ class ImageCmsProfile(object):
     
             """
     
    -        if isStringType(profile):
    +        if isinstance(profile, str):
                 self._set(core.profile_open(profile), profile)
             elif hasattr(profile, "read"):
                 self._set(core.profile_frombytes(profile.read()))
    @@ -197,22 +197,31 @@ class ImageCmsTransform(Image.ImagePointHandler):
         Will return the output profile in the output.info['icc_profile'].
         """
     
    -    def __init__(self, input, output, input_mode, output_mode,
    -                 intent=INTENT_PERCEPTUAL, proof=None,
    -                 proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0):
    +    def __init__(
    +        self,
    +        input,
    +        output,
    +        input_mode,
    +        output_mode,
    +        intent=INTENT_PERCEPTUAL,
    +        proof=None,
    +        proof_intent=INTENT_ABSOLUTE_COLORIMETRIC,
    +        flags=0,
    +    ):
             if proof is None:
                 self.transform = core.buildTransform(
    -                input.profile, output.profile,
    -                input_mode, output_mode,
    -                intent,
    -                flags
    +                input.profile, output.profile, input_mode, output_mode, intent, flags
                 )
             else:
                 self.transform = core.buildProofTransform(
    -                input.profile, output.profile, proof.profile,
    -                input_mode, output_mode,
    -                intent, proof_intent,
    -                flags
    +                input.profile,
    +                output.profile,
    +                proof.profile,
    +                input_mode,
    +                output_mode,
    +                intent,
    +                proof_intent,
    +                flags,
                 )
             # Note: inputMode and outputMode are for pyCMS compatibility only
             self.input_mode = self.inputMode = input_mode
    @@ -228,7 +237,7 @@ class ImageCmsTransform(Image.ImagePointHandler):
             if imOut is None:
                 imOut = Image.new(self.output_mode, im.size, None)
             self.transform.apply(im.im.id, imOut.im.id)
    -        imOut.info['icc_profile'] = self.output_profile.tobytes()
    +        imOut.info["icc_profile"] = self.output_profile.tobytes()
             return imOut
     
         def apply_in_place(self, im):
    @@ -236,7 +245,7 @@ class ImageCmsTransform(Image.ImagePointHandler):
             if im.mode != self.output_mode:
                 raise ValueError("mode mismatch")  # wrong output mode
             self.transform.apply(im.im.id, im.im.id)
    -        im.info['icc_profile'] = self.output_profile.tobytes()
    +        im.info["icc_profile"] = self.output_profile.tobytes()
             return im
     
     
    @@ -245,19 +254,17 @@ def get_display_profile(handle=None):
         :returns: None if the profile is not known.
         """
     
    -    if sys.platform == "win32":
    -        from PIL import ImageWin
    -        if isinstance(handle, ImageWin.HDC):
    -            profile = core.get_display_profile_win32(handle, 1)
    -        else:
    -            profile = core.get_display_profile_win32(handle or 0)
    +    if sys.platform != "win32":
    +        return None
    +
    +    from PIL import ImageWin
    +
    +    if isinstance(handle, ImageWin.HDC):
    +        profile = core.get_display_profile_win32(handle, 1)
         else:
    -        try:
    -            get = _imagingcms.get_display_profile
    -        except AttributeError:
    -            return None
    -        else:
    -            profile = get()
    +        profile = core.get_display_profile_win32(handle or 0)
    +    if profile is None:
    +        return None
         return ImageCmsProfile(profile)
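
    With the early return above, get_display_profile() now yields None on every non-Windows platform rather than probing _imagingcms, so callers should expect a None result. A defensive call might look like this sketch:

        from PIL import ImageCms

        profile = ImageCms.get_display_profile()
        if profile is None:
            # Not on Windows, or no profile is configured for the display.
            print("no display profile available")
        else:
            print(ImageCms.getProfileName(profile))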
     
     
    @@ -265,22 +272,30 @@ def get_display_profile(handle=None):
     # pyCMS compatible layer
     # --------------------------------------------------------------------.
     
    +
     class PyCMSError(Exception):
     
         """ (pyCMS) Exception class.
         This is used for all errors in the pyCMS API. """
    +
         pass
     
     
     def profileToProfile(
    -        im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL,
    -        outputMode=None, inPlace=0, flags=0):
    +    im,
    +    inputProfile,
    +    outputProfile,
    +    renderingIntent=INTENT_PERCEPTUAL,
    +    outputMode=None,
    +    inPlace=False,
    +    flags=0,
    +):
         """
         (pyCMS) Applies an ICC transformation to a given image, mapping from
         inputProfile to outputProfile.
     
         If the input or output profiles specified are not valid filenames, a
    -    PyCMSError will be raised.  If inPlace == TRUE and outputMode != im.mode,
    +    PyCMSError will be raised.  If inPlace is True and outputMode != im.mode,
         a PyCMSError will be raised.  If an error occurs during application of
         the profiles, a PyCMSError will be raised.  If outputMode is not a mode
         supported by the outputProfile (or by pyCMS), a PyCMSError will be
    @@ -305,10 +320,10 @@ def profileToProfile(
         :param renderingIntent: Integer (0-3) specifying the rendering intent you
             wish to use for the transform
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
             they do.
    @@ -317,9 +332,9 @@ def profileToProfile(
             MUST be the same mode as the input, or omitted completely.  If
             omitted, the outputMode will be the same as the mode of the input
             image (im.mode)
    -    :param inPlace: Boolean (1 = True, None or 0 = False).  If True, the
    -        original image is modified in-place, and None is returned.  If False
    -        (default), a new Image object is returned with the transform applied.
    +    :param inPlace: Boolean.  If True, the original image is modified in-place,
    +        and None is returned.  If False (default), a new Image object is
    +        returned with the transform applied.
         :param flags: Integer (0-...) specifying additional flags
         :returns: Either None or a new PIL image object, depending on value of
             inPlace
    @@ -333,8 +348,7 @@ def profileToProfile(
             raise PyCMSError("renderingIntent must be an integer between 0 and 3")
     
         if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
    -        raise PyCMSError(
    -            "flags must be an integer between 0 and %s" + _MAX_FLAG)
    +        raise PyCMSError("flags must be an integer between 0 and %s" % _MAX_FLAG)
     
         try:
             if not isinstance(inputProfile, ImageCmsProfile):
    @@ -342,15 +356,19 @@ def profileToProfile(
             if not isinstance(outputProfile, ImageCmsProfile):
                 outputProfile = ImageCmsProfile(outputProfile)
             transform = ImageCmsTransform(
    -            inputProfile, outputProfile, im.mode, outputMode,
    -            renderingIntent, flags=flags
    +            inputProfile,
    +            outputProfile,
    +            im.mode,
    +            outputMode,
    +            renderingIntent,
    +            flags=flags,
             )
             if inPlace:
                 transform.apply_in_place(im)
                 imOut = None
             else:
                 imOut = transform.apply(im)
    -    except (IOError, TypeError, ValueError) as v:
    +    except (OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
         return imOut
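
    profileToProfile() remains the one-shot API: it builds a transform and applies it in a single call, returning a new image, or None when inPlace=True. A sketch, assuming the two ICC profile files exist at the hypothetical paths shown:

        from PIL import Image, ImageCms

        im = Image.open("scan.tif")  # hypothetical input
        converted = ImageCms.profileToProfile(
            im,
            "input_profile.icc",     # hypothetical profile paths
            "srgb_profile.icc",
            renderingIntent=ImageCms.INTENT_PERCEPTUAL,
        )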
    @@ -374,13 +392,18 @@ def getOpenProfile(profileFilename):
     
         try:
             return ImageCmsProfile(profileFilename)
    -    except (IOError, TypeError, ValueError) as v:
    +    except (OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
     def buildTransform(
    -        inputProfile, outputProfile, inMode, outMode,
    -        renderingIntent=INTENT_PERCEPTUAL, flags=0):
    +    inputProfile,
    +    outputProfile,
    +    inMode,
    +    outMode,
    +    renderingIntent=INTENT_PERCEPTUAL,
    +    flags=0,
    +):
         """
         (pyCMS) Builds an ICC transform mapping from the inputProfile to the
         outputProfile.  Use applyTransform to apply the transform to a given
    @@ -424,10 +447,10 @@ def buildTransform(
         :param renderingIntent: Integer (0-3) specifying the rendering intent you
             wish to use for the transform
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
             they do.
    @@ -440,8 +463,7 @@ def buildTransform(
             raise PyCMSError("renderingIntent must be an integer between 0 and 3")
     
         if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
    -        raise PyCMSError(
    -            "flags must be an integer between 0 and %s" + _MAX_FLAG)
    +        raise PyCMSError("flags must be an integer between 0 and %s" % _MAX_FLAG)
     
         try:
             if not isinstance(inputProfile, ImageCmsProfile):
    @@ -449,17 +471,22 @@ def buildTransform(
             if not isinstance(outputProfile, ImageCmsProfile):
                 outputProfile = ImageCmsProfile(outputProfile)
             return ImageCmsTransform(
    -            inputProfile, outputProfile, inMode, outMode,
    -            renderingIntent, flags=flags)
    -    except (IOError, TypeError, ValueError) as v:
    +            inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags
    +        )
    +    except (OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
     def buildProofTransform(
    -        inputProfile, outputProfile, proofProfile, inMode, outMode,
    -        renderingIntent=INTENT_PERCEPTUAL,
    -        proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC,
    -        flags=FLAGS["SOFTPROOFING"]):
    +    inputProfile,
    +    outputProfile,
    +    proofProfile,
    +    inMode,
    +    outMode,
    +    renderingIntent=INTENT_PERCEPTUAL,
    +    proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC,
    +    flags=FLAGS["SOFTPROOFING"],
    +):
         """
         (pyCMS) Builds an ICC transform mapping from the inputProfile to the
         outputProfile, but tries to simulate the result that would be
    @@ -512,20 +539,20 @@ def buildProofTransform(
         :param renderingIntent: Integer (0-3) specifying the rendering intent you
             wish to use for the input->proof (simulated) transform
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
             they do.
    -    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you
    -        wish to use for proof->output transform
    +    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
    +        you wish to use for proof->output transform
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
             they do.
    @@ -538,8 +565,7 @@ def buildProofTransform(
             raise PyCMSError("renderingIntent must be an integer between 0 and 3")
     
         if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
    -        raise PyCMSError(
    -            "flags must be an integer between 0 and %s" + _MAX_FLAG)
    +        raise PyCMSError("flags must be an integer between 0 and %s" % _MAX_FLAG)
     
         try:
             if not isinstance(inputProfile, ImageCmsProfile):
    @@ -549,9 +575,16 @@ def buildProofTransform(
             if not isinstance(proofProfile, ImageCmsProfile):
                 proofProfile = ImageCmsProfile(proofProfile)
             return ImageCmsTransform(
    -            inputProfile, outputProfile, inMode, outMode, renderingIntent,
    -            proofProfile, proofRenderingIntent, flags)
    -    except (IOError, TypeError, ValueError) as v:
    +            inputProfile,
    +            outputProfile,
    +            inMode,
    +            outMode,
    +            renderingIntent,
    +            proofProfile,
    +            proofRenderingIntent,
    +            flags,
    +        )
    +    except (OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -559,16 +592,16 @@ buildTransformFromOpenProfiles = buildTransform
     buildProofTransformFromOpenProfiles = buildProofTransform
     
     
    -def applyTransform(im, transform, inPlace=0):
    +def applyTransform(im, transform, inPlace=False):
         """
         (pyCMS) Applies a transform to a given image.
     
         If im.mode != transform.inMode, a PyCMSError is raised.
     
    -    If inPlace == TRUE and transform.inMode != transform.outMode, a
    +    If inPlace is True and transform.inMode != transform.outMode, a
         PyCMSError is raised.
     
    -    If im.mode, transfer.inMode, or transfer.outMode is not supported by
    +    If im.mode, transform.inMode, or transform.outMode is not supported by
         pyCMSdll or the profiles you used for the transform, a PyCMSError is
         raised.
     
    @@ -581,7 +614,7 @@ def applyTransform(im, transform, inPlace=0):
         considerable calculation time if doing the same conversion multiple times.
     
         If you want to modify im in-place instead of receiving a new image as
    -    the return value, set inPlace to TRUE.  This can only be done if
    +    the return value, set inPlace to True.  This can only be done if
         transform.inMode and transform.outMode are the same, because we can't
         change the mode in-place (the buffer sizes for some modes are
         different).  The default behavior is to return a new Image object of
    @@ -590,10 +623,9 @@ def applyTransform(im, transform, inPlace=0):
         :param im: A PIL Image object, and im.mode must be the same as the inMode
             supported by the transform.
         :param transform: A valid CmsTransform class object
    -    :param inPlace: Bool (1 == True, 0 or None == False).  If True, im is
    -        modified in place and None is returned, if False, a new Image object
    -        with the transform applied is returned (and im is not changed). The
    -        default is False.
    +    :param inPlace: Bool.  If True, im is modified in place and None is
    +        returned, if False, a new Image object with the transform applied is
    +        returned (and im is not changed). The default is False.
         :returns: Either None, or a new PIL Image object, depending on the value of
             inPlace. The profile will be returned in the image's
             info['icc_profile'].
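
    The docstring above makes the performance trade-off explicit: building the transform is the expensive step, so batch conversions should create it once with buildTransform() and reuse it through applyTransform(). A sketch of that pattern (profile paths and file list are hypothetical):

        from PIL import Image, ImageCms

        # Built once; this is the costly operation.
        transform = ImageCms.buildTransform(
            "input_profile.icc", "srgb_profile.icc",  # hypothetical paths
            "RGB", "RGB",
        )

        for name in ["a.png", "b.png"]:  # hypothetical file list
            im = Image.open(name).convert("RGB")
            out = ImageCms.applyTransform(im, transform)  # inPlace defaults to False
            out.save("converted_" + name)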
    @@ -642,15 +674,16 @@ def createProfile(colorSpace, colorTemp=-1):
         if colorSpace not in ["LAB", "XYZ", "sRGB"]:
             raise PyCMSError(
                 "Color space not supported for on-the-fly profile creation (%s)"
    -            % colorSpace)
    +            % colorSpace
    +        )
     
         if colorSpace == "LAB":
             try:
                 colorTemp = float(colorTemp)
    -        except:
    +        except (TypeError, ValueError):
                 raise PyCMSError(
    -                "Color temperature must be numeric, \"%s\" not valid"
    -                % colorTemp)
    +                'Color temperature must be numeric, "%s" not valid' % colorTemp
    +            )
     
         try:
             return core.createProfile(colorSpace, colorTemp)
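
    Since createProfile() builds LAB, XYZ, and sRGB profiles in memory, it pairs naturally with buildTransform() for conversions into a standard space without shipping an .icc file. A short sketch:

        from PIL import ImageCms

        srgb = ImageCms.createProfile("sRGB")

        # LAB additionally accepts a numeric color temperature.
        lab = ImageCms.createProfile("LAB", colorTemp=6500)

        rgb2lab = ImageCms.buildTransform(srgb, lab, "RGB", "LAB")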
    @@ -687,16 +720,16 @@ def getProfileName(profile):
             #    // name was "%s - %s" (model, manufacturer) || Description ,
             #    // but if the Model and Manufacturer were the same or the model
             #    // was long, Just the model,  in 1.x
    -        model = profile.profile.product_model
    -        manufacturer = profile.profile.product_manufacturer
    +        model = profile.profile.model
    +        manufacturer = profile.profile.manufacturer
     
             if not (model or manufacturer):
    -            return profile.profile.product_description + "\n"
    +            return (profile.profile.profile_description or "") + "\n"
             if not manufacturer or len(model) > 30:
                 return model + "\n"
    -        return "%s - %s\n" % (model, manufacturer)
    +        return "{} - {}\n".format(model, manufacturer)
     
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -727,16 +760,16 @@ def getProfileInfo(profile):
             # add an extra newline to preserve pyCMS compatibility
             # Python, not C. the white point bits weren't working well,
             # so skipping.
    -        #    // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint
    -        description = profile.profile.product_description
    -        cpright = profile.profile.product_copyright
    +        # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint
    +        description = profile.profile.profile_description
    +        cpright = profile.profile.copyright
             arr = []
             for elt in (description, cpright):
                 if elt:
                     arr.append(elt)
             return "\r\n\r\n".join(arr) + "\r\n\r\n"
     
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -763,8 +796,8 @@ def getProfileCopyright(profile):
             # add an extra newline to preserve pyCMS compatibility
             if not isinstance(profile, ImageCmsProfile):
                 profile = ImageCmsProfile(profile)
    -        return profile.profile.product_copyright + "\n"
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        return (profile.profile.copyright or "") + "\n"
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -791,8 +824,8 @@ def getProfileManufacturer(profile):
             # add an extra newline to preserve pyCMS compatibility
             if not isinstance(profile, ImageCmsProfile):
                 profile = ImageCmsProfile(profile)
    -        return profile.profile.product_manufacturer + "\n"
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        return (profile.profile.manufacturer or "") + "\n"
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -820,8 +853,8 @@ def getProfileModel(profile):
             # add an extra newline to preserve pyCMS compatibility
             if not isinstance(profile, ImageCmsProfile):
                 profile = ImageCmsProfile(profile)
    -        return profile.profile.product_model + "\n"
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        return (profile.profile.model or "") + "\n"
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -849,8 +882,8 @@ def getProfileDescription(profile):
             # add an extra newline to preserve pyCMS compatibility
             if not isinstance(profile, ImageCmsProfile):
                 profile = ImageCmsProfile(profile)
    -        return profile.profile.product_description + "\n"
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        return (profile.profile.profile_description or "") + "\n"
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
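
    The profile getters in this stretch of the diff (name, info, copyright, manufacturer, model, description) all moved from the removed product_* attributes to the current model / manufacturer / profile_description / copyright attributes, guarding missing fields so an absent value yields just the trailing newline instead of a TypeError. Call sites are unchanged; a sketch with a hypothetical profile path:

        from PIL import ImageCms

        profile = ImageCms.getOpenProfile("some_profile.icc")  # hypothetical
        print(ImageCms.getProfileName(profile))
        print(ImageCms.getProfileDescription(profile))
        print(ImageCms.getProfileCopyright(profile))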
     
     
    @@ -875,10 +908,10 @@ def getDefaultIntent(profile):
         :returns: Integer 0-3 specifying the default rendering intent for this
             profile.
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
                 they do.
    @@ -889,7 +922,7 @@ def getDefaultIntent(profile):
             if not isinstance(profile, ImageCmsProfile):
                 profile = ImageCmsProfile(profile)
             return profile.profile.rendering_intent
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -913,15 +946,15 @@ def isIntentSupported(profile, intent, direction):
         :param intent: Integer (0-3) specifying the rendering intent you wish to
             use with this profile
     
    -            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    -            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    -            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    -            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +            ImageCms.INTENT_PERCEPTUAL            = 0 (DEFAULT)
    +            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
    +            ImageCms.INTENT_SATURATION            = 2
    +            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3
     
             see the pyCMS documentation for details on rendering intents and what
                 they do.
    -    :param direction: Integer specifying if the profile is to be used for input,
    -        output, or proof
    +    :param direction: Integer specifying if the profile is to be used for
    +        input, output, or proof
     
                 INPUT  = 0 (or use ImageCms.DIRECTION_INPUT)
                 OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT)
    @@ -940,7 +973,7 @@ def isIntentSupported(profile, intent, direction):
                 return 1
             else:
                 return -1
    -    except (AttributeError, IOError, TypeError, ValueError) as v:
    +    except (AttributeError, OSError, TypeError, ValueError) as v:
             raise PyCMSError(v)
     
     
    @@ -949,7 +982,4 @@ def versions():
         (pyCMS) Fetches versions.
         """
     
    -    return (
    -        VERSION, core.littlecms_version,
    -        sys.version.split()[0], Image.VERSION
    -    )
    +    return (VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__)
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageColor.py b/server/www/packages/packages-windows/x86/PIL/ImageColor.py
    index 08c00fd..9cf7a99 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageColor.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageColor.py
    @@ -17,9 +17,10 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image
     import re
     
    +from . import Image
    +
     
     def getrgb(color):
         """
    @@ -41,89 +42,77 @@ def getrgb(color):
             return rgb
     
         # check for known string formats
    -    if re.match('#[a-f0-9]{3}$', color):
    -        return (
    -            int(color[1]*2, 16),
    -            int(color[2]*2, 16),
    -            int(color[3]*2, 16),
    -            )
    +    if re.match("#[a-f0-9]{3}$", color):
    +        return (int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16))
     
    -    if re.match('#[a-f0-9]{4}$', color):
    +    if re.match("#[a-f0-9]{4}$", color):
             return (
    -            int(color[1]*2, 16),
    -            int(color[2]*2, 16),
    -            int(color[3]*2, 16),
    -            int(color[4]*2, 16),
    -            )
    +            int(color[1] * 2, 16),
    +            int(color[2] * 2, 16),
    +            int(color[3] * 2, 16),
    +            int(color[4] * 2, 16),
    +        )
     
    -    if re.match('#[a-f0-9]{6}$', color):
    -        return (
    -            int(color[1:3], 16),
    -            int(color[3:5], 16),
    -            int(color[5:7], 16),
    -            )
    +    if re.match("#[a-f0-9]{6}$", color):
    +        return (int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16))
     
    -    if re.match('#[a-f0-9]{8}$', color):
    +    if re.match("#[a-f0-9]{8}$", color):
             return (
                 int(color[1:3], 16),
                 int(color[3:5], 16),
                 int(color[5:7], 16),
                 int(color[7:9], 16),
    -            )
    +        )
     
         m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
         if m:
    -        return (
    -            int(m.group(1)),
    -            int(m.group(2)),
    -            int(m.group(3))
    -            )
    +        return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
     
         m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
         if m:
             return (
                 int((int(m.group(1)) * 255) / 100.0 + 0.5),
                 int((int(m.group(2)) * 255) / 100.0 + 0.5),
    -            int((int(m.group(3)) * 255) / 100.0 + 0.5)
    -            )
    +            int((int(m.group(3)) * 255) / 100.0 + 0.5),
    +        )
     
    -    m = re.match(r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color)
    +    m = re.match(
    +        r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
    +    )
         if m:
             from colorsys import hls_to_rgb
    +
             rgb = hls_to_rgb(
                 float(m.group(1)) / 360.0,
                 float(m.group(3)) / 100.0,
                 float(m.group(2)) / 100.0,
    -            )
    +        )
             return (
                 int(rgb[0] * 255 + 0.5),
                 int(rgb[1] * 255 + 0.5),
    -            int(rgb[2] * 255 + 0.5)
    -            )
    +            int(rgb[2] * 255 + 0.5),
    +        )
     
    -    m = re.match(r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color)
    +    m = re.match(
    +        r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
    +    )
         if m:
             from colorsys import hsv_to_rgb
    +
             rgb = hsv_to_rgb(
                 float(m.group(1)) / 360.0,
                 float(m.group(2)) / 100.0,
                 float(m.group(3)) / 100.0,
    -            )
    +        )
             return (
                 int(rgb[0] * 255 + 0.5),
                 int(rgb[1] * 255 + 0.5),
    -            int(rgb[2] * 255 + 0.5)
    -            )
    +            int(rgb[2] * 255 + 0.5),
    +        )
     
    -    m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$",
    -                 color)
    +    m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
         if m:
    -        return (
    -            int(m.group(1)),
    -            int(m.group(2)),
    -            int(m.group(3)),
    -            int(m.group(4))
    -            )
    +        return (int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4)))
         raise ValueError("unknown color specifier: %r" % color)
     
     
    @@ -145,11 +134,13 @@ def getcolor(color, mode):
     
         if Image.getmodebase(mode) == "L":
             r, g, b = color
    -        color = (r*299 + g*587 + b*114)//1000
    -        if mode[-1] == 'A':
    +        # ITU-R Recommendation 601-2 for nonlinear RGB
    +        # scaled to 24 bits to match the convert's implementation.
    +        color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16
    +        if mode[-1] == "A":
                 return (color, alpha)
         else:
    -        if mode[-1] == 'A':
    +        if mode[-1] == "A":
                 return color + (alpha,)
         return color
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageDraw.py b/server/www/packages/packages-windows/x86/PIL/ImageDraw.py
    index 5bc8902..7abd459 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageDraw.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageDraw.py
    @@ -30,10 +30,11 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import math
     import numbers
     
     from . import Image, ImageColor
    -from ._util import isStringType
    +
     
     """
     A simple 2D drawing interface for PIL images.
    @@ -43,8 +44,7 @@ directly.
     """
     
     
    -class ImageDraw(object):
    -
    +class ImageDraw:
         def __init__(self, im, mode=None):
             """
             Create a drawing instance.
    @@ -75,9 +75,9 @@ class ImageDraw(object):
             self.draw = Image.core.draw(self.im, blend)
             self.mode = mode
             if mode in ("I", "F"):
    -            self.ink = self.draw.draw_ink(1, mode)
    +            self.ink = self.draw.draw_ink(1)
             else:
    -            self.ink = self.draw.draw_ink(-1, mode)
    +            self.ink = self.draw.draw_ink(-1)
             if mode in ("1", "P", "I", "F"):
                 # FIXME: fix Fill2 to properly support matte for I+F images
                 self.fontmode = "1"
    @@ -94,6 +94,7 @@ class ImageDraw(object):
             if not self.font:
                 # FIXME: should add a font repository
                 from . import ImageFont
    +
                 self.font = ImageFont.load_default()
             return self.font
     
    @@ -105,24 +106,24 @@ class ImageDraw(object):
                     ink = self.ink
             else:
                 if ink is not None:
    -                if isStringType(ink):
    +                if isinstance(ink, str):
                         ink = ImageColor.getcolor(ink, self.mode)
                     if self.palette and not isinstance(ink, numbers.Number):
                         ink = self.palette.getcolor(ink)
    -                ink = self.draw.draw_ink(ink, self.mode)
    +                ink = self.draw.draw_ink(ink)
                 if fill is not None:
    -                if isStringType(fill):
    +                if isinstance(fill, str):
                         fill = ImageColor.getcolor(fill, self.mode)
                     if self.palette and not isinstance(fill, numbers.Number):
                         fill = self.palette.getcolor(fill)
    -                fill = self.draw.draw_ink(fill, self.mode)
    +                fill = self.draw.draw_ink(fill)
             return ink, fill
     
    -    def arc(self, xy, start, end, fill=None):
    +    def arc(self, xy, start, end, fill=None, width=0):
             """Draw an arc."""
             ink, fill = self._getink(fill)
             if ink is not None:
    -            self.draw.draw_arc(xy, start, end, ink)
    +            self.draw.draw_arc(xy, start, end, ink, width)
     
         def bitmap(self, xy, bitmap, fill=None):
             """Draw a bitmap."""
    @@ -133,27 +134,81 @@ class ImageDraw(object):
             if ink is not None:
                 self.draw.draw_bitmap(xy, bitmap.im, ink)
     
    -    def chord(self, xy, start, end, fill=None, outline=None):
    +    def chord(self, xy, start, end, fill=None, outline=None, width=1):
             """Draw a chord."""
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_chord(xy, start, end, fill, 1)
    -        if ink is not None:
    -            self.draw.draw_chord(xy, start, end, ink, 0)
    +        if ink is not None and ink != fill and width != 0:
    +            self.draw.draw_chord(xy, start, end, ink, 0, width)
     
    -    def ellipse(self, xy, fill=None, outline=None):
    +    def ellipse(self, xy, fill=None, outline=None, width=1):
             """Draw an ellipse."""
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_ellipse(xy, fill, 1)
    -        if ink is not None:
    -            self.draw.draw_ellipse(xy, ink, 0)
    +        if ink is not None and ink != fill and width != 0:
    +            self.draw.draw_ellipse(xy, ink, 0, width)
     
    -    def line(self, xy, fill=None, width=0):
    +    def line(self, xy, fill=None, width=0, joint=None):
             """Draw a line, or a connected sequence of line segments."""
    -        ink, fill = self._getink(fill)
    +        ink = self._getink(fill)[0]
             if ink is not None:
                 self.draw.draw_lines(xy, ink, width)
    +            if joint == "curve" and width > 4:
    +                for i in range(1, len(xy) - 1):
    +                    point = xy[i]
    +                    angles = [
    +                        math.degrees(math.atan2(end[0] - start[0], start[1] - end[1]))
    +                        % 360
    +                        for start, end in ((xy[i - 1], point), (point, xy[i + 1]))
    +                    ]
    +                    if angles[0] == angles[1]:
    +                        # This is a straight line, so no joint is required
    +                        continue
    +
    +                    def coord_at_angle(coord, angle):
    +                        x, y = coord
    +                        angle -= 90
    +                        distance = width / 2 - 1
    +                        return tuple(
    +                            [
    +                                p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d))
    +                                for p, p_d in (
    +                                    (x, distance * math.cos(math.radians(angle))),
    +                                    (y, distance * math.sin(math.radians(angle))),
    +                                )
    +                            ]
    +                        )
    +
    +                    flipped = (
    +                        angles[1] > angles[0] and angles[1] - 180 > angles[0]
    +                    ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0])
    +                    coords = [
    +                        (point[0] - width / 2 + 1, point[1] - width / 2 + 1),
    +                        (point[0] + width / 2 - 1, point[1] + width / 2 - 1),
    +                    ]
    +                    if flipped:
    +                        start, end = (angles[1] + 90, angles[0] + 90)
    +                    else:
    +                        start, end = (angles[0] - 90, angles[1] - 90)
    +                    self.pieslice(coords, start - 90, end - 90, fill)
    +
    +                    if width > 8:
    +                        # Cover potential gaps between the line and the joint
    +                        if flipped:
    +                            gapCoords = [
    +                                coord_at_angle(point, angles[0] + 90),
    +                                point,
    +                                coord_at_angle(point, angles[1] + 90),
    +                            ]
    +                        else:
    +                            gapCoords = [
    +                                coord_at_angle(point, angles[0] - 90),
    +                                point,
    +                                coord_at_angle(point, angles[1] - 90),
    +                            ]
    +                        self.line(gapCoords, fill, width=3)
     
         def shape(self, shape, fill=None, outline=None):
             """(Experimental) Draw a shape."""
    @@ -161,16 +216,16 @@ class ImageDraw(object):
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_outline(shape, fill, 1)
    -        if ink is not None:
    +        if ink is not None and ink != fill:
                 self.draw.draw_outline(shape, ink, 0)
     
    -    def pieslice(self, xy, start, end, fill=None, outline=None):
    +    def pieslice(self, xy, start, end, fill=None, outline=None, width=1):
             """Draw a pieslice."""
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_pieslice(xy, start, end, fill, 1)
    -        if ink is not None:
    -            self.draw.draw_pieslice(xy, start, end, ink, 0)
    +        if ink is not None and ink != fill and width != 0:
    +            self.draw.draw_pieslice(xy, start, end, ink, 0, width)
     
         def point(self, xy, fill=None):
             """Draw one or more individual pixels."""
    @@ -183,16 +238,16 @@ class ImageDraw(object):
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_polygon(xy, fill, 1)
    -        if ink is not None:
    +        if ink is not None and ink != fill:
                 self.draw.draw_polygon(xy, ink, 0)
     
    -    def rectangle(self, xy, fill=None, outline=None):
    +    def rectangle(self, xy, fill=None, outline=None, width=1):
             """Draw a rectangle."""
             ink, fill = self._getink(outline, fill)
             if fill is not None:
                 self.draw.draw_rectangle(xy, fill, 1)
    -        if ink is not None:
    -            self.draw.draw_rectangle(xy, ink, 0)
    +        if ink is not None and ink != fill and width != 0:
    +            self.draw.draw_rectangle(xy, ink, 0, width)
     
         def _multiline_check(self, text):
             """Draw text."""
    @@ -205,35 +260,126 @@ class ImageDraw(object):
     
             return text.split(split_character)
     
    -    def text(self, xy, text, fill=None, font=None, anchor=None,
    -             *args, **kwargs):
    +    def text(
    +        self,
    +        xy,
    +        text,
    +        fill=None,
    +        font=None,
    +        anchor=None,
    +        spacing=4,
    +        align="left",
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +        stroke_fill=None,
    +        *args,
    +        **kwargs
    +    ):
             if self._multiline_check(text):
    -            return self.multiline_text(xy, text, fill, font, anchor,
    -                                       *args, **kwargs)
    -        ink, fill = self._getink(fill)
    +            return self.multiline_text(
    +                xy,
    +                text,
    +                fill,
    +                font,
    +                anchor,
    +                spacing,
    +                align,
    +                direction,
    +                features,
    +                language,
    +                stroke_width,
    +                stroke_fill,
    +            )
    +
             if font is None:
                 font = self.getfont()
    -        if ink is None:
    -            ink = fill
    -        if ink is not None:
    +
    +        def getink(fill):
    +            ink, fill = self._getink(fill)
    +            if ink is None:
    +                return fill
    +            return ink
    +
    +        def draw_text(ink, stroke_width=0, stroke_offset=None):
    +            coord = xy
                 try:
    -                mask, offset = font.getmask2(text, self.fontmode, *args, **kwargs)
    -                xy = xy[0] + offset[0], xy[1] + offset[1]
    +                mask, offset = font.getmask2(
    +                    text,
    +                    self.fontmode,
    +                    direction=direction,
    +                    features=features,
    +                    language=language,
    +                    stroke_width=stroke_width,
    +                    *args,
    +                    **kwargs,
    +                )
    +                coord = coord[0] + offset[0], coord[1] + offset[1]
                 except AttributeError:
                     try:
    -                    mask = font.getmask(text, self.fontmode, *args, **kwargs)
    +                    mask = font.getmask(
    +                        text,
    +                        self.fontmode,
    +                        direction,
    +                        features,
    +                        language,
    +                        stroke_width,
    +                        *args,
    +                        **kwargs,
    +                    )
                     except TypeError:
                         mask = font.getmask(text)
    -            self.draw.draw_bitmap(xy, mask, ink)
    +            if stroke_offset:
    +                coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1]
    +            self.draw.draw_bitmap(coord, mask, ink)
     
    -    def multiline_text(self, xy, text, fill=None, font=None, anchor=None,
    -                       spacing=4, align="left", direction=None, features=None):
    +        ink = getink(fill)
    +        if ink is not None:
    +            stroke_ink = None
    +            if stroke_width:
    +                stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink
    +
    +            if stroke_ink is not None:
    +                # Draw stroked text
    +                draw_text(stroke_ink, stroke_width)
    +
    +                # Draw normal text
    +                draw_text(ink, 0, (stroke_width, stroke_width))
    +            else:
    +                # Only draw normal text
    +                draw_text(ink)
    +
    +    def multiline_text(
    +        self,
    +        xy,
    +        text,
    +        fill=None,
    +        font=None,
    +        anchor=None,
    +        spacing=4,
    +        align="left",
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +        stroke_fill=None,
    +    ):
             widths = []
             max_width = 0
             lines = self._multiline_split(text)
    -        line_spacing = self.textsize('A', font=font)[1] + spacing
    +        line_spacing = (
    +            self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing
    +        )
             for line in lines:
    -            line_width, line_height = self.textsize(line, font)
    +            line_width, line_height = self.textsize(
    +                line,
    +                font,
    +                direction=direction,
    +                features=features,
    +                language=language,
    +                stroke_width=stroke_width,
    +            )
                 widths.append(line_width)
                 max_width = max(max_width, line_width)
             left, top = xy
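
    With the stroke support added above, text() renders twice when stroke_width is nonzero: once at the stroke width in stroke_fill (falling back to fill), then the normal glyphs offset by (stroke_width, stroke_width) on top. A sketch, assuming a TrueType font is available at the hypothetical path shown:

        from PIL import Image, ImageDraw, ImageFont

        im = Image.new("RGB", (300, 100), "white")
        draw = ImageDraw.Draw(im)
        font = ImageFont.truetype("DejaVuSans.ttf", 40)  # hypothetical path

        # Yellow letters with a 2-pixel black outline.
        draw.text((10, 20), "Hello", fill="yellow", font=font,
                  stroke_width=2, stroke_fill="black")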
    @@ -243,35 +389,65 @@ class ImageDraw(object):
                 elif align == "center":
                     left += (max_width - widths[idx]) / 2.0
                 elif align == "right":
    -                left += (max_width - widths[idx])
    +                left += max_width - widths[idx]
                 else:
    -                assert False, 'align must be "left", "center" or "right"'
    -            self.text((left, top), line, fill, font, anchor,
    -                      direction=direction, features=features)
    +                raise ValueError('align must be "left", "center" or "right"')
    +            self.text(
    +                (left, top),
    +                line,
    +                fill,
    +                font,
    +                anchor,
    +                direction=direction,
    +                features=features,
    +                language=language,
    +                stroke_width=stroke_width,
    +                stroke_fill=stroke_fill,
    +            )
                 top += line_spacing
                 left = xy[0]
     
    -    def textsize(self, text, font=None, spacing=4, direction=None,
    -                 features=None):
    +    def textsize(
    +        self,
    +        text,
    +        font=None,
    +        spacing=4,
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +    ):
             """Get the size of a given string, in pixels."""
             if self._multiline_check(text):
    -            return self.multiline_textsize(text, font, spacing,
    -                                           direction, features)
    +            return self.multiline_textsize(
    +                text, font, spacing, direction, features, language, stroke_width
    +            )
     
             if font is None:
                 font = self.getfont()
    -        return font.getsize(text, direction, features)
    +        return font.getsize(text, direction, features, language, stroke_width)
     
    -    def multiline_textsize(self, text, font=None, spacing=4, direction=None,
    -                           features=None):
    +    def multiline_textsize(
    +        self,
    +        text,
    +        font=None,
    +        spacing=4,
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +    ):
             max_width = 0
             lines = self._multiline_split(text)
    -        line_spacing = self.textsize('A', font=font)[1] + spacing
    +        line_spacing = (
    +            self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing
    +        )
             for line in lines:
    -            line_width, line_height = self.textsize(line, font, spacing,
    -                                                    direction, features)
    +            line_width, line_height = self.textsize(
    +                line, font, spacing, direction, features, language, stroke_width
    +            )
                 max_width = max(max_width, line_width)
    -        return max_width, len(lines)*line_spacing - spacing
    +        return max_width, len(lines) * line_spacing - spacing
     
     
     def Draw(im, mode=None):
    @@ -336,10 +512,11 @@ def floodfill(image, xy, value, border=None, thresh=0):
             pixel.
         :param thresh: Optional threshold value which specifies a maximum
             tolerable difference of a pixel value from the 'background' in
    -        order for it to be replaced. Useful for filling regions of non-
    -        homogeneous, but similar, colors.
    +        order for it to be replaced. Useful for filling regions of
    +        non-homogeneous, but similar, colors.
         """
         # based on an implementation by Eric S. Raymond
    +    # amended by yo1995 @20180806
         pixel = image.load()
         x, y = xy
         try:
    @@ -349,39 +526,39 @@ def floodfill(image, xy, value, border=None, thresh=0):
             pixel[x, y] = value
         except (ValueError, IndexError):
             return  # seed point outside image
    -    edge = [(x, y)]
    -    if border is None:
    -        while edge:
    -            newedge = []
    -            for (x, y) in edge:
    -                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
    -                    try:
    -                        p = pixel[s, t]
    -                    except IndexError:
    -                        pass
    +    edge = {(x, y)}
    +    # use a set to keep record of current and previous edge pixels
    +    # to reduce memory consumption
    +    full_edge = set()
    +    while edge:
    +        new_edge = set()
    +        for (x, y) in edge:  # 4 adjacent method
    +            for (s, t) in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)):
    +                # If already processed, or if a coordinate is negative, skip
    +                if (s, t) in full_edge or s < 0 or t < 0:
    +                    continue
    +                try:
    +                    p = pixel[s, t]
    +                except (ValueError, IndexError):
    +                    pass
    +                else:
    +                    full_edge.add((s, t))
    +                    if border is None:
    +                        fill = _color_diff(p, background) <= thresh
                         else:
    -                        if _color_diff(p, background) <= thresh:
    -                            pixel[s, t] = value
    -                            newedge.append((s, t))
    -            edge = newedge
    +                        fill = p != value and p != border
    +                    if fill:
    +                        pixel[s, t] = value
    +                        new_edge.add((s, t))
    +        full_edge = edge  # discard pixels processed
    +        edge = new_edge
    +
    +
    +def _color_diff(color1, color2):
    +    """
    +    Uses 1-norm distance to calculate difference between two values.
    +    """
    +    if isinstance(color2, tuple):
    +        return sum([abs(color1[i] - color2[i]) for i in range(0, len(color2))])
         else:
    -        while edge:
    -            newedge = []
    -            for (x, y) in edge:
    -                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
    -                    try:
    -                        p = pixel[s, t]
    -                    except IndexError:
    -                        pass
    -                    else:
    -                        if p != value and p != border:
    -                            pixel[s, t] = value
    -                            newedge.append((s, t))
    -            edge = newedge
    -
    -
    -def _color_diff(rgb1, rgb2):
    -    """
    -    Uses 1-norm distance to calculate difference between two rgb values.
    -    """
    -    return abs(rgb1[0]-rgb2[0]) +  abs(rgb1[1]-rgb2[1]) +  abs(rgb1[2]-rgb2[2])
    +        return abs(color1 - color2)
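The rewritten `floodfill` replaces the duplicated border/threshold loops with a single pass that tracks visited pixels in a `full_edge` set and skips negative coordinates, so the fill no longer wraps around image edges; `_color_diff` now also accepts scalar pixels for single-band images. A small sketch of the threshold behaviour:

```python
from PIL import Image, ImageDraw

im = Image.new("RGB", (64, 64), (250, 250, 250))
draw = ImageDraw.Draw(im)
draw.ellipse((8, 8, 56, 56), outline=(0, 0, 0))

# thresh=16 lets pixels within 1-norm distance 16 of the seed's
# background colour be filled too, useful for anti-aliased boundaries
ImageDraw.floodfill(im, xy=(32, 32), value=(255, 0, 0), thresh=16)
```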
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py b/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py
    index f7902b0..20b5fe4 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py
    @@ -19,26 +19,25 @@
     from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath
     
     
    -class Pen(object):
    +class Pen:
         def __init__(self, color, width=1, opacity=255):
             self.color = ImageColor.getrgb(color)
             self.width = width
     
     
    -class Brush(object):
    +class Brush:
         def __init__(self, color, opacity=255):
             self.color = ImageColor.getrgb(color)
     
     
    -class Font(object):
    +class Font:
         def __init__(self, color, file, size=12):
             # FIXME: add support for bitmap fonts
             self.color = ImageColor.getrgb(color)
             self.font = ImageFont.truetype(file, size)
     
     
    -class Draw(object):
    -
    +class Draw:
         def __init__(self, image, size=None, color=None):
             if not hasattr(image, "im"):
                 image = Image.new(image, size, color)
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py b/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py
    index 11c9c3a..3b79d5c 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py
    @@ -21,8 +21,7 @@
     from . import Image, ImageFilter, ImageStat
     
     
    -class _Enhance(object):
    -
    +class _Enhance:
         def enhance(self, factor):
             """
             Returns an enhanced image.
    @@ -45,11 +44,12 @@ class Color(_Enhance):
         factor of 0.0 gives a black and white image. A factor of 1.0 gives
         the original image.
         """
    +
         def __init__(self, image):
             self.image = image
    -        self.intermediate_mode = 'L'
    -        if 'A' in image.getbands():
    -            self.intermediate_mode = 'LA'
    +        self.intermediate_mode = "L"
    +        if "A" in image.getbands():
    +            self.intermediate_mode = "LA"
     
             self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
     
    @@ -61,13 +61,14 @@ class Contrast(_Enhance):
         to the contrast control on a TV set. An enhancement factor of 0.0
         gives a solid grey image. A factor of 1.0 gives the original image.
         """
    +
         def __init__(self, image):
             self.image = image
             mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
             self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
     
    -        if 'A' in image.getbands():
    -            self.degenerate.putalpha(image.getchannel('A'))
    +        if "A" in image.getbands():
    +            self.degenerate.putalpha(image.getchannel("A"))
     
     
     class Brightness(_Enhance):
    @@ -77,12 +78,13 @@ class Brightness(_Enhance):
         enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
         original image.
         """
    +
         def __init__(self, image):
             self.image = image
             self.degenerate = Image.new(image.mode, image.size, 0)
     
    -        if 'A' in image.getbands():
    -            self.degenerate.putalpha(image.getchannel('A'))
    +        if "A" in image.getbands():
    +            self.degenerate.putalpha(image.getchannel("A"))
     
     
     class Sharpness(_Enhance):
    @@ -92,9 +94,10 @@ class Sharpness(_Enhance):
         enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
         original image, and a factor of 2.0 gives a sharpened image.
         """
    +
         def __init__(self, image):
             self.image = image
             self.degenerate = image.filter(ImageFilter.SMOOTH)
     
    -        if 'A' in image.getbands():
    -            self.degenerate.putalpha(image.getchannel('A'))
    +        if "A" in image.getbands():
    +            self.degenerate.putalpha(image.getchannel("A"))
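The `ImageEnhance` hunks are quote-style and blank-line churn from the Black reformat; behaviour, including the alpha-channel preservation via `putalpha`, is unchanged. Typical usage for reference (input file hypothetical):

```python
from PIL import Image, ImageEnhance

im = Image.open("photo.png").convert("RGBA")  # hypothetical input

# factor 1.0 returns the original; 0.0 gives the degenerate image
# (grey for Contrast, black for Brightness, blurred for Sharpness)
brighter = ImageEnhance.Brightness(im).enhance(1.4)
flatter = ImageEnhance.Contrast(im).enhance(0.8)
```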
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFile.py b/server/www/packages/packages-windows/x86/PIL/ImageFile.py
    index 681dee5..6287968 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageFile.py
    @@ -27,16 +27,16 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import io
    +import struct
    +import sys
    +
     from . import Image
     from ._util import isPath
    -import io
    -import os
    -import sys
    -import struct
     
     MAXBLOCK = 65536
     
    -SAFEBLOCK = 1024*1024
    +SAFEBLOCK = 1024 * 1024
     
     LOAD_TRUNCATED_IMAGES = False
     
    @@ -45,7 +45,7 @@ ERRORS = {
         -2: "decoding error",
         -3: "unknown error",
         -8: "bad configuration",
    -    -9: "out of memory error"
    +    -9: "out of memory error",
     }
     
     
    @@ -56,13 +56,14 @@ def raise_ioerror(error):
             message = ERRORS.get(error)
         if not message:
             message = "decoder error %d" % error
    -    raise IOError(message + " when reading image file")
    +    raise OSError(message + " when reading image file")
     
     
     #
     # --------------------------------------------------------------------
     # Helpers
     
    +
     def _tilesort(t):
         # sort on offset
         return t[2]
    @@ -72,14 +73,17 @@ def _tilesort(t):
     # --------------------------------------------------------------------
     # ImageFile base class
     
    +
     class ImageFile(Image.Image):
         "Base class for image file format handlers."
     
         def __init__(self, fp=None, filename=None):
    -        Image.Image.__init__(self)
    +        super().__init__()
     
             self._min_frame = 0
     
    +        self.custom_mimetype = None
    +
             self.tile = None
             self.readonly = 1  # until we know better
     
    @@ -99,32 +103,33 @@ class ImageFile(Image.Image):
                 self._exclusive_fp = None
     
             try:
    -            self._open()
    -        except (IndexError,  # end of data
    +            try:
    +                self._open()
    +            except (
    +                IndexError,  # end of data
                     TypeError,  # end of data (ord)
                     KeyError,  # unsupported mode
                     EOFError,  # got header but not the first frame
    -                struct.error) as v:
    +                struct.error,
    +            ) as v:
    +                raise SyntaxError(v)
    +
    +            if not self.mode or self.size[0] <= 0:
    +                raise SyntaxError("not identified by this driver")
    +        except BaseException:
                 # close the file only if we have opened it this constructor
                 if self._exclusive_fp:
                     self.fp.close()
    -            raise SyntaxError(v)
    -
    -        if not self.mode or self.size[0] <= 0:
    -            raise SyntaxError("not identified by this driver")
    -
    -    def draft(self, mode, size):
    -        "Set draft mode"
    -
    -        pass
    +            raise
     
         def get_format_mimetype(self):
    -        if self.format is None:
    -            return
    -        return Image.MIME.get(self.format.upper())
    +        if self.custom_mimetype:
    +            return self.custom_mimetype
    +        if self.format is not None:
    +            return Image.MIME.get(self.format.upper())
     
         def verify(self):
    -        "Check file integrity"
    +        """Check file integrity"""
     
             # raise exception if something's wrong.  must be called
             # directly after open, and closes file when finished.
    @@ -133,19 +138,19 @@ class ImageFile(Image.Image):
             self.fp = None
     
         def load(self):
    -        "Load image data based on tile list"
    +        """Load image data based on tile list"""
     
             pixel = Image.Image.load(self)
     
             if self.tile is None:
    -            raise IOError("cannot load this image")
    +            raise OSError("cannot load this image")
             if not self.tile:
                 return pixel
     
             self.map = None
             use_mmap = self.filename and len(self.tile) == 1
             # As of pypy 2.1.0, memory mapping was failing here.
    -        use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info')
    +        use_mmap = use_mmap and not hasattr(sys, "pypy_version_info")
     
             readonly = 0
     
    @@ -166,8 +171,12 @@ class ImageFile(Image.Image):
             if use_mmap:
                 # try memory mapping
                 decoder_name, extents, offset, args = self.tile[0]
    -            if decoder_name == "raw" and len(args) >= 3 and args[0] == self.mode \
    -               and args[0] in Image._MAPMODES:
    +            if (
    +                decoder_name == "raw"
    +                and len(args) >= 3
    +                and args[0] == self.mode
    +                and args[0] in Image._MAPMODES
    +            ):
                     try:
                         if hasattr(Image.core, "map"):
                             # use built-in mapper  WIN32 only
    @@ -175,20 +184,24 @@ class ImageFile(Image.Image):
                             self.map.seek(offset)
                             self.im = self.map.readimage(
                                 self.mode, self.size, args[1], args[2]
    -                            )
    +                        )
                         else:
                             # use mmap, if possible
                             import mmap
    +
                             with open(self.filename, "r") as fp:
    -                            self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
    -                        self.im = Image.core.map_buffer(
    -                            self.map, self.size, decoder_name, extents, offset, args
    +                            self.map = mmap.mmap(
    +                                fp.fileno(), 0, access=mmap.ACCESS_READ
                                 )
    +                        self.im = Image.core.map_buffer(
    +                            self.map, self.size, decoder_name, offset, args
    +                        )
                         readonly = 1
    -                    # After trashing self.im, we might need to reload the palette data.
    +                    # After trashing self.im,
    +                    # we might need to reload the palette data.
                         if self.palette:
                             self.palette.dirty = 1
    -                except (AttributeError, EnvironmentError, ImportError):
    +                except (AttributeError, OSError, ImportError):
                         self.map = None
     
             self.load_prepare()
    @@ -204,8 +217,9 @@ class ImageFile(Image.Image):
                     prefix = b""
     
                 for decoder_name, extents, offset, args in self.tile:
    -                decoder = Image._getdecoder(self.mode, decoder_name,
    -                                            args, self.decoderconfig)
    +                decoder = Image._getdecoder(
    +                    self.mode, decoder_name, args, self.decoderconfig
    +                )
                     try:
                         seek(offset)
                         decoder.setimage(self.im, extents)
    @@ -217,19 +231,21 @@ class ImageFile(Image.Image):
                             while True:
                                 try:
                                     s = read(self.decodermaxblock)
    -                            except (IndexError, struct.error):  # truncated png/gif
    +                            except (IndexError, struct.error):
    +                                # truncated png/gif
                                     if LOAD_TRUNCATED_IMAGES:
                                         break
                                     else:
    -                                    raise IOError("image file is truncated")
    +                                    raise OSError("image file is truncated")
     
                                 if not s:  # truncated jpeg
                                     if LOAD_TRUNCATED_IMAGES:
                                         break
                                     else:
    -                                    self.tile = []
    -                                    raise IOError("image file is truncated "
    -                                                  "(%d bytes not processed)" % len(b))
    +                                    raise OSError(
    +                                        "image file is truncated "
    +                                        "(%d bytes not processed)" % len(b)
    +                                    )
     
                                 b = b + s
                                 n, err_code = decoder.decode(b)
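This hunk keeps the long-standing escape hatch for damaged files while switching the error type to `OSError` (and no longer clears `self.tile` before raising on a truncated JPEG). Opting into partial decodes looks like this (file name hypothetical):

```python
from PIL import Image, ImageFile

# allow load() to stop at missing data instead of raising
# "OSError: image file is truncated (...)"
ImageFile.LOAD_TRUNCATED_IMAGES = True

im = Image.open("half_downloaded.jpg")  # hypothetical damaged file
im.load()  # decodes whatever data is present
```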
    @@ -257,8 +273,7 @@ class ImageFile(Image.Image):
     
         def load_prepare(self):
             # create image memory if necessary
    -        if not self.im or\
    -           self.im.mode != self.mode or self.im.size != self.size:
    +        if not self.im or self.im.mode != self.mode or self.im.size != self.size:
                 self.im = Image.core.new(self.mode, self.size)
             # create palette (optional)
             if self.mode == "P":
    @@ -277,11 +292,15 @@ class ImageFile(Image.Image):
         #     pass
     
         def _seek_check(self, frame):
    -        if (frame < self._min_frame or
    +        if (
    +            frame < self._min_frame
                 # Only check upper limit on frames if additional seek operations
                 # are not required to do so
    -            (not (hasattr(self, "_n_frames") and self._n_frames is None) and
    -             frame >= self.n_frames+self._min_frame)):
    +            or (
    +                not (hasattr(self, "_n_frames") and self._n_frames is None)
    +                and frame >= self.n_frames + self._min_frame
    +            )
    +        ):
                 raise EOFError("attempt to seek outside sequence")
     
             return self.tell() != frame
    @@ -296,14 +315,12 @@ class StubImageFile(ImageFile):
         """
     
         def _open(self):
    -        raise NotImplementedError(
    -            "StubImageFile subclass must implement _open"
    -            )
    +        raise NotImplementedError("StubImageFile subclass must implement _open")
     
         def load(self):
             loader = self._load()
             if loader is None:
    -            raise IOError("cannot find loader for this %s file" % self.format)
    +            raise OSError("cannot find loader for this %s file" % self.format)
             image = loader.load(self)
             assert image is not None
             # become the other object (!)
    @@ -311,17 +328,16 @@ class StubImageFile(ImageFile):
             self.__dict__ = image.__dict__
     
         def _load(self):
    -        "(Hook) Find actual image loader."
    -        raise NotImplementedError(
    -            "StubImageFile subclass must implement _load"
    -            )
    +        """(Hook) Find actual image loader."""
    +        raise NotImplementedError("StubImageFile subclass must implement _load")
     
     
    -class Parser(object):
    +class Parser:
         """
         Incremental image parser.  This class implements the standard
         feed/close consumer interface.
         """
    +
         incremental = None
         image = None
         data = None
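`Parser` keeps its feed/close consumer interface; only the class statement and the error types change in this diff. For reference, the interface it implements (input file hypothetical):

```python
from PIL import ImageFile

parser = ImageFile.Parser()
with open("photo.jpg", "rb") as f:  # hypothetical input
    while True:
        chunk = f.read(8192)
        if not chunk:
            break
        parser.feed(chunk)

im = parser.close()  # raises OSError if the data was incomplete
```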
    @@ -393,7 +409,7 @@ class Parser(object):
                 try:
                     with io.BytesIO(self.data) as fp:
                         im = Image.open(fp)
    -            except IOError:
    +            except OSError:
                     # traceback.print_exc()
                     pass  # not enough data
                 else:
    @@ -406,15 +422,13 @@ class Parser(object):
                         im.load_prepare()
                         d, e, o, a = im.tile[0]
                         im.tile = []
    -                    self.decoder = Image._getdecoder(
    -                        im.mode, d, a, im.decoderconfig
    -                        )
    +                    self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig)
                         self.decoder.setimage(im.im, e)
     
                         # calculate decoder offset
                         self.offset = o
                         if self.offset <= len(self.data):
    -                        self.data = self.data[self.offset:]
    +                        self.data = self.data[self.offset :]
                             self.offset = 0
     
                     self.image = im
    @@ -440,9 +454,9 @@ class Parser(object):
                 self.feed(b"")
                 self.data = self.decoder = None
                 if not self.finished:
    -                raise IOError("image was incomplete")
    +                raise OSError("image was incomplete")
             if not self.image:
    -            raise IOError("cannot parse this image")
    +            raise OSError("cannot parse this image")
             if self.data:
                 # incremental parsing not possible; reopen the file
                 # not that we have all data
    @@ -456,6 +470,7 @@ class Parser(object):
     
     # --------------------------------------------------------------------
     
    +
     def _save(im, fp, tile, bufsize=0):
         """Helper to save image based on tile list
     
    @@ -485,7 +500,7 @@ def _save(im, fp, tile, bufsize=0):
             for e, b, o, a in tile:
                 e = Image._getencoder(im.mode, e, a, im.encoderconfig)
                 if o > 0:
    -                fp.seek(o, 0)
    +                fp.seek(o)
                 e.setimage(im.im, b)
                 if e.pushes_fd:
                     e.setfd(fp)
    @@ -497,14 +512,14 @@ def _save(im, fp, tile, bufsize=0):
                         if s:
                             break
                 if s < 0:
    -                raise IOError("encoder error %d when writing image file" % s)
    +                raise OSError("encoder error %d when writing image file" % s)
                 e.cleanup()
         else:
             # slight speedup: compress to real file object
             for e, b, o, a in tile:
                 e = Image._getencoder(im.mode, e, a, im.encoderconfig)
                 if o > 0:
    -                fp.seek(o, 0)
    +                fp.seek(o)
                 e.setimage(im.im, b)
                 if e.pushes_fd:
                     e.setfd(fp)
    @@ -512,7 +527,7 @@ def _save(im, fp, tile, bufsize=0):
                 else:
                     s = e.encode_to_file(fh, bufsize)
                 if s < 0:
    -                raise IOError("encoder error %d when writing image file" % s)
    +                raise OSError("encoder error %d when writing image file" % s)
                 e.cleanup()
         if hasattr(fp, "flush"):
             fp.flush()
    @@ -542,7 +557,7 @@ def _safe_read(fp, size):
         return b"".join(data)
     
     
    -class PyCodecState(object):
    +class PyCodecState:
         def __init__(self):
             self.xsize = 0
             self.ysize = 0
    @@ -550,11 +565,10 @@ class PyCodecState(object):
             self.yoff = 0
     
         def extents(self):
    -        return (self.xoff, self.yoff,
    -                self.xoff+self.xsize, self.yoff+self.ysize)
    +        return (self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize)
     
     
    -class PyDecoder(object):
    +class PyDecoder:
         """
         Python implementation of a format decoder. Override this class and
         add the decoding logic in the `decode` method.
    @@ -588,10 +602,10 @@ class PyDecoder(object):
             """
             Override to perform the decoding process.
     
    -        :param buffer: A bytes object with the data to be decoded.  If `handles_eof`
    -             is set, then `buffer` will be empty and `self.fd` will be set.
    -        :returns: A tuple of (bytes consumed, errcode). If finished with decoding
    -             return <0 for the bytes consumed. Err codes are from `ERRORS`
    +        :param buffer: A bytes object with the data to be decoded.
    +        :returns: A tuple of (bytes consumed, errcode).
    +            If finished with decoding return <0 for the bytes consumed.
    +            Err codes are from `ERRORS`
             """
             raise NotImplementedError()
     
    @@ -641,8 +655,10 @@ class PyDecoder(object):
             if self.state.xsize <= 0 or self.state.ysize <= 0:
                 raise ValueError("Size cannot be negative")
     
    -        if (self.state.xsize + self.state.xoff > self.im.size[0] or
    -           self.state.ysize + self.state.yoff > self.im.size[1]):
    +        if (
    +            self.state.xsize + self.state.xoff > self.im.size[0]
    +            or self.state.ysize + self.state.yoff > self.im.size[1]
    +        ):
                 raise ValueError("Tile cannot extend outside image")
     
         def set_as_raw(self, data, rawmode=None):
    @@ -650,14 +666,14 @@ class PyDecoder(object):
             Convenience method to set the internal image from a stream of raw data
     
             :param data: Bytes to be set
    -        :param rawmode: The rawmode to be used for the decoder. If not specified,
    -             it will default to the mode of the image
    +        :param rawmode: The rawmode to be used for the decoder.
    +            If not specified, it will default to the mode of the image
             :returns: None
             """
     
             if not rawmode:
                 rawmode = self.mode
    -        d = Image._getdecoder(self.mode, 'raw', (rawmode))
    +        d = Image._getdecoder(self.mode, "raw", (rawmode))
             d.setimage(self.im, self.state.extents())
             s = d.decode(data)
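`PyDecoder` keeps the same contract under the reformatting: `decode()` returns `(bytes_consumed, errcode)` and signals completion with a negative first element, while `set_as_raw()` hands finished raw data to the C layer. A minimal subclass under those rules (the decoder name is made up):

```python
from PIL import Image, ImageFile

class RawCopyDecoder(ImageFile.PyDecoder):
    """Toy decoder that treats the incoming buffer as raw pixel data."""

    _pulls_fd = False  # data arrives through decode(), not self.fd

    def decode(self, buffer):
        # rawmode defaults to the image mode when not given
        self.set_as_raw(bytes(buffer))
        return -1, 0  # negative consumed count: decoding is finished

Image.register_decoder("rawcopy", RawCopyDecoder)  # hypothetical name
```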
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFilter.py b/server/www/packages/packages-windows/x86/PIL/ImageFilter.py
    index e77349d..6b0f5eb 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageFilter.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageFilter.py
    @@ -14,9 +14,6 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import division
    -
     import functools
     
     try:
    @@ -25,7 +22,7 @@ except ImportError:  # pragma: no cover
         numpy = None
     
     
    -class Filter(object):
    +class Filter:
         pass
     
     
    @@ -33,7 +30,14 @@ class MultibandFilter(Filter):
         pass
     
     
    -class Kernel(MultibandFilter):
    +class BuiltinFilter(MultibandFilter):
    +    def filter(self, image):
    +        if image.mode == "P":
    +            raise ValueError("cannot filter palette images")
    +        return image.filter(*self.filterargs)
    +
    +
    +class Kernel(BuiltinFilter):
         """
         Create a convolution kernel.  The current version only
         supports 3x3 and 5x5 integer and floating point kernels.
    @@ -50,26 +54,17 @@ class Kernel(MultibandFilter):
         :param offset: Offset. If given, this value is added to the result,
                         after it has been divided by the scale factor.
         """
    +
         name = "Kernel"
     
         def __init__(self, size, kernel, scale=None, offset=0):
             if scale is None:
                 # default scale is sum of kernel
    -            scale = functools.reduce(lambda a, b: a+b, kernel)
    +            scale = functools.reduce(lambda a, b: a + b, kernel)
             if size[0] * size[1] != len(kernel):
                 raise ValueError("not enough coefficients in kernel")
             self.filterargs = size, scale, offset, kernel
     
    -    def filter(self, image):
    -        if image.mode == "P":
    -            raise ValueError("cannot filter palette images")
    -        return image.filter(*self.filterargs)
    -
    -
    -class BuiltinFilter(Kernel):
    -    def __init__(self):
    -        pass
    -
     
     class RankFilter(Filter):
         """
    @@ -81,6 +76,7 @@ class RankFilter(Filter):
                      ``size * size / 2`` for a median filter, ``size * size - 1``
                      for a max filter, etc.
         """
    +
         name = "Rank"
     
         def __init__(self, size, rank):
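Functionally, the `ImageFilter` hunks fold `Kernel.filter()` up into `BuiltinFilter` and make `Kernel` inherit from it, so the predefined filters no longer need a do-nothing `__init__`. Usage is unchanged; per the docstrings, `scale` defaults to the kernel sum and `rank` selects min/median/max (input file hypothetical):

```python
from PIL import Image, ImageFilter

im = Image.open("photo.jpg").convert("RGB")  # hypothetical input

# 3x3 convolution; scale would default to the kernel sum (4) if omitted
sharpened = im.filter(
    ImageFilter.Kernel((3, 3), [0, -1, 0, -1, 8, -1, 0, -1, 0], scale=4)
)

# rank filters: rank 0 = min, size*size//2 = median, size*size-1 = max
median = im.filter(ImageFilter.MedianFilter(size=3))
maxed = im.filter(ImageFilter.RankFilter(size=3, rank=8))
```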
    @@ -90,7 +86,7 @@ class RankFilter(Filter):
         def filter(self, image):
             if image.mode == "P":
                 raise ValueError("cannot filter palette images")
    -        image = image.expand(self.size//2, self.size//2)
    +        image = image.expand(self.size // 2, self.size // 2)
             return image.rankfilter(self.size, self.rank)
     
     
    @@ -101,11 +97,12 @@ class MedianFilter(RankFilter):
     
         :param size: The kernel size, in pixels.
         """
    +
         name = "Median"
     
         def __init__(self, size=3):
             self.size = size
    -        self.rank = size*size//2
    +        self.rank = size * size // 2
     
     
     class MinFilter(RankFilter):
    @@ -115,6 +112,7 @@ class MinFilter(RankFilter):
     
         :param size: The kernel size, in pixels.
         """
    +
         name = "Min"
     
         def __init__(self, size=3):
    @@ -129,11 +127,12 @@ class MaxFilter(RankFilter):
     
         :param size: The kernel size, in pixels.
         """
    +
         name = "Max"
     
         def __init__(self, size=3):
             self.size = size
    -        self.rank = size*size-1
    +        self.rank = size * size - 1
     
     
     class ModeFilter(Filter):
    @@ -144,6 +143,7 @@ class ModeFilter(Filter):
     
         :param size: The kernel size, in pixels.
         """
    +
         name = "Mode"
     
         def __init__(self, size=3):
    @@ -158,6 +158,7 @@ class GaussianBlur(MultibandFilter):
     
         :param radius: Blur radius.
         """
    +
         name = "GaussianBlur"
     
         def __init__(self, radius=2):
    @@ -178,6 +179,7 @@ class BoxBlur(MultibandFilter):
                        returns an identical image. Radius 1 takes 1 pixel
                        in each direction, i.e. 9 pixels in total.
         """
    +
         name = "BoxBlur"
     
         def __init__(self, radius):
    @@ -200,7 +202,8 @@ class UnsharpMask(MultibandFilter):
     
         .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking
     
    -    """
    +    """  # noqa: E501
    +
         name = "UnsharpMask"
     
         def __init__(self, radius=2, percent=150, threshold=3):
    @@ -214,96 +217,116 @@ class UnsharpMask(MultibandFilter):
     
     class BLUR(BuiltinFilter):
         name = "Blur"
    +    # fmt: off
         filterargs = (5, 5), 16, 0, (
    -        1,  1,  1,  1,  1,
    -        1,  0,  0,  0,  1,
    -        1,  0,  0,  0,  1,
    -        1,  0,  0,  0,  1,
    -        1,  1,  1,  1,  1
    -        )
    +        1, 1, 1, 1, 1,
    +        1, 0, 0, 0, 1,
    +        1, 0, 0, 0, 1,
    +        1, 0, 0, 0, 1,
    +        1, 1, 1, 1, 1,
    +    )
    +    # fmt: on
     
     
     class CONTOUR(BuiltinFilter):
         name = "Contour"
    +    # fmt: off
         filterargs = (3, 3), 1, 255, (
             -1, -1, -1,
             -1,  8, -1,
    -        -1, -1, -1
    -        )
    +        -1, -1, -1,
    +    )
    +    # fmt: on
     
     
     class DETAIL(BuiltinFilter):
         name = "Detail"
    +    # fmt: off
         filterargs = (3, 3), 6, 0, (
    -        0, -1,  0,
    +        0,  -1,  0,
             -1, 10, -1,
    -        0, -1,  0
    -        )
    +        0,  -1,  0,
    +    )
    +    # fmt: on
     
     
     class EDGE_ENHANCE(BuiltinFilter):
         name = "Edge-enhance"
    +    # fmt: off
         filterargs = (3, 3), 2, 0, (
             -1, -1, -1,
             -1, 10, -1,
    -        -1, -1, -1
    -        )
    +        -1, -1, -1,
    +    )
    +    # fmt: on
     
     
     class EDGE_ENHANCE_MORE(BuiltinFilter):
         name = "Edge-enhance More"
    +    # fmt: off
         filterargs = (3, 3), 1, 0, (
             -1, -1, -1,
             -1,  9, -1,
    -        -1, -1, -1
    -        )
    +        -1, -1, -1,
    +    )
    +    # fmt: on
     
     
     class EMBOSS(BuiltinFilter):
         name = "Emboss"
    +    # fmt: off
         filterargs = (3, 3), 1, 128, (
    -        -1,  0,  0,
    -        0,  1,  0,
    -        0,  0,  0
    -        )
    +        -1, 0, 0,
    +        0,  1, 0,
    +        0,  0, 0,
    +    )
    +    # fmt: on
     
     
     class FIND_EDGES(BuiltinFilter):
         name = "Find Edges"
    +    # fmt: off
         filterargs = (3, 3), 1, 0, (
             -1, -1, -1,
             -1,  8, -1,
    -        -1, -1, -1
    -        )
    +        -1, -1, -1,
    +    )
    +    # fmt: on
     
     
     class SHARPEN(BuiltinFilter):
         name = "Sharpen"
    +    # fmt: off
         filterargs = (3, 3), 16, 0, (
             -2, -2, -2,
             -2, 32, -2,
    -        -2, -2, -2
    -        )
    +        -2, -2, -2,
    +    )
    +    # fmt: on
     
     
     class SMOOTH(BuiltinFilter):
         name = "Smooth"
    +    # fmt: off
         filterargs = (3, 3), 13, 0, (
    -        1,  1,  1,
    -        1,  5,  1,
    -        1,  1,  1
    -        )
    +        1, 1, 1,
    +        1, 5, 1,
    +        1, 1, 1,
    +    )
    +    # fmt: on
     
     
     class SMOOTH_MORE(BuiltinFilter):
         name = "Smooth More"
    +    # fmt: off
         filterargs = (5, 5), 100, 0, (
    -        1,  1,  1,  1,  1,
    -        1,  5,  5,  5,  1,
    -        1,  5, 44,  5,  1,
    -        1,  5,  5,  5,  1,
    -        1,  1,  1,  1,  1
    -        )
    +        1, 1,  1, 1, 1,
    +        1, 5,  5, 5, 1,
    +        1, 5, 44, 5, 1,
    +        1, 5,  5, 5, 1,
    +        1, 1,  1, 1, 1,
    +    )
    +    # fmt: on
     
     
     class Color3DLUT(MultibandFilter):
    @@ -330,6 +353,7 @@ class Color3DLUT(MultibandFilter):
                             than ``channels`` channels. Default is ``None``,
                             which means that mode wouldn't be changed.
         """
    +
         name = "Color 3D LUT"
     
         def __init__(self, size, table, channels=3, target_mode=None, **kwargs):
    @@ -341,7 +365,7 @@ class Color3DLUT(MultibandFilter):
     
             # Hidden flag `_copy_table=False` could be used to avoid extra copying
             # of the table if the table is specially made for the constructor.
    -        copy_table = kwargs.get('_copy_table', True)
    +        copy_table = kwargs.get("_copy_table", True)
             items = size[0] * size[1] * size[2]
             wrong_size = False
     
    @@ -349,8 +373,11 @@ class Color3DLUT(MultibandFilter):
                 if copy_table:
                     table = table.copy()
     
    -            if table.shape in [(items * channels,), (items, channels),
    -                               (size[2], size[1], size[0], channels)]:
    +            if table.shape in [
    +                (items * channels,),
    +                (items, channels),
    +                (size[2], size[1], size[0], channels),
    +            ]:
                     table = table.reshape(items * channels)
                 else:
                     wrong_size = True
    @@ -366,7 +393,8 @@ class Color3DLUT(MultibandFilter):
                         if len(pixel) != channels:
                             raise ValueError(
                                 "The elements of the table should "
    -                            "have a length of {}.".format(channels))
    +                            "have a length of {}.".format(channels)
    +                        )
                         table.extend(pixel)
     
             if wrong_size or len(table) != items * channels:
    @@ -374,7 +402,9 @@ class Color3DLUT(MultibandFilter):
                     "The table should have either channels * size**3 float items "
                     "or size**3 items of channels-sized tuples with floats. "
                     "Table should be: {}x{}x{}x{}. Actual length: {}".format(
    -                    channels, size[0], size[1], size[2], len(table)))
    +                    channels, size[0], size[1], size[2], len(table)
    +                )
    +            )
             self.table = table
     
         @staticmethod
    @@ -382,8 +412,9 @@ class Color3DLUT(MultibandFilter):
             try:
                 _, _, _ = size
             except ValueError:
    -            raise ValueError("Size should be either an integer or "
    -                             "a tuple of three integers.")
    +            raise ValueError(
    +                "Size should be either an integer or a tuple of three integers."
    +            )
             except TypeError:
                 size = (size, size, size)
             size = [int(x) for x in size]
    @@ -414,15 +445,20 @@ class Color3DLUT(MultibandFilter):
             for b in range(size3D):
                 for g in range(size2D):
                     for r in range(size1D):
    -                    table[idx_out:idx_out + channels] = callback(
    -                        r / (size1D-1), g / (size2D-1), b / (size3D-1))
    +                    table[idx_out : idx_out + channels] = callback(
    +                        r / (size1D - 1), g / (size2D - 1), b / (size3D - 1)
    +                    )
                         idx_out += channels
     
    -        return cls((size1D, size2D, size3D), table, channels=channels,
    -                   target_mode=target_mode, _copy_table=False)
    +        return cls(
    +            (size1D, size2D, size3D),
    +            table,
    +            channels=channels,
    +            target_mode=target_mode,
    +            _copy_table=False,
    +        )
     
    -    def transform(self, callback, with_normals=False, channels=None,
    -                  target_mode=None):
    +    def transform(self, callback, with_normals=False, channels=None, target_mode=None):
             """Transforms the table values using provided callback and returns
             a new LUT with altered values.
     
    @@ -453,24 +489,31 @@ class Color3DLUT(MultibandFilter):
             for b in range(size3D):
                 for g in range(size2D):
                     for r in range(size1D):
    -                    values = self.table[idx_in:idx_in + ch_in]
    +                    values = self.table[idx_in : idx_in + ch_in]
                         if with_normals:
    -                        values = callback(r / (size1D-1), g / (size2D-1),
    -                                          b / (size3D-1), *values)
    +                        values = callback(
    +                            r / (size1D - 1),
    +                            g / (size2D - 1),
    +                            b / (size3D - 1),
    +                            *values,
    +                        )
                         else:
                             values = callback(*values)
    -                    table[idx_out:idx_out + ch_out] = values
    +                    table[idx_out : idx_out + ch_out] = values
                         idx_in += ch_in
                         idx_out += ch_out
     
    -        return type(self)(self.size, table, channels=ch_out,
    -                          target_mode=target_mode or self.mode,
    -                          _copy_table=False)
    +        return type(self)(
    +            self.size,
    +            table,
    +            channels=ch_out,
    +            target_mode=target_mode or self.mode,
    +            _copy_table=False,
    +        )
     
         def __repr__(self):
             r = [
    -            "{} from {}".format(self.__class__.__name__,
    -                                  self.table.__class__.__name__),
    +            "{} from {}".format(self.__class__.__name__, self.table.__class__.__name__),
                 "size={:d}x{:d}x{:d}".format(*self.size),
                 "channels={:d}".format(self.channels),
             ]
    @@ -482,5 +525,11 @@ class Color3DLUT(MultibandFilter):
             from . import Image
     
             return image.color_lut_3d(
    -            self.mode or image.mode, Image.LINEAR, self.channels,
    -            self.size[0], self.size[1], self.size[2], self.table)
    +            self.mode or image.mode,
    +            Image.LINEAR,
    +            self.channels,
    +            self.size[0],
    +            self.size[1],
    +            self.size[2],
    +            self.table,
    +        )
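`Color3DLUT` is only re-wrapped here: `generate()` still calls the callback with `r, g, b` normalized to `[0, 1]` and stores `size**3` rows of `channels` floats. A small identity-plus-tweak sketch (input file hypothetical):

```python
from PIL import Image, ImageFilter

# returning r, g, b unchanged yields an identity LUT
identity = ImageFilter.Color3DLUT.generate(5, lambda r, g, b: (r, g, b))

# warm the image slightly by boosting the red axis
warm = ImageFilter.Color3DLUT.generate(
    5, lambda r, g, b: (min(r * 1.2, 1.0), g, b)
)

im = Image.open("photo.jpg").convert("RGB")  # hypothetical input
out = im.filter(warm)
```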
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFont.py b/server/www/packages/packages-windows/x86/PIL/ImageFont.py
    index 3ac29e8..027e4c4 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageFont.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageFont.py
    @@ -25,16 +25,19 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image
    -from ._util import isDirectory, isPath, py3
    +import base64
     import os
     import sys
    +from io import BytesIO
    +
    +from . import Image
    +from ._util import isDirectory, isPath
     
     LAYOUT_BASIC = 0
     LAYOUT_RAQM = 1
     
     
    -class _imagingft_not_installed(object):
    +class _imagingft_not_installed:
         # module placeholder
         def __getattr__(self, id):
             raise ImportError("The _imagingft C module is not installed")
    @@ -62,27 +65,33 @@ except ImportError:
     # --------------------------------------------------------------------
     
     
    -class ImageFont(object):
    +class ImageFont:
         "PIL font wrapper"
     
         def _load_pilfont(self, filename):
     
             with open(filename, "rb") as fp:
    +            image = None
                 for ext in (".png", ".gif", ".pbm"):
    +                if image:
    +                    image.close()
                     try:
                         fullname = os.path.splitext(filename)[0] + ext
                         image = Image.open(fullname)
    -                except:
    +                except Exception:
                         pass
                     else:
                         if image and image.mode in ("1", "L"):
                             break
                 else:
    -                raise IOError("cannot find glyph data file")
    +                if image:
    +                    image.close()
    +                raise OSError("cannot find glyph data file")
     
                 self.file = fullname
     
    -            return self._load_pilfont_data(fp, image)
    +            self._load_pilfont_data(fp, image)
    +            image.close()
     
         def _load_pilfont_data(self, file, image):
     
    @@ -98,7 +107,7 @@ class ImageFont(object):
                 self.info.append(s)
     
             # read PILfont metrics
    -        data = file.read(256*20)
    +        data = file.read(256 * 20)
     
             # check image
             if image.mode not in ("1", "L"):
    @@ -109,9 +118,33 @@ class ImageFont(object):
             self.font = Image.core.font(image.im, data)
     
         def getsize(self, text, *args, **kwargs):
    +        """
    +        Returns width and height (in pixels) of given text.
    +
    +        :param text: Text to measure.
    +
    +        :return: (width, height)
    +        """
             return self.font.getsize(text)
     
         def getmask(self, text, mode="", *args, **kwargs):
    +        """
    +        Create a bitmap for the text.
    +
    +        If the font uses antialiasing, the bitmap should have mode ``L`` and use a
    +        maximum value of 255. Otherwise, it should have mode ``1``.
    +
    +        :param text: Text to render.
    +        :param mode: Used by some graphics drivers to indicate what mode the
    +                     driver prefers; if empty, the renderer may return either
    +                     mode. Note that the mode is always a string, to simplify
    +                     C-level implementations.
    +
    +                     .. versionadded:: 1.1.5
    +
    +        :return: An internal PIL storage memory instance as defined by the
    +                 :py:mod:`PIL.Image.core` interface module.
    +        """
             return self.font.getmask(text, mode)
     
     
    @@ -119,11 +152,11 @@ class ImageFont(object):
     # Wrapper for FreeType fonts.  Application code should use the
     # truetype factory function to create font objects.
     
    -class FreeTypeFont(object):
    +
    +class FreeTypeFont:
         "FreeType font wrapper (requires _imagingft service)"
     
    -    def __init__(self, font=None, size=10, index=0, encoding="",
    -                 layout_engine=None):
    +    def __init__(self, font=None, size=10, index=0, encoding="", layout_engine=None):
             # FIXME: use service provider instead
     
             self.path = font
    @@ -135,56 +168,317 @@ class FreeTypeFont(object):
                 layout_engine = LAYOUT_BASIC
                 if core.HAVE_RAQM:
                     layout_engine = LAYOUT_RAQM
    -        if layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM:
    +        elif layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM:
                 layout_engine = LAYOUT_BASIC
     
             self.layout_engine = layout_engine
     
    -        if isPath(font):
    -            self.font = core.getfont(font, size, index, encoding, layout_engine=layout_engine)
    -        else:
    -            self.font_bytes = font.read()
    +        def load_from_bytes(f):
    +            self.font_bytes = f.read()
                 self.font = core.getfont(
    -                "", size, index, encoding, self.font_bytes, layout_engine)
    +                "", size, index, encoding, self.font_bytes, layout_engine
    +            )
    +
    +        if isPath(font):
    +            if sys.platform == "win32":
    +                font_bytes_path = font if isinstance(font, bytes) else font.encode()
    +                try:
    +                    font_bytes_path.decode("ascii")
    +                except UnicodeDecodeError:
    +                    # FreeType cannot load fonts with non-ASCII characters on Windows
    +                    # So load it into memory first
    +                    with open(font, "rb") as f:
    +                        load_from_bytes(f)
    +                    return
    +            self.font = core.getfont(
    +                font, size, index, encoding, layout_engine=layout_engine
    +            )
    +        else:
    +            load_from_bytes(font)
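The new `load_from_bytes` path works around FreeType's inability to open non-ASCII paths on Windows by reading the file into memory first. The same effect can be had explicitly by passing a file object, e.g. (path is hypothetical):

```python
import io
from PIL import ImageFont

# reading the font ourselves sidesteps any path-encoding issues
with open("C:/Fonts/微软雅黑.ttf", "rb") as f:  # hypothetical non-ASCII path
    font = ImageFont.truetype(io.BytesIO(f.read()), size=24)
```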
     
         def _multiline_split(self, text):
             split_character = "\n" if isinstance(text, str) else b"\n"
             return text.split(split_character)
     
         def getname(self):
    +        """
    +        :return: A tuple of the font family (e.g. Helvetica) and the font style
    +            (e.g. Bold)
    +        """
             return self.font.family, self.font.style
     
         def getmetrics(self):
    +        """
    +        :return: A tuple of the font ascent (the distance from the baseline to
    +            the highest outline point) and descent (the distance from the
    +            baseline to the lowest outline point, a negative value)
    +        """
             return self.font.ascent, self.font.descent
     
    -    def getsize(self, text, direction=None, features=None):
    -        size, offset = self.font.getsize(text, direction, features)
    -        return (size[0] + offset[0], size[1] + offset[1])
    +    def getsize(
    +        self, text, direction=None, features=None, language=None, stroke_width=0
    +    ):
    +        """
    +        Returns width and height (in pixels) of given text if rendered in font with
    +        provided direction, features, and language.
     
    -    def getsize_multiline(self, text, direction=None, spacing=4, features=None):
    +        :param text: Text to measure.
    +
    +        :param direction: Direction of the text. It can be 'rtl' (right to
    +                          left), 'ltr' (left to right) or 'ttb' (top to bottom).
    +                          Requires libraqm.
    +
    +                          .. versionadded:: 4.2.0
    +
    +        :param features: A list of OpenType font features to be used during text
    +                         layout. This is usually used to turn on optional
    +                         font features that are not enabled by default,
    +                         for example 'dlig' or 'ss01', but can be also
    +                         used to turn off default font features for
    +                         example '-liga' to disable ligatures or '-kern'
    +                         to disable kerning.  To get all supported
    +                         features, see
    +                         https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 4.2.0
    +
    +        :param language: Language of the text. Different languages may use
    +                         different glyph shapes or ligatures. This parameter tells
    +                         the font which language the text is in, and to apply the
    +                         correct substitutions as appropriate, if available.
    +                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 6.0.0
    +
    +        :param stroke_width: The width of the text stroke.
    +
    +                         .. versionadded:: 6.2.0
    +
    +        :return: (width, height)
    +        """
    +        size, offset = self.font.getsize(text, direction, features, language)
    +        return (
    +            size[0] + stroke_width * 2 + offset[0],
    +            size[1] + stroke_width * 2 + offset[1],
    +        )
    +
    +    def getsize_multiline(
    +        self,
    +        text,
    +        direction=None,
    +        spacing=4,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +    ):
    +        """
    +        Returns width and height (in pixels) of given text if rendered in font
    +        with provided direction, features, and language, while respecting
    +        newline characters.
    +
    +        :param text: Text to measure.
    +
    +        :param direction: Direction of the text. It can be 'rtl' (right to
    +                          left), 'ltr' (left to right) or 'ttb' (top to bottom).
    +                          Requires libraqm.
    +
    +        :param spacing: The vertical gap between lines, defaulting to 4 pixels.
    +
    +        :param features: A list of OpenType font features to be used during text
    +                         layout. This is usually used to turn on optional
    +                         font features that are not enabled by default,
    +                         for example 'dlig' or 'ss01', but can be also
    +                         used to turn off default font features for
    +                         example '-liga' to disable ligatures or '-kern'
    +                         to disable kerning.  To get all supported
    +                         features, see
    +                         https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist
    +                         Requires libraqm.
    +
    +        :param language: Language of the text. Different languages may use
    +                         different glyph shapes or ligatures. This parameter tells
    +                         the font which language the text is in, and to apply the
    +                         correct substitutions as appropriate, if available.
    +                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 6.0.0
    +
    +        :param stroke_width: The width of the text stroke.
    +
    +                         .. versionadded:: 6.2.0
    +
    +        :return: (width, height)
    +        """
             max_width = 0
             lines = self._multiline_split(text)
    -        line_spacing = self.getsize('A')[1] + spacing
    +        line_spacing = self.getsize("A", stroke_width=stroke_width)[1] + spacing
             for line in lines:
    -            line_width, line_height = self.getsize(line, direction, features)
    +            line_width, line_height = self.getsize(
    +                line, direction, features, language, stroke_width
    +            )
                 max_width = max(max_width, line_width)
     
    -        return max_width, len(lines)*line_spacing - spacing
    +        return max_width, len(lines) * line_spacing - spacing
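Per the new `getsize()` arithmetic above, a stroke pads the reported size by `stroke_width * 2` on each axis, and `getsize_multiline()` inherits that through its per-line calls. A quick check (font path hypothetical):

```python
from PIL import ImageFont

font = ImageFont.truetype("DejaVuSans.ttf", 32)  # hypothetical font path

w0, h0 = font.getsize("Hello")
w2, h2 = font.getsize("Hello", stroke_width=2)
assert (w2, h2) == (w0 + 4, h0 + 4)  # stroke_width * 2 per axis
```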
     
         def getoffset(self, text):
    +        """
    +        Returns the offset of given text. This is the gap between the
    +        starting coordinate and the first marking. Note that this gap is
    +        included in the result of :py:func:`~PIL.ImageFont.FreeTypeFont.getsize`.
    +
    +        :param text: Text to measure.
    +
    +        :return: A tuple of the x and y offset
    +        """
             return self.font.getsize(text)[1]
     
    -    def getmask(self, text, mode="", direction=None, features=None):
    -        return self.getmask2(text, mode, direction=direction, features=features)[0]
    +    def getmask(
    +        self,
    +        text,
    +        mode="",
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +    ):
    +        """
    +        Create a bitmap for the text.
     
    -    def getmask2(self, text, mode="", fill=Image.core.fill, direction=None, features=None, *args, **kwargs):
    -        size, offset = self.font.getsize(text, direction, features)
    +        If the font uses antialiasing, the bitmap should have mode ``L`` and use a
    +        maximum value of 255. Otherwise, it should have mode ``1``.
    +
    +        :param text: Text to render.
    +        :param mode: Used by some graphics drivers to indicate what mode the
    +                     driver prefers; if empty, the renderer may return either
    +                     mode. Note that the mode is always a string, to simplify
    +                     C-level implementations.
    +
    +                     .. versionadded:: 1.1.5
    +
    +        :param direction: Direction of the text. It can be 'rtl' (right to
    +                          left), 'ltr' (left to right) or 'ttb' (top to bottom).
    +                          Requires libraqm.
    +
    +                          .. versionadded:: 4.2.0
    +
    +        :param features: A list of OpenType font features to be used during text
    +                         layout. This is usually used to turn on optional
    +                         font features that are not enabled by default,
    +                         for example 'dlig' or 'ss01', but can be also
    +                         used to turn off default font features for
    +                         example '-liga' to disable ligatures or '-kern'
    +                         to disable kerning.  To get all supported
    +                         features, see
    +                         https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 4.2.0
    +
    +        :param language: Language of the text. Different languages may use
    +                         different glyph shapes or ligatures. This parameter tells
    +                         the font which language the text is in, and to apply the
    +                         correct substitutions as appropriate, if available.
    +                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 6.0.0
    +
    +        :param stroke_width: The width of the text stroke.
    +
    +                         .. versionadded:: 6.2.0
    +
    +        :return: An internal PIL storage memory instance as defined by the
    +                 :py:mod:`PIL.Image.core` interface module.
    +        """
    +        return self.getmask2(
    +            text,
    +            mode,
    +            direction=direction,
    +            features=features,
    +            language=language,
    +            stroke_width=stroke_width,
    +        )[0]
    +
    +    def getmask2(
    +        self,
    +        text,
    +        mode="",
    +        fill=Image.core.fill,
    +        direction=None,
    +        features=None,
    +        language=None,
    +        stroke_width=0,
    +        *args,
    +        **kwargs
    +    ):
    +        """
    +        Create a bitmap for the text.
    +
    +        If the font uses antialiasing, the bitmap should have mode ``L`` and use a
    +        maximum value of 255. Otherwise, it should have mode ``1``.
    +
    +        :param text: Text to render.
    +        :param mode: Used by some graphics drivers to indicate what mode the
    +                     driver prefers; if empty, the renderer may return either
    +                     mode. Note that the mode is always a string, to simplify
    +                     C-level implementations.
    +
    +                     .. versionadded:: 1.1.5
    +
    +        :param direction: Direction of the text. It can be 'rtl' (right to
    +                          left), 'ltr' (left to right) or 'ttb' (top to bottom).
    +                          Requires libraqm.
    +
    +                          .. versionadded:: 4.2.0
    +
    +        :param features: A list of OpenType font features to be used during text
    +                         layout. This is usually used to turn on optional
    +                         font features that are not enabled by default,
     +                         for example 'dlig' or 'ss01', but can also be
     +                         used to turn off default font features, for
    +                         example '-liga' to disable ligatures or '-kern'
    +                         to disable kerning.  To get all supported
    +                         features, see
    +                         https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 4.2.0
    +
    +        :param language: Language of the text. Different languages may use
    +                         different glyph shapes or ligatures. This parameter tells
    +                         the font which language the text is in, and to apply the
    +                         correct substitutions as appropriate, if available.
     +                         It should be a `BCP 47 language code
     +                         <https://www.w3.org/International/articles/language-tags/>`_
    +                         Requires libraqm.
    +
    +                         .. versionadded:: 6.0.0
    +
    +        :param stroke_width: The width of the text stroke.
    +
    +                         .. versionadded:: 6.2.0
    +
    +        :return: A tuple of an internal PIL storage memory instance as defined by the
    +                 :py:mod:`PIL.Image.core` interface module, and the text offset, the
    +                 gap between the starting coordinate and the first marking
    +        """
    +        size, offset = self.font.getsize(text, direction, features, language)
    +        size = size[0] + stroke_width * 2, size[1] + stroke_width * 2
             im = fill("L", size, 0)
    -        self.font.render(text, im.id, mode == "1", direction, features)
    +        self.font.render(
    +            text, im.id, mode == "1", direction, features, language, stroke_width
    +        )
             return im, offset
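
The hunks above thread the new `language` and `stroke_width` arguments through `getmask()` and `getmask2()`. A minimal sketch of the widened API, assuming a local TrueType file ("DejaVuSans.ttf" is only a placeholder path):

    # Sketch: exercising the new stroke_width parameter.
    from PIL import ImageFont

    font = ImageFont.truetype("DejaVuSans.ttf", size=24)

    # A stroke grows the reported size by 2 * stroke_width on each axis.
    w, h = font.getsize("Hello")
    sw, sh = font.getsize("Hello", stroke_width=2)
    assert (sw, sh) == (w + 4, h + 4)

    # getmask() returns the internal bitmap that ImageDraw.text() renders from;
    # passing language=... additionally requires a libraqm-enabled build.
    mask = font.getmask("Hello", mode="L", stroke_width=2)
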
     
    -    def font_variant(self, font=None, size=None, index=None, encoding=None,
    -                     layout_engine=None):
    +    def font_variant(
    +        self, font=None, size=None, index=None, encoding=None, layout_engine=None
    +    ):
             """
             Create a copy of this FreeTypeFont object,
             using any specified arguments to override the settings.
    @@ -194,15 +488,69 @@ class FreeTypeFont(object):
     
             :return: A FreeTypeFont object.
             """
    -        return FreeTypeFont(font=self.path if font is None else font,
    -                            size=self.size if size is None else size,
    -                            index=self.index if index is None else index,
    -                            encoding=self.encoding if encoding is None else encoding,
    -                            layout_engine=self.layout_engine if layout_engine is None else layout_engine
    -                            )
    +        return FreeTypeFont(
    +            font=self.path if font is None else font,
    +            size=self.size if size is None else size,
    +            index=self.index if index is None else index,
    +            encoding=self.encoding if encoding is None else encoding,
    +            layout_engine=layout_engine or self.layout_engine,
    +        )
    +
    +    def get_variation_names(self):
    +        """
    +        :returns: A list of the named styles in a variation font.
    +        :exception IOError: If the font is not a variation font.
    +        """
    +        try:
    +            names = self.font.getvarnames()
    +        except AttributeError:
    +            raise NotImplementedError("FreeType 2.9.1 or greater is required")
    +        return [name.replace(b"\x00", b"") for name in names]
    +
    +    def set_variation_by_name(self, name):
    +        """
    +        :param name: The name of the style.
    +        :exception IOError: If the font is not a variation font.
    +        """
    +        names = self.get_variation_names()
    +        if not isinstance(name, bytes):
    +            name = name.encode()
    +        index = names.index(name)
    +
    +        if index == getattr(self, "_last_variation_index", None):
    +            # When the same name is set twice in a row,
    +            # there is an 'unknown freetype error'
    +            # https://savannah.nongnu.org/bugs/?56186
    +            return
    +        self._last_variation_index = index
    +
    +        self.font.setvarname(index)
    +
    +    def get_variation_axes(self):
    +        """
    +        :returns: A list of the axes in a variation font.
    +        :exception IOError: If the font is not a variation font.
    +        """
    +        try:
    +            axes = self.font.getvaraxes()
    +        except AttributeError:
    +            raise NotImplementedError("FreeType 2.9.1 or greater is required")
    +        for axis in axes:
    +            axis["name"] = axis["name"].replace(b"\x00", b"")
    +        return axes
    +
    +    def set_variation_by_axes(self, axes):
    +        """
    +        :param axes: A list of values for each axis.
    +        :exception IOError: If the font is not a variation font.
    +        """
    +        try:
    +            self.font.setvaraxes(axes)
    +        except AttributeError:
    +            raise NotImplementedError("FreeType 2.9.1 or greater is required")
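
The four variation-font helpers above are new in this revision and need FreeType 2.9.1 or later. A hedged sketch; "AdobeVFPrototype.ttf" is a placeholder for any variable font:

    # Sketch: querying and selecting styles in a variable font.
    from PIL import ImageFont

    font = ImageFont.truetype("AdobeVFPrototype.ttf", size=36)
    try:
        print(font.get_variation_names())     # e.g. [b'ExtraLight', b'Bold', ...]
        font.set_variation_by_name("Bold")    # str is encoded to bytes internally

        axes = font.get_variation_axes()      # each axis: minimum/default/maximum/name
        font.set_variation_by_axes([a["default"] for a in axes])  # one value per axis
    except (NotImplementedError, OSError):
        pass                                  # old FreeType, or not a variation font
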
     
     
    -class TransposedFont(object):
    +class TransposedFont:
         "Wrapper for writing rotated or mirrored text"
     
         def __init__(self, font, orientation=None):
    @@ -245,35 +593,62 @@ def load(filename):
         return f
     
     
    -def truetype(font=None, size=10, index=0, encoding="",
    -             layout_engine=None):
    +def truetype(font=None, size=10, index=0, encoding="", layout_engine=None):
         """
         Load a TrueType or OpenType font from a file or file-like object,
         and create a font object.
         This function loads a font object from the given file or file-like
         object, and creates a font object for a font of the given size.
     
    +    Pillow uses FreeType to open font files. If you are opening many fonts
    +    simultaneously on Windows, be aware that Windows limits the number of files
    +    that can be open in C at once to 512. If you approach that limit, an
    +    ``OSError`` may be thrown, reporting that FreeType "cannot open resource".
    +
         This function requires the _imagingft service.
     
         :param font: A filename or file-like object containing a TrueType font.
    -                     Under Windows, if the file is not found in this filename,
    -                     the loader also looks in Windows :file:`fonts/` directory.
    +                 If the file is not found in this filename, the loader may also
    +                 search in other directories, such as the :file:`fonts/`
    +                 directory on Windows or :file:`/Library/Fonts/`,
    +                 :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on
    +                 macOS.
    +
         :param size: The requested size, in points.
         :param index: Which font face to load (default is first available face).
    -    :param encoding: Which font encoding to use (default is Unicode). Common
    -                     encodings are "unic" (Unicode), "symb" (Microsoft
    -                     Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert),
    -                     and "armn" (Apple Roman). See the FreeType documentation
    -                     for more information.
    +    :param encoding: Which font encoding to use (default is Unicode). Possible
    +                     encodings include (see the FreeType documentation for more
    +                     information):
    +
    +                     * "unic" (Unicode)
    +                     * "symb" (Microsoft Symbol)
    +                     * "ADOB" (Adobe Standard)
    +                     * "ADBE" (Adobe Expert)
    +                     * "ADBC" (Adobe Custom)
    +                     * "armn" (Apple Roman)
    +                     * "sjis" (Shift JIS)
    +                     * "gb  " (PRC)
    +                     * "big5"
    +                     * "wans" (Extended Wansung)
    +                     * "joha" (Johab)
    +                     * "lat1" (Latin-1)
    +
    +                     This specifies the character set to use. It does not alter the
    +                     encoding of any text provided in subsequent operations.
         :param layout_engine: Which layout engine to use, if available:
                          `ImageFont.LAYOUT_BASIC` or `ImageFont.LAYOUT_RAQM`.
         :return: A font object.
         :exception IOError: If the file could not be read.
         """
     
    -    try:
    +    def freetype(font):
             return FreeTypeFont(font, size, index, encoding, layout_engine)
    -    except IOError:
    +
    +    try:
    +        return freetype(font)
    +    except OSError:
    +        if not isPath(font):
    +            raise
             ttf_filename = os.path.basename(font)
     
             dirs = []
    @@ -284,17 +659,19 @@ def truetype(font=None, size=10, index=0, encoding="",
                 windir = os.environ.get("WINDIR")
                 if windir:
                     dirs.append(os.path.join(windir, "fonts"))
    -        elif sys.platform in ('linux', 'linux2'):
    +        elif sys.platform in ("linux", "linux2"):
                 lindirs = os.environ.get("XDG_DATA_DIRS", "")
                 if not lindirs:
                     # According to the freedesktop spec, XDG_DATA_DIRS should
                     # default to /usr/share
    -                lindirs = '/usr/share'
    -            dirs += [os.path.join(lindir, "fonts")
    -                     for lindir in lindirs.split(":")]
    -        elif sys.platform == 'darwin':
    -            dirs += ['/Library/Fonts', '/System/Library/Fonts',
    -                     os.path.expanduser('~/Library/Fonts')]
    +                lindirs = "/usr/share"
    +            dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")]
    +        elif sys.platform == "darwin":
    +            dirs += [
    +                "/Library/Fonts",
    +                "/System/Library/Fonts",
    +                os.path.expanduser("~/Library/Fonts"),
    +            ]
     
             ext = os.path.splitext(ttf_filename)[1]
             first_font_with_a_different_extension = None
    @@ -302,17 +679,15 @@ def truetype(font=None, size=10, index=0, encoding="",
                 for walkroot, walkdir, walkfilenames in os.walk(directory):
                     for walkfilename in walkfilenames:
                         if ext and walkfilename == ttf_filename:
    -                        fontpath = os.path.join(walkroot, walkfilename)
    -                        return FreeTypeFont(fontpath, size, index, encoding, layout_engine)
    +                        return freetype(os.path.join(walkroot, walkfilename))
                         elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename:
                             fontpath = os.path.join(walkroot, walkfilename)
    -                        if os.path.splitext(fontpath)[1] == '.ttf':
    -                            return FreeTypeFont(fontpath, size, index, encoding, layout_engine)
    +                        if os.path.splitext(fontpath)[1] == ".ttf":
    +                            return freetype(fontpath)
                             if not ext and first_font_with_a_different_extension is None:
                                 first_font_with_a_different_extension = fontpath
             if first_font_with_a_different_extension:
    -            return FreeTypeFont(first_font_with_a_different_extension, size,
    -                                index, encoding, layout_engine)
    +            return freetype(first_font_with_a_different_extension)
             raise
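
With the retry wrapped in a local `freetype()` helper, `truetype()` now re-raises only after the platform font directories have been walked. A sketch of the visible behaviour; "arial.ttf" is only an example name:

    from PIL import ImageFont

    try:
        font = ImageFont.truetype("arial.ttf", size=16)
    except OSError:
        # raised only after the Windows, macOS and XDG font
        # directories were searched as well
        font = ImageFont.load_default()
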
     
     
    @@ -328,15 +703,12 @@ def load_path(filename):
         for directory in sys.path:
             if isDirectory(directory):
                 if not isinstance(filename, str):
    -                if py3:
    -                    filename = filename.decode("utf-8")
    -                else:
    -                    filename = filename.encode("utf-8")
    +                filename = filename.decode("utf-8")
                 try:
                     return load(os.path.join(directory, filename))
    -            except IOError:
    +            except OSError:
                     pass
    -    raise IOError("cannot find font file")
    +    raise OSError("cannot find font file")
     
     
     def load_default():
    @@ -346,12 +718,12 @@ def load_default():
     
         :return: A font object.
         """
    -    from io import BytesIO
    -    import base64
         f = ImageFont()
         f._load_pilfont_data(
             # courB08
    -        BytesIO(base64.b64decode(b'''
    +        BytesIO(
    +            base64.b64decode(
    +                b"""
     UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
     AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
     AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
    @@ -443,7 +815,13 @@ AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA
     pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG
     AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA////
     +QAGAAIAzgAKANUAEw==
    -''')), Image.open(BytesIO(base64.b64decode(b'''
    +"""
    +            )
    +        ),
    +        Image.open(
    +            BytesIO(
    +                base64.b64decode(
    +                    b"""
     iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u
     Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9
     M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g
    @@ -467,5 +845,9 @@ evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA
     AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v//
     Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR
     w7IkEbzhVQAAAABJRU5ErkJggg==
    -'''))))
    +"""
    +                )
    +            )
    +        ),
    +    )
         return f
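
`load_default()` still decodes the embedded courB08 data, now with the `BytesIO`/`base64` nesting made explicit. A short sketch that needs no font files at all:

    from PIL import Image, ImageDraw, ImageFont

    im = Image.new("RGB", (120, 30), "white")
    draw = ImageDraw.Draw(im)
    draw.text((4, 8), "hello", fill="black", font=ImageFont.load_default())
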
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageGrab.py b/server/www/packages/packages-windows/x86/PIL/ImageGrab.py
    index 712b02c..66e2e85 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageGrab.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageGrab.py
    @@ -15,35 +15,54 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import sys
    +
     from . import Image
     
    -import sys
    -if sys.platform not in ["win32", "darwin"]:
    -    raise ImportError("ImageGrab is macOS and Windows only")
    -
    -if sys.platform == "win32":
    -    grabber = Image.core.grabscreen
    -elif sys.platform == "darwin":
    +if sys.platform == "darwin":
         import os
         import tempfile
         import subprocess
     
     
    -def grab(bbox=None):
    -    if sys.platform == "darwin":
    -        fh, filepath = tempfile.mkstemp('.png')
    -        os.close(fh)
    -        subprocess.call(['screencapture', '-x', filepath])
    -        im = Image.open(filepath)
    -        im.load()
    -        os.unlink(filepath)
    -    else:
    -        size, data = grabber()
    -        im = Image.frombytes(
    -            "RGB", size, data,
    -            # RGB, 32-bit line padding, origin lower left corner
    -            "raw", "BGR", (size[0]*3 + 3) & -4, -1
    +def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None):
    +    if xdisplay is None:
    +        if sys.platform == "darwin":
    +            fh, filepath = tempfile.mkstemp(".png")
    +            os.close(fh)
    +            subprocess.call(["screencapture", "-x", filepath])
    +            im = Image.open(filepath)
    +            im.load()
    +            os.unlink(filepath)
    +            if bbox:
    +                im_cropped = im.crop(bbox)
    +                im.close()
    +                return im_cropped
    +            return im
    +        elif sys.platform == "win32":
    +            offset, size, data = Image.core.grabscreen_win32(
    +                include_layered_windows, all_screens
                 )
    +            im = Image.frombytes(
    +                "RGB",
    +                size,
    +                data,
    +                # RGB, 32-bit line padding, origin lower left corner
    +                "raw",
    +                "BGR",
    +                (size[0] * 3 + 3) & -4,
    +                -1,
    +            )
    +            if bbox:
    +                x0, y0 = offset
    +                left, top, right, bottom = bbox
    +                im = im.crop((left - x0, top - y0, right - x0, bottom - y0))
    +            return im
    +    # use xdisplay=None for default display on non-win32/macOS systems
    +    if not Image.core.HAVE_XCB:
    +        raise IOError("Pillow was built without XCB support")
    +    size, data = Image.core.grabscreen_x11(xdisplay)
    +    im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1)
         if bbox:
             im = im.crop(bbox)
         return im
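
`grab()` picks up `include_layered_windows`, `all_screens` and `xdisplay`. A sketch of the platform-specific keywords; the values are illustrative, and the X11 path assumes a Pillow build with XCB support:

    import sys

    from PIL import ImageGrab

    im = ImageGrab.grab(bbox=(0, 0, 200, 200))   # region of the primary display
    if sys.platform == "win32":
        im = ImageGrab.grab(all_screens=True)    # span the whole virtual desktop
    # On other X11 systems grab() now goes through XCB; a specific server can
    # be chosen with e.g. ImageGrab.grab(xdisplay=":0").
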
    @@ -51,14 +70,16 @@ def grab(bbox=None):
     
     def grabclipboard():
         if sys.platform == "darwin":
    -        fh, filepath = tempfile.mkstemp('.jpg')
    +        fh, filepath = tempfile.mkstemp(".jpg")
             os.close(fh)
             commands = [
    -            "set theFile to (open for access POSIX file \""+filepath+"\" with write permission)",
    +            'set theFile to (open for access POSIX file "'
    +            + filepath
    +            + '" with write permission)',
                 "try",
    -                "write (the clipboard as JPEG picture) to theFile",
    +            "    write (the clipboard as JPEG picture) to theFile",
                 "end try",
    -            "close access theFile"
    +            "close access theFile",
             ]
             script = ["osascript"]
             for command in commands:
    @@ -71,10 +92,13 @@ def grabclipboard():
                 im.load()
             os.unlink(filepath)
             return im
    -    else:
    -        data = Image.core.grabclipboard()
    +    elif sys.platform == "win32":
    +        data = Image.core.grabclipboard_win32()
             if isinstance(data, bytes):
                 from . import BmpImagePlugin
                 import io
    +
                 return BmpImagePlugin.DibImageFile(io.BytesIO(data))
             return data
    +    else:
    +        raise NotImplementedError("ImageGrab.grabclipboard() is macOS and Windows only")
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMath.py b/server/www/packages/packages-windows/x86/PIL/ImageMath.py
    index d985877..adbb940 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageMath.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageMath.py
    @@ -15,23 +15,18 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image, _imagingmath
    -from ._util import py3
    +import builtins
     
    -try:
    -    import builtins
    -except ImportError:
    -    import __builtin__
    -    builtins = __builtin__
    +from . import Image, _imagingmath
     
     VERBOSE = 0
     
     
     def _isconstant(v):
    -    return isinstance(v, int) or isinstance(v, float)
    +    return isinstance(v, (int, float))
     
     
    -class _Operand(object):
    +class _Operand:
         """Wraps an image operand, providing standard operators"""
     
         def __init__(self, im):
    @@ -61,7 +56,7 @@ class _Operand(object):
                 out = Image.new(mode or im1.mode, im1.size, None)
                 im1.load()
                 try:
    -                op = getattr(_imagingmath, op+"_"+im1.mode)
    +                op = getattr(_imagingmath, op + "_" + im1.mode)
                 except AttributeError:
                     raise TypeError("bad operand type for '%s'" % op)
                 _imagingmath.unop(op, out.im.id, im1.im.id)
    @@ -78,8 +73,7 @@ class _Operand(object):
                         raise ValueError("mode mismatch")
                 if im1.size != im2.size:
                     # crop both arguments to a common size
    -                size = (min(im1.size[0], im2.size[0]),
    -                        min(im1.size[1], im2.size[1]))
    +                size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1]))
                     if im1.size != size:
                         im1 = im1.crop((0, 0) + size)
                     if im2.size != size:
    @@ -90,7 +84,7 @@ class _Operand(object):
                 im1.load()
                 im2.load()
                 try:
    -                op = getattr(_imagingmath, op+"_"+im1.mode)
    +                op = getattr(_imagingmath, op + "_" + im1.mode)
                 except AttributeError:
                     raise TypeError("bad operand type for '%s'" % op)
                 _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
    @@ -101,11 +95,6 @@ class _Operand(object):
             # an image is "true" if it contains at least one non-zero pixel
             return self.im.getbbox() is not None
     
    -    if not py3:
    -        # Provide __nonzero__ for pre-Py3k
    -        __nonzero__ = __bool__
    -        del __bool__
    -
         def __abs__(self):
             return self.apply("abs", self)
     
    @@ -152,13 +141,6 @@ class _Operand(object):
         def __rpow__(self, other):
             return self.apply("pow", other, self)
     
    -    if not py3:
    -        # Provide __div__ and __rdiv__ for pre-Py3k
    -        __div__ = __truediv__
    -        __rdiv__ = __rtruediv__
    -        del __truediv__
    -        del __rtruediv__
    -
         # bitwise
         def __invert__(self):
             return self.apply("invert", self)
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMode.py b/server/www/packages/packages-windows/x86/PIL/ImageMode.py
    index b227f21..9882883 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageMode.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageMode.py
    @@ -17,7 +17,7 @@
     _modes = None
     
     
    -class ModeDescriptor(object):
    +class ModeDescriptor:
         """Wrapper for mode strings."""
     
         def __init__(self, mode, bands, basemode, basetype):
    @@ -37,6 +37,7 @@ def getmode(mode):
             # initialize mode cache
     
             from . import Image
    +
             modes = {}
             # core modes
             for m, (basemode, basetype, bands) in Image._MODEINFO.items():
    @@ -47,9 +48,17 @@ def getmode(mode):
             modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L")
             modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L")
             # mapping modes
    -        modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L")
    -        modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L")
    -        modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L")
    +        for i16mode in (
    +            "I;16",
    +            "I;16S",
    +            "I;16L",
    +            "I;16LS",
    +            "I;16B",
    +            "I;16BS",
    +            "I;16N",
    +            "I;16NS",
    +        ):
    +            modes[i16mode] = ModeDescriptor(i16mode, ("I",), "L", "L")
             # set global mode cache atomically
             _modes = modes
         return _modes[mode]
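
A sketch reading back one of the newly registered 16-bit descriptors; note that `bands` is now the tuple `("I",)` where the old code used the bare string `"I"`:

    from PIL import ImageMode

    desc = ImageMode.getmode("I;16N")
    print(desc.mode, desc.bands, desc.basemode, desc.basetype)   # I;16N ('I',) L L
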
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMorph.py b/server/www/packages/packages-windows/x86/PIL/ImageMorph.py
    index 579ee4e..d1ec09e 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageMorph.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageMorph.py
    @@ -5,15 +5,27 @@
     #
      # Copyright (c) 2014 Dov Grobgeld <dov.grobgeld@gmail.com>
     
    -from __future__ import print_function
    +import re
     
     from . import Image, _imagingmorph
    -import re
     
     LUT_SIZE = 1 << 9
     
    +# fmt: off
    +ROTATION_MATRIX = [
    +    6, 3, 0,
    +    7, 4, 1,
    +    8, 5, 2,
    +]
    +MIRROR_MATRIX = [
    +    2, 1, 0,
    +    5, 4, 3,
    +    8, 7, 6,
    +]
    +# fmt: on
     
    -class LutBuilder(object):
    +
    +class LutBuilder:
         """A class for building a MorphLut from a descriptive language
     
           The input patterns is a list of a strings sequences like these::
    @@ -48,6 +60,7 @@ class LutBuilder(object):
               lut = lb.build_lut()
     
         """
    +
         def __init__(self, patterns=None, op_name=None):
             if patterns is not None:
                 self.patterns = patterns
    @@ -56,20 +69,19 @@ class LutBuilder(object):
             self.lut = None
             if op_name is not None:
                 known_patterns = {
    -                'corner': ['1:(... ... ...)->0',
    -                           '4:(00. 01. ...)->1'],
    -                'dilation4': ['4:(... .0. .1.)->1'],
    -                'dilation8': ['4:(... .0. .1.)->1',
    -                              '4:(... .0. ..1)->1'],
    -                'erosion4': ['4:(... .1. .0.)->0'],
    -                'erosion8': ['4:(... .1. .0.)->0',
    -                             '4:(... .1. ..0)->0'],
    -                'edge': ['1:(... ... ...)->0',
    -                         '4:(.0. .1. ...)->1',
    -                         '4:(01. .1. ...)->1']
    +                "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"],
    +                "dilation4": ["4:(... .0. .1.)->1"],
    +                "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"],
    +                "erosion4": ["4:(... .1. .0.)->0"],
    +                "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"],
    +                "edge": [
    +                    "1:(... ... ...)->0",
    +                    "4:(.0. .1. ...)->1",
    +                    "4:(01. .1. ...)->1",
    +                ],
                 }
                 if op_name not in known_patterns:
    -                raise Exception('Unknown pattern '+op_name+'!')
    +                raise Exception("Unknown pattern " + op_name + "!")
     
                 self.patterns = known_patterns[op_name]
     
    @@ -88,8 +100,8 @@ class LutBuilder(object):
             """string_permute takes a pattern and a permutation and returns the
             string permuted according to the permutation list.
             """
    -        assert(len(permutation) == 9)
    -        return ''.join(pattern[p] for p in permutation)
    +        assert len(permutation) == 9
    +        return "".join(pattern[p] for p in permutation)
     
         def _pattern_permute(self, basic_pattern, options, basic_result):
             """pattern_permute takes a basic pattern and its result and clones
    @@ -98,32 +110,25 @@ class LutBuilder(object):
             patterns = [(basic_pattern, basic_result)]
     
             # rotations
    -        if '4' in options:
    +        if "4" in options:
                 res = patterns[-1][1]
                 for i in range(4):
                     patterns.append(
    -                    (self._string_permute(patterns[-1][0], [6, 3, 0,
    -                                                            7, 4, 1,
    -                                                            8, 5, 2]), res))
    +                    (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res)
    +                )
             # mirror
    -        if 'M' in options:
    +        if "M" in options:
                 n = len(patterns)
                 for pattern, res in patterns[0:n]:
    -                patterns.append(
    -                    (self._string_permute(pattern, [2, 1, 0,
    -                                                    5, 4, 3,
    -                                                    8, 7, 6]), res))
    +                patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res))
     
             # negate
    -        if 'N' in options:
    +        if "N" in options:
                 n = len(patterns)
                 for pattern, res in patterns[0:n]:
                     # Swap 0 and 1
    -                pattern = (pattern
    -                           .replace('0', 'Z')
    -                           .replace('1', '0')
    -                           .replace('Z', '1'))
    -                res = 1-int(res)
    +                pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1")
    +                res = 1 - int(res)
                     patterns.append((pattern, res))
     
             return patterns
    @@ -138,27 +143,21 @@ class LutBuilder(object):
     
             # Parse and create symmetries of the patterns strings
             for p in self.patterns:
    -            m = re.search(
    -                r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', ''))
    +            m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", ""))
                 if not m:
    -                raise Exception('Syntax error in pattern "'+p+'"')
    +                raise Exception('Syntax error in pattern "' + p + '"')
                 options = m.group(1)
                 pattern = m.group(2)
                 result = int(m.group(3))
     
                 # Get rid of spaces
    -            pattern = pattern.replace(' ', '').replace('\n', '')
    +            pattern = pattern.replace(" ", "").replace("\n", "")
     
                 patterns += self._pattern_permute(pattern, options, result)
     
    -#        # Debugging
    -#        for p, r in patterns:
    -#            print(p, r)
    -#        print('--')
    -
             # compile the patterns into regular expressions for speed
             for i, pattern in enumerate(patterns):
    -            p = pattern[0].replace('.', 'X').replace('X', '[01]')
    +            p = pattern[0].replace(".", "X").replace("X", "[01]")
                 p = re.compile(p)
                 patterns[i] = (p, pattern[1])
     
    @@ -168,7 +167,7 @@ class LutBuilder(object):
             for i in range(LUT_SIZE):
                 # Build the bit pattern
                 bitpattern = bin(i)[2:]
    -            bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1]
    +            bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1]
     
                 for p, r in patterns:
                     if p.match(bitpattern):
    @@ -177,13 +176,10 @@ class LutBuilder(object):
             return self.lut
     
     
    -class MorphOp(object):
    +class MorphOp:
         """A class for binary morphological operators"""
     
    -    def __init__(self,
    -                 lut=None,
    -                 op_name=None,
    -                 patterns=None):
    +    def __init__(self, lut=None, op_name=None, patterns=None):
             """Create a binary morphological operator"""
             self.lut = lut
             if op_name is not None:
    @@ -197,13 +193,12 @@ class MorphOp(object):
             Returns a tuple of the number of changed pixels and the
             morphed image"""
             if self.lut is None:
    -            raise Exception('No operator loaded')
    +            raise Exception("No operator loaded")
     
    -        if image.mode != 'L':
    -            raise Exception('Image must be binary, meaning it must use mode L')
    +        if image.mode != "L":
    +            raise Exception("Image must be binary, meaning it must use mode L")
             outimage = Image.new(image.mode, image.size, None)
    -        count = _imagingmorph.apply(
    -            bytes(self.lut), image.im.id, outimage.im.id)
    +        count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id)
             return count, outimage
     
         def match(self, image):
    @@ -213,10 +208,10 @@ class MorphOp(object):
             Returns a list of tuples of (x,y) coordinates
             of all matching pixels. See :ref:`coordinate-system`."""
             if self.lut is None:
    -            raise Exception('No operator loaded')
    +            raise Exception("No operator loaded")
     
    -        if image.mode != 'L':
    -            raise Exception('Image must be binary, meaning it must use mode L')
    +        if image.mode != "L":
    +            raise Exception("Image must be binary, meaning it must use mode L")
             return _imagingmorph.match(bytes(self.lut), image.im.id)
     
         def get_on_pixels(self, image):
    @@ -225,24 +220,24 @@ class MorphOp(object):
             Returns a list of tuples of (x,y) coordinates
             of all matching pixels. See :ref:`coordinate-system`."""
     
    -        if image.mode != 'L':
    -            raise Exception('Image must be binary, meaning it must use mode L')
    +        if image.mode != "L":
    +            raise Exception("Image must be binary, meaning it must use mode L")
             return _imagingmorph.get_on_pixels(image.im.id)
     
         def load_lut(self, filename):
             """Load an operator from an mrl file"""
    -        with open(filename, 'rb') as f:
    +        with open(filename, "rb") as f:
                 self.lut = bytearray(f.read())
     
             if len(self.lut) != LUT_SIZE:
                 self.lut = None
    -            raise Exception('Wrong size operator file!')
    +            raise Exception("Wrong size operator file!")
     
         def save_lut(self, filename):
             """Save an operator to an mrl file"""
             if self.lut is None:
    -            raise Exception('No operator loaded')
    -        with open(filename, 'wb') as f:
    +            raise Exception("No operator loaded")
    +        with open(filename, "wb") as f:
                 f.write(self.lut)
     
         def set_lut(self, lut):
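
The pattern tables above are compiled by `LutBuilder` and consumed through `MorphOp`. A minimal sketch with one of the built-in operators; images must be mode "L" with 0/255 pixel values:

    from PIL import Image, ImageMorph

    im = Image.new("L", (9, 9), 0)
    im.putpixel((4, 4), 255)                 # a single "on" pixel

    op = ImageMorph.MorphOp(op_name="dilation8")
    changed, out = op.apply(im)              # the pixel grows into a 3x3 block
    print(changed)                           # 8 neighbours switched on
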
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageOps.py b/server/www/packages/packages-windows/x86/PIL/ImageOps.py
    index 25d491a..e4e0840 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageOps.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageOps.py
    @@ -17,16 +17,15 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image
    -from ._util import isStringType
    -import operator
     import functools
    -import warnings
    +import operator
     
    +from . import Image
     
     #
     # helpers
     
    +
     def _border(border):
         if isinstance(border, tuple):
             if len(border) == 2:
    @@ -39,8 +38,9 @@ def _border(border):
     
     
     def _color(color, mode):
    -    if isStringType(color):
    +    if isinstance(color, str):
             from . import ImageColor
    +
             color = ImageColor.getcolor(color, mode)
         return color
     
    @@ -54,7 +54,8 @@ def _lut(image, lut):
                 lut = lut + lut + lut
             return image.point(lut)
         else:
    -        raise IOError("not supported for this image mode")
    +        raise OSError("not supported for this image mode")
    +
     
     #
     # actions
    @@ -76,7 +77,7 @@ def autocontrast(image, cutoff=0, ignore=None):
         histogram = image.histogram()
         lut = []
         for layer in range(0, len(histogram), 256):
    -        h = histogram[layer:layer+256]
    +        h = histogram[layer : layer + 256]
             if ignore is not None:
                 # get rid of outliers
                 try:
    @@ -136,32 +137,135 @@ def autocontrast(image, cutoff=0, ignore=None):
         return _lut(image, lut)
     
     
    -def colorize(image, black, white):
    +def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127):
         """
    -    Colorize grayscale image.  The **black** and **white**
    -    arguments should be RGB tuples; this function calculates a color
    -    wedge mapping all black pixels in the source image to the first
    -    color, and all white pixels to the second color.
    +    Colorize grayscale image.
    +    This function calculates a color wedge which maps all black pixels in
    +    the source image to the first color and all white pixels to the
    +    second color. If **mid** is specified, it uses three-color mapping.
    +    The **black** and **white** arguments should be RGB tuples or color names;
    +    optionally you can use three-color mapping by also specifying **mid**.
     +    Mapping positions for any of the colors can be specified
     +    (e.g. **blackpoint**), where each parameter is the integer
     +    value [0, 255] at which the corresponding color should be mapped.
    +    These parameters must have logical order, such that
    +    **blackpoint** <= **midpoint** <= **whitepoint** (if **mid** is specified).
     
         :param image: The image to colorize.
         :param black: The color to use for black input pixels.
         :param white: The color to use for white input pixels.
    +    :param mid: The color to use for midtone input pixels.
    +    :param blackpoint: an int value [0, 255] for the black mapping.
    +    :param whitepoint: an int value [0, 255] for the white mapping.
    +    :param midpoint: an int value [0, 255] for the midtone mapping.
         :return: An image.
         """
    +
    +    # Initial asserts
         assert image.mode == "L"
    +    if mid is None:
    +        assert 0 <= blackpoint <= whitepoint <= 255
    +    else:
    +        assert 0 <= blackpoint <= midpoint <= whitepoint <= 255
    +
    +    # Define colors from arguments
         black = _color(black, "RGB")
         white = _color(white, "RGB")
    +    if mid is not None:
    +        mid = _color(mid, "RGB")
    +
    +    # Empty lists for the mapping
         red = []
         green = []
         blue = []
    -    for i in range(256):
    -        red.append(black[0]+i*(white[0]-black[0])//255)
    -        green.append(black[1]+i*(white[1]-black[1])//255)
    -        blue.append(black[2]+i*(white[2]-black[2])//255)
    +
    +    # Create the low-end values
    +    for i in range(0, blackpoint):
    +        red.append(black[0])
    +        green.append(black[1])
    +        blue.append(black[2])
    +
    +    # Create the mapping (2-color)
    +    if mid is None:
    +
    +        range_map = range(0, whitepoint - blackpoint)
    +
    +        for i in range_map:
    +            red.append(black[0] + i * (white[0] - black[0]) // len(range_map))
    +            green.append(black[1] + i * (white[1] - black[1]) // len(range_map))
    +            blue.append(black[2] + i * (white[2] - black[2]) // len(range_map))
    +
    +    # Create the mapping (3-color)
    +    else:
    +
    +        range_map1 = range(0, midpoint - blackpoint)
    +        range_map2 = range(0, whitepoint - midpoint)
    +
    +        for i in range_map1:
    +            red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1))
    +            green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1))
    +            blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1))
    +        for i in range_map2:
    +            red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2))
    +            green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2))
    +            blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2))
    +
    +    # Create the high-end values
    +    for i in range(0, 256 - whitepoint):
    +        red.append(white[0])
    +        green.append(white[1])
    +        blue.append(white[2])
    +
    +    # Return converted image
         image = image.convert("RGB")
         return _lut(image, red + green + blue)
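
A sketch of both mapping styles enabled by the new parameters; "photo.jpg" is a placeholder input file:

    from PIL import Image, ImageOps

    gray = Image.open("photo.jpg").convert("L")
    duotone = ImageOps.colorize(gray, black="navy", white="ivory")
    tritone = ImageOps.colorize(
        gray, black="navy", white="ivory", mid="orange",
        blackpoint=16, midpoint=128, whitepoint=235,
    )
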
     
     
    +def pad(image, size, method=Image.BICUBIC, color=None, centering=(0.5, 0.5)):
    +    """
    +    Returns a sized and padded version of the image, expanded to fill the
    +    requested aspect ratio and size.
    +
    +    :param image: The image to size and crop.
    +    :param size: The requested output size in pixels, given as a
    +                 (width, height) tuple.
    +    :param method: What resampling method to use. Default is
    +                   :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`.
    +    :param color: The background color of the padded image.
    +    :param centering: Control the position of the original image within the
    +                      padded version.
    +
    +                          (0.5, 0.5) will keep the image centered
    +                          (0, 0) will keep the image aligned to the top left
    +                          (1, 1) will keep the image aligned to the bottom
    +                          right
    +    :return: An image.
    +    """
    +
    +    im_ratio = image.width / image.height
    +    dest_ratio = size[0] / size[1]
    +
    +    if im_ratio == dest_ratio:
    +        out = image.resize(size, resample=method)
    +    else:
    +        out = Image.new(image.mode, size, color)
    +        if im_ratio > dest_ratio:
    +            new_height = int(image.height / image.width * size[0])
    +            if new_height != size[1]:
    +                image = image.resize((size[0], new_height), resample=method)
    +
    +            y = int((size[1] - new_height) * max(0, min(centering[1], 1)))
    +            out.paste(image, (0, y))
    +        else:
    +            new_width = int(image.width / image.height * size[1])
    +            if new_width != size[0]:
    +                image = image.resize((new_width, size[1]), resample=method)
    +
    +            x = int((size[0] - new_width) * max(0, min(centering[0], 1)))
    +            out.paste(image, (x, 0))
    +    return out
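
`pad()` is the letterboxing counterpart to `fit()`: the image is resized to fit inside the target box and the remainder is filled with `color`. A quick sketch with a placeholder input:

    from PIL import Image, ImageOps

    im = Image.open("photo.jpg")
    thumb = ImageOps.pad(im, (256, 256), color="black")   # padded, never cropped
    assert thumb.size == (256, 256)
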
    +
    +
     def crop(image, border=0):
         """
         Remove border from image.  The same amount of pixels are removed
    @@ -174,12 +278,10 @@ def crop(image, border=0):
         :return: An image.
         """
         left, top, right, bottom = _border(border)
    -    return image.crop(
    -        (left, top, image.size[0]-right, image.size[1]-bottom)
    -        )
    +    return image.crop((left, top, image.size[0] - right, image.size[1] - bottom))
     
     
    -def scale(image, factor, resample=Image.NEAREST):
    +def scale(image, factor, resample=Image.BICUBIC):
         """
         Returns a rescaled image by a specific factor given in parameter.
         A factor greater than 1 expands the image, between 0 and 1 contracts the
    @@ -187,8 +289,8 @@ def scale(image, factor, resample=Image.NEAREST):
     
         :param image: The image to rescale.
         :param factor: The expansion factor, as a float.
    -    :param resample: An optional resampling filter. Same values possible as
    -       in the PIL.Image.resize function.
    +    :param resample: What resampling method to use. Default is
    +                     :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`.
         :returns: An :py:class:`~PIL.Image.Image` object.
         """
         if factor == 1:
    @@ -196,8 +298,7 @@ def scale(image, factor, resample=Image.NEAREST):
         elif factor <= 0:
             raise ValueError("the factor must be greater than 0")
         else:
    -        size = (int(round(factor * image.width)),
    -                int(round(factor * image.height)))
    +        size = (round(factor * image.width), round(factor * image.height))
             return image.resize(size, resample)
     
     
    @@ -212,9 +313,7 @@ def deform(image, deformer, resample=Image.BILINEAR):
            in the PIL.Image.transform function.
         :return: An image.
         """
    -    return image.transform(
    -        image.size, Image.MESH, deformer.getmesh(image), resample
    -        )
    +    return image.transform(image.size, Image.MESH, deformer.getmesh(image), resample)
     
     
     def equalize(image, mask=None):
    @@ -233,7 +332,7 @@ def equalize(image, mask=None):
         h = image.histogram(mask)
         lut = []
         for b in range(0, len(h), 256):
    -        histo = [_f for _f in h[b:b+256] if _f]
    +        histo = [_f for _f in h[b : b + 256] if _f]
             if len(histo) <= 1:
                 lut.extend(list(range(256)))
             else:
    @@ -244,7 +343,7 @@ def equalize(image, mask=None):
                     n = step // 2
                     for i in range(256):
                         lut.append(n // step)
    -                    n = n + h[i+b]
    +                    n = n + h[i + b]
         return _lut(image, lut)
     
     
    @@ -265,7 +364,7 @@ def expand(image, border=0, fill=0):
         return out
     
     
    -def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)):
    +def fit(image, size, method=Image.BICUBIC, bleed=0.0, centering=(0.5, 0.5)):
         """
         Returns a sized and cropped version of the image, cropped to the
         requested aspect ratio and size.
    @@ -276,10 +375,11 @@ def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)):
         :param size: The requested output size in pixels, given as a
                      (width, height) tuple.
         :param method: What resampling method to use. Default is
    -                   :py:attr:`PIL.Image.NEAREST`.
    -    :param bleed: Remove a border around the outside of the image (from all
    +                   :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`.
    +    :param bleed: Remove a border around the outside of the image from all
                       four edges. The value is a decimal percentage (use 0.01 for
                       one percent). The default value is 0 (no border).
    +                  Cannot be greater than or equal to 0.5.
         :param centering: Control the cropping position.  Use (0.5, 0.5) for
                           center cropping (e.g. if cropping the width, take 50% off
                           of the left side, and therefore 50% off the right side).
    @@ -297,66 +397,56 @@ def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)):
         # kevin@cazabon.com
         # http://www.cazabon.com
     
    -    # ensure inputs are valid
    -    if not isinstance(centering, list):
    -        centering = [centering[0], centering[1]]
    +    # ensure centering is mutable
    +    centering = list(centering)
     
    -    if centering[0] > 1.0 or centering[0] < 0.0:
    -        centering[0] = 0.50
    -    if centering[1] > 1.0 or centering[1] < 0.0:
    -        centering[1] = 0.50
    +    if not 0.0 <= centering[0] <= 1.0:
    +        centering[0] = 0.5
    +    if not 0.0 <= centering[1] <= 1.0:
    +        centering[1] = 0.5
     
    -    if bleed > 0.49999 or bleed < 0.0:
    +    if not 0.0 <= bleed < 0.5:
             bleed = 0.0
     
         # calculate the area to use for resizing and cropping, subtracting
         # the 'bleed' around the edges
     
         # number of pixels to trim off on Top and Bottom, Left and Right
    -    bleedPixels = (
    -        int((float(bleed) * float(image.size[0])) + 0.5),
    -        int((float(bleed) * float(image.size[1])) + 0.5)
    -        )
    +    bleed_pixels = (bleed * image.size[0], bleed * image.size[1])
     
    -    liveArea = (0, 0, image.size[0], image.size[1])
    -    if bleed > 0.0:
    -        liveArea = (
    -            bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1,
    -            image.size[1] - bleedPixels[1] - 1
    -            )
    +    live_size = (
    +        image.size[0] - bleed_pixels[0] * 2,
    +        image.size[1] - bleed_pixels[1] * 2,
    +    )
     
    -    liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1])
    -
    -    # calculate the aspect ratio of the liveArea
    -    liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1])
    +    # calculate the aspect ratio of the live_size
    +    live_size_ratio = live_size[0] / live_size[1]
     
         # calculate the aspect ratio of the output image
    -    aspectRatio = float(size[0]) / float(size[1])
    +    output_ratio = size[0] / size[1]
     
         # figure out if the sides or top/bottom will be cropped off
    -    if liveAreaAspectRatio >= aspectRatio:
    -        # liveArea is wider than what's needed, crop the sides
    -        cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5)
    -        cropHeight = liveSize[1]
    +    if live_size_ratio == output_ratio:
    +        # live_size is already the needed ratio
    +        crop_width = live_size[0]
    +        crop_height = live_size[1]
    +    elif live_size_ratio >= output_ratio:
    +        # live_size is wider than what's needed, crop the sides
    +        crop_width = output_ratio * live_size[1]
    +        crop_height = live_size[1]
         else:
    -        # liveArea is taller than what's needed, crop the top and bottom
    -        cropWidth = liveSize[0]
    -        cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5)
    +        # live_size is taller than what's needed, crop the top and bottom
    +        crop_width = live_size[0]
    +        crop_height = live_size[0] / output_ratio
     
         # make the crop
    -    leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0]))
    -    if leftSide < 0:
    -        leftSide = 0
    -    topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1]))
    -    if topSide < 0:
    -        topSide = 0
    +    crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0]
    +    crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1]
     
    -    out = image.crop(
    -        (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight)
    -        )
    +    crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height)
     
         # resize the image and return it
    -    return out.resize(size, method)
    +    return image.resize(size, method, box=crop)
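
The rewrite folds the crop into a single `resize(..., box=crop)` call, so no intermediate cropped image is created. A sketch with a placeholder input:

    from PIL import Image, ImageOps

    im = Image.open("photo.jpg")
    cover = ImageOps.fit(im, (300, 200), bleed=0.02, centering=(0.5, 0.4))
    assert cover.size == (300, 200)
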
     
     
     def flip(image):
    @@ -388,7 +478,7 @@ def invert(image):
         """
         lut = []
         for i in range(256):
    -        lut.append(255-i)
    +        lut.append(255 - i)
         return _lut(image, lut)
     
     
    @@ -411,7 +501,7 @@ def posterize(image, bits):
         :return: An image.
         """
         lut = []
    -    mask = ~(2**(8-bits)-1)
    +    mask = ~(2 ** (8 - bits) - 1)
         for i in range(256):
             lut.append(i & mask)
         return _lut(image, lut)
    @@ -430,100 +520,32 @@ def solarize(image, threshold=128):
             if i < threshold:
                 lut.append(i)
             else:
    -            lut.append(255-i)
    +            lut.append(255 - i)
         return _lut(image, lut)
     
     
    -# --------------------------------------------------------------------
    -# PIL USM components, from Kevin Cazabon.
    -
    -def gaussian_blur(im, radius=None):
    -    """ PIL_usm.gblur(im, [radius])"""
    -
    -    warnings.warn(
    -        'PIL.ImageOps.gaussian_blur is deprecated. '
    -        'Use PIL.ImageFilter.GaussianBlur instead. '
    -        'This function will be removed in a future version.',
    -        DeprecationWarning
    -    )
    -
    -    if radius is None:
    -        radius = 5.0
    -
    -    im.load()
    -
    -    return im.im.gaussian_blur(radius)
    -
    -
    -def gblur(im, radius=None):
    -    """ PIL_usm.gblur(im, [radius])"""
    -
    -    warnings.warn(
    -        'PIL.ImageOps.gblur is deprecated. '
    -        'Use PIL.ImageFilter.GaussianBlur instead. '
    -        'This function will be removed in a future version.',
    -        DeprecationWarning
    -    )
    -
    -    return gaussian_blur(im, radius)
    -
    -
    -def unsharp_mask(im, radius=None, percent=None, threshold=None):
    -    """ PIL_usm.usm(im, [radius, percent, threshold])"""
    -
    -    warnings.warn(
    -        'PIL.ImageOps.unsharp_mask is deprecated. '
    -        'Use PIL.ImageFilter.UnsharpMask instead. '
    -        'This function will be removed in a future version.',
    -        DeprecationWarning
    -    )
    -
    -    if radius is None:
    -        radius = 5.0
    -    if percent is None:
    -        percent = 150
    -    if threshold is None:
    -        threshold = 3
    -
    -    im.load()
    -
    -    return im.im.unsharp_mask(radius, percent, threshold)
    -
    -
    -def usm(im, radius=None, percent=None, threshold=None):
    -    """ PIL_usm.usm(im, [radius, percent, threshold])"""
    -
    -    warnings.warn(
    -        'PIL.ImageOps.usm is deprecated. '
    -        'Use PIL.ImageFilter.UnsharpMask instead. '
    -        'This function will be removed in a future version.',
    -        DeprecationWarning
    -    )
    -
    -    return unsharp_mask(im, radius, percent, threshold)
    -
    -
    -def box_blur(image, radius):
    +def exif_transpose(image):
         """
    -    Blur the image by setting each pixel to the average value of the pixels
    -    in a square box extending radius pixels in each direction.
    -    Supports float radius of arbitrary size. Uses an optimized implementation
    -    which runs in linear time relative to the size of the image
    -    for any radius value.
    +    If an image has an EXIF Orientation tag, return a new image that is
    +    transposed accordingly. Otherwise, return a copy of the image.
     
    -    :param image: The image to blur.
    -    :param radius: Size of the box in one direction. Radius 0 does not blur,
    -                   returns an identical image. Radius 1 takes 1 pixel
    -                   in each direction, i.e. 9 pixels in total.
    +    :param image: The image to transpose.
         :return: An image.
         """
    -    warnings.warn(
    -        'PIL.ImageOps.box_blur is deprecated. '
    -        'Use PIL.ImageFilter.BoxBlur instead. '
    -        'This function will be removed in a future version.',
    -        DeprecationWarning
    -    )
    -
    -    image.load()
    -
    -    return image._new(image.im.box_blur(radius))
    +    exif = image.getexif()
    +    orientation = exif.get(0x0112)
    +    method = {
    +        2: Image.FLIP_LEFT_RIGHT,
    +        3: Image.ROTATE_180,
    +        4: Image.FLIP_TOP_BOTTOM,
    +        5: Image.TRANSPOSE,
    +        6: Image.ROTATE_270,
    +        7: Image.TRANSVERSE,
    +        8: Image.ROTATE_90,
    +    }.get(orientation)
    +    if method is not None:
    +        transposed_image = image.transpose(method)
    +        del exif[0x0112]
    +        transposed_image.info["exif"] = exif.tobytes()
    +        return transposed_image
    +    return image.copy()
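
The new exif_transpose helper above replaces the deprecated blur/USM wrappers removed in this hunk. A minimal usage sketch (the file name is a placeholder):

    from PIL import Image, ImageOps

    with Image.open("photo.jpg") as im:
        # Returns a new image rotated/flipped according to the EXIF
        # Orientation tag (which is removed from the copy's EXIF data),
        # or a plain copy if no orientation is set.
        upright = ImageOps.exif_transpose(im)
        upright.save("photo_upright.jpg")
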
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImagePalette.py b/server/www/packages/packages-windows/x86/PIL/ImagePalette.py
    index cecc645..e0d439c 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImagePalette.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImagePalette.py
    @@ -17,10 +17,11 @@
     #
     
     import array
    -from . import ImageColor, GimpPaletteFile, GimpGradientFile, PaletteFile
    +
    +from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile
     
     
    -class ImagePalette(object):
    +class ImagePalette:
         """
         Color palette for palette mapped images
     
    @@ -38,11 +39,12 @@ class ImagePalette(object):
         def __init__(self, mode="RGB", palette=None, size=0):
             self.mode = mode
             self.rawmode = None  # if set, palette contains raw data
    -        self.palette = palette or bytearray(range(256))*len(self.mode)
    +        self.palette = palette or bytearray(range(256)) * len(self.mode)
             self.colors = {}
             self.dirty = None
    -        if ((size == 0 and len(self.mode)*256 != len(self.palette)) or
    -                (size != 0 and size != len(self.palette))):
    +        if (size == 0 and len(self.mode) * 256 != len(self.palette)) or (
    +            size != 0 and size != len(self.palette)
    +        ):
                 raise ValueError("wrong palette size")
     
         def copy(self):
    @@ -59,7 +61,7 @@ class ImagePalette(object):
     
         def getdata(self):
             """
    -        Get palette contents in format suitable # for the low-level
    +        Get palette contents in format suitable for the low-level
             ``im.putpalette`` primitive.
     
             .. warning:: This method is experimental.
    @@ -78,7 +80,7 @@ class ImagePalette(object):
             if isinstance(self.palette, bytes):
                 return self.palette
             arr = array.array("B", self.palette)
    -        if hasattr(arr, 'tobytes'):
    +        if hasattr(arr, "tobytes"):
                 return arr.tobytes()
             return arr.tostring()
     
    @@ -104,8 +106,8 @@ class ImagePalette(object):
                         raise ValueError("cannot allocate more than 256 colors")
                     self.colors[color] = index
                     self.palette[index] = color[0]
    -                self.palette[index+256] = color[1]
    -                self.palette[index+512] = color[2]
    +                self.palette[index + 256] = color[1]
    +                self.palette[index + 512] = color[2]
                     self.dirty = 1
                     return index
             else:
    @@ -124,7 +126,7 @@ class ImagePalette(object):
             fp.write("# Mode: %s\n" % self.mode)
             for i in range(256):
                 fp.write("%d" % i)
    -            for j in range(i*len(self.mode), (i+1)*len(self.mode)):
    +            for j in range(i * len(self.mode), (i + 1) * len(self.mode)):
                     try:
                         fp.write(" %d" % self.palette[j])
                     except IndexError:
    @@ -136,6 +138,7 @@ class ImagePalette(object):
     # --------------------------------------------------------------------
     # Internal
     
    +
     def raw(rawmode, data):
         palette = ImagePalette()
         palette.rawmode = rawmode
    @@ -147,11 +150,12 @@ def raw(rawmode, data):
     # --------------------------------------------------------------------
     # Factories
     
    +
     def make_linear_lut(black, white):
         lut = []
         if black == 0:
             for i in range(256):
    -            lut.append(white*i//255)
    +            lut.append(white * i // 255)
         else:
             raise NotImplementedError  # FIXME
         return lut
    @@ -172,8 +176,9 @@ def negative(mode="RGB"):
     
     def random(mode="RGB"):
         from random import randint
    +
         palette = []
    -    for i in range(256*len(mode)):
    +    for i in range(256 * len(mode)):
             palette.append(randint(0, 255))
         return ImagePalette(mode, palette)
     
    @@ -199,7 +204,7 @@ def load(filename):
             for paletteHandler in [
                 GimpPaletteFile.GimpPaletteFile,
                 GimpGradientFile.GimpGradientFile,
    -            PaletteFile.PaletteFile
    +            PaletteFile.PaletteFile,
             ]:
                 try:
                     fp.seek(0)
    @@ -211,6 +216,6 @@ def load(filename):
                     # traceback.print_exc()
                     pass
             else:
    -            raise IOError("cannot load palette")
    +            raise OSError("cannot load palette")
     
         return lut  # data, rawmode
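
For reference, a short sketch of how the factories above are typically used, assuming nothing beyond what this file defines:

    from PIL import Image, ImagePalette

    im = Image.new("P", (64, 64), 0)       # palette-mapped image
    pal = ImagePalette.random("RGB")       # 768 random entries via random()
    im.putpalette(pal.tobytes())           # install as raw RGB palette data
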
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImagePath.py b/server/www/packages/packages-windows/x86/PIL/ImagePath.py
    index 8cbfec0..3d3538c 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImagePath.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImagePath.py
    @@ -16,5 +16,4 @@
     
     from . import Image
     
    -
     Path = Image.core.path
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageQt.py b/server/www/packages/packages-windows/x86/PIL/ImageQt.py
    index c9dc363..dfe2f80 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageQt.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageQt.py
    @@ -16,29 +16,24 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image
    -from ._util import isPath, py3
    -from io import BytesIO
     import sys
    +from io import BytesIO
    +
    +from . import Image
    +from ._util import isPath
    +
    +qt_versions = [["5", "PyQt5"], ["side2", "PySide2"]]
     
    -qt_versions = [
    -    ['5', 'PyQt5'],
    -    ['4', 'PyQt4'],
    -    ['side', 'PySide']
    -]
     # If a version has already been imported, attempt it first
     qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True)
     for qt_version, qt_module in qt_versions:
         try:
    -        if qt_module == 'PyQt5':
    +        if qt_module == "PyQt5":
                 from PyQt5.QtGui import QImage, qRgba, QPixmap
                 from PyQt5.QtCore import QBuffer, QIODevice
    -        elif qt_module == 'PyQt4':
    -            from PyQt4.QtGui import QImage, qRgba, QPixmap
    -            from PyQt4.QtCore import QBuffer, QIODevice
    -        elif qt_module == 'PySide':
    -            from PySide.QtGui import QImage, qRgba, QPixmap
    -            from PySide.QtCore import QBuffer, QIODevice
    +        elif qt_module == "PySide2":
    +            from PySide2.QtGui import QImage, qRgba, QPixmap
    +            from PySide2.QtCore import QBuffer, QIODevice
         except (ImportError, RuntimeError):
             continue
         qt_is_installed = True
    @@ -52,7 +47,7 @@ def rgb(r, g, b, a=255):
         """(Internal) Turns an RGB color into a Qt compatible color integer."""
         # use qRgb to pack the colors, and then turn the resulting long
         # into a negative integer with the same bitpattern.
    -    return (qRgba(r, g, b, a) & 0xffffffff)
    +    return qRgba(r, g, b, a) & 0xFFFFFFFF
     
     
     def fromqimage(im):
    @@ -62,19 +57,15 @@ def fromqimage(im):
         """
         buffer = QBuffer()
         buffer.open(QIODevice.ReadWrite)
    -    # preserve alha channel with png
    +    # preserve alpha channel with png
         # otherwise ppm is more friendly with Image.open
         if im.hasAlphaChannel():
    -        im.save(buffer, 'png')
    +        im.save(buffer, "png")
         else:
    -        im.save(buffer, 'ppm')
    +        im.save(buffer, "ppm")
     
         b = BytesIO()
    -    try:
    -        b.write(buffer.data())
    -    except TypeError:
    -        # workaround for Python 2
    -        b.write(str(buffer.data()))
    +    b.write(buffer.data())
         buffer.close()
         b.seek(0)
     
    @@ -100,11 +91,7 @@ def align8to32(bytes, width, mode):
         converts each scanline of data from 8 bit to 32 bit aligned
         """
     
    -    bits_per_pixel = {
    -        '1': 1,
    -        'L': 8,
    -        'P': 8,
    -    }[mode]
    +    bits_per_pixel = {"1": 1, "L": 8, "P": 8}[mode]
     
         # calculate bytes per line and the extra padding if needed
         bits_per_line = bits_per_pixel * width
    @@ -119,9 +106,12 @@ def align8to32(bytes, width, mode):
     
         new_data = []
         for i in range(len(bytes) // bytes_per_line):
    -        new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding)
    +        new_data.append(
    +            bytes[i * bytes_per_line : (i + 1) * bytes_per_line]
    +            + b"\x00" * extra_padding
    +        )
     
    -    return b''.join(new_data)
    +    return b"".join(new_data)
     
     
     def _toqclass_helper(im):
    @@ -131,10 +121,7 @@ def _toqclass_helper(im):
         # handle filename, if given instead of image name
         if hasattr(im, "toUtf8"):
             # FIXME - is this really the best way to do this?
    -        if py3:
    -            im = str(im.toUtf8(), "utf-8")
    -        else:
    -            im = unicode(im.toUtf8(), "utf-8")
    +        im = str(im.toUtf8(), "utf-8")
         if isPath(im):
             im = Image.open(im)
     
    @@ -150,7 +137,7 @@ def _toqclass_helper(im):
             colortable = []
             palette = im.getpalette()
             for i in range(0, len(palette), 3):
    -            colortable.append(rgb(*palette[i:i+3]))
    +            colortable.append(rgb(*palette[i : i + 3]))
         elif im.mode == "RGB":
             data = im.tobytes("raw", "BGRX")
             format = QImage.Format_RGB32
    @@ -166,33 +153,34 @@ def _toqclass_helper(im):
             raise ValueError("unsupported image mode %r" % im.mode)
     
         __data = data or align8to32(im.tobytes(), im.size[0], im.mode)
    -    return {
    -        'data': __data, 'im': im, 'format': format, 'colortable': colortable
    -    }
    +    return {"data": __data, "im": im, "format": format, "colortable": colortable}
     
     
     if qt_is_installed:
    -    class ImageQt(QImage):
     
    +    class ImageQt(QImage):
             def __init__(self, im):
                 """
             A PIL image wrapper for Qt.  This is a subclass of PyQt's QImage
                 class.
     
    -            :param im: A PIL Image object, or a file name (given either as Python
    -                string or a PyQt string object).
    +            :param im: A PIL Image object, or a file name (given either as
    +                Python string or a PyQt string object).
                 """
                 im_data = _toqclass_helper(im)
                 # must keep a reference, or Qt will crash!
                 # All QImage constructors that take data operate on an existing
                 # buffer, so this buffer has to hang on for the life of the image.
                 # Fixes https://github.com/python-pillow/Pillow/issues/1370
    -            self.__data = im_data['data']
    -            QImage.__init__(self,
    -                            self.__data, im_data['im'].size[0],
    -                            im_data['im'].size[1], im_data['format'])
    -            if im_data['colortable']:
    -                self.setColorTable(im_data['colortable'])
    +            self.__data = im_data["data"]
    +            super().__init__(
    +                self.__data,
    +                im_data["im"].size[0],
    +                im_data["im"].size[1],
    +                im_data["format"],
    +            )
    +            if im_data["colortable"]:
    +                self.setColorTable(im_data["colortable"])
     
     
     def toqimage(im):
    @@ -205,8 +193,8 @@ def toqpixmap(im):
         # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1])
         # result.loadFromData(im_data['data'])
         # Fix some strange bug that causes
    -    if im.mode == 'RGB':
    -        im = im.convert('RGBA')
    +    if im.mode == "RGB":
    +        im = im.convert("RGBA")
     
         qimage = toqimage(im)
         return QPixmap.fromImage(qimage)
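
With PyQt4/PySide support dropped, the module now targets PyQt5 and PySide2 only. A hedged sketch of the round trip (assumes PyQt5 is installed; QPixmap additionally needs a running QApplication):

    import sys
    from PyQt5.QtWidgets import QApplication, QLabel
    from PIL import Image
    from PIL.ImageQt import ImageQt, toqpixmap

    app = QApplication(sys.argv)
    im = Image.new("RGB", (120, 80), "orange")
    qim = ImageQt(im)                  # QImage subclass wrapping the PIL data
    label = QLabel()
    label.setPixmap(toqpixmap(im))     # helper defined at the end of the file
    label.show()
    # app.exec_()                      # uncomment to enter the event loop
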
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageSequence.py b/server/www/packages/packages-windows/x86/PIL/ImageSequence.py
    index 1fc6e5d..4e9f5c2 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageSequence.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageSequence.py
    @@ -16,7 +16,7 @@
     ##
     
     
    -class Iterator(object):
    +class Iterator:
         """
         This class implements an iterator object that can be used to loop
         over an image sequence.
    @@ -32,7 +32,7 @@ class Iterator(object):
             if not hasattr(im, "seek"):
                 raise AttributeError("im must have seek method")
             self.im = im
    -        self.position = 0
    +        self.position = getattr(self.im, "_min_frame", 0)
     
         def __getitem__(self, ix):
             try:
    @@ -52,5 +52,24 @@ class Iterator(object):
             except EOFError:
                 raise StopIteration
     
    -    def next(self):
    -        return self.__next__()
    +
    +def all_frames(im, func=None):
    +    """
    +    Applies a given function to all frames in an image or a list of images.
    +    The frames are returned as a list of separate images.
    +
    +    :param im: An image, or a list of images.
    +    :param func: The function to apply to all of the image frames.
    +    :returns: A list of images.
    +    """
    +    if not isinstance(im, list):
    +        im = [im]
    +
    +    ims = []
    +    for imSequence in im:
    +        current = imSequence.tell()
    +
    +        ims += [im_frame.copy() for im_frame in Iterator(imSequence)]
    +
    +        imSequence.seek(current)
    +    return [func(im) for im in ims] if func else ims
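
A short sketch of the new all_frames helper next to the classic Iterator (the GIF path is a placeholder):

    from PIL import Image, ImageSequence

    with Image.open("animation.gif") as im:
        sizes = [frame.size for frame in ImageSequence.Iterator(im)]
        # New helper: apply a function to every frame in one call.
        rotated = ImageSequence.all_frames(im, lambda f: f.rotate(180))
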
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageShow.py b/server/www/packages/packages-windows/x86/PIL/ImageShow.py
    index b50d613..fc50894 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageShow.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageShow.py
    @@ -11,17 +11,14 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import print_function
    +import os
    +import shutil
    +import subprocess
    +import sys
    +import tempfile
    +from shlex import quote
     
     from PIL import Image
    -import os
    -import sys
    -
    -if sys.version_info.major >= 3:
    -    from shlex import quote
    -else:
    -    from pipes import quote
     
     _viewers = []
     
    @@ -53,7 +50,7 @@ def show(image, title=None, **options):
         return 0
     
     
    -class Viewer(object):
    +class Viewer:
         """Base class for viewers."""
     
         # main api
    @@ -61,16 +58,12 @@ class Viewer(object):
         def show(self, image, **options):
     
             # save temporary image to disk
    -        if image.mode[:4] == "I;16":
    -            # @PIL88 @PIL101
    -            # "I;16" isn't an 'official' mode, but we still want to
    -            # provide a simple way to show 16-bit images.
    -            base = "L"
    -            # FIXME: auto-contrast if max() > 255?
    -        else:
    +        if not (
    +            image.mode in ("1", "RGBA") or (self.format == "PNG" and image.mode == "LA")
    +        ):
                 base = Image.getmodebase(image.mode)
    -        if base != image.mode and image.mode != "1" and image.mode != "RGBA":
    -            image = image.convert(base)
    +            if image.mode != base:
    +                image = image.convert(base)
     
             return self.show_image(image, **options)
     
    @@ -99,18 +92,22 @@ class Viewer(object):
             os.system(self.get_command(file, **options))
             return 1
     
    +
     # --------------------------------------------------------------------
     
     
     if sys.platform == "win32":
     
         class WindowsViewer(Viewer):
    -        format = "BMP"
    +        format = "PNG"
    +        options = {"compress_level": 1}
     
             def get_command(self, file, **options):
    -            return ('start "Pillow" /WAIT "%s" '
    -                    '&& ping -n 2 127.0.0.1 >NUL '
    -                    '&& del /f "%s"' % (file, file))
    +            return (
    +                'start "Pillow" /WAIT "%s" '
    +                "&& ping -n 2 127.0.0.1 >NUL "
    +                '&& del /f "%s"' % (file, file)
    +            )
     
         register(WindowsViewer)
     
    @@ -118,41 +115,56 @@ elif sys.platform == "darwin":
     
         class MacViewer(Viewer):
             format = "PNG"
    -        options = {'compress_level': 1}
    +        options = {"compress_level": 1}
     
             def get_command(self, file, **options):
                 # on darwin open returns immediately resulting in the temp
                 # file removal while app is opening
    -            command = "open -a /Applications/Preview.app"
    -            command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file),
    -                                                        quote(file))
    +            command = "open -a Preview.app"
    +            command = "({} {}; sleep 20; rm -f {})&".format(
    +                command, quote(file), quote(file)
    +            )
                 return command
     
    +        def show_file(self, file, **options):
    +            """Display given file"""
    +            fd, path = tempfile.mkstemp()
    +            with os.fdopen(fd, "w") as f:
    +                f.write(file)
    +            with open(path, "r") as f:
    +                subprocess.Popen(
    +                    ["im=$(cat); open -a Preview.app $im; sleep 20; rm -f $im"],
    +                    shell=True,
    +                    stdin=f,
    +                )
    +            os.remove(path)
    +            return 1
    +
         register(MacViewer)
     
     else:
     
         # unixoids
     
    -    def which(executable):
    -        path = os.environ.get("PATH")
    -        if not path:
    -            return None
    -        for dirname in path.split(os.pathsep):
    -            filename = os.path.join(dirname, executable)
    -            if os.path.isfile(filename) and os.access(filename, os.X_OK):
    -                return filename
    -        return None
    -
         class UnixViewer(Viewer):
             format = "PNG"
    -        options = {'compress_level': 1}
    +        options = {"compress_level": 1}
    +
    +        def get_command(self, file, **options):
    +            command = self.get_command_ex(file, **options)[0]
    +            return "({} {}; rm -f {})&".format(command, quote(file), quote(file))
     
             def show_file(self, file, **options):
    -            command, executable = self.get_command_ex(file, **options)
    -            command = "(%s %s; rm -f %s)&" % (command, quote(file),
    -                                              quote(file))
    -            os.system(command)
    +            """Display given file"""
    +            fd, path = tempfile.mkstemp()
    +            with os.fdopen(fd, "w") as f:
    +                f.write(file)
    +            with open(path, "r") as f:
    +                command = self.get_command_ex(file, **options)[0]
    +                subprocess.Popen(
    +                    ["im=$(cat);" + command + " $im; rm -f $im"], shell=True, stdin=f
    +                )
    +            os.remove(path)
                 return 1
     
         # implementations
    @@ -162,7 +174,7 @@ else:
                 command = executable = "display"
                 return command, executable
     
    -    if which("display"):
    +    if shutil.which("display"):
             register(DisplayViewer)
     
         class EogViewer(UnixViewer):
    @@ -170,7 +182,7 @@ else:
                 command = executable = "eog"
                 return command, executable
     
    -    if which("eog"):
    +    if shutil.which("eog"):
             register(EogViewer)
     
         class XVViewer(UnixViewer):
    @@ -182,7 +194,7 @@ else:
                     command += " -name %s" % quote(title)
                 return command, executable
     
    -    if which("xv"):
    +    if shutil.which("xv"):
             register(XVViewer)
     
     if __name__ == "__main__":
    @@ -191,4 +203,5 @@ if __name__ == "__main__":
             print("Syntax: python ImageShow.py imagefile [title]")
             sys.exit()
     
    -    print(show(Image.open(sys.argv[1]), *sys.argv[2:]))
    +    with Image.open(sys.argv[1]) as im:
    +        print(show(im, *sys.argv[2:]))
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageStat.py b/server/www/packages/packages-windows/x86/PIL/ImageStat.py
    index cd58fc8..50bafc9 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageStat.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageStat.py
    @@ -21,13 +21,12 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import functools
     import math
     import operator
    -import functools
     
     
    -class Stat(object):
    -
    +class Stat:
         def __init__(self, image_or_list, mask=None):
             try:
                 if mask:
    @@ -41,7 +40,7 @@ class Stat(object):
             self.bands = list(range(len(self.h) // 256))
     
         def __getattr__(self, id):
    -        "Calculate missing attribute"
    +        """Calculate missing attribute"""
             if id[:4] == "_get":
                 raise AttributeError(id)
             # calculate missing attribute
    @@ -50,7 +49,7 @@ class Stat(object):
             return v
     
         def _getextrema(self):
    -        "Get min/max values for each band in the image"
    +        """Get min/max values for each band in the image"""
     
             def minmax(histogram):
                 n = 255
    @@ -67,15 +66,15 @@ class Stat(object):
             return v
     
         def _getcount(self):
    -        "Get total number of pixels in each layer"
    +        """Get total number of pixels in each layer"""
     
             v = []
             for i in range(0, len(self.h), 256):
    -            v.append(functools.reduce(operator.add, self.h[i:i+256]))
    +            v.append(functools.reduce(operator.add, self.h[i : i + 256]))
             return v
     
         def _getsum(self):
    -        "Get sum of all pixels in each layer"
    +        """Get sum of all pixels in each layer"""
     
             v = []
             for i in range(0, len(self.h), 256):
    @@ -86,7 +85,7 @@ class Stat(object):
             return v
     
         def _getsum2(self):
    -        "Get squared sum of all pixels in each layer"
    +        """Get squared sum of all pixels in each layer"""
     
             v = []
             for i in range(0, len(self.h), 256):
    @@ -97,7 +96,7 @@ class Stat(object):
             return v
     
         def _getmean(self):
    -        "Get average pixel level for each layer"
    +        """Get average pixel level for each layer"""
     
             v = []
             for i in self.bands:
    @@ -105,22 +104,22 @@ class Stat(object):
             return v
     
         def _getmedian(self):
    -        "Get median pixel level for each layer"
    +        """Get median pixel level for each layer"""
     
             v = []
             for i in self.bands:
                 s = 0
    -            l = self.count[i]//2
    +            half = self.count[i] // 2
                 b = i * 256
                 for j in range(256):
    -                s = s + self.h[b+j]
    -                if s > l:
    +                s = s + self.h[b + j]
    +                if s > half:
                         break
                 v.append(j)
             return v
     
         def _getrms(self):
    -        "Get RMS for each layer"
    +        """Get RMS for each layer"""
     
             v = []
             for i in self.bands:
    @@ -128,16 +127,16 @@ class Stat(object):
             return v
     
         def _getvar(self):
    -        "Get variance for each layer"
    +        """Get variance for each layer"""
     
             v = []
             for i in self.bands:
                 n = self.count[i]
    -            v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n)
    +            v.append((self.sum2[i] - (self.sum[i] ** 2.0) / n) / n)
             return v
     
         def _getstddev(self):
    -        "Get standard deviation for each layer"
    +        """Get standard deviation for each layer"""
     
             v = []
             for i in self.bands:
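
Usage is unchanged by this cleanup; attributes are computed lazily through __getattr__. A minimal sketch:

    from PIL import Image, ImageStat

    im = Image.new("RGB", (32, 32), (10, 20, 30))
    stat = ImageStat.Stat(im)
    print(stat.mean)      # [10.0, 20.0, 30.0], one value per band
    print(stat.median)    # per-band medians via the renamed `half` counter
    print(stat.stddev)
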
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageTk.py b/server/www/packages/packages-windows/x86/PIL/ImageTk.py
    index b5ad53d..ee707cf 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageTk.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageTk.py
    @@ -25,23 +25,10 @@
     # See the README file for information on usage and redistribution.
     #
     
    -import sys
    -
    -if sys.version_info.major > 2:
    -    import tkinter
    -else:
    -    import Tkinter as tkinter
    -
    -# required for pypy, which always has cffi installed
    -try:
    -    from cffi import FFI
    -    ffi = FFI()
    -except ImportError:
    -    pass
    -
    -from . import Image
    +import tkinter
     from io import BytesIO
     
    +from . import Image
     
     # --------------------------------------------------------------------
     # Check for Tkinter interface hooks
    @@ -74,7 +61,8 @@ def _get_image_from_kw(kw):
     # --------------------------------------------------------------------
     # PhotoImage
     
    -class PhotoImage(object):
    +
    +class PhotoImage:
         """
         A Tkinter-compatible photo image.  This can be used
         everywhere Tkinter expects an image object.  If the image is an RGBA
    @@ -131,7 +119,7 @@ class PhotoImage(object):
             self.__photo.name = None
             try:
                 self.__photo.tk.call("image", "delete", name)
    -        except:
    +        except Exception:
                 pass  # ignore internal errors
     
         def __str__(self):
    @@ -190,9 +178,15 @@ class PhotoImage(object):
                 # activate Tkinter hook
                 try:
                     from . import _imagingtk
    +
                     try:
    -                    if hasattr(tk, 'interp'):
    -                        # Pypy is using a ffi cdata element
    +                    if hasattr(tk, "interp"):
    +                        # Required for PyPy, which always has CFFI installed
    +                        from cffi import FFI
    +
    +                        ffi = FFI()
    +
    +                        # PyPy is using an FFI CDATA element
                             # (Pdb) self.tk.interp
                         #  <cdata 'Tcl_Interp *' 0x3061b50>
                             _imagingtk.tkinit(int(ffi.cast("uintptr_t", tk.interp)), 1)
    @@ -204,11 +198,12 @@ class PhotoImage(object):
                 except (ImportError, AttributeError, tkinter.TclError):
                     raise  # configuration problem; cannot attach to Tkinter
     
    +
     # --------------------------------------------------------------------
     # BitmapImage
     
     
    -class BitmapImage(object):
    +class BitmapImage:
         """
         A Tkinter-compatible bitmap image.  This can be used everywhere Tkinter
         expects an image object.
    @@ -246,7 +241,7 @@ class BitmapImage(object):
             self.__photo.name = None
             try:
                 self.__photo.tk.call("image", "delete", name)
    -        except:
    +        except Exception:
                 pass  # ignore internal errors
     
         def width(self):
    @@ -277,10 +272,13 @@ class BitmapImage(object):
     
     
     def getimage(photo):
    -    """ This function is unimplemented """
    -
         """Copies the contents of a PhotoImage to a PIL image memory."""
    -    photo.tk.call("PyImagingPhotoGet", photo)
    +    im = Image.new("RGBA", (photo.width(), photo.height()))
    +    block = im.im
    +
    +    photo.tk.call("PyImagingPhotoGet", photo, block.id)
    +
    +    return im
     
     
     def _show(image, title):
    @@ -292,11 +290,10 @@ def _show(image, title):
                     self.image = BitmapImage(im, foreground="white", master=master)
                 else:
                     self.image = PhotoImage(im, master=master)
    -            tkinter.Label.__init__(self, master, image=self.image,
    -                                   bg="black", bd=0)
    +            super().__init__(master, image=self.image, bg="black", bd=0)
     
         if not tkinter._default_root:
    -        raise IOError("tkinter not initialized")
    +        raise OSError("tkinter not initialized")
         top = tkinter.Toplevel()
         if title:
             top.title(title)
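
getimage() above is now actually implemented (it previously only issued the Tk call and returned nothing). A hedged sketch, which needs a display to run:

    import tkinter
    from PIL import Image, ImageTk

    root = tkinter.Tk()
    im = Image.new("RGB", (100, 50), "purple")
    photo = ImageTk.PhotoImage(im)             # Tk-compatible photo image
    tkinter.Label(root, image=photo).pack()
    back = ImageTk.getimage(photo)             # returns an RGBA PIL image
    # root.mainloop()                          # uncomment to show the window
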
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageTransform.py b/server/www/packages/packages-windows/x86/PIL/ImageTransform.py
    index c3f6af8..77791ab 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageTransform.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageTransform.py
    @@ -46,6 +46,7 @@ class AffineTransform(Transform):
         :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows
             from an affine transform matrix.
         """
    +
         method = Image.AFFINE
     
     
    @@ -67,6 +68,7 @@ class ExtentTransform(Transform):
         :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the
             input image's coordinate system. See :ref:`coordinate-system`.
         """
    +
         method = Image.EXTENT
     
     
    @@ -83,6 +85,7 @@ class QuadTransform(Transform):
             upper left, lower left, lower right, and upper right corner of the
             source quadrilateral.
         """
    +
         method = Image.QUAD
     
     
    @@ -95,4 +98,5 @@ class MeshTransform(Transform):
     
         :param data: A list of (bbox, quad) tuples.
         """
    +
         method = Image.MESH
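
These Transform subclasses can be passed directly as the method argument of Image.transform(). A brief sketch:

    from PIL import Image
    from PIL.ImageTransform import AffineTransform, ExtentTransform

    im = Image.new("L", (128, 128), 255)
    # Stretch the upper-left 64x64 quadrant to fill the output image:
    out = im.transform((128, 128), ExtentTransform((0, 0, 64, 64)))
    # Identity affine mapping (first two rows of the 3x3 matrix):
    same = im.transform((128, 128), AffineTransform((1, 0, 0, 0, 1, 0)))
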
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImageWin.py b/server/www/packages/packages-windows/x86/PIL/ImageWin.py
    index 9b86270..927b169 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImageWin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImageWin.py
    @@ -20,12 +20,13 @@
     from . import Image
     
     
    -class HDC(object):
    +class HDC:
         """
         Wraps an HDC integer. The resulting object can be passed to the
         :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
         methods.
         """
    +
         def __init__(self, dc):
             self.dc = dc
     
    @@ -33,12 +34,13 @@ class HDC(object):
             return self.dc
     
     
    -class HWND(object):
    +class HWND:
         """
         Wraps an HWND integer. The resulting object can be passed to the
         :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
         methods, instead of a DC.
         """
    +
         def __init__(self, wnd):
             self.wnd = wnd
     
    @@ -46,7 +48,7 @@ class HWND(object):
             return self.wnd
     
     
    -class Dib(object):
    +class Dib:
         """
         A Windows bitmap with the given mode and size.  The mode can be one of "1",
         "L", "P", or "RGB".
    @@ -184,13 +186,13 @@ class Dib(object):
             return self.image.tobytes()
     
     
    -class Window(object):
    +class Window:
         """Create a Window with the given title size."""
     
         def __init__(self, title="PIL", width=None, height=None):
             self.hwnd = Image.core.createwindow(
                 title, self.__dispatcher, width or 0, height or 0
    -            )
    +        )
     
         def __dispatcher(self, action, *args):
             return getattr(self, "ui_handle_" + action)(*args)
    @@ -222,7 +224,7 @@ class ImageWindow(Window):
                 image = Dib(image)
             self.image = image
             width, height = image.size
    -        Window.__init__(self, title, width=width, height=height)
    +        super().__init__(title, width=width, height=height)
     
         def ui_handle_repair(self, dc, x0, y0, x1, y1):
             self.image.draw(dc, (x0, y0, x1, y1))
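
A Windows-only, purely illustrative sketch of the Dib/HDC wrappers above, drawing straight to the screen device context:

    import ctypes
    from PIL import Image, ImageWin

    im = Image.new("RGB", (200, 100), "navy")
    dib = ImageWin.Dib(im)                      # copy pixels into a Windows DIB
    hdc = ctypes.windll.user32.GetDC(0)         # DC for the whole screen
    try:
        dib.expose(hdc)                         # blit at the DIB's own size
    finally:
        ctypes.windll.user32.ReleaseDC(0, hdc)
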
    diff --git a/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py
    index 05e8cd3..21ffd74 100644
    --- a/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py
    @@ -19,9 +19,6 @@ import re
     
     from . import Image, ImageFile
     
    -__version__ = "0.2"
    -
    -
     #
     # --------------------------------------------------------------------
     
    @@ -31,6 +28,7 @@ field = re.compile(br"([a-z]*) ([^ \r\n]*)")
     ##
     # Image plugin for IM Tools images.
     
    +
     class ImtImageFile(ImageFile.ImageFile):
     
         format = "IMT"
    @@ -53,12 +51,12 @@ class ImtImageFile(ImageFile.ImageFile):
                 if not s:
                     break
     
    -            if s == b'\x0C':
    +            if s == b"\x0C":
     
                     # image data begins
    -                self.tile = [("raw", (0, 0)+self.size,
    -                             self.fp.tell(),
    -                             (self.mode, 0, 1))]
    +                self.tile = [
    +                    ("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))
    +                ]
     
                     break
     
    @@ -78,10 +76,10 @@ class ImtImageFile(ImageFile.ImageFile):
                     k, v = m.group(1, 2)
                     if k == "width":
                         xsize = int(v)
    -                    self.size = xsize, ysize
    +                    self._size = xsize, ysize
                     elif k == "height":
                         ysize = int(v)
    -                    self.size = xsize, ysize
    +                    self._size = xsize, ysize
                     elif k == "pixel" and v == "n8":
                         self.mode = "L"
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py
    index f5a8de1..b2f976d 100644
    --- a/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py
    @@ -14,20 +14,13 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import print_function
    -
    -from . import Image, ImageFile
    -from ._binary import i8, i16be as i16, i32be as i32, o8
     import os
     import tempfile
     
    -__version__ = "0.3"
    +from . import Image, ImageFile
    +from ._binary import i8, i16be as i16, i32be as i32, o8
     
    -COMPRESSION = {
    -    1: "raw",
    -    5: "jpeg"
    -}
    +COMPRESSION = {1: "raw", 5: "jpeg"}
     
     PAD = o8(0) * 4
     
    @@ -35,13 +28,14 @@ PAD = o8(0) * 4
     #
     # Helpers
     
    +
     def i(c):
         return i32((PAD + c)[-4:])
     
     
     def dump(c):
         for i in c:
    -        print("%02x" % i8(i), end=' ')
    +        print("%02x" % i8(i), end=" ")
         print()
     
     
    @@ -49,6 +43,7 @@ def dump(c):
     # Image plugin for IPTC/NAA datastreams.  To read IPTC/NAA fields
     # from TIFF and JPEG files, use the getiptcinfo function.
     
    +
     class IptcImageFile(ImageFile.ImageFile):
     
         format = "IPTC"
    @@ -73,11 +68,11 @@ class IptcImageFile(ImageFile.ImageFile):
             # field size
             size = i8(s[3])
             if size > 132:
    -            raise IOError("illegal field length in IPTC/NAA file")
    +            raise OSError("illegal field length in IPTC/NAA file")
             elif size == 128:
                 size = 0
             elif size > 128:
    -            size = i(self.fp.read(size-128))
    +            size = i(self.fp.read(size - 128))
             else:
                 size = i16(s[3:])
     
    @@ -103,13 +98,11 @@ class IptcImageFile(ImageFile.ImageFile):
                 else:
                     self.info[tag] = tagdata
     
    -            # print(tag, self.info[tag])
    -
             # mode
             layers = i8(self.info[(3, 60)][0])
             component = i8(self.info[(3, 60)][1])
             if (3, 65) in self.info:
    -            id = i8(self.info[(3, 65)][0])-1
    +            id = i8(self.info[(3, 65)][0]) - 1
             else:
                 id = 0
             if layers == 1 and not component:
    @@ -120,18 +113,19 @@ class IptcImageFile(ImageFile.ImageFile):
                 self.mode = "CMYK"[id]
     
             # size
    -        self.size = self.getint((3, 20)), self.getint((3, 30))
    +        self._size = self.getint((3, 20)), self.getint((3, 30))
     
             # compression
             try:
                 compression = COMPRESSION[self.getint((3, 120))]
             except KeyError:
    -            raise IOError("Unknown IPTC image compression")
    +            raise OSError("Unknown IPTC image compression")
     
             # tile
             if tag == (8, 10):
    -            self.tile = [("iptc", (compression, offset),
    -                         (0, 0, self.size[0], self.size[1]))]
    +            self.tile = [
    +                ("iptc", (compression, offset), (0, 0, self.size[0], self.size[1]))
    +            ]
     
         def load(self):
     
    @@ -164,9 +158,9 @@ class IptcImageFile(ImageFile.ImageFile):
             o.close()
     
             try:
    -            _im = Image.open(outfile)
    -            _im.load()
    -            self.im = _im.im
    +            with Image.open(outfile) as _im:
    +                _im.load()
    +                self.im = _im.im
             finally:
                 try:
                     os.unlink(outfile)
    @@ -198,35 +192,9 @@ def getiptcinfo(im):
     
         elif isinstance(im, JpegImagePlugin.JpegImageFile):
             # extract the IPTC/NAA resource
    -        try:
    -            app = im.app["APP13"]
    -            if app[:14] == b"Photoshop 3.0\x00":
    -                app = app[14:]
    -                # parse the image resource block
    -                offset = 0
    -                while app[offset:offset+4] == b"8BIM":
    -                    offset += 4
    -                    # resource code
    -                    code = i16(app, offset)
    -                    offset += 2
    -                    # resource name (usually empty)
    -                    name_len = i8(app[offset])
    -                    # name = app[offset+1:offset+1+name_len]
    -                    offset = 1 + offset + name_len
    -                    if offset & 1:
    -                        offset += 1
    -                    # resource data block
    -                    size = i32(app, offset)
    -                    offset += 4
    -                    if code == 0x0404:
    -                        # 0x0404 contains IPTC/NAA data
    -                        data = app[offset:offset+size]
    -                        break
    -                    offset = offset + size
    -                    if offset & 1:
    -                        offset += 1
    -        except (AttributeError, KeyError):
    -            pass
    +        photoshop = im.info.get("photoshop")
    +        if photoshop:
    +            data = photoshop.get(0x0404)
     
         elif isinstance(im, TiffImagePlugin.TiffImageFile):
             # get raw data from the IPTC/NAA tag (PhotoShop tags the data
    @@ -240,8 +208,9 @@ def getiptcinfo(im):
             return None  # no properties
     
         # create an IptcImagePlugin object without initializing it
    -    class FakeImage(object):
    +    class FakeImage:
             pass
    +
         im = FakeImage()
         im.__class__ = IptcImageFile
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py
    index 25fbefb..0b0d433 100644
    --- a/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py
    @@ -12,12 +12,11 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -from . import Image, ImageFile
    -import struct
    -import os
     import io
    +import os
    +import struct
     
    -__version__ = "0.1"
    +from . import Image, ImageFile
     
     
     def _parse_codestream(fp):
    @@ -25,30 +24,29 @@ def _parse_codestream(fp):
         count from the SIZ marker segment, returning a PIL (size, mode) tuple."""
     
         hdr = fp.read(2)
    -    lsiz = struct.unpack('>H', hdr)[0]
    +    lsiz = struct.unpack(">H", hdr)[0]
         siz = hdr + fp.read(lsiz - 2)
    -    lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \
    -        xtosiz, ytosiz, csiz \
    -        = struct.unpack_from('>HHIIIIIIIIH', siz)
    -    ssiz = [None]*csiz
    -    xrsiz = [None]*csiz
    -    yrsiz = [None]*csiz
    +    lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from(
    +        ">HHIIIIIIIIH", siz
    +    )
    +    ssiz = [None] * csiz
    +    xrsiz = [None] * csiz
    +    yrsiz = [None] * csiz
         for i in range(csiz):
    -        ssiz[i], xrsiz[i], yrsiz[i] \
    -            = struct.unpack_from('>BBB', siz, 36 + 3 * i)
    +        ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i)
     
         size = (xsiz - xosiz, ysiz - yosiz)
         if csiz == 1:
    -        if (yrsiz[0] & 0x7f) > 8:
    -            mode = 'I;16'
    +        if (yrsiz[0] & 0x7F) > 8:
    +            mode = "I;16"
             else:
    -            mode = 'L'
    +            mode = "L"
         elif csiz == 2:
    -        mode = 'LA'
    +        mode = "LA"
         elif csiz == 3:
    -        mode = 'RGB'
    +        mode = "RGB"
         elif csiz == 4:
    -        mode = 'RGBA'
    +        mode = "RGBA"
         else:
             mode = None
     
    @@ -57,29 +55,34 @@ def _parse_codestream(fp):
     
     def _parse_jp2_header(fp):
         """Parse the JP2 header box to extract size, component count and
    -    color space information, returning a PIL (size, mode) tuple."""
    +    color space information, returning a (size, mode, mimetype) tuple."""
     
         # Find the JP2 header box
         header = None
    +    mimetype = None
         while True:
    -        lbox, tbox = struct.unpack('>I4s', fp.read(8))
    +        lbox, tbox = struct.unpack(">I4s", fp.read(8))
             if lbox == 1:
    -            lbox = struct.unpack('>Q', fp.read(8))[0]
    +            lbox = struct.unpack(">Q", fp.read(8))[0]
                 hlen = 16
             else:
                 hlen = 8
     
             if lbox < hlen:
    -            raise SyntaxError('Invalid JP2 header length')
    +            raise SyntaxError("Invalid JP2 header length")
     
    -        if tbox == b'jp2h':
    +        if tbox == b"jp2h":
                 header = fp.read(lbox - hlen)
                 break
    +        elif tbox == b"ftyp":
    +            if fp.read(4) == b"jpx ":
    +                mimetype = "image/jpx"
    +            fp.seek(lbox - hlen - 4, os.SEEK_CUR)
             else:
                 fp.seek(lbox - hlen, os.SEEK_CUR)
     
         if header is None:
    -        raise SyntaxError('could not find JP2 header')
    +        raise SyntaxError("could not find JP2 header")
     
         size = None
         mode = None
    @@ -88,64 +91,64 @@ def _parse_jp2_header(fp):
     
         hio = io.BytesIO(header)
         while True:
    -        lbox, tbox = struct.unpack('>I4s', hio.read(8))
    +        lbox, tbox = struct.unpack(">I4s", hio.read(8))
             if lbox == 1:
    -            lbox = struct.unpack('>Q', hio.read(8))[0]
    +            lbox = struct.unpack(">Q", hio.read(8))[0]
                 hlen = 16
             else:
                 hlen = 8
     
             content = hio.read(lbox - hlen)
     
    -        if tbox == b'ihdr':
    -            height, width, nc, bpc, c, unkc, ipr \
    -                = struct.unpack('>IIHBBBB', content)
    +        if tbox == b"ihdr":
    +            height, width, nc, bpc, c, unkc, ipr = struct.unpack(">IIHBBBB", content)
                 size = (width, height)
                 if unkc:
    -                if nc == 1 and (bpc & 0x7f) > 8:
    -                    mode = 'I;16'
    +                if nc == 1 and (bpc & 0x7F) > 8:
    +                    mode = "I;16"
                     elif nc == 1:
    -                    mode = 'L'
    +                    mode = "L"
                     elif nc == 2:
    -                    mode = 'LA'
    +                    mode = "LA"
                     elif nc == 3:
    -                    mode = 'RGB'
    +                    mode = "RGB"
                     elif nc == 4:
    -                    mode = 'RGBA'
    +                    mode = "RGBA"
                     break
    -        elif tbox == b'colr':
    -            meth, prec, approx = struct.unpack_from('>BBB', content)
    +        elif tbox == b"colr":
    +            meth, prec, approx = struct.unpack_from(">BBB", content)
                 if meth == 1:
    -                cs = struct.unpack_from('>I', content, 3)[0]
    -                if cs == 16:   # sRGB
    -                    if nc == 1 and (bpc & 0x7f) > 8:
    -                        mode = 'I;16'
    +                cs = struct.unpack_from(">I", content, 3)[0]
    +                if cs == 16:  # sRGB
    +                    if nc == 1 and (bpc & 0x7F) > 8:
    +                        mode = "I;16"
                         elif nc == 1:
    -                        mode = 'L'
    +                        mode = "L"
                         elif nc == 3:
    -                        mode = 'RGB'
    +                        mode = "RGB"
                         elif nc == 4:
    -                        mode = 'RGBA'
    +                        mode = "RGBA"
                         break
                     elif cs == 17:  # grayscale
    -                    if nc == 1 and (bpc & 0x7f) > 8:
    -                        mode = 'I;16'
    +                    if nc == 1 and (bpc & 0x7F) > 8:
    +                        mode = "I;16"
                         elif nc == 1:
    -                        mode = 'L'
    +                        mode = "L"
                         elif nc == 2:
    -                        mode = 'LA'
    +                        mode = "LA"
                         break
                     elif cs == 18:  # sYCC
                         if nc == 3:
    -                        mode = 'RGB'
    +                        mode = "RGB"
                         elif nc == 4:
    -                        mode = 'RGBA'
    +                        mode = "RGBA"
                         break
     
         if size is None or mode is None:
             raise SyntaxError("Malformed jp2 header")
     
    -    return (size, mode)
    +    return (size, mode, mimetype)
    +
     
     ##
     # Image plugin for JPEG2000 images.
    @@ -157,22 +160,23 @@ class Jpeg2KImageFile(ImageFile.ImageFile):
     
         def _open(self):
             sig = self.fp.read(4)
    -        if sig == b'\xff\x4f\xff\x51':
    +        if sig == b"\xff\x4f\xff\x51":
                 self.codec = "j2k"
    -            self.size, self.mode = _parse_codestream(self.fp)
    +            self._size, self.mode = _parse_codestream(self.fp)
             else:
                 sig = sig + self.fp.read(8)
     
    -            if sig == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a':
    +            if sig == b"\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a":
                     self.codec = "jp2"
    -                self.size, self.mode = _parse_jp2_header(self.fp)
    +                header = _parse_jp2_header(self.fp)
    +                self._size, self.mode, self.custom_mimetype = header
                 else:
    -                raise SyntaxError('not a JPEG 2000 file')
    +                raise SyntaxError("not a JPEG 2000 file")
     
             if self.size is None or self.mode is None:
    -            raise SyntaxError('unable to determine size/mode')
    +            raise SyntaxError("unable to determine size/mode")
     
    -        self.reduce = 0
    +        self._reduce = 0
             self.layers = 0
     
             fd = -1
    @@ -181,69 +185,101 @@ class Jpeg2KImageFile(ImageFile.ImageFile):
             try:
                 fd = self.fp.fileno()
                 length = os.fstat(fd).st_size
    -        except:
    +        except Exception:
                 fd = -1
                 try:
                     pos = self.fp.tell()
    -                self.fp.seek(0, 2)
    +                self.fp.seek(0, io.SEEK_END)
                     length = self.fp.tell()
    -                self.fp.seek(pos, 0)
    -            except:
    +                self.fp.seek(pos)
    +            except Exception:
                     length = -1
     
    -        self.tile = [('jpeg2k', (0, 0) + self.size, 0,
    -                      (self.codec, self.reduce, self.layers, fd, length))]
    +        self.tile = [
    +            (
    +                "jpeg2k",
    +                (0, 0) + self.size,
    +                0,
    +                (self.codec, self._reduce, self.layers, fd, length),
    +            )
    +        ]
    +
    +    @property
    +    def reduce(self):
    +        # https://github.com/python-pillow/Pillow/issues/4343 found that the
    +        # new Image 'reduce' method was shadowed by this plugin's 'reduce'
    +        # property. This attempts to allow for both scenarios
    +        return self._reduce or super().reduce
    +
    +    @reduce.setter
    +    def reduce(self, value):
    +        self._reduce = value
     
         def load(self):
    -        if self.reduce:
    -            power = 1 << self.reduce
    +        if self.tile and self._reduce:
    +            power = 1 << self._reduce
                 adjust = power >> 1
    -            self.size = (int((self.size[0] + adjust) / power),
    -                         int((self.size[1] + adjust) / power))
    +            self._size = (
    +                int((self.size[0] + adjust) / power),
    +                int((self.size[1] + adjust) / power),
    +            )
     
    -        if self.tile:
                 # Update the reduce and layers settings
                 t = self.tile[0]
    -            t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4])
    +            t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4])
                 self.tile = [(t[0], (0, 0) + self.size, t[2], t3)]
     
             return ImageFile.ImageFile.load(self)
     
     
     def _accept(prefix):
    -    return (prefix[:4] == b'\xff\x4f\xff\x51' or
    -            prefix[:12] == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a')
    +    return (
    +        prefix[:4] == b"\xff\x4f\xff\x51"
    +        or prefix[:12] == b"\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a"
    +    )
     
     
     # ------------------------------------------------------------
     # Save support
     
    +
     def _save(im, fp, filename):
    -    if filename.endswith('.j2k'):
    -        kind = 'j2k'
    +    if filename.endswith(".j2k"):
    +        kind = "j2k"
         else:
    -        kind = 'jp2'
    +        kind = "jp2"
     
         # Get the keyword arguments
         info = im.encoderinfo
     
    -    offset = info.get('offset', None)
    -    tile_offset = info.get('tile_offset', None)
    -    tile_size = info.get('tile_size', None)
    -    quality_mode = info.get('quality_mode', 'rates')
    -    quality_layers = info.get('quality_layers', None)
    -    num_resolutions = info.get('num_resolutions', 0)
    -    cblk_size = info.get('codeblock_size', None)
    -    precinct_size = info.get('precinct_size', None)
    -    irreversible = info.get('irreversible', False)
    -    progression = info.get('progression', 'LRCP')
    -    cinema_mode = info.get('cinema_mode', 'no')
    +    offset = info.get("offset", None)
    +    tile_offset = info.get("tile_offset", None)
    +    tile_size = info.get("tile_size", None)
    +    quality_mode = info.get("quality_mode", "rates")
    +    quality_layers = info.get("quality_layers", None)
    +    if quality_layers is not None and not (
    +        isinstance(quality_layers, (list, tuple))
    +        and all(
    +            [
    +                isinstance(quality_layer, (int, float))
    +                for quality_layer in quality_layers
    +            ]
    +        )
    +    ):
    +        raise ValueError("quality_layers must be a sequence of numbers")
    +
    +    num_resolutions = info.get("num_resolutions", 0)
    +    cblk_size = info.get("codeblock_size", None)
    +    precinct_size = info.get("precinct_size", None)
    +    irreversible = info.get("irreversible", False)
    +    progression = info.get("progression", "LRCP")
    +    cinema_mode = info.get("cinema_mode", "no")
         fd = -1
     
         if hasattr(fp, "fileno"):
             try:
                 fd = fp.fileno()
    -        except:
    +        except Exception:
                 fd = -1
     
         im.encoderconfig = (
    @@ -258,10 +294,11 @@ def _save(im, fp, filename):
             irreversible,
             progression,
             cinema_mode,
    -        fd
    +        fd,
         )
     
    -    ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)])
    +    ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)])
    +
     
     # ------------------------------------------------------------
     # Registry stuff
    @@ -270,7 +307,8 @@ def _save(im, fp, filename):
     Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept)
     Image.register_save(Jpeg2KImageFile.format, _save)
     
    -Image.register_extensions(Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"])
    +Image.register_extensions(
    +    Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"]
    +)
     
    -Image.register_mime(Jpeg2KImageFile.format, 'image/jp2')
    -Image.register_mime(Jpeg2KImageFile.format, 'image/jpx')
    +Image.register_mime(Jpeg2KImageFile.format, "image/jp2")
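
The save path now validates quality_layers up front instead of failing inside the encoder. A sketch of both outcomes (requires Pillow built with OpenJPEG; the output path is a placeholder):

    from PIL import Image

    im = Image.new("RGB", (64, 64), "gray")
    im.save("out.jp2", quality_mode="dB", quality_layers=[38])  # sequence: OK
    try:
        im.save("out.jp2", quality_layers="38")                 # not a sequence
    except ValueError as err:
        print(err)   # quality_layers must be a sequence of numbers
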
    diff --git a/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py
    index 97ef834..2aa029e 100644
    --- a/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py
    @@ -31,26 +31,24 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import print_function
    -
     import array
    -import struct
     import io
    +import os
    +import struct
    +import subprocess
    +import tempfile
     import warnings
    +
     from . import Image, ImageFile, TiffImagePlugin
    -from ._binary import i8, o8, i16be as i16
    +from ._binary import i8, i16be as i16, i32be as i32, o8
     from .JpegPresets import presets
    -from ._util import isStringType
    -
    -__version__ = "0.6"
    -
     
     #
     # Parser
     
    +
     def Skip(self, marker):
    -    n = i16(self.fp.read(2))-2
    +    n = i16(self.fp.read(2)) - 2
         ImageFile._safe_read(self.fp, n)
     
     
    @@ -59,7 +57,7 @@ def APP(self, marker):
         # Application marker.  Store these in the APP dictionary.
         # Also look for well-known application markers.
     
    -    n = i16(self.fp.read(2))-2
    +    n = i16(self.fp.read(2)) - 2
         s = ImageFile._safe_read(self.fp, n)
     
         app = "APP%d" % (marker & 15)
    @@ -75,7 +73,7 @@ def APP(self, marker):
             try:
                 jfif_unit = i8(s[7])
                 jfif_density = i16(s, 8), i16(s, 10)
    -        except:
    +        except Exception:
                 pass
             else:
                 if jfif_unit == 1:
    @@ -84,7 +82,7 @@ def APP(self, marker):
                 self.info["jfif_density"] = jfif_density
         elif marker == 0xFFE1 and s[:5] == b"Exif\0":
             if "exif" not in self.info:
    -            # extract Exif information (incomplete)
    +            # extract EXIF information (incomplete)
                 self.info["exif"] = s  # FIXME: value will change
         elif marker == 0xFFE2 and s[:5] == b"FPXR\0":
             # extract FlashPix information (incomplete)
    @@ -102,12 +100,44 @@ def APP(self, marker):
             # reassemble the profile, rather than assuming that the APP2
             # markers appear in the correct sequence.
             self.icclist.append(s)
    +    elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00":
    +        # parse the image resource block
    +        offset = 14
    +        photoshop = self.info.setdefault("photoshop", {})
    +        while s[offset : offset + 4] == b"8BIM":
    +            try:
    +                offset += 4
    +                # resource code
    +                code = i16(s, offset)
    +                offset += 2
    +                # resource name (usually empty)
    +                name_len = i8(s[offset])
    +                # name = s[offset+1:offset+1+name_len]
    +                offset += 1 + name_len
    +                offset += offset & 1  # align
    +                # resource data block
    +                size = i32(s, offset)
    +                offset += 4
    +                data = s[offset : offset + size]
    +                if code == 0x03ED:  # ResolutionInfo
    +                    data = {
    +                        "XResolution": i32(data[:4]) / 65536,
    +                        "DisplayedUnitsX": i16(data[4:8]),
    +                        "YResolution": i32(data[8:12]) / 65536,
    +                        "DisplayedUnitsY": i16(data[12:]),
    +                    }
    +                photoshop[code] = data
    +                offset += size
    +                offset += offset & 1  # align
    +            except struct.error:
    +                break  # insufficient data
    +
         elif marker == 0xFFEE and s[:5] == b"Adobe":
             self.info["adobe"] = i16(s, 5)
             # extract Adobe custom properties
             try:
                 adobe_transform = i8(s[1])
    -        except:
    +        except Exception:
                 pass
             else:
                 self.info["adobe_transform"] = adobe_transform
    @@ -121,30 +151,32 @@ def APP(self, marker):
         # If DPI isn't in JPEG header, fetch from EXIF
         if "dpi" not in self.info and "exif" in self.info:
             try:
    -            exif = self._getexif()
    +            exif = self.getexif()
                 resolution_unit = exif[0x0128]
                 x_resolution = exif[0x011A]
                 try:
    -                dpi = x_resolution[0] / x_resolution[1]
    +                dpi = float(x_resolution[0]) / x_resolution[1]
                 except TypeError:
                     dpi = x_resolution
                 if resolution_unit == 3:  # cm
                     # 1 dpcm = 2.54 dpi
                     dpi *= 2.54
    -            self.info["dpi"] = dpi, dpi
    -        except (KeyError, SyntaxError, ZeroDivisionError):
    -            # SyntaxError for invalid/unreadable exif
    +            self.info["dpi"] = int(dpi + 0.5), int(dpi + 0.5)
    +        except (KeyError, SyntaxError, ValueError, ZeroDivisionError):
    +            # SyntaxError for invalid/unreadable EXIF
                 # KeyError for dpi not included
                 # ZeroDivisionError for invalid dpi rational value
    +            # ValueError for x_resolution[0] being an invalid float
                 self.info["dpi"] = 72, 72
     
     
     def COM(self, marker):
         #
         # Comment marker.  Store these in the APP dictionary.
    -    n = i16(self.fp.read(2))-2
    +    n = i16(self.fp.read(2)) - 2
         s = ImageFile._safe_read(self.fp, n)
     
    +    self.info["comment"] = s
         self.app["COM"] = s  # compatibility
         self.applist.append(("COM", s))
     
    @@ -157,9 +189,9 @@ def SOF(self, marker):
         # mode.  Note that this could be made a bit brighter, by
         # looking for JFIF and Adobe APP markers.
     
    -    n = i16(self.fp.read(2))-2
    +    n = i16(self.fp.read(2)) - 2
         s = ImageFile._safe_read(self.fp, n)
    -    self.size = i16(s[3:]), i16(s[1:])
    +    self._size = i16(s[3:]), i16(s[1:])
     
         self.bits = i8(s[0])
         if self.bits != 8:
    @@ -192,9 +224,9 @@ def SOF(self, marker):
             self.icclist = None
     
         for i in range(6, len(s), 3):
    -        t = s[i:i+3]
    +        t = s[i : i + 3]
             # 4-tuples: id, vsamp, hsamp, qtable
    -        self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2])))
    +        self.layer.append((t[0], i8(t[1]) // 16, i8(t[1]) & 15, i8(t[2])))
     
     
     def DQT(self, marker):
    @@ -206,13 +238,13 @@ def DQT(self, marker):
         # FIXME: The quantization tables can be used to estimate the
         # compression quality.
     
    -    n = i16(self.fp.read(2))-2
    +    n = i16(self.fp.read(2)) - 2
         s = ImageFile._safe_read(self.fp, n)
         while len(s):
             if len(s) < 65:
                 raise SyntaxError("bad quantization table marker")
             v = i8(s[0])
    -        if v//16 == 0:
    +        if v // 16 == 0:
                 self.quantization[v & 15] = array.array("B", s[1:65])
                 s = s[65:]
             else:
    @@ -286,7 +318,7 @@ MARKER = {
         0xFFFB: ("JPG11", "Extension 11", None),
         0xFFFC: ("JPG12", "Extension 12", None),
         0xFFFD: ("JPG13", "Extension 13", None),
    -    0xFFFE: ("COM", "Comment", COM)
    +    0xFFFE: ("COM", "Comment", COM),
     }
     
     
    @@ -297,6 +329,7 @@ def _accept(prefix):
     ##
     # Image plugin for JPEG and JFIF images.
     
    +
     class JpegImageFile(ImageFile.ImageFile):
     
         format = "JPEG"
    @@ -334,15 +367,13 @@ class JpegImageFile(ImageFile.ImageFile):
     
                 if i in MARKER:
                     name, description, handler = MARKER[i]
    -                # print(hex(i), name, description)
                     if handler is not None:
                         handler(self, i)
                     if i == 0xFFDA:  # start of scan
                         rawmode = self.mode
                         if self.mode == "CMYK":
                             rawmode = "CMYK;I"  # assume adobe conventions
    -                    self.tile = [("jpeg", (0, 0) + self.size, 0,
    -                                 (rawmode, ""))]
    +                    self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))]
                         # self.__offset = self.fp.tell()
                         break
                     s = self.fp.read(1)
    @@ -379,7 +410,8 @@ class JpegImageFile(ImageFile.ImageFile):
                 return
     
             d, e, o, a = self.tile[0]
    -        scale = 0
    +        scale = 1
    +        original_size = self.size
     
             if a[0] == "RGB" and mode in ["L", "YCbCr"]:
                 self.mode = mode
    @@ -390,22 +422,25 @@ class JpegImageFile(ImageFile.ImageFile):
                 for s in [8, 4, 2, 1]:
                     if scale >= s:
                         break
    -            e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1]
    -            self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s)
    +            e = (
    +                e[0],
    +                e[1],
    +                (e[2] - e[0] + s - 1) // s + e[0],
    +                (e[3] - e[1] + s - 1) // s + e[1],
    +            )
    +            self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s)
                 scale = s
     
             self.tile = [(d, e, o, a)]
             self.decoderconfig = (scale, 0)
     
    -        return self
    +        box = (0, 0, original_size[0] / scale, original_size[1] / scale)
    +        return (self.mode, box)
     
         def load_djpeg(self):
     
             # ALTERNATIVE: handle JPEGs via the IJG command line utilities
     
    -        import subprocess
    -        import tempfile
    -        import os
             f, path = tempfile.mkstemp()
             os.close(f)
             if os.path.exists(self.filename):
    @@ -414,9 +449,9 @@ class JpegImageFile(ImageFile.ImageFile):
                 raise ValueError("Invalid Filename")
     
             try:
    -            _im = Image.open(path)
    -            _im.load()
    -            self.im = _im.im
    +            with Image.open(path) as _im:
    +                _im.load()
    +                self.im = _im.im
             finally:
                 try:
                     os.unlink(path)
    @@ -424,7 +459,7 @@ class JpegImageFile(ImageFile.ImageFile):
                     pass
     
             self.mode = self.im.mode
    -        self.size = self.im.size
    +        self._size = self.im.size
     
             self.tile = []
     
    @@ -438,60 +473,14 @@ class JpegImageFile(ImageFile.ImageFile):
     def _fixup_dict(src_dict):
         # Helper function for _getexif()
         # returns a dict with any single item tuples/lists as individual values
    -    def _fixup(value):
    -        try:
    -            if len(value) == 1 and not isinstance(value, dict):
    -                return value[0]
    -        except:
    -            pass
    -        return value
    -
    -    return {k: _fixup(v) for k, v in src_dict.items()}
    +    exif = Image.Exif()
    +    return exif._fixup_dict(src_dict)
     
     
     def _getexif(self):
    -    # Extract EXIF information.  This method is highly experimental,
    -    # and is likely to be replaced with something better in a future
    -    # version.
    -
    -    # The EXIF record consists of a TIFF file embedded in a JPEG
    -    # application marker (!).
    -    try:
    -        data = self.info["exif"]
    -    except KeyError:
    +    if "exif" not in self.info:
             return None
    -    file = io.BytesIO(data[6:])
    -    head = file.read(8)
    -    # process dictionary
    -    info = TiffImagePlugin.ImageFileDirectory_v1(head)
    -    info.load(file)
    -    exif = dict(_fixup_dict(info))
    -    # get exif extension
    -    try:
    -        # exif field 0x8769 is an offset pointer to the location
    -        # of the nested embedded exif ifd.
    -        # It should be a long, but may be corrupted.
    -        file.seek(exif[0x8769])
    -    except (KeyError, TypeError):
    -        pass
    -    else:
    -        info = TiffImagePlugin.ImageFileDirectory_v1(head)
    -        info.load(file)
    -        exif.update(_fixup_dict(info))
    -    # get gpsinfo extension
    -    try:
    -        # exif field 0x8825 is an offset pointer to the location
    -        # of the nested embedded gps exif ifd.
    -        # It should be a long, but may be corrupted.
    -        file.seek(exif[0x8825])
    -    except (KeyError, TypeError):
    -        pass
    -    else:
    -        info = TiffImagePlugin.ImageFileDirectory_v1(head)
    -        info.load(file)
    -        exif[0x8825] = _fixup_dict(info)
    -
    -    return exif
    +    return dict(self.getexif())
     
     
     def _getmp(self):
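_getexif() above is reduced to a thin wrapper over the shared Image.Exif machinery, so im.getexif() is the equivalent public entry point (available since Pillow 6.0). A sketch (file name is hypothetical):

    from PIL import Image, ExifTags

    with Image.open("example.jpg") as im:
        exif = im.getexif()
        for tag_id, value in exif.items():
            print(ExifTags.TAGS.get(tag_id, tag_id), value)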
    @@ -507,13 +496,14 @@ def _getmp(self):
             return None
         file_contents = io.BytesIO(data)
         head = file_contents.read(8)
    -    endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<'
    +    endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<"
         # process dictionary
         try:
             info = TiffImagePlugin.ImageFileDirectory_v2(head)
    +        file_contents.seek(info.next)
             info.load(file_contents)
             mp = dict(info)
    -    except:
    +    except Exception:
             raise SyntaxError("malformed MP Index (unreadable directory)")
         # it's an error not to have a number of images
         try:
    @@ -526,37 +516,33 @@ def _getmp(self):
             rawmpentries = mp[0xB002]
             for entrynum in range(0, quant):
                 unpackedentry = struct.unpack_from(
    -                '{}LLLHH'.format(endianness), rawmpentries, entrynum * 16)
    -            labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1',
    -                      'EntryNo2')
    +                "{}LLLHH".format(endianness), rawmpentries, entrynum * 16
    +            )
    +            labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2")
                 mpentry = dict(zip(labels, unpackedentry))
                 mpentryattr = {
    -                'DependentParentImageFlag': bool(mpentry['Attribute'] &
    -                                                 (1 << 31)),
    -                'DependentChildImageFlag': bool(mpentry['Attribute'] &
    -                                                (1 << 30)),
    -                'RepresentativeImageFlag': bool(mpentry['Attribute'] &
    -                                                (1 << 29)),
    -                'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27,
    -                'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24,
    -                'MPType': mpentry['Attribute'] & 0x00FFFFFF
    +                "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)),
    +                "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)),
    +                "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)),
    +                "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27,
    +                "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24,
    +                "MPType": mpentry["Attribute"] & 0x00FFFFFF,
                 }
    -            if mpentryattr['ImageDataFormat'] == 0:
    -                mpentryattr['ImageDataFormat'] = 'JPEG'
    +            if mpentryattr["ImageDataFormat"] == 0:
    +                mpentryattr["ImageDataFormat"] = "JPEG"
                 else:
                     raise SyntaxError("unsupported picture format in MPO")
                 mptypemap = {
    -                0x000000: 'Undefined',
    -                0x010001: 'Large Thumbnail (VGA Equivalent)',
    -                0x010002: 'Large Thumbnail (Full HD Equivalent)',
    -                0x020001: 'Multi-Frame Image (Panorama)',
    -                0x020002: 'Multi-Frame Image: (Disparity)',
    -                0x020003: 'Multi-Frame Image: (Multi-Angle)',
    -                0x030000: 'Baseline MP Primary Image'
    +                0x000000: "Undefined",
    +                0x010001: "Large Thumbnail (VGA Equivalent)",
    +                0x010002: "Large Thumbnail (Full HD Equivalent)",
    +                0x020001: "Multi-Frame Image (Panorama)",
    +                0x020002: "Multi-Frame Image: (Disparity)",
    +                0x020003: "Multi-Frame Image: (Multi-Angle)",
    +                0x030000: "Baseline MP Primary Image",
                 }
    -            mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'],
    -                                                  'Unknown')
    -            mpentry['Attribute'] = mpentryattr
    +            mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown")
    +            mpentry["Attribute"] = mpentryattr
                 mpentries.append(mpentry)
             mp[0xB002] = mpentries
         except KeyError:
    @@ -579,19 +565,24 @@ RAWMODE = {
         "YCbCr": "YCbCr",
     }
     
    -zigzag_index = (0,  1,  5,  6, 14, 15, 27, 28,
    -                2,  4,  7, 13, 16, 26, 29, 42,
    -                3,  8, 12, 17, 25, 30, 41, 43,
    -                9, 11, 18, 24, 31, 40, 44, 53,
    -               10, 19, 23, 32, 39, 45, 52, 54,
    -               20, 22, 33, 38, 46, 51, 55, 60,
    -               21, 34, 37, 47, 50, 56, 59, 61,
    -               35, 36, 48, 49, 57, 58, 62, 63)
    +# fmt: off
    +zigzag_index = (
    +    0,  1,  5,  6, 14, 15, 27, 28,
    +    2,  4,  7, 13, 16, 26, 29, 42,
    +    3,  8, 12, 17, 25, 30, 41, 43,
    +    9, 11, 18, 24, 31, 40, 44, 53,
    +    10, 19, 23, 32, 39, 45, 52, 54,
    +    20, 22, 33, 38, 46, 51, 55, 60,
    +    21, 34, 37, 47, 50, 56, 59, 61,
    +    35, 36, 48, 49, 57, 58, 62, 63,
    +)
     
    -samplings = {(1, 1, 1, 1, 1, 1): 0,
    -             (2, 1, 1, 1, 1, 1): 1,
    -             (2, 2, 1, 1, 1, 1): 2,
    -             }
    +samplings = {
    +    (1, 1, 1, 1, 1, 1): 0,
    +    (2, 1, 1, 1, 1, 1): 1,
    +    (2, 2, 1, 1, 1, 1): 2,
    +}
    +# fmt: on
     
     
     def convert_dict_qtables(qtables):
    @@ -609,7 +600,7 @@ def get_sampling(im):
         # NOTE: currently Pillow can't encode JPEG to YCCK format.
         # If YCCK support is added in the future, subsampling code will have
         # to be updated (here and in JpegEncode.c) to deal with 4 layers.
    -    if not hasattr(im, 'layers') or im.layers in (1, 4):
    +    if not hasattr(im, "layers") or im.layers in (1, 4):
             return -1
         sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3]
         return samplings.get(sampling, -1)
    @@ -620,32 +611,32 @@ def _save(im, fp, filename):
         try:
             rawmode = RAWMODE[im.mode]
         except KeyError:
    -        raise IOError("cannot write mode %s as JPEG" % im.mode)
    +        raise OSError("cannot write mode %s as JPEG" % im.mode)
     
         info = im.encoderinfo
     
    -    dpi = [int(round(x)) for x in info.get("dpi", (0, 0))]
    +    dpi = [round(x) for x in info.get("dpi", (0, 0))]
     
    -    quality = info.get("quality", 0)
    +    quality = info.get("quality", -1)
         subsampling = info.get("subsampling", -1)
         qtables = info.get("qtables")
     
         if quality == "keep":
    -        quality = 0
    +        quality = -1
             subsampling = "keep"
             qtables = "keep"
         elif quality in presets:
             preset = presets[quality]
    -        quality = 0
    -        subsampling = preset.get('subsampling', -1)
    -        qtables = preset.get('quantization')
    +        quality = -1
    +        subsampling = preset.get("subsampling", -1)
    +        qtables = preset.get("quantization")
         elif not isinstance(quality, int):
             raise ValueError("Invalid quality setting")
         else:
             if subsampling in presets:
    -            subsampling = presets[subsampling].get('subsampling', -1)
    -        if isStringType(qtables) and qtables in presets:
    -            qtables = presets[qtables].get('quantization')
    +            subsampling = presets[subsampling].get("subsampling", -1)
    +        if isinstance(qtables, str) and qtables in presets:
    +            qtables = presets[qtables].get("quantization")
     
         if subsampling == "4:4:4":
             subsampling = 0
    @@ -659,21 +650,23 @@ def _save(im, fp, filename):
             subsampling = 2
         elif subsampling == "keep":
             if im.format != "JPEG":
    -            raise ValueError(
    -                "Cannot use 'keep' when original image is not a JPEG")
    +            raise ValueError("Cannot use 'keep' when original image is not a JPEG")
             subsampling = get_sampling(im)
     
         def validate_qtables(qtables):
             if qtables is None:
                 return qtables
    -        if isStringType(qtables):
    +        if isinstance(qtables, str):
                 try:
    -                lines = [int(num) for line in qtables.splitlines()
    -                         for num in line.split('#', 1)[0].split()]
    +                lines = [
    +                    int(num)
    +                    for line in qtables.splitlines()
    +                    for num in line.split("#", 1)[0].split()
    +                ]
                 except ValueError:
                     raise ValueError("Invalid quantization table")
                 else:
    -                qtables = [lines[s:s+64] for s in range(0, len(lines), 64)]
    +                qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)]
             if isinstance(qtables, (tuple, list, dict)):
                 if isinstance(qtables, dict):
                     qtables = convert_dict_qtables(qtables)
    @@ -685,7 +678,7 @@ def _save(im, fp, filename):
                     try:
                         if len(table) != 64:
                             raise TypeError
    -                    table = array.array('B', table)
    +                    table = array.array("B", table)
                     except TypeError:
                         raise ValueError("Invalid quantization table")
                     else:
    @@ -694,8 +687,7 @@ def _save(im, fp, filename):
     
         if qtables == "keep":
             if im.format != "JPEG":
    -            raise ValueError(
    -                "Cannot use 'keep' when original image is not a JPEG")
    +            raise ValueError("Cannot use 'keep' when original image is not a JPEG")
             qtables = getattr(im, "quantization", None)
         qtables = validate_qtables(qtables)
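Throughout _save, the "let libjpeg decide" sentinel for quality changes from 0 to -1, so quality="keep" and the named presets now map to -1 internally; caller-visible usage stays the same. A sketch (file names are hypothetical):

    from PIL import Image

    with Image.open("photo.jpg") as im:
        im.save("copy.jpg", quality="keep")     # reuse source qtables/subsampling
        im.save("web.jpg", quality="web_high")  # named preset from JpegPresets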
     
    @@ -713,18 +705,27 @@ def _save(im, fp, filename):
             i = 1
             for marker in markers:
                 size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker))
    -            extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) +
    -                      o8(len(markers)) + marker)
    +            extra += (
    +                b"\xFF\xE2"
    +                + size
    +                + b"ICC_PROFILE\0"
    +                + o8(i)
    +                + o8(len(markers))
    +                + marker
    +            )
                 i += 1
     
         # "progressive" is the official name, but older documentation
         # says "progression"
         # FIXME: issue a warning if the wrong form is used (post-1.1.7)
    -    progressive = (info.get("progressive", False) or
    -                   info.get("progression", False))
    +    progressive = info.get("progressive", False) or info.get("progression", False)
     
         optimize = info.get("optimize", False)
     
    +    exif = info.get("exif", b"")
    +    if isinstance(exif, Image.Exif):
    +        exif = exif.tobytes()
    +
         # get keyword arguments
         im.encoderconfig = (
             quality,
    @@ -732,12 +733,13 @@ def _save(im, fp, filename):
             info.get("smooth", 0),
             optimize,
             info.get("streamtype", 0),
    -        dpi[0], dpi[1],
    +        dpi[0],
    +        dpi[1],
             subsampling,
             qtables,
             extra,
    -        info.get("exif", b"")
    -        )
    +        exif,
    +    )
     
         # if we optimize, libjpeg needs a buffer big enough to hold the whole image
         # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is
    @@ -746,26 +748,23 @@ def _save(im, fp, filename):
         bufsize = 0
         if optimize or progressive:
             # CMYK can be bigger
    -        if im.mode == 'CMYK':
    +        if im.mode == "CMYK":
                 bufsize = 4 * im.size[0] * im.size[1]
    -        # keep sets quality to 0, but the actual value may be high.
    -        elif quality >= 95 or quality == 0:
    +        # keep sets quality to -1, but the actual value may be high.
    +        elif quality >= 95 or quality == -1:
                 bufsize = 2 * im.size[0] * im.size[1]
             else:
                 bufsize = im.size[0] * im.size[1]
     
    -    # The exif info needs to be written as one block, + APP1, + one spare byte.
    +    # The EXIF info needs to be written as one block, + APP1, + one spare byte.
         # Ensure that our buffer is big enough. Same with the icc_profile block.
    -    bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5,
    -                  len(extra) + 1)
    +    bufsize = max(ImageFile.MAXBLOCK, bufsize, len(exif) + 5, len(extra) + 1)
     
    -    ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize)
    +    ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize)
     
     
     def _save_cjpeg(im, fp, filename):
         # ALTERNATIVE: handle JPEGs via the IJG command line utilities.
    -    import os
    -    import subprocess
         tempfile = im._dump()
         subprocess.check_call(["cjpeg", "-outfile", filename, tempfile])
         try:
    @@ -783,17 +782,21 @@ def jpeg_factory(fp=None, filename=None):
             if mpheader[45057] > 1:
                 # It's actually an MPO
                 from .MpoImagePlugin import MpoImageFile
    -            im = MpoImageFile(fp, filename)
    +
    +            # Don't reload everything, just convert it.
    +            im = MpoImageFile.adopt(im, mpheader)
         except (TypeError, IndexError):
             # It is really a JPEG
             pass
         except SyntaxError:
    -        warnings.warn("Image appears to be a malformed MPO file, it will be "
    -                      "interpreted as a base JPEG file")
    +        warnings.warn(
    +            "Image appears to be a malformed MPO file, it will be "
    +            "interpreted as a base JPEG file"
    +        )
         return im
     
     
    -# -------------------------------------------------------------------q-
    +# ---------------------------------------------------------------------
     # Registry stuff
     
     Image.register_open(JpegImageFile.format, jpeg_factory, _accept)
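As changed above, _save() now also accepts an Image.Exif instance for the exif keyword and serializes it with tobytes(). A sketch (file names are hypothetical):

    from PIL import Image

    with Image.open("src.jpg") as im:
        exif = im.getexif()
        exif[0x010E] = "edited copy"   # 0x010E = ImageDescription
        im.save("dst.jpg", exif=exif)  # Image.Exif converted via tobytes()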
    diff --git a/server/www/packages/packages-windows/x86/PIL/JpegPresets.py b/server/www/packages/packages-windows/x86/PIL/JpegPresets.py
    index 5f01f0d..012bf81 100644
    --- a/server/www/packages/packages-windows/x86/PIL/JpegPresets.py
    +++ b/server/www/packages/packages-windows/x86/PIL/JpegPresets.py
    @@ -33,7 +33,10 @@ Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and
     4:2:0.
     
     You can get the subsampling of a JPEG with the
    -`JpegImagePlugin.get_subsampling(im)` function.
    +`JpegImagePlugin.get_sampling(im)` function.
    +
+In JPEG-compressed data, a JPEG marker is used instead of an EXIF tag.
    +(ref.: https://www.exiv2.org/tags.html)
     
     
     Quantization tables
    @@ -62,11 +65,13 @@ The tables format between im.quantization and quantization in presets differ in
     You can convert the dict format to the preset format with the
     `JpegImagePlugin.convert_dict_qtables(dict_qtables)` function.
     
    -Libjpeg ref.: https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html
    +Libjpeg ref.:
    +https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html
     
     """
     
    -presets = {
    +# fmt: off
    +presets = {  # noqa: E128
                 'web_low':      {'subsampling':  2,  # "4:2:0"
                                  'quantization': [
                                    [20, 16, 25, 39, 50, 46, 62, 68,
    @@ -239,3 +244,4 @@ presets = {
                                     15, 12, 12, 12, 12, 12, 12, 12]
                                  ]},
     }
    +# fmt: on
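The presets are plain dicts, so the docstring's cross-references can be verified directly; a sketch:

    from PIL.JpegPresets import presets

    p = presets["web_low"]
    print(p["subsampling"])        # 2, i.e. 4:2:0
    print(len(p["quantization"]))  # two 64-entry tables (luminance, chrominance)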
    diff --git a/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py
    index 06da33f..cd047fe 100644
    --- a/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py
    @@ -17,9 +17,8 @@
     #
     
     import struct
    -from . import Image, ImageFile
     
    -__version__ = "0.2"
    +from . import Image, ImageFile
     
     
     def _accept(s):
    @@ -29,6 +28,7 @@ def _accept(s):
     ##
     # Image plugin for McIdas area images.
     
    +
     class McIdasImageFile(ImageFile.ImageFile):
     
         format = "MCIDAS"
    @@ -59,10 +59,10 @@ class McIdasImageFile(ImageFile.ImageFile):
                 raise SyntaxError("unsupported McIdas format")
     
             self.mode = mode
    -        self.size = w[10], w[9]
    +        self._size = w[10], w[9]
     
             offset = w[34] + w[15]
    -        stride = w[15] + w[10]*w[11]*w[14]
    +        stride = w[15] + w[10] * w[11] * w[14]
     
             self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))]
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py
    index 1dbb6a5..8610988 100644
    --- a/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py
    @@ -17,12 +17,9 @@
     #
     
     
    -from . import Image, TiffImagePlugin
    -
     import olefile
     
    -__version__ = "0.1"
    -
    +from . import Image, TiffImagePlugin
     
     #
     # --------------------------------------------------------------------
    @@ -35,6 +32,7 @@ def _accept(prefix):
     ##
     # Image plugin for Microsoft's Image Composer file format.
     
    +
     class MicImageFile(TiffImagePlugin.TiffImageFile):
     
         format = "MIC"
    @@ -48,7 +46,7 @@ class MicImageFile(TiffImagePlugin.TiffImageFile):
     
             try:
                 self.ole = olefile.OleFileIO(self.fp)
    -        except IOError:
    +        except OSError:
                 raise SyntaxError("not an MIC file; invalid OLE file")
     
             # find ACI subfiles with Image members (maybe not the
    @@ -95,9 +93,17 @@ class MicImageFile(TiffImagePlugin.TiffImageFile):
             self.frame = frame
     
         def tell(self):
    -
             return self.frame
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     #
     # --------------------------------------------------------------------
    diff --git a/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py
    index fca7f9d..a358dfd 100644
    --- a/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py
    @@ -17,14 +17,11 @@
     from . import Image, ImageFile
     from ._binary import i8
     
    -__version__ = "0.1"
    -
    -
     #
     # Bitstream parser
     
    -class BitStream(object):
     
    +class BitStream:
         def __init__(self, fp):
             self.fp = fp
             self.bits = 0
    @@ -59,6 +56,7 @@ class BitStream(object):
     # Image plugin for MPEG streams.  This plugin can identify a stream,
     # but it cannot read it.
     
    +
     class MpegImageFile(ImageFile.ImageFile):
     
         format = "MPEG"
    @@ -72,7 +70,7 @@ class MpegImageFile(ImageFile.ImageFile):
                 raise SyntaxError("not an MPEG file")
     
             self.mode = "RGB"
    -        self.size = s.read(12), s.read(12)
    +        self._size = s.read(12), s.read(12)
     
     
     # --------------------------------------------------------------------
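This plugin only identifies MPEG streams (it sets no tiles, so nothing is decoded); BitStream exists to pull the width and height fields out of the sequence header. A sketch (file name is hypothetical):

    from PIL import Image

    with Image.open("clip.mpg") as im:
        print(im.format, im.size)  # MPEG (width, height)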
    diff --git a/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py
    index 460ccec..e97176d 100644
    --- a/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py
    @@ -18,9 +18,8 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image, JpegImagePlugin
    -
    -__version__ = "0.1"
    +from . import Image, ImageFile, JpegImagePlugin
    +from ._binary import i16be as i16
     
     
     def _accept(prefix):
    @@ -35,6 +34,7 @@ def _save(im, fp, filename):
     ##
     # Image plugin for MPO images.
     
    +
     class MpoImageFile(JpegImagePlugin.JpegImageFile):
     
         format = "MPO"
    @@ -44,15 +44,19 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile):
         def _open(self):
             self.fp.seek(0)  # prep the fp in order to pass the JPEG test
             JpegImagePlugin.JpegImageFile._open(self)
    -        self.mpinfo = self._getmp()
    +        self._after_jpeg_open()
    +
    +    def _after_jpeg_open(self, mpheader=None):
    +        self.mpinfo = mpheader if mpheader is not None else self._getmp()
             self.__framecount = self.mpinfo[0xB001]
    -        self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset']
    -                            for mpent in self.mpinfo[0xB002]]
    +        self.__mpoffsets = [
    +            mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002]
    +        ]
             self.__mpoffsets[0] = 0
             # Note that the following assertion will only be invalid if something
             # gets broken within JpegImagePlugin.
             assert self.__framecount == len(self.__mpoffsets)
    -        del self.info['mpoffset']  # no longer needed
    +        del self.info["mpoffset"]  # no longer needed
             self.__fp = self.fp  # FIXME: hack
             self.__fp.seek(self.__mpoffsets[0])  # get ready to read first frame
             self.__frame = 0
    @@ -76,16 +80,54 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile):
                 return
             self.fp = self.__fp
             self.offset = self.__mpoffsets[frame]
    -        self.tile = [
    -            ("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))
    -        ]
    +
    +        self.fp.seek(self.offset + 2)  # skip SOI marker
    +        segment = self.fp.read(2)
    +        if not segment:
    +            raise ValueError("No data found for frame")
    +        if i16(segment) == 0xFFE1:  # APP1
    +            n = i16(self.fp.read(2)) - 2
    +            self.info["exif"] = ImageFile._safe_read(self.fp, n)
    +
    +            exif = self.getexif()
    +            if 40962 in exif and 40963 in exif:
    +                self._size = (exif[40962], exif[40963])
    +        elif "exif" in self.info:
    +            del self.info["exif"]
    +
    +        self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))]
             self.__frame = frame
     
         def tell(self):
             return self.__frame
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
     
    -# -------------------------------------------------------------------q-
    +    @staticmethod
    +    def adopt(jpeg_instance, mpheader=None):
    +        """
+        Transform an instance of JpegImageFile into
+        an instance of MpoImageFile.
+        After the call, the JpegImageFile is extended
+        to be an MpoImageFile.
+
+        This is mainly useful when opening a JPEG file
+        that turns out to be an MPO, as it avoids a
+        second call to _open.
    +        """
    +        jpeg_instance.__class__ = MpoImageFile
    +        jpeg_instance._after_jpeg_open(mpheader)
    +        return jpeg_instance
    +
    +
    +# ---------------------------------------------------------------------
     # Registry stuff
     
     # Note that since MPO shares a factory with JPEG, we do not need to do a
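With adopt() above, jpeg_factory can upgrade an already-parsed JpegImageFile in place rather than re-reading the stream, and seek() now reloads per-frame EXIF and size. A sketch, assuming a hypothetical multi-frame MPO:

    from PIL import Image

    with Image.open("multi_frame.mpo") as im:
        print(type(im).__name__)  # MpoImageFile, adopted from JpegImageFile
        im.seek(1)                # reads the frame's APP1/EXIF, may adjust size
        print(im.tell(), im.size)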
    diff --git a/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py
    index 9692d11..2b2937e 100644
    --- a/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py
    @@ -23,13 +23,11 @@
     #
     # See also: http://www.fileformat.info/format/mspaint/egff.htm
     
    -from . import Image, ImageFile
    -from ._binary import i16le as i16, o16le as o16, i8
    -import struct
     import io
    +import struct
     
    -__version__ = "0.1"
    -
    +from . import Image, ImageFile
    +from ._binary import i8, i16le as i16, o16le as o16
     
     #
     # read MSP files
    @@ -43,6 +41,7 @@ def _accept(prefix):
 # Image plugin for Windows MSP images.  This plugin supports both
 # uncompressed (Windows 1.0) and RLE compressed (Windows 3.x) images.
     
    +
     class MspImageFile(ImageFile.ImageFile):
     
         format = "MSP"
    @@ -58,17 +57,17 @@ class MspImageFile(ImageFile.ImageFile):
             # Header checksum
             checksum = 0
             for i in range(0, 32, 2):
    -            checksum = checksum ^ i16(s[i:i+2])
    +            checksum = checksum ^ i16(s[i : i + 2])
             if checksum != 0:
                 raise SyntaxError("bad MSP checksum")
     
             self.mode = "1"
    -        self.size = i16(s[4:]), i16(s[6:])
    +        self._size = i16(s[4:]), i16(s[6:])
     
             if s[:4] == b"DanM":
    -            self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))]
    +            self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
             else:
    -            self.tile = [("MSP", (0, 0)+self.size, 32, None)]
    +            self.tile = [("MSP", (0, 0) + self.size, 32, None)]
     
     
     class MspDecoder(ImageFile.PyDecoder):
    @@ -111,13 +110,14 @@ class MspDecoder(ImageFile.PyDecoder):
         def decode(self, buffer):
     
             img = io.BytesIO()
    -        blank_line = bytearray((0xff,)*((self.state.xsize+7)//8))
    +        blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8))
             try:
                 self.fd.seek(32)
    -            rowmap = struct.unpack_from("<%dH" % (self.state.ysize),
    -                                        self.fd.read(self.state.ysize*2))
    +            rowmap = struct.unpack_from(
    +                "<%dH" % (self.state.ysize), self.fd.read(self.state.ysize * 2)
    +            )
             except struct.error:
    -            raise IOError("Truncated MSP file in row map")
    +            raise OSError("Truncated MSP file in row map")
     
             for x, rowlen in enumerate(rowmap):
                 try:
    @@ -126,8 +126,9 @@ class MspDecoder(ImageFile.PyDecoder):
                         continue
                     row = self.fd.read(rowlen)
                     if len(row) != rowlen:
    -                    raise IOError("Truncated MSP file, expected %d bytes on row %s",
    -                                  (rowlen, x))
    +                    raise OSError(
    +                        "Truncated MSP file, expected %d bytes on row %s", (rowlen, x)
    +                    )
                     idx = 0
                     while idx < rowlen:
                         runtype = i8(row[idx])
    @@ -138,18 +139,18 @@ class MspDecoder(ImageFile.PyDecoder):
                             idx += 2
                         else:
                             runcount = runtype
    -                        img.write(row[idx:idx+runcount])
    +                        img.write(row[idx : idx + runcount])
                             idx += runcount
     
                 except struct.error:
    -                raise IOError("Corrupted MSP file in row %d" % x)
    +                raise OSError("Corrupted MSP file in row %d" % x)
     
             self.set_as_raw(img.getvalue(), ("1", 0, 1))
     
             return 0, 0
     
     
    -Image.register_decoder('MSP', MspDecoder)
    +Image.register_decoder("MSP", MspDecoder)
     
     
     #
    @@ -159,7 +160,7 @@ Image.register_decoder('MSP', MspDecoder)
     def _save(im, fp, filename):
     
         if im.mode != "1":
    -        raise IOError("cannot write mode %s as MSP" % im.mode)
    +        raise OSError("cannot write mode %s as MSP" % im.mode)
     
         # create MSP header
         header = [0] * 16
    @@ -180,7 +181,7 @@ def _save(im, fp, filename):
             fp.write(o16(h))
     
         # image body
    -    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))])
     
     
     #
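MspDecoder above expands the Windows 3.x RLE rows into a raw 1-bit buffer, while _save only accepts mode "1". A sketch (file names are hypothetical):

    from PIL import Image

    with Image.open("paint.msp") as im:
        im.load()                        # RLE rows expanded by MspDecoder
        im.convert("1").save("out.msp")  # other modes raise OSError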
    diff --git a/server/www/packages/packages-windows/x86/PIL/OleFileIO.py b/server/www/packages/packages-windows/x86/PIL/OleFileIO.py
    deleted file mode 100644
    index b3caa10..0000000
    --- a/server/www/packages/packages-windows/x86/PIL/OleFileIO.py
    +++ /dev/null
    @@ -1,4 +0,0 @@
    -raise ImportError(
    -    'PIL.OleFileIO is deprecated. Use the olefile Python package '
    -    'instead. This module will be removed in a future version.'
    -)
    diff --git a/server/www/packages/packages-windows/x86/PIL/PSDraw.py b/server/www/packages/packages-windows/x86/PIL/PSDraw.py
    index d2ded6f..762d31e 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PSDraw.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PSDraw.py
    @@ -15,15 +15,15 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import EpsImagePlugin
    -from ._util import py3
     import sys
     
    +from . import EpsImagePlugin
    +
     ##
     # Simple Postscript graphics interface.
     
     
    -class PSDraw(object):
    +class PSDraw:
         """
         Sets up printing to the given file. If **fp** is omitted,
         :py:attr:`sys.stdout` is assumed.
    @@ -35,19 +35,21 @@ class PSDraw(object):
             self.fp = fp
     
         def _fp_write(self, to_write):
    -        if not py3 or self.fp == sys.stdout:
    +        if self.fp == sys.stdout:
                 self.fp.write(to_write)
             else:
    -            self.fp.write(bytes(to_write, 'UTF-8'))
    +            self.fp.write(bytes(to_write, "UTF-8"))
     
         def begin_document(self, id=None):
             """Set up printing of a document. (Write Postscript DSC header.)"""
             # FIXME: incomplete
    -        self._fp_write("%!PS-Adobe-3.0\n"
    -                       "save\n"
    -                       "/showpage { } def\n"
    -                       "%%EndComments\n"
    -                       "%%BeginDocument\n")
    +        self._fp_write(
    +            "%!PS-Adobe-3.0\n"
    +            "save\n"
    +            "/showpage { } def\n"
    +            "%%EndComments\n"
    +            "%%BeginDocument\n"
    +        )
             # self._fp_write(ERROR_PS)  # debugging!
             self._fp_write(EDROFF_PS)
             self._fp_write(VDI_PS)
    @@ -56,9 +58,7 @@ class PSDraw(object):
     
         def end_document(self):
             """Ends printing. (Write Postscript DSC footer.)"""
    -        self._fp_write("%%EndDocument\n"
    -                       "restore showpage\n"
    -                       "%%End\n")
    +        self._fp_write("%%EndDocument\nrestore showpage\n%%End\n")
             if hasattr(self.fp, "flush"):
                 self.fp.flush()
     
    @@ -71,8 +71,7 @@ class PSDraw(object):
             """
             if font not in self.isofont:
                 # reencode font
    -            self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" %
    -                           (font, font))
    +            self._fp_write("/PSDraw-{} ISOLatin1Encoding /{} E\n".format(font, font))
                 self.isofont[font] = 1
             # rough
             self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font))
    @@ -120,8 +119,8 @@ class PSDraw(object):
                 else:
                     dpi = 100  # greyscale
             # image size (on paper)
    -        x = float(im.size[0] * 72) / dpi
    -        y = float(im.size[1] * 72) / dpi
    +        x = im.size[0] * 72 / dpi
    +        y = im.size[1] * 72 / dpi
             # max allowed size
             xmax = float(box[2] - box[0])
             ymax = float(box[3] - box[1])
    @@ -133,15 +132,16 @@ class PSDraw(object):
                 y = ymax
             dx = (xmax - x) / 2 + box[0]
             dy = (ymax - y) / 2 + box[1]
    -        self._fp_write("gsave\n%f %f translate\n" % (dx, dy))
    +        self._fp_write("gsave\n{:f} {:f} translate\n".format(dx, dy))
             if (x, y) != im.size:
                 # EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
                 sx = x / im.size[0]
                 sy = y / im.size[1]
    -            self._fp_write("%f %f scale\n" % (sx, sy))
    +            self._fp_write("{:f} {:f} scale\n".format(sx, sy))
             EpsImagePlugin._save(im, self.fp, None, 0)
             self._fp_write("\ngrestore\n")
     
    +
     # --------------------------------------------------------------------
     # Postscript driver
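With the py3 shim gone, _fp_write encodes to UTF-8 for anything except sys.stdout. A minimal sketch of the unchanged public API (file name is hypothetical):

    import sys

    from PIL import Image, PSDraw

    with Image.open("example.jpg") as im:
        ps = PSDraw.PSDraw(sys.stdout)  # str to stdout, bytes to real files
        ps.begin_document()
        ps.image((72, 72, 540, 720), im, dpi=150)  # box is in points (1/72 inch)
        ps.end_document()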
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/PaletteFile.py b/server/www/packages/packages-windows/x86/PIL/PaletteFile.py
    index 9ed69d6..73f1b4b 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PaletteFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PaletteFile.py
    @@ -15,11 +15,11 @@
     
     from ._binary import o8
     
    -
     ##
     # File handler for Teragon-style palette files.
     
    -class PaletteFile(object):
    +
    +class PaletteFile:
     
         rawmode = "RGB"
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py
    index 7d7b165..804ece3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py
    @@ -10,9 +10,8 @@
     from . import Image, ImageFile
     from ._binary import o8, o16be as o16b
     
    -__version__ = "1.0"
    -
    -_Palm8BitColormapValues = (
    +# fmt: off
    +_Palm8BitColormapValues = (  # noqa: E131
         (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255),
         (255,  51, 255), (255,   0, 255), (255, 255, 204), (255, 204, 204),
         (255, 153, 204), (255, 102, 204), (255,  51, 204), (255,   0, 204),
    @@ -77,6 +76,7 @@ _Palm8BitColormapValues = (
           (0,   0,   0),   (0,   0,   0),   (0,   0,   0),   (0,   0,   0),
           (0,   0,   0),   (0,   0,   0),   (0,   0,   0),   (0,   0,   0),
           (0,   0,   0),   (0,   0,   0),   (0,   0,   0),   (0,   0,   0))
    +# fmt: on
     
     
     # so build a prototype image to be used for palette resampling
    @@ -86,7 +86,7 @@ def build_prototype_image():
         palettedata = ()
         for colormapValue in _Palm8BitColormapValues:
             palettedata += colormapValue
    -    palettedata += (0, 0, 0)*(256 - len(_Palm8BitColormapValues))
    +    palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues))
         image.putpalette(palettedata)
         return image
     
    @@ -98,17 +98,9 @@ Palm8BitColormapImage = build_prototype_image()
     #
     # --------------------------------------------------------------------
     
    -_FLAGS = {
    -    "custom-colormap": 0x4000,
    -    "is-compressed":   0x8000,
    -    "has-transparent": 0x2000,
    -    }
    +_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000}
     
    -_COMPRESSION_TYPES = {
    -    "none":     0xFF,
    -    "rle":      0x01,
    -    "scanline": 0x00,
    -    }
    +_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00}
     
     
     #
    @@ -117,6 +109,7 @@ _COMPRESSION_TYPES = {
     ##
     # (Internal) Image save plugin for the Palm format.
     
    +
     def _save(im, fp, filename):
     
         if im.mode == "P":
    @@ -128,28 +121,24 @@ def _save(im, fp, filename):
             bpp = 8
             version = 1
     
    -    elif (im.mode == "L" and
    -          "bpp" in im.encoderinfo and
    -          im.encoderinfo["bpp"] in (1, 2, 4)):
    +    elif im.mode == "L":
    +        if im.encoderinfo.get("bpp") in (1, 2, 4):
+            # this is 8-bit grayscale, so we shift it to get the high-order
+            # bits, and invert it because Palm does greyscale from
+            # white (0) to black (1)
    +            bpp = im.encoderinfo["bpp"]
    +            im = im.point(
    +                lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift)
    +            )
    +        elif im.info.get("bpp") in (1, 2, 4):
    +            # here we assume that even though the inherent mode is 8-bit grayscale,
    +            # only the lower bpp bits are significant.
    +            # We invert them to match the Palm.
    +            bpp = im.info["bpp"]
    +            im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval))
    +        else:
    +            raise OSError("cannot write mode %s as Palm" % im.mode)
     
    -        # this is 8-bit grayscale, so we shift it to get the high-order bits,
    -        # and invert it because
    -        # Palm does greyscale from white (0) to black (1)
    -        bpp = im.encoderinfo["bpp"]
    -        im = im.point(
    -            lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift))
    -        # we ignore the palette here
    -        im.mode = "P"
    -        rawmode = "P;" + str(bpp)
    -        version = 1
    -
    -    elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4):
    -
    -        # here we assume that even though the inherent mode is 8-bit grayscale,
    -        # only the lower bpp bits are significant.
    -        # We invert them to match the Palm.
    -        bpp = im.info["bpp"]
    -        im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval))
             # we ignore the palette here
             im.mode = "P"
             rawmode = "P;" + str(bpp)
    @@ -164,7 +153,7 @@ def _save(im, fp, filename):
     
         else:
     
    -        raise IOError("cannot write mode %s as Palm" % im.mode)
    +        raise OSError("cannot write mode %s as Palm" % im.mode)
     
         #
         # make sure image data is available
    @@ -175,7 +164,7 @@ def _save(im, fp, filename):
         cols = im.size[0]
         rows = im.size[1]
     
    -    rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2
    +    rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2
         transparent_index = 0
         compression_type = _COMPRESSION_TYPES["none"]
     
    @@ -199,7 +188,7 @@ def _save(im, fp, filename):
         fp.write(o16b(offset))
         fp.write(o8(transparent_index))
         fp.write(o8(compression_type))
    -    fp.write(o16b(0))   # reserved by Palm
    +    fp.write(o16b(0))  # reserved by Palm
     
         # now write colormap if necessary
     
    @@ -207,20 +196,21 @@ def _save(im, fp, filename):
             fp.write(o16b(256))
             for i in range(256):
                 fp.write(o8(i))
    -            if colormapmode == 'RGB':
    +            if colormapmode == "RGB":
                     fp.write(
    -                    o8(colormap[3 * i]) +
    -                    o8(colormap[3 * i + 1]) +
    -                    o8(colormap[3 * i + 2]))
    -            elif colormapmode == 'RGBA':
    +                    o8(colormap[3 * i])
    +                    + o8(colormap[3 * i + 1])
    +                    + o8(colormap[3 * i + 2])
    +                )
    +            elif colormapmode == "RGBA":
                     fp.write(
    -                    o8(colormap[4 * i]) +
    -                    o8(colormap[4 * i + 1]) +
    -                    o8(colormap[4 * i + 2]))
    +                    o8(colormap[4 * i])
    +                    + o8(colormap[4 * i + 1])
    +                    + o8(colormap[4 * i + 2])
    +                )
     
         # now convert data to raw form
    -    ImageFile._save(
    -        im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))])
     
         if hasattr(fp, "flush"):
             fp.flush()
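The restructured "L" branch above takes bpp from encoderinfo first, then from im.info, shifting and inverting the pixels because Palm greyscale runs from white (0) to black (maximum). A sketch (file names are hypothetical):

    from PIL import Image

    im = Image.open("photo.png").convert("L")
    im.save("out.palm", format="Palm", bpp=4)  # 4-bit inverted greyscale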
    diff --git a/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py
    index fa95b50..625f556 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py
    @@ -18,14 +18,12 @@
     from . import Image, ImageFile
     from ._binary import i8
     
    -__version__ = "0.1"
    -
    -
     ##
     # Image plugin for PhotoCD images.  This plugin only reads the 768x512
     # image from the file; higher resolutions are encoded in a proprietary
     # encoding.
     
    +
     class PcdImageFile(ImageFile.ImageFile):
     
         format = "PCD"
    @@ -48,14 +46,14 @@ class PcdImageFile(ImageFile.ImageFile):
                 self.tile_post_rotate = -90
     
             self.mode = "RGB"
    -        self.size = 768, 512  # FIXME: not correct for rotated images!
    -        self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)]
    +        self._size = 768, 512  # FIXME: not correct for rotated images!
    +        self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)]
     
         def load_end(self):
             if self.tile_post_rotate:
                 # Handle rotated PCDs
                 self.im = self.im.rotate(self.tile_post_rotate)
    -            self.size = self.im.size
    +            self._size = self.im.size
     
     
     #
    diff --git a/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py b/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py
    index eba85fe..c463533 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py
    @@ -16,50 +16,55 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import Image, FontFile
    -from ._binary import i8, i16le as l16, i32le as l32, i16be as b16, i32be as b32
    +import io
    +
    +from . import FontFile, Image
    +from ._binary import i8, i16be as b16, i16le as l16, i32be as b32, i32le as l32
     
     # --------------------------------------------------------------------
     # declarations
     
     PCF_MAGIC = 0x70636601  # "\x01fcp"
     
    -PCF_PROPERTIES = (1 << 0)
    -PCF_ACCELERATORS = (1 << 1)
    -PCF_METRICS = (1 << 2)
    -PCF_BITMAPS = (1 << 3)
    -PCF_INK_METRICS = (1 << 4)
    -PCF_BDF_ENCODINGS = (1 << 5)
    -PCF_SWIDTHS = (1 << 6)
    -PCF_GLYPH_NAMES = (1 << 7)
    -PCF_BDF_ACCELERATORS = (1 << 8)
    +PCF_PROPERTIES = 1 << 0
    +PCF_ACCELERATORS = 1 << 1
    +PCF_METRICS = 1 << 2
    +PCF_BITMAPS = 1 << 3
    +PCF_INK_METRICS = 1 << 4
    +PCF_BDF_ENCODINGS = 1 << 5
    +PCF_SWIDTHS = 1 << 6
    +PCF_GLYPH_NAMES = 1 << 7
    +PCF_BDF_ACCELERATORS = 1 << 8
     
     BYTES_PER_ROW = [
    -    lambda bits: ((bits+7) >> 3),
    -    lambda bits: ((bits+15) >> 3) & ~1,
    -    lambda bits: ((bits+31) >> 3) & ~3,
    -    lambda bits: ((bits+63) >> 3) & ~7,
    +    lambda bits: ((bits + 7) >> 3),
    +    lambda bits: ((bits + 15) >> 3) & ~1,
    +    lambda bits: ((bits + 31) >> 3) & ~3,
    +    lambda bits: ((bits + 63) >> 3) & ~7,
     ]
     
     
     def sz(s, o):
    -    return s[o:s.index(b"\0", o)]
    +    return s[o : s.index(b"\0", o)]
     
     
     ##
     # Font file plugin for the X11 PCF format.
     
    +
     class PcfFontFile(FontFile.FontFile):
     
         name = "name"
     
    -    def __init__(self, fp):
    +    def __init__(self, fp, charset_encoding="iso8859-1"):
    +
    +        self.charset_encoding = charset_encoding
     
             magic = l32(fp.read(4))
             if magic != PCF_MAGIC:
                 raise SyntaxError("not a PCF file")
     
    -        FontFile.FontFile.__init__(self)
    +        super().__init__()
     
             count = l32(fp.read(4))
             self.toc = {}
    @@ -82,7 +87,7 @@ class PcfFontFile(FontFile.FontFile):
                 ix = encoding[ch]
                 if ix is not None:
                     x, y, l, r, w, a, d, f = metrics[ix]
    -                glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix]
    +                glyph = (w, 0), (l, d - y, x + l, d), (0, 0, x, y), bitmaps[ix]
                     self.glyph[ch] = glyph
     
         def _getformat(self, tag):
    @@ -117,7 +122,7 @@ class PcfFontFile(FontFile.FontFile):
             for i in range(nprops):
                 p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))))
             if nprops & 3:
    -            fp.seek(4 - (nprops & 3), 1)  # pad
    +            fp.seek(4 - (nprops & 3), io.SEEK_CUR)  # pad
     
             data = fp.read(i32(fp.read(4)))
     
    @@ -140,7 +145,7 @@ class PcfFontFile(FontFile.FontFile):
     
             append = metrics.append
     
    -        if (format & 0xff00) == 0x100:
    +        if (format & 0xFF00) == 0x100:
     
                 # "compressed" metrics
                 for i in range(i16(fp.read(2))):
    @@ -151,10 +156,7 @@ class PcfFontFile(FontFile.FontFile):
                     descent = i8(fp.read(1)) - 128
                     xsize = right - left
                     ysize = ascent + descent
    -                append(
    -                    (xsize, ysize, left, right, width,
    -                     ascent, descent, 0)
    -                    )
    +                append((xsize, ysize, left, right, width, ascent, descent, 0))
     
             else:
     
    @@ -168,10 +170,7 @@ class PcfFontFile(FontFile.FontFile):
                     attributes = i16(fp.read(2))
                     xsize = right - left
                     ysize = ascent + descent
    -                append(
    -                    (xsize, ysize, left, right, width,
    -                     ascent, descent, attributes)
    -                    )
    +                append((xsize, ysize, left, right, width, ascent, descent, attributes))
     
             return metrics
     
    @@ -187,7 +186,7 @@ class PcfFontFile(FontFile.FontFile):
             nbitmaps = i32(fp.read(4))
     
             if nbitmaps != len(metrics):
    -            raise IOError("Wrong number of bitmaps")
    +            raise OSError("Wrong number of bitmaps")
     
             offsets = []
             for i in range(nbitmaps):
    @@ -198,7 +197,7 @@ class PcfFontFile(FontFile.FontFile):
                 bitmapSizes.append(i32(fp.read(4)))
     
             # byteorder = format & 4  # non-zero => MSB
    -        bitorder = format & 8   # non-zero => MSB
    +        bitorder = format & 8  # non-zero => MSB
             padindex = format & 3
     
             bitmapsize = bitmapSizes[padindex]
    @@ -213,10 +212,8 @@ class PcfFontFile(FontFile.FontFile):
     
             for i in range(nbitmaps):
                 x, y, l, r, w, a, d, f = metrics[i]
    -            b, e = offsets[i], offsets[i+1]
    -            bitmaps.append(
    -                Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))
    -                )
    +            b, e = offsets[i], offsets[i + 1]
    +            bitmaps.append(Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)))
     
             return bitmaps
     
    @@ -230,16 +227,21 @@ class PcfFontFile(FontFile.FontFile):
             firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2))
             firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2))
     
    -        default = i16(fp.read(2))
    +        i16(fp.read(2))  # default
     
             nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1)
     
    -        for i in range(nencoding):
    -            encodingOffset = i16(fp.read(2))
    -            if encodingOffset != 0xFFFF:
    -                try:
    -                    encoding[i+firstCol] = encodingOffset
    -                except IndexError:
    -                    break  # only load ISO-8859-1 glyphs
    +        encodingOffsets = [i16(fp.read(2)) for _ in range(nencoding)]
    +
    +        for i in range(firstCol, len(encoding)):
    +            try:
    +                encodingOffset = encodingOffsets[
    +                    ord(bytearray([i]).decode(self.charset_encoding))
    +                ]
    +                if encodingOffset != 0xFFFF:
    +                    encoding[i] = encodingOffset
    +            except UnicodeDecodeError:
     +                # character is not supported in the selected encoding
    +                pass
     
             return encoding
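
Two functional changes sit in this file among the reformatting: `__init__` gains a `charset_encoding` parameter (defaulting to the old ISO-8859-1 behaviour), and `_load_encoding` now maps code points through that encoding instead of silently stopping at index 255. The `BYTES_PER_ROW` table is unchanged in substance; entry 1, for example, still rounds a 10-bit row up to `((10 + 15) >> 3) & ~1 = 2` bytes, i.e. one 16-bit unit. A sketch of the new parameter in use (the font file name and encoding are illustrative assumptions):

    from PIL import PcfFontFile

    with open("terminus.pcf", "rb") as fp:      # hypothetical PCF font file
        font = PcfFontFile.PcfFontFile(fp, charset_encoding="cp1252")
        font.save("terminus")                   # FontFile.save writes .pil/.pbm
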
    diff --git a/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py
    index 564713a..6cf10de 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py
    @@ -25,14 +25,14 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import io
     import logging
    +
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, i16le as i16, o8, o16le as o16
     
     logger = logging.getLogger(__name__)
     
    -__version__ = "0.6"
    -
     
     def _accept(prefix):
         return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5]
    @@ -41,6 +41,7 @@ def _accept(prefix):
     ##
     # Image plugin for Paintbrush images.
     
    +
     class PcxImageFile(ImageFile.ImageFile):
     
         format = "PCX"
    @@ -54,7 +55,7 @@ class PcxImageFile(ImageFile.ImageFile):
                 raise SyntaxError("not a PCX file")
     
             # image
    -        bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1
    +        bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1
             if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]:
                 raise SyntaxError("bad PCX image size")
             logger.debug("BBox: %s %s %s %s", *bbox)
    @@ -64,8 +65,13 @@ class PcxImageFile(ImageFile.ImageFile):
             bits = i8(s[3])
             planes = i8(s[65])
             stride = i16(s, 66)
    -        logger.debug("PCX version %s, bits %s, planes %s, stride %s",
    -                     version, bits, planes, stride)
    +        logger.debug(
    +            "PCX version %s, bits %s, planes %s, stride %s",
    +            version,
    +            bits,
    +            planes,
    +            stride,
    +        )
     
             self.info["dpi"] = i16(s, 12), i16(s, 14)
     
    @@ -80,12 +86,12 @@ class PcxImageFile(ImageFile.ImageFile):
             elif version == 5 and bits == 8 and planes == 1:
                 mode = rawmode = "L"
                 # FIXME: hey, this doesn't work with the incremental loader !!!
    -            self.fp.seek(-769, 2)
    +            self.fp.seek(-769, io.SEEK_END)
                 s = self.fp.read(769)
                 if len(s) == 769 and i8(s[0]) == 12:
                     # check if the palette is linear greyscale
                     for i in range(256):
    -                    if s[i*3+1:i*3+4] != o8(i)*3:
    +                    if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3:
                             mode = rawmode = "P"
                             break
                     if mode == "P":
    @@ -97,16 +103,17 @@ class PcxImageFile(ImageFile.ImageFile):
                 rawmode = "RGB;L"
     
             else:
    -            raise IOError("unknown PCX mode")
    +            raise OSError("unknown PCX mode")
     
             self.mode = mode
    -        self.size = bbox[2]-bbox[0], bbox[3]-bbox[1]
    +        self._size = bbox[2] - bbox[0], bbox[3] - bbox[1]
     
             bbox = (0, 0) + self.size
             logger.debug("size: %sx%s", *self.size)
     
             self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))]
     
    +
     # --------------------------------------------------------------------
     # save PCX files
     
    @@ -135,8 +142,12 @@ def _save(im, fp, filename):
         # Ideally it should be passed in in the state, but the bytes value
         # gets overwritten.
     
    -    logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d",
    -                 im.size[0], bits, stride)
    +    logger.debug(
    +        "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d",
    +        im.size[0],
    +        bits,
    +        stride,
    +    )
     
         # under windows, we could determine the current screen size with
         # "Image.core.display_mode()[1]", but I think that's overkill...
    @@ -147,17 +158,30 @@ def _save(im, fp, filename):
     
         # PCX header
         fp.write(
    -        o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) +
    -        o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) +
    -        o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) +
    -        o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) +
    -        b"\0"*54
    -        )
    +        o8(10)
    +        + o8(version)
    +        + o8(1)
    +        + o8(bits)
    +        + o16(0)
    +        + o16(0)
    +        + o16(im.size[0] - 1)
    +        + o16(im.size[1] - 1)
    +        + o16(dpi[0])
    +        + o16(dpi[1])
    +        + b"\0" * 24
    +        + b"\xFF" * 24
    +        + b"\0"
    +        + o8(planes)
    +        + o16(stride)
    +        + o16(1)
    +        + o16(screen[0])
    +        + o16(screen[1])
    +        + b"\0" * 54
    +    )
     
         assert fp.tell() == 128
     
    -    ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0,
    -                              (rawmode, bits*planes))])
    +    ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))])
     
         if im.mode == "P":
             # colour palette
    @@ -167,7 +191,8 @@ def _save(im, fp, filename):
             # greyscale palette
             fp.write(o8(12))
             for i in range(256):
    -            fp.write(o8(i)*3)
    +            fp.write(o8(i) * 3)
    +
     
     # --------------------------------------------------------------------
     # registry
    @@ -177,3 +202,5 @@ Image.register_open(PcxImageFile.format, PcxImageFile, _accept)
     Image.register_save(PcxImageFile.format, _save)
     
     Image.register_extension(PcxImageFile.format, ".pcx")
    +
    +Image.register_mime(PcxImageFile.format, "image/x-pcx")
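
Besides the reformatting, this file swaps `IOError` for its Python 3 alias `OSError` and registers an `image/x-pcx` MIME type. A quick round trip through the public API (the file name is a placeholder):

    from PIL import Image

    im = Image.new("P", (64, 64))      # paletted image exercises the palette branch of _save
    im.save("sample.pcx")

    with Image.open("sample.pcx") as reloaded:
        assert reloaded.format == "PCX"
        assert reloaded.size == (64, 64)
    print(Image.MIME["PCX"])           # -> "image/x-pcx" once registered
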
    diff --git a/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py
    index 8538bcd..47500ba 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py
    @@ -20,11 +20,11 @@
     # Image plugin for PDF images (output only).
     ##
     
    -from . import Image, ImageFile, ImageSequence, PdfParser
     import io
    +import os
    +import time
     
    -__version__ = "0.5"
    -
    +from . import Image, ImageFile, ImageSequence, PdfParser, __version__
     
     #
     # --------------------------------------------------------------------
    @@ -44,33 +44,32 @@ def _save_all(im, fp, filename):
     ##
     # (Internal) Image save plugin for the PDF format.
     
    -def _save(im, fp, filename, save_all=False):
    -    resolution = im.encoderinfo.get("resolution", 72.0)
    -    is_appending = im.encoderinfo.get("append", False)
    -    title = im.encoderinfo.get("title", None)
    -    author = im.encoderinfo.get("author", None)
    -    subject = im.encoderinfo.get("subject", None)
    -    keywords = im.encoderinfo.get("keywords", None)
    -    creator = im.encoderinfo.get("creator", None)
    -    producer = im.encoderinfo.get("producer", None)
     
    +def _save(im, fp, filename, save_all=False):
    +    is_appending = im.encoderinfo.get("append", False)
         if is_appending:
             existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b")
         else:
             existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")
     
    -    if title:
    -        existing_pdf.info.Title = title
    -    if author:
    -        existing_pdf.info.Author = author
    -    if subject:
    -        existing_pdf.info.Subject = subject
    -    if keywords:
    -        existing_pdf.info.Keywords = keywords
    -    if creator:
    -        existing_pdf.info.Creator = creator
    -    if producer:
    -        existing_pdf.info.Producer = producer
    +    resolution = im.encoderinfo.get("resolution", 72.0)
    +
    +    info = {
    +        "title": None
    +        if is_appending
    +        else os.path.splitext(os.path.basename(filename))[0],
    +        "author": None,
    +        "subject": None,
    +        "keywords": None,
    +        "creator": None,
    +        "producer": None,
    +        "creationDate": None if is_appending else time.gmtime(),
    +        "modDate": None if is_appending else time.gmtime(),
    +    }
    +    for k, default in info.items():
    +        v = im.encoderinfo.get(k) if k in im.encoderinfo else default
    +        if v:
    +            existing_pdf.info[k[0].upper() + k[1:]] = v
     
         #
         # make sure image data is available
    @@ -78,7 +77,7 @@ def _save(im, fp, filename, save_all=False):
     
         existing_pdf.start_writing()
         existing_pdf.write_header()
    -    existing_pdf.write_comment("created by PIL PDF driver " + __version__)
    +    existing_pdf.write_comment("created by Pillow {} PDF driver".format(__version__))
     
         #
         # pages
    @@ -98,7 +97,8 @@ def _save(im, fp, filename, save_all=False):
                 try:
                     im_numberOfPages = im.n_frames
                 except AttributeError:
    -                # Image format does not have n_frames. It is a single frame image
     +                # Image format does not have n_frames.
     +                # It is a single-frame image.
                     pass
             numberOfPages += im_numberOfPages
             for i in range(im_numberOfPages):
    @@ -115,9 +115,9 @@ def _save(im, fp, filename, save_all=False):
         for imSequence in ims:
             im_pages = ImageSequence.Iterator(imSequence) if save_all else [imSequence]
             for im in im_pages:
    -            # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits)
    -            # or LZWDecode (tiff/lzw compression).  Note that PDF 1.2 also supports
    -            # Flatedecode (zip compression).
     +            # FIXME: Should replace ASCIIHexDecode with RunLengthDecode
     +            # (packbits) or LZWDecode (tiff/lzw compression).  Note that
     +            # PDF 1.2 also supports FlateDecode (zip compression).
     
                 bits = 8
                 params = None
    @@ -135,7 +135,12 @@ def _save(im, fp, filename, save_all=False):
                 elif im.mode == "P":
                     filter = "ASCIIHexDecode"
                     palette = im.im.getpalette("RGB")
    -                colorspace = [PdfParser.PdfName("Indexed"), PdfParser.PdfName("DeviceRGB"), 255, PdfParser.PdfBinary(palette)]
    +                colorspace = [
    +                    PdfParser.PdfName("Indexed"),
    +                    PdfParser.PdfName("DeviceRGB"),
    +                    255,
    +                    PdfParser.PdfBinary(palette),
    +                ]
                     procset = "ImageI"  # indexed color
                 elif im.mode == "RGB":
                     filter = "DCTDecode"
    @@ -158,15 +163,15 @@ def _save(im, fp, filename, save_all=False):
                         # FIXME: the hex encoder doesn't support packed 1-bit
                         # images; do things the hard way...
                         data = im.tobytes("raw", "1")
    -                    im = Image.new("L", (len(data), 1), None)
    +                    im = Image.new("L", im.size)
                         im.putdata(data)
    -                ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)])
    +                ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)])
                 elif filter == "DCTDecode":
                     Image.SAVE["JPEG"](im, op, filename)
                 elif filter == "FlateDecode":
    -                ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)])
    +                ImageFile._save(im, op, [("zip", (0, 0) + im.size, 0, im.mode)])
                 elif filter == "RunLengthDecode":
    -                ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)])
    +                ImageFile._save(im, op, [("packbits", (0, 0) + im.size, 0, im.mode)])
                 else:
                     raise ValueError("unsupported PDF filter (%s)" % filter)
     
    @@ -175,7 +180,9 @@ def _save(im, fp, filename, save_all=False):
     
                 width, height = im.size
     
    -            existing_pdf.write_obj(image_refs[pageNumber], stream=op.getvalue(),
    +            existing_pdf.write_obj(
    +                image_refs[pageNumber],
    +                stream=op.getvalue(),
                     Type=PdfParser.PdfName("XObject"),
                     Subtype=PdfParser.PdfName("Image"),
                     Width=width,  # * 72.0 / resolution,
    @@ -183,26 +190,34 @@ def _save(im, fp, filename, save_all=False):
                     Filter=PdfParser.PdfName(filter),
                     BitsPerComponent=bits,
                     DecodeParams=params,
    -                ColorSpace=colorspace)
    +                ColorSpace=colorspace,
    +            )
     
                 #
                 # page
     
    -            existing_pdf.write_page(page_refs[pageNumber],
    +            existing_pdf.write_page(
    +                page_refs[pageNumber],
                     Resources=PdfParser.PdfDict(
                         ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)],
    -                    XObject=PdfParser.PdfDict(image=image_refs[pageNumber])),
    -                MediaBox=[0, 0, int(width * 72.0 / resolution), int(height * 72.0 / resolution)],
    -                Contents=contents_refs[pageNumber]
    -                )
    +                    XObject=PdfParser.PdfDict(image=image_refs[pageNumber]),
    +                ),
    +                MediaBox=[
    +                    0,
    +                    0,
    +                    int(width * 72.0 / resolution),
    +                    int(height * 72.0 / resolution),
    +                ],
    +                Contents=contents_refs[pageNumber],
    +            )
     
                 #
                 # page contents
     
    -            page_contents = PdfParser.make_bytes(
    -                "q %d 0 0 %d 0 0 cm /image Do Q\n" % (
    -                    int(width * 72.0 / resolution),
    -                    int(height * 72.0 / resolution)))
    +            page_contents = b"q %d 0 0 %d 0 0 cm /image Do Q\n" % (
    +                int(width * 72.0 / resolution),
    +                int(height * 72.0 / resolution),
    +            )
     
                 existing_pdf.write_obj(contents_refs[pageNumber], stream=page_contents)
     
    @@ -215,6 +230,7 @@ def _save(im, fp, filename, save_all=False):
             fp.flush()
         existing_pdf.close()
     
    +
     #
     # --------------------------------------------------------------------
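
The `_save` rewrite above replaces the per-key `if` chains with a defaults table: `title`, `author`, `subject`, `keywords`, `creator`, `producer`, `creationDate` and `modDate` are all read from `Image.save(...)` keyword arguments, and a non-appending save now defaults the title to the output file's base name and both dates to the current UTC time. A sketch (file name and metadata values are placeholders):

    import time
    from PIL import Image

    im = Image.new("RGB", (100, 100))
    im.save(
        "report.pdf",
        title="Quarterly report",       # overrides the file-name default
        author="Example Author",
        creationDate=time.gmtime(0),    # struct_time; pdf_repr emits (D:19700101000000Z)
    )
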
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/PdfParser.py b/server/www/packages/packages-windows/x86/PIL/PdfParser.py
    index c0635ef..fdb35ed 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PdfParser.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PdfParser.py
    @@ -1,86 +1,75 @@
    +import calendar
     import codecs
     import collections
     import mmap
     import os
     import re
    +import time
     import zlib
    -from ._util import py3
    -
    -try:
    -    from UserDict import UserDict  # Python 2.x
    -except ImportError:
    -    UserDict = collections.UserDict  # Python 3.x
     
     
    -if py3:  # Python 3.x
    -    def make_bytes(s):
    -        return s.encode("us-ascii")
    -else:  # Python 2.x
    -    def make_bytes(s):  # pragma: no cover
    -        return s        # pragma: no cover
    -
    -
    -# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set on page 656
    +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set
    +# on page 656
     def encode_text(s):
         return codecs.BOM_UTF16_BE + s.encode("utf_16_be")
     
     
     PDFDocEncoding = {
    -    0x16: u"\u0017",
    -    0x18: u"\u02D8",
    -    0x19: u"\u02C7",
    -    0x1A: u"\u02C6",
    -    0x1B: u"\u02D9",
    -    0x1C: u"\u02DD",
    -    0x1D: u"\u02DB",
    -    0x1E: u"\u02DA",
    -    0x1F: u"\u02DC",
    -    0x80: u"\u2022",
    -    0x81: u"\u2020",
    -    0x82: u"\u2021",
    -    0x83: u"\u2026",
    -    0x84: u"\u2014",
    -    0x85: u"\u2013",
    -    0x86: u"\u0192",
    -    0x87: u"\u2044",
    -    0x88: u"\u2039",
    -    0x89: u"\u203A",
    -    0x8A: u"\u2212",
    -    0x8B: u"\u2030",
    -    0x8C: u"\u201E",
    -    0x8D: u"\u201C",
    -    0x8E: u"\u201D",
    -    0x8F: u"\u2018",
    -    0x90: u"\u2019",
    -    0x91: u"\u201A",
    -    0x92: u"\u2122",
    -    0x93: u"\uFB01",
    -    0x94: u"\uFB02",
    -    0x95: u"\u0141",
    -    0x96: u"\u0152",
    -    0x97: u"\u0160",
    -    0x98: u"\u0178",
    -    0x99: u"\u017D",
    -    0x9A: u"\u0131",
    -    0x9B: u"\u0142",
    -    0x9C: u"\u0153",
    -    0x9D: u"\u0161",
    -    0x9E: u"\u017E",
    -    0xA0: u"\u20AC",
    +    0x16: "\u0017",
    +    0x18: "\u02D8",
    +    0x19: "\u02C7",
    +    0x1A: "\u02C6",
    +    0x1B: "\u02D9",
    +    0x1C: "\u02DD",
    +    0x1D: "\u02DB",
    +    0x1E: "\u02DA",
    +    0x1F: "\u02DC",
    +    0x80: "\u2022",
    +    0x81: "\u2020",
    +    0x82: "\u2021",
    +    0x83: "\u2026",
    +    0x84: "\u2014",
    +    0x85: "\u2013",
    +    0x86: "\u0192",
    +    0x87: "\u2044",
    +    0x88: "\u2039",
    +    0x89: "\u203A",
    +    0x8A: "\u2212",
    +    0x8B: "\u2030",
    +    0x8C: "\u201E",
    +    0x8D: "\u201C",
    +    0x8E: "\u201D",
    +    0x8F: "\u2018",
    +    0x90: "\u2019",
    +    0x91: "\u201A",
    +    0x92: "\u2122",
    +    0x93: "\uFB01",
    +    0x94: "\uFB02",
    +    0x95: "\u0141",
    +    0x96: "\u0152",
    +    0x97: "\u0160",
    +    0x98: "\u0178",
    +    0x99: "\u017D",
    +    0x9A: "\u0131",
    +    0x9B: "\u0142",
    +    0x9C: "\u0153",
    +    0x9D: "\u0161",
    +    0x9E: "\u017E",
    +    0xA0: "\u20AC",
     }
     
     
     def decode_text(b):
    -    if b[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
    -        return b[len(codecs.BOM_UTF16_BE):].decode("utf_16_be")
    -    elif py3:  # Python 3.x
    +    if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
    +        return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be")
    +    else:
             return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b)
    -    else:  # Python 2.x
    -        return u"".join(PDFDocEncoding.get(ord(byte), byte) for byte in b)
     
     
     class PdfFormatError(RuntimeError):
    -    """An error that probably indicates a syntactic or semantic error in the PDF file structure"""
    +    """An error that probably indicates a syntactic or semantic error in the
    +    PDF file structure"""
    +
         pass
     
     
    @@ -89,7 +78,9 @@ def check_format_condition(condition, error_message):
             raise PdfFormatError(error_message)
     
     
    -class IndirectReference(collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])):
    +class IndirectReference(
    +    collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])
    +):
         def __str__(self):
             return "%s %s R" % self
     
    @@ -97,7 +88,11 @@ class IndirectReference(collections.namedtuple("IndirectReferenceTuple", ["objec
             return self.__str__().encode("us-ascii")
     
         def __eq__(self, other):
    -        return other.__class__ is self.__class__ and other.object_id == self.object_id and other.generation == self.generation
    +        return (
    +            other.__class__ is self.__class__
    +            and other.object_id == self.object_id
    +            and other.generation == self.generation
    +        )
     
         def __ne__(self, other):
             return not (self == other)
    @@ -113,9 +108,9 @@ class IndirectObjectDef(IndirectReference):
     
     class XrefTable:
         def __init__(self):
    -        self.existing_entries = {}          # object ID => (offset, generation)
    -        self.new_entries = {}               # object ID => (offset, generation)
    -        self.deleted_entries = {0: 65536}   # object ID => generation
    +        self.existing_entries = {}  # object ID => (offset, generation)
    +        self.new_entries = {}  # object ID => (offset, generation)
    +        self.deleted_entries = {0: 65536}  # object ID => generation
             self.reading_finished = False
     
         def __setitem__(self, key, value):
    @@ -143,16 +138,24 @@ class XrefTable:
             elif key in self.deleted_entries:
                 generation = self.deleted_entries[key]
             else:
    -            raise IndexError("object ID " + str(key) + " cannot be deleted because it doesn't exist")
    +            raise IndexError(
    +                "object ID " + str(key) + " cannot be deleted because it doesn't exist"
    +            )
     
         def __contains__(self, key):
             return key in self.existing_entries or key in self.new_entries
     
         def __len__(self):
    -        return len(set(self.existing_entries.keys()) | set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
    +        return len(
    +            set(self.existing_entries.keys())
    +            | set(self.new_entries.keys())
    +            | set(self.deleted_entries.keys())
    +        )
     
         def keys(self):
    -        return (set(self.existing_entries.keys()) - set(self.deleted_entries.keys())) | set(self.new_entries.keys())
    +        return (
    +            set(self.existing_entries.keys()) - set(self.deleted_entries.keys())
    +        ) | set(self.new_entries.keys())
     
         def write(self, f):
             keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
    @@ -163,7 +166,7 @@ class XrefTable:
                 # find a contiguous sequence of object IDs
                 prev = None
                 for index, key in enumerate(keys):
    -                if prev is None or prev+1 == key:
    +                if prev is None or prev + 1 == key:
                         prev = key
                     else:
                         contiguous_keys = keys[:index]
    @@ -172,21 +175,25 @@ class XrefTable:
                 else:
                     contiguous_keys = keys
                     keys = None
    -            f.write(make_bytes("%d %d\n" % (contiguous_keys[0], len(contiguous_keys))))
    +            f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys)))
                 for object_id in contiguous_keys:
                     if object_id in self.new_entries:
    -                    f.write(make_bytes("%010d %05d n \n" % self.new_entries[object_id]))
    +                    f.write(b"%010d %05d n \n" % self.new_entries[object_id])
                     else:
                         this_deleted_object_id = deleted_keys.pop(0)
    -                    check_format_condition(object_id == this_deleted_object_id,
    -                                           "expected the next deleted object "
    -                                           "ID to be %s, instead found %s" %
    -                                           (object_id, this_deleted_object_id))
    +                    check_format_condition(
    +                        object_id == this_deleted_object_id,
    +                        "expected the next deleted object ID to be %s, instead found %s"
    +                        % (object_id, this_deleted_object_id),
    +                    )
                         try:
                             next_in_linked_list = deleted_keys[0]
                         except IndexError:
                             next_in_linked_list = 0
    -                    f.write(make_bytes("%010d %05d f \n" % (next_in_linked_list, self.deleted_entries[object_id])))
    +                    f.write(
    +                        b"%010d %05d f \n"
    +                        % (next_in_linked_list, self.deleted_entries[object_id])
    +                    )
             return startxref
     
     
    @@ -203,7 +210,9 @@ class PdfName:
             return self.name.decode("us-ascii")
     
         def __eq__(self, other):
    -        return (isinstance(other, PdfName) and other.name == self.name) or other == self.name
    +        return (
    +            isinstance(other, PdfName) and other.name == self.name
    +        ) or other == self.name
     
         def __hash__(self):
             return hash(self.name)
    @@ -215,57 +224,56 @@ class PdfName:
         def from_pdf_stream(cls, data):
             return cls(PdfParser.interpret_name(data))
     
    -    allowed_chars = set(range(33, 127)) - set(ord(c) for c in "#%/()<>[]{}")
    +    allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"}
     
         def __bytes__(self):
             result = bytearray(b"/")
             for b in self.name:
    -            if py3:  # Python 3.x
    -                if b in self.allowed_chars:
    -                    result.append(b)
    -                else:
    -                    result.extend(make_bytes("#%02X" % b))
    -            else:  # Python 2.x
    -                if ord(b) in self.allowed_chars:
    -                    result.append(b)
    -                else:
    -                    result.extend(b"#%02X" % ord(b))
    +            if b in self.allowed_chars:
    +                result.append(b)
    +            else:
    +                result.extend(b"#%02X" % b)
             return bytes(result)
     
    -    __str__ = __bytes__
    -
     
     class PdfArray(list):
         def __bytes__(self):
             return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]"
     
    -    __str__ = __bytes__
     
    -
    -class PdfDict(UserDict):
    +class PdfDict(collections.UserDict):
         def __setattr__(self, key, value):
             if key == "data":
    -            if hasattr(UserDict, "__setattr__"):
    -                UserDict.__setattr__(self, key, value)
    -            else:
    -                self.__dict__[key] = value
    +            collections.UserDict.__setattr__(self, key, value)
             else:
    -            if isinstance(key, str):
    -                key = key.encode("us-ascii")
    -            self[key] = value
    +            self[key.encode("us-ascii")] = value
     
         def __getattr__(self, key):
             try:
    -            value = self[key]
    +            value = self[key.encode("us-ascii")]
             except KeyError:
    -            try:
    -                value = self[key.encode("us-ascii")]
    -            except KeyError:
    -                raise AttributeError(key)
    +            raise AttributeError(key)
             if isinstance(value, bytes):
    -            return decode_text(value)
    -        else:
    -            return value
    +            value = decode_text(value)
    +        if key.endswith("Date"):
    +            if value.startswith("D:"):
    +                value = value[2:]
    +
    +            relationship = "Z"
    +            if len(value) > 17:
    +                relationship = value[14]
    +                offset = int(value[15:17]) * 60
    +                if len(value) > 20:
    +                    offset += int(value[18:20])
    +
    +            format = "%Y%m%d%H%M%S"[: len(value) - 2]
    +            value = time.strptime(value[: len(format) + 2], format)
    +            if relationship in ["+", "-"]:
    +                offset *= 60
    +                if relationship == "+":
    +                    offset *= -1
    +                value = time.gmtime(calendar.timegm(value) + offset)
    +        return value
     
         def __bytes__(self):
             out = bytearray(b"<<")
    @@ -280,20 +288,13 @@ class PdfDict(UserDict):
             out.extend(b"\n>>")
             return bytes(out)
     
    -    if not py3:
    -        __str__ = __bytes__
    -
     
     class PdfBinary:
         def __init__(self, data):
             self.data = data
     
    -    if py3:  # Python 3.x
    -        def __bytes__(self):
    -            return make_bytes("<%s>" % "".join("%02X" % b for b in self.data))
    -    else:  # Python 2.x
    -        def __str__(self):
    -            return "<%s>" % "".join("%02X" % ord(b) for b in self.data)
    +    def __bytes__(self):
    +        return b"<%s>" % b"".join(b"%02X" % b for b in self.data)
     
     
     class PdfStream:
    @@ -313,7 +314,9 @@ class PdfStream:
                     expected_length = self.dictionary.Length
                 return zlib.decompress(self.buf, bufsize=int(expected_length))
             else:
    -            raise NotImplementedError("stream filter %s unknown/unsupported" % repr(self.dictionary.Filter))
    +            raise NotImplementedError(
    +                "stream filter %s unknown/unsupported" % repr(self.dictionary.Filter)
    +            )
     
     
     def pdf_repr(x):
    @@ -323,29 +326,35 @@ def pdf_repr(x):
             return b"false"
         elif x is None:
             return b"null"
    -    elif isinstance(x, PdfName) or isinstance(x, PdfDict) or isinstance(x, PdfArray) or isinstance(x, PdfBinary):
    +    elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)):
             return bytes(x)
         elif isinstance(x, int):
             return str(x).encode("us-ascii")
    +    elif isinstance(x, time.struct_time):
    +        return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")"
         elif isinstance(x, dict):
             return bytes(PdfDict(x))
         elif isinstance(x, list):
             return bytes(PdfArray(x))
    -    elif (py3 and isinstance(x, str)) or (not py3 and isinstance(x, unicode)):
    +    elif isinstance(x, str):
             return pdf_repr(encode_text(x))
         elif isinstance(x, bytes):
    -        return b"(" + x.replace(b"\\", b"\\\\").replace(b"(", b"\\(").replace(b")", b"\\)") + b")"  # XXX escape more chars? handle binary garbage
    +        # XXX escape more chars? handle binary garbage
    +        x = x.replace(b"\\", b"\\\\")
    +        x = x.replace(b"(", b"\\(")
    +        x = x.replace(b")", b"\\)")
    +        return b"(" + x + b")"
         else:
             return bytes(x)
     
     
     class PdfParser:
    -    """Based on https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf
    +    """Based on
    +    https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf
         Supports PDF up to 1.4
         """
     
         def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"):
    -        # type: (PdfParser, str, file, Union[bytes, bytearray], int, str) -> None
             if buf and f:
                 raise RuntimeError("specify buf or f or filename, but not both buf and f")
             self.filename = filename
    @@ -414,20 +423,20 @@ class PdfParser:
             self.f.write(b"%PDF-1.4\n")
     
         def write_comment(self, s):
    -        self.f.write(("%% %s\n" % (s,)).encode("utf-8"))
    +        self.f.write(("% {}\n".format(s)).encode("utf-8"))
     
         def write_catalog(self):
             self.del_root()
             self.root_ref = self.next_object_id(self.f.tell())
             self.pages_ref = self.next_object_id(0)
             self.rewrite_pages()
    -        self.write_obj(self.root_ref,
    -            Type=PdfName(b"Catalog"),
    -            Pages=self.pages_ref)
    -        self.write_obj(self.pages_ref,
    +        self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref)
    +        self.write_obj(
    +            self.pages_ref,
                 Type=PdfName(b"Pages"),
                 Count=len(self.pages),
    -            Kids=self.pages)
    +            Kids=self.pages,
    +        )
             return self.root_ref
     
         def rewrite_pages(self):
    @@ -473,7 +482,11 @@ class PdfParser:
             if self.info:
                 trailer_dict[b"Info"] = self.info_ref
             self.last_xref_section_offset = start_xref
    -        self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) + make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref))
    +        self.f.write(
    +            b"trailer\n"
    +            + bytes(PdfDict(trailer_dict))
    +            + b"\nstartxref\n%d\n%%%%EOF" % start_xref
    +        )
     
         def write_page(self, ref, *objs, **dict_obj):
             if isinstance(ref, int):
    @@ -535,13 +548,20 @@ class PdfParser:
             else:
                 self.info = PdfDict(self.read_indirect(self.info_ref))
             check_format_condition(b"Type" in self.root, "/Type missing in Root")
    -        check_format_condition(self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog")
    +        check_format_condition(
    +            self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog"
    +        )
             check_format_condition(b"Pages" in self.root, "/Pages missing in Root")
    -        check_format_condition(isinstance(self.root[b"Pages"], IndirectReference), "/Pages in Root is not an indirect reference")
    +        check_format_condition(
    +            isinstance(self.root[b"Pages"], IndirectReference),
    +            "/Pages in Root is not an indirect reference",
    +        )
             self.pages_ref = self.root[b"Pages"]
             self.page_tree_root = self.read_indirect(self.pages_ref)
             self.pages = self.linearize_page_tree(self.page_tree_root)
    -        # save the original list of page references in case the user modifies, adds or deletes some pages and we need to rewrite the pages and their list
    +        # save the original list of page references
    +        # in case the user modifies, adds or deletes some pages
    +        # and we need to rewrite the pages and their list
             self.orig_pages = self.pages[:]
     
         def next_object_id(self, offset=None):
    @@ -562,10 +582,35 @@ class PdfParser:
         whitespace_mandatory = whitespace + b"+"
         newline_only = br"[\r\n]+"
         newline = whitespace_optional + newline_only + whitespace_optional
    -    re_trailer_end = re.compile(whitespace_mandatory + br"trailer" + whitespace_optional + br"\<\<(.*\>\>)" + newline
    -        + br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL)
    -    re_trailer_prev = re.compile(whitespace_optional + br"trailer" + whitespace_optional + br"\<\<(.*?\>\>)" + newline
    -        + br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional, re.DOTALL)
    +    re_trailer_end = re.compile(
    +        whitespace_mandatory
    +        + br"trailer"
    +        + whitespace_optional
    +        + br"\<\<(.*\>\>)"
    +        + newline
    +        + br"startxref"
    +        + newline
    +        + br"([0-9]+)"
    +        + newline
    +        + br"%%EOF"
    +        + whitespace_optional
    +        + br"$",
    +        re.DOTALL,
    +    )
    +    re_trailer_prev = re.compile(
    +        whitespace_optional
    +        + br"trailer"
    +        + whitespace_optional
    +        + br"\<\<(.*?\>\>)"
    +        + newline
    +        + br"startxref"
    +        + newline
    +        + br"([0-9]+)"
    +        + newline
    +        + br"%%EOF"
    +        + whitespace_optional,
    +        re.DOTALL,
    +    )
     
         def read_trailer(self):
             search_start_offset = len(self.buf) - 16384
    @@ -577,7 +622,7 @@ class PdfParser:
             last_match = m
             while m:
                 last_match = m
    -            m = self.re_trailer_end.search(self.buf, m.start()+16)
    +            m = self.re_trailer_end.search(self.buf, m.start() + 16)
             if not m:
                 m = last_match
             trailer_data = m.group(1)
    @@ -590,16 +635,26 @@ class PdfParser:
     
         def read_prev_trailer(self, xref_section_offset):
             trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset)
    -        m = self.re_trailer_prev.search(self.buf[trailer_offset:trailer_offset+16384])
    +        m = self.re_trailer_prev.search(
    +            self.buf[trailer_offset : trailer_offset + 16384]
    +        )
             check_format_condition(m, "previous trailer not found")
             trailer_data = m.group(1)
    -        check_format_condition(int(m.group(2)) == xref_section_offset, "xref section offset in previous trailer doesn't match what was expected")
    +        check_format_condition(
    +            int(m.group(2)) == xref_section_offset,
    +            "xref section offset in previous trailer doesn't match what was expected",
    +        )
             trailer_dict = self.interpret_trailer(trailer_data)
             if b"Prev" in trailer_dict:
                 self.read_prev_trailer(trailer_dict[b"Prev"])
     
         re_whitespace_optional = re.compile(whitespace_optional)
    -    re_name = re.compile(whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + delimiter_or_ws + br")")
    +    re_name = re.compile(
    +        whitespace_optional
    +        + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?="
    +        + delimiter_or_ws
    +        + br")"
    +    )
         re_dict_start = re.compile(whitespace_optional + br"\<\<")
         re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional)
     
    @@ -611,13 +666,23 @@ class PdfParser:
                 m = cls.re_name.match(trailer_data, offset)
                 if not m:
                     m = cls.re_dict_end.match(trailer_data, offset)
    -                check_format_condition(m and m.end() == len(trailer_data), "name not found in trailer, remaining data: " + repr(trailer_data[offset:]))
    +                check_format_condition(
    +                    m and m.end() == len(trailer_data),
    +                    "name not found in trailer, remaining data: "
    +                    + repr(trailer_data[offset:]),
    +                )
                     break
                 key = cls.interpret_name(m.group(1))
                 value, offset = cls.get_value(trailer_data, m.end())
                 trailer[key] = value
    -        check_format_condition(b"Size" in trailer and isinstance(trailer[b"Size"], int), "/Size not in trailer or not an integer")
    -        check_format_condition(b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), "/Root not in trailer or not an indirect reference")
    +        check_format_condition(
    +            b"Size" in trailer and isinstance(trailer[b"Size"], int),
    +            "/Size not in trailer or not an integer",
    +        )
    +        check_format_condition(
    +            b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference),
    +            "/Root not in trailer or not an indirect reference",
    +        )
             return trailer
     
         re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?")
    @@ -638,18 +703,51 @@ class PdfParser:
         re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")")
         re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")")
         re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")")
    -    re_int = re.compile(whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")")
    -    re_real = re.compile(whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + delimiter_or_ws + br")")
    +    re_int = re.compile(
    +        whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")"
    +    )
    +    re_real = re.compile(
    +        whitespace_optional
    +        + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?="
    +        + delimiter_or_ws
    +        + br")"
    +    )
         re_array_start = re.compile(whitespace_optional + br"\[")
         re_array_end = re.compile(whitespace_optional + br"]")
    -    re_string_hex = re.compile(whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>")
    +    re_string_hex = re.compile(
    +        whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>"
    +    )
         re_string_lit = re.compile(whitespace_optional + br"\(")
    -    re_indirect_reference = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws + br")")
    -    re_indirect_def_start = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" + delimiter_or_ws + br")")
    -    re_indirect_def_end = re.compile(whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")")
    -    re_comment = re.compile(br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*")
    +    re_indirect_reference = re.compile(
    +        whitespace_optional
    +        + br"([-+]?[0-9]+)"
    +        + whitespace_mandatory
    +        + br"([-+]?[0-9]+)"
    +        + whitespace_mandatory
    +        + br"R(?="
    +        + delimiter_or_ws
    +        + br")"
    +    )
    +    re_indirect_def_start = re.compile(
    +        whitespace_optional
    +        + br"([-+]?[0-9]+)"
    +        + whitespace_mandatory
    +        + br"([-+]?[0-9]+)"
    +        + whitespace_mandatory
    +        + br"obj(?="
    +        + delimiter_or_ws
    +        + br")"
    +    )
    +    re_indirect_def_end = re.compile(
    +        whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")"
    +    )
    +    re_comment = re.compile(
    +        br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*"
    +    )
         re_stream_start = re.compile(whitespace_optional + br"stream\r?\n")
    -    re_stream_end = re.compile(whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")")
    +    re_stream_end = re.compile(
    +        whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")"
    +    )
     
         @classmethod
         def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1):
    @@ -660,21 +758,39 @@ class PdfParser:
                 offset = m.end()
             m = cls.re_indirect_def_start.match(data, offset)
             if m:
    -            check_format_condition(int(m.group(1)) > 0, "indirect object definition: object ID must be greater than 0")
    -            check_format_condition(int(m.group(2)) >= 0, "indirect object definition: generation must be non-negative")
    -            check_format_condition(expect_indirect is None or expect_indirect == IndirectReference(int(m.group(1)), int(m.group(2))),
    -                "indirect object definition different than expected")
    -            object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting-1)
    +            check_format_condition(
    +                int(m.group(1)) > 0,
    +                "indirect object definition: object ID must be greater than 0",
    +            )
    +            check_format_condition(
    +                int(m.group(2)) >= 0,
    +                "indirect object definition: generation must be non-negative",
    +            )
    +            check_format_condition(
    +                expect_indirect is None
    +                or expect_indirect
    +                == IndirectReference(int(m.group(1)), int(m.group(2))),
    +                "indirect object definition different than expected",
    +            )
    +            object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1)
                 if offset is None:
                     return object, None
                 m = cls.re_indirect_def_end.match(data, offset)
                 check_format_condition(m, "indirect object definition end not found")
                 return object, m.end()
    -        check_format_condition(not expect_indirect, "indirect object definition not found")
    +        check_format_condition(
    +            not expect_indirect, "indirect object definition not found"
    +        )
             m = cls.re_indirect_reference.match(data, offset)
             if m:
    -            check_format_condition(int(m.group(1)) > 0, "indirect object reference: object ID must be greater than 0")
    -            check_format_condition(int(m.group(2)) >= 0, "indirect object reference: generation must be non-negative")
    +            check_format_condition(
    +                int(m.group(1)) > 0,
    +                "indirect object reference: object ID must be greater than 0",
    +            )
    +            check_format_condition(
    +                int(m.group(2)) >= 0,
    +                "indirect object reference: generation must be non-negative",
    +            )
                 return IndirectReference(int(m.group(1)), int(m.group(2))), m.end()
             m = cls.re_dict_start.match(data, offset)
             if m:
    @@ -682,10 +798,10 @@ class PdfParser:
                 result = {}
                 m = cls.re_dict_end.match(data, offset)
                 while not m:
    -                key, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
    +                key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
                     if offset is None:
                         return result, None
    -                value, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
    +                value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
                     result[key] = value
                     if offset is None:
                         return result, None
    @@ -696,8 +812,11 @@ class PdfParser:
                     try:
                         stream_len = int(result[b"Length"])
                     except (TypeError, KeyError, ValueError):
    -                    raise PdfFormatError("bad or missing Length in stream dict (%r)" % result.get(b"Length", None))
    -                stream_data = data[m.end():m.end() + stream_len]
    +                    raise PdfFormatError(
    +                        "bad or missing Length in stream dict (%r)"
    +                        % result.get(b"Length", None)
    +                    )
    +                stream_data = data[m.end() : m.end() + stream_len]
                     m = cls.re_stream_end.match(data, m.end() + stream_len)
                     check_format_condition(m, "stream end not found")
                     offset = m.end()
    @@ -711,7 +830,7 @@ class PdfParser:
                 result = []
                 m = cls.re_array_end.match(data, offset)
                 while not m:
    -                value, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
    +                value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
                     result.append(value)
                     if offset is None:
                         return result, None
    @@ -734,20 +853,27 @@ class PdfParser:
                 return int(m.group(1)), m.end()
             m = cls.re_real.match(data, offset)
             if m:
    -            return float(m.group(1)), m.end()  # XXX Decimal instead of float???
    +            # XXX Decimal instead of float???
    +            return float(m.group(1)), m.end()
             m = cls.re_string_hex.match(data, offset)
             if m:
    -            hex_string = bytearray([b for b in m.group(1) if b in b"0123456789abcdefABCDEF"])  # filter out whitespace
    +            # filter out whitespace
    +            hex_string = bytearray(
    +                [b for b in m.group(1) if b in b"0123456789abcdefABCDEF"]
    +            )
                 if len(hex_string) % 2 == 1:
    -                hex_string.append(ord(b"0"))  # append a 0 if the length is not even - yes, at the end
    +                # append a 0 if the length is not even - yes, at the end
    +                hex_string.append(ord(b"0"))
                 return bytearray.fromhex(hex_string.decode("us-ascii")), m.end()
             m = cls.re_string_lit.match(data, offset)
             if m:
                 return cls.get_literal_string(data, m.end())
    -        #return None, offset  # fallback (only for debugging)
    -        raise PdfFormatError("unrecognized object: " + repr(data[offset:offset+32]))
    +        # return None, offset  # fallback (only for debugging)
    +        raise PdfFormatError("unrecognized object: " + repr(data[offset : offset + 32]))
     
    -    re_lit_str_token = re.compile(br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))")
    +    re_lit_str_token = re.compile(
    +        br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))"
    +    )
         escaped_chars = {
             b"n": b"\n",
             b"r": b"\r",
    @@ -765,14 +891,14 @@ class PdfParser:
             ord(b"("): b"(",
             ord(b")"): b")",
             ord(b"\\"): b"\\",
    -        }
    +    }
     
         @classmethod
         def get_literal_string(cls, data, offset):
             nesting_depth = 0
             result = bytearray()
             for m in cls.re_lit_str_token.finditer(data, offset):
    -            result.extend(data[offset:m.start()])
    +            result.extend(data[offset : m.start()])
                 if m.group(1):
                     result.extend(cls.escaped_chars[m.group(1)[1]])
                 elif m.group(2):
    @@ -793,24 +919,35 @@ class PdfParser:
             raise PdfFormatError("unfinished literal string")
     
         re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline)
    -    re_xref_subsection_start = re.compile(whitespace_optional + br"([0-9]+)" + whitespace_mandatory + br"([0-9]+)" + whitespace_optional + newline_only)
    +    re_xref_subsection_start = re.compile(
    +        whitespace_optional
    +        + br"([0-9]+)"
    +        + whitespace_mandatory
    +        + br"([0-9]+)"
    +        + whitespace_optional
    +        + newline_only
    +    )
         re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")
     
         def read_xref_table(self, xref_section_offset):
             subsection_found = False
    -        m = self.re_xref_section_start.match(self.buf, xref_section_offset + self.start_offset)
    +        m = self.re_xref_section_start.match(
    +            self.buf, xref_section_offset + self.start_offset
    +        )
             check_format_condition(m, "xref section start not found")
             offset = m.end()
             while True:
                 m = self.re_xref_subsection_start.match(self.buf, offset)
                 if not m:
    -                check_format_condition(subsection_found, "xref subsection start not found")
    +                check_format_condition(
    +                    subsection_found, "xref subsection start not found"
    +                )
                     break
                 subsection_found = True
                 offset = m.end()
                 first_object = int(m.group(1))
                 num_objects = int(m.group(2))
    -            for i in range(first_object, first_object+num_objects):
    +            for i in range(first_object, first_object + num_objects):
                     m = self.re_xref_entry.match(self.buf, offset)
                     check_format_condition(m, "xref entry not found")
                     offset = m.end()
    @@ -818,22 +955,36 @@ class PdfParser:
                     generation = int(m.group(2))
                     if not is_free:
                         new_entry = (int(m.group(1)), generation)
    -                    check_format_condition(i not in self.xref_table or self.xref_table[i] == new_entry, "xref entry duplicated (and not identical)")
    +                    check_format_condition(
    +                        i not in self.xref_table or self.xref_table[i] == new_entry,
    +                        "xref entry duplicated (and not identical)",
    +                    )
                         self.xref_table[i] = new_entry
             return offset
     
         def read_indirect(self, ref, max_nesting=-1):
             offset, generation = self.xref_table[ref[0]]
    -        check_format_condition(generation == ref[1], "expected to find generation %s for object ID %s in xref table, instead found generation %s at offset %s" \
    -            % (ref[1], ref[0], generation, offset))
    -        value = self.get_value(self.buf, offset + self.start_offset, expect_indirect=IndirectReference(*ref), max_nesting=max_nesting)[0]
    +        check_format_condition(
    +            generation == ref[1],
    +            "expected to find generation %s for object ID %s in xref table, "
    +            "instead found generation %s at offset %s"
    +            % (ref[1], ref[0], generation, offset),
    +        )
    +        value = self.get_value(
    +            self.buf,
    +            offset + self.start_offset,
    +            expect_indirect=IndirectReference(*ref),
    +            max_nesting=max_nesting,
    +        )[0]
             self.cached_objects[ref] = value
             return value
     
         def linearize_page_tree(self, node=None):
             if node is None:
                 node = self.page_tree_root
    -        check_format_condition(node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages")
    +        check_format_condition(
    +            node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages"
    +        )
             pages = []
             for kid in node[b"Kids"]:
                 kid_object = self.read_indirect(kid)
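Note on the xref reader above: it consumes fixed-width entries with `re_xref_entry` — a 10-digit byte offset, a 5-digit generation number, an in-use flag, and a two-byte terminator. A standalone sketch of the same parse (regex copied from the code above; the sample table bytes are made up):

```python
import re

# same pattern as PdfParser.re_xref_entry
re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")

# two hypothetical 20-byte entries: the free-list head, then object 1
sample = b"0000000000 65535 f \n0000000017 00000 n \n"

xref_table = {}
offset = 0
for obj_id in range(0, 2):  # first ID and count come from the subsection header
    m = re_xref_entry.match(sample, offset)
    assert m, "xref entry not found"
    offset = m.end()
    if m.group(3) == b"n":  # keep in-use entries, as read_xref_table does
        xref_table[obj_id] = (int(m.group(1)), int(m.group(2)))

print(xref_table)  # {1: (17, 0)}
```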
    diff --git a/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py
    index 220577c..5ea32ba 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py
    @@ -22,12 +22,10 @@
     from . import Image, ImageFile
     from ._binary import i16le as i16
     
    -__version__ = "0.1"
    -
    -
     #
     # helpers
     
    +
     def _accept(prefix):
         return prefix[:4] == b"\200\350\000\000"
     
    @@ -35,6 +33,7 @@ def _accept(prefix):
     ##
     # Image plugin for PIXAR raster images.
     
    +
     class PixarImageFile(ImageFile.ImageFile):
     
         format = "PIXAR"
    @@ -50,7 +49,7 @@ class PixarImageFile(ImageFile.ImageFile):
             # read rest of header
             s = s + self.fp.read(508)
     
    -        self.size = i16(s[418:420]), i16(s[416:418])
    +        self._size = i16(s[418:420]), i16(s[416:418])
     
             # get channel/depth descriptions
             mode = i16(s[424:426]), i16(s[426:428])
    @@ -60,7 +59,7 @@ class PixarImageFile(ImageFile.ImageFile):
             # FIXME: to be continued...
     
             # create tile descriptor (assuming "dumped")
    -        self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))]
    +        self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))]
     
     
     #
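Note on the `self.size` → `self._size` switch here (and in the plugins below): it tracks Pillow's change that made `Image.size` a read-only property, so plugins now assign the backing attribute during `_open()`. A toy sketch of the pattern (not the Pillow classes themselves):

```python
class ImageBase:
    @property
    def size(self):
        # public read-only view over the attribute plugins assign
        return self._size


class ToyImageFile(ImageBase):
    def _open(self, width, height):
        self._size = (width, height)  # as PixarImageFile._open now does


im = ToyImageFile()
im._open(640, 480)
print(im.size)  # (640, 480)
try:
    im.size = (1, 1)
except AttributeError as exc:
    print("read-only:", exc)
```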
    diff --git a/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py
    index 8260619..ee9d52b 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py
    @@ -31,16 +31,15 @@
     # See the README file for information on usage and redistribution.
     #
     
    +import itertools
     import logging
     import re
    -import zlib
     import struct
    +import warnings
    +import zlib
     
    -from . import Image, ImageFile, ImagePalette
    -from ._binary import i8, i16be as i16, i32be as i32, o16be as o16, o32be as o32
    -from ._util import py3
    -
    -__version__ = "0.9"
    +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence
    +from ._binary import i8, i16be as i16, i32be as i32, o8, o16be as o16, o32be as o32
     
     logger = logging.getLogger(__name__)
     
    @@ -52,25 +51,30 @@ _MAGIC = b"\211PNG\r\n\032\n"
     
     _MODES = {
         # supported bits/color combinations, and corresponding modes/rawmodes
    -    (1, 0):  ("1", "1"),
    -    (2, 0):  ("L", "L;2"),
    -    (4, 0):  ("L", "L;4"),
    -    (8, 0):  ("L", "L"),
    +    # Greyscale
    +    (1, 0): ("1", "1"),
    +    (2, 0): ("L", "L;2"),
    +    (4, 0): ("L", "L;4"),
    +    (8, 0): ("L", "L"),
         (16, 0): ("I", "I;16B"),
    -    (8, 2):  ("RGB", "RGB"),
    +    # Truecolour
    +    (8, 2): ("RGB", "RGB"),
         (16, 2): ("RGB", "RGB;16B"),
    -    (1, 3):  ("P", "P;1"),
    -    (2, 3):  ("P", "P;2"),
    -    (4, 3):  ("P", "P;4"),
    -    (8, 3):  ("P", "P"),
    -    (8, 4):  ("LA", "LA"),
    +    # Indexed-colour
    +    (1, 3): ("P", "P;1"),
    +    (2, 3): ("P", "P;2"),
    +    (4, 3): ("P", "P;4"),
    +    (8, 3): ("P", "P"),
    +    # Greyscale with alpha
    +    (8, 4): ("LA", "LA"),
         (16, 4): ("RGBA", "LA;16B"),  # LA;16B->LA not yet available
    -    (8, 6):  ("RGBA", "RGBA"),
    +    # Truecolour with alpha
    +    (8, 6): ("RGBA", "RGBA"),
         (16, 6): ("RGBA", "RGBA;16B"),
     }
     
     
    -_simple_palette = re.compile(b'^\xff*\x00\xff*$')
    +_simple_palette = re.compile(b"^\xff*\x00\xff*$")
     
 # Maximum decompressed size for an iTXt or zTXt chunk.
     # Eliminates decompression bombs where compressed chunks can expand 1000x
    @@ -79,6 +83,16 @@ MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK
     MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK
     
     
    +# APNG frame disposal modes
    +APNG_DISPOSE_OP_NONE = 0
    +APNG_DISPOSE_OP_BACKGROUND = 1
    +APNG_DISPOSE_OP_PREVIOUS = 2
    +
    +# APNG frame blend modes
    +APNG_BLEND_OP_SOURCE = 0
    +APNG_BLEND_OP_OVER = 1
    +
    +
     def _safe_zlib_decompress(s):
         dobj = zlib.decompressobj()
         plaintext = dobj.decompress(s, MAX_TEXT_CHUNK)
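Note on the guard in `_safe_zlib_decompress`: output is capped with the `max_length` argument of `decompressobj().decompress()`, and leftover input in `unconsumed_tail` means the cap was hit. A self-contained sketch of that bomb check (the cap value here is illustrative):

```python
import zlib

MAX_OUT = 64 * 1024  # illustrative stand-in for MAX_TEXT_CHUNK


def safe_decompress(data, limit=MAX_OUT):
    dobj = zlib.decompressobj()
    out = dobj.decompress(data, limit)  # returns at most `limit` bytes
    if dobj.unconsumed_tail:
        # compressed input remains: expanding it would exceed the cap
        raise ValueError("Decompressed Data Too Large")
    return out


bomb = zlib.compress(b"\0" * (10 * MAX_OUT))  # tiny input, huge output
try:
    safe_decompress(bomb)
except ValueError as exc:
    print(exc)
print(len(safe_decompress(zlib.compress(b"hello"))))  # 5
```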
    @@ -88,21 +102,21 @@ def _safe_zlib_decompress(s):
     
     
     def _crc32(data, seed=0):
    -    return zlib.crc32(data, seed) & 0xffffffff
    +    return zlib.crc32(data, seed) & 0xFFFFFFFF
     
     
     # --------------------------------------------------------------------
     # Support classes.  Suitable for PNG and related formats like MNG etc.
     
    -class ChunkStream(object):
     
    +class ChunkStream:
         def __init__(self, fp):
     
             self.fp = fp
             self.queue = []
     
         def read(self):
    -        "Fetch a new chunk. Returns header information."
    +        """Fetch a new chunk. Returns header information."""
             cid = None
     
             if self.queue:
    @@ -134,15 +148,16 @@ class ChunkStream(object):
             self.queue.append((cid, pos, length))
     
         def call(self, cid, pos, length):
    -        "Call the appropriate chunk handler"
    +        """Call the appropriate chunk handler"""
     
             logger.debug("STREAM %r %s %s", cid, pos, length)
    -        return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length)
    +        return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length)
     
         def crc(self, cid, data):
    -        "Read and verify checksum"
    +        """Read and verify checksum"""
     
    -        # Skip CRC checks for ancillary chunks if allowed to load truncated images
    +        # Skip CRC checks for ancillary chunks if allowed to load truncated
    +        # images
             # 5th byte of first char is 1 [specs, section 5.4]
             if ImageFile.LOAD_TRUNCATED_IMAGES and (i8(cid[0]) >> 5 & 1):
                 self.crc_skip(cid, data)
    @@ -152,14 +167,12 @@ class ChunkStream(object):
                 crc1 = _crc32(data, _crc32(cid))
                 crc2 = i32(self.fp.read(4))
                 if crc1 != crc2:
    -                raise SyntaxError("broken PNG file (bad header checksum in %r)"
    -                                  % cid)
    +                raise SyntaxError("broken PNG file (bad header checksum in %r)" % cid)
             except struct.error:
    -            raise SyntaxError("broken PNG file (incomplete checksum in %r)"
    -                              % cid)
    +            raise SyntaxError("broken PNG file (incomplete checksum in %r)" % cid)
     
         def crc_skip(self, cid, data):
    -        "Read checksum.  Used if the C module is not present"
    +        """Read checksum.  Used if the C module is not present"""
     
             self.fp.read(4)
     
    @@ -174,7 +187,7 @@ class ChunkStream(object):
                 try:
                     cid, pos, length = self.read()
                 except struct.error:
    -                raise IOError("truncated PNG file")
    +                raise OSError("truncated PNG file")
     
                 if cid == endchunk:
                     break
    @@ -190,8 +203,9 @@ class iTXt(str):
         keeping their extra information
     
         """
    +
         @staticmethod
    -    def __new__(cls, text, lang, tkey):
    +    def __new__(cls, text, lang=None, tkey=None):
             """
             :param cls: the class to use when creating the instance
             :param text: value for this key
    @@ -205,7 +219,7 @@ class iTXt(str):
             return self
     
     
    -class PngInfo(object):
    +class PngInfo:
         """
         PNG chunk container (for use with save(pnginfo=))
     
    @@ -245,11 +259,12 @@ class PngInfo(object):
                 tkey = tkey.encode("utf-8", "strict")
     
             if zip:
    -            self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" +
    -                     zlib.compress(value))
    +            self.add(
    +                b"iTXt",
    +                key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value),
    +            )
             else:
    -            self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" +
    -                     value)
    +            self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value)
     
         def add_text(self, key, value, zip=False):
             """Appends a text chunk.
    @@ -266,12 +281,12 @@ class PngInfo(object):
             # The tEXt chunk stores latin-1 text
             if not isinstance(value, bytes):
                 try:
    -                value = value.encode('latin-1', 'strict')
    +                value = value.encode("latin-1", "strict")
                 except UnicodeError:
                     return self.add_itxt(key, value, zip=zip)
     
             if not isinstance(key, bytes):
    -            key = key.encode('latin-1', 'strict')
    +            key = key.encode("latin-1", "strict")
     
             if zip:
                 self.add(b"zTXt", key + b"\0\0" + zlib.compress(value))
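Note on `add_text` above: it picks the chunk type from the payload — latin-1-encodable text becomes tEXt (or zTXt with `zip=True`), anything else falls through to `add_itxt`. A usage sketch with the public API (file name hypothetical):

```python
from PIL import Image
from PIL.PngImagePlugin import PngInfo

meta = PngInfo()
meta.add_text("Software", "example-writer")        # latin-1 -> tEXt
meta.add_text("Comment", "compressed", zip=True)   # latin-1 + zip -> zTXt
meta.add_text("Title", "arrow \u2192")             # not latin-1 -> iTXt

im = Image.new("RGB", (8, 8), "red")
im.save("meta.png", pnginfo=meta)

with Image.open("meta.png") as reread:
    print(reread.text)
```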
    @@ -282,11 +297,10 @@ class PngInfo(object):
     # --------------------------------------------------------------------
     # PNG image stream (IHDR/IEND)
     
    +
     class PngStream(ChunkStream):
    -
         def __init__(self, fp):
    -
    -        ChunkStream.__init__(self, fp)
    +        super().__init__(fp)
     
             # local copies of Image attributes
             self.im_info = {}
    @@ -295,14 +309,32 @@ class PngStream(ChunkStream):
             self.im_mode = None
             self.im_tile = None
             self.im_palette = None
    +        self.im_custom_mimetype = None
    +        self.im_n_frames = None
    +        self._seq_num = None
    +        self.rewind_state = None
     
             self.text_memory = 0
     
         def check_text_memory(self, chunklen):
             self.text_memory += chunklen
             if self.text_memory > MAX_TEXT_MEMORY:
    -            raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" %
    -                             self.text_memory)
    +            raise ValueError(
    +                "Too much memory used in text chunks: %s>MAX_TEXT_MEMORY"
    +                % self.text_memory
    +            )
    +
    +    def save_rewind(self):
    +        self.rewind_state = {
    +            "info": self.im_info.copy(),
    +            "tile": self.im_tile,
    +            "seq_num": self._seq_num,
    +        }
    +
    +    def rewind(self):
    +        self.im_info = self.rewind_state["info"]
    +        self.im_tile = self.rewind_state["tile"]
    +        self._seq_num = self.rewind_state["seq_num"]
     
         def chunk_iCCP(self, pos, length):
     
    @@ -318,10 +350,11 @@ class PngStream(ChunkStream):
             logger.debug("Compression method %s", i8(s[i]))
             comp_method = i8(s[i])
             if comp_method != 0:
    -            raise SyntaxError("Unknown compression method %s in iCCP chunk" %
    -                              comp_method)
    +            raise SyntaxError(
    +                "Unknown compression method %s in iCCP chunk" % comp_method
    +            )
             try:
    -            icc_profile = _safe_zlib_decompress(s[i+2:])
    +            icc_profile = _safe_zlib_decompress(s[i + 2 :])
             except ValueError:
                 if ImageFile.LOAD_TRUNCATED_IMAGES:
                     icc_profile = None
    @@ -339,7 +372,7 @@ class PngStream(ChunkStream):
             self.im_size = i32(s), i32(s[4:])
             try:
                 self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))]
    -        except:
    +        except Exception:
                 pass
             if i8(s[12]):
                 self.im_info["interlace"] = 1
    @@ -350,7 +383,13 @@ class PngStream(ChunkStream):
         def chunk_IDAT(self, pos, length):
     
             # image data
    -        self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)]
    +        if "bbox" in self.im_info:
    +            tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)]
    +        else:
    +            if self.im_n_frames is not None:
    +                self.im_info["default_image"] = True
    +            tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)]
    +        self.im_tile = tile
             self.im_idat = length
             raise EOFError
     
    @@ -382,7 +421,7 @@ class PngStream(ChunkStream):
                     # otherwise, we have a byte string with one alpha value
                     # for each palette entry
                     self.im_info["transparency"] = s
    -        elif self.im_mode == "L":
    +        elif self.im_mode in ("1", "L", "I"):
                 self.im_info["transparency"] = i16(s)
             elif self.im_mode == "RGB":
                 self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:])
    @@ -399,8 +438,8 @@ class PngStream(ChunkStream):
             # WP x,y, Red x,y, Green x,y Blue x,y
     
             s = ImageFile._safe_read(self.fp, length)
    -        raw_vals = struct.unpack('>%dI' % (len(s) // 4), s)
    -        self.im_info['chromaticity'] = tuple(elt/100000.0 for elt in raw_vals)
    +        raw_vals = struct.unpack(">%dI" % (len(s) // 4), s)
    +        self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals)
             return s
     
         def chunk_sRGB(self, pos, length):
    @@ -411,7 +450,7 @@ class PngStream(ChunkStream):
             # 3 absolute colorimetric
     
             s = ImageFile._safe_read(self.fp, length)
    -        self.im_info['srgb'] = i8(s)
    +        self.im_info["srgb"] = i8(s)
             return s
     
         def chunk_pHYs(self, pos, length):
    @@ -438,9 +477,8 @@ class PngStream(ChunkStream):
                 k = s
                 v = b""
             if k:
    -            if py3:
    -                k = k.decode('latin-1', 'strict')
    -                v = v.decode('latin-1', 'replace')
    +            k = k.decode("latin-1", "strict")
    +            v = v.decode("latin-1", "replace")
     
                 self.im_info[k] = self.im_text[k] = v
                 self.check_text_memory(len(v))
    @@ -461,8 +499,9 @@ class PngStream(ChunkStream):
             else:
                 comp_method = 0
             if comp_method != 0:
    -            raise SyntaxError("Unknown compression method %s in zTXt chunk" %
    -                              comp_method)
    +            raise SyntaxError(
    +                "Unknown compression method %s in zTXt chunk" % comp_method
    +            )
             try:
                 v = _safe_zlib_decompress(v[1:])
             except ValueError:
    @@ -474,9 +513,8 @@ class PngStream(ChunkStream):
                 v = b""
     
             if k:
    -            if py3:
    -                k = k.decode('latin-1', 'strict')
    -                v = v.decode('latin-1', 'replace')
    +            k = k.decode("latin-1", "strict")
    +            v = v.decode("latin-1", "replace")
     
                 self.im_info[k] = self.im_text[k] = v
                 self.check_text_memory(len(v))
    @@ -511,24 +549,75 @@ class PngStream(ChunkStream):
                         return s
                 else:
                     return s
    -        if py3:
    -            try:
    -                k = k.decode("latin-1", "strict")
    -                lang = lang.decode("utf-8", "strict")
    -                tk = tk.decode("utf-8", "strict")
    -                v = v.decode("utf-8", "strict")
    -            except UnicodeError:
    -                return s
    +        try:
    +            k = k.decode("latin-1", "strict")
    +            lang = lang.decode("utf-8", "strict")
    +            tk = tk.decode("utf-8", "strict")
    +            v = v.decode("utf-8", "strict")
    +        except UnicodeError:
    +            return s
     
             self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk)
             self.check_text_memory(len(v))
     
             return s
     
    +    def chunk_eXIf(self, pos, length):
    +        s = ImageFile._safe_read(self.fp, length)
    +        self.im_info["exif"] = b"Exif\x00\x00" + s
    +        return s
    +
    +    # APNG chunks
    +    def chunk_acTL(self, pos, length):
    +        s = ImageFile._safe_read(self.fp, length)
    +        if self.im_n_frames is not None:
    +            self.im_n_frames = None
    +            warnings.warn("Invalid APNG, will use default PNG image if possible")
    +            return s
    +        n_frames = i32(s)
    +        if n_frames == 0 or n_frames > 0x80000000:
    +            warnings.warn("Invalid APNG, will use default PNG image if possible")
    +            return s
    +        self.im_n_frames = n_frames
    +        self.im_info["loop"] = i32(s[4:])
    +        self.im_custom_mimetype = "image/apng"
    +        return s
    +
    +    def chunk_fcTL(self, pos, length):
    +        s = ImageFile._safe_read(self.fp, length)
    +        seq = i32(s)
    +        if (self._seq_num is None and seq != 0) or (
    +            self._seq_num is not None and self._seq_num != seq - 1
    +        ):
    +            raise SyntaxError("APNG contains frame sequence errors")
    +        self._seq_num = seq
    +        width, height = i32(s[4:]), i32(s[8:])
    +        px, py = i32(s[12:]), i32(s[16:])
    +        im_w, im_h = self.im_size
    +        if px + width > im_w or py + height > im_h:
    +            raise SyntaxError("APNG contains invalid frames")
    +        self.im_info["bbox"] = (px, py, px + width, py + height)
    +        delay_num, delay_den = i16(s[20:]), i16(s[22:])
    +        if delay_den == 0:
    +            delay_den = 100
    +        self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000
    +        self.im_info["disposal"] = i8(s[24])
    +        self.im_info["blend"] = i8(s[25])
    +        return s
    +
    +    def chunk_fdAT(self, pos, length):
    +        s = ImageFile._safe_read(self.fp, 4)
    +        seq = i32(s)
    +        if self._seq_num != seq - 1:
    +            raise SyntaxError("APNG contains frame sequence errors")
    +        self._seq_num = seq
    +        return self.chunk_IDAT(pos + 4, length - 4)
    +
     
     # --------------------------------------------------------------------
     # PNG reader
     
    +
     def _accept(prefix):
         return prefix[:8] == _MAGIC
     
    @@ -536,6 +625,7 @@ def _accept(prefix):
     ##
     # Image plugin for PNG images.
     
    +
     class PngImageFile(ImageFile.ImageFile):
     
         format = "PNG"
    @@ -545,9 +635,11 @@ class PngImageFile(ImageFile.ImageFile):
     
             if self.fp.read(8) != _MAGIC:
                 raise SyntaxError("not a PNG file")
    +        self.__fp = self.fp
    +        self.__frame = 0
     
             #
    -        # Parse headers up to the first IDAT chunk
+        # Parse headers up to the first IDAT or fdAT chunk
     
             self.png = PngStream(self.fp)
     
    @@ -576,19 +668,51 @@ class PngImageFile(ImageFile.ImageFile):
             # (believe me, I've tried ;-)
     
             self.mode = self.png.im_mode
    -        self.size = self.png.im_size
    +        self._size = self.png.im_size
             self.info = self.png.im_info
    -        self.text = self.png.im_text  # experimental
    +        self._text = None
             self.tile = self.png.im_tile
    +        self.custom_mimetype = self.png.im_custom_mimetype
    +        self.n_frames = self.png.im_n_frames or 1
    +        self.default_image = self.info.get("default_image", False)
     
             if self.png.im_palette:
                 rawmode, data = self.png.im_palette
                 self.palette = ImagePalette.raw(rawmode, data)
     
    -        self.__idat = length  # used by load_read()
    +        if cid == b"fdAT":
    +            self.__prepare_idat = length - 4
    +        else:
    +            self.__prepare_idat = length  # used by load_prepare()
    +
    +        if self.png.im_n_frames is not None:
    +            self._close_exclusive_fp_after_loading = False
    +            self.png.save_rewind()
    +            self.__rewind_idat = self.__prepare_idat
    +            self.__rewind = self.__fp.tell()
    +            if self.default_image:
    +                # IDAT chunk contains default image and not first animation frame
    +                self.n_frames += 1
    +            self._seek(0)
    +        self.is_animated = self.n_frames > 1
    +
    +    @property
    +    def text(self):
    +        # experimental
    +        if self._text is None:
+            # iTXt, tEXt and zTXt chunks may appear at the end of the file
    +            # So load the file to ensure that they are read
    +            if self.is_animated:
    +                frame = self.__frame
    +                # for APNG, seek to the final frame before loading
    +                self.seek(self.n_frames - 1)
    +            self.load()
    +            if self.is_animated:
    +                self.seek(frame)
    +        return self._text
     
         def verify(self):
    -        "Verify PNG file"
    +        """Verify PNG file"""
     
             if self.fp is None:
                 raise RuntimeError("verify must be called directly after open")
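Note on the lazy `text` property above: because text chunks may trail the image data, the first access loads the file (seeking to the last frame for APNGs) and then restores the current frame, so callers keep the old one-liner. Sketch (file hypothetical):

```python
from PIL import Image

with Image.open("meta.png") as im:
    # first access triggers load_end(), which gathers trailing chunks
    print(im.text.get("Software"))
    print(im.tell())  # frame position is unchanged for animated files
```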
    @@ -599,18 +723,112 @@ class PngImageFile(ImageFile.ImageFile):
             self.png.verify()
             self.png.close()
     
    +        if self._exclusive_fp:
    +            self.fp.close()
             self.fp = None
     
    +    def seek(self, frame):
    +        if not self._seek_check(frame):
    +            return
    +        if frame < self.__frame:
    +            self._seek(0, True)
    +
    +        last_frame = self.__frame
    +        for f in range(self.__frame + 1, frame + 1):
    +            try:
    +                self._seek(f)
    +            except EOFError:
    +                self.seek(last_frame)
    +                raise EOFError("no more images in APNG file")
    +
    +    def _seek(self, frame, rewind=False):
    +        if frame == 0:
    +            if rewind:
    +                self.__fp.seek(self.__rewind)
    +                self.png.rewind()
    +                self.__prepare_idat = self.__rewind_idat
    +                self.im = None
    +                if self.pyaccess:
    +                    self.pyaccess = None
    +                self.info = self.png.im_info
    +                self.tile = self.png.im_tile
    +                self.fp = self.__fp
    +            self._prev_im = None
    +            self.dispose = None
    +            self.default_image = self.info.get("default_image", False)
    +            self.dispose_op = self.info.get("disposal")
    +            self.blend_op = self.info.get("blend")
    +            self.dispose_extent = self.info.get("bbox")
    +            self.__frame = 0
    +            return
    +        else:
    +            if frame != self.__frame + 1:
    +                raise ValueError("cannot seek to frame %d" % frame)
    +
    +        # ensure previous frame was loaded
    +        self.load()
    +
    +        self.fp = self.__fp
    +
    +        # advance to the next frame
    +        if self.__prepare_idat:
    +            ImageFile._safe_read(self.fp, self.__prepare_idat)
    +            self.__prepare_idat = 0
    +        frame_start = False
    +        while True:
    +            self.fp.read(4)  # CRC
    +
    +            try:
    +                cid, pos, length = self.png.read()
    +            except (struct.error, SyntaxError):
    +                break
    +
    +            if cid == b"IEND":
    +                raise EOFError("No more images in APNG file")
    +            if cid == b"fcTL":
    +                if frame_start:
    +                    # there must be at least one fdAT chunk between fcTL chunks
    +                    raise SyntaxError("APNG missing frame data")
    +                frame_start = True
    +
    +            try:
    +                self.png.call(cid, pos, length)
    +            except UnicodeDecodeError:
    +                break
    +            except EOFError:
    +                if cid == b"fdAT":
    +                    length -= 4
    +                    if frame_start:
    +                        self.__prepare_idat = length
    +                        break
    +                ImageFile._safe_read(self.fp, length)
    +            except AttributeError:
    +                logger.debug("%r %s %s (unknown)", cid, pos, length)
    +                ImageFile._safe_read(self.fp, length)
    +
    +        self.__frame = frame
    +        self.tile = self.png.im_tile
    +        self.dispose_op = self.info.get("disposal")
    +        self.blend_op = self.info.get("blend")
    +        self.dispose_extent = self.info.get("bbox")
    +
    +        if not self.tile:
    +            raise EOFError
    +
    +    def tell(self):
    +        return self.__frame
    +
         def load_prepare(self):
    -        "internal: prepare to read PNG file"
    +        """internal: prepare to read PNG file"""
     
             if self.info.get("interlace"):
                 self.decoderconfig = self.decoderconfig + (1,)
     
    +        self.__idat = self.__prepare_idat  # used by load_read()
             ImageFile.ImageFile.load_prepare(self)
     
         def load_read(self, read_bytes):
    -        "internal: read more image data"
    +        """internal: read more image data"""
     
             while self.__idat == 0:
                 # end of chunk, skip forward to next one
    @@ -619,11 +837,18 @@ class PngImageFile(ImageFile.ImageFile):
     
                 cid, pos, length = self.png.read()
     
    -            if cid not in [b"IDAT", b"DDAT"]:
    +            if cid not in [b"IDAT", b"DDAT", b"fdAT"]:
                     self.png.push(cid, pos, length)
                     return b""
     
    -            self.__idat = length  # empty chunks are allowed
    +            if cid == b"fdAT":
    +                try:
    +                    self.png.call(cid, pos, length)
    +                except EOFError:
    +                    pass
    +                self.__idat = length - 4  # sequence_num has already been read
    +            else:
    +                self.__idat = length  # empty chunks are allowed
     
             # read more data from this chunk
             if read_bytes <= 0:
    @@ -636,10 +861,95 @@ class PngImageFile(ImageFile.ImageFile):
             return self.fp.read(read_bytes)
     
         def load_end(self):
    -        "internal: finished reading image data"
    +        """internal: finished reading image data"""
    +        while True:
    +            self.fp.read(4)  # CRC
     
    -        self.png.close()
    -        self.png = None
    +            try:
    +                cid, pos, length = self.png.read()
    +            except (struct.error, SyntaxError):
    +                break
    +
    +            if cid == b"IEND":
    +                break
    +            elif cid == b"fcTL" and self.is_animated:
    +                # start of the next frame, stop reading
    +                self.__prepare_idat = 0
    +                self.png.push(cid, pos, length)
    +                break
    +
    +            try:
    +                self.png.call(cid, pos, length)
    +            except UnicodeDecodeError:
    +                break
    +            except EOFError:
    +                if cid == b"fdAT":
    +                    length -= 4
    +                ImageFile._safe_read(self.fp, length)
    +            except AttributeError:
    +                logger.debug("%r %s %s (unknown)", cid, pos, length)
    +                ImageFile._safe_read(self.fp, length)
    +        self._text = self.png.im_text
    +        if not self.is_animated:
    +            self.png.close()
    +            self.png = None
    +        else:
    +            # setup frame disposal (actual disposal done when needed in _seek())
    +            if self._prev_im is None and self.dispose_op == APNG_DISPOSE_OP_PREVIOUS:
    +                self.dispose_op = APNG_DISPOSE_OP_BACKGROUND
    +
    +            if self.dispose_op == APNG_DISPOSE_OP_PREVIOUS:
    +                dispose = self._prev_im.copy()
    +                dispose = self._crop(dispose, self.dispose_extent)
    +            elif self.dispose_op == APNG_DISPOSE_OP_BACKGROUND:
    +                dispose = Image.core.fill("RGBA", self.size, (0, 0, 0, 0))
    +                dispose = self._crop(dispose, self.dispose_extent)
    +            else:
    +                dispose = None
    +
    +            if self._prev_im and self.blend_op == APNG_BLEND_OP_OVER:
    +                updated = self._crop(self.im, self.dispose_extent)
    +                self._prev_im.paste(
    +                    updated, self.dispose_extent, updated.convert("RGBA")
    +                )
    +                self.im = self._prev_im
    +                if self.pyaccess:
    +                    self.pyaccess = None
    +            self._prev_im = self.im.copy()
    +
    +            if dispose:
    +                self._prev_im.paste(dispose, self.dispose_extent)
    +
    +    def _getexif(self):
    +        if "exif" not in self.info:
    +            self.load()
    +        if "exif" not in self.info and "Raw profile type exif" not in self.info:
    +            return None
    +        return dict(self.getexif())
    +
    +    def getexif(self):
    +        if "exif" not in self.info:
    +            self.load()
    +
    +        if self._exif is None:
    +            self._exif = Image.Exif()
    +
    +        exif_info = self.info.get("exif")
    +        if exif_info is None and "Raw profile type exif" in self.info:
    +            exif_info = bytes.fromhex(
    +                "".join(self.info["Raw profile type exif"].split("\n")[3:])
    +            )
    +        self._exif.load(exif_info)
    +        return self._exif
    +
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
     
     
     # --------------------------------------------------------------------
    @@ -647,19 +957,20 @@ class PngImageFile(ImageFile.ImageFile):
     
     _OUTMODES = {
         # supported PIL modes, and corresponding rawmodes/bits/color combinations
    -    "1":    ("1",       b'\x01\x00'),
    -    "L;1":  ("L;1",     b'\x01\x00'),
    -    "L;2":  ("L;2",     b'\x02\x00'),
    -    "L;4":  ("L;4",     b'\x04\x00'),
    -    "L":    ("L",       b'\x08\x00'),
    -    "LA":   ("LA",      b'\x08\x04'),
    -    "I":    ("I;16B",   b'\x10\x00'),
    -    "P;1":  ("P;1",     b'\x01\x03'),
    -    "P;2":  ("P;2",     b'\x02\x03'),
    -    "P;4":  ("P;4",     b'\x04\x03'),
    -    "P":    ("P",       b'\x08\x03'),
    -    "RGB":  ("RGB",     b'\x08\x02'),
    -    "RGBA": ("RGBA",    b'\x08\x06'),
    +    "1": ("1", b"\x01\x00"),
    +    "L;1": ("L;1", b"\x01\x00"),
    +    "L;2": ("L;2", b"\x02\x00"),
    +    "L;4": ("L;4", b"\x04\x00"),
    +    "L": ("L", b"\x08\x00"),
    +    "LA": ("LA", b"\x08\x04"),
    +    "I": ("I;16B", b"\x10\x00"),
    +    "I;16": ("I;16B", b"\x10\x00"),
    +    "P;1": ("P;1", b"\x01\x03"),
    +    "P;2": ("P;2", b"\x02\x03"),
    +    "P;4": ("P;4", b"\x04\x03"),
    +    "P": ("P", b"\x08\x03"),
    +    "RGB": ("RGB", b"\x08\x02"),
    +    "RGBA": ("RGBA", b"\x08\x06"),
     }
     
     
    @@ -674,7 +985,7 @@ def putchunk(fp, cid, *data):
         fp.write(o32(crc))
     
     
    -class _idat(object):
    +class _idat:
         # wrap output from the encoder in IDAT chunks
     
         def __init__(self, fp, chunk):
    @@ -685,7 +996,147 @@ class _idat(object):
             self.chunk(self.fp, b"IDAT", data)
     
     
    -def _save(im, fp, filename, chunk=putchunk):
    +class _fdat:
    +    # wrap encoder output in fdAT chunks
    +
    +    def __init__(self, fp, chunk, seq_num):
    +        self.fp = fp
    +        self.chunk = chunk
    +        self.seq_num = seq_num
    +
    +    def write(self, data):
    +        self.chunk(self.fp, b"fdAT", o32(self.seq_num), data)
    +        self.seq_num += 1
    +
    +
    +def _write_multiple_frames(im, fp, chunk, rawmode):
    +    default_image = im.encoderinfo.get("default_image", im.info.get("default_image"))
    +    duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
    +    loop = im.encoderinfo.get("loop", im.info.get("loop", 0))
    +    disposal = im.encoderinfo.get("disposal", im.info.get("disposal"))
    +    blend = im.encoderinfo.get("blend", im.info.get("blend"))
    +
    +    if default_image:
    +        chain = itertools.chain(im.encoderinfo.get("append_images", []))
    +    else:
    +        chain = itertools.chain([im], im.encoderinfo.get("append_images", []))
    +
    +    im_frames = []
    +    frame_count = 0
    +    for im_seq in chain:
    +        for im_frame in ImageSequence.Iterator(im_seq):
    +            im_frame = im_frame.copy()
    +            if im_frame.mode != im.mode:
    +                if im.mode == "P":
    +                    im_frame = im_frame.convert(im.mode, palette=im.palette)
    +                else:
    +                    im_frame = im_frame.convert(im.mode)
    +            encoderinfo = im.encoderinfo.copy()
    +            if isinstance(duration, (list, tuple)):
    +                encoderinfo["duration"] = duration[frame_count]
    +            if isinstance(disposal, (list, tuple)):
    +                encoderinfo["disposal"] = disposal[frame_count]
    +            if isinstance(blend, (list, tuple)):
    +                encoderinfo["blend"] = blend[frame_count]
    +            frame_count += 1
    +
    +            if im_frames:
    +                previous = im_frames[-1]
    +                prev_disposal = previous["encoderinfo"].get("disposal")
    +                prev_blend = previous["encoderinfo"].get("blend")
+                if prev_disposal == APNG_DISPOSE_OP_PREVIOUS and len(im_frames) < 2:
+                    # the first frame cannot dispose to "previous"; fall back
+                    prev_disposal = APNG_DISPOSE_OP_BACKGROUND
    +
    +                if prev_disposal == APNG_DISPOSE_OP_BACKGROUND:
    +                    base_im = previous["im"]
    +                    dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0))
    +                    bbox = previous["bbox"]
    +                    if bbox:
    +                        dispose = dispose.crop(bbox)
    +                    else:
    +                        bbox = (0, 0) + im.size
    +                    base_im.paste(dispose, bbox)
    +                elif prev_disposal == APNG_DISPOSE_OP_PREVIOUS:
    +                    base_im = im_frames[-2]["im"]
    +                else:
    +                    base_im = previous["im"]
    +                delta = ImageChops.subtract_modulo(
    +                    im_frame.convert("RGB"), base_im.convert("RGB")
    +                )
    +                bbox = delta.getbbox()
    +                if (
    +                    not bbox
    +                    and prev_disposal == encoderinfo.get("disposal")
    +                    and prev_blend == encoderinfo.get("blend")
    +                ):
+                    # use a distinct name: the outer `duration` may be a
+                    # per-frame list that later iterations still index
+                    frame_duration = encoderinfo.get("duration", 0)
+                    if frame_duration:
+                        if "duration" in previous["encoderinfo"]:
+                            previous["encoderinfo"]["duration"] += frame_duration
+                        else:
+                            previous["encoderinfo"]["duration"] = frame_duration
    +                    continue
    +            else:
    +                bbox = None
    +            im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
    +
    +    # animation control
+    chunk(
+        fp,
+        b"acTL",
+        o32(len(im_frames)),  # 0: num_frames
+        o32(loop),  # 4: num_plays
+    )
    +
    +    # default image IDAT (if it exists)
    +    if default_image:
    +        ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
    +
    +    seq_num = 0
    +    for frame, frame_data in enumerate(im_frames):
    +        im_frame = frame_data["im"]
    +        if not frame_data["bbox"]:
    +            bbox = (0, 0) + im_frame.size
    +        else:
    +            bbox = frame_data["bbox"]
    +            im_frame = im_frame.crop(bbox)
    +        size = im_frame.size
    +        duration = int(round(frame_data["encoderinfo"].get("duration", 0)))
    +        disposal = frame_data["encoderinfo"].get("disposal", APNG_DISPOSE_OP_NONE)
    +        blend = frame_data["encoderinfo"].get("blend", APNG_BLEND_OP_SOURCE)
    +        # frame control
    +        chunk(
    +            fp,
    +            b"fcTL",
    +            o32(seq_num),  # sequence_number
    +            o32(size[0]),  # width
    +            o32(size[1]),  # height
    +            o32(bbox[0]),  # x_offset
    +            o32(bbox[1]),  # y_offset
    +            o16(duration),  # delay_numerator
    +            o16(1000),  # delay_denominator
    +            o8(disposal),  # dispose_op
    +            o8(blend),  # blend_op
    +        )
    +        seq_num += 1
    +        # frame data
    +        if frame == 0 and not default_image:
    +            # first frame must be in IDAT chunks for backwards compatibility
    +            ImageFile._save(
    +                im_frame,
    +                _idat(fp, chunk),
    +                [("zip", (0, 0) + im_frame.size, 0, rawmode)],
    +            )
    +        else:
    +            fdat_chunks = _fdat(fp, chunk, seq_num)
    +            ImageFile._save(
    +                im_frame, fdat_chunks, [("zip", (0, 0) + im_frame.size, 0, rawmode)],
    +            )
    +            seq_num = fdat_chunks.seq_num
    +
    +
    +def _save_all(im, fp, filename):
    +    _save(im, fp, filename, save_all=True)
    +
    +
    +def _save(im, fp, filename, chunk=putchunk, save_all=False):
         # save an image to disk (called by the save method)
     
         mode = im.mode
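Note: `_save_all` simply re-enters `_save` with `save_all=True`, which routes frame writing through `_write_multiple_frames`, so producing an APNG is one `save` call. A sketch (output path hypothetical; assumes this writer is registered):

```python
from PIL import Image

frames = [Image.new("RGB", (16, 16), c) for c in ("red", "green", "blue")]
frames[0].save(
    "out.apng",
    save_all=True,
    append_images=frames[1:],
    duration=500,  # ms per frame; written as fcTL delay over denominator 1000
    loop=0,        # acTL num_plays; 0 means loop forever
)
```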
    @@ -700,7 +1151,7 @@ def _save(im, fp, filename, chunk=putchunk):
             else:
                 # check palette contents
                 if im.palette:
    -                colors = max(min(len(im.palette.getdata()[1])//3, 256), 2)
    +                colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 2)
                 else:
                     colors = 256
     
    @@ -716,28 +1167,34 @@ def _save(im, fp, filename, chunk=putchunk):
                 mode = "%s;%d" % (mode, bits)
     
         # encoder options
    -    im.encoderconfig = (im.encoderinfo.get("optimize", False),
    -                        im.encoderinfo.get("compress_level", -1),
    -                        im.encoderinfo.get("compress_type", -1),
    -                        im.encoderinfo.get("dictionary", b""))
    +    im.encoderconfig = (
    +        im.encoderinfo.get("optimize", False),
    +        im.encoderinfo.get("compress_level", -1),
    +        im.encoderinfo.get("compress_type", -1),
    +        im.encoderinfo.get("dictionary", b""),
    +    )
     
         # get the corresponding PNG mode
         try:
             rawmode, mode = _OUTMODES[mode]
         except KeyError:
    -        raise IOError("cannot write mode %s as PNG" % mode)
    +        raise OSError("cannot write mode %s as PNG" % mode)
     
         #
         # write minimal PNG file
     
         fp.write(_MAGIC)
     
    -    chunk(fp, b"IHDR",
    -          o32(im.size[0]), o32(im.size[1]),     # 0: size
    -          mode,                                 # 8: depth/type
    -          b'\0',                                # 10: compression
    -          b'\0',                                # 11: filter category
    -          b'\0')                                # 12: interlace flag
    +    chunk(
    +        fp,
    +        b"IHDR",
    +        o32(im.size[0]),  # 0: size
    +        o32(im.size[1]),
    +        mode,  # 8: depth/type
    +        b"\0",  # 10: compression
    +        b"\0",  # 11: filter category
    +        b"\0",  # 12: interlace flag
    +    )
     
         chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
     
    @@ -771,23 +1228,22 @@ def _save(im, fp, filename, chunk=putchunk):
             palette_byte_number = (2 ** bits) * 3
             palette_bytes = im.im.getpalette("RGB")[:palette_byte_number]
             while len(palette_bytes) < palette_byte_number:
    -            palette_bytes += b'\0'
    +            palette_bytes += b"\0"
             chunk(fp, b"PLTE", palette_bytes)
     
    -    transparency = im.encoderinfo.get('transparency',
    -                                      im.info.get('transparency', None))
    +    transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None))
     
         if transparency or transparency == 0:
             if im.mode == "P":
                 # limit to actual palette size
    -            alpha_bytes = 2**bits
    +            alpha_bytes = 2 ** bits
                 if isinstance(transparency, bytes):
                     chunk(fp, b"tRNS", transparency[:alpha_bytes])
                 else:
                     transparency = max(0, min(255, transparency))
    -                alpha = b'\xFF' * transparency + b'\0'
    +                alpha = b"\xFF" * transparency + b"\0"
                     chunk(fp, b"tRNS", alpha[:alpha_bytes])
    -        elif im.mode == "L":
    +        elif im.mode in ("1", "L", "I"):
                 transparency = max(0, min(65535, transparency))
                 chunk(fp, b"tRNS", o16(transparency))
             elif im.mode == "RGB":
    @@ -797,21 +1253,23 @@ def _save(im, fp, filename, chunk=putchunk):
                 if "transparency" in im.encoderinfo:
                     # don't bother with transparency if it's an RGBA
                     # and it's in the info dict. It's probably just stale.
    -                raise IOError("cannot use transparency for this mode")
    +                raise OSError("cannot use transparency for this mode")
         else:
             if im.mode == "P" and im.im.getpalettemode() == "RGBA":
                 alpha = im.im.getpalette("RGBA", "A")
    -            alpha_bytes = 2**bits
    +            alpha_bytes = 2 ** bits
                 chunk(fp, b"tRNS", alpha[:alpha_bytes])
     
         dpi = im.encoderinfo.get("dpi")
         if dpi:
    -        chunk(fp, b"pHYs",
    -              o32(int(dpi[0] / 0.0254 + 0.5)),
    -              o32(int(dpi[1] / 0.0254 + 0.5)),
    -              b'\x01')
    +        chunk(
    +            fp,
    +            b"pHYs",
    +            o32(int(dpi[0] / 0.0254 + 0.5)),
    +            o32(int(dpi[1] / 0.0254 + 0.5)),
    +            b"\x01",
    +        )
     
    -    info = im.encoderinfo.get("pnginfo")
         if info:
             chunks = [b"bKGD", b"hIST"]
             for cid, data in info.chunks:
    @@ -819,8 +1277,18 @@ def _save(im, fp, filename, chunk=putchunk):
                     chunks.remove(cid)
                     chunk(fp, cid, data)
     
    -    ImageFile._save(im, _idat(fp, chunk),
    -                    [("zip", (0, 0)+im.size, 0, rawmode)])
    +    exif = im.encoderinfo.get("exif", im.info.get("exif"))
    +    if exif:
    +        if isinstance(exif, Image.Exif):
    +            exif = exif.tobytes(8)
    +        if exif.startswith(b"Exif\x00\x00"):
    +            exif = exif[6:]
    +        chunk(fp, b"eXIf", exif)
    +
    +    if save_all:
    +        _write_multiple_frames(im, fp, chunk, rawmode)
    +    else:
    +        ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
     
         chunk(fp, b"IEND", b"")
     
    @@ -831,10 +1299,11 @@ def _save(im, fp, filename, chunk=putchunk):
     # --------------------------------------------------------------------
     # PNG chunk converter
     
    +
     def getchunks(im, **params):
         """Return a list of PNG chunks representing this image."""
     
    -    class collector(object):
    +    class collector:
             data = []
     
             def write(self, data):
    @@ -864,7 +1333,8 @@ def getchunks(im, **params):
     
     Image.register_open(PngImageFile.format, PngImageFile, _accept)
     Image.register_save(PngImageFile.format, _save)
    +Image.register_save_all(PngImageFile.format, _save_all)
     
    -Image.register_extension(PngImageFile.format, ".png")
    +Image.register_extensions(PngImageFile.format, [".png", ".apng"])
     
     Image.register_mime(PngImageFile.format, "image/png")
    diff --git a/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py
    index c599ba8..35a77ba 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py
    @@ -17,12 +17,10 @@
     
     from . import Image, ImageFile
     
    -__version__ = "0.2"
    -
     #
     # --------------------------------------------------------------------
     
    -b_whitespace = b'\x20\x09\x0a\x0b\x0c\x0d'
    +b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d"
     
     MODES = {
         # standard
    @@ -34,7 +32,7 @@ MODES = {
         # PIL extensions (for test purposes only)
         b"PyP": "P",
         b"PyRGBA": "RGBA",
    -    b"PyCMYK": "CMYK"
    +    b"PyCMYK": "CMYK",
     }
     
     
    @@ -45,6 +43,7 @@ def _accept(prefix):
     ##
     # Image plugin for PBM, PGM, and PPM images.
     
    +
     class PpmImageFile(ImageFile.ImageFile):
     
         format = "PPM"
    @@ -55,10 +54,10 @@ class PpmImageFile(ImageFile.ImageFile):
                 c = self.fp.read(1)
                 if not c or c in b_whitespace:
                     break
    -            if c > b'\x79':
    +            if c > b"\x79":
                     raise ValueError("Expected ASCII value, found binary")
                 s = s + c
    -            if (len(s) > 9):
    +            if len(s) > 9:
                     raise ValueError("Expected int, got > 9 digits")
             return s
     
    @@ -68,7 +67,14 @@ class PpmImageFile(ImageFile.ImageFile):
             s = self.fp.read(1)
             if s != b"P":
                 raise SyntaxError("not a PPM file")
    -        mode = MODES[self._token(s)]
    +        magic_number = self._token(s)
    +        mode = MODES[magic_number]
    +
    +        self.custom_mimetype = {
    +            b"P4": "image/x-portable-bitmap",
    +            b"P5": "image/x-portable-graymap",
    +            b"P6": "image/x-portable-pixmap",
    +        }.get(magic_number)
     
             if mode == "1":
                 self.mode = "1"
    @@ -97,32 +103,30 @@ class PpmImageFile(ImageFile.ImageFile):
                 elif ix == 2:
                     # maxgrey
                     if s > 255:
    -                    if not mode == 'L':
    +                    if not mode == "L":
                             raise ValueError("Too many colors for band: %s" % s)
    -                    if s < 2**16:
    -                        self.mode = 'I'
    -                        rawmode = 'I;16B'
    +                    if s < 2 ** 16:
    +                        self.mode = "I"
    +                        rawmode = "I;16B"
                         else:
    -                        self.mode = 'I'
    -                        rawmode = 'I;32B'
    +                        self.mode = "I"
    +                        rawmode = "I;32B"
     
    -        self.size = xsize, ysize
    -        self.tile = [("raw",
    -                     (0, 0, xsize, ysize),
    -                     self.fp.tell(),
    -                     (rawmode, 0, 1))]
    +        self._size = xsize, ysize
    +        self.tile = [("raw", (0, 0, xsize, ysize), self.fp.tell(), (rawmode, 0, 1))]
     
     
     #
     # --------------------------------------------------------------------
     
    +
     def _save(im, fp, filename):
         if im.mode == "1":
             rawmode, head = "1;I", b"P4"
         elif im.mode == "L":
             rawmode, head = "L", b"P5"
         elif im.mode == "I":
    -        if im.getextrema()[1] < 2**16:
    +        if im.getextrema()[1] < 2 ** 16:
                 rawmode, head = "I;16B", b"P5"
             else:
                 rawmode, head = "I;32B", b"P5"
    @@ -131,8 +135,8 @@ def _save(im, fp, filename):
         elif im.mode == "RGBA":
             rawmode, head = "RGB", b"P6"
         else:
    -        raise IOError("cannot write mode %s as PPM" % im.mode)
    -    fp.write(head + ("\n%d %d\n" % im.size).encode('ascii'))
    +        raise OSError("cannot write mode %s as PPM" % im.mode)
    +    fp.write(head + ("\n%d %d\n" % im.size).encode("ascii"))
         if head == b"P6":
             fp.write(b"255\n")
         if head == b"P5":
    @@ -142,11 +146,12 @@ def _save(im, fp, filename):
                 fp.write(b"65535\n")
             elif rawmode == "I;32B":
                 fp.write(b"2147483648\n")
    -    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
     
         # ALTERNATIVE: save via builtin debug function
         # im._dump(filename)
     
    +
     #
     # --------------------------------------------------------------------
     
    @@ -154,4 +159,6 @@ def _save(im, fp, filename):
     Image.register_open(PpmImageFile.format, PpmImageFile, _accept)
     Image.register_save(PpmImageFile.format, _save)
     
    -Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm"])
    +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"])
    +
    +Image.register_mime(PpmImageFile.format, "image/x-portable-anymap")
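Note: a raw PPM is just an ASCII header (magic, dimensions, maxval) followed by raw samples, which is what `_save` above emits and `_open` parses back with `_token`. Building one by hand (sketch; path hypothetical):

```python
from PIL import Image

# 2x1 binary PPM (P6): ASCII header, then raw RGB bytes
header = b"P6\n2 1\n255\n"
pixels = bytes([255, 0, 0, 0, 0, 255])  # one red pixel, one blue
with open("tiny.ppm", "wb") as f:
    f.write(header + pixels)

with Image.open("tiny.ppm") as im:
    print(im.format, im.size, im.mode)  # PPM (2, 1) RGB
```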
    diff --git a/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py
    index f6e04f7..cceb85c 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py
    @@ -16,7 +16,7 @@
     # See the README file for information on usage and redistribution.
     #
     
    -__version__ = "0.4"
    +import io
     
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, i16be as i16, i32be as i32
    @@ -31,13 +31,14 @@ MODES = {
         (4, 8): ("CMYK", 4),
         (7, 8): ("L", 1),  # FIXME: multilayer
         (8, 8): ("L", 1),  # duotone
    -    (9, 8): ("LAB", 3)
    +    (9, 8): ("LAB", 3),
     }
     
     
     # --------------------------------------------------------------------.
     # read PSD images
     
    +
     def _accept(prefix):
         return prefix[:4] == b"8BPS"
     
    @@ -45,10 +46,12 @@ def _accept(prefix):
     ##
     # Image plugin for Photoshop images.
     
    +
     class PsdImageFile(ImageFile.ImageFile):
     
         format = "PSD"
         format_description = "Adobe Photoshop"
    +    _close_exclusive_fp_after_loading = False
     
         def _open(self):
     
    @@ -68,10 +71,10 @@ class PsdImageFile(ImageFile.ImageFile):
             mode, channels = MODES[(psd_mode, psd_bits)]
     
             if channels > psd_channels:
    -            raise IOError("not enough channels")
    +            raise OSError("not enough channels")
     
             self.mode = mode
    -        self.size = i32(s[18:]), i32(s[14:])
    +        self._size = i32(s[18:]), i32(s[14:])
     
             #
             # color mode data
    @@ -92,13 +95,13 @@ class PsdImageFile(ImageFile.ImageFile):
                 # load resources
                 end = self.fp.tell() + size
                 while self.fp.tell() < end:
    -                signature = read(4)
    +                read(4)  # signature
                     id = i16(read(2))
                     name = read(i8(read(1)))
                     if not (len(name) & 1):
                         read(1)  # padding
                     data = read(i32(read(4)))
    -                if (len(data) & 1):
    +                if len(data) & 1:
                         read(1)  # padding
                     self.resources.append((id, name, data))
                     if id == 1039:  # ICC profile
    @@ -123,7 +126,7 @@ class PsdImageFile(ImageFile.ImageFile):
             self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
     
             # keep the file open
    -        self._fp = self.fp
    +        self.__fp = self.fp
             self.frame = 1
             self._min_frame = 1
     
    @@ -141,11 +144,11 @@ class PsdImageFile(ImageFile.ImageFile):
     
             # seek to given layer (1..max)
             try:
    -            name, mode, bbox, tile = self.layers[layer-1]
    +            name, mode, bbox, tile = self.layers[layer - 1]
                 self.mode = mode
                 self.tile = tile
                 self.frame = layer
    -            self.fp = self._fp
    +            self.fp = self.__fp
                 return name, bbox
             except IndexError:
                 raise EOFError("no such layer")
    @@ -156,13 +159,21 @@ class PsdImageFile(ImageFile.ImageFile):
     
         def load_prepare(self):
             # create image memory if necessary
    -        if not self.im or\
    -           self.im.mode != self.mode or self.im.size != self.size:
    +        if not self.im or self.im.mode != self.mode or self.im.size != self.size:
                 self.im = Image.core.fill(self.mode, self.size, 0)
             # create palette (optional)
             if self.mode == "P":
                 Image.Image.load(self)
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     def _layerinfo(file):
         # read layerinfo block
    @@ -207,33 +218,31 @@ def _layerinfo(file):
                 mode = None  # unknown
     
             # skip over blend flags and extra information
    -        filler = read(12)
    +        read(12)  # filler
             name = ""
    -        size = i32(read(4))
    +        size = i32(read(4))  # length of the extra data field
             combined = 0
             if size:
    +            data_end = file.tell() + size
    +
                 length = i32(read(4))
                 if length:
    -                mask_y = i32(read(4))
    -                mask_x = i32(read(4))
    -                mask_h = i32(read(4)) - mask_y
    -                mask_w = i32(read(4)) - mask_x
    -                file.seek(length - 16, 1)
    +                file.seek(length - 16, io.SEEK_CUR)
                 combined += length + 4
     
                 length = i32(read(4))
                 if length:
    -                file.seek(length, 1)
    +                file.seek(length, io.SEEK_CUR)
                 combined += length + 4
     
                 length = i8(read(1))
                 if length:
                     # Don't know the proper encoding,
                     # Latin-1 should be a good guess
    -                name = read(length).decode('latin-1', 'replace')
    +                name = read(length).decode("latin-1", "replace")
                 combined += length + 1
     
    -        file.seek(size - combined, 1)
    +            file.seek(data_end)
             layers.append((name, mode, (x0, y0, x1, y1)))
     
         # get tiles
    @@ -271,7 +280,7 @@ def _maketile(file, mode, bbox, channels):
                 if mode == "CMYK":
                     layer += ";I"
                 tile.append(("raw", bbox, offset, layer))
    -            offset = offset + xsize*ysize
    +            offset = offset + xsize * ysize
     
         elif compression == 1:
             #
    @@ -284,11 +293,9 @@ def _maketile(file, mode, bbox, channels):
                 layer = mode[channel]
                 if mode == "CMYK":
                     layer += ";I"
    -            tile.append(
    -                ("packbits", bbox, offset, layer)
    -                )
    +            tile.append(("packbits", bbox, offset, layer))
                 for y in range(ysize):
    -                offset = offset + i16(bytecount[i:i+2])
    +                offset = offset + i16(bytecount[i : i + 2])
                     i += 2
     
         file.seek(offset)
    @@ -298,6 +305,7 @@ def _maketile(file, mode, bbox, channels):
     
         return tile
     
    +
     # --------------------------------------------------------------------
     # registry
     
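Review note: two substantive changes hide in the PsdImagePlugin reformatting. First, the kept-open handle moves from self._fp to self.__fp, opting into class-private name mangling so subclasses cannot clobber it, with the new _close__fp() hook releasing it on close. Second, _layerinfo() now records data_end = file.tell() + size and seeks to that absolute offset, so a misread extra-data block can no longer desynchronize the parser the way the old relative seek(size - combined, 1) could. The mangling in isolation, independent of Pillow:

    # Sketch: double-underscore attributes are stored under mangled names
    # ("_ClassName__attr"), which is the contract _close__fp relies on.
    class Base:
        def __init__(self):
            self.__fp = "handle"    # stored as _Base__fp

    class Child(Base):
        def __init__(self):
            super().__init__()
            self.__fp = "other"     # stored as _Child__fp, no collision

    print(sorted(vars(Child())))    # ['_Base__fp', '_Child__fp']
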
    diff --git a/server/www/packages/packages-windows/x86/PIL/PyAccess.py b/server/www/packages/packages-windows/x86/PIL/PyAccess.py
    index cce2de2..359a949 100644
    --- a/server/www/packages/packages-windows/x86/PIL/PyAccess.py
    +++ b/server/www/packages/packages-windows/x86/PIL/PyAccess.py
    @@ -25,7 +25,6 @@ import sys
     
     from cffi import FFI
     
    -
     logger = logging.getLogger(__name__)
     
     
    @@ -41,18 +40,19 @@ ffi = FFI()
     ffi.cdef(defs)
     
     
    -class PyAccess(object):
    -
    +class PyAccess:
         def __init__(self, img, readonly=False):
             vals = dict(img.im.unsafe_ptrs)
             self.readonly = readonly
    -        self.image8 = ffi.cast('unsigned char **', vals['image8'])
    -        self.image32 = ffi.cast('int **', vals['image32'])
    -        self.image = ffi.cast('unsigned char **', vals['image'])
    +        self.image8 = ffi.cast("unsigned char **", vals["image8"])
    +        self.image32 = ffi.cast("int **", vals["image32"])
    +        self.image = ffi.cast("unsigned char **", vals["image"])
             self.xsize, self.ysize = img.im.size
     
             # Keep pointer to im object to prevent dereferencing.
             self._im = img.im
    +        if self._im.mode == "P":
    +            self._palette = img.palette
     
             # Debugging is polluting test traces, only useful here
             # when hacking on PyAccess
    @@ -73,8 +73,22 @@ class PyAccess(object):
             :param color: The pixel value.
             """
             if self.readonly:
    -            raise ValueError('Attempt to putpixel a read only image')
    -        (x, y) = self.check_xy(xy)
    +            raise ValueError("Attempt to putpixel a read only image")
    +        (x, y) = xy
    +        if x < 0:
    +            x = self.xsize + x
    +        if y < 0:
    +            y = self.ysize + y
    +        (x, y) = self.check_xy((x, y))
    +
    +        if (
    +            self._im.mode == "P"
    +            and isinstance(color, (list, tuple))
    +            and len(color) in [3, 4]
    +        ):
    +            # RGB or RGBA value for a P image
    +            color = self._palette.getcolor(color)
    +
             return self.set_pixel(x, y, color)
     
         def __getitem__(self, xy):
    @@ -88,8 +102,12 @@ class PyAccess(object):
             :returns: a pixel value for single band images, a tuple of
               pixel values for multiband images.
             """
    -
    -        (x, y) = self.check_xy(xy)
    +        (x, y) = xy
    +        if x < 0:
    +            x = self.xsize + x
    +        if y < 0:
    +            y = self.ysize + y
    +        (x, y) = self.check_xy((x, y))
             return self.get_pixel(x, y)
     
         putpixel = __setitem__
    @@ -98,12 +116,13 @@ class PyAccess(object):
         def check_xy(self, xy):
             (x, y) = xy
             if not (0 <= x < self.xsize and 0 <= y < self.ysize):
    -            raise ValueError('pixel location out of range')
    +            raise ValueError("pixel location out of range")
             return xy
     
     
     class _PyAccess32_2(PyAccess):
         """ PA, LA, stored in first and last bytes of a 32 bit word """
    +
         def _post_init(self, *args, **kwargs):
             self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
     
    @@ -139,6 +158,7 @@ class _PyAccess32_3(PyAccess):
     
     class _PyAccess32_4(PyAccess):
         """ RGBA etc, all 4 bytes of a 32 bit word """
    +
         def _post_init(self, *args, **kwargs):
             self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
     
    @@ -157,6 +177,7 @@ class _PyAccess32_4(PyAccess):
     
     class _PyAccess8(PyAccess):
         """ 1, L, P, 8 bit images stored as uint8 """
    +
         def _post_init(self, *args, **kwargs):
             self.pixels = self.image8
     
    @@ -174,8 +195,9 @@ class _PyAccess8(PyAccess):
     
     class _PyAccessI16_N(PyAccess):
         """ I;16 access, native bitendian without conversion """
    +
         def _post_init(self, *args, **kwargs):
    -        self.pixels = ffi.cast('unsigned short **', self.image)
    +        self.pixels = ffi.cast("unsigned short **", self.image)
     
         def get_pixel(self, x, y):
             return self.pixels[y][x]
    @@ -191,8 +213,9 @@ class _PyAccessI16_N(PyAccess):
     
     class _PyAccessI16_L(PyAccess):
         """ I;16L access, with conversion """
    +
         def _post_init(self, *args, **kwargs):
    -        self.pixels = ffi.cast('struct Pixel_I16 **', self.image)
    +        self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
     
         def get_pixel(self, x, y):
             pixel = self.pixels[y][x]
    @@ -205,14 +228,15 @@ class _PyAccessI16_L(PyAccess):
             except TypeError:
                 color = min(color[0], 65535)
     
    -        pixel.l = color & 0xFF
    +        pixel.l = color & 0xFF  # noqa: E741
             pixel.r = color >> 8
     
     
     class _PyAccessI16_B(PyAccess):
         """ I;16B access, with conversion """
    +
         def _post_init(self, *args, **kwargs):
    -        self.pixels = ffi.cast('struct Pixel_I16 **', self.image)
    +        self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
     
         def get_pixel(self, x, y):
             pixel = self.pixels[y][x]
    @@ -222,15 +246,16 @@ class _PyAccessI16_B(PyAccess):
             pixel = self.pixels[y][x]
             try:
                 color = min(color, 65535)
    -        except:
    +        except Exception:
                 color = min(color[0], 65535)
     
    -        pixel.l = color >> 8
    +        pixel.l = color >> 8  # noqa: E741
             pixel.r = color & 0xFF
     
     
     class _PyAccessI32_N(PyAccess):
         """ Signed Int32 access, native endian """
    +
         def _post_init(self, *args, **kwargs):
             self.pixels = self.image32
     
    @@ -243,15 +268,15 @@ class _PyAccessI32_N(PyAccess):
     
     class _PyAccessI32_Swap(PyAccess):
         """ I;32L/B access, with byteswapping conversion """
    +
         def _post_init(self, *args, **kwargs):
             self.pixels = self.image32
     
         def reverse(self, i):
    -        orig = ffi.new('int *', i)
    -        chars = ffi.cast('unsigned char *', orig)
    -        chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \
    -            chars[1], chars[0]
    -        return ffi.cast('int *', chars)[0]
    +        orig = ffi.new("int *", i)
    +        chars = ffi.cast("unsigned char *", orig)
    +        chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0]
    +        return ffi.cast("int *", chars)[0]
     
         def get_pixel(self, x, y):
             return self.reverse(self.pixels[y][x])
    @@ -262,8 +287,9 @@ class _PyAccessI32_Swap(PyAccess):
     
     class _PyAccessF(PyAccess):
         """ 32 bit float access """
    +
         def _post_init(self, *args, **kwargs):
    -        self.pixels = ffi.cast('float **', self.image32)
    +        self.pixels = ffi.cast("float **", self.image32)
     
         def get_pixel(self, x, y):
             return self.pixels[y][x]
    @@ -277,38 +303,39 @@ class _PyAccessF(PyAccess):
                 self.pixels[y][x] = color[0]
     
     
    -mode_map = {'1': _PyAccess8,
    -            'L': _PyAccess8,
    -            'P': _PyAccess8,
    -            'LA': _PyAccess32_2,
    -            'La': _PyAccess32_2,
    -            'PA': _PyAccess32_2,
    -            'RGB': _PyAccess32_3,
    -            'LAB': _PyAccess32_3,
    -            'HSV': _PyAccess32_3,
    -            'YCbCr': _PyAccess32_3,
    -            'RGBA': _PyAccess32_4,
    -            'RGBa': _PyAccess32_4,
    -            'RGBX': _PyAccess32_4,
    -            'CMYK': _PyAccess32_4,
    -            'F': _PyAccessF,
    -            'I': _PyAccessI32_N,
    -            }
    +mode_map = {
    +    "1": _PyAccess8,
    +    "L": _PyAccess8,
    +    "P": _PyAccess8,
    +    "LA": _PyAccess32_2,
    +    "La": _PyAccess32_2,
    +    "PA": _PyAccess32_2,
    +    "RGB": _PyAccess32_3,
    +    "LAB": _PyAccess32_3,
    +    "HSV": _PyAccess32_3,
    +    "YCbCr": _PyAccess32_3,
    +    "RGBA": _PyAccess32_4,
    +    "RGBa": _PyAccess32_4,
    +    "RGBX": _PyAccess32_4,
    +    "CMYK": _PyAccess32_4,
    +    "F": _PyAccessF,
    +    "I": _PyAccessI32_N,
    +}
     
    -if sys.byteorder == 'little':
    -    mode_map['I;16'] = _PyAccessI16_N
    -    mode_map['I;16L'] = _PyAccessI16_N
    -    mode_map['I;16B'] = _PyAccessI16_B
    +if sys.byteorder == "little":
    +    mode_map["I;16"] = _PyAccessI16_N
    +    mode_map["I;16L"] = _PyAccessI16_N
    +    mode_map["I;16B"] = _PyAccessI16_B
     
    -    mode_map['I;32L'] = _PyAccessI32_N
    -    mode_map['I;32B'] = _PyAccessI32_Swap
    +    mode_map["I;32L"] = _PyAccessI32_N
    +    mode_map["I;32B"] = _PyAccessI32_Swap
     else:
    -    mode_map['I;16'] = _PyAccessI16_L
    -    mode_map['I;16L'] = _PyAccessI16_L
    -    mode_map['I;16B'] = _PyAccessI16_N
    +    mode_map["I;16"] = _PyAccessI16_L
    +    mode_map["I;16L"] = _PyAccessI16_L
    +    mode_map["I;16B"] = _PyAccessI16_N
     
    -    mode_map['I;32L'] = _PyAccessI32_Swap
    -    mode_map['I;32B'] = _PyAccessI32_N
    +    mode_map["I;32L"] = _PyAccessI32_Swap
    +    mode_map["I;32B"] = _PyAccessI32_N
     
     
     def new(img, readonly=False):
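
Review note: beyond quoting style, the PyAccess hunks change behaviour in two ways: __setitem__/__getitem__ now wrap negative coordinates once from the right/bottom edge before the bounds check, and writing an RGB(A) tuple to a "P" image resolves it through the image palette via _palette.getcolor(). A sketch of the negative-index behaviour, assuming a build where im.load() returns a PyAccess object (e.g. the cffi path used on PyPy):

    # Sketch: negative pixel coordinates count from the far edge.
    from PIL import Image

    im = Image.new("RGB", (4, 4))
    px = im.load()
    px[-1, -1] = (255, 0, 0)   # with this patch: same pixel as px[3, 3]
    print(px[3, 3])            # (255, 0, 0)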
    diff --git a/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py
    index ef0f40e..ddd3de3 100644
    --- a/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py
    @@ -22,14 +22,11 @@
     #
     
     
    -from . import Image, ImageFile
    -from ._binary import i8, o8, i16be as i16
    -from ._util import py3
    -import struct
     import os
    +import struct
     
    -
    -__version__ = "0.3"
    +from . import Image, ImageFile
    +from ._binary import i8, i16be as i16, o8
     
     
     def _accept(prefix):
    @@ -44,7 +41,7 @@ MODES = {
         (1, 3, 3): "RGB",
         (2, 3, 3): "RGB;16B",
         (1, 3, 4): "RGBA",
    -    (2, 3, 4): "RGBA;16B"
    +    (2, 3, 4): "RGBA;16B",
     }
     
     
    @@ -96,8 +93,10 @@ class SgiImageFile(ImageFile.ImageFile):
             if rawmode == "":
                 raise ValueError("Unsupported SGI image mode")
     
    -        self.size = xsize, ysize
    +        self._size = xsize, ysize
             self.mode = rawmode.split(";")[0]
    +        if self.mode == "RGB":
    +            self.custom_mimetype = "image/rgb"
     
             # orientation -1 : scanlines begins at the bottom-left corner
             orientation = -1
    @@ -106,19 +105,21 @@ class SgiImageFile(ImageFile.ImageFile):
             if compression == 0:
                 pagesize = xsize * ysize * bpc
                 if bpc == 2:
    -                self.tile = [("SGI16", (0, 0) + self.size,
    -                              headlen, (self.mode, 0, orientation))]
    +                self.tile = [
    +                    ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation))
    +                ]
                 else:
                     self.tile = []
                     offset = headlen
                     for layer in self.mode:
                         self.tile.append(
    -                        ("raw", (0, 0) + self.size,
    -                            offset, (layer, 0, orientation)))
    +                        ("raw", (0, 0) + self.size, offset, (layer, 0, orientation))
    +                    )
                         offset += pagesize
             elif compression == 1:
    -            self.tile = [("sgi_rle", (0, 0) + self.size,
    -                          headlen, (rawmode, orientation, bpc))]
    +            self.tile = [
    +                ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc))
    +            ]
     
     
     def _save(im, fp, filename):
    @@ -157,8 +158,11 @@ def _save(im, fp, filename):
     
         # assert we've got the right number of bands.
         if len(im.getbands()) != z:
    -        raise ValueError("incorrect number of bands in SGI write: %s vs %s" %
    -                         (z, len(im.getbands())))
    +        raise ValueError(
    +            "incorrect number of bands in SGI write: {} vs {}".format(
    +                z, len(im.getbands())
    +            )
    +        )
     
         # Minimum Byte value
         pinmin = 0
    @@ -166,31 +170,30 @@ def _save(im, fp, filename):
         pinmax = 255
         # Image name (79 characters max, truncated below in write)
         imgName = os.path.splitext(os.path.basename(filename))[0]
    -    if py3:
    -        imgName = imgName.encode('ascii', 'ignore')
    +    imgName = imgName.encode("ascii", "ignore")
         # Standard representation of pixel in the file
         colormap = 0
    -    fp.write(struct.pack('>h', magicNumber))
    +    fp.write(struct.pack(">h", magicNumber))
         fp.write(o8(rle))
         fp.write(o8(bpc))
    -    fp.write(struct.pack('>H', dim))
    -    fp.write(struct.pack('>H', x))
    -    fp.write(struct.pack('>H', y))
    -    fp.write(struct.pack('>H', z))
    -    fp.write(struct.pack('>l', pinmin))
    -    fp.write(struct.pack('>l', pinmax))
    -    fp.write(struct.pack('4s', b''))  # dummy
    -    fp.write(struct.pack('79s', imgName))  # truncates to 79 chars
    -    fp.write(struct.pack('s', b''))  # force null byte after imgname
    -    fp.write(struct.pack('>l', colormap))
    -    fp.write(struct.pack('404s', b''))  # dummy
    +    fp.write(struct.pack(">H", dim))
    +    fp.write(struct.pack(">H", x))
    +    fp.write(struct.pack(">H", y))
    +    fp.write(struct.pack(">H", z))
    +    fp.write(struct.pack(">l", pinmin))
    +    fp.write(struct.pack(">l", pinmax))
    +    fp.write(struct.pack("4s", b""))  # dummy
    +    fp.write(struct.pack("79s", imgName))  # truncates to 79 chars
    +    fp.write(struct.pack("s", b""))  # force null byte after imgname
    +    fp.write(struct.pack(">l", colormap))
    +    fp.write(struct.pack("404s", b""))  # dummy
     
    -    rawmode = 'L'
    +    rawmode = "L"
         if bpc == 2:
    -        rawmode = 'L;16B'
    +        rawmode = "L;16B"
     
         for channel in im.split():
    -        fp.write(channel.tobytes('raw', rawmode, 0, orientation))
    +        fp.write(channel.tobytes("raw", rawmode, 0, orientation))
     
         fp.close()
     
    @@ -205,13 +208,15 @@ class SGI16Decoder(ImageFile.PyDecoder):
             self.fd.seek(512)
     
             for band in range(zsize):
    -            channel = Image.new('L', (self.state.xsize, self.state.ysize))
    -            channel.frombytes(self.fd.read(2 * pagesize), 'raw',
    -                              'L;16B', stride, orientation)
    +            channel = Image.new("L", (self.state.xsize, self.state.ysize))
    +            channel.frombytes(
    +                self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation
    +            )
                 self.im.putband(channel.im, band)
     
             return -1, 0
     
    +
     #
     # registry
     
    @@ -220,7 +225,6 @@ Image.register_decoder("SGI16", SGI16Decoder)
     Image.register_open(SgiImageFile.format, SgiImageFile, _accept)
     Image.register_save(SgiImageFile.format, _save)
     Image.register_mime(SgiImageFile.format, "image/sgi")
    -Image.register_mime(SgiImageFile.format, "image/rgb")
     
     Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"])
     
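Review note: SgiImagePlugin previously registered both "image/sgi" and "image/rgb" globally for one format, so the second register_mime() call simply overwrote the first. The patch keeps "image/sgi" as the registered default and sets the per-instance custom_mimetype attribute to "image/rgb" only when the opened file decodes as RGB. Sketch, assuming Image.get_format_mimetype() (which consults custom_mimetype) is available; file names hypothetical:

    # Sketch: the reported MIME type now depends on the opened file.
    from PIL import Image

    with Image.open("example.rgb") as im:   # hypothetical RGB SGI file
        print(im.get_format_mimetype())     # "image/rgb"
    with Image.open("example.bw") as im:    # hypothetical greyscale SGI file
        print(im.get_format_mimetype())     # "image/sgi"
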
    diff --git a/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py
    index d502779..cbd31cf 100644
    --- a/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py
    @@ -32,19 +32,17 @@
     # Details about the Spider image format:
     # https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html
     #
    -
    -from __future__ import print_function
    -
    -from PIL import Image, ImageFile
     import os
     import struct
     import sys
     
    +from PIL import Image, ImageFile
    +
     
     def isInt(f):
         try:
             i = int(f)
    -        if f-i == 0:
    +        if f - i == 0:
                 return 1
             else:
                 return 0
    @@ -60,8 +58,9 @@ iforms = [1, 3, -11, -12, -21, -22]
     # Returns no. of bytes in the header, if it is a valid Spider header,
     # otherwise returns 0
     
    +
     def isSpiderHeader(t):
    -    h = (99,) + t   # add 1 value so can use spider header index start=1
    +    h = (99,) + t  # add 1 value so can use spider header index start=1
         # header values 1,2,5,12,13,22,23 should be integers
         for i in [1, 2, 5, 12, 13, 22, 23]:
             if not isInt(h[i]):
    @@ -71,10 +70,9 @@ def isSpiderHeader(t):
         if iform not in iforms:
             return 0
         # check other header values
    -    labrec = int(h[13])   # no. records in file header
    -    labbyt = int(h[22])   # total no. of bytes in header
    -    lenbyt = int(h[23])   # record length in bytes
    -    # print("labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt))
    +    labrec = int(h[13])  # no. records in file header
    +    labbyt = int(h[22])  # total no. of bytes in header
    +    lenbyt = int(h[23])  # record length in bytes
         if labbyt != (labrec * lenbyt):
             return 0
         # looks like a valid header
    @@ -82,12 +80,12 @@ def isSpiderHeader(t):
     
     
     def isSpiderImage(filename):
    -    with open(filename, 'rb') as fp:
    -        f = fp.read(92)   # read 23 * 4 bytes
    -    t = struct.unpack('>23f', f)  # try big-endian first
    +    with open(filename, "rb") as fp:
    +        f = fp.read(92)  # read 23 * 4 bytes
    +    t = struct.unpack(">23f", f)  # try big-endian first
         hdrlen = isSpiderHeader(t)
         if hdrlen == 0:
    -        t = struct.unpack('<23f', f)  # little-endian
    +        t = struct.unpack("<23f", f)  # little-endian
             hdrlen = isSpiderHeader(t)
         return hdrlen
     
    @@ -105,23 +103,23 @@ class SpiderImageFile(ImageFile.ImageFile):
     
             try:
                 self.bigendian = 1
    -            t = struct.unpack('>27f', f)  # try big-endian first
    +            t = struct.unpack(">27f", f)  # try big-endian first
                 hdrlen = isSpiderHeader(t)
                 if hdrlen == 0:
                     self.bigendian = 0
    -                t = struct.unpack('<27f', f)  # little-endian
    +                t = struct.unpack("<27f", f)  # little-endian
                     hdrlen = isSpiderHeader(t)
                 if hdrlen == 0:
                     raise SyntaxError("not a valid Spider file")
             except struct.error:
                 raise SyntaxError("not a valid Spider file")
     
    -        h = (99,) + t   # add 1 value : spider header index starts at 1
    +        h = (99,) + t  # add 1 value : spider header index starts at 1
             iform = int(h[5])
             if iform != 1:
                 raise SyntaxError("not a Spider 2D image")
     
    -        self.size = int(h[12]), int(h[2])  # size in pixels (width, height)
    +        self._size = int(h[12]), int(h[2])  # size in pixels (width, height)
             self.istack = int(h[24])
             self.imgnumber = int(h[27])
     
    @@ -150,9 +148,7 @@ class SpiderImageFile(ImageFile.ImageFile):
                 self.rawmode = "F;32F"
             self.mode = "F"
     
    -        self.tile = [
    -            ("raw", (0, 0) + self.size, offset,
    -                (self.rawmode, 0, 1))]
    +        self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))]
             self.__fp = self.fp  # FIXME: hack
     
         @property
    @@ -185,22 +181,32 @@ class SpiderImageFile(ImageFile.ImageFile):
             (minimum, maximum) = self.getextrema()
             m = 1
             if maximum != minimum:
    -            m = depth / (maximum-minimum)
    +            m = depth / (maximum - minimum)
             b = -m * minimum
             return self.point(lambda i, m=m, b=b: i * m + b).convert("L")
     
         # returns a ImageTk.PhotoImage object, after rescaling to 0..255
         def tkPhotoImage(self):
             from PIL import ImageTk
    +
             return ImageTk.PhotoImage(self.convert2byte(), palette=256)
     
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     # --------------------------------------------------------------------
     # Image series
     
     # given a list of filenames, return a list of images
     def loadImageSeries(filelist=None):
    -    """create a list of Image.images for use in montage"""
    +    """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage"""
         if filelist is None or len(filelist) < 1:
             return
     
    @@ -210,12 +216,13 @@ def loadImageSeries(filelist=None):
                 print("unable to find %s" % img)
                 continue
             try:
    -            im = Image.open(img).convert2byte()
    -        except:
    +            with Image.open(img) as im:
    +                im = im.convert2byte()
    +        except Exception:
                 if not isSpiderImage(img):
                     print(img + " is not a Spider image file")
                 continue
    -        im.info['filename'] = img
    +        im.info["filename"] = img
             imglist.append(im)
         return imglist
     
    @@ -223,10 +230,11 @@ def loadImageSeries(filelist=None):
     # --------------------------------------------------------------------
     # For saving images in Spider format
     
    +
     def makeSpiderHeader(im):
         nsam, nrow = im.size
         lenbyt = nsam * 4  # There are labrec records in the header
    -    labrec = 1024 / lenbyt
    +    labrec = int(1024 / lenbyt)
         if 1024 % lenbyt != 0:
             labrec += 1
         labbyt = labrec * lenbyt
    @@ -239,10 +247,10 @@ def makeSpiderHeader(im):
             return []
     
         # NB these are Fortran indices
    -    hdr[1] = 1.0             # nslice (=1 for an image)
    -    hdr[2] = float(nrow)     # number of rows per slice
    -    hdr[5] = 1.0             # iform for 2D image
    -    hdr[12] = float(nsam)    # number of pixels per line
    +    hdr[1] = 1.0  # nslice (=1 for an image)
    +    hdr[2] = float(nrow)  # number of rows per slice
    +    hdr[5] = 1.0  # iform for 2D image
    +    hdr[12] = float(nsam)  # number of pixels per line
         hdr[13] = float(labrec)  # number of records in file header
         hdr[22] = float(labbyt)  # total number of bytes in header
         hdr[23] = float(lenbyt)  # record length in bytes
    @@ -253,23 +261,23 @@ def makeSpiderHeader(im):
         # pack binary data into a string
         hdrstr = []
         for v in hdr:
    -        hdrstr.append(struct.pack('f', v))
    +        hdrstr.append(struct.pack("f", v))
         return hdrstr
     
     
     def _save(im, fp, filename):
         if im.mode[0] != "F":
    -        im = im.convert('F')
    +        im = im.convert("F")
     
         hdr = makeSpiderHeader(im)
         if len(hdr) < 256:
    -        raise IOError("Error creating Spider header")
    +        raise OSError("Error creating Spider header")
     
         # write the SPIDER header
         fp.writelines(hdr)
     
         rawmode = "F;32NF"  # 32-bit native floating point
    -    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))])
    +    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
     
     
     def _save_spider(im, fp, filename):
    @@ -278,6 +286,7 @@ def _save_spider(im, fp, filename):
         Image.register_extension(SpiderImageFile.format, ext)
         _save(im, fp, filename)
     
    +
     # --------------------------------------------------------------------
     
     
    @@ -295,20 +304,21 @@ if __name__ == "__main__":
             print("input image must be in Spider format")
             sys.exit()
     
    -    im = Image.open(filename)
    -    print("image: " + str(im))
    -    print("format: " + str(im.format))
    -    print("size: " + str(im.size))
    -    print("mode: " + str(im.mode))
    -    print("max, min: ", end=' ')
    -    print(im.getextrema())
    +    with Image.open(filename) as im:
    +        print("image: " + str(im))
    +        print("format: " + str(im.format))
    +        print("size: " + str(im.size))
    +        print("mode: " + str(im.mode))
    +        print("max, min: ", end=" ")
    +        print(im.getextrema())
     
    -    if len(sys.argv) > 2:
    -        outfile = sys.argv[2]
    +        if len(sys.argv) > 2:
    +            outfile = sys.argv[2]
     
    -        # perform some image operation
    -        im = im.transpose(Image.FLIP_LEFT_RIGHT)
    -        print(
    -            "saving a flipped version of %s as %s " %
    -            (os.path.basename(filename), outfile))
    -        im.save(outfile, SpiderImageFile.format)
    +            # perform some image operation
    +            im = im.transpose(Image.FLIP_LEFT_RIGHT)
    +            print(
    +                "saving a flipped version of %s as %s "
    +                % (os.path.basename(filename), outfile)
    +            )
    +            im.save(outfile, SpiderImageFile.format)
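
Review note: the one real bug fix in the Spider hunks is makeSpiderHeader(): under Python 3, 1024 / lenbyt is true division and yields a float, which then corrupted the packed header, so the patch truncates with int(). The arithmetic in isolation:

    # Sketch: why the division needed int() under Python 3.
    lenbyt = 4 * 100           # record length for a 100-pixel-wide image
    labrec = 1024 / lenbyt     # 2.56, a float: broke the header packing
    labrec = int(1024 / lenbyt)
    if 1024 % lenbyt != 0:
        labrec += 1            # round up to a whole number of records
    print(labrec)              # 3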
    diff --git a/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py
    index fd5e827..fd7ca8a 100644
    --- a/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py
    @@ -20,16 +20,15 @@
     from . import Image, ImageFile, ImagePalette
     from ._binary import i32be as i32
     
    -__version__ = "0.3"
    -
     
     def _accept(prefix):
    -    return len(prefix) >= 4 and i32(prefix) == 0x59a66a95
    +    return len(prefix) >= 4 and i32(prefix) == 0x59A66A95
     
     
     ##
     # Image plugin for Sun raster files.
     
    +
     class SunImageFile(ImageFile.ImageFile):
     
         format = "SUN"
    @@ -54,15 +53,15 @@ class SunImageFile(ImageFile.ImageFile):
     
             # HEAD
             s = self.fp.read(32)
    -        if i32(s) != 0x59a66a95:
    +        if i32(s) != 0x59A66A95:
                 raise SyntaxError("not an SUN raster file")
     
             offset = 32
     
    -        self.size = i32(s[4:8]), i32(s[8:12])
    +        self._size = i32(s[4:8]), i32(s[8:12])
     
             depth = i32(s[12:16])
    -        data_length = i32(s[16:20])   # unreliable, ignore.
    +        # data_length = i32(s[16:20])   # unreliable, ignore.
             file_type = i32(s[20:24])
             palette_type = i32(s[24:28])  # 0: None, 1: RGB, 2: Raw/arbitrary
             palette_length = i32(s[28:32])
    @@ -80,9 +79,9 @@ class SunImageFile(ImageFile.ImageFile):
                     self.mode, rawmode = "RGB", "BGR"
             elif depth == 32:
                 if file_type == 3:
    -                self.mode, rawmode = 'RGB', 'RGBX'
    +                self.mode, rawmode = "RGB", "RGBX"
                 else:
    -                self.mode, rawmode = 'RGB', 'BGRX'
    +                self.mode, rawmode = "RGB", "BGRX"
             else:
                 raise SyntaxError("Unsupported Mode/Bit Depth")
     
    @@ -97,7 +96,7 @@ class SunImageFile(ImageFile.ImageFile):
                 self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length))
                 if self.mode == "L":
                     self.mode = "P"
    -                rawmode = rawmode.replace('L', 'P')
    +                rawmode = rawmode.replace("L", "P")
     
             # 16 bit boundaries on stride
             stride = ((self.size[0] * depth + 15) // 16) * 2
    @@ -121,11 +120,12 @@ class SunImageFile(ImageFile.ImageFile):
             # (https://www.fileformat.info/format/sunraster/egff.htm)
     
             if file_type in (0, 1, 3, 4, 5):
    -            self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))]
    +            self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))]
             elif file_type == 2:
    -            self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)]
    +            self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)]
             else:
    -            raise SyntaxError('Unsupported Sun Raster file type')
    +            raise SyntaxError("Unsupported Sun Raster file type")
    +
     
     #
     # registry
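
Review note: the SunImagePlugin changes are almost entirely formatter output (uppercase hex literals, double quotes); the substantive tweak is commenting out the unused data_length read, which the code itself flags as unreliable, so the decoder keeps deriving sizes from geometry. The acceptance test the plugin keeps is just a big-endian magic number; in isolation:

    # Sketch: SUN raster acceptance is a 4-byte big-endian magic check.
    import struct

    def looks_like_sun_raster(prefix: bytes) -> bool:
        return len(prefix) >= 4 and struct.unpack(">I", prefix[:4])[0] == 0x59A66A95

    print(looks_like_sun_raster(b"\x59\xa6\x6a\x95" + b"\0" * 28))  # True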
    diff --git a/server/www/packages/packages-windows/x86/PIL/TarIO.py b/server/www/packages/packages-windows/x86/PIL/TarIO.py
    index 0e949ff..ede6464 100644
    --- a/server/www/packages/packages-windows/x86/PIL/TarIO.py
    +++ b/server/www/packages/packages-windows/x86/PIL/TarIO.py
    @@ -14,15 +14,16 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from . import ContainerIO
    +import io
     
    +from . import ContainerIO
     
     ##
     # A file object that provides read access to a given member of a TAR
     # file.
     
    -class TarIO(ContainerIO.ContainerIO):
     
    +class TarIO(ContainerIO.ContainerIO):
         def __init__(self, tarfile, file):
             """
             Create file object.
    @@ -30,18 +31,18 @@ class TarIO(ContainerIO.ContainerIO):
             :param tarfile: Name of TAR file.
             :param file: Name of member file.
             """
    -        fh = open(tarfile, "rb")
    +        self.fh = open(tarfile, "rb")
     
             while True:
     
    -            s = fh.read(512)
    +            s = self.fh.read(512)
                 if len(s) != 512:
    -                raise IOError("unexpected end of tar file")
    +                raise OSError("unexpected end of tar file")
     
    -            name = s[:100].decode('utf-8')
    -            i = name.find('\0')
    +            name = s[:100].decode("utf-8")
    +            i = name.find("\0")
                 if i == 0:
    -                raise IOError("cannot find subfile")
    +                raise OSError("cannot find subfile")
                 if i > 0:
                     name = name[:i]
     
    @@ -50,7 +51,17 @@ class TarIO(ContainerIO.ContainerIO):
                 if file == name:
                     break
     
    -            fh.seek((size + 511) & (~511), 1)
    +            self.fh.seek((size + 511) & (~511), io.SEEK_CUR)
     
             # Open region
    -        ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size)
    +        super().__init__(self.fh, self.fh.tell(), size)
    +
    +    # Context manager support
    +    def __enter__(self):
    +        return self
    +
    +    def __exit__(self, *args):
    +        self.close()
    +
    +    def close(self):
    +        self.fh.close()
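
Review note: TarIO now keeps the tar handle on self.fh and implements __enter__/__exit__/close(), so the underlying file can be released deterministically instead of leaking until garbage collection. Usage sketch (archive and member names hypothetical):

    # Sketch: TarIO as a context manager after this patch.
    from PIL import Image, TarIO

    with TarIO.TarIO("images.tar", "member/photo.jpg") as fp:
        with Image.open(fp) as im:
            print(im.format, im.size)
    # the tar handle is closed when the outer "with" block exits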
    diff --git a/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py
    index 57b6ae2..fd71e54 100644
    --- a/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py
    @@ -17,12 +17,11 @@
     #
     
     
    +import warnings
    +
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, i16le as i16, o8, o16le as o16
     
    -__version__ = "0.3"
    -
    -
     #
     # --------------------------------------------------------------------
     # Read RGA file
    @@ -30,9 +29,9 @@ __version__ = "0.3"
     
     MODES = {
         # map imagetype/depth to rawmode
    -    (1, 8):  "P",
    -    (3, 1):  "1",
    -    (3, 8):  "L",
    +    (1, 8): "P",
    +    (3, 1): "1",
    +    (3, 8): "L",
         (3, 16): "LA",
         (2, 16): "BGR;5",
         (2, 24): "BGR",
    @@ -43,6 +42,7 @@ MODES = {
     ##
     # Image plugin for Targa files.
     
    +
     class TgaImageFile(ImageFile.ImageFile):
     
         format = "TGA"
    @@ -53,7 +53,7 @@ class TgaImageFile(ImageFile.ImageFile):
             # process header
             s = self.fp.read(18)
     
    -        idlen = i8(s[0])
    +        id_len = i8(s[0])
     
             colormaptype = i8(s[1])
             imagetype = i8(s[2])
    @@ -62,12 +62,15 @@ class TgaImageFile(ImageFile.ImageFile):
     
             flags = i8(s[17])
     
    -        self.size = i16(s[12:]), i16(s[14:])
    +        self._size = i16(s[12:]), i16(s[14:])
     
             # validate header fields
    -        if colormaptype not in (0, 1) or\
    -           self.size[0] <= 0 or self.size[1] <= 0 or\
    -           depth not in (1, 8, 16, 24, 32):
    +        if (
    +            colormaptype not in (0, 1)
    +            or self.size[0] <= 0
    +            or self.size[1] <= 0
    +            or depth not in (1, 8, 16, 24, 32)
    +        ):
                 raise SyntaxError("not a TGA file")
     
             # image mode
    @@ -100,35 +103,51 @@ class TgaImageFile(ImageFile.ImageFile):
             if imagetype & 8:
                 self.info["compression"] = "tga_rle"
     
    -        if idlen:
    -            self.info["id_section"] = self.fp.read(idlen)
    +        if id_len:
    +            self.info["id_section"] = self.fp.read(id_len)
     
             if colormaptype:
                 # read palette
                 start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:])
                 if mapdepth == 16:
                     self.palette = ImagePalette.raw(
    -                    "BGR;16", b"\0"*2*start + self.fp.read(2*size))
    +                    "BGR;16", b"\0" * 2 * start + self.fp.read(2 * size)
    +                )
                 elif mapdepth == 24:
                     self.palette = ImagePalette.raw(
    -                    "BGR", b"\0"*3*start + self.fp.read(3*size))
    +                    "BGR", b"\0" * 3 * start + self.fp.read(3 * size)
    +                )
                 elif mapdepth == 32:
                     self.palette = ImagePalette.raw(
    -                    "BGRA", b"\0"*4*start + self.fp.read(4*size))
    +                    "BGRA", b"\0" * 4 * start + self.fp.read(4 * size)
    +                )
     
             # setup tile descriptor
             try:
                 rawmode = MODES[(imagetype & 7, depth)]
                 if imagetype & 8:
                     # compressed
    -                self.tile = [("tga_rle", (0, 0)+self.size,
    -                              self.fp.tell(), (rawmode, orientation, depth))]
    +                self.tile = [
    +                    (
    +                        "tga_rle",
    +                        (0, 0) + self.size,
    +                        self.fp.tell(),
    +                        (rawmode, orientation, depth),
    +                    )
    +                ]
                 else:
    -                self.tile = [("raw", (0, 0)+self.size,
    -                              self.fp.tell(), (rawmode, 0, orientation))]
    +                self.tile = [
    +                    (
    +                        "raw",
    +                        (0, 0) + self.size,
    +                        self.fp.tell(),
    +                        (rawmode, 0, orientation),
    +                    )
    +                ]
             except KeyError:
                 pass  # cannot decode
     
    +
     #
     # --------------------------------------------------------------------
     # Write TGA file
    @@ -149,13 +168,23 @@ def _save(im, fp, filename):
         try:
             rawmode, bits, colormaptype, imagetype = SAVE[im.mode]
         except KeyError:
    -        raise IOError("cannot write mode %s as TGA" % im.mode)
    -
    -    rle = im.encoderinfo.get("rle", False)
    +        raise OSError("cannot write mode %s as TGA" % im.mode)
     
    +    if "rle" in im.encoderinfo:
    +        rle = im.encoderinfo["rle"]
    +    else:
    +        compression = im.encoderinfo.get("compression", im.info.get("compression"))
    +        rle = compression == "tga_rle"
         if rle:
             imagetype += 8
     
    +    id_section = im.encoderinfo.get("id_section", im.info.get("id_section", ""))
    +    id_len = len(id_section)
    +    if id_len > 255:
    +        id_len = 255
    +        id_section = id_section[:255]
    +        warnings.warn("id_section has been trimmed to 255 characters")
    +
         if colormaptype:
             colormapfirst, colormaplength, colormapentry = 0, 256, 24
         else:
    @@ -166,38 +195,44 @@ def _save(im, fp, filename):
         else:
             flags = 0
     
    -    orientation = im.info.get("orientation", -1)
    +    orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1))
         if orientation > 0:
             flags = flags | 0x20
     
    -    fp.write(b"\000" +
    -             o8(colormaptype) +
    -             o8(imagetype) +
    -             o16(colormapfirst) +
    -             o16(colormaplength) +
    -             o8(colormapentry) +
    -             o16(0) +
    -             o16(0) +
    -             o16(im.size[0]) +
    -             o16(im.size[1]) +
    -             o8(bits) +
    -             o8(flags))
    +    fp.write(
    +        o8(id_len)
    +        + o8(colormaptype)
    +        + o8(imagetype)
    +        + o16(colormapfirst)
    +        + o16(colormaplength)
    +        + o8(colormapentry)
    +        + o16(0)
    +        + o16(0)
    +        + o16(im.size[0])
    +        + o16(im.size[1])
    +        + o8(bits)
    +        + o8(flags)
    +    )
    +
    +    if id_section:
    +        fp.write(id_section)
     
         if colormaptype:
             fp.write(im.im.getpalette("RGB", "BGR"))
     
         if rle:
             ImageFile._save(
    -            im,
    -            fp,
    -            [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))])
    +            im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))]
    +        )
         else:
             ImageFile._save(
    -            im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))])
    +            im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]
    +        )
     
         # write targa version 2 footer
         fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000")
     
    +
     #
     # --------------------------------------------------------------------
     # Registry
    @@ -206,4 +241,6 @@ def _save(im, fp, filename):
     Image.register_open(TgaImageFile.format, TgaImageFile)
     Image.register_save(TgaImageFile.format, _save)
     
    -Image.register_extension(TgaImageFile.format, ".tga")
    +Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"])
    +
    +Image.register_mime(TgaImageFile.format, "image/x-tga")
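
Review note: the TGA writer gains several features in this hunk: RLE can be requested with a compression="tga_rle" keyword (mirroring what the reader stores in info), an id_section is written back out (trimmed to the format's 255-byte field with a warning), orientation is overridable at save time, and the extra Targa extensions plus a MIME type are registered. A save-side sketch with a hypothetical output path:

    # Sketch: options the patched TGA writer understands.
    from PIL import Image

    im = Image.new("RGB", (16, 16))
    im.save("out.tga", compression="tga_rle",   # request RLE packing
            id_section=b"demo image",           # stored in the id field
            orientation=1)                      # top-left origin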
    diff --git a/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py
    index 6f032f4..74fb695 100644
    --- a/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py
    @@ -38,35 +38,19 @@
     #
     # See the README file for information on usage and redistribution.
     #
    -
    -from __future__ import division, print_function
    -
    -from . import Image, ImageFile, ImagePalette, TiffTags
    -from ._binary import i8, o8
    -from ._util import py3
    -
    -import collections
    -from fractions import Fraction
    -from numbers import Number, Rational
    -
     import io
     import itertools
     import os
     import struct
    -import sys
     import warnings
    +from collections.abc import MutableMapping
    +from fractions import Fraction
    +from numbers import Number, Rational
     
    +from . import Image, ImageFile, ImagePalette, TiffTags
    +from ._binary import i8, o8
     from .TiffTags import TYPES
     
    -try:
    -    # Python 3
    -    from collections.abc import MutableMapping
    -except ImportError:
    -    # Python 2.7
    -    from collections import MutableMapping
    -
    -
    -__version__ = "1.3.5"
     DEBUG = False  # Needs to be merged with the new logging approach.
     
     # Set these to true to force use of libtiff for reading or writing.
    @@ -97,6 +81,7 @@ X_RESOLUTION = 282
     Y_RESOLUTION = 283
     PLANAR_CONFIGURATION = 284
     RESOLUTION_UNIT = 296
    +TRANSFERFUNCTION = 301
     SOFTWARE = 305
     DATE_TIME = 306
     ARTIST = 315
    @@ -106,12 +91,14 @@ TILEOFFSETS = 324
     EXTRASAMPLES = 338
     SAMPLEFORMAT = 339
     JPEGTABLES = 347
    +REFERENCEBLACKWHITE = 532
     COPYRIGHT = 33432
     IPTC_NAA_CHUNK = 33723  # newsphoto properties
     PHOTOSHOP_CHUNK = 34377  # photoshop properties
     ICCPROFILE = 34675
     EXIFIFD = 34665
     XMP = 700
    +JPEGQUALITY = 65537  # pseudo-tag by libtiff
     
     # https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
     IMAGEJ_META_DATA_BYTE_COUNTS = 50838
    @@ -133,6 +120,9 @@ COMPRESSION_INFO = {
         32946: "tiff_deflate",
         34676: "tiff_sgilog",
         34677: "tiff_sgilog24",
    +    34925: "lzma",
    +    50000: "zstd",
    +    50001: "webp",
     }
     
     COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
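
Review note: the COMPRESSION_INFO additions teach the TIFF reader to recognize the lzma (34925), zstd (50000) and webp (50001) compression tags; the reverse table used when writing is derived automatically. In isolation:

    # Sketch: mapping TIFF compression tag values to names and back.
    COMPRESSION_INFO = {34925: "lzma", 50000: "zstd", 50001: "webp"}  # excerpt
    COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
    print(COMPRESSION_INFO[50000])        # "zstd"
    print(COMPRESSION_INFO_REV["webp"])   # 50001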
    @@ -148,7 +138,6 @@ OPEN_INFO = {
         (MM, 1, (1,), 1, (1,), ()): ("1", "1"),
         (II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
         (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
    -
         (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
         (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
         (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
    @@ -157,7 +146,6 @@ OPEN_INFO = {
         (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
         (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
         (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
    -
         (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
         (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
         (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
    @@ -166,7 +154,6 @@ OPEN_INFO = {
         (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
         (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
         (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
    -
         (II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
         (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
         (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
    @@ -175,14 +162,11 @@ OPEN_INFO = {
         (MM, 1, (1,), 1, (8,), ()): ("L", "L"),
         (II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
         (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
    -
         (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
    -
         (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
         (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
         (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
         (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
    -
         (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
         (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
         (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
    @@ -190,10 +174,8 @@ OPEN_INFO = {
         (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
         (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
         (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
    -
         (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
         (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
    -
         (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
         (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
         (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
    @@ -208,11 +190,18 @@ OPEN_INFO = {
         (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
         (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
         (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
    +    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    +    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    +    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
    +    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
         (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
         (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
    +    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    +    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    +    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
    +    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
         (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
         (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
    -
         (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
         (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
         (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
    @@ -223,7 +212,6 @@ OPEN_INFO = {
         (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
         (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
         (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
    -
         (II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
         (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
         (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
    @@ -242,23 +230,17 @@ OPEN_INFO = {
         (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
         (II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
         (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
    -
         (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
         (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
         (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
         (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
         (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
         (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
    -
    -    (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
    -    (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
    -    (II, 6, (1,), 1, (8, 8, 8, 8), (0,)): ("YCbCr", "YCbCrX"),
    -    (MM, 6, (1,), 1, (8, 8, 8, 8), (0,)): ("YCbCr", "YCbCrX"),
    -    (II, 6, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("YCbCr", "YCbCrXXX"),
    -    (MM, 6, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("YCbCr", "YCbCrXXX"),
    -    (II, 6, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("YCbCr", "YCbCrXXX"),
    -    (MM, 6, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("YCbCr", "YCbCrXXX"),
    -
    +    (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
    +    # JPEG compressed images handled by LibTiff and auto-converted to RGBX
    +    # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
    +    (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
    +    (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
         (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
         (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
     }
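
Review note: the OPEN_INFO edits add 16-bit CMYK and the extra-samples RGBA variants, and collapse the YCbCr rows: JPEG-compressed TIFFs go through libtiff, which hands back RGBX, so photometric interpretation 6 now maps directly to ("RGB", "RGBX"). A hypothetical sketch of the lookup (the key layout is inferred from the table; II/MM are the b"II"/b"MM" byte-order prefixes):

    # Hypothetical sketch of an OPEN_INFO lookup.
    OPEN_INFO = {(b"II", 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX")}  # excerpt
    key = (b"II", 6, (1,), 1, (8, 8, 8), ())  # little-endian, photometric 6
    mode, rawmode = OPEN_INFO[key]
    print(mode, rawmode)                      # RGB RGBX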
    @@ -281,6 +263,20 @@ def _limit_rational(val, max_val):
         return n_d[::-1] if inv else n_d
     
     
    +def _limit_signed_rational(val, max_val, min_val):
    +    frac = Fraction(val)
    +    n_d = frac.numerator, frac.denominator
    +
    +    if min(n_d) < min_val:
    +        n_d = _limit_rational(val, abs(min_val))
    +
    +    if max(n_d) > max_val:
    +        val = Fraction(*n_d)
    +        n_d = _limit_rational(val, max_val)
    +
    +    return n_d
    +
    +
     ##
     # Wrapper for TIFF IFDs.
     
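Review note: _limit_signed_rational() bounds both terms of a rational so they fit a signed TIFF SRATIONAL field (two int32 values): it first re-derives the fraction if either term underflows min_val, then re-limits if either overflows max_val. A rough stand-in using Fraction.limit_denominator (the real code calls the module's _limit_rational helper, which inverts fractions greater than 1 so the numerator gets bounded too):

    # Rough stand-in, assuming int32 bounds; not the exact PIL algorithm.
    from fractions import Fraction

    I32_MAX = 2**31 - 1

    def limit_rational(val, bound=I32_MAX):
        f = Fraction(val)
        inv = abs(f) > 1
        g = (1 / f if inv else f).limit_denominator(bound)
        return (g.denominator, g.numerator) if inv else (g.numerator, g.denominator)

    print(limit_rational(Fraction(2**32, 3)))  # e.g. (1431655765, 1): fits int32
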
    @@ -300,7 +296,7 @@ class IFDRational(Rational):
     
         """
     
    -    __slots__ = ('_numerator', '_denominator', '_val')
    +    __slots__ = ("_numerator", "_denominator", "_val")
     
         def __init__(self, value, denominator=1):
             """
    @@ -308,25 +304,21 @@ class IFDRational(Rational):
             float/rational/other number, or an IFDRational
             :param denominator: Optional integer denominator
             """
    -        self._denominator = denominator
    -        self._numerator = value
    -        self._val = float(1)
    +        if isinstance(value, IFDRational):
    +            self._numerator = value.numerator
    +            self._denominator = value.denominator
    +            self._val = value._val
    +            return
     
             if isinstance(value, Fraction):
                 self._numerator = value.numerator
                 self._denominator = value.denominator
    -            self._val = value
    -
    -        if isinstance(value, IFDRational):
    -            self._denominator = value.denominator
    -            self._numerator = value.numerator
    -            self._val = value._val
    -            return
    +        else:
    +            self._numerator = value
    +            self._denominator = denominator
     
             if denominator == 0:
    -            self._val = float('nan')
    -            return
    -
    +            self._val = float("nan")
             elif denominator == 1:
                 self._val = Fraction(value)
             else:
    @@ -365,44 +357,43 @@ class IFDRational(Rational):
         def _delegate(op):
             def delegate(self, *args):
                 return getattr(self._val, op)(*args)
    +
             return delegate
     
    -    """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul',
    -             'truediv', 'rtruediv', 'floordiv',
    -             'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg',
    -             'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero',
    +    """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul',
    +             'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
    +             'mod','rmod', 'pow','rpow', 'pos', 'neg',
    +             'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
                  'ceil', 'floor', 'round']
             print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
             """
     
    -    __add__ = _delegate('__add__')
    -    __radd__ = _delegate('__radd__')
    -    __sub__ = _delegate('__sub__')
    -    __rsub__ = _delegate('__rsub__')
    -    __div__ = _delegate('__div__')
    -    __rdiv__ = _delegate('__rdiv__')
    -    __mul__ = _delegate('__mul__')
    -    __rmul__ = _delegate('__rmul__')
    -    __truediv__ = _delegate('__truediv__')
    -    __rtruediv__ = _delegate('__rtruediv__')
    -    __floordiv__ = _delegate('__floordiv__')
    -    __rfloordiv__ = _delegate('__rfloordiv__')
    -    __mod__ = _delegate('__mod__')
    -    __rmod__ = _delegate('__rmod__')
    -    __pow__ = _delegate('__pow__')
    -    __rpow__ = _delegate('__rpow__')
    -    __pos__ = _delegate('__pos__')
    -    __neg__ = _delegate('__neg__')
    -    __abs__ = _delegate('__abs__')
    -    __trunc__ = _delegate('__trunc__')
    -    __lt__ = _delegate('__lt__')
    -    __gt__ = _delegate('__gt__')
    -    __le__ = _delegate('__le__')
    -    __ge__ = _delegate('__ge__')
    -    __nonzero__ = _delegate('__nonzero__')
    -    __ceil__ = _delegate('__ceil__')
    -    __floor__ = _delegate('__floor__')
    -    __round__ = _delegate('__round__')
    +    __add__ = _delegate("__add__")
    +    __radd__ = _delegate("__radd__")
    +    __sub__ = _delegate("__sub__")
    +    __rsub__ = _delegate("__rsub__")
    +    __mul__ = _delegate("__mul__")
    +    __rmul__ = _delegate("__rmul__")
    +    __truediv__ = _delegate("__truediv__")
    +    __rtruediv__ = _delegate("__rtruediv__")
    +    __floordiv__ = _delegate("__floordiv__")
    +    __rfloordiv__ = _delegate("__rfloordiv__")
    +    __mod__ = _delegate("__mod__")
    +    __rmod__ = _delegate("__rmod__")
    +    __pow__ = _delegate("__pow__")
    +    __rpow__ = _delegate("__rpow__")
    +    __pos__ = _delegate("__pos__")
    +    __neg__ = _delegate("__neg__")
    +    __abs__ = _delegate("__abs__")
    +    __trunc__ = _delegate("__trunc__")
    +    __lt__ = _delegate("__lt__")
    +    __gt__ = _delegate("__gt__")
    +    __le__ = _delegate("__le__")
    +    __ge__ = _delegate("__ge__")
    +    __bool__ = _delegate("__bool__")
    +    __ceil__ = _delegate("__ceil__")
    +    __floor__ = _delegate("__floor__")
    +    __round__ = _delegate("__round__")
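
# A standalone sketch (not part of the patch) of the _delegate pattern
# above: each dunder is generated once and forwarded to the wrapped value,
# so the class behaves like whatever self._val holds. _DelegateSketch is a
# hypothetical, stripped-down stand-in for IFDRational.
from fractions import Fraction

class _DelegateSketch:
    def __init__(self, numerator, denominator=1):
        # the real IFDRational also handles denominator == 0 via float("nan")
        self._val = Fraction(numerator, denominator)

    def _delegate(op):
        def delegate(self, *args):
            return getattr(self._val, op)(*args)
        return delegate

    __add__ = _delegate("__add__")
    __le__ = _delegate("__le__")

print(_DelegateSketch(1, 2) + Fraction(1, 2))  # 1
print(_DelegateSketch(1, 2) <= 1)              # True
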
     
     
     class ImageFileDirectory_v2(MutableMapping):
    @@ -413,7 +404,7 @@ class ImageFileDirectory_v2(MutableMapping):
     
             ifd = ImageFileDirectory_v2()
             ifd[key] = 'Some Data'
    -        ifd.tagtype[key] = 2
    +        ifd.tagtype[key] = TiffTags.ASCII
             print(ifd[key])
             'Some Data'
     
    @@ -431,10 +422,12 @@ class ImageFileDirectory_v2(MutableMapping):
             * self.tagtype = {}
     
               * Key: numerical tiff tag number
    -          * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES`
    +          * Value: integer corresponding to the data type from
    +                   `~PIL.TiffTags.TYPES`
     
         .. versionadded:: 3.0.0
         """
    +
         """
         Documentation:
     
    @@ -479,7 +472,7 @@ class ImageFileDirectory_v2(MutableMapping):
             else:
                 raise SyntaxError("not a TIFF IFD")
             self.reset()
    -        self.next, = self._unpack("L", ifh[4:])
    +        (self.next,) = self._unpack("L", ifh[4:])
             self._legacy_api = False
     
         prefix = property(lambda self: self._prefix)
    @@ -494,7 +487,7 @@ class ImageFileDirectory_v2(MutableMapping):
             self._tags_v1 = {}  # will remain empty if legacy_api is false
             self._tags_v2 = {}  # main tag storage
             self._tagdata = {}
    -        self.tagtype = {}   # added 2008-06-05 by Florian Hoech
    +        self.tagtype = {}  # added 2008-06-05 by Florian Hoech
             self._next = None
             self._offset = None
     
    @@ -507,8 +500,7 @@ class ImageFileDirectory_v2(MutableMapping):
     
             Returns the complete tag dictionary, with named tags where possible.
             """
    -        return dict((TiffTags.lookup(code).name, value)
    -                    for code, value in self.items())
    +        return {TiffTags.lookup(code).name: value for code, value in self.items()}
     
         def __len__(self):
             return len(set(self._tagdata) | set(self._tags_v2))
    @@ -521,23 +513,17 @@ class ImageFileDirectory_v2(MutableMapping):
                 self[tag] = handler(self, data, self.legacy_api)  # check type
             val = self._tags_v2[tag]
             if self.legacy_api and not isinstance(val, (tuple, bytes)):
    -            val = val,
    +            val = (val,)
             return val
     
         def __contains__(self, tag):
             return tag in self._tags_v2 or tag in self._tagdata
     
    -    if not py3:
    -        def has_key(self, tag):
    -            return tag in self
    -
         def __setitem__(self, tag, value):
             self._setitem(tag, value, self.legacy_api)
     
         def _setitem(self, tag, value, legacy_api):
             basetypes = (Number, bytes, str)
    -        if not py3:
    -            basetypes += unicode,
     
             info = TiffTags.lookup(tag)
             values = [value] if isinstance(value, basetypes) else value
    @@ -546,27 +532,36 @@ class ImageFileDirectory_v2(MutableMapping):
                 if info.type:
                     self.tagtype[tag] = info.type
                 else:
    -                self.tagtype[tag] = 7
    +                self.tagtype[tag] = TiffTags.UNDEFINED
                     if all(isinstance(v, IFDRational) for v in values):
    -                    self.tagtype[tag] = 5
    +                    self.tagtype[tag] = (
    +                        TiffTags.RATIONAL
    +                        if all(v >= 0 for v in values)
    +                        else TiffTags.SIGNED_RATIONAL
    +                    )
                     elif all(isinstance(v, int) for v in values):
    -                    if all(v < 2 ** 16 for v in values):
    -                        self.tagtype[tag] = 3
    +                    if all(0 <= v < 2 ** 16 for v in values):
    +                        self.tagtype[tag] = TiffTags.SHORT
    +                    elif all(-(2 ** 15) < v < 2 ** 15 for v in values):
    +                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
                         else:
    -                        self.tagtype[tag] = 4
    +                        self.tagtype[tag] = (
    +                            TiffTags.LONG
    +                            if all(v >= 0 for v in values)
    +                            else TiffTags.SIGNED_LONG
    +                        )
                     elif all(isinstance(v, float) for v in values):
    -                    self.tagtype[tag] = 12
    +                    self.tagtype[tag] = TiffTags.DOUBLE
                     else:
    -                    if py3:
    -                        if all(isinstance(v, str) for v in values):
    -                            self.tagtype[tag] = 2
    -                    else:
    -                        # Never treat data as binary by default on Python 2.
    -                        self.tagtype[tag] = 2
    +                    if all(isinstance(v, str) for v in values):
    +                        self.tagtype[tag] = TiffTags.ASCII
     
    -        if self.tagtype[tag] == 7 and py3:
    -            values = [value.encode("ascii", 'replace') if isinstance(
    -                      value, str) else value]
    +        if self.tagtype[tag] == TiffTags.UNDEFINED:
    +            values = [
    +                value.encode("ascii", "replace") if isinstance(value, str) else value
    +            ]
    +        elif self.tagtype[tag] == TiffTags.RATIONAL:
    +            values = [float(v) if isinstance(v, int) else v for v in values]
     
             values = tuple(info.cvt_enum(value) for value in values)
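
# A sketch of the fallback type inference performed above when a tag has no
# declared type. The numeric constants are the standard TIFF field types
# behind the new TiffTags names; the IFDRational branch (RATIONAL vs
# SIGNED_RATIONAL) is omitted here for brevity.
ASCII, SHORT, LONG = 2, 3, 4
UNDEFINED, SIGNED_SHORT, SIGNED_LONG, DOUBLE = 7, 8, 9, 12

def infer_tagtype(values):
    if all(isinstance(v, int) for v in values):
        if all(0 <= v < 2 ** 16 for v in values):
            return SHORT
        if all(-(2 ** 15) < v < 2 ** 15 for v in values):
            return SIGNED_SHORT
        return LONG if all(v >= 0 for v in values) else SIGNED_LONG
    if all(isinstance(v, float) for v in values):
        return DOUBLE
    if all(isinstance(v, str) for v in values):
        return ASCII
    return UNDEFINED

print(infer_tagtype([65535]))    # 3: fits unsigned SHORT
print(infer_tagtype([-1]))       # 8: needs a signed type
print(infer_tagtype([2 ** 20]))  # 4: too big for SHORT, non-negative
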
     
    @@ -577,18 +572,23 @@ class ImageFileDirectory_v2(MutableMapping):
             # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
             # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
             # Don't mess with the legacy api, since it's frozen.
    -        if ((info.length == 1) or
    -            (info.length is None and len(values) == 1 and not legacy_api)):
    +        if (info.length == 1) or (
    +            info.length is None and len(values) == 1 and not legacy_api
    +        ):
                 # Don't mess with the legacy api, since it's frozen.
    -            if legacy_api and self.tagtype[tag] in [5, 10]:  # rationals
    -                values = values,
    +            if legacy_api and self.tagtype[tag] in [
    +                TiffTags.RATIONAL,
    +                TiffTags.SIGNED_RATIONAL,
    +            ]:  # rationals
    +                values = (values,)
                 try:
    -                dest[tag], = values
    +                (dest[tag],) = values
                 except ValueError:
                     # We've got a builtin tag with 1 expected entry
                     warnings.warn(
    -                    "Metadata Warning, tag %s had too many entries: %s, expected 1" % (
    -                        tag, len(values)))
    +                    "Metadata Warning, tag %s had too many entries: %s, expected 1"
    +                    % (tag, len(values))
    +                )
                     dest[tag] = values[0]
     
             else:
    @@ -613,36 +613,51 @@ class ImageFileDirectory_v2(MutableMapping):
         def _register_loader(idx, size):
             def decorator(func):
                 from .TiffTags import TYPES
    +
                 if func.__name__.startswith("load_"):
                     TYPES[idx] = func.__name__[5:].replace("_", " ")
    -            _load_dispatch[idx] = size, func
    +            _load_dispatch[idx] = size, func  # noqa: F821
                 return func
    +
             return decorator
     
         def _register_writer(idx):
             def decorator(func):
    -            _write_dispatch[idx] = func
    +            _write_dispatch[idx] = func  # noqa: F821
                 return func
    +
             return decorator
     
         def _register_basic(idx_fmt_name):
             from .TiffTags import TYPES
    +
             idx, fmt, name = idx_fmt_name
             TYPES[idx] = name
             size = struct.calcsize("=" + fmt)
    -        _load_dispatch[idx] = size, lambda self, data, legacy_api=True: (
    -            self._unpack("{}{}".format(len(data) // size, fmt), data))
    -        _write_dispatch[idx] = lambda self, *values: (
    -            b"".join(self._pack(fmt, value) for value in values))
    +        _load_dispatch[idx] = (  # noqa: F821
    +            size,
    +            lambda self, data, legacy_api=True: (
    +                self._unpack("{}{}".format(len(data) // size, fmt), data)
    +            ),
    +        )
    +        _write_dispatch[idx] = lambda self, *values: (  # noqa: F821
    +            b"".join(self._pack(fmt, value) for value in values)
    +        )
     
    -    list(map(_register_basic,
    -             [(3, "H", "short"),
    -              (4, "L", "long"),
    -              (6, "b", "signed byte"),
    -              (8, "h", "signed short"),
    -              (9, "l", "signed long"),
    -              (11, "f", "float"),
    -              (12, "d", "double")]))
    +    list(
    +        map(
    +            _register_basic,
    +            [
    +                (TiffTags.SHORT, "H", "short"),
    +                (TiffTags.LONG, "L", "long"),
    +                (TiffTags.SIGNED_BYTE, "b", "signed byte"),
    +                (TiffTags.SIGNED_SHORT, "h", "signed short"),
    +                (TiffTags.SIGNED_LONG, "l", "signed long"),
    +                (TiffTags.FLOAT, "f", "float"),
    +                (TiffTags.DOUBLE, "d", "double"),
    +            ],
    +        )
    +    )
     
         @_register_loader(1, 1)  # Basic type, except for the legacy API.
         def load_byte(self, data, legacy_api=True):
    @@ -661,22 +676,22 @@ class ImageFileDirectory_v2(MutableMapping):
         @_register_writer(2)
         def write_string(self, value):
             # remerge of https://github.com/python-pillow/Pillow/pull/1416
    -        if sys.version_info.major == 2:
    -            value = value.decode('ascii', 'replace')
    -        return b"" + value.encode('ascii', 'replace') + b"\0"
    +        return b"" + value.encode("ascii", "replace") + b"\0"
     
         @_register_loader(5, 8)
         def load_rational(self, data, legacy_api=True):
             vals = self._unpack("{}L".format(len(data) // 4), data)
     
    -        def combine(a, b): return (a, b) if legacy_api else IFDRational(a, b)
    -        return tuple(combine(num, denom)
    -                     for num, denom in zip(vals[::2], vals[1::2]))
    +        def combine(a, b):
    +            return (a, b) if legacy_api else IFDRational(a, b)
    +
    +        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
     
         @_register_writer(5)
         def write_rational(self, *values):
    -        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31))
    -                        for frac in values)
    +        return b"".join(
    +            self._pack("2L", *_limit_rational(frac, 2 ** 32 - 1)) for frac in values
    +        )
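
# A sketch of the byte layout the RATIONAL writer above emits: each value
# becomes two unsigned 32-bit ints (numerator, denominator), which is why
# the clamp moves from 2 ** 31 to the true unsigned maximum 2 ** 32 - 1.
# limit_rational is a simplified stand-in for PIL's _limit_rational (it
# only caps the denominator).
import struct
from fractions import Fraction

def limit_rational(value, max_val):
    f = Fraction(value).limit_denominator(max_val)
    return f.numerator, f.denominator

def write_rational_le(*values):  # little-endian, as in an "II" TIFF
    return b"".join(
        struct.pack("<2L", *limit_rational(v, 2 ** 32 - 1)) for v in values
    )

print(write_rational_le(Fraction(72, 1)).hex())  # 4800000001000000
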
     
         @_register_loader(7, 1)
         def load_undefined(self, data, legacy_api=True):
    @@ -690,21 +705,25 @@ class ImageFileDirectory_v2(MutableMapping):
         def load_signed_rational(self, data, legacy_api=True):
             vals = self._unpack("{}l".format(len(data) // 4), data)
     
    -        def combine(a, b): return (a, b) if legacy_api else IFDRational(a, b)
    -        return tuple(combine(num, denom)
    -                     for num, denom in zip(vals[::2], vals[1::2]))
    +        def combine(a, b):
    +            return (a, b) if legacy_api else IFDRational(a, b)
    +
    +        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
     
         @_register_writer(10)
         def write_signed_rational(self, *values):
    -        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30))
    -                        for frac in values)
    +        return b"".join(
    +            self._pack("2l", *_limit_signed_rational(frac, 2 ** 31 - 1, -(2 ** 31)))
    +            for frac in values
    +        )
     
         def _ensure_read(self, fp, size):
             ret = fp.read(size)
             if len(ret) != size:
    -            raise IOError("Corrupt EXIF data.  " +
    -                          "Expecting to read %d bytes but only got %d. " %
    -                          (size, len(ret)))
    +            raise OSError(
    +                "Corrupt EXIF data.  "
    +                + "Expecting to read %d bytes but only got %d. " % (size, len(ret))
    +            )
             return ret
     
         def load(self, fp):
    @@ -714,13 +733,14 @@ class ImageFileDirectory_v2(MutableMapping):
     
             try:
                 for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]):
    -                tag, typ, count, data = self._unpack("HHL4s",
    -                                                     self._ensure_read(fp, 12))
    +                tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12))
                     if DEBUG:
                         tagname = TiffTags.lookup(tag).name
                         typname = TYPES.get(typ, "unknown")
    -                    print("tag: %s (%d) - type: %s (%d)" %
    -                          (tagname, tag, typname, typ), end=" ")
    +                    print(
    +                        "tag: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ),
    +                        end=" ",
    +                    )
     
                     try:
                         unit_size, handler = self._load_dispatch[typ]
    @@ -731,10 +751,12 @@ class ImageFileDirectory_v2(MutableMapping):
                     size = count * unit_size
                     if size > 4:
                         here = fp.tell()
    -                    offset, = self._unpack("L", data)
    +                    (offset,) = self._unpack("L", data)
                         if DEBUG:
    -                        print("Tag Location: %s - Data Location: %s" %
    -                              (here, offset), end=" ")
    +                        print(
    +                            "Tag Location: {} - Data Location: {}".format(here, offset),
    +                            end=" ",
    +                        )
                         fp.seek(offset)
                         data = ImageFile._safe_read(fp, size)
                         fp.seek(here)
    @@ -742,9 +764,11 @@ class ImageFileDirectory_v2(MutableMapping):
                         data = data[:size]
     
                     if len(data) != size:
    -                    warnings.warn("Possibly corrupt EXIF data.  "
    -                                  "Expecting to read %d bytes but only got %d."
    -                                  " Skipping tag %s" % (size, len(data), tag))
    +                    warnings.warn(
    +                        "Possibly corrupt EXIF data.  "
    +                        "Expecting to read %d bytes but only got %d."
    +                        " Skipping tag %s" % (size, len(data), tag)
    +                    )
                         continue
     
                     if not data:
    @@ -759,22 +783,17 @@ class ImageFileDirectory_v2(MutableMapping):
                         else:
                             print("- value:", self[tag])
     
    -            self.next, = self._unpack("L", self._ensure_read(fp, 4))
    -        except IOError as msg:
    +            (self.next,) = self._unpack("L", self._ensure_read(fp, 4))
    +        except OSError as msg:
                 warnings.warn(str(msg))
                 return
     
    -    def save(self, fp):
    -
    -        if fp.tell() == 0:  # skip TIFF header on subsequent pages
    -            # tiff header -- PIL always starts the first IFD at offset 8
    -            fp.write(self._prefix + self._pack("HL", 42, 8))
    -
    +    def tobytes(self, offset=0):
             # FIXME What about tagdata?
    -        fp.write(self._pack("H", len(self._tags_v2)))
    +        result = self._pack("H", len(self._tags_v2))
     
             entries = []
    -        offset = fp.tell() + len(self._tags_v2) * 12 + 4
    +        offset = offset + len(result) + len(self._tags_v2) * 12 + 4
             stripoffsets = None
     
             # pass 1: convert tags to binary format
    @@ -784,54 +803,68 @@ class ImageFileDirectory_v2(MutableMapping):
                     stripoffsets = len(entries)
                 typ = self.tagtype.get(tag)
                 if DEBUG:
    -                print("Tag %s, Type: %s, Value: %s" % (tag, typ, value))
    +                print("Tag {}, Type: {}, Value: {}".format(tag, typ, value))
                 values = value if isinstance(value, tuple) else (value,)
                 data = self._write_dispatch[typ](self, *values)
                 if DEBUG:
                     tagname = TiffTags.lookup(tag).name
                     typname = TYPES.get(typ, "unknown")
    -                print("save: %s (%d) - type: %s (%d)" %
    -                      (tagname, tag, typname, typ), end=" ")
    +                print(
    +                    "save: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ),
    +                    end=" ",
    +                )
                     if len(data) >= 16:
                         print("- value: " % len(data))
                     else:
                         print("- value:", values)
     
                 # count is sum of lengths for string and arbitrary data
    -            count = len(data) if typ in [2, 7] else len(values)
    +            if typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
    +                count = len(data)
    +            else:
    +                count = len(values)
                 # figure out if data fits into the entry
                 if len(data) <= 4:
                     entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
                 else:
    -                entries.append((tag, typ, count, self._pack("L", offset),
    -                                data))
    +                entries.append((tag, typ, count, self._pack("L", offset), data))
                     offset += (len(data) + 1) // 2 * 2  # pad to word
     
             # update strip offset data to point beyond auxiliary data
             if stripoffsets is not None:
                 tag, typ, count, value, data = entries[stripoffsets]
                 if data:
    -                raise NotImplementedError(
    -                    "multistrip support not yet implemented")
    +                raise NotImplementedError("multistrip support not yet implemented")
                 value = self._pack("L", self._unpack("L", value)[0] + offset)
                 entries[stripoffsets] = tag, typ, count, value, data
     
             # pass 2: write entries to file
             for tag, typ, count, value, data in entries:
    -            if DEBUG > 1:
    +            if DEBUG:
                     print(tag, typ, count, repr(value), repr(data))
    -            fp.write(self._pack("HHL4s", tag, typ, count, value))
    +            result += self._pack("HHL4s", tag, typ, count, value)
     
             # -- overwrite here for multi-page --
    -        fp.write(b"\0\0\0\0")  # end of entries
    +        result += b"\0\0\0\0"  # end of entries
     
             # pass 3: write auxiliary data to file
             for tag, typ, count, value, data in entries:
    -            fp.write(data)
    +            result += data
                 if len(data) & 1:
    -                fp.write(b"\0")
    +                result += b"\0"
     
    -        return offset
    +        return result
    +
    +    def save(self, fp):
    +
    +        if fp.tell() == 0:  # skip TIFF header on subsequent pages
    +            # tiff header -- PIL always starts the first IFD at offset 8
    +            fp.write(self._prefix + self._pack("HL", 42, 8))
    +
    +        offset = fp.tell()
    +        result = self.tobytes(offset)
    +        fp.write(result)
    +        return offset + len(result)
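
# A sketch of the resulting call pattern, assuming a Pillow release that
# already contains this refactor: tobytes(offset) needs the absolute file
# position because out-of-line tag data (anything longer than 4 bytes) is
# referenced by absolute offsets inside the IFD.
import io
import struct
from PIL.TiffImagePlugin import ImageFileDirectory_v2

ifd = ImageFileDirectory_v2()
ifd[256] = 8  # ImageWidth
ifd[257] = 8  # ImageLength
fp = io.BytesIO()
fp.write(b"II" + struct.pack("<HL", 42, 8))  # header, first IFD at 8
offset = fp.tell()
fp.write(ifd.tobytes(offset))
print(len(fp.getvalue()))  # 8 header + 2 count + 2 * 12 entries + 4 next = 38
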
     
     
     ImageFileDirectory_v2._load_dispatch = _load_dispatch
    @@ -851,7 +884,7 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2):
     
             ifd = ImageFileDirectory_v1()
             ifd[key] = 'Some Data'
    -        ifd.tagtype[key] = 2
    +        ifd.tagtype[key] = TiffTags.ASCII
             print(ifd[key])
             ('Some Data',)
     
    @@ -862,8 +895,9 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2):
     
         ..  deprecated:: 3.0.0
         """
    +
         def __init__(self, *args, **kwargs):
    -        ImageFileDirectory_v2.__init__(self, *args, **kwargs)
    +        super().__init__(*args, **kwargs)
             self._legacy_api = True
     
         tags = property(lambda self: self._tags_v1)
    @@ -926,7 +960,7 @@ class ImageFileDirectory_v1(ImageFileDirectory_v2):
                     self._setitem(tag, handler(self, data, legacy), legacy)
             val = self._tags_v1[tag]
             if not isinstance(val, (tuple, bytes)):
    -            val = val,
    +            val = (val,)
             return val
     
     
    @@ -937,6 +971,7 @@ ImageFileDirectory = ImageFileDirectory_v1
     ##
     # Image plugin for TIFF files.
     
    +
     class TiffImageFile(ImageFile.ImageFile):
     
         format = "TIFF"
    @@ -944,7 +979,7 @@ class TiffImageFile(ImageFile.ImageFile):
         _close_exclusive_fp_after_loading = False
     
         def _open(self):
    -        "Open the first image in a TIFF file"
    +        """Open the first image in a TIFF file"""
     
             # Header
             ifh = self.fp.read(8)
    @@ -961,7 +996,6 @@ class TiffImageFile(ImageFile.ImageFile):
             self.__fp = self.fp
             self._frame_pos = []
             self._n_frames = None
    -        self._is_animated = None
     
             if DEBUG:
                 print("*** TiffImageFile._open ***")
    @@ -975,33 +1009,18 @@ class TiffImageFile(ImageFile.ImageFile):
         def n_frames(self):
             if self._n_frames is None:
                 current = self.tell()
    -            try:
    -                while True:
    -                    self._seek(self.tell() + 1)
    -            except EOFError:
    -                self._n_frames = self.tell() + 1
    +            self._seek(len(self._frame_pos))
    +            while self._n_frames is None:
    +                self._seek(self.tell() + 1)
                 self.seek(current)
             return self._n_frames
     
         @property
         def is_animated(self):
    -        if self._is_animated is None:
    -            if self._n_frames is not None:
    -                self._is_animated = self._n_frames != 1
    -            else:
    -                current = self.tell()
    -
    -                try:
    -                    self.seek(1)
    -                    self._is_animated = True
    -                except EOFError:
    -                    self._is_animated = False
    -
    -                self.seek(current)
             return self._is_animated
     
         def seek(self, frame):
    -        "Select a given frame as current image"
    +        """Select a given frame as current image"""
             if not self._seek_check(frame):
                 return
             self._seek(frame)
    @@ -1017,10 +1036,11 @@ class TiffImageFile(ImageFile.ImageFile):
                 if not self.__next:
                     raise EOFError("no more images in TIFF file")
                 if DEBUG:
    -                print("Seeking to frame %s, on frame %s, "
    -                      "__next %s, location: %s" %
    -                      (frame, self.__frame, self.__next, self.fp.tell()))
    -            # reset python3 buffered io handle in case fp
    +                print(
    +                    "Seeking to frame %s, on frame %s, __next %s, location: %s"
    +                    % (frame, self.__frame, self.__next, self.fp.tell())
    +                )
    +            # reset buffered io handle in case fp
                 # was passed to libtiff, invalidating the buffer
                 self.fp.tell()
                 self.fp.seek(self.__next)
    @@ -1029,42 +1049,45 @@ class TiffImageFile(ImageFile.ImageFile):
                     print("Loading tags, location: %s" % self.fp.tell())
                 self.tag_v2.load(self.fp)
                 self.__next = self.tag_v2.next
    +            if self.__next == 0:
    +                self._n_frames = frame + 1
    +            if len(self._frame_pos) == 1:
    +                self._is_animated = self.__next != 0
                 self.__frame += 1
             self.fp.seek(self._frame_pos[frame])
             self.tag_v2.load(self.fp)
    -        self.__next = self.tag_v2.next
             # fill the legacy tag/ifd entries
             self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
             self.__frame = frame
             self._setup()
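
# A standalone sketch (hypothetical data) of the frame bookkeeping above:
# frames form a linked list of IFD offsets, and both _n_frames and
# _is_animated fall out of the "next IFD" pointer, where 0 terminates the
# chain: no scan past the first frame is needed to answer is_animated.
def count_frames(next_pointers):
    frame = 0
    while next_pointers[frame] != 0:  # same role as self.__next above
        frame += 1
    return frame + 1

chain = [120, 260, 0]       # hypothetical per-frame next-IFD offsets
print(count_frames(chain))  # 3
print(chain[0] != 0)        # is_animated: the first frame has a successor
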
     
         def tell(self):
    -        "Return the current frame number"
    +        """Return the current frame number"""
             return self.__frame
     
    -    def _decoder(self, rawmode, layer, tile=None):
    -        "Setup decoder contexts"
    -
    -        args = None
    -        if rawmode == "RGB" and self._planar_configuration == 2:
    -            rawmode = rawmode[layer]
    -        compression = self._compression
    -        if compression == "raw":
    -            args = (rawmode, 0, 1)
    -        elif compression == "packbits":
    -            args = rawmode
    -
    -        return args
    -
         def load(self):
             if self.use_load_libtiff:
                 return self._load_libtiff()
    -        return super(TiffImageFile, self).load()
    +        return super().load()
     
         def load_end(self):
    +        if self._tile_orientation:
    +            method = {
    +                2: Image.FLIP_LEFT_RIGHT,
    +                3: Image.ROTATE_180,
    +                4: Image.FLIP_TOP_BOTTOM,
    +                5: Image.TRANSPOSE,
    +                6: Image.ROTATE_270,
    +                7: Image.TRANSVERSE,
    +                8: Image.ROTATE_90,
    +            }.get(self._tile_orientation)
    +            if method is not None:
    +                self.im = self.im.transpose(method)
    +                self._size = self.im.size
    +
             # allow closing if we're on the first frame and there's no next
             # This is the ImageFile.load path only, libtiff specific below.
    -        if self.__frame == 0 and not self.__next:
    +        if not self._is_animated:
                 self._close_exclusive_fp_after_loading = True
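
# A sketch of the orientation fix-up added to load_end() above: tag 0x0112
# (EXIF Orientation) selects a transpose method, and value 1 (or a missing
# tag) means the raster is already upright.
from PIL import Image

def apply_orientation(im, orientation):
    method = {
        2: Image.FLIP_LEFT_RIGHT,
        3: Image.ROTATE_180,
        4: Image.FLIP_TOP_BOTTOM,
        5: Image.TRANSPOSE,
        6: Image.ROTATE_270,
        7: Image.TRANSVERSE,
        8: Image.ROTATE_90,
    }.get(orientation)
    return im.transpose(method) if method is not None else im

im = Image.new("RGB", (4, 2))
print(apply_orientation(im, 6).size)  # (2, 4): rotation swaps the axes
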
     
         def _load_libtiff(self):
    @@ -1074,19 +1097,19 @@ class TiffImageFile(ImageFile.ImageFile):
             pixel = Image.Image.load(self)
     
             if self.tile is None:
    -            raise IOError("cannot load this image")
    +            raise OSError("cannot load this image")
             if not self.tile:
                 return pixel
     
             self.load_prepare()
     
             if not len(self.tile) == 1:
    -            raise IOError("Not exactly one tile")
    +            raise OSError("Not exactly one tile")
     
             # (self._compression, (extents tuple),
             #   0, (rawmode, self._compression, fp))
             extents = self.tile[0][1]
    -        args = list(self.tile[0][3]) + [self.tag_v2.offset]
    +        args = list(self.tile[0][3])
     
             # To be nice on memory footprint, if there's a
             # file descriptor, use that instead of reading
    @@ -1095,11 +1118,11 @@ class TiffImageFile(ImageFile.ImageFile):
             try:
                 fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno())
                 # flush the file descriptor, prevents error on pypy 2.4+
    -            # should also eliminate the need for fp.tell for py3
    +            # should also eliminate the need for fp.tell
                 # in _seek
                 if hasattr(self.fp, "flush"):
                     self.fp.flush()
    -        except IOError:
    +        except OSError:
                 # io.BytesIO have a fileno, but returns an IOError if
                 # it doesn't use a file descriptor.
                 fp = False
    @@ -1107,13 +1130,15 @@ class TiffImageFile(ImageFile.ImageFile):
             if fp:
                 args[2] = fp
     
    -        decoder = Image._getdecoder(self.mode, 'libtiff', tuple(args),
    -                                    self.decoderconfig)
    +        decoder = Image._getdecoder(
    +            self.mode, "libtiff", tuple(args), self.decoderconfig
    +        )
             try:
                 decoder.setimage(self.im, extents)
             except ValueError:
    -            raise IOError("Couldn't set the image")
    +            raise OSError("Couldn't set the image")
     
    +        close_self_fp = self._exclusive_fp and not self._is_animated
             if hasattr(self.fp, "getvalue"):
                 # We've got a stringio like thing passed in. Yay for all in memory.
                 # The decoder needs the entire file in one shot, so there's not
    @@ -1127,38 +1152,42 @@ class TiffImageFile(ImageFile.ImageFile):
                 if DEBUG:
                     print("have getvalue. just sending in a string from getvalue")
                 n, err = decoder.decode(self.fp.getvalue())
    -        elif hasattr(self.fp, "fileno"):
    +        elif fp:
            # we've got an actual file on disk, pass in the fp.
                 if DEBUG:
                     print("have fileno, calling fileno version of the decoder.")
    -            self.fp.seek(0)
    +            if not close_self_fp:
    +                self.fp.seek(0)
                 # 4 bytes, otherwise the trace might error out
                 n, err = decoder.decode(b"fpfp")
             else:
                 # we have something else.
                 if DEBUG:
                     print("don't have fileno or getvalue. just reading")
    +            self.fp.seek(0)
                 # UNDONE -- so much for that buffer size thing.
                 n, err = decoder.decode(self.fp.read())
     
             self.tile = []
             self.readonly = 0
    +
    +        self.load_end()
    +
            # libtiff closed the fp passed to it in the args tuple; we still
            # need to close self.fp, if possible
    -        if self._exclusive_fp:
    -            if self.__frame == 0 and not self.__next:
    -                self.fp.close()
    -                self.fp = None  # might be shared
    +        if close_self_fp:
    +            self.fp.close()
    +            self.fp = None  # might be shared
     
             if err < 0:
    -            raise IOError(err)
    +            raise OSError(err)
     
             return Image.Image.load(self)
     
         def _setup(self):
    -        "Setup this image object based on current tags"
    +        """Setup this image object based on current tags"""
     
             if 0xBC01 in self.tag_v2:
    -            raise IOError("Windows Media Photo files not yet supported")
    +            raise OSError("Windows Media Photo files not yet supported")
     
             # extract relevant tags
             self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
    @@ -1168,6 +1197,10 @@ class TiffImageFile(ImageFile.ImageFile):
             # the specification
             photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)
     
    +        # old style jpeg compression images most certainly are YCbCr
    +        if self._compression == "tiff_jpeg":
    +            photo = 6
    +
             fillorder = self.tag_v2.get(FILLORDER, 1)
     
             if DEBUG:
    @@ -1176,18 +1209,18 @@ class TiffImageFile(ImageFile.ImageFile):
                 print("- photometric_interpretation:", photo)
                 print("- planar_configuration:", self._planar_configuration)
                 print("- fill_order:", fillorder)
    +            print("- YCbCr subsampling:", self.tag.get(530))
     
             # size
    -        xsize = self.tag_v2.get(IMAGEWIDTH)
    -        ysize = self.tag_v2.get(IMAGELENGTH)
    -        self.size = xsize, ysize
    +        xsize = int(self.tag_v2.get(IMAGEWIDTH))
    +        ysize = int(self.tag_v2.get(IMAGELENGTH))
    +        self._size = xsize, ysize
     
             if DEBUG:
                 print("- size:", self.size)
     
             sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,))
    -        if (len(sampleFormat) > 1
    -           and max(sampleFormat) == min(sampleFormat) == 1):
    +        if len(sampleFormat) > 1 and max(sampleFormat) == min(sampleFormat) == 1:
                 # SAMPLEFORMAT is properly per band, so an RGB image will
                 # be (1,1,1).  But, we don't support per band pixel types,
                 # and anything more than one band is a uint8. So, just
    @@ -1210,8 +1243,14 @@ class TiffImageFile(ImageFile.ImageFile):
                 bps_tuple = bps_tuple * bps_count
     
             # mode: check photometric interpretation and bits per pixel
    -        key = (self.tag_v2.prefix, photo, sampleFormat, fillorder,
    -               bps_tuple, extra_tuple)
    +        key = (
    +            self.tag_v2.prefix,
    +            photo,
    +            sampleFormat,
    +            fillorder,
    +            bps_tuple,
    +            extra_tuple,
    +        )
             if DEBUG:
                 print("format key:", key)
             try:
    @@ -1233,11 +1272,11 @@ class TiffImageFile(ImageFile.ImageFile):
             if xres and yres:
                 resunit = self.tag_v2.get(RESOLUTION_UNIT)
                 if resunit == 2:  # dots per inch
    -                self.info["dpi"] = xres, yres
    +                self.info["dpi"] = int(xres + 0.5), int(yres + 0.5)
                 elif resunit == 3:  # dots per centimeter. convert to dpi
    -                self.info["dpi"] = xres * 2.54, yres * 2.54
    +                self.info["dpi"] = int(xres * 2.54 + 0.5), int(yres * 2.54 + 0.5)
                 elif resunit is None:  # used to default to 1, but now 2)
    -                self.info["dpi"] = xres, yres
    +                self.info["dpi"] = int(xres + 0.5), int(yres + 0.5)
                     # For backward compatibility,
                     # we also preserve the old behavior
                     self.info["resolution"] = xres, yres
    @@ -1245,99 +1284,87 @@ class TiffImageFile(ImageFile.ImageFile):
                     self.info["resolution"] = xres, yres
     
             # build tile descriptors
    -        x = y = l = 0
    +        x = y = layer = 0
             self.tile = []
    -        self.use_load_libtiff = False
    -        if STRIPOFFSETS in self.tag_v2:
    +        self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
    +        if self.use_load_libtiff:
    +            # Decoder expects entire file as one tile.
    +            # There's a buffer size limit in load (64k)
    +            # so large g4 images will fail if we use that
    +            # function.
    +            #
    +            # Setup the one tile for the whole image, then
    +            # use the _load_libtiff function.
    +
    +            # libtiff handles the fillmode for us, so 1;IR should
    +            # actually be 1;I. Including the R double reverses the
    +            # bits, so stripes of the image are reversed.  See
    +            # https://github.com/python-pillow/Pillow/issues/279
    +            if fillorder == 2:
    +                # Replace fillorder with fillorder=1
    +                key = key[:3] + (1,) + key[4:]
    +                if DEBUG:
    +                    print("format key:", key)
    +                # this should always work, since all the
    +                # fillorder==2 modes have a corresponding
    +                # fillorder=1 mode
    +                self.mode, rawmode = OPEN_INFO[key]
    +            # libtiff always returns the bytes in native order.
    +            # we're expecting image byte order. So, if the rawmode
    +            # contains I;16, we need to convert from native to image
    +            # byte order.
    +            if rawmode == "I;16":
    +                rawmode = "I;16N"
    +            if ";16B" in rawmode:
    +                rawmode = rawmode.replace(";16B", ";16N")
    +            if ";16L" in rawmode:
    +                rawmode = rawmode.replace(";16L", ";16N")
    +
    +            # Offset in the tile tuple is 0, we go from 0,0 to
    +            # w,h, and we only do this once -- eds
    +            a = (rawmode, self._compression, False, self.tag_v2.offset)
    +            self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a))
    +
    +        elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
                 # striped image
    -            offsets = self.tag_v2[STRIPOFFSETS]
    -            h = self.tag_v2.get(ROWSPERSTRIP, ysize)
    -            w = self.size[0]
    -            if READ_LIBTIFF or self._compression != 'raw':
    -                # if DEBUG:
    -                #     print("Activating g4 compression for whole file")
    -
    -                # Decoder expects entire file as one tile.
    -                # There's a buffer size limit in load (64k)
    -                # so large g4 images will fail if we use that
    -                # function.
    -                #
    -                # Setup the one tile for the whole image, then
    -                # use the _load_libtiff function.
    -
    -                self.use_load_libtiff = True
    -
    -                # libtiff handles the fillmode for us, so 1;IR should
    -                # actually be 1;I. Including the R double reverses the
    -                # bits, so stripes of the image are reversed.  See
    -                # https://github.com/python-pillow/Pillow/issues/279
    -                if fillorder == 2:
    -                    key = (
    -                        self.tag_v2.prefix, photo, sampleFormat, 1,
    -                        self.tag_v2.get(BITSPERSAMPLE, (1,)),
    -                        self.tag_v2.get(EXTRASAMPLES, ())
    -                        )
    -                    if DEBUG:
    -                        print("format key:", key)
    -                    # this should always work, since all the
    -                    # fillorder==2 modes have a corresponding
    -                    # fillorder=1 mode
    -                    self.mode, rawmode = OPEN_INFO[key]
    -                # libtiff always returns the bytes in native order.
    -                # we're expecting image byte order. So, if the rawmode
    -                # contains I;16, we need to convert from native to image
    -                # byte order.
    -                if rawmode == 'I;16':
    -                    rawmode = 'I;16N'
    -                if ';16B' in rawmode:
    -                    rawmode = rawmode.replace(';16B', ';16N')
    -                if ';16L' in rawmode:
    -                    rawmode = rawmode.replace(';16L', ';16N')
    -
    -                # Offset in the tile tuple is 0, we go from 0,0 to
    -                # w,h, and we only do this once -- eds
    -                a = (rawmode, self._compression, False)
    -                self.tile.append(
    -                    (self._compression,
    -                     (0, 0, w, ysize),
    -                     0, a))
    -                a = None
    -
    +            if STRIPOFFSETS in self.tag_v2:
    +                offsets = self.tag_v2[STRIPOFFSETS]
    +                h = self.tag_v2.get(ROWSPERSTRIP, ysize)
    +                w = self.size[0]
                 else:
    -                for i, offset in enumerate(offsets):
    -                    a = self._decoder(rawmode, l, i)
    -                    self.tile.append(
    -                        (self._compression,
    -                            (0, min(y, ysize), w, min(y+h, ysize)),
    -                            offset, a))
    -                    if DEBUG:
    -                        print("tiles: ", self.tile)
    -                    y = y + h
    -                    if y >= self.size[1]:
    -                        x = y = 0
    -                        l += 1
    -                    a = None
    -        elif TILEOFFSETS in self.tag_v2:
    -            # tiled image
    -            w = self.tag_v2.get(322)
    -            h = self.tag_v2.get(323)
    -            a = None
    -            for o in self.tag_v2[TILEOFFSETS]:
    -                if not a:
    -                    a = self._decoder(rawmode, l)
    -                # FIXME: this doesn't work if the image size
    -                # is not a multiple of the tile size...
    +                # tiled image
    +                offsets = self.tag_v2[TILEOFFSETS]
    +                w = self.tag_v2.get(322)
    +                h = self.tag_v2.get(323)
    +
    +            for offset in offsets:
    +                if x + w > xsize:
    +                    stride = w * sum(bps_tuple) / 8  # bytes per line
    +                else:
    +                    stride = 0
    +
    +                tile_rawmode = rawmode
    +                if self._planar_configuration == 2:
    +                # each band on its own layer
    +                    tile_rawmode = rawmode[layer]
    +                    # adjust stride width accordingly
    +                    stride /= bps_count
    +
    +                a = (tile_rawmode, int(stride), 1)
                     self.tile.append(
    -                    (self._compression,
    -                        (x, y, x+w, y+h),
    -                        o, a))
    +                    (
    +                        self._compression,
    +                        (x, y, min(x + w, xsize), min(y + h, ysize)),
    +                        offset,
    +                        a,
    +                    )
    +                )
                     x = x + w
                     if x >= self.size[0]:
                         x, y = 0, y + h
                         if y >= self.size[1]:
                             x = y = 0
    -                        l += 1
    -                        a = None
    +                        layer += 1
             else:
                 if DEBUG:
                     print("- unsupported data organization")
    @@ -1345,14 +1372,25 @@ class TiffImageFile(ImageFile.ImageFile):
     
             # Fix up info.
             if ICCPROFILE in self.tag_v2:
    -            self.info['icc_profile'] = self.tag_v2[ICCPROFILE]
    +            self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
     
             # fixup palette descriptor
     
    -        if self.mode == "P":
    +        if self.mode in ["P", "PA"]:
                 palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
                 self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
     
    +        self._tile_orientation = self.tag_v2.get(0x0112)
    +
    +    def _close__fp(self):
    +        try:
    +            if self.__fp != self.fp:
    +                self.__fp.close()
    +        except AttributeError:
    +            pass
    +        finally:
    +            self.__fp = None
    +
     
     #
     # --------------------------------------------------------------------
    @@ -1379,7 +1417,6 @@ SAVE_INFO = {
         "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
         "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
         "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
    -
         "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
         "I;16B": ("I;16B", MM, 1, 1, (16,), None),
         "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
    @@ -1392,17 +1429,18 @@ def _save(im, fp, filename):
         try:
             rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
         except KeyError:
    -        raise IOError("cannot write mode %s as TIFF" % im.mode)
    +        raise OSError("cannot write mode %s as TIFF" % im.mode)
     
         ifd = ImageFileDirectory_v2(prefix=prefix)
     
    -    compression = im.encoderinfo.get('compression',
    -                                     im.info.get('compression', 'raw'))
    +    compression = im.encoderinfo.get("compression", im.info.get("compression"))
    +    if compression is None:
    +        compression = "raw"
     
    -    libtiff = WRITE_LIBTIFF or compression != 'raw'
    +    libtiff = WRITE_LIBTIFF or compression != "raw"
     
         # required for color libtiff images
    -    ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1)
    +    ifd[PLANAR_CONFIGURATION] = getattr(im, "_planar_configuration", 1)
     
         ifd[IMAGEWIDTH] = im.size[0]
         ifd[IMAGELENGTH] = im.size[1]
    @@ -1417,15 +1455,21 @@ def _save(im, fp, filename):
             ifd[key] = info.get(key)
             try:
                 ifd.tagtype[key] = info.tagtype[key]
    -        except:
    -            pass  # might not be an IFD, Might not have populated type
    +        except Exception:
    +            pass  # might not be an IFD. Might not have populated type
     
         # additions written by Greg Couch, gregc@cgl.ucsf.edu
         # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
    -    if hasattr(im, 'tag_v2'):
    +    if hasattr(im, "tag_v2"):
             # preserve tags from original TIFF image file
    -        for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION,
    -                    IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP):
    +        for key in (
    +            RESOLUTION_UNIT,
    +            X_RESOLUTION,
    +            Y_RESOLUTION,
    +            IPTC_NAA_CHUNK,
    +            PHOTOSHOP_CHUNK,
    +            XMP,
    +        ):
                 if key in im.tag_v2:
                     ifd[key] = im.tag_v2[key]
                     ifd.tagtype[key] = im.tag_v2.tagtype[key]
    @@ -1435,24 +1479,26 @@ def _save(im, fp, filename):
         if "icc_profile" in im.info:
             ifd[ICCPROFILE] = im.info["icc_profile"]
     
    -    for key, name in [(IMAGEDESCRIPTION, "description"),
    -                      (X_RESOLUTION, "resolution"),
    -                      (Y_RESOLUTION, "resolution"),
    -                      (X_RESOLUTION, "x_resolution"),
    -                      (Y_RESOLUTION, "y_resolution"),
    -                      (RESOLUTION_UNIT, "resolution_unit"),
    -                      (SOFTWARE, "software"),
    -                      (DATE_TIME, "date_time"),
    -                      (ARTIST, "artist"),
    -                      (COPYRIGHT, "copyright")]:
    +    for key, name in [
    +        (IMAGEDESCRIPTION, "description"),
    +        (X_RESOLUTION, "resolution"),
    +        (Y_RESOLUTION, "resolution"),
    +        (X_RESOLUTION, "x_resolution"),
    +        (Y_RESOLUTION, "y_resolution"),
    +        (RESOLUTION_UNIT, "resolution_unit"),
    +        (SOFTWARE, "software"),
    +        (DATE_TIME, "date_time"),
    +        (ARTIST, "artist"),
    +        (COPYRIGHT, "copyright"),
    +    ]:
             if name in im.encoderinfo:
                 ifd[key] = im.encoderinfo[name]
     
         dpi = im.encoderinfo.get("dpi")
         if dpi:
             ifd[RESOLUTION_UNIT] = 2
    -        ifd[X_RESOLUTION] = dpi[0]
    -        ifd[Y_RESOLUTION] = dpi[1]
    +        ifd[X_RESOLUTION] = int(dpi[0] + 0.5)
    +        ifd[Y_RESOLUTION] = int(dpi[1] + 0.5)
     
         if bits != (1,):
             ifd[BITSPERSAMPLE] = bits
    @@ -1465,11 +1511,11 @@ def _save(im, fp, filename):
     
         ifd[PHOTOMETRIC_INTERPRETATION] = photo
     
    -    if im.mode == "P":
    +    if im.mode in ["P", "PA"]:
             lut = im.im.getpalette("RGB", "RGB;L")
             ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut)
         # data orientation
    -    stride = len(bits) * ((im.size[0]*bits[0]+7)//8)
    +    stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8)
         ifd[ROWSPERSTRIP] = im.size[1]
         ifd[STRIPBYTECOUNTS] = stride * im.size[1]
         ifd[STRIPOFFSETS] = 0  # this is adjusted by IFD writer
    @@ -1477,6 +1523,16 @@ def _save(im, fp, filename):
         ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)
     
         if libtiff:
    +        if "quality" in im.encoderinfo:
    +            quality = im.encoderinfo["quality"]
    +            if not isinstance(quality, int) or quality < 0 or quality > 100:
    +                raise ValueError("Invalid quality setting")
    +            if compression != "jpeg":
    +                raise ValueError(
    +                    "quality setting only supported for 'jpeg' compression"
    +                )
    +            ifd[JPEGQUALITY] = quality
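
# Hypothetical usage of the validation above, assuming a Pillow build whose
# libtiff includes the JPEG codec: quality is only accepted together with
# compression="jpeg".
import io
from PIL import Image

buf = io.BytesIO()
Image.new("RGB", (8, 8)).save(buf, format="TIFF", compression="jpeg", quality=85)
# Any other compression with a quality setting raises ValueError:
# Image.new("RGB", (8, 8)).save(buf, format="TIFF", compression="tiff_lzw", quality=85)
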
    +
             if DEBUG:
                 print("Saving using libtiff encoder")
                 print("Items: %s" % sorted(ifd.items()))
    @@ -1488,9 +1544,24 @@ def _save(im, fp, filename):
                 except io.UnsupportedOperation:
                     pass
     
    +        # optional types for non core tags
    +        types = {}
    +        # SAMPLEFORMAT is determined by the image format and should not be copied
    +        # from legacy_ifd.
             # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
             # based on the data in the strip.
    -        blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS]
    +        # The other tags expect arrays with a certain length (fixed or depending
    +        # on BITSPERSAMPLE, etc.); passing arrays with a different length will
    +        # result in segfaults. Block these tags until we add extra validation.
    +        blocklist = [
    +            COLORMAP,
    +            REFERENCEBLACKWHITE,
    +            SAMPLEFORMAT,
    +            STRIPBYTECOUNTS,
    +            STRIPOFFSETS,
    +            TRANSFERFUNCTION,
    +        ]
    +
             atts = {}
             # bits per sample is a single short in the tiff directory, not a list.
             atts[BITSPERSAMPLE] = bits[0]
    @@ -1498,21 +1569,29 @@ def _save(im, fp, filename):
             # the original file, e.g x,y resolution so that we can
             # save(load('')) == original file.
             legacy_ifd = {}
    -        if hasattr(im, 'tag'):
    +        if hasattr(im, "tag"):
                 legacy_ifd = im.tag.to_v2()
    -        for tag, value in itertools.chain(ifd.items(),
    -                                          getattr(im, 'tag_v2', {}).items(),
    -                                          legacy_ifd.items()):
    +        for tag, value in itertools.chain(
    +            ifd.items(), getattr(im, "tag_v2", {}).items(), legacy_ifd.items()
    +        ):
                 # Libtiff can only process certain core items without adding
    -            # them to the custom dictionary. It will segfault if it attempts
    -            # to add a custom tag without the dictionary entry
    -            #
    -            # UNDONE --  add code for the custom dictionary
    +            # them to the custom dictionary.
    +            # Custom items are supported for int, float, unicode, string and byte
    +            # values. Other types and tuples require a tagtype.
                 if tag not in TiffTags.LIBTIFF_CORE:
    -                continue
    +                if (
    +                    TiffTags.lookup(tag).type == TiffTags.UNDEFINED
    +                    or not Image.core.libtiff_support_custom_tags
    +                ):
    +                    continue
    +
    +                if tag in ifd.tagtype:
    +                    types[tag] = ifd.tagtype[tag]
    +                elif not (isinstance(value, (int, float, str, bytes))):
    +                    continue
                 if tag not in atts and tag not in blocklist:
    -                if isinstance(value, str if py3 else unicode):
    -                    atts[tag] = value.encode('ascii', 'replace') + b"\0"
    +                if isinstance(value, str):
    +                    atts[tag] = value.encode("ascii", "replace") + b"\0"
                     elif isinstance(value, IFDRational):
                         atts[tag] = float(value)
                     else:
    @@ -1525,29 +1604,33 @@ def _save(im, fp, filename):
             # we're storing image byte order. So, if the rawmode
             # contains I;16, we need to convert from native to image
             # byte order.
    -        if im.mode in ('I;16B', 'I;16'):
    -            rawmode = 'I;16N'
    +        if im.mode in ("I;16B", "I;16"):
    +            rawmode = "I;16N"
     
    -        a = (rawmode, compression, _fp, filename, atts)
    -        # print(im.mode, compression, a, im.encoderconfig)
    -        e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig)
    -        e.setimage(im.im, (0, 0)+im.size)
    +        # Pass tags as sorted list so that the tags are set in a fixed order.
    +        # This is required by libtiff for some tags. For example, the JPEGQUALITY
    +        # pseudo tag requires that the COMPRESS tag was already set.
    +        tags = list(atts.items())
    +        tags.sort()
    +        a = (rawmode, compression, _fp, filename, tags, types)
    +        e = Image._getencoder(im.mode, "libtiff", a, im.encoderconfig)
    +        e.setimage(im.im, (0, 0) + im.size)
             while True:
                 # undone, change to self.decodermaxblock:
    -            l, s, d = e.encode(16*1024)
    +            l, s, d = e.encode(16 * 1024)
                 if not _fp:
                     fp.write(d)
                 if s:
                     break
             if s < 0:
    -            raise IOError("encoder error %d when writing image file" % s)
    +            raise OSError("encoder error %d when writing image file" % s)
     
         else:
             offset = ifd.save(fp)
     
    -        ImageFile._save(im, fp, [
    -            ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1))
    -            ])
    +        ImageFile._save(
    +            im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))]
    +        )
     
         # -- helper for multi-page save --
         if "_debug_multipage" in im.encoderinfo:
    @@ -1581,16 +1664,16 @@ class AppendingTiffWriter:
         Tags = {273, 288, 324, 519, 520, 521}
     
         def __init__(self, fn, new=False):
    -        if hasattr(fn, 'read'):
    +        if hasattr(fn, "read"):
                 self.f = fn
                 self.close_fp = False
             else:
                 self.name = fn
                 self.close_fp = True
                 try:
    -                self.f = io.open(fn, "w+b" if new else "r+b")
    -            except IOError:
    -                self.f = io.open(fn, "w+b")
    +                self.f = open(fn, "w+b" if new else "r+b")
    +            except OSError:
    +                self.f = open(fn, "w+b")
             self.beginning = self.f.tell()
             self.setup()
     
    @@ -1632,8 +1715,7 @@ class AppendingTiffWriter:
                 return
     
             if IIMM != self.IIMM:
    -            raise RuntimeError("IIMM of new page doesn't match IIMM of "
    -                               "first page")
    +            raise RuntimeError("IIMM of new page doesn't match IIMM of first page")
     
             IFDoffset = self.readLong()
             IFDoffset += self.offsetOfNewPage
    @@ -1658,7 +1740,7 @@ class AppendingTiffWriter:
         def tell(self):
             return self.f.tell() - self.offsetOfNewPage
     
    -    def seek(self, offset, whence):
    +    def seek(self, offset, whence=io.SEEK_SET):
             if whence == os.SEEK_SET:
                 offset += self.offsetOfNewPage
     
    @@ -1672,7 +1754,7 @@ class AppendingTiffWriter:
             # pad to 16 byte boundary
             padBytes = 16 - pos % 16
             if 0 < padBytes < 16:
    -            self.f.write(bytes(bytearray(padBytes)))
    +            self.f.write(bytes(padBytes))
             self.offsetOfNewPage = self.f.tell()
     
         def setEndian(self, endian):
    @@ -1696,45 +1778,40 @@ class AppendingTiffWriter:
             return self.f.write(data)
     
         def readShort(self):
    -        value, = struct.unpack(self.shortFmt, self.f.read(2))
    +        (value,) = struct.unpack(self.shortFmt, self.f.read(2))
             return value
     
         def readLong(self):
    -        value, = struct.unpack(self.longFmt, self.f.read(4))
    +        (value,) = struct.unpack(self.longFmt, self.f.read(4))
             return value
     
         def rewriteLastShortToLong(self, value):
             self.f.seek(-2, os.SEEK_CUR)
             bytesWritten = self.f.write(struct.pack(self.longFmt, value))
             if bytesWritten is not None and bytesWritten != 4:
    -            raise RuntimeError("wrote only %u bytes but wanted 4" %
    -                               bytesWritten)
    +            raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
     
         def rewriteLastShort(self, value):
             self.f.seek(-2, os.SEEK_CUR)
             bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
             if bytesWritten is not None and bytesWritten != 2:
    -            raise RuntimeError("wrote only %u bytes but wanted 2" %
    -                               bytesWritten)
    +            raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten)
     
         def rewriteLastLong(self, value):
             self.f.seek(-4, os.SEEK_CUR)
             bytesWritten = self.f.write(struct.pack(self.longFmt, value))
             if bytesWritten is not None and bytesWritten != 4:
    -            raise RuntimeError("wrote only %u bytes but wanted 4" %
    -                               bytesWritten)
    +            raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
     
         def writeShort(self, value):
             bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
             if bytesWritten is not None and bytesWritten != 2:
    -            raise RuntimeError("wrote only %u bytes but wanted 2" %
    -                               bytesWritten)
    +            raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten)
     
         def writeLong(self, value):
             bytesWritten = self.f.write(struct.pack(self.longFmt, value))
             if bytesWritten is not None and bytesWritten != 4:
    -            raise RuntimeError("wrote only %u bytes but wanted 4" %
    -                               bytesWritten)
    +            raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
     
         def close(self):
             self.finalize()
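For reference, readShort/readLong above depend on endian-specific struct formats chosen by setEndian; a minimal sketch of the assumed format strings ("<" for little-endian II files, ">" for big-endian MM files):

```python
import struct

# Assumed formats: "<H"/"<L" for II (little-endian), ">H"/">L" for MM.
shortFmt, longFmt = "<H", "<L"
(value,) = struct.unpack(shortFmt, b"\x2a\x00")
assert value == 42  # the TIFF magic number, read as a little-endian short
```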
    @@ -1744,12 +1821,11 @@ class AppendingTiffWriter:
             numTags = self.readShort()
     
             for i in range(numTags):
    -            tag, fieldType, count = struct.unpack(self.tagFormat,
    -                                                  self.f.read(8))
    +            tag, fieldType, count = struct.unpack(self.tagFormat, self.f.read(8))
     
                 fieldSize = self.fieldSizes[fieldType]
                 totalSize = fieldSize * count
    -            isLocal = (totalSize <= 4)
    +            isLocal = totalSize <= 4
                 if not isLocal:
                     offset = self.readLong()
                     offset += self.offsetOfNewPage
    @@ -1759,13 +1835,15 @@ class AppendingTiffWriter:
                     curPos = self.f.tell()
     
                     if isLocal:
    -                    self.fixOffsets(count, isShort=(fieldSize == 2),
    -                                    isLong=(fieldSize == 4))
    +                    self.fixOffsets(
    +                        count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
    +                    )
                         self.f.seek(curPos + 4)
                     else:
                         self.f.seek(offset)
    -                    self.fixOffsets(count, isShort=(fieldSize == 2),
    -                                    isLong=(fieldSize == 4))
    +                    self.fixOffsets(
    +                        count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
    +                    )
                         self.f.seek(curPos)
     
                     offset = curPos = None
    @@ -1790,7 +1868,7 @@ class AppendingTiffWriter:
                     # local (not referenced with another offset)
                     self.rewriteLastShortToLong(offset)
                     self.f.seek(-10, os.SEEK_CUR)
    -                self.writeShort(4)  # rewrite the type to LONG
    +                self.writeShort(TiffTags.LONG)  # rewrite the type to LONG
                     self.f.seek(8, os.SEEK_CUR)
                 elif isShort:
                     self.rewriteLastShort(offset)
    @@ -1808,7 +1886,7 @@ def _save_all(im, fp, filename):
         cur_idx = im.tell()
         try:
             with AppendingTiffWriter(fp) as tf:
    -            for ims in [im]+append_images:
    +            for ims in [im] + append_images:
                     ims.encoderinfo = encoderinfo
                     ims.encoderconfig = encoderconfig
                     if not hasattr(ims, "n_frames"):
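Usage sketch for the multi-page path exercised by _save_all and AppendingTiffWriter above; the output file name is hypothetical:

```python
from PIL import Image

# Each appended image becomes one page; AppendingTiffWriter rewrites
# the IFD offsets as pages are appended.
frames = [Image.new("RGB", (64, 64), c) for c in ("red", "green", "blue")]
frames[0].save("pages.tiff", save_all=True, append_images=frames[1:])
```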
    diff --git a/server/www/packages/packages-windows/x86/PIL/TiffTags.py b/server/www/packages/packages-windows/x86/PIL/TiffTags.py
    index 427f3a4..6cc9ff7 100644
    --- a/server/www/packages/packages-windows/x86/PIL/TiffTags.py
    +++ b/server/www/packages/packages-windows/x86/PIL/TiffTags.py
    @@ -24,11 +24,13 @@ class TagInfo(namedtuple("_TagInfo", "value name type length enum")):
         __slots__ = []
     
         def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None):
    -        return super(TagInfo, cls).__new__(
    -            cls, value, name, type, length, enum or {})
    +        return super().__new__(cls, value, name, type, length, enum or {})
     
         def cvt_enum(self, value):
    -        return self.enum.get(value, value)
    +        # Using get will call hash(value), which can be expensive
    +        # for some types (e.g. Fraction). Since self.enum is rarely
    +        # used, it's usually better to test it first.
    +        return self.enum.get(value, value) if self.enum else value
     
     
     def lookup(tag):
    @@ -40,7 +42,7 @@ def lookup(tag):
     
         """
     
    -    return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown')))
    +    return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, "unknown")))
     
     
     ##
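A quick check of the fallback kept by the change above: lookup still synthesizes a TagInfo from the legacy TAGS names when a tag is missing from TAGS_V2:

```python
from PIL import TiffTags

print(TiffTags.lookup(256).name)    # ImageWidth (regular TAGS_V2 entry)
print(TiffTags.lookup(33434).name)  # ExposureTime (legacy TAGS fallback)
```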
    @@ -60,32 +62,56 @@ ASCII = 2
     SHORT = 3
     LONG = 4
     RATIONAL = 5
    +SIGNED_BYTE = 6
     UNDEFINED = 7
    +SIGNED_SHORT = 8
    +SIGNED_LONG = 9
     SIGNED_RATIONAL = 10
    +FLOAT = 11
     DOUBLE = 12
     
     TAGS_V2 = {
    -
         254: ("NewSubfileType", LONG, 1),
         255: ("SubfileType", SHORT, 1),
         256: ("ImageWidth", LONG, 1),
         257: ("ImageLength", LONG, 1),
         258: ("BitsPerSample", SHORT, 0),
    -    259: ("Compression", SHORT, 1,
    -          {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4,
    -           "LZW": 5, "JPEG": 6, "PackBits": 32773}),
    -
    -    262: ("PhotometricInterpretation", SHORT, 1,
    -          {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3,
    -           "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8,
    -           "CFA": 32803,  # TIFF/EP, Adobe DNG
    -           "LinearRaw": 32892}),  # Adobe DNG
    +    259: (
    +        "Compression",
    +        SHORT,
    +        1,
    +        {
    +            "Uncompressed": 1,
    +            "CCITT 1d": 2,
    +            "Group 3 Fax": 3,
    +            "Group 4 Fax": 4,
    +            "LZW": 5,
    +            "JPEG": 6,
    +            "PackBits": 32773,
    +        },
    +    ),
    +    262: (
    +        "PhotometricInterpretation",
    +        SHORT,
    +        1,
    +        {
    +            "WhiteIsZero": 0,
    +            "BlackIsZero": 1,
    +            "RGB": 2,
    +            "RGB Palette": 3,
    +            "Transparency Mask": 4,
    +            "CMYK": 5,
    +            "YCbCr": 6,
    +            "CieLAB": 8,
    +            "CFA": 32803,  # TIFF/EP, Adobe DNG
    +            "LinearRaw": 32892,  # Adobe DNG
    +        },
    +    ),
         263: ("Threshholding", SHORT, 1),
         264: ("CellWidth", SHORT, 1),
         265: ("CellLength", SHORT, 1),
         266: ("FillOrder", SHORT, 1),
         269: ("DocumentName", ASCII, 1),
    -
         270: ("ImageDescription", ASCII, 1),
         271: ("Make", ASCII, 1),
         272: ("Model", ASCII, 1),
    @@ -94,8 +120,7 @@ TAGS_V2 = {
         277: ("SamplesPerPixel", SHORT, 1),
         278: ("RowsPerStrip", LONG, 1),
         279: ("StripByteCounts", LONG, 0),
    -
    -    280: ("MinSampleValue", LONG, 0),
    +    280: ("MinSampleValue", SHORT, 0),
         281: ("MaxSampleValue", SHORT, 0),
         282: ("XResolution", RATIONAL, 1),
         283: ("YResolution", RATIONAL, 1),
    @@ -105,31 +130,26 @@ TAGS_V2 = {
         287: ("YPosition", RATIONAL, 1),
         288: ("FreeOffsets", LONG, 1),
         289: ("FreeByteCounts", LONG, 1),
    -
         290: ("GrayResponseUnit", SHORT, 1),
         291: ("GrayResponseCurve", SHORT, 0),
         292: ("T4Options", LONG, 1),
         293: ("T6Options", LONG, 1),
         296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}),
         297: ("PageNumber", SHORT, 2),
    -
         301: ("TransferFunction", SHORT, 0),
         305: ("Software", ASCII, 1),
         306: ("DateTime", ASCII, 1),
    -
         315: ("Artist", ASCII, 1),
         316: ("HostComputer", ASCII, 1),
         317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}),
         318: ("WhitePoint", RATIONAL, 2),
    -    319: ("PrimaryChromaticities", SHORT, 6),
    -
    +    319: ("PrimaryChromaticities", RATIONAL, 6),
         320: ("ColorMap", SHORT, 0),
         321: ("HalftoneHints", SHORT, 2),
         322: ("TileWidth", LONG, 1),
         323: ("TileLength", LONG, 1),
         324: ("TileOffsets", LONG, 0),
         325: ("TileByteCounts", LONG, 0),
    -
         332: ("InkSet", SHORT, 1),
         333: ("InkNames", ASCII, 1),
         334: ("NumberOfInks", SHORT, 1),
    @@ -137,13 +157,10 @@ TAGS_V2 = {
         337: ("TargetPrinter", ASCII, 1),
         338: ("ExtraSamples", SHORT, 0),
         339: ("SampleFormat", SHORT, 0),
    -
         340: ("SMinSampleValue", DOUBLE, 0),
         341: ("SMaxSampleValue", DOUBLE, 0),
         342: ("TransferRange", SHORT, 6),
    -
         347: ("JPEGTables", UNDEFINED, 1),
    -
         # obsolete JPEG tags
         512: ("JPEGProc", SHORT, 1),
         513: ("JPEGInterchangeFormat", LONG, 1),
    @@ -154,22 +171,18 @@ TAGS_V2 = {
         519: ("JPEGQTables", LONG, 0),
         520: ("JPEGDCTables", LONG, 0),
         521: ("JPEGACTables", LONG, 0),
    -
         529: ("YCbCrCoefficients", RATIONAL, 3),
         530: ("YCbCrSubSampling", SHORT, 2),
         531: ("YCbCrPositioning", SHORT, 1),
    -    532: ("ReferenceBlackWhite", LONG, 0),
    -
    -    700: ('XMP', BYTE, 1),
    -
    +    532: ("ReferenceBlackWhite", RATIONAL, 6),
    +    700: ("XMP", BYTE, 0),
         33432: ("Copyright", ASCII, 1),
    -    34377: ('PhotoshopInfo', BYTE, 1),
    -
    +    33723: ("IptcNaaInfo", UNDEFINED, 0),
    +    34377: ("PhotoshopInfo", BYTE, 0),
         # FIXME add more tags here
    -    34665: ("ExifIFD", SHORT, 1),
    -    34675: ('ICCProfile', UNDEFINED, 1),
    -    34853: ('GPSInfoIFD', BYTE, 1),
    -
    +    34665: ("ExifIFD", LONG, 1),
    +    34675: ("ICCProfile", UNDEFINED, 1),
    +    34853: ("GPSInfoIFD", LONG, 1),
         # MPInfo
         45056: ("MPFVersion", UNDEFINED, 1),
         45057: ("NumberOfImages", LONG, 1),
    @@ -190,159 +203,157 @@ TAGS_V2 = {
         45579: ("YawAngle", SIGNED_RATIONAL, 1),
         45580: ("PitchAngle", SIGNED_RATIONAL, 1),
         45581: ("RollAngle", SIGNED_RATIONAL, 1),
    -
         50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}),
         50780: ("BestQualityScale", RATIONAL, 1),
         50838: ("ImageJMetaDataByteCounts", LONG, 0),  # Can be more than one
    -    50839: ("ImageJMetaData", UNDEFINED, 1)        # see Issue #2006
    +    50839: ("ImageJMetaData", UNDEFINED, 1),  # see Issue #2006
     }
     
     # Legacy Tags structure
     # these tags aren't included above, but were in the previous versions
    -TAGS = {347: 'JPEGTables',
    -        700: 'XMP',
    -
    -        # Additional Exif Info
    -        32932: 'Wang Annotation',
    -        33434: 'ExposureTime',
    -        33437: 'FNumber',
    -        33445: 'MD FileTag',
    -        33446: 'MD ScalePixel',
    -        33447: 'MD ColorTable',
    -        33448: 'MD LabName',
    -        33449: 'MD SampleInfo',
    -        33450: 'MD PrepDate',
    -        33451: 'MD PrepTime',
    -        33452: 'MD FileUnits',
    -        33550: 'ModelPixelScaleTag',
    -        33723: 'IptcNaaInfo',
    -        33918: 'INGR Packet Data Tag',
    -        33919: 'INGR Flag Registers',
    -        33920: 'IrasB Transformation Matrix',
    -        33922: 'ModelTiepointTag',
    -        34264: 'ModelTransformationTag',
    -        34377: 'PhotoshopInfo',
    -        34735: 'GeoKeyDirectoryTag',
    -        34736: 'GeoDoubleParamsTag',
    -        34737: 'GeoAsciiParamsTag',
    -        34850: 'ExposureProgram',
    -        34852: 'SpectralSensitivity',
    -        34855: 'ISOSpeedRatings',
    -        34856: 'OECF',
    -        34864: 'SensitivityType',
    -        34865: 'StandardOutputSensitivity',
    -        34866: 'RecommendedExposureIndex',
    -        34867: 'ISOSpeed',
    -        34868: 'ISOSpeedLatitudeyyy',
    -        34869: 'ISOSpeedLatitudezzz',
    -        34908: 'HylaFAX FaxRecvParams',
    -        34909: 'HylaFAX FaxSubAddress',
    -        34910: 'HylaFAX FaxRecvTime',
    -        36864: 'ExifVersion',
    -        36867: 'DateTimeOriginal',
    -        36868: 'DateTImeDigitized',
    -        37121: 'ComponentsConfiguration',
    -        37122: 'CompressedBitsPerPixel',
    -        37724: 'ImageSourceData',
    -        37377: 'ShutterSpeedValue',
    -        37378: 'ApertureValue',
    -        37379: 'BrightnessValue',
    -        37380: 'ExposureBiasValue',
    -        37381: 'MaxApertureValue',
    -        37382: 'SubjectDistance',
    -        37383: 'MeteringMode',
    -        37384: 'LightSource',
    -        37385: 'Flash',
    -        37386: 'FocalLength',
    -        37396: 'SubjectArea',
    -        37500: 'MakerNote',
    -        37510: 'UserComment',
    -        37520: 'SubSec',
    -        37521: 'SubSecTimeOriginal',
    -        37522: 'SubsecTimeDigitized',
    -        40960: 'FlashPixVersion',
    -        40961: 'ColorSpace',
    -        40962: 'PixelXDimension',
    -        40963: 'PixelYDimension',
    -        40964: 'RelatedSoundFile',
    -        40965: 'InteroperabilityIFD',
    -        41483: 'FlashEnergy',
    -        41484: 'SpatialFrequencyResponse',
    -        41486: 'FocalPlaneXResolution',
    -        41487: 'FocalPlaneYResolution',
    -        41488: 'FocalPlaneResolutionUnit',
    -        41492: 'SubjectLocation',
    -        41493: 'ExposureIndex',
    -        41495: 'SensingMethod',
    -        41728: 'FileSource',
    -        41729: 'SceneType',
    -        41730: 'CFAPattern',
    -        41985: 'CustomRendered',
    -        41986: 'ExposureMode',
    -        41987: 'WhiteBalance',
    -        41988: 'DigitalZoomRatio',
    -        41989: 'FocalLengthIn35mmFilm',
    -        41990: 'SceneCaptureType',
    -        41991: 'GainControl',
    -        41992: 'Contrast',
    -        41993: 'Saturation',
    -        41994: 'Sharpness',
    -        41995: 'DeviceSettingDescription',
    -        41996: 'SubjectDistanceRange',
    -        42016: 'ImageUniqueID',
    -        42032: 'CameraOwnerName',
    -        42033: 'BodySerialNumber',
    -        42034: 'LensSpecification',
    -        42035: 'LensMake',
    -        42036: 'LensModel',
    -        42037: 'LensSerialNumber',
    -        42112: 'GDAL_METADATA',
    -        42113: 'GDAL_NODATA',
    -        42240: 'Gamma',
    -        50215: 'Oce Scanjob Description',
    -        50216: 'Oce Application Selector',
    -        50217: 'Oce Identification Number',
    -        50218: 'Oce ImageLogic Characteristics',
    -
    -        # Adobe DNG
    -        50706: 'DNGVersion',
    -        50707: 'DNGBackwardVersion',
    -        50708: 'UniqueCameraModel',
    -        50709: 'LocalizedCameraModel',
    -        50710: 'CFAPlaneColor',
    -        50711: 'CFALayout',
    -        50712: 'LinearizationTable',
    -        50713: 'BlackLevelRepeatDim',
    -        50714: 'BlackLevel',
    -        50715: 'BlackLevelDeltaH',
    -        50716: 'BlackLevelDeltaV',
    -        50717: 'WhiteLevel',
    -        50718: 'DefaultScale',
    -        50719: 'DefaultCropOrigin',
    -        50720: 'DefaultCropSize',
    -        50721: 'ColorMatrix1',
    -        50722: 'ColorMatrix2',
    -        50723: 'CameraCalibration1',
    -        50724: 'CameraCalibration2',
    -        50725: 'ReductionMatrix1',
    -        50726: 'ReductionMatrix2',
    -        50727: 'AnalogBalance',
    -        50728: 'AsShotNeutral',
    -        50729: 'AsShotWhiteXY',
    -        50730: 'BaselineExposure',
    -        50731: 'BaselineNoise',
    -        50732: 'BaselineSharpness',
    -        50733: 'BayerGreenSplit',
    -        50734: 'LinearResponseLimit',
    -        50735: 'CameraSerialNumber',
    -        50736: 'LensInfo',
    -        50737: 'ChromaBlurRadius',
    -        50738: 'AntiAliasStrength',
    -        50740: 'DNGPrivateData',
    -        50778: 'CalibrationIlluminant1',
    -        50779: 'CalibrationIlluminant2',
    -        50784: 'Alias Layer Metadata'
    -        }
    +TAGS = {
    +    347: "JPEGTables",
    +    700: "XMP",
    +    # Additional Exif Info
    +    32932: "Wang Annotation",
    +    33434: "ExposureTime",
    +    33437: "FNumber",
    +    33445: "MD FileTag",
    +    33446: "MD ScalePixel",
    +    33447: "MD ColorTable",
    +    33448: "MD LabName",
    +    33449: "MD SampleInfo",
    +    33450: "MD PrepDate",
    +    33451: "MD PrepTime",
    +    33452: "MD FileUnits",
    +    33550: "ModelPixelScaleTag",
    +    33723: "IptcNaaInfo",
    +    33918: "INGR Packet Data Tag",
    +    33919: "INGR Flag Registers",
    +    33920: "IrasB Transformation Matrix",
    +    33922: "ModelTiepointTag",
    +    34264: "ModelTransformationTag",
    +    34377: "PhotoshopInfo",
    +    34735: "GeoKeyDirectoryTag",
    +    34736: "GeoDoubleParamsTag",
    +    34737: "GeoAsciiParamsTag",
    +    34850: "ExposureProgram",
    +    34852: "SpectralSensitivity",
    +    34855: "ISOSpeedRatings",
    +    34856: "OECF",
    +    34864: "SensitivityType",
    +    34865: "StandardOutputSensitivity",
    +    34866: "RecommendedExposureIndex",
    +    34867: "ISOSpeed",
    +    34868: "ISOSpeedLatitudeyyy",
    +    34869: "ISOSpeedLatitudezzz",
    +    34908: "HylaFAX FaxRecvParams",
    +    34909: "HylaFAX FaxSubAddress",
    +    34910: "HylaFAX FaxRecvTime",
    +    36864: "ExifVersion",
    +    36867: "DateTimeOriginal",
    +    36868: "DateTImeDigitized",
    +    37121: "ComponentsConfiguration",
    +    37122: "CompressedBitsPerPixel",
    +    37724: "ImageSourceData",
    +    37377: "ShutterSpeedValue",
    +    37378: "ApertureValue",
    +    37379: "BrightnessValue",
    +    37380: "ExposureBiasValue",
    +    37381: "MaxApertureValue",
    +    37382: "SubjectDistance",
    +    37383: "MeteringMode",
    +    37384: "LightSource",
    +    37385: "Flash",
    +    37386: "FocalLength",
    +    37396: "SubjectArea",
    +    37500: "MakerNote",
    +    37510: "UserComment",
    +    37520: "SubSec",
    +    37521: "SubSecTimeOriginal",
    +    37522: "SubsecTimeDigitized",
    +    40960: "FlashPixVersion",
    +    40961: "ColorSpace",
    +    40962: "PixelXDimension",
    +    40963: "PixelYDimension",
    +    40964: "RelatedSoundFile",
    +    40965: "InteroperabilityIFD",
    +    41483: "FlashEnergy",
    +    41484: "SpatialFrequencyResponse",
    +    41486: "FocalPlaneXResolution",
    +    41487: "FocalPlaneYResolution",
    +    41488: "FocalPlaneResolutionUnit",
    +    41492: "SubjectLocation",
    +    41493: "ExposureIndex",
    +    41495: "SensingMethod",
    +    41728: "FileSource",
    +    41729: "SceneType",
    +    41730: "CFAPattern",
    +    41985: "CustomRendered",
    +    41986: "ExposureMode",
    +    41987: "WhiteBalance",
    +    41988: "DigitalZoomRatio",
    +    41989: "FocalLengthIn35mmFilm",
    +    41990: "SceneCaptureType",
    +    41991: "GainControl",
    +    41992: "Contrast",
    +    41993: "Saturation",
    +    41994: "Sharpness",
    +    41995: "DeviceSettingDescription",
    +    41996: "SubjectDistanceRange",
    +    42016: "ImageUniqueID",
    +    42032: "CameraOwnerName",
    +    42033: "BodySerialNumber",
    +    42034: "LensSpecification",
    +    42035: "LensMake",
    +    42036: "LensModel",
    +    42037: "LensSerialNumber",
    +    42112: "GDAL_METADATA",
    +    42113: "GDAL_NODATA",
    +    42240: "Gamma",
    +    50215: "Oce Scanjob Description",
    +    50216: "Oce Application Selector",
    +    50217: "Oce Identification Number",
    +    50218: "Oce ImageLogic Characteristics",
    +    # Adobe DNG
    +    50706: "DNGVersion",
    +    50707: "DNGBackwardVersion",
    +    50708: "UniqueCameraModel",
    +    50709: "LocalizedCameraModel",
    +    50710: "CFAPlaneColor",
    +    50711: "CFALayout",
    +    50712: "LinearizationTable",
    +    50713: "BlackLevelRepeatDim",
    +    50714: "BlackLevel",
    +    50715: "BlackLevelDeltaH",
    +    50716: "BlackLevelDeltaV",
    +    50717: "WhiteLevel",
    +    50718: "DefaultScale",
    +    50719: "DefaultCropOrigin",
    +    50720: "DefaultCropSize",
    +    50721: "ColorMatrix1",
    +    50722: "ColorMatrix2",
    +    50723: "CameraCalibration1",
    +    50724: "CameraCalibration2",
    +    50725: "ReductionMatrix1",
    +    50726: "ReductionMatrix2",
    +    50727: "AnalogBalance",
    +    50728: "AsShotNeutral",
    +    50729: "AsShotWhiteXY",
    +    50730: "BaselineExposure",
    +    50731: "BaselineNoise",
    +    50732: "BaselineSharpness",
    +    50733: "BayerGreenSplit",
    +    50734: "LinearResponseLimit",
    +    50735: "CameraSerialNumber",
    +    50736: "LensInfo",
    +    50737: "ChromaBlurRadius",
    +    50738: "AntiAliasStrength",
    +    50740: "DNGPrivateData",
    +    50778: "CalibrationIlluminant1",
    +    50779: "CalibrationIlluminant2",
    +    50784: "Alias Layer Metadata",
    +}
     
     
     def _populate():
    @@ -422,22 +433,62 @@ TYPES = {}
     # 389: case TIFFTAG_REFERENCEBLACKWHITE:
     # 393: case TIFFTAG_INKNAMES:
     
    +# Following pseudo-tags are also handled by default in libtiff:
    +# TIFFTAG_JPEGQUALITY 65537
    +
     # some of these are not in our TAGS_V2 dict and were included from tiff.h
     
    -LIBTIFF_CORE = {255, 256, 257, 258, 259, 262, 263, 266, 274, 277,
    -                278, 280, 281, 340, 341, 282, 283, 284, 286, 287,
    -                296, 297, 321, 320, 338, 32995, 322, 323, 32998,
    -                32996, 339, 32997, 330, 531, 530, 301, 532, 333,
    -                # as above
    -                269  # this has been in our tests forever, and works
    -                }
    +# This list also exists in encode.c
    +LIBTIFF_CORE = {
    +    255,
    +    256,
    +    257,
    +    258,
    +    259,
    +    262,
    +    263,
    +    266,
    +    274,
    +    277,
    +    278,
    +    280,
    +    281,
    +    340,
    +    341,
    +    282,
    +    283,
    +    284,
    +    286,
    +    287,
    +    296,
    +    297,
    +    321,
    +    320,
    +    338,
    +    32995,
    +    322,
    +    323,
    +    32998,
    +    32996,
    +    339,
    +    32997,
    +    330,
    +    531,
    +    530,
    +    301,
    +    532,
    +    333,
    +    # as above
    +    269,  # this has been in our tests forever, and works
    +    65537,
    +}
     
     LIBTIFF_CORE.remove(320)  # Array of short, crashes
     LIBTIFF_CORE.remove(301)  # Array of short, crashes
     LIBTIFF_CORE.remove(532)  # Array of long, crashes
     
     LIBTIFF_CORE.remove(255)  # We don't have support for subfiletypes
    -LIBTIFF_CORE.remove(322)  # We don't have support for tiled images in libtiff
    +LIBTIFF_CORE.remove(322)  # We don't have support for writing tiled images with libtiff
     LIBTIFF_CORE.remove(323)  # Tiled images
     LIBTIFF_CORE.remove(333)  # Ink Names either
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/WalImageFile.py b/server/www/packages/packages-windows/x86/PIL/WalImageFile.py
    index 6602cc8..d5a5c8e 100644
    --- a/server/www/packages/packages-windows/x86/PIL/WalImageFile.py
    +++ b/server/www/packages/packages-windows/x86/PIL/WalImageFile.py
    @@ -1,4 +1,3 @@
    -# encoding: utf-8
     #
     # The Python Imaging Library.
     # $Id$
    @@ -21,15 +20,11 @@
     #    https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml
     # and has been tested with a few sample files found using google.
     
    +import builtins
    +
     from . import Image
     from ._binary import i32le as i32
     
    -try:
    -    import builtins
    -except ImportError:
    -    import __builtin__
    -    builtins = __builtin__
    -
     
     def open(filename):
         """
    @@ -46,7 +41,7 @@ def open(filename):
     
         def imopen(fp):
             # read header fields
    -        header = fp.read(32+24+32+12)
    +        header = fp.read(32 + 24 + 32 + 12)
             size = i32(header, 32), i32(header, 36)
             offset = i32(header, 40)
     
    @@ -62,7 +57,7 @@ def open(filename):
     
             # strings are null-terminated
             im.info["name"] = header[:32].split(b"\0", 1)[0]
    -        next_name = header[56:56+32].split(b"\0", 1)[0]
    +        next_name = header[56 : 56 + 32].split(b"\0", 1)[0]
             if next_name:
                 im.info["next_name"] = next_name
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py
    index 39a8f2e..eda6855 100644
    --- a/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py
    @@ -1,22 +1,24 @@
    -from . import Image, ImageFile, _webp
     from io import BytesIO
     
    +from . import Image, ImageFile
     
    -_VALID_WEBP_MODES = {
    -    "RGBX": True,
    -    "RGBA": True,
    -    }
    +try:
    +    from . import _webp
     
    -_VALID_WEBP_LEGACY_MODES = {
    -    "RGB": True,
    -    "RGBA": True,
    -    }
    +    SUPPORTED = True
    +except ImportError:
    +    SUPPORTED = False
    +
    +
    +_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True}
    +
    +_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True}
     
     _VP8_MODES_BY_IDENTIFIER = {
         b"VP8 ": "RGB",
         b"VP8X": "RGBA",
         b"VP8L": "RGBA",  # lossless
    -    }
    +}
     
     
     def _accept(prefix):
    @@ -24,7 +26,12 @@ def _accept(prefix):
         is_webp_file = prefix[8:12] == b"WEBP"
         is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER
     
    -    return is_riff_file_format and is_webp_file and is_valid_vp8_mode
    +    if is_riff_file_format and is_webp_file and is_valid_vp8_mode:
    +        if not SUPPORTED:
    +            return (
    +                "image file could not be identified because WEBP support not installed"
    +            )
    +        return True
     
     
     class WebPImageFile(ImageFile.ImageFile):
    @@ -35,13 +42,14 @@ class WebPImageFile(ImageFile.ImageFile):
         def _open(self):
             if not _webp.HAVE_WEBPANIM:
                 # Legacy mode
    -            data, width, height, self.mode, icc_profile, exif = \
    -                _webp.WebPDecode(self.fp.read())
    +            data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode(
    +                self.fp.read()
    +            )
                 if icc_profile:
                     self.info["icc_profile"] = icc_profile
                 if exif:
                     self.info["exif"] = exif
    -            self.size = width, height
    +            self._size = width, height
                 self.fp = BytesIO(data)
                 self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
                 self._n_frames = 1
    @@ -52,18 +60,19 @@ class WebPImageFile(ImageFile.ImageFile):
             self._decoder = _webp.WebPAnimDecoder(self.fp.read())
     
             # Get info from decoder
    -        width, height, loop_count, bgcolor, frame_count, mode = \
    -            self._decoder.get_info()
    -        self.size = width, height
    +        width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info()
    +        self._size = width, height
             self.info["loop"] = loop_count
    -        bg_a, bg_r, bg_g, bg_b = \
    -            (bgcolor >> 24) & 0xFF, \
    -            (bgcolor >> 16) & 0xFF, \
    -            (bgcolor >> 8) & 0xFF, \
    -            bgcolor & 0xFF
    +        bg_a, bg_r, bg_g, bg_b = (
    +            (bgcolor >> 24) & 0xFF,
    +            (bgcolor >> 16) & 0xFF,
    +            (bgcolor >> 8) & 0xFF,
    +            bgcolor & 0xFF,
    +        )
             self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
             self._n_frames = frame_count
    -        self.mode = mode
    +        self.mode = "RGB" if mode == "RGBX" else mode
    +        self.rawmode = mode
             self.tile = []
     
             # Attempt to read ICC / EXIF / XMP chunks from file
    @@ -82,8 +91,9 @@ class WebPImageFile(ImageFile.ImageFile):
             self.seek(0)
     
         def _getexif(self):
    -        from .JpegImagePlugin import _getexif
    -        return _getexif(self)
    +        if "exif" not in self.info:
    +            return None
    +        return dict(self.getexif())
     
         @property
         def n_frames(self):
    @@ -95,7 +105,7 @@ class WebPImageFile(ImageFile.ImageFile):
     
         def seek(self, frame):
             if not _webp.HAVE_WEBPANIM:
    -            return super(WebPImageFile, self).seek(frame)
    +            return super().seek(frame)
     
             # Perform some simple checks first
             if frame >= self._n_frames:
    @@ -120,7 +130,7 @@ class WebPImageFile(ImageFile.ImageFile):
     
             # Check if an error occurred
             if ret is None:
    -            self._reset()   # Reset just to be safe
    +            self._reset()  # Reset just to be safe
                 self.seek(0)
                 raise EOFError("failed to decode next frame in WebP file")
     
    @@ -135,11 +145,11 @@ class WebPImageFile(ImageFile.ImageFile):
     
         def _seek(self, frame):
             if self.__physical_frame == frame:
    -            return              # Nothing to do
    +            return  # Nothing to do
             if frame < self.__physical_frame:
    -            self._reset()       # Rewind to beginning
    +            self._reset()  # Rewind to beginning
             while self.__physical_frame < frame:
    -            self._get_next()    # Advance to the requested frame
    +            self._get_next()  # Advance to the requested frame
     
         def load(self):
             if _webp.HAVE_WEBPANIM:
    @@ -153,14 +163,16 @@ class WebPImageFile(ImageFile.ImageFile):
                     self.__loaded = self.__logical_frame
     
                     # Set tile
    +                if self.fp and self._exclusive_fp:
    +                    self.fp.close()
                     self.fp = BytesIO(data)
    -                self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
    +                self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)]
     
    -        return super(WebPImageFile, self).load()
    +        return super().load()
     
         def tell(self):
             if not _webp.HAVE_WEBPANIM:
    -            return super(WebPImageFile, self).tell()
    +            return super().tell()
     
             return self.__logical_frame
     
    @@ -172,13 +184,25 @@ def _save_all(im, fp, filename):
         # If total frame count is 1, then save using the legacy API, which
         # will preserve non-alpha modes
         total = 0
    -    for ims in [im]+append_images:
    -        total += 1 if not hasattr(ims, "n_frames") else ims.n_frames
    +    for ims in [im] + append_images:
    +        total += getattr(ims, "n_frames", 1)
         if total == 1:
             _save(im, fp, filename)
             return
     
    -    background = encoderinfo.get("background", (0, 0, 0, 0))
    +    background = (0, 0, 0, 0)
    +    if "background" in encoderinfo:
    +        background = encoderinfo["background"]
    +    elif "background" in im.info:
    +        background = im.info["background"]
    +        if isinstance(background, int):
    +            # GifImagePlugin stores a global color table index in
    +            # info["background"]. So it must be converted to an RGBA value
    +            palette = im.getpalette()
    +            if palette:
    +                r, g, b = palette[background * 3 : (background + 1) * 3]
    +                background = (r, g, b, 0)
    +
         duration = im.encoderinfo.get("duration", 0)
         loop = im.encoderinfo.get("loop", 0)
         minimize_size = im.encoderinfo.get("minimize_size", False)
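The background handling added above maps a GIF global-color-table index to RGBA by slicing the flat palette list; a minimal sketch with invented palette values:

```python
# palette is flat [r0, g0, b0, r1, g1, b1, ...]; index 1 occupies
# bytes 3..6, and alpha is forced to 0, as in the code above.
palette = [0, 0, 0, 255, 128, 64]
background = 1
r, g, b = palette[background * 3 : (background + 1) * 3]
assert (r, g, b, 0) == (255, 128, 64, 0)
```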
    @@ -191,6 +215,8 @@ def _save_all(im, fp, filename):
         method = im.encoderinfo.get("method", 0)
         icc_profile = im.encoderinfo.get("icc_profile", "")
         exif = im.encoderinfo.get("exif", "")
    +    if isinstance(exif, Image.Exif):
    +        exif = exif.tobytes()
         xmp = im.encoderinfo.get("xmp", "")
         if allow_mixed:
             lossless = False
    @@ -202,10 +228,15 @@ def _save_all(im, fp, filename):
             kmax = 17 if lossless else 5
     
         # Validate background color
    -    if (not isinstance(background, (list, tuple)) or len(background) != 4 or
    -            not all(v >= 0 and v < 256 for v in background)):
    -        raise IOError("Background color is not an RGBA tuple clamped "
    -                      "to (0-255): %s" % str(background))
    +    if (
    +        not isinstance(background, (list, tuple))
    +        or len(background) != 4
    +        or not all(v >= 0 and v < 256 for v in background)
    +    ):
    +        raise OSError(
    +            "Background color is not an RGBA tuple clamped to (0-255): %s"
    +            % str(background)
    +        )
     
         # Convert to packed uint
         bg_r, bg_g, bg_b, bg_a = background
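The "Convert to packed uint" step folds that RGBA tuple into one 32-bit integer; a sketch of the layout, inferred from the matching unpack shifts in _open above:

```python
bg_r, bg_g, bg_b, bg_a = 255, 128, 64, 0
packed = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | bg_b
assert packed == 0x00FF8040  # A=0x00, R=0xFF, G=0x80, B=0x40
```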
    @@ -213,13 +244,15 @@ def _save_all(im, fp, filename):
     
         # Setup the WebP animation encoder
         enc = _webp.WebPAnimEncoder(
    -        im.size[0], im.size[1],
    +        im.size[0],
    +        im.size[1],
             background,
             loop,
             minimize_size,
    -        kmin, kmax,
    +        kmin,
    +        kmax,
             allow_mixed,
    -        verbose
    +        verbose,
         )
     
         # Add each frame
    @@ -227,12 +260,9 @@ def _save_all(im, fp, filename):
         timestamp = 0
         cur_idx = im.tell()
         try:
    -        for ims in [im]+append_images:
    +        for ims in [im] + append_images:
                 # Get # of frames in this image
    -            if not hasattr(ims, "n_frames"):
    -                nfr = 1
    -            else:
    -                nfr = ims.n_frames
    +            nfr = getattr(ims, "n_frames", 1)
     
                 for idx in range(nfr):
                     ims.seek(idx)
    @@ -240,19 +270,30 @@ def _save_all(im, fp, filename):
     
                     # Make sure image mode is supported
                     frame = ims
    +                rawmode = ims.mode
                     if ims.mode not in _VALID_WEBP_MODES:
    -                    alpha = ims.mode == 'P' and 'A' in ims.im.getpalettemode()
    -                    frame = ims.convert('RGBA' if alpha else 'RGBX')
    +                    alpha = (
    +                        "A" in ims.mode
    +                        or "a" in ims.mode
    +                        or (ims.mode == "P" and "A" in ims.im.getpalettemode())
    +                    )
    +                    rawmode = "RGBA" if alpha else "RGB"
    +                    frame = ims.convert(rawmode)
    +
    +                if rawmode == "RGB":
    +                    # For faster conversion, use RGBX
    +                    rawmode = "RGBX"
     
                     # Append the frame to the animation encoder
                     enc.add(
    -                    frame.tobytes(),
    +                    frame.tobytes("raw", rawmode),
                         timestamp,
    -                    frame.size[0], frame.size[1],
    -                    frame.mode,
    +                    frame.size[0],
    +                    frame.size[1],
    +                    rawmode,
                         lossless,
                         quality,
    -                    method
    +                    method,
                     )
     
                     # Update timestamp and frame index
    @@ -266,16 +307,12 @@ def _save_all(im, fp, filename):
             im.seek(cur_idx)
     
         # Force encoder to flush frames
    -    enc.add(
    -        None,
    -        timestamp,
    -        0, 0, "", lossless, quality, 0
    -    )
    +    enc.add(None, timestamp, 0, 0, "", lossless, quality, 0)
     
         # Get the final output from the encoder
         data = enc.assemble(icc_profile, exif, xmp)
         if data is None:
    -        raise IOError("cannot write file as WebP (encoder returned None)")
    +        raise OSError("cannot write file as WebP (encoder returned None)")
     
         fp.write(data)
     
    @@ -285,11 +322,17 @@ def _save(im, fp, filename):
         quality = im.encoderinfo.get("quality", 80)
         icc_profile = im.encoderinfo.get("icc_profile", "")
         exif = im.encoderinfo.get("exif", "")
    +    if isinstance(exif, Image.Exif):
    +        exif = exif.tobytes()
         xmp = im.encoderinfo.get("xmp", "")
     
         if im.mode not in _VALID_WEBP_LEGACY_MODES:
    -        alpha = im.mode == 'P' and 'A' in im.im.getpalettemode()
    -        im = im.convert('RGBA' if alpha else 'RGB')
    +        alpha = (
    +            "A" in im.mode
    +            or "a" in im.mode
    +            or (im.mode == "P" and "A" in im.im.getpalettemode())
    +        )
    +        im = im.convert("RGBA" if alpha else "RGB")
     
         data = _webp.WebPEncode(
             im.tobytes(),
    @@ -300,17 +343,18 @@ def _save(im, fp, filename):
             im.mode,
             icc_profile,
             exif,
    -        xmp
    +        xmp,
         )
         if data is None:
    -        raise IOError("cannot write file as WebP (encoder returned None)")
    +        raise OSError("cannot write file as WebP (encoder returned None)")
     
         fp.write(data)
     
     
     Image.register_open(WebPImageFile.format, WebPImageFile, _accept)
    -Image.register_save(WebPImageFile.format, _save)
    -if _webp.HAVE_WEBPANIM:
    -    Image.register_save_all(WebPImageFile.format, _save_all)
    -Image.register_extension(WebPImageFile.format, ".webp")
    -Image.register_mime(WebPImageFile.format, "image/webp")
    +if SUPPORTED:
    +    Image.register_save(WebPImageFile.format, _save)
    +    if _webp.HAVE_WEBPANIM:
    +        Image.register_save_all(WebPImageFile.format, _save_all)
    +    Image.register_extension(WebPImageFile.format, ".webp")
    +    Image.register_mime(WebPImageFile.format, "image/webp")
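Usage sketch for the animated save path gated above; it is only meaningful when the _webp extension reports animation support (output file hypothetical):

```python
from PIL import Image, features

if features.check("webp_anim"):
    frames = [Image.new("RGBA", (16, 16), c) for c in ("red", "blue")]
    frames[0].save("anim.webp", save_all=True, append_images=frames[1:],
                   duration=100, loop=0)
```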
    diff --git a/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py
    index 2135844..024222c 100644
    --- a/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py
    @@ -19,20 +19,11 @@
     # http://wvware.sourceforge.net/caolan/index.html
     # http://wvware.sourceforge.net/caolan/ora-wmf.html
     
    -from __future__ import print_function
    -
     from . import Image, ImageFile
    -from ._binary import i16le as word, si16le as short, i32le as dword, si32le as _long
    -from ._util import py3
    -
    -
    -__version__ = "0.2"
    +from ._binary import i16le as word, i32le as dword, si16le as short, si32le as _long
     
     _handler = None
     
    -if py3:
    -    long = int
    -
     
     def register_handler(handler):
         """
    @@ -47,8 +38,7 @@ def register_handler(handler):
     if hasattr(Image.core, "drawwmf"):
         # install default handler (windows only)
     
    -    class WmfHandler(object):
    -
    +    class WmfHandler:
             def open(self, im):
                 im.mode = "RGB"
                 self.bbox = im.info["wmf_bbox"]
    @@ -56,10 +46,14 @@ if hasattr(Image.core, "drawwmf"):
             def load(self, im):
                 im.fp.seek(0)  # rewind
                 return Image.frombytes(
    -                "RGB", im.size,
    +                "RGB",
    +                im.size,
                     Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
    -                "raw", "BGR", (im.size[0]*3 + 3) & -4, -1
    -                )
    +                "raw",
    +                "BGR",
    +                (im.size[0] * 3 + 3) & -4,
    +                -1,
    +            )
     
         register_handler(WmfHandler())
     
    @@ -70,20 +64,21 @@ if hasattr(Image.core, "drawwmf"):
     
     def _accept(prefix):
         return (
    -        prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or
    -        prefix[:4] == b"\x01\x00\x00\x00"
    -        )
    +        prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00"
    +    )
     
     
     ##
     # Image plugin for Windows metafiles.
     
    +
     class WmfStubImageFile(ImageFile.StubImageFile):
     
         format = "WMF"
         format_description = "Windows Metafile"
     
         def _open(self):
    +        self._inch = None
     
             # check placable header
             s = self.fp.read(80)
    @@ -93,7 +88,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
                 # placeable windows metafile
     
                 # get units per inch
    -            inch = word(s, 14)
    +            self._inch = word(s, 14)
     
                 # get bounding box
                 x0 = short(s, 6)
    @@ -102,14 +97,14 @@ class WmfStubImageFile(ImageFile.StubImageFile):
                 y1 = short(s, 12)
     
                 # normalize size to 72 dots per inch
    -            size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch
    +            self.info["dpi"] = 72
    +            size = (
    +                (x1 - x0) * self.info["dpi"] // self._inch,
    +                (y1 - y0) * self.info["dpi"] // self._inch,
    +            )
     
                 self.info["wmf_bbox"] = x0, y0, x1, y1
     
    -            self.info["dpi"] = 72
    -
    -            # print(self.mode, self.size, self.info)
    -
                 # sanity check (standard metafile header)
                 if s[22:26] != b"\x01\x00\t\x00":
                     raise SyntaxError("Unsupported WMF file format")
    @@ -126,12 +121,11 @@ class WmfStubImageFile(ImageFile.StubImageFile):
                 # get frame (in 0.01 millimeter units)
                 frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36)
     
    -            # normalize size to 72 dots per inch
                 size = x1 - x0, y1 - y0
     
                 # calculate dots per inch from bbox and frame
    -            xdpi = 2540 * (x1 - y0) // (frame[2] - frame[0])
    -            ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1])
     +            xdpi = int(2540.0 * (x1 - x0) / (frame[2] - frame[0]) + 0.5)
    +            ydpi = int(2540.0 * (y1 - y0) / (frame[3] - frame[1]) + 0.5)
     
                 self.info["wmf_bbox"] = x0, y0, x1, y1
     
    @@ -144,7 +138,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
                 raise SyntaxError("Unsupported file format")
     
             self.mode = "RGB"
    -        self.size = size
    +        self._size = size
     
             loader = self._load()
             if loader:
    @@ -153,12 +147,23 @@ class WmfStubImageFile(ImageFile.StubImageFile):
         def _load(self):
             return _handler
     
    +    def load(self, dpi=None):
    +        if dpi is not None and self._inch is not None:
    +            self.info["dpi"] = int(dpi + 0.5)
    +            x0, y0, x1, y1 = self.info["wmf_bbox"]
    +            self._size = (
    +                (x1 - x0) * self.info["dpi"] // self._inch,
    +                (y1 - y0) * self.info["dpi"] // self._inch,
    +            )
    +        super().load()
    +
     
     def _save(im, fp, filename):
         if _handler is None or not hasattr(_handler, "save"):
    -        raise IOError("WMF save handler not installed")
    +        raise OSError("WMF save handler not installed")
         _handler.save(im, fp, filename)
     
    +
     #
     # --------------------------------------------------------------------
     # Registry stuff
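Usage sketch for the load(dpi=...) hook added above; the file is hypothetical, rescaling only applies to placeable metafiles (which carry the units-per-inch header), and rendering needs a registered WMF handler, installed by default on Windows:

```python
from PIL import Image

im = Image.open("drawing.wmf")  # hypothetical placeable WMF
im.load(dpi=144)                # recomputes size from the stored bbox
print(im.size, im.info["dpi"])
```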
    diff --git a/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py
    index a7d39ed..c0d8db0 100644
    --- a/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py
    @@ -20,8 +20,6 @@
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, o8
     
    -__version__ = "0.1"
    -
     _MAGIC = b"P7 332"
     
     # standard color palette for thumbnails (RGB332)
    @@ -29,7 +27,9 @@ PALETTE = b""
     for r in range(8):
         for g in range(8):
             for b in range(4):
    -            PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3))
    +            PALETTE = PALETTE + (
    +                o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3)
    +            )
     
     
     def _accept(prefix):
    @@ -39,6 +39,7 @@ def _accept(prefix):
     ##
     # Image plugin for XV thumbnail images.
     
    +
     class XVThumbImageFile(ImageFile.ImageFile):
     
         format = "XVThumb"
    @@ -65,14 +66,11 @@ class XVThumbImageFile(ImageFile.ImageFile):
             s = s.strip().split()
     
             self.mode = "P"
    -        self.size = int(s[0]), int(s[1])
    +        self._size = int(s[0]), int(s[1])
     
             self.palette = ImagePalette.raw("RGB", PALETTE)
     
    -        self.tile = [
    -            ("raw", (0, 0)+self.size,
    -             self.fp.tell(), (self.mode, 0, 1)
    -             )]
    +        self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))]
     
     
     # --------------------------------------------------------------------
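The RGB332 palette loop above expands 3/3/2-bit channel indices to the 0-255 range; a quick check of two entries:

```python
# Brightest entry (r=7, g=7, b=3) maps to pure white...
assert ((7 * 255) // 7, (7 * 255) // 7, (3 * 255) // 3) == (255, 255, 255)
# ...and one step down in blue truncates to 170.
assert (2 * 255) // 3 == 170
```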
    diff --git a/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py
    index b43fbef..ead9722 100644
    --- a/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py
    @@ -20,9 +20,8 @@
     #
     
     import re
    -from . import Image, ImageFile
     
    -__version__ = "0.6"
    +from . import Image, ImageFile
     
     # XBM header
     xbm_head = re.compile(
    @@ -43,6 +42,7 @@ def _accept(prefix):
     ##
     # Image plugin for X11 bitmaps.
     
    +
     class XbmImageFile(ImageFile.ImageFile):
     
         format = "XBM"
    @@ -58,32 +58,30 @@ class XbmImageFile(ImageFile.ImageFile):
                 ysize = int(m.group("height"))
     
                 if m.group("hotspot"):
    -                self.info["hotspot"] = (
    -                    int(m.group("xhot")), int(m.group("yhot"))
    -                    )
    +                self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot")))
     
                 self.mode = "1"
    -            self.size = xsize, ysize
    +            self._size = xsize, ysize
     
    -            self.tile = [("xbm", (0, 0)+self.size, m.end(), None)]
    +            self.tile = [("xbm", (0, 0) + self.size, m.end(), None)]
     
     
     def _save(im, fp, filename):
     
         if im.mode != "1":
    -        raise IOError("cannot write mode %s as XBM" % im.mode)
    +        raise OSError("cannot write mode %s as XBM" % im.mode)
     
    -    fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii'))
    -    fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii'))
    +    fp.write(("#define im_width %d\n" % im.size[0]).encode("ascii"))
    +    fp.write(("#define im_height %d\n" % im.size[1]).encode("ascii"))
     
         hotspot = im.encoderinfo.get("hotspot")
         if hotspot:
    -        fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii'))
    -        fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii'))
    +        fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode("ascii"))
    +        fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode("ascii"))
     
         fp.write(b"static char im_bits[] = {\n")
     
    -    ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)])
    +    ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)])
     
         fp.write(b"};\n")
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py
    index a5cca0e..d8bd00a 100644
    --- a/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py
    +++ b/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py
    @@ -16,13 +16,12 @@
     
     
     import re
    +
     from . import Image, ImageFile, ImagePalette
     from ._binary import i8, o8
     
    -__version__ = "0.2"
    -
     # XPM header
    -xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)")
    +xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)')
     
     
     def _accept(prefix):
    @@ -32,6 +31,7 @@ def _accept(prefix):
     ##
     # Image plugin for X11 pixel maps.
     
    +
     class XpmImageFile(ImageFile.ImageFile):
     
         format = "XPM"
    @@ -51,7 +51,7 @@ class XpmImageFile(ImageFile.ImageFile):
                 if m:
                     break
     
    -        self.size = int(m.group(1)), int(m.group(2))
    +        self._size = int(m.group(1)), int(m.group(2))
     
             pal = int(m.group(3))
             bpp = int(m.group(4))
    @@ -67,9 +67,9 @@ class XpmImageFile(ImageFile.ImageFile):
             for i in range(pal):
     
                 s = self.fp.readline()
    -            if s[-2:] == b'\r\n':
    +            if s[-2:] == b"\r\n":
                     s = s[:-2]
    -            elif s[-1:] in b'\r\n':
    +            elif s[-1:] in b"\r\n":
                     s = s[:-1]
     
                 c = i8(s[1])
    @@ -80,15 +80,15 @@ class XpmImageFile(ImageFile.ImageFile):
                     if s[i] == b"c":
     
                         # process colour key
    -                    rgb = s[i+1]
    +                    rgb = s[i + 1]
                         if rgb == b"None":
                             self.info["transparency"] = c
                         elif rgb[0:1] == b"#":
                             # FIXME: handle colour names (see ImagePalette.py)
                             rgb = int(rgb[1:], 16)
    -                        palette[c] = (o8((rgb >> 16) & 255) +
    -                                      o8((rgb >> 8) & 255) +
    -                                      o8(rgb & 255))
    +                        palette[c] = (
    +                            o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255)
    +                        )
                         else:
                             # unknown colour
                             raise ValueError("cannot read this XPM file")
    @@ -102,7 +102,7 @@ class XpmImageFile(ImageFile.ImageFile):
             self.mode = "P"
             self.palette = ImagePalette.raw("RGB", b"".join(palette))
     
    -        self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))]
    +        self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))]
     
         def load_read(self, bytes):
     
    @@ -114,10 +114,11 @@ class XpmImageFile(ImageFile.ImageFile):
             s = [None] * ysize
     
             for i in range(ysize):
    -            s[i] = self.fp.readline()[1:xsize+1].ljust(xsize)
    +            s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize)
     
             return b"".join(s)
     
    +
     #
     # Registry
     
    diff --git a/server/www/packages/packages-windows/x86/PIL/__init__.py b/server/www/packages/packages-windows/x86/PIL/__init__.py
    index a07280e..f9cb157 100644
    --- a/server/www/packages/packages-windows/x86/PIL/__init__.py
    +++ b/server/www/packages/packages-windows/x86/PIL/__init__.py
    @@ -1,4 +1,4 @@
    -"""Pillow {} (Fork of the Python Imaging Library)
    +"""Pillow (Fork of the Python Imaging Library)
     
     Pillow is the friendly PIL fork by Alex Clark and Contributors.
         https://github.com/python-pillow/Pillow/
    @@ -9,66 +9,127 @@ PIL is the Python Imaging Library by Fredrik Lundh and Contributors.
     Copyright (c) 1999 by Secret Labs AB.
     
     Use PIL.__version__ for this Pillow version.
    -PIL.VERSION is the old PIL version and will be removed in the future.
     
     ;-)
     """
     
    +import sys
    +import warnings
    +
     from . import _version
     
    -# VERSION is deprecated and will be removed in Pillow 6.0.0.
    -# PILLOW_VERSION is deprecated and will be removed after that.
    +# VERSION was removed in Pillow 6.0.0.
    +__version__ = _version.__version__
    +
    +
    +# PILLOW_VERSION is deprecated and will be removed in a future release.
     # Use __version__ instead.
    -VERSION = '1.1.7'  # PIL Version
    -PILLOW_VERSION = __version__ = _version.__version__
    +def _raise_version_warning():
    +    warnings.warn(
    +        "PILLOW_VERSION is deprecated and will be removed in a future release. "
    +        "Use __version__ instead.",
    +        DeprecationWarning,
    +        stacklevel=3,
    +    )
    +
    +
    +if sys.version_info >= (3, 7):
    +
    +    def __getattr__(name):
    +        if name == "PILLOW_VERSION":
    +            _raise_version_warning()
    +            return __version__
    +        raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name))
    +
    +
    +else:
    +
    +    class _Deprecated_Version(str):
    +        def __str__(self):
    +            _raise_version_warning()
    +            return super().__str__()
    +
    +        def __getitem__(self, key):
    +            _raise_version_warning()
    +            return super().__getitem__(key)
    +
    +        def __eq__(self, other):
    +            _raise_version_warning()
    +            return super().__eq__(other)
    +
    +        def __ne__(self, other):
    +            _raise_version_warning()
    +            return super().__ne__(other)
    +
    +        def __gt__(self, other):
    +            _raise_version_warning()
    +            return super().__gt__(other)
    +
    +        def __lt__(self, other):
    +            _raise_version_warning()
    +            return super().__lt__(other)
    +
    +        def __ge__(self, other):
    +            _raise_version_warning()
+            return super().__ge__(other)
    +
    +        def __le__(self, other):
    +            _raise_version_warning()
+            return super().__le__(other)
    +
    +    PILLOW_VERSION = _Deprecated_Version(__version__)
     
     del _version
     
    -__doc__ = __doc__.format(__version__)  # include version in docstring
    +
    +_plugins = [
    +    "BlpImagePlugin",
    +    "BmpImagePlugin",
    +    "BufrStubImagePlugin",
    +    "CurImagePlugin",
    +    "DcxImagePlugin",
    +    "DdsImagePlugin",
    +    "EpsImagePlugin",
    +    "FitsStubImagePlugin",
    +    "FliImagePlugin",
    +    "FpxImagePlugin",
    +    "FtexImagePlugin",
    +    "GbrImagePlugin",
    +    "GifImagePlugin",
    +    "GribStubImagePlugin",
    +    "Hdf5StubImagePlugin",
    +    "IcnsImagePlugin",
    +    "IcoImagePlugin",
    +    "ImImagePlugin",
    +    "ImtImagePlugin",
    +    "IptcImagePlugin",
    +    "JpegImagePlugin",
    +    "Jpeg2KImagePlugin",
    +    "McIdasImagePlugin",
    +    "MicImagePlugin",
    +    "MpegImagePlugin",
    +    "MpoImagePlugin",
    +    "MspImagePlugin",
    +    "PalmImagePlugin",
    +    "PcdImagePlugin",
    +    "PcxImagePlugin",
    +    "PdfImagePlugin",
    +    "PixarImagePlugin",
    +    "PngImagePlugin",
    +    "PpmImagePlugin",
    +    "PsdImagePlugin",
    +    "SgiImagePlugin",
    +    "SpiderImagePlugin",
    +    "SunImagePlugin",
    +    "TgaImagePlugin",
    +    "TiffImagePlugin",
    +    "WebPImagePlugin",
    +    "WmfImagePlugin",
    +    "XbmImagePlugin",
    +    "XpmImagePlugin",
    +    "XVThumbImagePlugin",
    +]
     
     
    -_plugins = ['BlpImagePlugin',
    -            'BmpImagePlugin',
    -            'BufrStubImagePlugin',
    -            'CurImagePlugin',
    -            'DcxImagePlugin',
    -            'DdsImagePlugin',
    -            'EpsImagePlugin',
    -            'FitsStubImagePlugin',
    -            'FliImagePlugin',
    -            'FpxImagePlugin',
    -            'FtexImagePlugin',
    -            'GbrImagePlugin',
    -            'GifImagePlugin',
    -            'GribStubImagePlugin',
    -            'Hdf5StubImagePlugin',
    -            'IcnsImagePlugin',
    -            'IcoImagePlugin',
    -            'ImImagePlugin',
    -            'ImtImagePlugin',
    -            'IptcImagePlugin',
    -            'JpegImagePlugin',
    -            'Jpeg2KImagePlugin',
    -            'McIdasImagePlugin',
    -            'MicImagePlugin',
    -            'MpegImagePlugin',
    -            'MpoImagePlugin',
    -            'MspImagePlugin',
    -            'PalmImagePlugin',
    -            'PcdImagePlugin',
    -            'PcxImagePlugin',
    -            'PdfImagePlugin',
    -            'PixarImagePlugin',
    -            'PngImagePlugin',
    -            'PpmImagePlugin',
    -            'PsdImagePlugin',
    -            'SgiImagePlugin',
    -            'SpiderImagePlugin',
    -            'SunImagePlugin',
    -            'TgaImagePlugin',
    -            'TiffImagePlugin',
    -            'WebPImagePlugin',
    -            'WmfImagePlugin',
    -            'XbmImagePlugin',
    -            'XpmImagePlugin',
    -            'XVThumbImagePlugin']
    +class UnidentifiedImageError(IOError):
    +    pass
    diff --git a/server/www/packages/packages-windows/x86/PIL/__main__.py b/server/www/packages/packages-windows/x86/PIL/__main__.py
    new file mode 100644
    index 0000000..a05323f
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/PIL/__main__.py
    @@ -0,0 +1,3 @@
    +from .features import pilinfo
    +
    +pilinfo()
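# With __main__.py in place the package is runnable: "python -m PIL" prints the
# same diagnostic report as calling pilinfo() directly. A sketch of the
# equivalent call, capturing the report in a string (assumes Pillow >= 7.0 is
# importable):
import io

from PIL import features

buf = io.StringIO()
features.pilinfo(out=buf, supported_formats=False)
print(buf.getvalue().splitlines()[1])  # e.g. "Pillow 7.1.2"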
    diff --git a/server/www/packages/packages-windows/x86/PIL/_binary.py b/server/www/packages/packages-windows/x86/PIL/_binary.py
    index 767c13b..529b8c9 100644
    --- a/server/www/packages/packages-windows/x86/PIL/_binary.py
    +++ b/server/www/packages/packages-windows/x86/PIL/_binary.py
    @@ -11,21 +11,15 @@
     # See the README file for information on usage and redistribution.
     #
     
    -from struct import unpack_from, pack
    -from ._util import py3
    +from struct import pack, unpack_from
     
    -if py3:
    -    def i8(c):
    -        return c if c.__class__ is int else c[0]
     
    -    def o8(i):
    -        return bytes((i & 255,))
    -else:
    -    def i8(c):
    -        return ord(c)
    +def i8(c):
    +    return c if c.__class__ is int else c[0]
     
    -    def o8(i):
    -        return chr(i & 255)
    +
    +def o8(i):
    +    return bytes((i & 255,))
     
     
     # Input, le = little endian, be = big endian
    @@ -33,8 +27,8 @@ def i16le(c, o=0):
         """
         Converts a 2-bytes (16 bits) string to an unsigned integer.
     
    -    c: string containing bytes to convert
    -    o: offset of bytes to convert in string
    +    :param c: string containing bytes to convert
    +    :param o: offset of bytes to convert in string
         """
         return unpack_from(" 2:
    -    from tkinter import _tkinter as tk
    -else:
    -    from Tkinter import tkinter as tk
    -
    -if hasattr(sys, 'pypy_find_executable'):
    +if hasattr(sys, "pypy_find_executable"):
         # Tested with packages at https://bitbucket.org/pypy/pypy/downloads.
         # PyPies 1.6, 2.0 do not have tkinter built in.  PyPy3-2.3.1 gives an
         # OSError trying to import tkinter. Otherwise:
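# The helpers above reduce to plain struct unpacking now that Python 2 support
# is gone. An illustrative sketch of i8/o8/i16le behavior (PIL._binary is a
# private module, so this mirrors it rather than importing it):
from struct import unpack_from

def i8(c):
    return c if c.__class__ is int else c[0]

def o8(i):
    return bytes((i & 255,))

def i16le(c, o=0):
    return unpack_from("<H", c, o)[0]  # 2-byte little-endian unsigned int

assert i8(b"\x7f") == 127
assert o8(300) == b"\x2c"            # 300 & 255 == 44 == 0x2c
assert i16le(b"\x01\x02") == 0x0201  # low byte first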
    diff --git a/server/www/packages/packages-windows/x86/PIL/_util.py b/server/www/packages/packages-windows/x86/PIL/_util.py
    index 6618c62..755b4b2 100644
    --- a/server/www/packages/packages-windows/x86/PIL/_util.py
    +++ b/server/www/packages/packages-windows/x86/PIL/_util.py
    @@ -1,19 +1,20 @@
    -import os, sys
    +import os
    +import sys
     
    -py3 = sys.version_info.major >= 3
    +py36 = sys.version_info[0:2] >= (3, 6)
     
    -if py3:
    -    def isStringType(t):
    -        return isinstance(t, str)
    +
    +if py36:
    +    from pathlib import Path
    +
    +    def isPath(f):
    +        return isinstance(f, (bytes, str, Path))
    +
    +
    +else:
     
         def isPath(f):
             return isinstance(f, (bytes, str))
    -else:
    -    def isStringType(t):
    -        return isinstance(t, basestring)
    -
    -    def isPath(f):
    -        return isinstance(f, basestring)
     
     
     # Checks if an object is a string, and that it points to a directory.
    @@ -21,7 +22,7 @@ def isDirectory(f):
         return isPath(f) and os.path.isdir(f)
     
     
    -class deferred_error(object):
    +class deferred_error:
         def __init__(self, ex):
             self.ex = ex
     
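# The rewritten isPath() lets file-handling code that routes through it accept
# pathlib.Path objects on Python 3.6+ in addition to str and bytes. A sketch
# of the check:
from pathlib import Path

def isPath(f):
    return isinstance(f, (bytes, str, Path))

assert isPath(Path("/tmp/image.png")) and isPath("/tmp/image.png") and isPath(b"x")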
    diff --git a/server/www/packages/packages-windows/x86/PIL/_version.py b/server/www/packages/packages-windows/x86/PIL/_version.py
    index b42628d..1af2909 100644
    --- a/server/www/packages/packages-windows/x86/PIL/_version.py
    +++ b/server/www/packages/packages-windows/x86/PIL/_version.py
    @@ -1,2 +1,2 @@
     # Master version for Pillow
    -__version__ = '5.2.0'
    +__version__ = "7.1.2"
    diff --git a/server/www/packages/packages-windows/x86/PIL/_webp.cp37-win32.pyd b/server/www/packages/packages-windows/x86/PIL/_webp.cp37-win32.pyd
    index 5086304..c91db4f 100644
    Binary files a/server/www/packages/packages-windows/x86/PIL/_webp.cp37-win32.pyd and b/server/www/packages/packages-windows/x86/PIL/_webp.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/PIL/features.py b/server/www/packages/packages-windows/x86/PIL/features.py
    index 9926445..ac06c0f 100644
    --- a/server/www/packages/packages-windows/x86/PIL/features.py
    +++ b/server/www/packages/packages-windows/x86/PIL/features.py
    @@ -1,3 +1,10 @@
    +import collections
    +import os
    +import sys
    +import warnings
    +
    +import PIL
    +
     from . import Image
     
     modules = {
    @@ -26,12 +33,7 @@ def get_supported_modules():
         return [f for f in modules if check_module(f)]
     
     
    -codecs = {
    -    "jpg": "jpeg",
    -    "jpg_2000": "jpeg2k",
    -    "zlib": "zip",
    -    "libtiff": "libtiff"
    -}
    +codecs = {"jpg": "jpeg", "jpg_2000": "jpeg2k", "zlib": "zip", "libtiff": "libtiff"}
     
     
     def check_codec(feature):
    @@ -48,10 +50,13 @@ def get_supported_codecs():
     
     
     features = {
    -    "webp_anim": ("PIL._webp", 'HAVE_WEBPANIM'),
    -    "webp_mux": ("PIL._webp", 'HAVE_WEBPMUX'),
    +    "webp_anim": ("PIL._webp", "HAVE_WEBPANIM"),
    +    "webp_mux": ("PIL._webp", "HAVE_WEBPMUX"),
         "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY"),
    -    "raqm": ("PIL._imagingft", "HAVE_RAQM")
    +    "raqm": ("PIL._imagingft", "HAVE_RAQM"),
    +    "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO"),
    +    "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT"),
    +    "xcb": ("PIL._imaging", "HAVE_XCB"),
     }
     
     
    @@ -62,7 +67,7 @@ def check_feature(feature):
         module, flag = features[feature]
     
         try:
    -        imported_module = __import__(module, fromlist=['PIL'])
    +        imported_module = __import__(module, fromlist=["PIL"])
             return getattr(imported_module, flag)
         except ImportError:
             return None
    @@ -73,9 +78,14 @@ def get_supported_features():
     
     
     def check(feature):
    -    return (feature in modules and check_module(feature) or
    -            feature in codecs and check_codec(feature) or
    -            feature in features and check_feature(feature))
    +    if feature in modules:
    +        return check_module(feature)
    +    if feature in codecs:
    +        return check_codec(feature)
    +    if feature in features:
    +        return check_feature(feature)
    +    warnings.warn("Unknown feature '%s'." % feature, stacklevel=2)
    +    return False
     
     
     def get_supported():
    @@ -83,3 +93,81 @@ def get_supported():
         ret.extend(get_supported_features())
         ret.extend(get_supported_codecs())
         return ret
    +
    +
    +def pilinfo(out=None, supported_formats=True):
    +    if out is None:
    +        out = sys.stdout
    +
    +    Image.init()
    +
    +    print("-" * 68, file=out)
    +    print("Pillow {}".format(PIL.__version__), file=out)
+    py_version_lines = sys.version.splitlines()
+    print("Python {}".format(py_version_lines[0].strip()), file=out)
+    for line in py_version_lines[1:]:
+        print("       {}".format(line.strip()), file=out)
    +    print("-" * 68, file=out)
    +    print(
    +        "Python modules loaded from {}".format(os.path.dirname(Image.__file__)),
    +        file=out,
    +    )
    +    print(
    +        "Binary modules loaded from {}".format(os.path.dirname(Image.core.__file__)),
    +        file=out,
    +    )
    +    print("-" * 68, file=out)
    +
    +    for name, feature in [
    +        ("pil", "PIL CORE"),
    +        ("tkinter", "TKINTER"),
    +        ("freetype2", "FREETYPE2"),
    +        ("littlecms2", "LITTLECMS2"),
    +        ("webp", "WEBP"),
    +        ("transp_webp", "WEBP Transparency"),
    +        ("webp_mux", "WEBPMUX"),
    +        ("webp_anim", "WEBP Animation"),
    +        ("jpg", "JPEG"),
    +        ("jpg_2000", "OPENJPEG (JPEG2000)"),
    +        ("zlib", "ZLIB (PNG/ZIP)"),
    +        ("libtiff", "LIBTIFF"),
    +        ("raqm", "RAQM (Bidirectional Text)"),
    +        ("libimagequant", "LIBIMAGEQUANT (Quantization method)"),
    +        ("xcb", "XCB (X protocol)"),
    +    ]:
    +        if check(name):
    +            print("---", feature, "support ok", file=out)
    +        else:
    +            print("***", feature, "support not installed", file=out)
    +    print("-" * 68, file=out)
    +
    +    if supported_formats:
    +        extensions = collections.defaultdict(list)
    +        for ext, i in Image.EXTENSION.items():
    +            extensions[i].append(ext)
    +
    +        for i in sorted(Image.ID):
    +            line = "{}".format(i)
    +            if i in Image.MIME:
    +                line = "{} {}".format(line, Image.MIME[i])
    +            print(line, file=out)
    +
    +            if i in extensions:
    +                print(
    +                    "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out
    +                )
    +
    +            features = []
    +            if i in Image.OPEN:
    +                features.append("open")
    +            if i in Image.SAVE:
    +                features.append("save")
    +            if i in Image.SAVE_ALL:
    +                features.append("save_all")
    +            if i in Image.DECODERS:
    +                features.append("decode")
    +            if i in Image.ENCODERS:
    +                features.append("encode")
    +
    +            print("Features: {}".format(", ".join(features)), file=out)
    +            print("-" * 68, file=out)
    diff --git a/server/www/packages/packages-windows/x86/_cffi_backend.cp37-win32.pyd b/server/www/packages/packages-windows/x86/_cffi_backend.cp37-win32.pyd
    new file mode 100644
    index 0000000..3dfa627
    Binary files /dev/null and b/server/www/packages/packages-windows/x86/_cffi_backend.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/__init__.py b/server/www/packages/packages-windows/x86/asn1crypto/__init__.py
    new file mode 100644
    index 0000000..2c93f00
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/__init__.py
    @@ -0,0 +1,47 @@
    +# coding: utf-8
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from .version import __version__, __version_info__
    +
    +__all__ = [
    +    '__version__',
    +    '__version_info__',
    +    'load_order',
    +]
    +
    +
    +def load_order():
    +    """
    +    Returns a list of the module and sub-module names for asn1crypto in
    +    dependency load order, for the sake of live reloading code
    +
    +    :return:
    +        A list of unicode strings of module names, as they would appear in
    +        sys.modules, ordered by which module should be reloaded first
    +    """
    +
    +    return [
    +        'asn1crypto._errors',
    +        'asn1crypto._int',
    +        'asn1crypto._ordereddict',
    +        'asn1crypto._teletex_codec',
    +        'asn1crypto._types',
    +        'asn1crypto._inet',
    +        'asn1crypto._iri',
    +        'asn1crypto.version',
    +        'asn1crypto.pem',
    +        'asn1crypto.util',
    +        'asn1crypto.parser',
    +        'asn1crypto.core',
    +        'asn1crypto.algos',
    +        'asn1crypto.keys',
    +        'asn1crypto.x509',
    +        'asn1crypto.crl',
    +        'asn1crypto.csr',
    +        'asn1crypto.ocsp',
    +        'asn1crypto.cms',
    +        'asn1crypto.pdf',
    +        'asn1crypto.pkcs12',
    +        'asn1crypto.tsp',
    +        'asn1crypto',
    +    ]
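# load_order() exists so long-running code can hot-reload asn1crypto in
# dependency order. A sketch of that pattern (assumes asn1crypto is importable):
import importlib
import sys

import asn1crypto

for module_name in asn1crypto.load_order():
    if module_name in sys.modules:
        importlib.reload(sys.modules[module_name])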
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_errors.py b/server/www/packages/packages-windows/x86/asn1crypto/_errors.py
    new file mode 100644
    index 0000000..d8797a2
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_errors.py
    @@ -0,0 +1,54 @@
    +# coding: utf-8
    +
    +"""
    +Exports the following items:
    +
    + - unwrap()
    + - APIException()
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import re
    +import textwrap
    +
    +
    +class APIException(Exception):
    +    """
    +    An exception indicating an API has been removed from asn1crypto
    +    """
    +
    +    pass
    +
    +
    +def unwrap(string, *params):
    +    """
    +    Takes a multi-line string and does the following:
    +
    +     - dedents
    +     - converts newlines with text before and after into a single line
    +     - strips leading and trailing whitespace
    +
    +    :param string:
    +        The string to format
    +
    +    :param *params:
    +        Params to interpolate into the string
    +
    +    :return:
    +        The formatted string
    +    """
    +
    +    output = textwrap.dedent(string)
    +
    +    # Unwrap lines, taking into account bulleted lists, ordered lists and
    +    # underlines consisting of = signs
    +    if output.find('\n') != -1:
    +        output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
    +
    +    if params:
    +        output = output % params
    +
    +    output = output.strip()
    +
    +    return output
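# unwrap() dedents a triple-quoted message, joins wrapped lines, interpolates
# params and strips the result, which keeps the long error texts later in this
# patch readable in source. A quick sketch (assumes asn1crypto is importable):
from asn1crypto._errors import unwrap

message = unwrap(
    '''
    packed_ip must be %d bytes long - is %d
    ''',
    4, 3
)
assert message == 'packed_ip must be 4 bytes long - is 3'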
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_inet.py b/server/www/packages/packages-windows/x86/asn1crypto/_inet.py
    new file mode 100644
    index 0000000..045ba56
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_inet.py
    @@ -0,0 +1,170 @@
    +# coding: utf-8
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import socket
    +import struct
    +
    +from ._errors import unwrap
    +from ._types import byte_cls, bytes_to_list, str_cls, type_name
    +
    +
    +def inet_ntop(address_family, packed_ip):
    +    """
    +    Windows compatibility shim for socket.inet_ntop().
    +
    +    :param address_family:
    +        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
    +
    +    :param packed_ip:
    +        A byte string of the network form of an IP address
    +
    +    :return:
    +        A unicode string of the IP address
    +    """
    +
    +    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
    +        raise ValueError(unwrap(
    +            '''
    +            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
    +            not %s
    +            ''',
    +            repr(socket.AF_INET),
    +            repr(socket.AF_INET6),
    +            repr(address_family)
    +        ))
    +
    +    if not isinstance(packed_ip, byte_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            packed_ip must be a byte string, not %s
    +            ''',
    +            type_name(packed_ip)
    +        ))
    +
    +    required_len = 4 if address_family == socket.AF_INET else 16
    +    if len(packed_ip) != required_len:
    +        raise ValueError(unwrap(
    +            '''
    +            packed_ip must be %d bytes long - is %d
    +            ''',
    +            required_len,
    +            len(packed_ip)
    +        ))
    +
    +    if address_family == socket.AF_INET:
    +        return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
    +
    +    octets = struct.unpack(b'!HHHHHHHH', packed_ip)
    +
    +    runs_of_zero = {}
    +    longest_run = 0
    +    zero_index = None
    +    for i, octet in enumerate(octets + (-1,)):
    +        if octet != 0:
    +            if zero_index is not None:
    +                length = i - zero_index
    +                if length not in runs_of_zero:
    +                    runs_of_zero[length] = zero_index
    +                longest_run = max(longest_run, length)
    +                zero_index = None
    +        elif zero_index is None:
    +            zero_index = i
    +
    +    hexed = [hex(o)[2:] for o in octets]
    +
    +    if longest_run < 2:
    +        return ':'.join(hexed)
    +
    +    zero_start = runs_of_zero[longest_run]
    +    zero_end = zero_start + longest_run
    +
    +    return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
    +
    +
    +def inet_pton(address_family, ip_string):
    +    """
+    Windows compatibility shim for socket.inet_pton().
    +
    +    :param address_family:
    +        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
    +
    +    :param ip_string:
    +        A unicode string of an IP address
    +
    +    :return:
    +        A byte string of the network form of the IP address
    +    """
    +
    +    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
    +        raise ValueError(unwrap(
    +            '''
    +            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
    +            not %s
    +            ''',
    +            repr(socket.AF_INET),
    +            repr(socket.AF_INET6),
    +            repr(address_family)
    +        ))
    +
    +    if not isinstance(ip_string, str_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            ip_string must be a unicode string, not %s
    +            ''',
    +            type_name(ip_string)
    +        ))
    +
    +    if address_family == socket.AF_INET:
    +        octets = ip_string.split('.')
    +        error = len(octets) != 4
    +        if not error:
    +            ints = []
    +            for o in octets:
    +                o = int(o)
    +                if o > 255 or o < 0:
    +                    error = True
    +                    break
    +                ints.append(o)
    +
    +        if error:
    +            raise ValueError(unwrap(
    +                '''
    +                ip_string must be a dotted string with four integers in the
    +                range of 0 to 255, got %s
    +                ''',
    +                repr(ip_string)
    +            ))
    +
    +        return struct.pack(b'!BBBB', *ints)
    +
    +    error = False
    +    omitted = ip_string.count('::')
    +    if omitted > 1:
    +        error = True
    +    elif omitted == 0:
    +        octets = ip_string.split(':')
    +        error = len(octets) != 8
    +    else:
    +        begin, end = ip_string.split('::')
    +        begin_octets = begin.split(':')
    +        end_octets = end.split(':')
    +        missing = 8 - len(begin_octets) - len(end_octets)
    +        octets = begin_octets + (['0'] * missing) + end_octets
    +
+    if not error:
+        ints = []
+        for o in octets:
+            o = int(o, 16)
+            if o > 65535 or o < 0:
+                error = True
+                break
+            ints.append(o)
+
+        if not error:
+            return struct.pack(b'!HHHHHHHH', *ints)
    +
    +    raise ValueError(unwrap(
    +        '''
    +        ip_string must be a valid ipv6 string, got %s
    +        ''',
    +        repr(ip_string)
    +    ))
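# Round-trip sketch for the shims above (assumes asn1crypto is importable):
import socket

from asn1crypto._inet import inet_ntop, inet_pton

packed = inet_pton(socket.AF_INET, '10.0.0.1')
assert packed == b'\x0a\x00\x00\x01'
assert inet_ntop(socket.AF_INET, packed) == '10.0.0.1'
# IPv6: the longest run of zero groups is collapsed to '::' on the way out.
packed6 = inet_pton(socket.AF_INET6, '2001:db8::1')
assert inet_ntop(socket.AF_INET6, packed6) == '2001:db8::1'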
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_int.py b/server/www/packages/packages-windows/x86/asn1crypto/_int.py
    new file mode 100644
    index 0000000..094fc95
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_int.py
    @@ -0,0 +1,22 @@
    +# coding: utf-8
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +
    +def fill_width(bytes_, width):
    +    """
    +    Ensure a byte string representing a positive integer is a specific width
    +    (in bytes)
    +
    +    :param bytes_:
    +        The integer byte string
    +
    +    :param width:
    +        The desired width as an integer
    +
    +    :return:
    +        A byte string of the width specified
    +    """
    +
    +    while len(bytes_) < width:
    +        bytes_ = b'\x00' + bytes_
    +    return bytes_
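# fill_width() left-pads a big-endian integer byte string to a fixed width,
# e.g. so signature components are encoded at the key's full byte size:
from asn1crypto._int import fill_width

assert fill_width(b'\x01\x02', 4) == b'\x00\x00\x01\x02'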
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_iri.py b/server/www/packages/packages-windows/x86/asn1crypto/_iri.py
    new file mode 100644
    index 0000000..7394b4d
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_iri.py
    @@ -0,0 +1,291 @@
    +# coding: utf-8
    +
    +"""
    +Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
    +the following items:
    +
    + - iri_to_uri()
    + - uri_to_iri()
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from encodings import idna  # noqa
    +import codecs
    +import re
    +import sys
    +
    +from ._errors import unwrap
    +from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
    +
    +if sys.version_info < (3,):
    +    from urlparse import urlsplit, urlunsplit
    +    from urllib import (
    +        quote as urlquote,
    +        unquote as unquote_to_bytes,
    +    )
    +
    +else:
    +    from urllib.parse import (
    +        quote as urlquote,
    +        unquote_to_bytes,
    +        urlsplit,
    +        urlunsplit,
    +    )
    +
    +
    +def iri_to_uri(value, normalize=False):
    +    """
    +    Encodes a unicode IRI into an ASCII byte string URI
    +
    +    :param value:
    +        A unicode string of an IRI
    +
    +    :param normalize:
    +        A bool that controls URI normalization
    +
    +    :return:
    +        A byte string of the ASCII-encoded URI
    +    """
    +
    +    if not isinstance(value, str_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            value must be a unicode string, not %s
    +            ''',
    +            type_name(value)
    +        ))
    +
    +    scheme = None
+    # Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
    +    if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
    +        real_prefix = None
    +        prefix_match = re.match('^[^:]*://', value)
    +        if prefix_match:
    +            real_prefix = prefix_match.group(0)
    +            value = 'http://' + value[len(real_prefix):]
    +        parsed = urlsplit(value)
    +        if real_prefix:
    +            value = real_prefix + value[7:]
    +            scheme = _urlquote(real_prefix[:-3])
    +    else:
    +        parsed = urlsplit(value)
    +
    +    if scheme is None:
    +        scheme = _urlquote(parsed.scheme)
    +    hostname = parsed.hostname
    +    if hostname is not None:
    +        hostname = hostname.encode('idna')
    +    # RFC 3986 allows userinfo to contain sub-delims
    +    username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
    +    password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
    +    port = parsed.port
    +    if port is not None:
    +        port = str_cls(port).encode('ascii')
    +
    +    netloc = b''
    +    if username is not None:
    +        netloc += username
    +        if password:
    +            netloc += b':' + password
    +        netloc += b'@'
    +    if hostname is not None:
    +        netloc += hostname
    +    if port is not None:
    +        default_http = scheme == b'http' and port == b'80'
    +        default_https = scheme == b'https' and port == b'443'
    +        if not normalize or (not default_http and not default_https):
    +            netloc += b':' + port
    +
    +    # RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
    +    path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
    +    # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
    +    query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
    +    # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
    +    fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
    +
    +    if normalize and query is None and fragment is None and path == b'/':
    +        path = None
    +
    +    # Python 2.7 compat
    +    if path is None:
    +        path = ''
    +
    +    output = urlunsplit((scheme, netloc, path, query, fragment))
    +    if isinstance(output, str_cls):
    +        output = output.encode('latin1')
    +    return output
    +
    +
    +def uri_to_iri(value):
    +    """
    +    Converts an ASCII URI byte string into a unicode IRI
    +
    +    :param value:
    +        An ASCII-encoded byte string of the URI
    +
    +    :return:
    +        A unicode string of the IRI
    +    """
    +
    +    if not isinstance(value, byte_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            value must be a byte string, not %s
    +            ''',
    +            type_name(value)
    +        ))
    +
    +    parsed = urlsplit(value)
    +
    +    scheme = parsed.scheme
    +    if scheme is not None:
    +        scheme = scheme.decode('ascii')
    +
    +    username = _urlunquote(parsed.username, remap=[':', '@'])
    +    password = _urlunquote(parsed.password, remap=[':', '@'])
    +    hostname = parsed.hostname
    +    if hostname:
    +        hostname = hostname.decode('idna')
    +    port = parsed.port
    +    if port and not isinstance(port, int_types):
    +        port = port.decode('ascii')
    +
    +    netloc = ''
    +    if username is not None:
    +        netloc += username
    +        if password:
    +            netloc += ':' + password
    +        netloc += '@'
    +    if hostname is not None:
    +        netloc += hostname
    +    if port is not None:
    +        netloc += ':' + str_cls(port)
    +
    +    path = _urlunquote(parsed.path, remap=['/'], preserve=True)
    +    query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
    +    fragment = _urlunquote(parsed.fragment)
    +
    +    return urlunsplit((scheme, netloc, path, query, fragment))
    +
    +
    +def _iri_utf8_errors_handler(exc):
    +    """
    +    Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
    +    sequences encoded in %XX format, but as part of a unicode string.
    +
    +    :param exc:
    +        The UnicodeDecodeError exception
    +
    +    :return:
    +        A 2-element tuple of (replacement unicode string, integer index to
    +        resume at)
    +    """
    +
    +    bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
    +    replacements = ['%%%02x' % num for num in bytes_as_ints]
    +    return (''.join(replacements), exc.end)
    +
    +
    +codecs.register_error('iriutf8', _iri_utf8_errors_handler)
    +
    +
    +def _urlquote(string, safe=''):
    +    """
    +    Quotes a unicode string for use in a URL
    +
    +    :param string:
    +        A unicode string
    +
    +    :param safe:
    +        A unicode string of character to not encode
    +
    +    :return:
    +        None (if string is None) or an ASCII byte string of the quoted string
    +    """
    +
    +    if string is None or string == '':
    +        return None
    +
    +    # Anything already hex quoted is pulled out of the URL and unquoted if
    +    # possible
    +    escapes = []
    +    if re.search('%[0-9a-fA-F]{2}', string):
    +        # Try to unquote any percent values, restoring them if they are not
    +        # valid UTF-8. Also, requote any safe chars since encoded versions of
    +        # those are functionally different than the unquoted ones.
    +        def _try_unescape(match):
    +            byte_string = unquote_to_bytes(match.group(0))
    +            unicode_string = byte_string.decode('utf-8', 'iriutf8')
    +            for safe_char in list(safe):
    +                unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
    +            return unicode_string
    +        string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)
    +
+        # Once we have the minimal set of hex quoted values, remove them from
    +        # the string so that they are not double quoted
    +        def _extract_escape(match):
    +            escapes.append(match.group(0).encode('ascii'))
    +            return '\x00'
    +        string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)
    +
    +    output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
    +    if not isinstance(output, byte_cls):
    +        output = output.encode('ascii')
    +
    +    # Restore the existing quoted values that we extracted
    +    if len(escapes) > 0:
    +        def _return_escape(_):
    +            return escapes.pop(0)
    +        output = re.sub(b'%00', _return_escape, output)
    +
    +    return output
    +
    +
    +def _urlunquote(byte_string, remap=None, preserve=None):
    +    """
    +    Unquotes a URI portion from a byte string into unicode using UTF-8
    +
    +    :param byte_string:
    +        A byte string of the data to unquote
    +
    +    :param remap:
    +        A list of characters (as unicode) that should be re-mapped to a
    +        %XX encoding. This is used when characters are not valid in part of a
    +        URL.
    +
    +    :param preserve:
+        A bool - indicates that the chars to be remapped, if they occur in
+        non-hex form, should be preserved, e.g. / in a URL path
    +
    +    :return:
    +        A unicode string
    +    """
    +
    +    if byte_string is None:
    +        return byte_string
    +
    +    if byte_string == b'':
    +        return ''
    +
    +    if preserve:
    +        replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
    +        preserve_unmap = {}
    +        for char in remap:
    +            replacement = replacements.pop(0)
    +            preserve_unmap[replacement] = char
    +            byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
    +
    +    byte_string = unquote_to_bytes(byte_string)
    +
    +    if remap:
    +        for char in remap:
    +            byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
    +
    +    output = byte_string.decode('utf-8', 'iriutf8')
    +
    +    if preserve:
    +        for replacement, original in preserve_unmap.items():
    +            output = output.replace(replacement, original)
    +
    +    return output
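# Conversion sketch: non-ASCII hosts become IDNA, non-ASCII path bytes become
# percent-escapes, and uri_to_iri() reverses both (assumes asn1crypto is
# importable; Python 3 shown):
from asn1crypto._iri import iri_to_uri, uri_to_iri

uri = iri_to_uri('https://b\u00fccher.example/caf\u00e9')
assert uri == b'https://xn--bcher-kva.example/caf%C3%A9'
assert uri_to_iri(uri) == 'https://b\u00fccher.example/caf\u00e9'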
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_ordereddict.py b/server/www/packages/packages-windows/x86/asn1crypto/_ordereddict.py
    new file mode 100644
    index 0000000..2f18ab5
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_ordereddict.py
    @@ -0,0 +1,135 @@
    +# Copyright (c) 2009 Raymond Hettinger
    +#
    +# Permission is hereby granted, free of charge, to any person
    +# obtaining a copy of this software and associated documentation files
    +# (the "Software"), to deal in the Software without restriction,
    +# including without limitation the rights to use, copy, modify, merge,
    +# publish, distribute, sublicense, and/or sell copies of the Software,
    +# and to permit persons to whom the Software is furnished to do so,
    +# subject to the following conditions:
    +#
    +#     The above copyright notice and this permission notice shall be
    +#     included in all copies or substantial portions of the Software.
    +#
    +#     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +#     EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    +#     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +#     NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    +#     HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    +#     WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    +#     FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    +#     OTHER DEALINGS IN THE SOFTWARE.
    +
    +import sys
    +
    +if not sys.version_info < (2, 7):
    +
    +    from collections import OrderedDict
    +
    +else:
    +
    +    from UserDict import DictMixin
    +
    +    class OrderedDict(dict, DictMixin):
    +
    +        def __init__(self, *args, **kwds):
    +            if len(args) > 1:
    +                raise TypeError('expected at most 1 arguments, got %d' % len(args))
    +            try:
    +                self.__end
    +            except AttributeError:
    +                self.clear()
    +            self.update(*args, **kwds)
    +
    +        def clear(self):
    +            self.__end = end = []
    +            end += [None, end, end]  # sentinel node for doubly linked list
    +            self.__map = {}          # key --> [key, prev, next]
    +            dict.clear(self)
    +
    +        def __setitem__(self, key, value):
    +            if key not in self:
    +                end = self.__end
    +                curr = end[1]
    +                curr[2] = end[1] = self.__map[key] = [key, curr, end]
    +            dict.__setitem__(self, key, value)
    +
    +        def __delitem__(self, key):
    +            dict.__delitem__(self, key)
    +            key, prev, next_ = self.__map.pop(key)
    +            prev[2] = next_
    +            next_[1] = prev
    +
    +        def __iter__(self):
    +            end = self.__end
    +            curr = end[2]
    +            while curr is not end:
    +                yield curr[0]
    +                curr = curr[2]
    +
    +        def __reversed__(self):
    +            end = self.__end
    +            curr = end[1]
    +            while curr is not end:
    +                yield curr[0]
    +                curr = curr[1]
    +
    +        def popitem(self, last=True):
    +            if not self:
    +                raise KeyError('dictionary is empty')
    +            if last:
    +                key = reversed(self).next()
    +            else:
    +                key = iter(self).next()
    +            value = self.pop(key)
    +            return key, value
    +
    +        def __reduce__(self):
    +            items = [[k, self[k]] for k in self]
    +            tmp = self.__map, self.__end
    +            del self.__map, self.__end
    +            inst_dict = vars(self).copy()
    +            self.__map, self.__end = tmp
    +            if inst_dict:
    +                return (self.__class__, (items,), inst_dict)
    +            return self.__class__, (items,)
    +
    +        def keys(self):
    +            return list(self)
    +
    +        setdefault = DictMixin.setdefault
    +        update = DictMixin.update
    +        pop = DictMixin.pop
    +        values = DictMixin.values
    +        items = DictMixin.items
    +        iterkeys = DictMixin.iterkeys
    +        itervalues = DictMixin.itervalues
    +        iteritems = DictMixin.iteritems
    +
    +        def __repr__(self):
    +            if not self:
    +                return '%s()' % (self.__class__.__name__,)
    +            return '%s(%r)' % (self.__class__.__name__, self.items())
    +
    +        def copy(self):
    +            return self.__class__(self)
    +
    +        @classmethod
    +        def fromkeys(cls, iterable, value=None):
    +            d = cls()
    +            for key in iterable:
    +                d[key] = value
    +            return d
    +
    +        def __eq__(self, other):
    +            if isinstance(other, OrderedDict):
    +                if len(self) != len(other):
    +                    return False
    +                for p, q in zip(self.items(), other.items()):
    +                    if p != q:
    +                        return False
    +                return True
    +            return dict.__eq__(self, other)
    +
    +        def __ne__(self, other):
    +            return not self == other
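# On Python >= 2.7 the stdlib class is used unchanged; the backport only
# matters on 2.6, where insertion order is kept in a circular doubly linked
# list of [key, prev, next] nodes. Behavior sketch (identical either way):
from collections import OrderedDict

od = OrderedDict()
od['b'] = 1
od['a'] = 2
assert list(od.keys()) == ['b', 'a']
assert od.popitem() == ('a', 2)  # last=True pops the most recently added key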
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_teletex_codec.py b/server/www/packages/packages-windows/x86/asn1crypto/_teletex_codec.py
    new file mode 100644
    index 0000000..b5991aa
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_teletex_codec.py
    @@ -0,0 +1,331 @@
    +# coding: utf-8
    +
    +"""
    +Implementation of the teletex T.61 codec. Exports the following items:
    +
    + - register()
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import codecs
    +
    +
    +class TeletexCodec(codecs.Codec):
    +
    +    def encode(self, input_, errors='strict'):
    +        return codecs.charmap_encode(input_, errors, ENCODING_TABLE)
    +
    +    def decode(self, input_, errors='strict'):
    +        return codecs.charmap_decode(input_, errors, DECODING_TABLE)
    +
    +
    +class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
    +
    +    def encode(self, input_, final=False):
    +        return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
    +
    +
    +class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
    +
    +    def decode(self, input_, final=False):
    +        return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
    +
    +
    +class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
    +
    +    pass
    +
    +
    +class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
    +
    +    pass
    +
    +
    +def teletex_search_function(name):
    +    """
    +    Search function for teletex codec that is passed to codecs.register()
    +    """
    +
    +    if name != 'teletex':
    +        return None
    +
    +    return codecs.CodecInfo(
    +        name='teletex',
    +        encode=TeletexCodec().encode,
    +        decode=TeletexCodec().decode,
    +        incrementalencoder=TeletexIncrementalEncoder,
    +        incrementaldecoder=TeletexIncrementalDecoder,
    +        streamreader=TeletexStreamReader,
    +        streamwriter=TeletexStreamWriter,
    +    )
    +
    +
    +def register():
    +    """
    +    Registers the teletex codec
    +    """
    +
    +    codecs.register(teletex_search_function)
    +
    +
    +# http://en.wikipedia.org/wiki/ITU_T.61
    +DECODING_TABLE = (
    +    '\u0000'
    +    '\u0001'
    +    '\u0002'
    +    '\u0003'
    +    '\u0004'
    +    '\u0005'
    +    '\u0006'
    +    '\u0007'
    +    '\u0008'
    +    '\u0009'
    +    '\u000A'
    +    '\u000B'
    +    '\u000C'
    +    '\u000D'
    +    '\u000E'
    +    '\u000F'
    +    '\u0010'
    +    '\u0011'
    +    '\u0012'
    +    '\u0013'
    +    '\u0014'
    +    '\u0015'
    +    '\u0016'
    +    '\u0017'
    +    '\u0018'
    +    '\u0019'
    +    '\u001A'
    +    '\u001B'
    +    '\u001C'
    +    '\u001D'
    +    '\u001E'
    +    '\u001F'
    +    '\u0020'
    +    '\u0021'
    +    '\u0022'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u0025'
    +    '\u0026'
    +    '\u0027'
    +    '\u0028'
    +    '\u0029'
    +    '\u002A'
    +    '\u002B'
    +    '\u002C'
    +    '\u002D'
    +    '\u002E'
    +    '\u002F'
    +    '\u0030'
    +    '\u0031'
    +    '\u0032'
    +    '\u0033'
    +    '\u0034'
    +    '\u0035'
    +    '\u0036'
    +    '\u0037'
    +    '\u0038'
    +    '\u0039'
    +    '\u003A'
    +    '\u003B'
    +    '\u003C'
    +    '\u003D'
    +    '\u003E'
    +    '\u003F'
    +    '\u0040'
    +    '\u0041'
    +    '\u0042'
    +    '\u0043'
    +    '\u0044'
    +    '\u0045'
    +    '\u0046'
    +    '\u0047'
    +    '\u0048'
    +    '\u0049'
    +    '\u004A'
    +    '\u004B'
    +    '\u004C'
    +    '\u004D'
    +    '\u004E'
    +    '\u004F'
    +    '\u0050'
    +    '\u0051'
    +    '\u0052'
    +    '\u0053'
    +    '\u0054'
    +    '\u0055'
    +    '\u0056'
    +    '\u0057'
    +    '\u0058'
    +    '\u0059'
    +    '\u005A'
    +    '\u005B'
    +    '\ufffe'
    +    '\u005D'
    +    '\ufffe'
    +    '\u005F'
    +    '\ufffe'
    +    '\u0061'
    +    '\u0062'
    +    '\u0063'
    +    '\u0064'
    +    '\u0065'
    +    '\u0066'
    +    '\u0067'
    +    '\u0068'
    +    '\u0069'
    +    '\u006A'
    +    '\u006B'
    +    '\u006C'
    +    '\u006D'
    +    '\u006E'
    +    '\u006F'
    +    '\u0070'
    +    '\u0071'
    +    '\u0072'
    +    '\u0073'
    +    '\u0074'
    +    '\u0075'
    +    '\u0076'
    +    '\u0077'
    +    '\u0078'
    +    '\u0079'
    +    '\u007A'
    +    '\ufffe'
    +    '\u007C'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u007F'
    +    '\u0080'
    +    '\u0081'
    +    '\u0082'
    +    '\u0083'
    +    '\u0084'
    +    '\u0085'
    +    '\u0086'
    +    '\u0087'
    +    '\u0088'
    +    '\u0089'
    +    '\u008A'
    +    '\u008B'
    +    '\u008C'
    +    '\u008D'
    +    '\u008E'
    +    '\u008F'
    +    '\u0090'
    +    '\u0091'
    +    '\u0092'
    +    '\u0093'
    +    '\u0094'
    +    '\u0095'
    +    '\u0096'
    +    '\u0097'
    +    '\u0098'
    +    '\u0099'
    +    '\u009A'
    +    '\u009B'
    +    '\u009C'
    +    '\u009D'
    +    '\u009E'
    +    '\u009F'
    +    '\u00A0'
    +    '\u00A1'
    +    '\u00A2'
    +    '\u00A3'
    +    '\u0024'
    +    '\u00A5'
    +    '\u0023'
    +    '\u00A7'
    +    '\u00A4'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u00AB'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u00B0'
    +    '\u00B1'
    +    '\u00B2'
    +    '\u00B3'
    +    '\u00D7'
    +    '\u00B5'
    +    '\u00B6'
    +    '\u00B7'
    +    '\u00F7'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u00BB'
    +    '\u00BC'
    +    '\u00BD'
    +    '\u00BE'
    +    '\u00BF'
    +    '\ufffe'
    +    '\u0300'
    +    '\u0301'
    +    '\u0302'
    +    '\u0303'
    +    '\u0304'
    +    '\u0306'
    +    '\u0307'
    +    '\u0308'
    +    '\ufffe'
    +    '\u030A'
    +    '\u0327'
    +    '\u0332'
    +    '\u030B'
    +    '\u0328'
    +    '\u030C'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\ufffe'
    +    '\u2126'
    +    '\u00C6'
    +    '\u00D0'
    +    '\u00AA'
    +    '\u0126'
    +    '\ufffe'
    +    '\u0132'
    +    '\u013F'
    +    '\u0141'
    +    '\u00D8'
    +    '\u0152'
    +    '\u00BA'
    +    '\u00DE'
    +    '\u0166'
    +    '\u014A'
    +    '\u0149'
    +    '\u0138'
    +    '\u00E6'
    +    '\u0111'
    +    '\u00F0'
    +    '\u0127'
    +    '\u0131'
    +    '\u0133'
    +    '\u0140'
    +    '\u0142'
    +    '\u00F8'
    +    '\u0153'
    +    '\u00DF'
    +    '\u00FE'
    +    '\u0167'
    +    '\u014B'
    +    '\ufffe'
    +)
    +ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
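# Once register() has run, the codec resolves by name like any other. Sketch
# (assumes asn1crypto is importable; code points taken from the table above):
from asn1crypto._teletex_codec import register

register()
assert '\u00a3'.encode('teletex') == b'\xa3'  # POUND SIGN maps straight through
assert b'\xe0'.decode('teletex') == '\u2126'  # 0xE0 decodes to OHM SIGN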
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/_types.py b/server/www/packages/packages-windows/x86/asn1crypto/_types.py
    new file mode 100644
    index 0000000..b9ca8cc
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/_types.py
    @@ -0,0 +1,46 @@
    +# coding: utf-8
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import inspect
    +import sys
    +
    +
    +if sys.version_info < (3,):
    +    str_cls = unicode  # noqa
    +    byte_cls = str
    +    int_types = (int, long)  # noqa
    +
    +    def bytes_to_list(byte_string):
    +        return [ord(b) for b in byte_string]
    +
    +    chr_cls = chr
    +
    +else:
    +    str_cls = str
    +    byte_cls = bytes
    +    int_types = int
    +
    +    bytes_to_list = list
    +
    +    def chr_cls(num):
    +        return bytes([num])
    +
    +
    +def type_name(value):
    +    """
    +    Returns a user-readable name for the type of an object
    +
    +    :param value:
    +        A value to get the type name of
    +
    +    :return:
    +        A unicode string of the object's type name
    +    """
    +
    +    if inspect.isclass(value):
    +        cls = value
    +    else:
    +        cls = value.__class__
    +    if cls.__module__ in set(['builtins', '__builtin__']):
    +        return cls.__name__
    +    return '%s.%s' % (cls.__module__, cls.__name__)
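# type_name() produces the readable names used in the error messages above.
# Sketch (the '__main__' qualifier assumes this runs as a script):
from asn1crypto._types import type_name

class Widget:
    pass

assert type_name(42) == 'int'                    # builtins stay unqualified
assert type_name(Widget()) == '__main__.Widget'  # everything else is qualified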
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/algos.py b/server/www/packages/packages-windows/x86/asn1crypto/algos.py
    new file mode 100644
    index 0000000..d49be26
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/algos.py
    @@ -0,0 +1,1181 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for various algorithms using in various aspects of public
    +key cryptography. Exports the following items:
    +
    + - AlgorithmIdentifier()
    + - AnyAlgorithmIdentifier()
    + - DigestAlgorithm()
    + - DigestInfo()
    + - DSASignature()
    + - EncryptionAlgorithm()
    + - HmacAlgorithm()
    + - KdfAlgorithm()
    + - Pkcs5MacAlgorithm()
    + - SignedDigestAlgorithm()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from ._errors import unwrap
    +from ._int import fill_width
    +from .util import int_from_bytes, int_to_bytes
    +from .core import (
    +    Any,
    +    Choice,
    +    Integer,
    +    Null,
    +    ObjectIdentifier,
    +    OctetString,
    +    Sequence,
    +    Void,
    +)
    +
    +
    +# Structures and OIDs in this file are pulled from
    +# https://tools.ietf.org/html/rfc3279, https://tools.ietf.org/html/rfc4055,
    +# https://tools.ietf.org/html/rfc5758, https://tools.ietf.org/html/rfc7292,
    +# http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
    +
    +class AlgorithmIdentifier(Sequence):
    +    _fields = [
    +        ('algorithm', ObjectIdentifier),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +
    +class _ForceNullParameters(object):
    +    """
    +    Various structures based on AlgorithmIdentifier require that the parameters
    +    field be core.Null() for certain OIDs. This mixin ensures that happens.
    +    """
    +
    +    # The following attribute, plus the parameters spec callback and custom
    +    # __setitem__ are all to handle a situation where parameters should not be
    +    # optional and must be Null for certain OIDs. More info at
    +    # https://tools.ietf.org/html/rfc4055#page-15 and
    +    # https://tools.ietf.org/html/rfc4055#section-2.1
    +    _null_algos = set([
    +        '1.2.840.113549.1.1.1',    # rsassa_pkcs1v15 / rsaes_pkcs1v15 / rsa
    +        '1.2.840.113549.1.1.11',   # sha256_rsa
    +        '1.2.840.113549.1.1.12',   # sha384_rsa
    +        '1.2.840.113549.1.1.13',   # sha512_rsa
    +        '1.2.840.113549.1.1.14',   # sha224_rsa
    +        '1.3.14.3.2.26',           # sha1
    +        '2.16.840.1.101.3.4.2.4',  # sha224
    +        '2.16.840.1.101.3.4.2.1',  # sha256
    +        '2.16.840.1.101.3.4.2.2',  # sha384
    +        '2.16.840.1.101.3.4.2.3',  # sha512
    +    ])
    +
    +    def _parameters_spec(self):
    +        if self._oid_pair == ('algorithm', 'parameters'):
    +            algo = self['algorithm'].native
    +            if algo in self._oid_specs:
    +                return self._oid_specs[algo]
    +
    +        if self['algorithm'].dotted in self._null_algos:
    +            return Null
    +
    +        return None
    +
    +    _spec_callbacks = {
    +        'parameters': _parameters_spec
    +    }
    +
    +    # We have to override this since the spec callback uses the value of
    +    # algorithm to determine the parameter spec, however default values are
    +    # assigned before setting a field, so a default value can't be based on
    +    # another field value (unless it is a default also). Thus we have to
    +    # manually check to see if the algorithm was set and parameters is unset,
    +    # and then fix the value as appropriate.
    +    def __setitem__(self, key, value):
    +        res = super(_ForceNullParameters, self).__setitem__(key, value)
    +        if key != 'algorithm':
    +            return res
    +        if self['algorithm'].dotted not in self._null_algos:
    +            return res
    +        if self['parameters'].__class__ != Void:
    +            return res
    +        self['parameters'] = Null()
    +        return res
    +
    +
    +class HmacAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.3.14.3.2.10': 'des_mac',
    +        '1.2.840.113549.2.7': 'sha1',
    +        '1.2.840.113549.2.8': 'sha224',
    +        '1.2.840.113549.2.9': 'sha256',
    +        '1.2.840.113549.2.10': 'sha384',
    +        '1.2.840.113549.2.11': 'sha512',
    +        '1.2.840.113549.2.12': 'sha512_224',
    +        '1.2.840.113549.2.13': 'sha512_256',
    +        '2.16.840.1.101.3.4.2.13': 'sha3_224',
    +        '2.16.840.1.101.3.4.2.14': 'sha3_256',
    +        '2.16.840.1.101.3.4.2.15': 'sha3_384',
    +        '2.16.840.1.101.3.4.2.16': 'sha3_512',
    +    }
    +
    +
    +class HmacAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', HmacAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +
    +class DigestAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.2.2': 'md2',
    +        '1.2.840.113549.2.5': 'md5',
    +        '1.3.14.3.2.26': 'sha1',
    +        '2.16.840.1.101.3.4.2.4': 'sha224',
    +        '2.16.840.1.101.3.4.2.1': 'sha256',
    +        '2.16.840.1.101.3.4.2.2': 'sha384',
    +        '2.16.840.1.101.3.4.2.3': 'sha512',
    +        '2.16.840.1.101.3.4.2.5': 'sha512_224',
    +        '2.16.840.1.101.3.4.2.6': 'sha512_256',
    +        '2.16.840.1.101.3.4.2.7': 'sha3_224',
    +        '2.16.840.1.101.3.4.2.8': 'sha3_256',
    +        '2.16.840.1.101.3.4.2.9': 'sha3_384',
    +        '2.16.840.1.101.3.4.2.10': 'sha3_512',
    +        '2.16.840.1.101.3.4.2.11': 'shake128',
    +        '2.16.840.1.101.3.4.2.12': 'shake256',
    +        '2.16.840.1.101.3.4.2.17': 'shake128_len',
    +        '2.16.840.1.101.3.4.2.18': 'shake256_len',
    +    }
    +
    +
    +class DigestAlgorithm(_ForceNullParameters, Sequence):
    +    _fields = [
    +        ('algorithm', DigestAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +
    +# This structure is what is signed with a SignedDigestAlgorithm
    +class DigestInfo(Sequence):
    +    _fields = [
    +        ('digest_algorithm', DigestAlgorithm),
    +        ('digest', OctetString),
    +    ]
    +
    +
    +class MaskGenAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.1.8': 'mgf1',
    +    }
    +
    +
    +class MaskGenAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', MaskGenAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'mgf1': DigestAlgorithm
    +    }
    +
    +
    +class TrailerField(Integer):
    +    _map = {
    +        1: 'trailer_field_bc',
    +    }
    +
    +
    +class RSASSAPSSParams(Sequence):
    +    _fields = [
    +        (
    +            'hash_algorithm',
    +            DigestAlgorithm,
    +            {
    +                'explicit': 0,
    +                'default': {'algorithm': 'sha1'},
    +            }
    +        ),
    +        (
    +            'mask_gen_algorithm',
    +            MaskGenAlgorithm,
    +            {
    +                'explicit': 1,
    +                'default': {
    +                    'algorithm': 'mgf1',
    +                    'parameters': {'algorithm': 'sha1'},
    +                },
    +            }
    +        ),
    +        (
    +            'salt_length',
    +            Integer,
    +            {
    +                'explicit': 2,
    +                'default': 20,
    +            }
    +        ),
    +        (
    +            'trailer_field',
    +            TrailerField,
    +            {
    +                'explicit': 3,
    +                'default': 'trailer_field_bc',
    +            }
    +        ),
    +    ]
    +
    +
    +class SignedDigestAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.3.14.3.2.3': 'md5_rsa',
    +        '1.3.14.3.2.29': 'sha1_rsa',
    +        '1.3.14.7.2.3.1': 'md2_rsa',
    +        '1.2.840.113549.1.1.2': 'md2_rsa',
    +        '1.2.840.113549.1.1.4': 'md5_rsa',
    +        '1.2.840.113549.1.1.5': 'sha1_rsa',
    +        '1.2.840.113549.1.1.14': 'sha224_rsa',
    +        '1.2.840.113549.1.1.11': 'sha256_rsa',
    +        '1.2.840.113549.1.1.12': 'sha384_rsa',
    +        '1.2.840.113549.1.1.13': 'sha512_rsa',
    +        '1.2.840.113549.1.1.10': 'rsassa_pss',
    +        '1.2.840.10040.4.3': 'sha1_dsa',
    +        '1.3.14.3.2.13': 'sha1_dsa',
    +        '1.3.14.3.2.27': 'sha1_dsa',
    +        '2.16.840.1.101.3.4.3.1': 'sha224_dsa',
    +        '2.16.840.1.101.3.4.3.2': 'sha256_dsa',
    +        '1.2.840.10045.4.1': 'sha1_ecdsa',
    +        '1.2.840.10045.4.3.1': 'sha224_ecdsa',
    +        '1.2.840.10045.4.3.2': 'sha256_ecdsa',
    +        '1.2.840.10045.4.3.3': 'sha384_ecdsa',
    +        '1.2.840.10045.4.3.4': 'sha512_ecdsa',
    +        '2.16.840.1.101.3.4.3.9': 'sha3_224_ecdsa',
    +        '2.16.840.1.101.3.4.3.10': 'sha3_256_ecdsa',
    +        '2.16.840.1.101.3.4.3.11': 'sha3_384_ecdsa',
    +        '2.16.840.1.101.3.4.3.12': 'sha3_512_ecdsa',
    +        # For when the digest is specified elsewhere in a Sequence
    +        '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15',
    +        '1.2.840.10040.4.1': 'dsa',
    +        '1.2.840.10045.4': 'ecdsa',
    +    }
    +
    +    _reverse_map = {
    +        'dsa': '1.2.840.10040.4.1',
    +        'ecdsa': '1.2.840.10045.4',
    +        'md2_rsa': '1.2.840.113549.1.1.2',
    +        'md5_rsa': '1.2.840.113549.1.1.4',
    +        'rsassa_pkcs1v15': '1.2.840.113549.1.1.1',
    +        'rsassa_pss': '1.2.840.113549.1.1.10',
    +        'sha1_dsa': '1.2.840.10040.4.3',
    +        'sha1_ecdsa': '1.2.840.10045.4.1',
    +        'sha1_rsa': '1.2.840.113549.1.1.5',
    +        'sha224_dsa': '2.16.840.1.101.3.4.3.1',
    +        'sha224_ecdsa': '1.2.840.10045.4.3.1',
    +        'sha224_rsa': '1.2.840.113549.1.1.14',
    +        'sha256_dsa': '2.16.840.1.101.3.4.3.2',
    +        'sha256_ecdsa': '1.2.840.10045.4.3.2',
    +        'sha256_rsa': '1.2.840.113549.1.1.11',
    +        'sha384_ecdsa': '1.2.840.10045.4.3.3',
    +        'sha384_rsa': '1.2.840.113549.1.1.12',
    +        'sha512_ecdsa': '1.2.840.10045.4.3.4',
    +        'sha512_rsa': '1.2.840.113549.1.1.13',
    +        'sha3_224_ecdsa': '2.16.840.1.101.3.4.3.9',
    +        'sha3_256_ecdsa': '2.16.840.1.101.3.4.3.10',
    +        'sha3_384_ecdsa': '2.16.840.1.101.3.4.3.11',
    +        'sha3_512_ecdsa': '2.16.840.1.101.3.4.3.12',
    +    }
    +
    +
    +class SignedDigestAlgorithm(_ForceNullParameters, Sequence):
    +    _fields = [
    +        ('algorithm', SignedDigestAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'rsassa_pss': RSASSAPSSParams,
    +    }
    +
    +    @property
    +    def signature_algo(self):
    +        """
    +        :return:
    +            A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa" or
    +            "ecdsa"
    +        """
    +
    +        algorithm = self['algorithm'].native
    +
    +        algo_map = {
    +            'md2_rsa': 'rsassa_pkcs1v15',
    +            'md5_rsa': 'rsassa_pkcs1v15',
    +            'sha1_rsa': 'rsassa_pkcs1v15',
    +            'sha224_rsa': 'rsassa_pkcs1v15',
    +            'sha256_rsa': 'rsassa_pkcs1v15',
    +            'sha384_rsa': 'rsassa_pkcs1v15',
    +            'sha512_rsa': 'rsassa_pkcs1v15',
    +            'rsassa_pkcs1v15': 'rsassa_pkcs1v15',
    +            'rsassa_pss': 'rsassa_pss',
    +            'sha1_dsa': 'dsa',
    +            'sha224_dsa': 'dsa',
    +            'sha256_dsa': 'dsa',
    +            'dsa': 'dsa',
    +            'sha1_ecdsa': 'ecdsa',
    +            'sha224_ecdsa': 'ecdsa',
    +            'sha256_ecdsa': 'ecdsa',
    +            'sha384_ecdsa': 'ecdsa',
    +            'sha512_ecdsa': 'ecdsa',
    +            'sha3_224_ecdsa': 'ecdsa',
    +            'sha3_256_ecdsa': 'ecdsa',
    +            'sha3_384_ecdsa': 'ecdsa',
    +            'sha3_512_ecdsa': 'ecdsa',
    +            'ecdsa': 'ecdsa',
    +        }
    +        if algorithm in algo_map:
    +            return algo_map[algorithm]
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Signature algorithm not known for %s
    +            ''',
    +            algorithm
    +        ))
    +
    +    @property
    +    def hash_algo(self):
    +        """
    +        :return:
    +            A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
    +            "sha384", "sha512", "sha512_224", "sha512_256"
    +        """
    +
    +        algorithm = self['algorithm'].native
    +
    +        algo_map = {
    +            'md2_rsa': 'md2',
    +            'md5_rsa': 'md5',
    +            'sha1_rsa': 'sha1',
    +            'sha224_rsa': 'sha224',
    +            'sha256_rsa': 'sha256',
    +            'sha384_rsa': 'sha384',
    +            'sha512_rsa': 'sha512',
    +            'sha1_dsa': 'sha1',
    +            'sha224_dsa': 'sha224',
    +            'sha256_dsa': 'sha256',
    +            'sha1_ecdsa': 'sha1',
    +            'sha224_ecdsa': 'sha224',
    +            'sha256_ecdsa': 'sha256',
    +            'sha384_ecdsa': 'sha384',
    +            'sha512_ecdsa': 'sha512',
    +        }
    +        if algorithm in algo_map:
    +            return algo_map[algorithm]
    +
    +        if algorithm == 'rsassa_pss':
    +            return self['parameters']['hash_algorithm']['algorithm'].native
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Hash algorithm not known for %s
    +            ''',
    +            algorithm
    +        ))
    +
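+# Illustrative usage (a sketch, not part of the upstream code): asn1crypto
+# sequences accept dicts keyed by field name, and the ObjectIdentifier map
+# above accepts the friendly names, so:
+#
+#     algo = SignedDigestAlgorithm({'algorithm': 'sha256_rsa'})
+#     algo.signature_algo  # 'rsassa_pkcs1v15'
+#     algo.hash_algo       # 'sha256'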
    +
    +class Pbkdf2Salt(Choice):
    +    _alternatives = [
    +        ('specified', OctetString),
    +        ('other_source', AlgorithmIdentifier),
    +    ]
    +
    +
    +class Pbkdf2Params(Sequence):
    +    _fields = [
    +        ('salt', Pbkdf2Salt),
    +        ('iteration_count', Integer),
    +        ('key_length', Integer, {'optional': True}),
    +        ('prf', HmacAlgorithm, {'default': {'algorithm': 'sha1'}}),
    +    ]
    +
    +
    +class KdfAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.5.12': 'pbkdf2'
    +    }
    +
    +
    +class KdfAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', KdfAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'pbkdf2': Pbkdf2Params
    +    }
    +
    +
    +class DHParameters(Sequence):
    +    """
    +    Original Name: DHParameter
    +    Source: ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc section 9
    +    """
    +
    +    _fields = [
    +        ('p', Integer),
    +        ('g', Integer),
    +        ('private_value_length', Integer, {'optional': True}),
    +    ]
    +
    +
    +class KeyExchangeAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.3.1': 'dh',
    +    }
    +
    +
    +class KeyExchangeAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', KeyExchangeAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'dh': DHParameters,
    +    }
    +
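+# Illustrative round trip (a sketch, not part of the upstream code) using the
+# toy group p=23, g=5; real DH groups use much larger primes:
+#
+#     kea = KeyExchangeAlgorithm({
+#         'algorithm': 'dh',
+#         'parameters': {'p': 23, 'g': 5},
+#     })
+#     der = kea.dump()
+#     KeyExchangeAlgorithm.load(der)['parameters']['p'].native  # 23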
    +
    +class Rc2Params(Sequence):
    +    _fields = [
    +        ('rc2_parameter_version', Integer, {'optional': True}),
    +        ('iv', OctetString),
    +    ]
    +
    +
    +class Rc5ParamVersion(Integer):
    +    _map = {
    +        16: 'v1-0'
    +    }
    +
    +
    +class Rc5Params(Sequence):
    +    _fields = [
    +        ('version', Rc5ParamVersion),
    +        ('rounds', Integer),
    +        ('block_size_in_bits', Integer),
    +        ('iv', OctetString, {'optional': True}),
    +    ]
    +
    +
    +class Pbes1Params(Sequence):
    +    _fields = [
    +        ('salt', OctetString),
    +        ('iterations', Integer),
    +    ]
    +
    +
    +class CcmParams(Sequence):
    +    # https://tools.ietf.org/html/rfc5084
    +    # aes_ICVlen: 4 | 6 | 8 | 10 | 12 | 14 | 16
    +    _fields = [
    +        ('aes_nonce', OctetString),
    +        ('aes_icvlen', Integer),
    +    ]
    +
    +
    +class PSourceAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.1.9': 'p_specified',
    +    }
    +
    +
    +class PSourceAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', PSourceAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'p_specified': OctetString
    +    }
    +
    +
    +class RSAESOAEPParams(Sequence):
    +    _fields = [
    +        (
    +            'hash_algorithm',
    +            DigestAlgorithm,
    +            {
    +                'explicit': 0,
    +                'default': {'algorithm': 'sha1'}
    +            }
    +        ),
    +        (
    +            'mask_gen_algorithm',
    +            MaskGenAlgorithm,
    +            {
    +                'explicit': 1,
    +                'default': {
    +                    'algorithm': 'mgf1',
    +                    'parameters': {'algorithm': 'sha1'}
    +                }
    +            }
    +        ),
    +        (
    +            'p_source_algorithm',
    +            PSourceAlgorithm,
    +            {
    +                'explicit': 2,
    +                'default': {
    +                    'algorithm': 'p_specified',
    +                    'parameters': b''
    +                }
    +            }
    +        ),
    +    ]
    +
    +
    +class DSASignature(Sequence):
    +    """
    +    An ASN.1 class for translating between the OS crypto library's
    +    representation of an (EC)DSA signature and the ASN.1 structure that is part
    +    of various RFCs.
    +
    +    Original Name: DSS-Sig-Value
    +    Source: https://tools.ietf.org/html/rfc3279#section-2.2.2
    +    """
    +
    +    _fields = [
    +        ('r', Integer),
    +        ('s', Integer),
    +    ]
    +
    +    @classmethod
    +    def from_p1363(cls, data):
    +        """
+        Reads a signature from a byte string encoded according to IEEE P1363,
    +        which is used by Microsoft's BCryptSignHash() function.
    +
    +        :param data:
    +            A byte string from BCryptSignHash()
    +
    +        :return:
    +            A DSASignature object
    +        """
    +
    +        r = int_from_bytes(data[0:len(data) // 2])
    +        s = int_from_bytes(data[len(data) // 2:])
    +        return cls({'r': r, 's': s})
    +
    +    def to_p1363(self):
    +        """
    +        Dumps a signature to a byte string compatible with Microsoft's
    +        BCryptVerifySignature() function.
    +
    +        :return:
    +            A byte string compatible with BCryptVerifySignature()
    +        """
    +
    +        r_bytes = int_to_bytes(self['r'].native)
    +        s_bytes = int_to_bytes(self['s'].native)
    +
    +        int_byte_length = max(len(r_bytes), len(s_bytes))
    +        r_bytes = fill_width(r_bytes, int_byte_length)
    +        s_bytes = fill_width(s_bytes, int_byte_length)
    +
    +        return r_bytes + s_bytes
    +
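+# Illustrative round trip (a sketch, not part of the upstream code): P1363
+# concatenates r and s as two fixed-width big-endian halves, so for a
+# hypothetical 64-byte signature:
+#
+#     sig = DSASignature.from_p1363(b'\x00' * 31 + b'\x01' + b'\x00' * 31 + b'\x02')
+#     sig['r'].native  # 1
+#     sig['s'].native  # 2
+#     der = sig.dump()  # the DER SEQUENCE form used by most RFCs
+#     sig.to_p1363()    # b'\x01\x02' - re-encoded at the minimal shared width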
    +
    +class EncryptionAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.3.14.3.2.7': 'des',
    +        '1.2.840.113549.3.7': 'tripledes_3key',
    +        '1.2.840.113549.3.2': 'rc2',
    +        '1.2.840.113549.3.4': 'rc4',
    +        '1.2.840.113549.3.9': 'rc5',
    +        # From http://csrc.nist.gov/groups/ST/crypto_apps_infra/csor/algorithms.html#AES
    +        '2.16.840.1.101.3.4.1.1': 'aes128_ecb',
    +        '2.16.840.1.101.3.4.1.2': 'aes128_cbc',
    +        '2.16.840.1.101.3.4.1.3': 'aes128_ofb',
    +        '2.16.840.1.101.3.4.1.4': 'aes128_cfb',
    +        '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
    +        '2.16.840.1.101.3.4.1.6': 'aes128_gcm',
    +        '2.16.840.1.101.3.4.1.7': 'aes128_ccm',
    +        '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
    +        '2.16.840.1.101.3.4.1.21': 'aes192_ecb',
    +        '2.16.840.1.101.3.4.1.22': 'aes192_cbc',
    +        '2.16.840.1.101.3.4.1.23': 'aes192_ofb',
    +        '2.16.840.1.101.3.4.1.24': 'aes192_cfb',
    +        '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
    +        '2.16.840.1.101.3.4.1.26': 'aes192_gcm',
    +        '2.16.840.1.101.3.4.1.27': 'aes192_ccm',
    +        '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
    +        '2.16.840.1.101.3.4.1.41': 'aes256_ecb',
    +        '2.16.840.1.101.3.4.1.42': 'aes256_cbc',
    +        '2.16.840.1.101.3.4.1.43': 'aes256_ofb',
    +        '2.16.840.1.101.3.4.1.44': 'aes256_cfb',
    +        '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
    +        '2.16.840.1.101.3.4.1.46': 'aes256_gcm',
    +        '2.16.840.1.101.3.4.1.47': 'aes256_ccm',
    +        '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
    +        # From PKCS#5
    +        '1.2.840.113549.1.5.13': 'pbes2',
    +        '1.2.840.113549.1.5.1': 'pbes1_md2_des',
    +        '1.2.840.113549.1.5.3': 'pbes1_md5_des',
    +        '1.2.840.113549.1.5.4': 'pbes1_md2_rc2',
    +        '1.2.840.113549.1.5.6': 'pbes1_md5_rc2',
    +        '1.2.840.113549.1.5.10': 'pbes1_sha1_des',
    +        '1.2.840.113549.1.5.11': 'pbes1_sha1_rc2',
    +        # From PKCS#12
    +        '1.2.840.113549.1.12.1.1': 'pkcs12_sha1_rc4_128',
    +        '1.2.840.113549.1.12.1.2': 'pkcs12_sha1_rc4_40',
    +        '1.2.840.113549.1.12.1.3': 'pkcs12_sha1_tripledes_3key',
    +        '1.2.840.113549.1.12.1.4': 'pkcs12_sha1_tripledes_2key',
    +        '1.2.840.113549.1.12.1.5': 'pkcs12_sha1_rc2_128',
    +        '1.2.840.113549.1.12.1.6': 'pkcs12_sha1_rc2_40',
    +        # PKCS#1 v2.2
    +        '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
    +        '1.2.840.113549.1.1.7': 'rsaes_oaep',
    +    }
    +
    +
    +class EncryptionAlgorithm(_ForceNullParameters, Sequence):
    +    _fields = [
    +        ('algorithm', EncryptionAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'des': OctetString,
    +        'tripledes_3key': OctetString,
    +        'rc2': Rc2Params,
    +        'rc5': Rc5Params,
    +        'aes128_cbc': OctetString,
    +        'aes192_cbc': OctetString,
    +        'aes256_cbc': OctetString,
    +        'aes128_ofb': OctetString,
    +        'aes192_ofb': OctetString,
    +        'aes256_ofb': OctetString,
    +        # From RFC5084
    +        'aes128_ccm': CcmParams,
    +        'aes192_ccm': CcmParams,
    +        'aes256_ccm': CcmParams,
    +        # From PKCS#5
    +        'pbes1_md2_des': Pbes1Params,
    +        'pbes1_md5_des': Pbes1Params,
    +        'pbes1_md2_rc2': Pbes1Params,
    +        'pbes1_md5_rc2': Pbes1Params,
    +        'pbes1_sha1_des': Pbes1Params,
    +        'pbes1_sha1_rc2': Pbes1Params,
    +        # From PKCS#12
    +        'pkcs12_sha1_rc4_128': Pbes1Params,
    +        'pkcs12_sha1_rc4_40': Pbes1Params,
    +        'pkcs12_sha1_tripledes_3key': Pbes1Params,
    +        'pkcs12_sha1_tripledes_2key': Pbes1Params,
    +        'pkcs12_sha1_rc2_128': Pbes1Params,
    +        'pkcs12_sha1_rc2_40': Pbes1Params,
    +        # PKCS#1 v2.2
    +        'rsaes_oaep': RSAESOAEPParams,
    +    }
    +
    +    @property
    +    def kdf(self):
    +        """
    +        Returns the name of the key derivation function to use.
    +
    +        :return:
+            A unicode string of one of the following: "pbkdf1", "pbkdf2",
    +            "pkcs12_kdf"
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['key_derivation_func']['algorithm'].native
    +
    +        if encryption_algo.find('.') == -1:
    +            if encryption_algo.find('_') != -1:
    +                encryption_algo, _ = encryption_algo.split('_', 1)
    +
    +                if encryption_algo == 'pbes1':
    +                    return 'pbkdf1'
    +
    +                if encryption_algo == 'pkcs12':
    +                    return 'pkcs12_kdf'
    +
    +            raise ValueError(unwrap(
    +                '''
    +                Encryption algorithm "%s" does not have a registered key
    +                derivation function
    +                ''',
    +                encryption_algo
    +            ))
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s", can not determine key
    +            derivation function
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def kdf_hmac(self):
    +        """
    +        Returns the HMAC algorithm to use with the KDF.
    +
    +        :return:
    +            A unicode string of one of the following: "md2", "md5", "sha1",
    +            "sha224", "sha256", "sha384", "sha512"
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native
    +
    +        if encryption_algo.find('.') == -1:
    +            if encryption_algo.find('_') != -1:
    +                _, hmac_algo, _ = encryption_algo.split('_', 2)
    +                return hmac_algo
    +
    +            raise ValueError(unwrap(
    +                '''
    +                Encryption algorithm "%s" does not have a registered key
    +                derivation function
    +                ''',
    +                encryption_algo
    +            ))
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s", can not determine key
    +            derivation hmac algorithm
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def kdf_salt(self):
    +        """
    +        Returns the byte string to use as the salt for the KDF.
    +
    +        :return:
    +            A byte string
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo == 'pbes2':
    +            salt = self['parameters']['key_derivation_func']['parameters']['salt']
    +
    +            if salt.name == 'other_source':
    +                raise ValueError(unwrap(
    +                    '''
    +                    Can not determine key derivation salt - the
    +                    reserved-for-future-use other source salt choice was
    +                    specified in the PBKDF2 params structure
    +                    '''
    +                ))
    +
    +            return salt.native
    +
    +        if encryption_algo.find('.') == -1:
    +            if encryption_algo.find('_') != -1:
    +                return self['parameters']['salt'].native
    +
    +            raise ValueError(unwrap(
    +                '''
    +                Encryption algorithm "%s" does not have a registered key
    +                derivation function
    +                ''',
    +                encryption_algo
    +            ))
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s", can not determine key
    +            derivation salt
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def kdf_iterations(self):
    +        """
    +        Returns the number of iterations that should be run via the KDF.
    +
    +        :return:
    +            An integer
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native
    +
    +        if encryption_algo.find('.') == -1:
    +            if encryption_algo.find('_') != -1:
    +                return self['parameters']['iterations'].native
    +
    +            raise ValueError(unwrap(
    +                '''
    +                Encryption algorithm "%s" does not have a registered key
    +                derivation function
    +                ''',
    +                encryption_algo
    +            ))
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s", can not determine key
    +            derivation iterations
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def key_length(self):
    +        """
    +        Returns the key length to pass to the cipher/kdf. The PKCS#5 spec does
+        not specify a way to store the RC5 key length; however, this tends not
    +        to be a problem since OpenSSL does not support RC5 in PKCS#8 and OS X
    +        does not provide an RC5 cipher for use in the Security Transforms
    +        library.
    +
    +        :raises:
    +            ValueError - when the key length can not be determined
    +
    +        :return:
    +            An integer representing the length in bytes
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo[0:3] == 'aes':
    +            return {
    +                'aes128_': 16,
    +                'aes192_': 24,
    +                'aes256_': 32,
    +            }[encryption_algo[0:7]]
    +
    +        cipher_lengths = {
    +            'des': 8,
    +            'tripledes_3key': 24,
    +        }
    +
    +        if encryption_algo in cipher_lengths:
    +            return cipher_lengths[encryption_algo]
    +
    +        if encryption_algo == 'rc2':
    +            rc2_params = self['parameters'].parsed['encryption_scheme']['parameters'].parsed
    +            rc2_parameter_version = rc2_params['rc2_parameter_version'].native
    +
    +            # See page 24 of
    +            # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf
    +            encoded_key_bits_map = {
    +                160: 5,   # 40-bit
    +                120: 8,   # 64-bit
    +                58: 16,   # 128-bit
    +            }
    +
    +            if rc2_parameter_version in encoded_key_bits_map:
    +                return encoded_key_bits_map[rc2_parameter_version]
    +
    +            if rc2_parameter_version >= 256:
    +                return rc2_parameter_version
    +
    +            if rc2_parameter_version is None:
    +                return 4  # 32-bit default
    +
    +            raise ValueError(unwrap(
    +                '''
    +                Invalid RC2 parameter version found in EncryptionAlgorithm
    +                parameters
    +                '''
    +            ))
    +
    +        if encryption_algo == 'pbes2':
    +            key_length = self['parameters']['key_derivation_func']['parameters']['key_length'].native
    +            if key_length is not None:
    +                return key_length
    +
    +            # If the KDF params don't specify the key size, we can infer it from
+            # the encryption scheme for all schemes except RC5. However, in
+            # practical terms, neither OpenSSL nor OS X supports RC5 for
+            # PKCS#8, so this is unlikely to be an issue in practice.
    +
    +            return self['parameters']['encryption_scheme'].key_length
    +
    +        if encryption_algo.find('.') == -1:
    +            return {
    +                'pbes1_md2_des': 8,
    +                'pbes1_md5_des': 8,
    +                'pbes1_md2_rc2': 8,
    +                'pbes1_md5_rc2': 8,
    +                'pbes1_sha1_des': 8,
    +                'pbes1_sha1_rc2': 8,
    +                'pkcs12_sha1_rc4_128': 16,
    +                'pkcs12_sha1_rc4_40': 5,
    +                'pkcs12_sha1_tripledes_3key': 24,
    +                'pkcs12_sha1_tripledes_2key': 16,
    +                'pkcs12_sha1_rc2_128': 16,
    +                'pkcs12_sha1_rc2_40': 5,
    +            }[encryption_algo]
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s"
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def encryption_mode(self):
    +        """
    +        Returns the name of the encryption mode to use.
    +
    +        :return:
    +            A unicode string from one of the following: "cbc", "ecb", "ofb",
    +            "cfb", "wrap", "gcm", "ccm", "wrap_pad"
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
    +            return encryption_algo[7:]
    +
    +        if encryption_algo[0:6] == 'pbes1_':
    +            return 'cbc'
    +
    +        if encryption_algo[0:7] == 'pkcs12_':
    +            return 'cbc'
    +
    +        if encryption_algo in set(['des', 'tripledes_3key', 'rc2', 'rc5']):
    +            return 'cbc'
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['encryption_scheme'].encryption_mode
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s"
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def encryption_cipher(self):
    +        """
    +        Returns the name of the symmetric encryption cipher to use. The key
+        length can be retrieved via the .key_length property to disambiguate
    +        between different variations of TripleDES, AES, and the RC* ciphers.
    +
    +        :return:
    +            A unicode string from one of the following: "rc2", "rc5", "des",
    +            "tripledes", "aes"
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
    +            return 'aes'
    +
    +        if encryption_algo in set(['des', 'rc2', 'rc5']):
    +            return encryption_algo
    +
    +        if encryption_algo == 'tripledes_3key':
    +            return 'tripledes'
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['encryption_scheme'].encryption_cipher
    +
    +        if encryption_algo.find('.') == -1:
    +            return {
    +                'pbes1_md2_des': 'des',
    +                'pbes1_md5_des': 'des',
    +                'pbes1_md2_rc2': 'rc2',
    +                'pbes1_md5_rc2': 'rc2',
    +                'pbes1_sha1_des': 'des',
    +                'pbes1_sha1_rc2': 'rc2',
    +                'pkcs12_sha1_rc4_128': 'rc4',
    +                'pkcs12_sha1_rc4_40': 'rc4',
    +                'pkcs12_sha1_tripledes_3key': 'tripledes',
    +                'pkcs12_sha1_tripledes_2key': 'tripledes',
    +                'pkcs12_sha1_rc2_128': 'rc2',
    +                'pkcs12_sha1_rc2_40': 'rc2',
    +            }[encryption_algo]
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s"
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def encryption_block_size(self):
    +        """
    +        Returns the block size of the encryption cipher, in bytes.
    +
    +        :return:
    +            An integer that is the block size in bytes
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']):
    +            return 16
    +
    +        cipher_map = {
    +            'des': 8,
    +            'tripledes_3key': 8,
    +            'rc2': 8,
    +        }
    +        if encryption_algo in cipher_map:
    +            return cipher_map[encryption_algo]
    +
    +        if encryption_algo == 'rc5':
+            # Floor division keeps the documented integer return type on Python 3
+            return self['parameters'].parsed['block_size_in_bits'].native // 8
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['encryption_scheme'].encryption_block_size
    +
    +        if encryption_algo.find('.') == -1:
    +            return {
    +                'pbes1_md2_des': 8,
    +                'pbes1_md5_des': 8,
    +                'pbes1_md2_rc2': 8,
    +                'pbes1_md5_rc2': 8,
    +                'pbes1_sha1_des': 8,
    +                'pbes1_sha1_rc2': 8,
    +                'pkcs12_sha1_rc4_128': 0,
    +                'pkcs12_sha1_rc4_40': 0,
    +                'pkcs12_sha1_tripledes_3key': 8,
    +                'pkcs12_sha1_tripledes_2key': 8,
    +                'pkcs12_sha1_rc2_128': 8,
    +                'pkcs12_sha1_rc2_40': 8,
    +            }[encryption_algo]
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s"
    +            ''',
    +            encryption_algo
    +        ))
    +
    +    @property
    +    def encryption_iv(self):
    +        """
    +        Returns the byte string of the initialization vector for the encryption
+        scheme. Only PBES2 stores the IV in the params. For PBES1, the IV
    +        is derived from the KDF and this property will return None.
    +
    +        :return:
    +            A byte string or None
    +        """
    +
    +        encryption_algo = self['algorithm'].native
    +
    +        if encryption_algo in set(['rc2', 'rc5']):
    +            return self['parameters'].parsed['iv'].native
    +
    +        # For DES/Triple DES and AES the IV is the entirety of the parameters
    +        octet_string_iv_oids = set([
    +            'des',
    +            'tripledes_3key',
    +            'aes128_cbc',
    +            'aes192_cbc',
    +            'aes256_cbc',
    +            'aes128_ofb',
    +            'aes192_ofb',
    +            'aes256_ofb',
    +        ])
    +        if encryption_algo in octet_string_iv_oids:
    +            return self['parameters'].native
    +
    +        if encryption_algo == 'pbes2':
    +            return self['parameters']['encryption_scheme'].encryption_iv
    +
+        # All of the PBES1 algos use their KDF to create the IV. For PBKDF1,
    +        # the KDF is told to generate a key that is an extra 8 bytes long, and
    +        # that is used for the IV. For the PKCS#12 KDF, it is called with an id
    +        # of 2 to generate the IV. In either case, we can't return the IV
    +        # without knowing the user's password.
    +        if encryption_algo.find('.') == -1:
    +            return None
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Unrecognized encryption algorithm "%s"
    +            ''',
    +            encryption_algo
    +        ))
    +
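+# Illustrative usage (a sketch, not part of the upstream code): for a PBES1
+# scheme the helper properties above can be read off directly. Assuming
+# dict-based construction:
+#
+#     algo = EncryptionAlgorithm({
+#         'algorithm': 'pbes1_sha1_des',
+#         'parameters': {'salt': b'\x00' * 8, 'iterations': 2048},
+#     })
+#     algo.kdf                    # 'pbkdf1'
+#     algo.kdf_hmac               # 'sha1'
+#     algo.kdf_iterations         # 2048
+#     algo.encryption_cipher      # 'des'
+#     algo.encryption_block_size  # 8
+#     algo.encryption_iv          # None - PBES1 derives the IV via the KDF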
    +
    +class Pbes2Params(Sequence):
    +    _fields = [
    +        ('key_derivation_func', KdfAlgorithm),
    +        ('encryption_scheme', EncryptionAlgorithm),
    +    ]
    +
    +
    +class Pbmac1Params(Sequence):
    +    _fields = [
    +        ('key_derivation_func', KdfAlgorithm),
    +        ('message_auth_scheme', HmacAlgorithm),
    +    ]
    +
    +
    +class Pkcs5MacId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.5.14': 'pbmac1',
    +    }
    +
    +
    +class Pkcs5MacAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', Pkcs5MacId),
    +        ('parameters', Any),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'pbmac1': Pbmac1Params,
    +    }
    +
    +
    +EncryptionAlgorithm._oid_specs['pbes2'] = Pbes2Params
    +
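+# Illustrative PBES2 usage (a sketch, not part of the upstream code; the
+# assignment above is what lets the nested pbes2 parameters parse). Assuming
+# dict construction, with the salt Choice given as a (name, value) tuple:
+#
+#     algo = EncryptionAlgorithm({
+#         'algorithm': 'pbes2',
+#         'parameters': {
+#             'key_derivation_func': {
+#                 'algorithm': 'pbkdf2',
+#                 'parameters': {
+#                     'salt': ('specified', b'\x01' * 8),
+#                     'iteration_count': 10000,
+#                 },
+#             },
+#             'encryption_scheme': {
+#                 'algorithm': 'aes256_cbc',
+#                 'parameters': b'\x00' * 16,
+#             },
+#         },
+#     })
+#     algo.kdf         # 'pbkdf2'
+#     algo.kdf_salt    # b'\x01' * 8
+#     algo.key_length  # 32, inferred from aes256_cbc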
    +
    +class AnyAlgorithmId(ObjectIdentifier):
    +    _map = {}
    +
    +    def _setup(self):
    +        _map = self.__class__._map
    +        for other_cls in (EncryptionAlgorithmId, SignedDigestAlgorithmId, DigestAlgorithmId):
    +            for oid, name in other_cls._map.items():
    +                _map[oid] = name
    +
    +
    +class AnyAlgorithmIdentifier(_ForceNullParameters, Sequence):
    +    _fields = [
    +        ('algorithm', AnyAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {}
    +
    +    def _setup(self):
    +        Sequence._setup(self)
    +        specs = self.__class__._oid_specs
    +        for other_cls in (EncryptionAlgorithm, SignedDigestAlgorithm):
    +            for oid, spec in other_cls._oid_specs.items():
    +                specs[oid] = spec
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/cms.py b/server/www/packages/packages-windows/x86/asn1crypto/cms.py
    new file mode 100644
    index 0000000..2115aed
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/cms.py
    @@ -0,0 +1,984 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for cryptographic message syntax (CMS). Structures are also
    +compatible with PKCS#7. Exports the following items:
    +
    + - AuthenticatedData()
    + - AuthEnvelopedData()
    + - CompressedData()
    + - ContentInfo()
    + - DigestedData()
    + - EncryptedData()
    + - EnvelopedData()
    + - SignedAndEnvelopedData()
    + - SignedData()
    +
    +Other type classes are defined that help compose the types listed above.
    +
    +Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +try:
    +    import zlib
+except ImportError:
    +    zlib = None
    +
    +from .algos import (
    +    _ForceNullParameters,
    +    DigestAlgorithm,
    +    EncryptionAlgorithm,
    +    HmacAlgorithm,
    +    KdfAlgorithm,
    +    RSAESOAEPParams,
    +    SignedDigestAlgorithm,
    +)
    +from .core import (
    +    Any,
    +    BitString,
    +    Choice,
    +    Enumerated,
    +    GeneralizedTime,
    +    Integer,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    OctetString,
    +    ParsableOctetString,
    +    Sequence,
    +    SequenceOf,
    +    SetOf,
    +    UTCTime,
    +    UTF8String,
    +)
    +from .crl import CertificateList
    +from .keys import PublicKeyInfo
    +from .ocsp import OCSPResponse
    +from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name
    +
    +
    +# These structures are taken from
    +# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc
    +
    +class ExtendedCertificateInfo(Sequence):
    +    _fields = [
    +        ('version', Integer),
    +        ('certificate', Certificate),
    +        ('attributes', Attributes),
    +    ]
    +
    +
    +class ExtendedCertificate(Sequence):
    +    _fields = [
    +        ('extended_certificate_info', ExtendedCertificateInfo),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +    ]
    +
    +
    +# These structures are taken from https://tools.ietf.org/html/rfc5652,
    +# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315,
    +# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274,
    +# https://tools.ietf.org/html/rfc3281
    +
    +
    +class CMSVersion(Integer):
    +    _map = {
    +        0: 'v0',
    +        1: 'v1',
    +        2: 'v2',
    +        3: 'v3',
    +        4: 'v4',
    +        5: 'v5',
    +    }
    +
    +
    +class CMSAttributeType(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.9.3': 'content_type',
    +        '1.2.840.113549.1.9.4': 'message_digest',
    +        '1.2.840.113549.1.9.5': 'signing_time',
    +        '1.2.840.113549.1.9.6': 'counter_signature',
    +        # https://tools.ietf.org/html/rfc2633#page-26
    +        '1.2.840.113549.1.9.16.2.11': 'encrypt_key_pref',
    +        # https://tools.ietf.org/html/rfc3161#page-20
    +        '1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
    +        # https://tools.ietf.org/html/rfc6211#page-5
    +        '1.2.840.113549.1.9.52': 'cms_algorithm_protection',
    +        # https://docs.microsoft.com/en-us/previous-versions/hh968145(v%3Dvs.85)
    +        '1.3.6.1.4.1.311.2.4.1': 'microsoft_nested_signature',
    +        # Some places refer to this as SPC_RFC3161_OBJID, others szOID_RFC3161_counterSign.
    +        # https://docs.microsoft.com/en-us/windows/win32/api/wincrypt/ns-wincrypt-crypt_algorithm_identifier
    +        # refers to szOID_RFC3161_counterSign as "1.2.840.113549.1.9.16.1.4",
+        # but that OID is also called szOID_TIMESTAMP_TOKEN. Because there is
+        # no canonical source for this OID, we give it our own name.
    +        '1.3.6.1.4.1.311.3.3.1': 'microsoft_time_stamp_token',
    +    }
    +
    +
    +class Time(Choice):
    +    _alternatives = [
    +        ('utc_time', UTCTime),
    +        ('generalized_time', GeneralizedTime),
    +    ]
    +
    +
    +class ContentType(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.7.1': 'data',
    +        '1.2.840.113549.1.7.2': 'signed_data',
    +        '1.2.840.113549.1.7.3': 'enveloped_data',
    +        '1.2.840.113549.1.7.4': 'signed_and_enveloped_data',
    +        '1.2.840.113549.1.7.5': 'digested_data',
    +        '1.2.840.113549.1.7.6': 'encrypted_data',
    +        '1.2.840.113549.1.9.16.1.2': 'authenticated_data',
    +        '1.2.840.113549.1.9.16.1.9': 'compressed_data',
    +        '1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data',
    +    }
    +
    +
    +class CMSAlgorithmProtection(Sequence):
    +    _fields = [
    +        ('digest_algorithm', DigestAlgorithm),
    +        ('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
    +        ('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class SetOfContentType(SetOf):
    +    _child_spec = ContentType
    +
    +
    +class SetOfOctetString(SetOf):
    +    _child_spec = OctetString
    +
    +
    +class SetOfTime(SetOf):
    +    _child_spec = Time
    +
    +
    +class SetOfAny(SetOf):
    +    _child_spec = Any
    +
    +
    +class SetOfCMSAlgorithmProtection(SetOf):
    +    _child_spec = CMSAlgorithmProtection
    +
    +
    +class CMSAttribute(Sequence):
    +    _fields = [
    +        ('type', CMSAttributeType),
    +        ('values', None),
    +    ]
    +
    +    _oid_specs = {}
    +
    +    def _values_spec(self):
    +        return self._oid_specs.get(self['type'].native, SetOfAny)
    +
    +    _spec_callbacks = {
    +        'values': _values_spec
    +    }
    +
    +
    +class CMSAttributes(SetOf):
    +    _child_spec = CMSAttribute
    +
    +
    +class IssuerSerial(Sequence):
    +    _fields = [
    +        ('issuer', GeneralNames),
    +        ('serial', Integer),
    +        ('issuer_uid', OctetBitString, {'optional': True}),
    +    ]
    +
    +
    +class AttCertVersion(Integer):
    +    _map = {
    +        0: 'v1',
    +        1: 'v2',
    +    }
    +
    +
    +class AttCertSubject(Choice):
    +    _alternatives = [
    +        ('base_certificate_id', IssuerSerial, {'explicit': 0}),
    +        ('subject_name', GeneralNames, {'explicit': 1}),
    +    ]
    +
    +
    +class AttCertValidityPeriod(Sequence):
    +    _fields = [
    +        ('not_before_time', GeneralizedTime),
    +        ('not_after_time', GeneralizedTime),
    +    ]
    +
    +
    +class AttributeCertificateInfoV1(Sequence):
    +    _fields = [
    +        ('version', AttCertVersion, {'default': 'v1'}),
    +        ('subject', AttCertSubject),
    +        ('issuer', GeneralNames),
    +        ('signature', SignedDigestAlgorithm),
    +        ('serial_number', Integer),
    +        ('att_cert_validity_period', AttCertValidityPeriod),
    +        ('attributes', Attributes),
    +        ('issuer_unique_id', OctetBitString, {'optional': True}),
    +        ('extensions', Extensions, {'optional': True}),
    +    ]
    +
    +
    +class AttributeCertificateV1(Sequence):
    +    _fields = [
    +        ('ac_info', AttributeCertificateInfoV1),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +    ]
    +
    +
    +class DigestedObjectType(Enumerated):
    +    _map = {
    +        0: 'public_key',
    +        1: 'public_key_cert',
    +        2: 'other_objy_types',
    +    }
    +
    +
    +class ObjectDigestInfo(Sequence):
    +    _fields = [
    +        ('digested_object_type', DigestedObjectType),
    +        ('other_object_type_id', ObjectIdentifier, {'optional': True}),
    +        ('digest_algorithm', DigestAlgorithm),
    +        ('object_digest', OctetBitString),
    +    ]
    +
    +
    +class Holder(Sequence):
    +    _fields = [
    +        ('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
    +        ('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
    +        ('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class V2Form(Sequence):
    +    _fields = [
    +        ('issuer_name', GeneralNames, {'optional': True}),
    +        ('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
    +        ('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class AttCertIssuer(Choice):
    +    _alternatives = [
    +        ('v1_form', GeneralNames),
    +        ('v2_form', V2Form, {'explicit': 0}),
    +    ]
    +
    +
    +class IetfAttrValue(Choice):
    +    _alternatives = [
    +        ('octets', OctetString),
    +        ('oid', ObjectIdentifier),
    +        ('string', UTF8String),
    +    ]
    +
    +
    +class IetfAttrValues(SequenceOf):
    +    _child_spec = IetfAttrValue
    +
    +
    +class IetfAttrSyntax(Sequence):
    +    _fields = [
    +        ('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
    +        ('values', IetfAttrValues),
    +    ]
    +
    +
    +class SetOfIetfAttrSyntax(SetOf):
    +    _child_spec = IetfAttrSyntax
    +
    +
    +class SvceAuthInfo(Sequence):
    +    _fields = [
    +        ('service', GeneralName),
    +        ('ident', GeneralName),
    +        ('auth_info', OctetString, {'optional': True}),
    +    ]
    +
    +
    +class SetOfSvceAuthInfo(SetOf):
    +    _child_spec = SvceAuthInfo
    +
    +
    +class RoleSyntax(Sequence):
    +    _fields = [
    +        ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
    +        ('role_name', GeneralName, {'implicit': 1}),
    +    ]
    +
    +
    +class SetOfRoleSyntax(SetOf):
    +    _child_spec = RoleSyntax
    +
    +
    +class ClassList(BitString):
    +    _map = {
    +        0: 'unmarked',
    +        1: 'unclassified',
    +        2: 'restricted',
    +        3: 'confidential',
    +        4: 'secret',
    +        5: 'top_secret',
    +    }
    +
    +
    +class SecurityCategory(Sequence):
    +    _fields = [
    +        ('type', ObjectIdentifier, {'implicit': 0}),
    +        ('value', Any, {'implicit': 1}),
    +    ]
    +
    +
    +class SetOfSecurityCategory(SetOf):
    +    _child_spec = SecurityCategory
    +
    +
    +class Clearance(Sequence):
    +    _fields = [
    +        ('policy_id', ObjectIdentifier, {'implicit': 0}),
    +        ('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}),
    +        ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class SetOfClearance(SetOf):
    +    _child_spec = Clearance
    +
    +
    +class BigTime(Sequence):
    +    _fields = [
    +        ('major', Integer),
    +        ('fractional_seconds', Integer),
    +        ('sign', Integer, {'optional': True}),
    +    ]
    +
    +
    +class LeapData(Sequence):
    +    _fields = [
    +        ('leap_time', BigTime),
    +        ('action', Integer),
    +    ]
    +
    +
    +class SetOfLeapData(SetOf):
    +    _child_spec = LeapData
    +
    +
    +class TimingMetrics(Sequence):
    +    _fields = [
    +        ('ntp_time', BigTime),
    +        ('offset', BigTime),
    +        ('delay', BigTime),
    +        ('expiration', BigTime),
    +        ('leap_event', SetOfLeapData, {'optional': True}),
    +    ]
    +
    +
    +class SetOfTimingMetrics(SetOf):
    +    _child_spec = TimingMetrics
    +
    +
    +class TimingPolicy(Sequence):
    +    _fields = [
    +        ('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
    +        ('max_offset', BigTime, {'explicit': 0, 'optional': True}),
    +        ('max_delay', BigTime, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class SetOfTimingPolicy(SetOf):
    +    _child_spec = TimingPolicy
    +
    +
    +class AttCertAttributeType(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.10.1': 'authentication_info',
    +        '1.3.6.1.5.5.7.10.2': 'access_identity',
    +        '1.3.6.1.5.5.7.10.3': 'charging_identity',
    +        '1.3.6.1.5.5.7.10.4': 'group',
    +        '2.5.4.72': 'role',
    +        '2.5.4.55': 'clearance',
    +        '1.3.6.1.4.1.601.10.4.1': 'timing_metrics',
    +        '1.3.6.1.4.1.601.10.4.2': 'timing_policy',
    +    }
    +
    +
    +class AttCertAttribute(Sequence):
    +    _fields = [
    +        ('type', AttCertAttributeType),
    +        ('values', None),
    +    ]
    +
    +    _oid_specs = {
    +        'authentication_info': SetOfSvceAuthInfo,
    +        'access_identity': SetOfSvceAuthInfo,
    +        'charging_identity': SetOfIetfAttrSyntax,
    +        'group': SetOfIetfAttrSyntax,
    +        'role': SetOfRoleSyntax,
    +        'clearance': SetOfClearance,
    +        'timing_metrics': SetOfTimingMetrics,
    +        'timing_policy': SetOfTimingPolicy,
    +    }
    +
    +    def _values_spec(self):
    +        return self._oid_specs.get(self['type'].native, SetOfAny)
    +
    +    _spec_callbacks = {
    +        'values': _values_spec
    +    }
    +
    +
    +class AttCertAttributes(SequenceOf):
    +    _child_spec = AttCertAttribute
    +
    +
    +class AttributeCertificateInfoV2(Sequence):
    +    _fields = [
    +        ('version', AttCertVersion),
    +        ('holder', Holder),
    +        ('issuer', AttCertIssuer),
    +        ('signature', SignedDigestAlgorithm),
    +        ('serial_number', Integer),
    +        ('att_cert_validity_period', AttCertValidityPeriod),
    +        ('attributes', AttCertAttributes),
    +        ('issuer_unique_id', OctetBitString, {'optional': True}),
    +        ('extensions', Extensions, {'optional': True}),
    +    ]
    +
    +
    +class AttributeCertificateV2(Sequence):
    +    # Handle the situation where a V2 cert is encoded as V1
    +    _bad_tag = 1
    +
    +    _fields = [
    +        ('ac_info', AttributeCertificateInfoV2),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +    ]
    +
    +
    +class OtherCertificateFormat(Sequence):
    +    _fields = [
    +        ('other_cert_format', ObjectIdentifier),
    +        ('other_cert', Any),
    +    ]
    +
    +
    +class CertificateChoices(Choice):
    +    _alternatives = [
    +        ('certificate', Certificate),
    +        ('extended_certificate', ExtendedCertificate, {'implicit': 0}),
    +        ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
    +        ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
    +        ('other', OtherCertificateFormat, {'implicit': 3}),
    +    ]
    +
    +    def validate(self, class_, tag, contents):
    +        """
    +        Ensures that the class and tag specified exist as an alternative. This
+        custom version fixes parsing of broken encodings where a V2 attribute
+        certificate is encoded as a V1
    +
    +        :param class_:
    +            The integer class_ from the encoded value header
    +
    +        :param tag:
    +            The integer tag from the encoded value header
    +
    +        :param contents:
    +            A byte string of the contents of the value - used when the object
    +            is explicitly tagged
    +
    +        :raises:
    +            ValueError - when value is not a valid alternative
    +        """
    +
    +        super(CertificateChoices, self).validate(class_, tag, contents)
    +        if self._choice == 2:
    +            if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
    +                self._choice = 3
    +
    +
    +class CertificateSet(SetOf):
    +    _child_spec = CertificateChoices
    +
    +
    +class ContentInfo(Sequence):
    +    _fields = [
    +        ('content_type', ContentType),
    +        ('content', Any, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +    _oid_pair = ('content_type', 'content')
    +    _oid_specs = {}
    +
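+# Illustrative parsing sketch (not part of the upstream code; der_bytes is a
+# hypothetical DER-encoded CMS blob). The empty _oid_specs map above is
+# typically filled in later in this module, once the content classes exist,
+# after which:
+#
+#     info = ContentInfo.load(der_bytes)
+#     info['content_type'].native  # e.g. 'signed_data'
+#     signed_data = info['content']  # spec resolved via _oid_pair/_oid_specs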
    +
    +class SetOfContentInfo(SetOf):
    +    _child_spec = ContentInfo
    +
    +
    +class EncapsulatedContentInfo(Sequence):
    +    _fields = [
    +        ('content_type', ContentType),
    +        ('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +    _oid_pair = ('content_type', 'content')
    +    _oid_specs = {}
    +
    +
    +class IssuerAndSerialNumber(Sequence):
    +    _fields = [
    +        ('issuer', Name),
    +        ('serial_number', Integer),
    +    ]
    +
    +
    +class SignerIdentifier(Choice):
    +    _alternatives = [
    +        ('issuer_and_serial_number', IssuerAndSerialNumber),
    +        ('subject_key_identifier', OctetString, {'implicit': 0}),
    +    ]
    +
    +
    +class DigestAlgorithms(SetOf):
    +    _child_spec = DigestAlgorithm
    +
    +
    +class CertificateRevocationLists(SetOf):
    +    _child_spec = CertificateList
    +
    +
    +class SCVPReqRes(Sequence):
    +    _fields = [
    +        ('request', ContentInfo, {'explicit': 0, 'optional': True}),
    +        ('response', ContentInfo),
    +    ]
    +
    +
    +class OtherRevInfoFormatId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.16.2': 'ocsp_response',
    +        '1.3.6.1.5.5.7.16.4': 'scvp',
    +    }
    +
    +
    +class OtherRevocationInfoFormat(Sequence):
    +    _fields = [
    +        ('other_rev_info_format', OtherRevInfoFormatId),
    +        ('other_rev_info', Any),
    +    ]
    +
    +    _oid_pair = ('other_rev_info_format', 'other_rev_info')
    +    _oid_specs = {
    +        'ocsp_response': OCSPResponse,
    +        'scvp': SCVPReqRes,
    +    }
    +
    +
    +class RevocationInfoChoice(Choice):
    +    _alternatives = [
    +        ('crl', CertificateList),
    +        ('other', OtherRevocationInfoFormat, {'implicit': 1}),
    +    ]
    +
    +
    +class RevocationInfoChoices(SetOf):
    +    _child_spec = RevocationInfoChoice
    +
    +
    +class SignerInfo(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('sid', SignerIdentifier),
    +        ('digest_algorithm', DigestAlgorithm),
    +        ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetString),
    +        ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class SignerInfos(SetOf):
    +    _child_spec = SignerInfo
    +
    +
    +class SignedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('digest_algorithms', DigestAlgorithms),
    +        ('encap_content_info', None),
    +        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
    +        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
    +        ('signer_infos', SignerInfos),
    +    ]
    +
    +    def _encap_content_info_spec(self):
    +        # If the encap_content_info is version v1, then this could be a PKCS#7
    +        # structure, or a CMS structure. CMS wraps the encoded value in an
    +        # Octet String tag.
    +
+        # If the version is greater than 1, it is definitely CMS
    +        if self['version'].native != 'v1':
    +            return EncapsulatedContentInfo
    +
    +        # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
    +        # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
    +        # allows Any
    +        return ContentInfo
    +
    +    _spec_callbacks = {
    +        'encap_content_info': _encap_content_info_spec
    +    }
    +
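+# Illustrative access sketch (not part of the upstream code; der_bytes is a
+# hypothetical DER-encoded SignedData). Thanks to the version-dependent spec
+# above, PKCS#7 and CMS encodings can usually be read the same way:
+#
+#     sd = SignedData.load(der_bytes)
+#     payload = sd['encap_content_info']['content'].native
+#     for signer in sd['signer_infos']:
+#         signer['digest_algorithm']['algorithm'].native  # e.g. 'sha256'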
    +
    +class OriginatorInfo(Sequence):
    +    _fields = [
    +        ('certs', CertificateSet, {'implicit': 0, 'optional': True}),
    +        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class RecipientIdentifier(Choice):
    +    _alternatives = [
    +        ('issuer_and_serial_number', IssuerAndSerialNumber),
    +        ('subject_key_identifier', OctetString, {'implicit': 0}),
    +    ]
    +
    +
    +class KeyEncryptionAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
    +        '1.2.840.113549.1.1.7': 'rsaes_oaep',
    +        '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
    +        '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
    +        '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
    +        '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
    +        '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
    +        '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
    +    }
    +
    +    _reverse_map = {
    +        'rsa': '1.2.840.113549.1.1.1',
    +        'rsaes_pkcs1v15': '1.2.840.113549.1.1.1',
    +        'rsaes_oaep': '1.2.840.113549.1.1.7',
    +        'aes128_wrap': '2.16.840.1.101.3.4.1.5',
    +        'aes128_wrap_pad': '2.16.840.1.101.3.4.1.8',
    +        'aes192_wrap': '2.16.840.1.101.3.4.1.25',
    +        'aes192_wrap_pad': '2.16.840.1.101.3.4.1.28',
    +        'aes256_wrap': '2.16.840.1.101.3.4.1.45',
    +        'aes256_wrap_pad': '2.16.840.1.101.3.4.1.48',
    +    }
    +
    +
    +class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence):
    +    _fields = [
    +        ('algorithm', KeyEncryptionAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'rsaes_oaep': RSAESOAEPParams,
    +    }
    +
    +
    +class KeyTransRecipientInfo(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('rid', RecipientIdentifier),
    +        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
    +        ('encrypted_key', OctetString),
    +    ]
    +
    +
    +class OriginatorIdentifierOrKey(Choice):
    +    _alternatives = [
    +        ('issuer_and_serial_number', IssuerAndSerialNumber),
    +        ('subject_key_identifier', OctetString, {'implicit': 0}),
    +        ('originator_key', PublicKeyInfo, {'implicit': 1}),
    +    ]
    +
    +
    +class OtherKeyAttribute(Sequence):
    +    _fields = [
    +        ('key_attr_id', ObjectIdentifier),
    +        ('key_attr', Any),
    +    ]
    +
    +
    +class RecipientKeyIdentifier(Sequence):
    +    _fields = [
    +        ('subject_key_identifier', OctetString),
    +        ('date', GeneralizedTime, {'optional': True}),
    +        ('other', OtherKeyAttribute, {'optional': True}),
    +    ]
    +
    +
    +class KeyAgreementRecipientIdentifier(Choice):
    +    _alternatives = [
    +        ('issuer_and_serial_number', IssuerAndSerialNumber),
    +        ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
    +    ]
    +
    +
    +class RecipientEncryptedKey(Sequence):
    +    _fields = [
    +        ('rid', KeyAgreementRecipientIdentifier),
    +        ('encrypted_key', OctetString),
    +    ]
    +
    +
    +class RecipientEncryptedKeys(SequenceOf):
    +    _child_spec = RecipientEncryptedKey
    +
    +
    +class KeyAgreeRecipientInfo(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
    +        ('ukm', OctetString, {'explicit': 1, 'optional': True}),
    +        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
    +        ('recipient_encrypted_keys', RecipientEncryptedKeys),
    +    ]
    +
    +
    +class KEKIdentifier(Sequence):
    +    _fields = [
    +        ('key_identifier', OctetString),
    +        ('date', GeneralizedTime, {'optional': True}),
    +        ('other', OtherKeyAttribute, {'optional': True}),
    +    ]
    +
    +
    +class KEKRecipientInfo(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('kekid', KEKIdentifier),
    +        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
    +        ('encrypted_key', OctetString),
    +    ]
    +
    +
    +class PasswordRecipientInfo(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
    +        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
    +        ('encrypted_key', OctetString),
    +    ]
    +
    +
    +class OtherRecipientInfo(Sequence):
    +    _fields = [
    +        ('ori_type', ObjectIdentifier),
    +        ('ori_value', Any),
    +    ]
    +
    +
    +class RecipientInfo(Choice):
    +    _alternatives = [
    +        ('ktri', KeyTransRecipientInfo),
    +        ('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
    +        ('kekri', KEKRecipientInfo, {'implicit': 2}),
    +        ('pwri', PasswordRecipientInfo, {'implicit': 3}),
    +        ('ori', OtherRecipientInfo, {'implicit': 4}),
    +    ]
    +
    +
    +class RecipientInfos(SetOf):
    +    _child_spec = RecipientInfo
    +
    +
    +class EncryptedContentInfo(Sequence):
    +    _fields = [
    +        ('content_type', ContentType),
    +        ('content_encryption_algorithm', EncryptionAlgorithm),
    +        ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class EnvelopedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
    +        ('recipient_infos', RecipientInfos),
    +        ('encrypted_content_info', EncryptedContentInfo),
    +        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    +    ]
    +
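+
+# Editor's illustrative sketch, not part of asn1crypto: each entry of
+# 'recipient_infos' is a RecipientInfo Choice, so .name reveals which key
+# management mechanism ('ktri', 'kari', 'kekri', 'pwri' or 'ori') is in use.
+# The enveloped_data argument is a hypothetical, already-parsed EnvelopedData.
+def _example_recipient_names(enveloped_data):
+    return [recipient.name for recipient in enveloped_data['recipient_infos']]
+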
    +
    +class SignedAndEnvelopedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('recipient_infos', RecipientInfos),
    +        ('digest_algorithms', DigestAlgorithms),
    +        ('encrypted_content_info', EncryptedContentInfo),
    +        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
    +        ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
    +        ('signer_infos', SignerInfos),
    +    ]
    +
    +
    +class DigestedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('digest_algorithm', DigestAlgorithm),
    +        ('encap_content_info', None),
    +        ('digest', OctetString),
    +    ]
    +
    +    def _encap_content_info_spec(self):
    +        # If the encap_content_info is version v1, then this could be a PKCS#7
    +        # structure, or a CMS structure. CMS wraps the encoded value in an
    +        # Octet String tag.
    +
+        # If the version is greater than 1, it is definitely CMS
    +        if self['version'].native != 'v1':
    +            return EncapsulatedContentInfo
    +
    +        # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
    +        # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
    +        # allows Any
    +        return ContentInfo
    +
    +    _spec_callbacks = {
    +        'encap_content_info': _encap_content_info_spec
    +    }
    +
    +
    +class EncryptedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('encrypted_content_info', EncryptedContentInfo),
    +        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class AuthenticatedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
    +        ('recipient_infos', RecipientInfos),
    +        ('mac_algorithm', HmacAlgorithm),
    +        ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
    +        # This does not require the _spec_callbacks approach of SignedData and
    +        # DigestedData since AuthenticatedData was not part of PKCS#7
    +        ('encap_content_info', EncapsulatedContentInfo),
    +        ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
    +        ('mac', OctetString),
    +        ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
    +    ]
    +
    +
    +class AuthEnvelopedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
    +        ('recipient_infos', RecipientInfos),
    +        ('auth_encrypted_content_info', EncryptedContentInfo),
    +        ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    +        ('mac', OctetString),
    +        ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class CompressionAlgorithmId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.9.16.3.8': 'zlib',
    +    }
    +
    +
    +class CompressionAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm', CompressionAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +
    +class CompressedData(Sequence):
    +    _fields = [
    +        ('version', CMSVersion),
    +        ('compression_algorithm', CompressionAlgorithm),
    +        ('encap_content_info', EncapsulatedContentInfo),
    +    ]
    +
    +    _decompressed = None
    +
    +    @property
    +    def decompressed(self):
    +        if self._decompressed is None:
    +            if zlib is None:
    +                raise SystemError('The zlib module is not available')
    +            self._decompressed = zlib.decompress(self['encap_content_info']['content'].native)
    +        return self._decompressed
    +
    +
    +class RecipientKeyIdentifier(Sequence):
    +    _fields = [
    +        ('subjectKeyIdentifier', OctetString),
    +        ('date', GeneralizedTime, {'optional': True}),
    +        ('other', OtherKeyAttribute, {'optional': True}),
    +    ]
    +
    +
    +class SMIMEEncryptionKeyPreference(Choice):
    +    _alternatives = [
    +        ('issuer_and_serial_number', IssuerAndSerialNumber, {'implicit': 0}),
    +        ('recipientKeyId', RecipientKeyIdentifier, {'implicit': 1}),
    +        ('subjectAltKeyIdentifier', PublicKeyInfo, {'implicit': 2}),
    +    ]
    +
    +
    +class SMIMEEncryptionKeyPreferences(SetOf):
    +    _child_spec = SMIMEEncryptionKeyPreference
    +
    +
    +ContentInfo._oid_specs = {
    +    'data': OctetString,
    +    'signed_data': SignedData,
    +    'enveloped_data': EnvelopedData,
    +    'signed_and_enveloped_data': SignedAndEnvelopedData,
    +    'digested_data': DigestedData,
    +    'encrypted_data': EncryptedData,
    +    'authenticated_data': AuthenticatedData,
    +    'compressed_data': CompressedData,
    +    'authenticated_enveloped_data': AuthEnvelopedData,
    +}
    +
    +
    +EncapsulatedContentInfo._oid_specs = {
    +    'signed_data': SignedData,
    +    'enveloped_data': EnvelopedData,
    +    'signed_and_enveloped_data': SignedAndEnvelopedData,
    +    'digested_data': DigestedData,
    +    'encrypted_data': EncryptedData,
    +    'authenticated_data': AuthenticatedData,
    +    'compressed_data': CompressedData,
    +    'authenticated_enveloped_data': AuthEnvelopedData,
    +}
    +
    +
    +CMSAttribute._oid_specs = {
    +    'content_type': SetOfContentType,
    +    'message_digest': SetOfOctetString,
    +    'signing_time': SetOfTime,
    +    'counter_signature': SignerInfos,
    +    'signature_time_stamp_token': SetOfContentInfo,
    +    'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
    +    'microsoft_nested_signature': SetOfContentInfo,
    +    'microsoft_time_stamp_token': SetOfContentInfo,
    +    'encrypt_key_pref': SMIMEEncryptionKeyPreferences,
    +}
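+
+
+# Editor's illustrative sketch, not part of asn1crypto: the mappings above are
+# assigned after the class definitions because ContentInfo,
+# EncapsulatedContentInfo and CMSAttribute are declared before structures such
+# as SignedData exist. In effect, parsing dispatches on 'content_type':
+def _example_content_info(der_bytes):
+    # Hypothetical input: der_bytes is a DER-encoded CMS ContentInfo.
+    info = ContentInfo.load(der_bytes)
+    # For a 'signed_data' content type, info['content'] is a SignedData.
+    return info['content_type'].native, info['content']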
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/core.py b/server/www/packages/packages-windows/x86/asn1crypto/core.py
    new file mode 100644
    index 0000000..933f8ca
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/core.py
    @@ -0,0 +1,5650 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for universal types. Exports the following items:
    +
    + - load()
    + - Any()
    + - Asn1Value()
    + - BitString()
    + - BMPString()
    + - Boolean()
    + - CharacterString()
    + - Choice()
    + - EmbeddedPdv()
    + - Enumerated()
    + - GeneralizedTime()
    + - GeneralString()
    + - GraphicString()
    + - IA5String()
    + - InstanceOf()
    + - Integer()
    + - IntegerBitString()
    + - IntegerOctetString()
    + - Null()
    + - NumericString()
    + - ObjectDescriptor()
    + - ObjectIdentifier()
    + - OctetBitString()
    + - OctetString()
    + - PrintableString()
    + - Real()
    + - RelativeOid()
    + - Sequence()
    + - SequenceOf()
    + - Set()
    + - SetOf()
    + - TeletexString()
    + - UniversalString()
    + - UTCTime()
    + - UTF8String()
    + - VideotexString()
    + - VisibleString()
    + - VOID
    + - Void()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from datetime import datetime, timedelta
    +from fractions import Fraction
    +import binascii
    +import copy
    +import math
    +import re
    +import sys
    +
    +from . import _teletex_codec
    +from ._errors import unwrap
    +from ._ordereddict import OrderedDict
    +from ._types import type_name, str_cls, byte_cls, int_types, chr_cls
    +from .parser import _parse, _dump_header
    +from .util import int_to_bytes, int_from_bytes, timezone, extended_datetime, create_timezone, utc_with_dst
    +
    +if sys.version_info <= (3,):
    +    from cStringIO import StringIO as BytesIO
    +
    +    range = xrange  # noqa
    +    _PY2 = True
    +
    +else:
    +    from io import BytesIO
    +
    +    _PY2 = False
    +
    +
    +_teletex_codec.register()
    +
    +
    +CLASS_NUM_TO_NAME_MAP = {
    +    0: 'universal',
    +    1: 'application',
    +    2: 'context',
    +    3: 'private',
    +}
    +
    +CLASS_NAME_TO_NUM_MAP = {
    +    'universal': 0,
    +    'application': 1,
    +    'context': 2,
    +    'private': 3,
    +    0: 0,
    +    1: 1,
    +    2: 2,
    +    3: 3,
    +}
    +
    +METHOD_NUM_TO_NAME_MAP = {
    +    0: 'primitive',
    +    1: 'constructed',
    +}
    +
    +
    +_OID_RE = re.compile(r'^\d+(\.\d+)*$')
    +
    +
    +# A global tracker to ensure that _setup() is called for every class, even
+# if it has been called for a parent class. This allows different _fields
    +# definitions for child classes. Without such a construct, the child classes
    +# would just see the parent class attributes and would use them.
    +_SETUP_CLASSES = {}
    +
    +
    +def load(encoded_data, strict=False):
    +    """
+    Loads a BER/DER-encoded byte string and constructs a universal object based
    +    on the tag value:
    +
    +     - 1: Boolean
    +     - 2: Integer
    +     - 3: BitString
    +     - 4: OctetString
    +     - 5: Null
    +     - 6: ObjectIdentifier
    +     - 7: ObjectDescriptor
    +     - 8: InstanceOf
    +     - 9: Real
    +     - 10: Enumerated
    +     - 11: EmbeddedPdv
    +     - 12: UTF8String
    +     - 13: RelativeOid
+     - 16: Sequence
    +     - 17: Set
    +     - 18: NumericString
    +     - 19: PrintableString
    +     - 20: TeletexString
    +     - 21: VideotexString
    +     - 22: IA5String
    +     - 23: UTCTime
    +     - 24: GeneralizedTime
    +     - 25: GraphicString
    +     - 26: VisibleString
    +     - 27: GeneralString
    +     - 28: UniversalString
    +     - 29: CharacterString
    +     - 30: BMPString
    +
    +    :param encoded_data:
    +        A byte string of BER or DER-encoded data
    +
    +    :param strict:
    +        A boolean indicating if trailing data should be forbidden - if so, a
    +        ValueError will be raised when trailing data exists
    +
    +    :raises:
    +        ValueError - when strict is True and trailing data is present
+        ValueError - when the encoded value has a tag other than those listed above
    +        ValueError - when the ASN.1 header length is longer than the data
    +        TypeError - when encoded_data is not a byte string
    +
    +    :return:
+        An instance of one of the universal classes
    +    """
    +
    +    return Asn1Value.load(encoded_data, strict=strict)
    +
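+
+# Editor's illustrative sketch, not part of asn1crypto: load() picks the class
+# from the universal tag alone. b'\x01\x01\xff' is DER for BOOLEAN TRUE
+# (tag 1, length 1, contents 0xff), so a Boolean instance comes back:
+def _example_load():
+    value = load(b'\x01\x01\xff')
+    return value.native  # True
+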
    +
    +class Asn1Value(object):
    +    """
    +    The basis of all ASN.1 values
    +    """
    +
    +    # The integer 0 for primitive, 1 for constructed
    +    method = None
    +
    +    # An integer 0 through 3 - see CLASS_NUM_TO_NAME_MAP for value
    +    class_ = None
    +
    +    # An integer 1 or greater indicating the tag number
    +    tag = None
    +
    +    # An alternate tag allowed for this type - used for handling broken
    +    # structures where a string value is encoded using an incorrect tag
    +    _bad_tag = None
    +
    +    # If the value has been implicitly tagged
    +    implicit = False
    +
    +    # If explicitly tagged, a tuple of 2-element tuples containing the
    +    # class int and tag int, from innermost to outermost
    +    explicit = None
    +
    +    # The BER/DER header bytes
    +    _header = None
    +
    +    # Raw encoded value bytes not including class, method, tag, length header
    +    contents = None
    +
    +    # The BER/DER trailer bytes
    +    _trailer = b''
    +
    +    # The native python representation of the value - this is not used by
    +    # some classes since they utilize _bytes or _unicode
    +    _native = None
    +
    +    @classmethod
    +    def load(cls, encoded_data, strict=False, **kwargs):
    +        """
    +        Loads a BER/DER-encoded byte string using the current class as the spec
    +
    +        :param encoded_data:
    +            A byte string of BER or DER-encoded data
    +
    +        :param strict:
    +            A boolean indicating if trailing data should be forbidden - if so, a
    +            ValueError will be raised when trailing data exists
    +
    +        :return:
    +            An instance of the current class
    +        """
    +
    +        if not isinstance(encoded_data, byte_cls):
    +            raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
    +
    +        spec = None
    +        if cls.tag is not None:
    +            spec = cls
    +
    +        value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict)
    +        return value
    +
    +    def __init__(self, explicit=None, implicit=None, no_explicit=False, tag_type=None, class_=None, tag=None,
    +                 optional=None, default=None, contents=None, method=None):
    +        """
    +        The optional parameter is not used, but rather included so we don't
    +        have to delete it from the parameter dictionary when passing as keyword
    +        args
    +
    +        :param explicit:
    +            An int tag number for explicit tagging, or a 2-element tuple of
    +            class and tag.
    +
    +        :param implicit:
    +            An int tag number for implicit tagging, or a 2-element tuple of
    +            class and tag.
    +
    +        :param no_explicit:
    +            If explicit tagging info should be removed from this instance.
+            Used internally to allow constructing the underlying value that
    +            has been wrapped in an explicit tag.
    +
    +        :param tag_type:
    +            None for normal values, or one of "implicit", "explicit" for tagged
    +            values. Deprecated in favor of explicit and implicit params.
    +
    +        :param class_:
    +            The class for the value - defaults to "universal" if tag_type is
    +            None, otherwise defaults to "context". Valid values include:
    +             - "universal"
    +             - "application"
    +             - "context"
    +             - "private"
    +            Deprecated in favor of explicit and implicit params.
    +
    +        :param tag:
    +            The integer tag to override - usually this is used with tag_type or
    +            class_. Deprecated in favor of explicit and implicit params.
    +
    +        :param optional:
    +            Dummy parameter that allows "optional" key in spec param dicts
    +
    +        :param default:
    +            The default value to use if the value is currently None
    +
    +        :param contents:
    +            A byte string of the encoded contents of the value
    +
    +        :param method:
    +            The method for the value - no default value since this is
    +            normally set on a class. Valid values include:
    +             - "primitive" or 0
    +             - "constructed" or 1
    +
    +        :raises:
    +            ValueError - when implicit, explicit, tag_type, class_ or tag are invalid values
    +        """
    +
    +        try:
    +            if self.__class__ not in _SETUP_CLASSES:
    +                cls = self.__class__
    +                # Allow explicit to be specified as a simple 2-element tuple
+                # instead of requiring the user to make a nested tuple
    +                if cls.explicit is not None and isinstance(cls.explicit[0], int_types):
    +                    cls.explicit = (cls.explicit, )
    +                if hasattr(cls, '_setup'):
    +                    self._setup()
    +                _SETUP_CLASSES[cls] = True
    +
    +            # Normalize tagging values
    +            if explicit is not None:
    +                if isinstance(explicit, int_types):
    +                    if class_ is None:
    +                        class_ = 'context'
    +                    explicit = (class_, explicit)
    +                # Prevent both explicit and tag_type == 'explicit'
    +                if tag_type == 'explicit':
    +                    tag_type = None
    +                    tag = None
    +
    +            if implicit is not None:
    +                if isinstance(implicit, int_types):
    +                    if class_ is None:
    +                        class_ = 'context'
    +                    implicit = (class_, implicit)
    +                # Prevent both implicit and tag_type == 'implicit'
    +                if tag_type == 'implicit':
    +                    tag_type = None
    +                    tag = None
    +
    +            # Convert old tag_type API to explicit/implicit params
    +            if tag_type is not None:
    +                if class_ is None:
    +                    class_ = 'context'
    +                if tag_type == 'explicit':
    +                    explicit = (class_, tag)
    +                elif tag_type == 'implicit':
    +                    implicit = (class_, tag)
    +                else:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        tag_type must be one of "implicit", "explicit", not %s
    +                        ''',
    +                        repr(tag_type)
    +                    ))
    +
    +            if explicit is not None:
    +                # Ensure we have a tuple of 2-element tuples
    +                if len(explicit) == 2 and isinstance(explicit[1], int_types):
    +                    explicit = (explicit, )
    +                for class_, tag in explicit:
    +                    invalid_class = None
    +                    if isinstance(class_, int_types):
    +                        if class_ not in CLASS_NUM_TO_NAME_MAP:
    +                            invalid_class = class_
    +                    else:
    +                        if class_ not in CLASS_NAME_TO_NUM_MAP:
    +                            invalid_class = class_
    +                        class_ = CLASS_NAME_TO_NUM_MAP[class_]
    +                    if invalid_class is not None:
    +                        raise ValueError(unwrap(
    +                            '''
    +                            explicit class must be one of "universal", "application",
    +                            "context", "private", not %s
    +                            ''',
    +                            repr(invalid_class)
    +                        ))
    +                    if tag is not None:
    +                        if not isinstance(tag, int_types):
    +                            raise TypeError(unwrap(
    +                                '''
    +                                explicit tag must be an integer, not %s
    +                                ''',
    +                                type_name(tag)
    +                            ))
    +                    if self.explicit is None:
    +                        self.explicit = ((class_, tag), )
    +                    else:
    +                        self.explicit = self.explicit + ((class_, tag), )
    +
    +            elif implicit is not None:
    +                class_, tag = implicit
    +                if class_ not in CLASS_NAME_TO_NUM_MAP:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        implicit class must be one of "universal", "application",
    +                        "context", "private", not %s
    +                        ''',
    +                        repr(class_)
    +                    ))
    +                if tag is not None:
    +                    if not isinstance(tag, int_types):
    +                        raise TypeError(unwrap(
    +                            '''
    +                            implicit tag must be an integer, not %s
    +                            ''',
    +                            type_name(tag)
    +                        ))
    +                self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
    +                self.tag = tag
    +                self.implicit = True
    +            else:
    +                if class_ is not None:
    +                    if class_ not in CLASS_NAME_TO_NUM_MAP:
    +                        raise ValueError(unwrap(
    +                            '''
    +                            class_ must be one of "universal", "application",
    +                            "context", "private", not %s
    +                            ''',
    +                            repr(class_)
    +                        ))
    +                    self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
    +
    +                if self.class_ is None:
    +                    self.class_ = 0
    +
    +                if tag is not None:
    +                    self.tag = tag
    +
    +            if method is not None:
    +                if method not in set(["primitive", 0, "constructed", 1]):
    +                    raise ValueError(unwrap(
    +                        '''
    +                        method must be one of "primitive" or "constructed",
    +                        not %s
    +                        ''',
    +                        repr(method)
    +                    ))
    +                if method == "primitive":
    +                    method = 0
    +                elif method == "constructed":
    +                    method = 1
    +                self.method = method
    +
    +            if no_explicit:
    +                self.explicit = None
    +
    +            if contents is not None:
    +                self.contents = contents
    +
    +            elif default is not None:
    +                self.set(default)
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +            raise e
    +
    +    def __str__(self):
    +        """
    +        Since str is different in Python 2 and 3, this calls the appropriate
    +        method, __unicode__() or __bytes__()
    +
    +        :return:
    +            A unicode string
    +        """
    +
    +        if _PY2:
    +            return self.__bytes__()
    +        else:
    +            return self.__unicode__()
    +
    +    def __repr__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        if _PY2:
    +            return '<%s %s b%s>' % (type_name(self), id(self), repr(self.dump()))
    +        else:
    +            return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
    +
    +    def __bytes__(self):
    +        """
    +        A fall-back method for print() in Python 2
    +
    +        :return:
    +            A byte string of the output of repr()
    +        """
    +
    +        return self.__repr__().encode('utf-8')
    +
    +    def __unicode__(self):
    +        """
    +        A fall-back method for print() in Python 3
    +
    +        :return:
    +            A unicode string of the output of repr()
    +        """
    +
    +        return self.__repr__()
    +
    +    def _new_instance(self):
    +        """
    +        Constructs a new copy of the current object, preserving any tagging
    +
    +        :return:
    +            An Asn1Value object
    +        """
    +
    +        new_obj = self.__class__()
    +        new_obj.class_ = self.class_
    +        new_obj.tag = self.tag
    +        new_obj.implicit = self.implicit
    +        new_obj.explicit = self.explicit
    +        return new_obj
    +
    +    def __copy__(self):
    +        """
    +        Implements the copy.copy() interface
    +
    +        :return:
    +            A new shallow copy of the current Asn1Value object
    +        """
    +
    +        new_obj = self._new_instance()
    +        new_obj._copy(self, copy.copy)
    +        return new_obj
    +
    +    def __deepcopy__(self, memo):
    +        """
    +        Implements the copy.deepcopy() interface
    +
    +        :param memo:
    +            A dict for memoization
    +
    +        :return:
    +            A new deep copy of the current Asn1Value object
    +        """
    +
    +        new_obj = self._new_instance()
    +        memo[id(self)] = new_obj
    +        new_obj._copy(self, copy.deepcopy)
    +        return new_obj
    +
    +    def copy(self):
    +        """
    +        Copies the object, preserving any special tagging from it
    +
    +        :return:
    +            An Asn1Value object
    +        """
    +
    +        return copy.deepcopy(self)
    +
    +    def retag(self, tagging, tag=None):
    +        """
    +        Copies the object, applying a new tagging to it
    +
    +        :param tagging:
    +            A dict containing the keys "explicit" and "implicit". Legacy
    +            API allows a unicode string of "implicit" or "explicit".
    +
    +        :param tag:
+            An integer tag number. Only used when tagging is a unicode string.
    +
    +        :return:
    +            An Asn1Value object
    +        """
    +
    +        # This is required to preserve the old API
    +        if not isinstance(tagging, dict):
    +            tagging = {tagging: tag}
    +        new_obj = self.__class__(explicit=tagging.get('explicit'), implicit=tagging.get('implicit'))
    +        new_obj._copy(self, copy.deepcopy)
    +        return new_obj
    +
    +    def untag(self):
    +        """
    +        Copies the object, removing any special tagging from it
    +
    +        :return:
    +            An Asn1Value object
    +        """
    +
    +        new_obj = self.__class__()
    +        new_obj._copy(self, copy.deepcopy)
    +        return new_obj
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Asn1Value object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference of copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        if self.__class__ != other.__class__:
    +            raise TypeError(unwrap(
    +                '''
    +                Can not copy values from %s object to %s object
    +                ''',
    +                type_name(other),
    +                type_name(self)
    +            ))
    +
    +        self.contents = other.contents
    +        self._native = copy_func(other._native)
    +
    +    def debug(self, nest_level=1):
    +        """
    +        Show the binary data and parsed data in a tree structure
    +        """
    +
    +        prefix = '  ' * nest_level
    +
    +        # This interacts with Any and moves the tag, implicit, explicit, _header,
+        # contents, _trailer to the parsed value so duplicate data isn't present
    +        has_parsed = hasattr(self, 'parsed')
    +
    +        _basic_debug(prefix, self)
    +        if has_parsed:
    +            self.parsed.debug(nest_level + 2)
    +        elif hasattr(self, 'chosen'):
    +            self.chosen.debug(nest_level + 2)
    +        else:
    +            if _PY2 and isinstance(self.native, byte_cls):
    +                print('%s    Native: b%s' % (prefix, repr(self.native)))
    +            else:
    +                print('%s    Native: %s' % (prefix, self.native))
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        contents = self.contents
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._header is not None and self._header[-1:] == b'\x80':
    +            force = True
    +
    +        if self._header is None or force:
    +            if isinstance(self, Constructable) and self._indefinite:
    +                self.method = 0
    +
    +            header = _dump_header(self.class_, self.method, self.tag, self.contents)
    +
    +            if self.explicit is not None:
    +                for class_, tag in self.explicit:
    +                    header = _dump_header(class_, 1, tag, header + self.contents) + header
    +
    +            self._header = header
    +            self._trailer = b''
    +
    +        return self._header + contents + self._trailer
    +
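+
+# Editor's illustrative sketch, not part of asn1crypto: explicit tagging wraps
+# the whole encoding in an extra constructed header, while implicit tagging
+# only replaces the class and tag of the existing header. OctetString is
+# defined later in this module; names in the body resolve at call time.
+def _example_tagging():
+    plain = OctetString(b'hi').dump()                 # b'\x04\x02hi'
+    implicit = OctetString(b'hi', implicit=0).dump()  # b'\x80\x02hi'
+    explicit = OctetString(b'hi', explicit=0).dump()  # b'\xa0\x04\x04\x02hi'
+    return plain, implicit, explicit
+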
    +
    +class ValueMap():
    +    """
    +    Basic functionality that allows for mapping values from ints or OIDs to
    +    python unicode strings
    +    """
    +
    +    # A dict from primitive value (int or OID) to unicode string. This needs
    +    # to be defined in the source code
    +    _map = None
    +
    +    # A dict from unicode string to int/OID. This is automatically generated
    +    # from _map the first time it is needed
    +    _reverse_map = None
    +
    +    def _setup(self):
    +        """
    +        Generates _reverse_map from _map
    +        """
    +
    +        cls = self.__class__
    +        if cls._map is None or cls._reverse_map is not None:
    +            return
    +        cls._reverse_map = {}
    +        for key, value in cls._map.items():
    +            cls._reverse_map[value] = key
    +
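+
+# Editor's illustrative sketch, not part of asn1crypto: subclasses declare only
+# the forward _map; _setup() derives _reverse_map on first use. A hypothetical
+# Enumerated subclass (Enumerated is defined later in this module):
+def _example_value_map():
+    class _OnOff(Enumerated):
+        _map = {0: 'off', 1: 'on'}
+    return _OnOff('on').native  # 'on'
+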
    +
    +class Castable(object):
    +    """
    +    A mixin to handle converting an object between different classes that
    +    represent the same encoded value, but with different rules for converting
    +    to and from native Python values
    +    """
    +
    +    def cast(self, other_class):
    +        """
    +        Converts the current object into an object of a different class. The
    +        new class must use the ASN.1 encoding for the value.
    +
    +        :param other_class:
    +            The class to instantiate the new object from
    +
    +        :return:
    +            An instance of the type other_class
    +        """
    +
    +        if other_class.tag != self.__class__.tag:
    +            raise TypeError(unwrap(
    +                '''
+                Can not convert a value from %s object to %s object since they
    +                use different tags: %d versus %d
    +                ''',
    +                type_name(other_class),
    +                type_name(self),
    +                other_class.tag,
    +                self.__class__.tag
    +            ))
    +
    +        new_obj = other_class()
    +        new_obj.class_ = self.class_
    +        new_obj.implicit = self.implicit
    +        new_obj.explicit = self.explicit
    +        new_obj._header = self._header
    +        new_obj.contents = self.contents
    +        new_obj._trailer = self._trailer
    +        if isinstance(self, Constructable):
    +            new_obj.method = self.method
    +            new_obj._indefinite = self._indefinite
    +        return new_obj
    +
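+
+# Editor's illustrative sketch, not part of asn1crypto: cast() re-wraps the
+# same encoded contents in a sibling class sharing the tag, changing only the
+# .native interpretation. OctetString and IntegerOctetString both use tag 4:
+def _example_cast():
+    octets = OctetString(b'\x01\x00')
+    return octets.cast(IntegerOctetString).native  # 256
+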
    +
    +class Constructable(object):
    +    """
    +    A mixin to handle string types that may be constructed from chunks
    +    contained within an indefinite length BER-encoded container
    +    """
    +
    +    # Instance attribute indicating if an object was indefinite
    +    # length when parsed - affects parsing and dumping
    +    _indefinite = False
    +
    +    def _merge_chunks(self):
    +        """
    +        :return:
    +            A concatenation of the native values of the contained chunks
    +        """
    +
    +        if not self._indefinite:
    +            return self._as_chunk()
    +
    +        pointer = 0
    +        contents_len = len(self.contents)
    +        output = None
    +
    +        while pointer < contents_len:
    +            # We pass the current class as the spec so content semantics are preserved
    +            sub_value, pointer = _parse_build(self.contents, pointer, spec=self.__class__)
    +            if output is None:
    +                output = sub_value._merge_chunks()
    +            else:
    +                output += sub_value._merge_chunks()
    +
    +        if output is None:
    +            return self._as_chunk()
    +
    +        return output
    +
    +    def _as_chunk(self):
    +        """
    +        A method to return a chunk of data that can be combined for
    +        constructed method values
    +
    +        :return:
    +            A native Python value that can be added together. Examples include
    +            byte strings, unicode strings or tuples.
    +        """
    +
    +        return self.contents
    +
    +    def _setable_native(self):
    +        """
    +        Returns a native value that can be round-tripped into .set(), to
    +        result in a DER encoding. This differs from .native in that .native
    +        is designed for the end use, and may account for the fact that the
    +        merged value is further parsed as ASN.1, such as in the case of
    +        ParsableOctetString() and ParsableOctetBitString().
    +
    +        :return:
    +            A python value that is valid to pass to .set()
    +        """
    +
    +        return self.native
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Constructable object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference of copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(Constructable, self)._copy(other, copy_func)
    +        # We really don't want to dump BER encodings, so if we see an
    +        # indefinite encoding, let's re-encode it
    +        if other._indefinite:
    +            self.set(other._setable_native())
    +
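+
+# Editor's illustrative sketch, not part of asn1crypto: a minimal BER
+# indefinite-length, constructed OCTET STRING (24 80 ... 00 00) carrying one
+# definite chunk 04 02 68 69; parsing merges the chunks back together.
+def _example_indefinite():
+    value = OctetString.load(b'\x24\x80\x04\x02hi\x00\x00')
+    return value.native  # b'hi'
+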
    +
    +class Void(Asn1Value):
    +    """
    +    A representation of an optional value that is not present. Has .native
    +    property and .dump() method to be compatible with other value classes.
    +    """
    +
    +    contents = b''
    +
    +    def __eq__(self, other):
    +        """
    +        :param other:
    +            The other Primitive to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        return other.__class__ == self.__class__
    +
    +    def __nonzero__(self):
    +        return False
    +
    +    def __len__(self):
    +        return 0
    +
    +    def __iter__(self):
    +        return iter(())
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            None
    +        """
    +
    +        return None
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        return b''
    +
    +
    +VOID = Void()
    +
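+
+# Editor's illustrative sketch, not part of asn1crypto: VOID is the shared
+# sentinel for absent optional values - falsy, empty and encoding to nothing.
+def _example_void():
+    return bool(VOID), VOID.native, VOID.dump()  # (False, None, b'')
+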
    +
    +class Any(Asn1Value):
    +    """
    +    A value class that can contain any value, and allows for easy parsing of
    +    the underlying encoded value using a spec. This is normally contained in
    +    a Structure that has an ObjectIdentifier field and _oid_pair and _oid_specs
    +    defined.
    +    """
    +
    +    # The parsed value object
    +    _parsed = None
    +
    +    def __init__(self, value=None, **kwargs):
    +        """
    +        Sets the value of the object before passing to Asn1Value.__init__()
    +
    +        :param value:
    +            An Asn1Value object that will be set as the parsed value
    +        """
    +
    +        Asn1Value.__init__(self, **kwargs)
    +
    +        try:
    +            if value is not None:
    +                if not isinstance(value, Asn1Value):
    +                    raise TypeError(unwrap(
    +                        '''
    +                        value must be an instance of Asn1Value, not %s
    +                        ''',
    +                        type_name(value)
    +                    ))
    +
    +                self._parsed = (value, value.__class__, None)
    +                self.contents = value.dump()
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +            raise e
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            The .native value from the parsed value object
    +        """
    +
    +        if self._parsed is None:
    +            self.parse()
    +
    +        return self._parsed[0].native
    +
    +    @property
    +    def parsed(self):
    +        """
    +        Returns the parsed object from .parse()
    +
    +        :return:
    +            The object returned by .parse()
    +        """
    +
    +        if self._parsed is None:
    +            self.parse()
    +
    +        return self._parsed[0]
    +
    +    def parse(self, spec=None, spec_params=None):
    +        """
    +        Parses the contents generically, or using a spec with optional params
    +
    +        :param spec:
    +            A class derived from Asn1Value that defines what class_ and tag the
    +            value should have, and the semantics of the encoded value. The
    +            return value will be of this type. If omitted, the encoded value
    +            will be decoded using the standard universal tag based on the
    +            encoded tag number.
    +
    +        :param spec_params:
    +            A dict of params to pass to the spec object
    +
    +        :return:
    +            An object of the type spec, or if not present, a child of Asn1Value
    +        """
    +
    +        if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
    +            try:
    +                passed_params = spec_params or {}
    +                _tag_type_to_explicit_implicit(passed_params)
    +                if self.explicit is not None:
    +                    if 'explicit' in passed_params:
    +                        passed_params['explicit'] = self.explicit + passed_params['explicit']
    +                    else:
    +                        passed_params['explicit'] = self.explicit
    +                contents = self._header + self.contents + self._trailer
    +                parsed_value, _ = _parse_build(
    +                    contents,
    +                    spec=spec,
    +                    spec_params=passed_params
    +                )
    +                self._parsed = (parsed_value, spec, spec_params)
    +
    +                # Once we've parsed the Any value, clear any attributes from this object
    +                # since they are now duplicate
    +                self.tag = None
    +                self.explicit = None
    +                self.implicit = False
    +                self._header = b''
    +                self.contents = contents
    +                self._trailer = b''
    +
    +            except (ValueError, TypeError) as e:
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +                raise e
    +        return self._parsed[0]
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Any object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference of copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(Any, self)._copy(other, copy_func)
    +        self._parsed = copy_func(other._parsed)
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        if self._parsed is None:
    +            self.parse()
    +
    +        return self._parsed[0].dump(force=force)
    +
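+
+# Editor's illustrative sketch, not part of asn1crypto: an Any value keeps the
+# raw encoding and exposes the wrapped object through .parsed once set or
+# parsed. Integer is defined later in this module; it resolves at call time.
+def _example_any():
+    wrapped = Any(Integer(5))
+    return wrapped.parsed.native  # 5
+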
    +
    +class Choice(Asn1Value):
    +    """
    +    A class to handle when a value may be one of several options
    +    """
    +
    +    # The index in _alternatives of the validated alternative
    +    _choice = None
    +
    +    # The name of the chosen alternative
    +    _name = None
    +
    +    # The Asn1Value object for the chosen alternative
    +    _parsed = None
    +
    +    # Choice overrides .contents to be a property so that the code expecting
    +    # the .contents attribute will get the .contents of the chosen alternative
    +    _contents = None
    +
    +    # A list of tuples in one of the following forms.
    +    #
    +    # Option 1, a unicode string field name and a value class
    +    #
    +    # ("name", Asn1ValueClass)
    +    #
    +    # Option 2, same as Option 1, but with a dict of class params
    +    #
    +    # ("name", Asn1ValueClass, {'explicit': 5})
    +    _alternatives = None
    +
    +    # A dict that maps tuples of (class_, tag) to an index in _alternatives
    +    _id_map = None
    +
    +    # A dict that maps alternative names to an index in _alternatives
    +    _name_map = None
    +
    +    @classmethod
    +    def load(cls, encoded_data, strict=False, **kwargs):
    +        """
    +        Loads a BER/DER-encoded byte string using the current class as the spec
    +
    +        :param encoded_data:
    +            A byte string of BER or DER encoded data
    +
    +        :param strict:
    +            A boolean indicating if trailing data should be forbidden - if so, a
    +            ValueError will be raised when trailing data exists
    +
    +        :return:
+            An instance of the current class
    +        """
    +
    +        if not isinstance(encoded_data, byte_cls):
    +            raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data))
    +
    +        value, _ = _parse_build(encoded_data, spec=cls, spec_params=kwargs, strict=strict)
    +        return value
    +
    +    def _setup(self):
    +        """
    +        Generates _id_map from _alternatives to allow validating contents
    +        """
    +
    +        cls = self.__class__
    +        cls._id_map = {}
    +        cls._name_map = {}
    +        for index, info in enumerate(cls._alternatives):
    +            if len(info) < 3:
    +                info = info + ({},)
    +                cls._alternatives[index] = info
    +            id_ = _build_id_tuple(info[2], info[1])
    +            cls._id_map[id_] = index
    +            cls._name_map[info[0]] = index
    +
    +    def __init__(self, name=None, value=None, **kwargs):
    +        """
    +        Checks to ensure implicit tagging is not being used since it is
    +        incompatible with Choice, then forwards on to Asn1Value.__init__()
    +
    +        :param name:
    +            The name of the alternative to be set - used with value.
    +            Alternatively this may be a dict with a single key being the name
    +            and the value being the value, or a two-element tuple of the name
    +            and the value.
    +
    +        :param value:
    +            The alternative value to set - used with name
    +
    +        :raises:
    +            ValueError - when implicit param is passed (or legacy tag_type param is "implicit")
    +        """
    +
    +        _tag_type_to_explicit_implicit(kwargs)
    +
    +        Asn1Value.__init__(self, **kwargs)
    +
    +        try:
    +            if kwargs.get('implicit') is not None:
    +                raise ValueError(unwrap(
    +                    '''
    +                    The Choice type can not be implicitly tagged even if in an
    +                    implicit module - due to its nature any tagging must be
    +                    explicit
    +                    '''
    +                ))
    +
    +            if name is not None:
    +                if isinstance(name, dict):
    +                    if len(name) != 1:
    +                        raise ValueError(unwrap(
    +                            '''
    +                            When passing a dict as the "name" argument to %s,
    +                            it must have a single key/value - however %d were
    +                            present
    +                            ''',
    +                            type_name(self),
    +                            len(name)
    +                        ))
    +                    name, value = list(name.items())[0]
    +
    +                if isinstance(name, tuple):
    +                    if len(name) != 2:
    +                        raise ValueError(unwrap(
    +                            '''
    +                            When passing a tuple as the "name" argument to %s,
    +                            it must have two elements, the name and value -
    +                            however %d were present
    +                            ''',
    +                            type_name(self),
    +                            len(name)
    +                        ))
    +                    value = name[1]
    +                    name = name[0]
    +
    +                if name not in self._name_map:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        The name specified, "%s", is not a valid alternative
    +                        for %s
    +                        ''',
    +                        name,
    +                        type_name(self)
    +                    ))
    +
    +                self._choice = self._name_map[name]
    +                _, spec, params = self._alternatives[self._choice]
    +
    +                if not isinstance(value, spec):
    +                    value = spec(value, **params)
    +                else:
    +                    value = _fix_tagging(value, params)
    +                self._parsed = value
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +            raise e
    +
    +    @property
    +    def contents(self):
    +        """
    +        :return:
    +            A byte string of the DER-encoded contents of the chosen alternative
    +        """
    +
    +        if self._parsed is not None:
    +            return self._parsed.contents
    +
    +        return self._contents
    +
    +    @contents.setter
    +    def contents(self, value):
    +        """
    +        :param value:
    +            A byte string of the DER-encoded contents of the chosen alternative
    +        """
    +
    +        self._contents = value
    +
    +    @property
    +    def name(self):
    +        """
    +        :return:
    +            A unicode string of the field name of the chosen alternative
    +        """
    +        if not self._name:
    +            self._name = self._alternatives[self._choice][0]
    +        return self._name
    +
    +    def parse(self):
    +        """
    +        Parses the detected alternative
    +
    +        :return:
    +            An Asn1Value object of the chosen alternative
    +        """
    +
    +        if self._parsed is None:
    +            try:
    +                _, spec, params = self._alternatives[self._choice]
    +                self._parsed, _ = _parse_build(self._contents, spec=spec, spec_params=params)
    +            except (ValueError, TypeError) as e:
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +                raise e
    +        return self._parsed
    +
    +    @property
    +    def chosen(self):
    +        """
    +        :return:
    +            An Asn1Value object of the chosen alternative
    +        """
    +
    +        return self.parse()
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            The .native value from the contained value object
    +        """
    +
    +        return self.chosen.native
    +
    +    def validate(self, class_, tag, contents):
    +        """
    +        Ensures that the class and tag specified exist as an alternative
    +
    +        :param class_:
    +            The integer class_ from the encoded value header
    +
    +        :param tag:
    +            The integer tag from the encoded value header
    +
    +        :param contents:
    +            A byte string of the contents of the value - used when the object
    +            is explicitly tagged
    +
    +        :raises:
    +            ValueError - when value is not a valid alternative
    +        """
    +
    +        id_ = (class_, tag)
    +
    +        if self.explicit is not None:
    +            if self.explicit[-1] != id_:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s was explicitly tagged, but the value provided does not
    +                    match the class and tag
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +            ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents))
    +            id_ = (class_, tag)
    +
    +        if id_ in self._id_map:
    +            self._choice = self._id_map[id_]
    +            return
    +
    +        # This means the Choice was implicitly tagged
    +        if self.class_ is not None and self.tag is not None:
    +            if len(self._alternatives) > 1:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s was implicitly tagged, but more than one alternative
    +                    exists
    +                    ''',
    +                    type_name(self)
    +                ))
    +            if id_ == (self.class_, self.tag):
    +                self._choice = 0
    +                return
    +
    +        asn1 = self._format_class_tag(class_, tag)
    +        asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map]
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Value %s did not match the class and tag of any of the alternatives
    +            in %s: %s
    +            ''',
    +            asn1,
    +            type_name(self),
    +            ', '.join(asn1s)
    +        ))
    +
    +    def _format_class_tag(self, class_, tag):
    +        """
    +        :return:
    +            A unicode string of a human-friendly representation of the class and tag
    +        """
    +
    +        return '[%s %s]' % (CLASS_NUM_TO_NAME_MAP[class_].upper(), tag)
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Choice object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference of copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(Choice, self)._copy(other, copy_func)
    +        self._choice = other._choice
    +        self._name = other._name
    +        self._parsed = copy_func(other._parsed)
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._header is not None and self._header[-1:] == b'\x80':
    +            force = True
    +
    +        self._contents = self.chosen.dump(force=force)
    +        if self._header is None or force:
    +            self._header = b''
    +            if self.explicit is not None:
    +                for class_, tag in self.explicit:
    +                    self._header = _dump_header(class_, 1, tag, self._header + self._contents) + self._header
    +        return self._header + self._contents
    +
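+
+# Editor's illustrative sketch, not part of asn1crypto: a hypothetical Choice
+# whose alternatives are distinguished purely by their implicit context tags.
+# The class is defined inside the function because OctetString appears later
+# in this module.
+def _example_choice():
+    class _ExampleChoice(Choice):
+        _alternatives = [
+            ('general', OctetString, {'implicit': 0}),
+            ('specific', OctetString, {'implicit': 1}),
+        ]
+    value = _ExampleChoice(name='specific', value=b'now')
+    return value.name, value.chosen.native  # ('specific', b'now')
+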
    +
    +class Concat(object):
    +    """
+    A class that contains two or more encoded child values concatenated
    +    together. THIS IS NOT PART OF THE ASN.1 SPECIFICATION! This exists to handle
    +    the x509.TrustedCertificate() class for OpenSSL certificates containing
    +    extra information.
    +    """
    +
    +    # A list of the specs of the concatenated values
    +    _child_specs = None
    +
    +    _children = None
    +
    +    @classmethod
    +    def load(cls, encoded_data, strict=False):
    +        """
    +        Loads a BER/DER-encoded byte string using the current class as the spec
    +
    +        :param encoded_data:
    +            A byte string of BER or DER encoded data
    +
    +        :param strict:
    +            A boolean indicating if trailing data should be forbidden - if so, a
    +            ValueError will be raised when trailing data exists
    +
    +        :return:
    +            A Concat object
    +        """
    +
    +        return cls(contents=encoded_data, strict=strict)
    +
    +    def __init__(self, value=None, contents=None, strict=False):
    +        """
    +        :param value:
    +            A native Python datatype to initialize the object value with
    +
    +        :param contents:
    +            A byte string of the encoded contents of the value
    +
    +        :param strict:
    +            A boolean indicating if trailing data should be forbidden - if so, a
    +            ValueError will be raised when trailing data exists in contents
    +
    +        :raises:
    +            ValueError - when an error occurs with one of the children
    +            TypeError - when an error occurs with one of the children
    +        """
    +
    +        if contents is not None:
    +            try:
    +                contents_len = len(contents)
    +                self._children = []
    +
    +                offset = 0
    +                for spec in self._child_specs:
    +                    if offset < contents_len:
    +                        child_value, offset = _parse_build(contents, pointer=offset, spec=spec)
    +                    else:
    +                        child_value = spec()
    +                    self._children.append(child_value)
    +
    +                if strict and offset != contents_len:
    +                    extra_bytes = contents_len - offset
    +                    raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
    +
    +            except (ValueError, TypeError) as e:
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +                raise e
    +
    +        if value is not None:
    +            if self._children is None:
    +                self._children = [None] * len(self._child_specs)
    +            for index, data in enumerate(value):
    +                self.__setitem__(index, data)
    +
    +    def __str__(self):
    +        """
    +        Since str is different in Python 2 and 3, this calls the appropriate
    +        method, __unicode__() or __bytes__()
    +
    +        :return:
    +            A unicode string
    +        """
    +
    +        if _PY2:
    +            return self.__bytes__()
    +        else:
    +            return self.__unicode__()
    +
    +    def __bytes__(self):
    +        """
    +        A byte string of the DER-encoded contents
    +        """
    +
    +        return self.dump()
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        return repr(self)
    +
    +    def __repr__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump()))
    +
    +    def __copy__(self):
    +        """
    +        Implements the copy.copy() interface
    +
    +        :return:
    +            A new shallow copy of the Concat object
    +        """
    +
    +        new_obj = self.__class__()
    +        new_obj._copy(self, copy.copy)
    +        return new_obj
    +
    +    def __deepcopy__(self, memo):
    +        """
    +        Implements the copy.deepcopy() interface
    +
    +        :param memo:
    +            A dict for memoization
    +
    +        :return:
    +            A new deep copy of the Concat object and all child objects
    +        """
    +
    +        new_obj = self.__class__()
    +        memo[id(self)] = new_obj
    +        new_obj._copy(self, copy.deepcopy)
    +        return new_obj
    +
    +    def copy(self):
    +        """
    +        Copies the object
    +
    +        :return:
    +            A Concat object
    +        """
    +
    +        return copy.deepcopy(self)
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Concat object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        if self.__class__ != other.__class__:
    +            raise TypeError(unwrap(
    +                '''
    +                Can not copy values from %s object to %s object
    +                ''',
    +                type_name(other),
    +                type_name(self)
    +            ))
    +
    +        self._children = copy_func(other._children)
    +
    +    def debug(self, nest_level=1):
    +        """
    +        Show the binary data and parsed data in a tree structure
    +        """
    +
    +        prefix = '  ' * nest_level
    +        print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
    +        print('%s  Children:' % (prefix,))
    +        for child in self._children:
    +            child.debug(nest_level + 2)
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        contents = b''
    +        for child in self._children:
    +            contents += child.dump(force=force)
    +        return contents
    +
    +    @property
    +    def contents(self):
    +        """
    +        :return:
    +            A byte string of the DER-encoded contents of the children
    +        """
    +
    +        return self.dump()
    +
    +    def __len__(self):
    +        """
    +        :return:
    +            Integer
    +        """
    +
    +        return len(self._children)
    +
    +    def __getitem__(self, key):
    +        """
    +        Allows accessing children by index
    +
    +        :param key:
    +            An integer of the child index
    +
    +        :raises:
    +            KeyError - when an index is invalid
    +
    +        :return:
    +            The Asn1Value object of the child specified
    +        """
    +
    +        if key > len(self._child_specs) - 1 or key < 0:
    +            raise KeyError(unwrap(
    +                '''
+                No child is defined for position %d of %s
    +                ''',
    +                key,
    +                type_name(self)
    +            ))
    +
    +        return self._children[key]
    +
    +    def __setitem__(self, key, value):
    +        """
+        Allows setting children by index
    +
    +        :param key:
    +            An integer of the child index
    +
    +        :param value:
    +            An Asn1Value object to set the child to
    +
    +        :raises:
    +            KeyError - when an index is invalid
    +            ValueError - when the value is not an instance of Asn1Value
    +        """
    +
    +        if key > len(self._child_specs) - 1 or key < 0:
    +            raise KeyError(unwrap(
    +                '''
    +                No child is defined for position %d of %s
    +                ''',
    +                key,
    +                type_name(self)
    +            ))
    +
    +        if not isinstance(value, Asn1Value):
    +            raise ValueError(unwrap(
    +                '''
    +                Value for child %s of %s is not an instance of
    +                asn1crypto.core.Asn1Value
    +                ''',
    +                key,
    +                type_name(self)
    +            ))
    +
    +        self._children[key] = value
    +
    +    def __iter__(self):
    +        """
    +        :return:
    +            An iterator of child values
    +        """
    +
    +        return iter(self._children)
    +
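+# Usage sketch for Concat (hypothetical subclass; Integer is defined later in
+# this module): children are parsed consecutively, one per entry in
+# _child_specs, and dump() re-emits their encodings back to back.
+#
+#     class IntPair(Concat):
+#         _child_specs = [Integer, Integer]
+#
+#     pair = IntPair(value=[Integer(1), Integer(2)])
+#     assert pair.dump() == b'\x02\x01\x01\x02\x01\x02'
+#     assert IntPair.load(pair.dump())[1].native == 2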
    +
    +class Primitive(Asn1Value):
    +    """
    +    Sets the class_ and method attributes for primitive, universal values
    +    """
    +
    +    class_ = 0
    +
    +    method = 0
    +
    +    def __init__(self, value=None, default=None, contents=None, **kwargs):
    +        """
    +        Sets the value of the object before passing to Asn1Value.__init__()
    +
    +        :param value:
    +            A native Python datatype to initialize the object value with
    +
    +        :param default:
    +            The default value if no value is specified
    +
    +        :param contents:
    +            A byte string of the encoded contents of the value
    +        """
    +
    +        Asn1Value.__init__(self, **kwargs)
    +
    +        try:
    +            if contents is not None:
    +                self.contents = contents
    +
    +            elif value is not None:
    +                self.set(value)
    +
    +            elif default is not None:
    +                self.set(default)
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +            raise e
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A byte string
    +        """
    +
    +        if not isinstance(value, byte_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a byte string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._native = value
    +        self.contents = value
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._header is not None and self._header[-1:] == b'\x80':
    +            force = True
    +
    +        if force:
    +            native = self.native
    +            self.contents = None
    +            self.set(native)
    +
    +        return Asn1Value.dump(self)
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        :param other:
    +            The other Primitive to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, Primitive):
    +            return False
    +
    +        if self.contents != other.contents:
    +            return False
    +
    +        # We compare class tag numbers since object tag numbers could be
    +        # different due to implicit or explicit tagging
    +        if self.__class__.tag != other.__class__.tag:
    +            return False
    +
    +        if self.__class__ == other.__class__ and self.contents == other.contents:
    +            return True
    +
    +        # If the objects share a common base class that is not too low-level
    +        # then we can compare the contents
    +        self_bases = (set(self.__class__.__bases__) | set([self.__class__])) - set([Asn1Value, Primitive, ValueMap])
    +        other_bases = (set(other.__class__.__bases__) | set([other.__class__])) - set([Asn1Value, Primitive, ValueMap])
+        if self_bases & other_bases:
    +            return self.contents == other.contents
    +
    +        # When tagging is going on, do the extra work of constructing new
+        # objects to see if the dumped representations are the same
    +        if self.implicit or self.explicit or other.implicit or other.explicit:
    +            return self.untag().dump() == other.untag().dump()
    +
    +        return self.dump() == other.dump()
    +
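+# Equality sketch for Primitive.__eq__() above (Integer and OctetString are
+# defined later in this module): comparisons are based on the class tag and
+# the encoded contents, so tagging variants of the same value still compare
+# equal after untagging.
+#
+#     assert Integer(1) == Integer(1)
+#     assert Integer(1) != Integer(2)
+#     assert OctetString(b'\x01') != Integer(1)  # same contents, tag 4 vs 2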
    +
    +class AbstractString(Constructable, Primitive):
    +    """
    +    A base class for all strings that have a known encoding. In general, we do
    +    not worry ourselves with confirming that the decoded values match a specific
    +    set of characters, only that they are decoded into a Python unicode string
    +    """
    +
+    # The Python encoding name to use when decoding or encoding the contents
    +    _encoding = 'latin1'
    +
    +    # Instance attribute of (possibly-merged) unicode string
    +    _unicode = None
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the string
    +
    +        :param value:
    +            A unicode string
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._unicode = value
    +        self.contents = value.encode(self._encoding)
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        if self.contents is None:
    +            return ''
    +        if self._unicode is None:
    +            self._unicode = self._merge_chunks().decode(self._encoding)
    +        return self._unicode
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another AbstractString object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(AbstractString, self)._copy(other, copy_func)
    +        self._unicode = other._unicode
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A unicode string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        return self.__unicode__()
    +
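+# Subclass sketch for AbstractString (hypothetical class; UNIVERSAL tag 12 is
+# UTF8String in X.680): concrete string types only need to pick a tag and a
+# Python codec name.
+#
+#     class Utf8Text(AbstractString):
+#         tag = 12
+#         _encoding = 'utf-8'
+#
+#     assert Utf8Text(u'h\u00e9llo').dump() == b'\x0c\x06h\xc3\xa9llo'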
    +
    +class Boolean(Primitive):
    +    """
    +    Represents a boolean in both ASN.1 and Python
    +    """
    +
    +    tag = 1
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            True, False or another value that works with bool()
    +        """
    +
    +        self._native = bool(value)
    +        self.contents = b'\x00' if not value else b'\xff'
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    # Python 2
    +    def __nonzero__(self):
    +        """
    +        :return:
    +            True or False
    +        """
    +        return self.__bool__()
    +
    +    def __bool__(self):
    +        """
    +        :return:
    +            True or False
    +        """
    +        return self.contents != b'\x00'
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            True, False or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native = self.__bool__()
    +        return self._native
    +
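+# DER sketch for Boolean: tag 1, one content byte, 0xff for true.
+#
+#     assert Boolean(True).dump() == b'\x01\x01\xff'
+#     assert Boolean(False).dump() == b'\x01\x01\x00'
+#     assert bool(Boolean(False)) is False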
    +
    +class Integer(Primitive, ValueMap):
    +    """
    +    Represents an integer in both ASN.1 and Python
    +    """
    +
    +    tag = 2
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            An integer, or a unicode string if _map is set
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if isinstance(value, str_cls):
    +            if self._map is None:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value is a unicode string, but no _map provided
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +            if value not in self._reverse_map:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value, %s, is not present in the _map
    +                    ''',
    +                    type_name(self),
    +                    value
    +                ))
    +
    +            value = self._reverse_map[value]
    +
    +        elif not isinstance(value, int_types):
    +            raise TypeError(unwrap(
    +                '''
+                %s value must be an integer, or a unicode string when a _map
+                is provided, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._native = self._map[value] if self._map and value in self._map else value
    +
    +        self.contents = int_to_bytes(value, signed=True)
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __int__(self):
    +        """
    +        :return:
    +            An integer
    +        """
    +        return int_from_bytes(self.contents, signed=True)
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            An integer or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native = self.__int__()
    +            if self._map is not None and self._native in self._map:
    +                self._native = self._map[self._native]
    +        return self._native
    +
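+# Mapping sketch for Integer (hypothetical subclass and names): a _map lets
+# named values stand in for their integer encodings in both directions.
+#
+#     class Version(Integer):
+#         _map = {0: 'v1', 1: 'v2', 2: 'v3'}
+#
+#     assert Version('v3').dump() == b'\x02\x01\x02'
+#     assert Version.load(b'\x02\x01\x00').native == 'v1'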
    +
    +class _IntegerBitString(object):
    +    """
    +    A mixin for IntegerBitString and BitString to parse the contents as an integer.
    +    """
    +
    +    # Tuple of 1s and 0s; set through native
    +    _unused_bits = ()
    +
    +    def _as_chunk(self):
    +        """
    +        Parse the contents of a primitive BitString encoding as an integer value.
    +        Allows reconstructing indefinite length values.
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +
    +        :return:
    +            A list with one tuple (value, bits, unused_bits) where value is an integer
    +            with the value of the BitString, bits is the bit count of value and
    +            unused_bits is a tuple of 1s and 0s.
    +        """
    +
    +        if self._indefinite:
    +            # return an empty chunk, for cases like \x23\x80\x00\x00
    +            return []
    +
    +        unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
    +        value = int_from_bytes(self.contents[1:])
    +        bits = (len(self.contents) - 1) * 8
    +
    +        if not unused_bits_len:
    +            return [(value, bits, ())]
    +
    +        if len(self.contents) == 1:
    +            # Disallowed by X.690 §8.6.2.3
    +            raise ValueError('Empty bit string has {0} unused bits'.format(unused_bits_len))
    +
    +        if unused_bits_len > 7:
    +            # Disallowed by X.690 §8.6.2.2
    +            raise ValueError('Bit string has {0} unused bits'.format(unused_bits_len))
    +
    +        unused_bits = _int_to_bit_tuple(value & ((1 << unused_bits_len) - 1), unused_bits_len)
    +        value >>= unused_bits_len
    +        bits -= unused_bits_len
    +
    +        return [(value, bits, unused_bits)]
    +
    +    def _chunks_to_int(self):
    +        """
    +        Combines the chunks into a single value.
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +
    +        :return:
    +            A tuple (value, bits, unused_bits) where value is an integer with the
    +            value of the BitString, bits is the bit count of value and unused_bits
    +            is a tuple of 1s and 0s.
    +        """
    +
    +        if not self._indefinite:
    +            # Fast path
    +            return self._as_chunk()[0]
    +
    +        value = 0
    +        total_bits = 0
    +        unused_bits = ()
    +
    +        # X.690 §8.6.3 allows empty indefinite encodings
    +        for chunk, bits, unused_bits in self._merge_chunks():
    +            if total_bits & 7:
    +                # Disallowed by X.690 §8.6.4
    +                raise ValueError('Only last chunk in a bit string may have unused bits')
    +            total_bits += bits
    +            value = (value << bits) | chunk
    +
    +        return value, total_bits, unused_bits
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another _IntegerBitString object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(_IntegerBitString, self)._copy(other, copy_func)
    +        self._unused_bits = other._unused_bits
    +
    +    @property
    +    def unused_bits(self):
    +        """
    +        The unused bits of the bit string encoding.
    +
    +        :return:
    +            A tuple of 1s and 0s
    +        """
    +
    +        # call native to set _unused_bits
    +        self.native
    +
    +        return self._unused_bits
    +
    +
    +class BitString(_IntegerBitString, Constructable, Castable, Primitive, ValueMap):
    +    """
    +    Represents a bit string from ASN.1 as a Python tuple of 1s and 0s
    +    """
    +
    +    tag = 3
    +
    +    _size = None
    +
    +    def _setup(self):
    +        """
    +        Generates _reverse_map from _map
    +        """
    +
    +        ValueMap._setup(self)
    +
    +        cls = self.__class__
    +        if cls._map is not None:
    +            cls._size = max(self._map.keys()) + 1
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
+            A tuple of integers 0 and 1, or a set of unicode strings when a
+            _map is defined
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if isinstance(value, set):
    +            if self._map is None:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s._map has not been defined
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +            bits = [0] * self._size
    +            self._native = value
    +            for index in range(0, self._size):
    +                key = self._map.get(index)
    +                if key is None:
    +                    continue
    +                if key in value:
    +                    bits[index] = 1
    +
    +            value = ''.join(map(str_cls, bits))
    +
    +        elif value.__class__ == tuple:
    +            if self._map is None:
    +                self._native = value
    +            else:
    +                self._native = set()
    +                for index, bit in enumerate(value):
    +                    if bit:
    +                        name = self._map.get(index, index)
    +                        self._native.add(name)
    +            value = ''.join(map(str_cls, value))
    +
    +        else:
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a tuple of ones and zeros or a set of unicode
    +                strings, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if self._map is not None:
    +            if len(value) > self._size:
    +                raise ValueError(unwrap(
    +                    '''
+                    %s value must be at most %s bits long, not %s bits
    +                    ''',
    +                    type_name(self),
    +                    self._size,
    +                    len(value)
    +                ))
+            # A NamedBitList must have trailing zero bits truncated. See
    +            # https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
    +            # section 11.2,
    +            # https://tools.ietf.org/html/rfc5280#page-134 and
    +            # https://www.ietf.org/mail-archive/web/pkix/current/msg10443.html
    +            value = value.rstrip('0')
    +        size = len(value)
    +
    +        size_mod = size % 8
    +        extra_bits = 0
    +        if size_mod != 0:
    +            extra_bits = 8 - size_mod
    +            value += '0' * extra_bits
    +
    +        size_in_bytes = int(math.ceil(size / 8))
    +
    +        if extra_bits:
    +            extra_bits_byte = int_to_bytes(extra_bits)
    +        else:
    +            extra_bits_byte = b'\x00'
    +
    +        if value == '':
    +            value_bytes = b''
    +        else:
    +            value_bytes = int_to_bytes(int(value, 2))
    +        if len(value_bytes) != size_in_bytes:
    +            value_bytes = (b'\x00' * (size_in_bytes - len(value_bytes))) + value_bytes
    +
    +        self.contents = extra_bits_byte + value_bytes
    +        self._unused_bits = (0,) * extra_bits
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __getitem__(self, key):
    +        """
    +        Retrieves a boolean version of one of the bits based on a name from the
    +        _map
    +
    +        :param key:
    +            The unicode string of one of the bit names
    +
    +        :raises:
    +            ValueError - when _map is not set or the key name is invalid
    +
    +        :return:
    +            A boolean if the bit is set
    +        """
    +
    +        is_int = isinstance(key, int_types)
    +        if not is_int:
    +            if not isinstance(self._map, dict):
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s._map has not been defined
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +            if key not in self._reverse_map:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s._map does not contain an entry for "%s"
    +                    ''',
    +                    type_name(self),
    +                    key
    +                ))
    +
    +        if self._native is None:
    +            self.native
    +
    +        if self._map is None:
    +            if len(self._native) >= key + 1:
    +                return bool(self._native[key])
    +            return False
    +
    +        if is_int:
    +            key = self._map.get(key, key)
    +
    +        return key in self._native
    +
    +    def __setitem__(self, key, value):
    +        """
    +        Sets one of the bits based on a name from the _map
    +
    +        :param key:
    +            The unicode string of one of the bit names
    +
    +        :param value:
    +            A boolean value
    +
    +        :raises:
    +            ValueError - when _map is not set or the key name is invalid
    +        """
    +
    +        is_int = isinstance(key, int_types)
    +        if not is_int:
    +            if self._map is None:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s._map has not been defined
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +            if key not in self._reverse_map:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s._map does not contain an entry for "%s"
    +                    ''',
    +                    type_name(self),
    +                    key
    +                ))
    +
    +        if self._native is None:
    +            self.native
    +
    +        if self._map is None:
    +            new_native = list(self._native)
    +            max_key = len(new_native) - 1
    +            if key > max_key:
    +                new_native.extend([0] * (key - max_key))
    +            new_native[key] = 1 if value else 0
    +            self._native = tuple(new_native)
    +
    +        else:
    +            if is_int:
    +                key = self._map.get(key, key)
    +
    +            if value:
    +                if key not in self._native:
    +                    self._native.add(key)
    +            else:
    +                if key in self._native:
    +                    self._native.remove(key)
    +
    +        self.set(self._native)
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            If a _map is set, a set of names, or if no _map is set, a tuple of
    +            integers 1 and 0. None if no value.
    +        """
    +
    +        # For BitString we default the value to be all zeros
    +        if self.contents is None:
    +            if self._map is None:
    +                self.set(())
    +            else:
    +                self.set(set())
    +
    +        if self._native is None:
    +            int_value, bit_count, self._unused_bits = self._chunks_to_int()
    +            bits = _int_to_bit_tuple(int_value, bit_count)
    +
    +            if self._map:
    +                self._native = set()
    +                for index, bit in enumerate(bits):
    +                    if bit:
    +                        name = self._map.get(index, index)
    +                        self._native.add(name)
    +            else:
    +                self._native = bits
    +        return self._native
    +
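+# NamedBitList sketch for BitString (hypothetical subclass and names): with a
+# _map the native value is a set of names, and trailing zero bits are
+# truncated per X.690 section 11.2 before encoding.
+#
+#     class KeyUsage(BitString):
+#         _map = {0: 'sign', 1: 'encrypt'}
+#
+#     assert KeyUsage({'sign'}).dump() == b'\x03\x02\x07\x80'
+#     assert KeyUsage.load(b'\x03\x02\x06\x40').native == set(['encrypt'])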
    +
    +class OctetBitString(Constructable, Castable, Primitive):
    +    """
    +    Represents a bit string in ASN.1 as a Python byte string
    +    """
    +
    +    tag = 3
    +
    +    # Instance attribute of (possibly-merged) byte string
    +    _bytes = None
    +
    +    # Tuple of 1s and 0s; set through native
    +    _unused_bits = ()
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A byte string
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, byte_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a byte string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._bytes = value
    +        # Set the unused bits to 0
    +        self.contents = b'\x00' + value
    +        self._unused_bits = ()
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __bytes__(self):
    +        """
    +        :return:
    +            A byte string
    +        """
    +
    +        if self.contents is None:
    +            return b''
    +        if self._bytes is None:
    +            if not self._indefinite:
    +                self._bytes, self._unused_bits = self._as_chunk()[0]
    +            else:
    +                chunks = self._merge_chunks()
    +                self._unused_bits = ()
    +                for chunk in chunks:
    +                    if self._unused_bits:
    +                        # Disallowed by X.690 §8.6.4
    +                        raise ValueError('Only last chunk in a bit string may have unused bits')
    +                    self._unused_bits = chunk[1]
    +                self._bytes = b''.join(chunk[0] for chunk in chunks)
    +
    +        return self._bytes
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another OctetBitString object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(OctetBitString, self)._copy(other, copy_func)
    +        self._bytes = other._bytes
    +        self._unused_bits = other._unused_bits
    +
    +    def _as_chunk(self):
    +        """
    +        Allows reconstructing indefinite length values
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +
    +        :return:
+            A list with one tuple, consisting of a byte string and a tuple of
+            the unused bits (1s and 0s)
    +        """
    +
    +        unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
    +        if not unused_bits_len:
    +            return [(self.contents[1:], ())]
    +
    +        if len(self.contents) == 1:
    +            # Disallowed by X.690 §8.6.2.3
    +            raise ValueError('Empty bit string has {0} unused bits'.format(unused_bits_len))
    +
    +        if unused_bits_len > 7:
    +            # Disallowed by X.690 §8.6.2.2
    +            raise ValueError('Bit string has {0} unused bits'.format(unused_bits_len))
    +
    +        mask = (1 << unused_bits_len) - 1
    +        last_byte = ord(self.contents[-1]) if _PY2 else self.contents[-1]
    +
    +        # zero out the unused bits in the last byte.
    +        zeroed_byte = last_byte & ~mask
    +        value = self.contents[1:-1] + (chr(zeroed_byte) if _PY2 else bytes((zeroed_byte,)))
    +
    +        unused_bits = _int_to_bit_tuple(last_byte & mask, unused_bits_len)
    +
    +        return [(value, unused_bits)]
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A byte string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        return self.__bytes__()
    +
    +    @property
    +    def unused_bits(self):
    +        """
    +        The unused bits of the bit string encoding.
    +
    +        :return:
    +            A tuple of 1s and 0s
    +        """
    +
    +        # call native to set _unused_bits
    +        self.native
    +
    +        return self._unused_bits
    +
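+# DER sketch for OctetBitString: the payload is prefixed with a single byte
+# holding the count of unused bits, which is zero for whole bytes.
+#
+#     assert OctetBitString(b'\x01').dump() == b'\x03\x02\x00\x01'
+#     assert OctetBitString.load(b'\x03\x02\x00\x01').native == b'\x01'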
    +
    +class IntegerBitString(_IntegerBitString, Constructable, Castable, Primitive):
    +    """
    +    Represents a bit string in ASN.1 as a Python integer
    +    """
    +
    +    tag = 3
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            An integer
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, int_types):
    +            raise TypeError(unwrap(
    +                '''
+                %s value must be a non-negative integer, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if value < 0:
    +            raise ValueError(unwrap(
    +                '''
+                %s value must be a non-negative integer, not %d
    +                ''',
    +                type_name(self),
    +                value
    +            ))
    +
    +        self._native = value
    +        # Set the unused bits to 0
    +        self.contents = b'\x00' + int_to_bytes(value, signed=True)
    +        self._unused_bits = ()
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            An integer or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native, __, self._unused_bits = self._chunks_to_int()
    +
    +        return self._native
    +
    +
    +class OctetString(Constructable, Castable, Primitive):
    +    """
    +    Represents a byte string in both ASN.1 and Python
    +    """
    +
    +    tag = 4
    +
    +    # Instance attribute of (possibly-merged) byte string
    +    _bytes = None
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A byte string
    +        """
    +
    +        if not isinstance(value, byte_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a byte string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._bytes = value
    +        self.contents = value
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __bytes__(self):
    +        """
    +        :return:
    +            A byte string
    +        """
    +
    +        if self.contents is None:
    +            return b''
    +        if self._bytes is None:
    +            self._bytes = self._merge_chunks()
    +        return self._bytes
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another OctetString object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(OctetString, self)._copy(other, copy_func)
    +        self._bytes = other._bytes
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A byte string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        return self.__bytes__()
    +
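+# DER sketch for OctetString: the contents are the bytes themselves.
+#
+#     assert OctetString(b'hi').dump() == b'\x04\x02hi'
+#     assert OctetString.load(b'\x04\x02hi').native == b'hi'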
    +
    +class IntegerOctetString(Constructable, Castable, Primitive):
    +    """
    +    Represents a byte string in ASN.1 as a Python integer
    +    """
    +
    +    tag = 4
    +
    +    # An explicit length in bytes the integer should be encoded to. This should
+    # generally not be used since DER defines a canonical encoding; however, some
+    # uses, such as storing elliptic curve private keys, require an
    +    # exact number of bytes, even if the leading bytes are null.
    +    _encoded_width = None
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            An integer
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, int_types):
    +            raise TypeError(unwrap(
    +                '''
+                %s value must be a non-negative integer, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if value < 0:
    +            raise ValueError(unwrap(
    +                '''
+                %s value must be a non-negative integer, not %d
    +                ''',
    +                type_name(self),
    +                value
    +            ))
    +
    +        self._native = value
    +        self.contents = int_to_bytes(value, signed=False, width=self._encoded_width)
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            An integer or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native = int_from_bytes(self._merge_chunks())
    +        return self._native
    +
    +    def set_encoded_width(self, width):
    +        """
+        Set the explicit encoding width for the integer
    +
    +        :param width:
    +            An integer byte width to encode the integer to
    +        """
    +
    +        self._encoded_width = width
    +        # Make sure the encoded value is up-to-date with the proper width
    +        if self.contents is not None and len(self.contents) != width:
    +            self.set(self.native)
    +
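+# Fixed-width sketch for IntegerOctetString.set_encoded_width() above: the
+# value is re-encoded with leading null bytes preserved, as needed when
+# storing elliptic curve private keys.
+#
+#     key = IntegerOctetString(1)
+#     assert key.dump() == b'\x04\x01\x01'
+#     key.set_encoded_width(4)
+#     assert key.dump() == b'\x04\x04\x00\x00\x00\x01'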
    +
    +class ParsableOctetString(Constructable, Castable, Primitive):
    +
    +    tag = 4
    +
    +    _parsed = None
    +
    +    # Instance attribute of (possibly-merged) byte string
    +    _bytes = None
    +
    +    def __init__(self, value=None, parsed=None, **kwargs):
    +        """
    +        Allows providing a parsed object that will be serialized to get the
    +        byte string value
    +
    +        :param value:
    +            A native Python datatype to initialize the object value with
    +
    +        :param parsed:
    +            If value is None and this is an Asn1Value object, this will be
    +            set as the parsed value, and the value will be obtained by calling
    +            .dump() on this object.
    +        """
    +
    +        set_parsed = False
    +        if value is None and parsed is not None and isinstance(parsed, Asn1Value):
    +            value = parsed.dump()
    +            set_parsed = True
    +
    +        Primitive.__init__(self, value=value, **kwargs)
    +
    +        if set_parsed:
    +            self._parsed = (parsed, parsed.__class__, None)
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A byte string
    +        """
    +
    +        if not isinstance(value, byte_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a byte string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._bytes = value
    +        self.contents = value
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def parse(self, spec=None, spec_params=None):
    +        """
    +        Parses the contents generically, or using a spec with optional params
    +
    +        :param spec:
    +            A class derived from Asn1Value that defines what class_ and tag the
    +            value should have, and the semantics of the encoded value. The
    +            return value will be of this type. If omitted, the encoded value
    +            will be decoded using the standard universal tag based on the
    +            encoded tag number.
    +
    +        :param spec_params:
    +            A dict of params to pass to the spec object
    +
    +        :return:
    +            An object of the type spec, or if not present, a child of Asn1Value
    +        """
    +
    +        if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
    +            parsed_value, _ = _parse_build(self.__bytes__(), spec=spec, spec_params=spec_params)
    +            self._parsed = (parsed_value, spec, spec_params)
    +        return self._parsed[0]
    +
    +    def __bytes__(self):
    +        """
    +        :return:
    +            A byte string
    +        """
    +
    +        if self.contents is None:
    +            return b''
    +        if self._bytes is None:
    +            self._bytes = self._merge_chunks()
    +        return self._bytes
    +
    +    def _setable_native(self):
    +        """
    +        Returns a byte string that can be passed into .set()
    +
    +        :return:
    +            A python value that is valid to pass to .set()
    +        """
    +
    +        return self.__bytes__()
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another ParsableOctetString object to itself
    +
+        :param other:
+            Another instance of the same class
+
+        :param copy_func:
+            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(ParsableOctetString, self)._copy(other, copy_func)
    +        self._bytes = other._bytes
    +        self._parsed = copy_func(other._parsed)
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A byte string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._parsed is not None:
    +            return self._parsed[0].native
    +        else:
    +            return self.__bytes__()
    +
    +    @property
    +    def parsed(self):
    +        """
    +        Returns the parsed object from .parse()
    +
    +        :return:
    +            The object returned by .parse()
    +        """
    +
    +        if self._parsed is None:
    +            self.parse()
    +
    +        return self._parsed[0]
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._indefinite:
    +            force = True
    +
    +        if force:
    +            if self._parsed is not None:
    +                native = self.parsed.dump(force=force)
    +            else:
    +                native = self.native
    +            self.contents = None
    +            self.set(native)
    +
    +        return Asn1Value.dump(self)
    +
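+# Parsing sketch for ParsableOctetString (Integer is defined earlier in this
+# module): the payload is itself an encoded value that .parse() can decode on
+# demand.
+#
+#     wrapped = ParsableOctetString(Integer(5).dump())
+#     assert wrapped.dump() == b'\x04\x03\x02\x01\x05'
+#     assert wrapped.parse(spec=Integer).native == 5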
    +
    +class ParsableOctetBitString(ParsableOctetString):
    +
    +    tag = 3
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A byte string
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, byte_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a byte string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._bytes = value
    +        # Set the unused bits to 0
    +        self.contents = b'\x00' + value
    +        self._header = None
    +        if self._indefinite:
    +            self._indefinite = False
    +            self.method = 0
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def _as_chunk(self):
    +        """
    +        Allows reconstructing indefinite length values
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +
    +        :return:
    +            A byte string
    +        """
    +
    +        unused_bits_len = ord(self.contents[0]) if _PY2 else self.contents[0]
    +        if unused_bits_len:
    +            raise ValueError('ParsableOctetBitString should have no unused bits')
    +
    +        return self.contents[1:]
    +
    +
    +class Null(Primitive):
    +    """
    +    Represents a null value in ASN.1 as None in Python
    +    """
    +
    +    tag = 5
    +
    +    contents = b''
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            None
    +        """
    +
    +        self.contents = b''
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            None
    +        """
    +
    +        return None
    +
    +
    +class ObjectIdentifier(Primitive, ValueMap):
    +    """
    +    Represents an object identifier in ASN.1 as a Python unicode dotted
    +    integer string
    +    """
    +
    +    tag = 6
    +
    +    # A unicode string of the dotted form of the object identifier
    +    _dotted = None
    +
    +    @classmethod
    +    def map(cls, value):
    +        """
    +        Converts a dotted unicode string OID into a mapped unicode string
    +
    +        :param value:
    +            A dotted unicode string OID
    +
    +        :raises:
    +            ValueError - when no _map dict has been defined on the class
    +            TypeError - when value is not a unicode string
    +
    +        :return:
    +            A mapped unicode string
    +        """
    +
    +        if cls._map is None:
    +            raise ValueError(unwrap(
    +                '''
    +                %s._map has not been defined
    +                ''',
    +                type_name(cls)
    +            ))
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                value must be a unicode string, not %s
    +                ''',
    +                type_name(value)
    +            ))
    +
    +        return cls._map.get(value, value)
    +
    +    @classmethod
    +    def unmap(cls, value):
    +        """
    +        Converts a mapped unicode string value into a dotted unicode string OID
    +
    +        :param value:
    +            A mapped unicode string OR dotted unicode string OID
    +
    +        :raises:
    +            ValueError - when no _map dict has been defined on the class or the value can't be unmapped
    +            TypeError - when value is not a unicode string
    +
    +        :return:
    +            A dotted unicode string OID
    +        """
    +
    +        if cls not in _SETUP_CLASSES:
    +            cls()._setup()
    +            _SETUP_CLASSES[cls] = True
    +
    +        if cls._map is None:
    +            raise ValueError(unwrap(
    +                '''
    +                %s._map has not been defined
    +                ''',
    +                type_name(cls)
    +            ))
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                value must be a unicode string, not %s
    +                ''',
    +                type_name(value)
    +            ))
    +
    +        if value in cls._reverse_map:
    +            return cls._reverse_map[value]
    +
    +        if not _OID_RE.match(value):
    +            raise ValueError(unwrap(
    +                '''
    +                %s._map does not contain an entry for "%s"
    +                ''',
    +                type_name(cls),
    +                value
    +            ))
    +
    +        return value
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A unicode string. May be a dotted integer string, or if _map is
    +            provided, one of the mapped values.
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._native = value
    +
    +        if self._map is not None:
    +            if value in self._reverse_map:
    +                value = self._reverse_map[value]
    +
    +        self.contents = b''
    +        first = None
    +        for index, part in enumerate(value.split('.')):
    +            part = int(part)
    +
    +            # The first two parts are merged into a single byte
    +            if index == 0:
    +                first = part
    +                continue
    +            elif index == 1:
    +                part = (first * 40) + part
    +
    +            encoded_part = chr_cls(0x7F & part)
    +            part = part >> 7
    +            while part > 0:
    +                encoded_part = chr_cls(0x80 | (0x7F & part)) + encoded_part
    +                part = part >> 7
    +            self.contents += encoded_part
    +
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        return self.dotted
    +
    +    @property
    +    def dotted(self):
    +        """
    +        :return:
    +            A unicode string of the object identifier in dotted notation, thus
    +            ignoring any mapped value
    +        """
    +
    +        if self._dotted is None:
    +            output = []
    +
    +            part = 0
    +            for byte in self.contents:
    +                if _PY2:
    +                    byte = ord(byte)
    +                part = part * 128
    +                part += byte & 127
    +                # Last byte in subidentifier has the eighth bit set to 0
    +                if byte & 0x80 == 0:
    +                    if len(output) == 0:
    +                        output.append(str_cls(part // 40))
    +                        output.append(str_cls(part % 40))
    +                    else:
    +                        output.append(str_cls(part))
    +                    part = 0
    +
    +            self._dotted = '.'.join(output)
    +        return self._dotted
    +
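    +    # Decoding sketch (illustrative comment): contents of b'\x2a\x86\x48'
    +    # decode to the subidentifiers 42 and 840; the first is split as
    +    # 42 // 40 = 1 and 42 % 40 = 2, producing the dotted string '1.2.840'.
    +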
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A unicode string or None. If _map is not defined, the unicode string
    +            is a string of dotted integers. If _map is defined and the dotted
    +            string is present in the _map, the mapped value is returned.
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native = self.dotted
    +        if self._map is not None and self._native in self._map:
    +            self._native = self._map[self._native]
    +        return self._native
    +
    +
    +class ObjectDescriptor(Primitive):
    +    """
    +    Represents an object descriptor from ASN.1 - no Python implementation
    +    """
    +
    +    tag = 7
    +
    +
    +class InstanceOf(Primitive):
    +    """
    +    Represents an instance from ASN.1 - no Python implementation
    +    """
    +
    +    tag = 8
    +
    +
    +class Real(Primitive):
    +    """
    +    Represents a real number from ASN.1 - no Python implementation
    +    """
    +
    +    tag = 9
    +
    +
    +class Enumerated(Integer):
    +    """
    +    Represents an enumerated list of integers from ASN.1 as a Python
    +    unicode string
    +    """
    +
    +    tag = 10
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            An integer or a unicode string from _map
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if not isinstance(value, int_types) and not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be an integer or a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if isinstance(value, str_cls):
    +            if value not in self._reverse_map:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value "%s" is not a valid value
    +                    ''',
    +                    type_name(self),
    +                    value
    +                ))
    +
    +            value = self._reverse_map[value]
    +
    +        elif value not in self._map:
    +            raise ValueError(unwrap(
    +                '''
    +                %s value %s is not a valid value
    +                ''',
    +                type_name(self),
    +                value
    +            ))
    +
    +        Integer.set(self, value)
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A unicode string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            self._native = self._map[self.__int__()]
    +        return self._native
    +
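    +# Illustrative Enumerated usage (a sketch, not part of upstream asn1crypto;
    +# the Version class is hypothetical):
    +#
    +#     class Version(Enumerated):
    +#         _map = {0: 'v1', 1: 'v2'}
    +#
    +# Version('v2').native and Version(1).native both evaluate to 'v2'; any
    +# integer or string outside _map causes set() to raise a ValueError.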
    +
    +class UTF8String(AbstractString):
    +    """
    +    Represents a UTF-8 string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 12
    +    _encoding = 'utf-8'
    +
    +
    +class RelativeOid(ObjectIdentifier):
    +    """
    +    Represents a relative object identifier from ASN.1 as a Python unicode
    +    dotted integer string
    +    """
    +
    +    tag = 13
    +
    +
    +class Sequence(Asn1Value):
    +    """
    +    Represents a sequence of fields from ASN.1 as a Python object with a
    +    dict-like interface
    +    """
    +
    +    tag = 16
    +
    +    class_ = 0
    +    method = 1
    +
    +    # A list of child objects, in order of _fields
    +    children = None
    +
    +    # Sequence overrides .contents to be a property so that the mutated state
    +    # of child objects can be checked to ensure everything is up-to-date
    +    _contents = None
    +
    +    # Variable to track if the object has been mutated
    +    _mutated = False
    +
    +    # A list of tuples in one of the following forms.
    +    #
    +    # Option 1, a unicode string field name and a value class
    +    #
    +    # ("name", Asn1ValueClass)
    +    #
    +    # Option 2, same as Option 1, but with a dict of class params
    +    #
    +    # ("name", Asn1ValueClass, {'explicit': 5})
    +    _fields = []
    +
    +    # A dict with keys being the name of a field and the value being a unicode
    +    # string of the method name on self to call to get the spec for that field
    +    _spec_callbacks = None
    +
    +    # A dict that maps unicode string field names to an index in _fields
    +    _field_map = None
    +
    +    # A list in the same order as _fields that has tuples in the form (class_, tag)
    +    _field_ids = None
    +
    +    # An optional 2-element tuple that defines the field names of an OID field
    +    # and the field that the OID should be used to help decode. Works with the
    +    # _oid_specs attribute.
    +    _oid_pair = None
    +
    +    # A dict with keys that are unicode string OID values and values that are
    +    # Asn1Value classes to use for decoding a variable-type field.
    +    _oid_specs = None
    +
    +    # A 2-element tuple of the indexes in _fields of the OID and value fields
    +    _oid_nums = None
    +
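    +    # Illustrative sketch (field and spec names are hypothetical): an
    +    # algorithm-identifier style structure could declare
    +    #
    +    #     _oid_pair = ('algorithm', 'parameters')
    +    #     _oid_specs = {'sha1': Null}
    +    #
    +    # so that the parsed value of the "algorithm" OID field selects the
    +    # spec used to decode the "parameters" field.
    +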
    +    # Predetermined field specs to optimize away calls to _determine_spec()
    +    _precomputed_specs = None
    +
    +    def __init__(self, value=None, default=None, **kwargs):
    +        """
    +        Allows setting field values before passing everything else along to
    +        Asn1Value.__init__()
    +
    +        :param value:
    +            A native Python datatype to initialize the object value with
    +
    +        :param default:
    +            The default value if no value is specified
    +        """
    +
    +        Asn1Value.__init__(self, **kwargs)
    +
    +        check_existing = False
    +        if value is None and default is not None:
    +            check_existing = True
    +            if self.children is None:
    +                if self.contents is None:
    +                    check_existing = False
    +                else:
    +                    self._parse_children()
    +            value = default
    +
    +        if value is not None:
    +            try:
    +                # Fields are iterated in definition order to allow things like
    +                # OID-based specs. Otherwise sometimes the value would be processed
    +                # before the OID field, resulting in invalid value object creation.
    +                if self._fields:
    +                    keys = [info[0] for info in self._fields]
    +                    unused_keys = set(value.keys())
    +                else:
    +                    keys = value.keys()
    +                    unused_keys = set(keys)
    +
    +                for key in keys:
    +                    # If we are setting defaults, but a real value has already
    +                    # been set for the field, then skip it
    +                    if check_existing:
    +                        index = self._field_map[key]
    +                        if index < len(self.children) and self.children[index] is not VOID:
    +                            if key in unused_keys:
    +                                unused_keys.remove(key)
    +                            continue
    +
    +                    if key in value:
    +                        self.__setitem__(key, value[key])
    +                        unused_keys.remove(key)
    +
    +                if len(unused_keys):
    +                    raise ValueError(unwrap(
    +                        '''
    +                        One or more unknown fields were passed to the constructor
    +                        of %s: %s
    +                        ''',
    +                        type_name(self),
    +                        ', '.join(sorted(list(unused_keys)))
    +                    ))
    +
    +            except (ValueError, TypeError) as e:
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +                raise e
    +
    +    @property
    +    def contents(self):
    +        """
    +        :return:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        if self.children is None:
    +            return self._contents
    +
    +        if self._is_mutated():
    +            self._set_contents()
    +
    +        return self._contents
    +
    +    @contents.setter
    +    def contents(self, value):
    +        """
    +        :param value:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        self._contents = value
    +
    +    def _is_mutated(self):
    +        """
    +        :return:
    +            A boolean - if the sequence or any children (recursively) have been
    +            mutated
    +        """
    +
    +        mutated = self._mutated
    +        if self.children is not None:
    +            for child in self.children:
    +                if isinstance(child, Sequence) or isinstance(child, SequenceOf):
    +                    mutated = mutated or child._is_mutated()
    +
    +        return mutated
    +
    +    def _lazy_child(self, index):
    +        """
    +        Builds a child object if the child has only been parsed into a tuple so far
    +        """
    +
    +        child = self.children[index]
    +        if child.__class__ == tuple:
    +            child = self.children[index] = _build(*child)
    +        return child
    +
    +    def __len__(self):
    +        """
    +        :return:
    +            Integer
    +        """
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        return len(self.children)
    +
    +    def __getitem__(self, key):
    +        """
    +        Allows accessing fields by name or index
    +
    +        :param key:
    +            A unicode string of the field name, or an integer of the field index
    +
    +        :raises:
    +            KeyError - when a field name or index is invalid
    +
    +        :return:
    +            The Asn1Value object of the field specified
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        if not isinstance(key, int_types):
    +            if key not in self._field_map:
    +                raise KeyError(unwrap(
    +                    '''
    +                    No field named "%s" defined for %s
    +                    ''',
    +                    key,
    +                    type_name(self)
    +                ))
    +            key = self._field_map[key]
    +
    +        if key >= len(self.children):
    +            raise KeyError(unwrap(
    +                '''
    +                No field numbered %s is present in this %s
    +                ''',
    +                key,
    +                type_name(self)
    +            ))
    +
    +        try:
    +            return self._lazy_child(key)
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +            raise e
    +
    +    def __setitem__(self, key, value):
    +        """
    +        Allows setting fields by name or index
    +
    +        :param key:
    +            A unicode string of the field name, or an integer of the field index
    +
    +        :param value:
    +            A native Python datatype to set the field value to. This method will
    +            construct the appropriate Asn1Value object from _fields.
    +
    +        :raises:
    +            ValueError - when a field name or index is invalid
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        if not isinstance(key, int_types):
    +            if key not in self._field_map:
    +                raise KeyError(unwrap(
    +                    '''
    +                    No field named "%s" defined for %s
    +                    ''',
    +                    key,
    +                    type_name(self)
    +                ))
    +            key = self._field_map[key]
    +
    +        field_name, field_spec, value_spec, field_params, _ = self._determine_spec(key)
    +
    +        new_value = self._make_value(field_name, field_spec, value_spec, field_params, value)
    +
    +        invalid_value = False
    +        if isinstance(new_value, Any):
    +            invalid_value = new_value.parsed is None
    +        else:
    +            invalid_value = new_value.contents is None
    +
    +        if invalid_value:
    +            raise ValueError(unwrap(
    +                '''
    +                Value for field "%s" of %s is not set
    +                ''',
    +                field_name,
    +                type_name(self)
    +            ))
    +
    +        self.children[key] = new_value
    +
    +        if self._native is not None:
    +            self._native[self._fields[key][0]] = self.children[key].native
    +        self._mutated = True
    +
    +    def __delitem__(self, key):
    +        """
    +        Allows deleting optional or default fields by name or index
    +
    +        :param key:
    +            A unicode string of the field name, or an integer of the field index
    +
    +        :raises:
    +            ValueError - when a field name or index is invalid, or the field is not optional or defaulted
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        if not isinstance(key, int_types):
    +            if key not in self._field_map:
    +                raise KeyError(unwrap(
    +                    '''
    +                    No field named "%s" defined for %s
    +                    ''',
    +                    key,
    +                    type_name(self)
    +                ))
    +            key = self._field_map[key]
    +
    +        name, _, params = self._fields[key]
    +        if not params or ('default' not in params and 'optional' not in params):
    +            raise ValueError(unwrap(
    +                '''
    +                Can not delete the value for the field "%s" of %s since it is
    +                not optional or defaulted
    +                ''',
    +                name,
    +                type_name(self)
    +            ))
    +
    +        if 'optional' in params:
    +            self.children[key] = VOID
    +            if self._native is not None:
    +                self._native[name] = None
    +        else:
    +            self.__setitem__(key, None)
    +        self._mutated = True
    +
    +    def __iter__(self):
    +        """
    +        :return:
    +            An iterator of field key names
    +        """
    +
    +        for info in self._fields:
    +            yield info[0]
    +
    +    def _set_contents(self, force=False):
    +        """
    +        Updates the .contents attribute of the value with the encoded value of
    +        all of the child objects
    +
    +        :param force:
    +            Ensure all contents are in DER format instead of possibly using
    +            cached BER-encoded data
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        contents = BytesIO()
    +        for index, info in enumerate(self._fields):
    +            child = self.children[index]
    +            if child is None:
    +                child_dump = b''
    +            elif child.__class__ == tuple:
    +                if force:
    +                    child_dump = self._lazy_child(index).dump(force=force)
    +                else:
    +                    child_dump = child[3] + child[4] + child[5]
    +            else:
    +                child_dump = child.dump(force=force)
    +            # Skip values that are the same as the default
    +            if info[2] and 'default' in info[2]:
    +                default_value = info[1](**info[2])
    +                if default_value.dump() == child_dump:
    +                    continue
    +            contents.write(child_dump)
    +        self._contents = contents.getvalue()
    +
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def _setup(self):
    +        """
    +        Generates _field_map, _field_ids and _oid_nums for use in parsing
    +        """
    +
    +        cls = self.__class__
    +        cls._field_map = {}
    +        cls._field_ids = []
    +        cls._precomputed_specs = []
    +        for index, field in enumerate(cls._fields):
    +            if len(field) < 3:
    +                field = field + ({},)
    +                cls._fields[index] = field
    +            cls._field_map[field[0]] = index
    +            cls._field_ids.append(_build_id_tuple(field[2], field[1]))
    +
    +        if cls._oid_pair is not None:
    +            cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
    +
    +        for index, field in enumerate(cls._fields):
    +            has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
    +            is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
    +            if has_callback or is_mapped_oid:
    +                cls._precomputed_specs.append(None)
    +            else:
    +                cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
    +
    +    def _determine_spec(self, index):
    +        """
    +        Determine how a value for a field should be constructed
    +
    +        :param index:
    +            The field number
    +
    +        :return:
    +            A tuple containing the following elements:
    +             - unicode string of the field name
    +             - Asn1Value class of the field spec
    +             - Asn1Value class of the value spec
    +             - None or dict of params to pass to the field spec
    +             - None or Asn1Value class indicating the value spec was derived from an OID or a spec callback
    +        """
    +
    +        name, field_spec, field_params = self._fields[index]
    +        value_spec = field_spec
    +        spec_override = None
    +
    +        if self._spec_callbacks is not None and name in self._spec_callbacks:
    +            callback = self._spec_callbacks[name]
    +            spec_override = callback(self)
    +            if spec_override:
    +                # Allow a spec callback to specify both the base spec and
    +                # the override, for situations such as OctetString and parse_as
    +                if spec_override.__class__ == tuple and len(spec_override) == 2:
    +                    field_spec, value_spec = spec_override
    +                    if value_spec is None:
    +                        value_spec = field_spec
    +                        spec_override = None
    +                # When no field spec is specified, use the single returned spec for both
    +                elif field_spec is None:
    +                    field_spec = spec_override
    +                    value_spec = field_spec
    +                    spec_override = None
    +                else:
    +                    value_spec = spec_override
    +
    +        elif self._oid_nums is not None and self._oid_nums[1] == index:
    +            oid = self._lazy_child(self._oid_nums[0]).native
    +            if oid in self._oid_specs:
    +                spec_override = self._oid_specs[oid]
    +                value_spec = spec_override
    +
    +        return (name, field_spec, value_spec, field_params, spec_override)
    +
    +    def _make_value(self, field_name, field_spec, value_spec, field_params, value):
    +        """
    +        Constructs an appropriate Asn1Value object for a field
    +
    +        :param field_name:
    +            A unicode string of the field name
    +
    +        :param field_spec:
    +            An Asn1Value class that is the field spec
    +
    +        :param value_spec:
    +            An Asn1Value class that is the value spec
    +
    +        :param field_params:
    +            None or a dict of params for the field spec
    +
    +        :param value:
    +            The value to construct an Asn1Value object from
    +
    +        :return:
    +            An instance of a child class of Asn1Value
    +        """
    +
    +        if value is None and 'optional' in field_params:
    +            return VOID
    +
    +        specs_different = field_spec != value_spec
    +        is_any = issubclass(field_spec, Any)
    +
    +        if issubclass(value_spec, Choice):
    +            is_asn1value = isinstance(value, Asn1Value)
    +            is_tuple = isinstance(value, tuple) and len(value) == 2
    +            is_dict = isinstance(value, dict) and len(value) == 1
    +            if not is_asn1value and not is_tuple and not is_dict:
    +                raise ValueError(unwrap(
    +                    '''
    +                    Can not set a native python value to %s, which has the
    +                    choice type of %s - value must be an instance of Asn1Value
    +                    ''',
    +                    field_name,
    +                    type_name(value_spec)
    +                ))
    +            if is_tuple or is_dict:
    +                value = value_spec(value)
    +            if not isinstance(value, value_spec):
    +                wrapper = value_spec()
    +                wrapper.validate(value.class_, value.tag, value.contents)
    +                wrapper._parsed = value
    +                new_value = wrapper
    +            else:
    +                new_value = value
    +
    +        elif isinstance(value, field_spec):
    +            new_value = value
    +            if specs_different:
    +                new_value.parse(value_spec)
    +
    +        elif (not specs_different or is_any) and not isinstance(value, value_spec):
    +            if (not is_any or specs_different) and isinstance(value, Asn1Value):
    +                raise TypeError(unwrap(
    +                    '''
    +                    %s value must be %s, not %s
    +                    ''',
    +                    field_name,
    +                    type_name(value_spec),
    +                    type_name(value)
    +                ))
    +            new_value = value_spec(value, **field_params)
    +
    +        else:
    +            if isinstance(value, value_spec):
    +                new_value = value
    +            else:
    +                if isinstance(value, Asn1Value):
    +                    raise TypeError(unwrap(
    +                        '''
    +                        %s value must be %s, not %s
    +                        ''',
    +                        field_name,
    +                        type_name(value_spec),
    +                        type_name(value)
    +                    ))
    +                new_value = value_spec(value)
    +
    +            # For when the field is OctetString or OctetBitString with embedded
    +            # values we need to wrap the value in the field spec to get the
    +            # appropriate encoded value.
    +            if specs_different and not is_any:
    +                wrapper = field_spec(value=new_value.dump(), **field_params)
    +                wrapper._parsed = (new_value, new_value.__class__, None)
    +                new_value = wrapper
    +
    +        new_value = _fix_tagging(new_value, field_params)
    +
    +        return new_value
    +
    +    def _parse_children(self, recurse=False):
    +        """
    +        Parses the contents and generates Asn1Value objects based on the
    +        definitions from _fields.
    +
    +        :param recurse:
    +            If child objects that are Sequence or SequenceOf objects should
    +            be recursively parsed
    +
    +        :raises:
    +            ValueError - when an error occurs parsing child objects
    +        """
    +
    +        cls = self.__class__
    +        if self._contents is None:
    +            if self._fields:
    +                self.children = [VOID] * len(self._fields)
    +                for index, (_, _, params) in enumerate(self._fields):
    +                    if 'default' in params:
    +                        if cls._precomputed_specs[index]:
    +                            field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
    +                        else:
    +                            field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
    +                        self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
    +            return
    +
    +        try:
    +            self.children = []
    +            contents_length = len(self._contents)
    +            child_pointer = 0
    +            field = 0
    +            field_len = len(self._fields)
    +            parts = None
    +            again = child_pointer < contents_length
    +            while again:
    +                if parts is None:
    +                    parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
    +                again = child_pointer < contents_length
    +
    +                if field < field_len:
    +                    _, field_spec, value_spec, field_params, spec_override = (
    +                        cls._precomputed_specs[field] or self._determine_spec(field))
    +
    +                    # If the next value is optional or default, allow it to be absent
    +                    if field_params and ('optional' in field_params or 'default' in field_params):
    +                        if self._field_ids[field] != (parts[0], parts[2]) and field_spec != Any:
    +
    +                            # See if the value is a valid choice before assuming
    +                            # that we have a missing optional or default value
    +                            choice_match = False
    +                            if issubclass(field_spec, Choice):
    +                                try:
    +                                    tester = field_spec(**field_params)
    +                                    tester.validate(parts[0], parts[2], parts[4])
    +                                    choice_match = True
    +                                except ValueError:
    +                                    pass
    +
    +                            if not choice_match:
    +                                if 'optional' in field_params:
    +                                    self.children.append(VOID)
    +                                else:
    +                                    self.children.append(field_spec(**field_params))
    +                                field += 1
    +                                again = True
    +                                continue
    +
    +                    if field_spec is None or (spec_override and issubclass(field_spec, Any)):
    +                        field_spec = value_spec
    +                        spec_override = None
    +
    +                    if spec_override:
    +                        child = parts + (field_spec, field_params, value_spec)
    +                    else:
    +                        child = parts + (field_spec, field_params)
    +
    +                # Handle situations where an optional or defaulted field definition is incorrect
    +                elif field_len > 0 and field + 1 <= field_len:
    +                    missed_fields = []
    +                    prev_field = field - 1
    +                    while prev_field >= 0:
    +                        prev_field_info = self._fields[prev_field]
    +                        if len(prev_field_info) < 3:
    +                            break
    +                        if 'optional' in prev_field_info[2] or 'default' in prev_field_info[2]:
    +                            missed_fields.append(prev_field_info[0])
    +                        prev_field -= 1
    +                    plural = 's' if len(missed_fields) > 1 else ''
    +                    missed_field_names = ', '.join(missed_fields)
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Data for field %s (%s class, %s method, tag %s) does
    +                        not match the field definition%s of %s
    +                        ''',
    +                        field + 1,
    +                        CLASS_NUM_TO_NAME_MAP.get(parts[0]),
    +                        METHOD_NUM_TO_NAME_MAP.get(parts[1]),
    +                        parts[2],
    +                        plural,
    +                        missed_field_names
    +                    ))
    +
    +                else:
    +                    child = parts
    +
    +                if recurse:
    +                    child = _build(*child)
    +                    if isinstance(child, (Sequence, SequenceOf)):
    +                        child._parse_children(recurse=True)
    +
    +                self.children.append(child)
    +                field += 1
    +                parts = None
    +
    +            index = len(self.children)
    +            while index < field_len:
    +                name, field_spec, field_params = self._fields[index]
    +                if 'default' in field_params:
    +                    self.children.append(field_spec(**field_params))
    +                elif 'optional' in field_params:
    +                    self.children.append(VOID)
    +                else:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Field "%s" is missing from structure
    +                        ''',
    +                        name
    +                    ))
    +                index += 1
    +
    +        except (ValueError, TypeError) as e:
    +            self.children = None
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +            raise e
    +
    +    def spec(self, field_name):
    +        """
    +        Determines the spec to use for the field specified. Depending on how
    +        the spec is determined (_oid_pair or _spec_callbacks), it may be
    +        necessary to set preceding field values before calling this. Usually
    +        specs, if dynamic, are controlled by a preceding ObjectIdentifier
    +        field.
    +
    +        :param field_name:
    +            A unicode string of the field name to get the spec for
    +
    +        :return:
    +            A child class of asn1crypto.core.Asn1Value that the field must be
    +            encoded using
    +        """
    +
    +        if not isinstance(field_name, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                field_name must be a unicode string, not %s
    +                ''',
    +                type_name(field_name)
    +            ))
    +
    +        if self._fields is None:
    +            raise ValueError(unwrap(
    +                '''
    +                Unable to retrieve spec for field %s in the class %s because
    +                _fields has not been set
    +                ''',
    +                repr(field_name),
    +                type_name(self)
    +            ))
    +
    +        index = self._field_map[field_name]
    +        info = self._determine_spec(index)
    +
    +        return info[2]
    +
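    +    # Illustrative comment: with the hypothetical _oid_pair setup sketched
    +    # above, seq.spec('parameters') would return the class chosen from
    +    # _oid_specs once the "algorithm" field has been set.
    +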
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            An OrderedDict or None. If an OrderedDict, all child values are
    +            recursively converted to native representation also.
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            if self.children is None:
    +                self._parse_children(recurse=True)
    +            try:
    +                self._native = OrderedDict()
    +                for index, child in enumerate(self.children):
    +                    if child.__class__ == tuple:
    +                        child = _build(*child)
    +                        self.children[index] = child
    +                    try:
    +                        name = self._fields[index][0]
    +                    except IndexError:
    +                        name = str_cls(index)
    +                    self._native[name] = child.native
    +            except (ValueError, TypeError) as e:
    +                self._native = None
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +                raise e
    +        return self._native
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another Sequence object to itself
    +
    +        :param other:
    +            Another instance of the same class
    +
    +        :param copy_func:
    +            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(Sequence, self)._copy(other, copy_func)
    +        if self.children is not None:
    +            self.children = []
    +            for child in other.children:
    +                if child.__class__ == tuple:
    +                    self.children.append(child)
    +                else:
    +                    self.children.append(child.copy())
    +
    +    def debug(self, nest_level=1):
    +        """
    +        Show the binary data and parsed data in a tree structure
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        prefix = '  ' * nest_level
    +        _basic_debug(prefix, self)
    +        for field_name in self:
    +            child = self._lazy_child(self._field_map[field_name])
    +            if child is not VOID:
    +                print('%s    Field "%s"' % (prefix, field_name))
    +                child.debug(nest_level + 3)
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._header is not None and self._header[-1:] == b'\x80':
    +            force = True
    +
    +        if force:
    +            self._set_contents(force=force)
    +
    +        if self._fields and self.children is not None:
    +            for index, (field_name, _, params) in enumerate(self._fields):
    +                if self.children[index] is not VOID:
    +                    continue
    +                if 'default' in params or 'optional' in params:
    +                    continue
    +                raise ValueError(unwrap(
    +                    '''
    +                    Field "%s" is missing from structure
    +                    ''',
    +                    field_name
    +                ))
    +
    +        return Asn1Value.dump(self)
    +
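    +# Illustrative Sequence usage (a sketch, not part of upstream asn1crypto;
    +# the Pair class is hypothetical):
    +#
    +#     class Pair(Sequence):
    +#         _fields = [
    +#             ('first', Integer),
    +#             ('second', UTF8String, {'optional': True}),
    +#         ]
    +#
    +#     pair = Pair({'first': 1, 'second': 'a'})
    +#     pair['first'].native   # 1
    +#     del pair['second']     # allowed only for optional/default fields
    +#     encoded = pair.dump()  # DER bytes, with the deleted field omitted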
    +
    +class SequenceOf(Asn1Value):
    +    """
    +    Represents a sequence (ordered) of a single type of values from ASN.1 as a
    +    Python object with a list-like interface
    +    """
    +
    +    tag = 16
    +
    +    class_ = 0
    +    method = 1
    +
    +    # A list of child objects
    +    children = None
    +
    +    # SequenceOf overrides .contents to be a property so that the mutated state
    +    # of child objects can be checked to ensure everything is up-to-date
    +    _contents = None
    +
    +    # Variable to track if the object has been mutated
    +    _mutated = False
    +
    +    # An Asn1Value class to use when parsing children
    +    _child_spec = None
    +
    +    def __init__(self, value=None, default=None, contents=None, spec=None, **kwargs):
    +        """
    +        Allows setting child objects and the _child_spec via the spec parameter
    +        before passing everything else along to Asn1Value.__init__()
    +
    +        :param value:
    +            A native Python datatype to initialize the object value with
    +
    +        :param default:
    +            The default value if no value is specified
    +
    +        :param contents:
    +            A byte string of the encoded contents of the value
    +
    +        :param spec:
    +            A class derived from Asn1Value to use to parse children
    +        """
    +
    +        if spec:
    +            self._child_spec = spec
    +
    +        Asn1Value.__init__(self, **kwargs)
    +
    +        try:
    +            if contents is not None:
    +                self.contents = contents
    +            else:
    +                if value is None and default is not None:
    +                    value = default
    +
    +                if value is not None:
    +                    for index, child in enumerate(value):
    +                        self.__setitem__(index, child)
    +
    +                    # Make sure a blank list is serialized
    +                    if self.contents is None:
    +                        self._set_contents()
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while constructing %s' % type_name(self),) + args
    +            raise e
    +
    +    @property
    +    def contents(self):
    +        """
    +        :return:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        if self.children is None:
    +            return self._contents
    +
    +        if self._is_mutated():
    +            self._set_contents()
    +
    +        return self._contents
    +
    +    @contents.setter
    +    def contents(self, value):
    +        """
    +        :param value:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        self._contents = value
    +
    +    def _is_mutated(self):
    +        """
    +        :return:
    +            A boolean - if the sequence or any children (recursively) have been
    +            mutated
    +        """
    +
    +        mutated = self._mutated
    +        if self.children is not None:
    +            for child in self.children:
    +                if isinstance(child, Sequence) or isinstance(child, SequenceOf):
    +                    mutated = mutated or child._is_mutated()
    +
    +        return mutated
    +
    +    def _lazy_child(self, index):
    +        """
    +        Builds a child object if the child has only been parsed into a tuple so far
    +        """
    +
    +        child = self.children[index]
    +        if child.__class__ == tuple:
    +            child = _build(*child)
    +            self.children[index] = child
    +        return child
    +
    +    def _make_value(self, value):
    +        """
    +        Constructs a _child_spec value from a native Python data type, or
    +        an appropriate Asn1Value object
    +
    +        :param value:
    +            A native Python value, or some child of Asn1Value
    +
    +        :return:
    +            An object of type _child_spec
    +        """
    +
    +        if isinstance(value, self._child_spec):
    +            new_value = value
    +
    +        elif issubclass(self._child_spec, Any):
    +            if isinstance(value, Asn1Value):
    +                new_value = value
    +            else:
    +                raise ValueError(unwrap(
    +                    '''
    +                    Can not set a native python value to %s where the
    +                    _child_spec is Any - value must be an instance of Asn1Value
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +        elif issubclass(self._child_spec, Choice):
    +            if not isinstance(value, Asn1Value):
    +                raise ValueError(unwrap(
    +                    '''
    +                    Can not set a native python value to %s where the
    +                    _child_spec is the choice type %s - value must be an
    +                    instance of Asn1Value
    +                    ''',
    +                    type_name(self),
    +                    self._child_spec.__name__
    +                ))
    +            if not isinstance(value, self._child_spec):
    +                wrapper = self._child_spec()
    +                wrapper.validate(value.class_, value.tag, value.contents)
    +                wrapper._parsed = value
    +                value = wrapper
    +            new_value = value
    +
    +        else:
    +            return self._child_spec(value=value)
    +
    +        params = {}
    +        if self._child_spec.explicit:
    +            params['explicit'] = self._child_spec.explicit
    +        if self._child_spec.implicit:
    +            params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
    +        return _fix_tagging(new_value, params)
    +
    +    def __len__(self):
    +        """
    +        :return:
    +            An integer
    +        """
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        return len(self.children)
    +
    +    def __getitem__(self, key):
    +        """
    +        Allows accessing children via index
    +
    +        :param key:
    +            Integer index of child
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        return self._lazy_child(key)
    +
    +    def __setitem__(self, key, value):
    +        """
    +        Allows overriding a child via index
    +
    +        :param key:
    +            Integer index of child
    +
    +        :param value:
    +            Native python datatype that will be passed to _child_spec to create
    +            new child object
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        new_value = self._make_value(value)
    +
    +        # If adding at the end, create a space for the new value
    +        if key == len(self.children):
    +            self.children.append(None)
    +            if self._native is not None:
    +                self._native.append(None)
    +
    +        self.children[key] = new_value
    +
    +        if self._native is not None:
    +            self._native[key] = self.children[key].native
    +
    +        self._mutated = True
    +
    +    def __delitem__(self, key):
    +        """
    +        Allows removing a child via index
    +
    +        :param key:
    +            Integer index of child
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        self.children.pop(key)
    +        if self._native is not None:
    +            self._native.pop(key)
    +
    +        self._mutated = True
    +
    +    def __iter__(self):
    +        """
    +        :return:
    +            An iter() of child objects
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        for index in range(0, len(self.children)):
    +            yield self._lazy_child(index)
    +
    +    def __contains__(self, item):
    +        """
    +        :param item:
    +            An object of the type cls._child_spec
    +
    +        :return:
    +            A boolean if the item is contained in this SequenceOf
    +        """
    +
    +        if item is None or item is VOID:
    +            return False
    +
    +        if not isinstance(item, self._child_spec):
    +            raise TypeError(unwrap(
    +                '''
    +                Checking membership in %s is only available for instances of
    +                %s, not %s
    +                ''',
    +                type_name(self),
    +                type_name(self._child_spec),
    +                type_name(item)
    +            ))
    +
    +        for child in self:
    +            if child == item:
    +                return True
    +
    +        return False
    +
    +    def append(self, value):
    +        """
    +        Allows adding a child to the end of the sequence
    +
    +        :param value:
    +            Native python datatype that will be passed to _child_spec to create
    +            new child object
    +        """
    +
    +        # We inline this check to prevent method invocation each time
    +        if self.children is None:
    +            self._parse_children()
    +
    +        self.children.append(self._make_value(value))
    +
    +        if self._native is not None:
    +            self._native.append(self.children[-1].native)
    +
    +        self._mutated = True
    +
    +    def _set_contents(self, force=False):
    +        """
    +        Encodes all child objects into the contents for this object
    +
    +        :param force:
    +            Ensure all contents are in DER format instead of possibly using
    +            cached BER-encoded data
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        contents = BytesIO()
    +        for child in self:
    +            contents.write(child.dump(force=force))
    +        self._contents = contents.getvalue()
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def _parse_children(self, recurse=False):
    +        """
    +        Parses the contents and generates Asn1Value objects based on the
    +        definitions from _child_spec.
    +
    +        :param recurse:
    +            If child objects that are Sequence or SequenceOf objects should
    +            be recursively parsed
    +
    +        :raises:
    +            ValueError - when an error occurs parsing child objects
    +        """
    +
    +        try:
    +            self.children = []
    +            if self._contents is None:
    +                return
    +            contents_length = len(self._contents)
    +            child_pointer = 0
    +            while child_pointer < contents_length:
    +                parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
    +                if self._child_spec:
    +                    child = parts + (self._child_spec,)
    +                else:
    +                    child = parts
    +                if recurse:
    +                    child = _build(*child)
    +                    if isinstance(child, (Sequence, SequenceOf)):
    +                        child._parse_children(recurse=True)
    +                self.children.append(child)
    +        except (ValueError, TypeError) as e:
    +            self.children = None
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +            raise e
    +
    +    def spec(self):
    +        """
    +        Determines the spec to use for child values.
    +
    +        :return:
    +            A child class of asn1crypto.core.Asn1Value that child values must be
    +            encoded using
    +        """
    +
    +        return self._child_spec
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A list or None. If a list, all child values are recursively
    +            converted to native representation also.
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            if self.children is None:
    +                self._parse_children(recurse=True)
    +            try:
    +                self._native = [child.native for child in self]
    +            except (ValueError, TypeError) as e:
    +                args = e.args[1:]
    +                e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +                raise e
    +        return self._native
    +
    +    def _copy(self, other, copy_func):
    +        """
    +        Copies the contents of another SequenceOf object to itself
    +
    +        :param other:
    +            Another instance of the same class
    +
    +        :param copy_func:
    +            A reference to copy.copy() or copy.deepcopy() to use when copying
    +            lists, dicts and objects
    +        """
    +
    +        super(SequenceOf, self)._copy(other, copy_func)
    +        if self.children is not None:
    +            self.children = []
    +            for child in other.children:
    +                if child.__class__ == tuple:
    +                    self.children.append(child)
    +                else:
    +                    self.children.append(child.copy())
    +
    +    def debug(self, nest_level=1):
    +        """
    +        Show the binary data and parsed data in a tree structure
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        prefix = '  ' * nest_level
    +        _basic_debug(prefix, self)
    +        for child in self:
    +            child.debug(nest_level + 1)
    +
    +    def dump(self, force=False):
    +        """
    +        Encodes the value using DER
    +
    +        :param force:
    +            If the encoded contents already exist, clear them and regenerate
    +            to ensure they are in DER format instead of BER format
    +
    +        :return:
    +            A byte string of the DER-encoded value
    +        """
    +
    +        # If the length is indefinite, force the re-encoding
    +        if self._header is not None and self._header[-1:] == b'\x80':
    +            force = True
    +
    +        if force:
    +            self._set_contents(force=force)
    +
    +        return Asn1Value.dump(self)
    +
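    +# Illustrative SequenceOf usage (a sketch, not part of upstream asn1crypto;
    +# the Integers class is hypothetical):
    +#
    +#     class Integers(SequenceOf):
    +#         _child_spec = Integer
    +#
    +#     values = Integers([1, 2, 3])
    +#     values.append(4)
    +#     values[0].native  # 1
    +#     values.native     # [1, 2, 3, 4]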
    +
    +class Set(Sequence):
    +    """
    +    Represents a set of fields (unordered) from ASN.1 as a Python object with a
    +    dict-like interface
    +    """
    +
    +    method = 1
    +    class_ = 0
    +    tag = 17
    +
    +    # A dict of 2-element tuples in the form (class_, tag) as keys and integers
    +    # as values that are the index of the field in _fields
    +    _field_ids = None
    +
    +    def _setup(self):
    +        """
    +        Generates _field_map, _field_ids and _oid_nums for use in parsing
    +        """
    +
    +        cls = self.__class__
    +        cls._field_map = {}
    +        cls._field_ids = {}
    +        cls._precomputed_specs = []
    +        for index, field in enumerate(cls._fields):
    +            if len(field) < 3:
    +                field = field + ({},)
    +                cls._fields[index] = field
    +            cls._field_map[field[0]] = index
    +            cls._field_ids[_build_id_tuple(field[2], field[1])] = index
    +
    +        if cls._oid_pair is not None:
    +            cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
    +
    +        for index, field in enumerate(cls._fields):
    +            has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
    +            is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
    +            if has_callback or is_mapped_oid:
    +                cls._precomputed_specs.append(None)
    +            else:
    +                cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
    +
    +    def _parse_children(self, recurse=False):
    +        """
    +        Parses the contents and generates Asn1Value objects based on the
    +        definitions from _fields.
    +
    +        :param recurse:
    +            If child objects that are Sequence or SequenceOf objects should
    +            be recursively parsed
    +
    +        :raises:
    +            ValueError - when an error occurs parsing child objects
    +        """
    +
    +        cls = self.__class__
    +        if self._contents is None:
    +            if self._fields:
    +                self.children = [VOID] * len(self._fields)
    +                for index, (_, _, params) in enumerate(self._fields):
    +                    if 'default' in params:
    +                        if cls._precomputed_specs[index]:
    +                            field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
    +                        else:
    +                            field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
    +                        self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
    +            return
    +
    +        try:
    +            child_map = {}
    +            contents_length = len(self.contents)
    +            child_pointer = 0
    +            seen_field = 0
    +            while child_pointer < contents_length:
    +                parts, child_pointer = _parse(self.contents, contents_length, pointer=child_pointer)
    +
    +                id_ = (parts[0], parts[2])
    +
    +                field = self._field_ids.get(id_)
    +                if field is None:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Data for field %s (%s class, %s method, tag %s) does
    +                        not match any of the field definitions
    +                        ''',
    +                        seen_field,
    +                        CLASS_NUM_TO_NAME_MAP.get(parts[0]),
    +                        METHOD_NUM_TO_NAME_MAP.get(parts[1]),
    +                        parts[2],
    +                    ))
    +
    +                _, field_spec, value_spec, field_params, spec_override = (
    +                    cls._precomputed_specs[field] or self._determine_spec(field))
    +
    +                if field_spec is None or (spec_override and issubclass(field_spec, Any)):
    +                    field_spec = value_spec
    +                    spec_override = None
    +
    +                if spec_override:
    +                    child = parts + (field_spec, field_params, value_spec)
    +                else:
    +                    child = parts + (field_spec, field_params)
    +
    +                if recurse:
    +                    child = _build(*child)
    +                    if isinstance(child, (Sequence, SequenceOf)):
    +                        child._parse_children(recurse=True)
    +
    +                child_map[field] = child
    +                seen_field += 1
    +
    +            total_fields = len(self._fields)
    +
    +            for index in range(0, total_fields):
    +                if index in child_map:
    +                    continue
    +
    +                name, field_spec, value_spec, field_params, spec_override = (
    +                    cls._precomputed_specs[index] or self._determine_spec(index))
    +
    +                if field_spec is None or (spec_override and issubclass(field_spec, Any)):
    +                    field_spec = value_spec
    +                    spec_override = None
    +
    +                missing = False
    +
    +                if not field_params:
    +                    missing = True
    +                elif 'optional' not in field_params and 'default' not in field_params:
    +                    missing = True
    +                elif 'optional' in field_params:
    +                    child_map[index] = VOID
    +                elif 'default' in field_params:
    +                    child_map[index] = field_spec(**field_params)
    +
    +                if missing:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Missing required field "%s" from %s
    +                        ''',
    +                        name,
    +                        type_name(self)
    +                    ))
    +
    +            self.children = []
    +            for index in range(0, total_fields):
    +                self.children.append(child_map[index])
    +
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while parsing %s' % type_name(self),) + args
    +            raise e
    +
    +    def _set_contents(self, force=False):
    +        """
    +        Encodes all child objects into the contents for this object.
    +
    +        This method is overridden because a Set needs to be encoded by
    +        removing defaulted fields and then sorting the fields by tag.
    +
    +        :param force:
    +            Ensure all contents are in DER format instead of possibly using
    +            cached BER-encoded data
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        child_tag_encodings = []
    +        for index, child in enumerate(self.children):
    +            child_encoding = child.dump(force=force)
    +
    +            # Skip encoding defaulted children
    +            name, spec, field_params = self._fields[index]
    +            if 'default' in field_params:
    +                if spec(**field_params).dump() == child_encoding:
    +                    continue
    +
    +            child_tag_encodings.append((child.tag, child_encoding))
    +        child_tag_encodings.sort(key=lambda ct: ct[0])
    +
    +        self._contents = b''.join([ct[1] for ct in child_tag_encodings])
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +
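How `Set._set_contents` orders fields is easiest to see end-to-end. A minimal sketch, assuming this vendored copy is importable as `asn1crypto` (the field names are hypothetical):

```python
# DER encodes a SET by child tag order, not by declaration order.
from asn1crypto.core import Integer, Set, UTF8String


class Example(Set):
    _fields = [
        ('label', UTF8String),  # universal tag 12
        ('count', Integer),     # universal tag 2
    ]


val = Example({'label': 'abc', 'count': 5})
# Integer (tag 2) sorts ahead of UTF8String (tag 12) in the output,
# so the encoding is the same no matter the assignment order.
print(val.dump().hex())
```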
    +class SetOf(SequenceOf):
    +    """
    +    Represents a set (unordered) of a single type of values from ASN.1 as a
    +    Python object with a list-like interface
    +    """
    +
    +    tag = 17
    +
    +    def _set_contents(self, force=False):
    +        """
    +        Encodes all child objects into the contents for this object.
    +
    +        This method is overridden because a SetOf needs to be encoded by
    +        sorting the child encodings.
    +
    +        :param force:
    +            Ensure all contents are in DER format instead of possibly using
    +            cached BER-encoded data
    +        """
    +
    +        if self.children is None:
    +            self._parse_children()
    +
    +        child_encodings = []
    +        for child in self:
    +            child_encodings.append(child.dump(force=force))
    +
    +        self._contents = b''.join(sorted(child_encodings))
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
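`SetOf` differs in that the sort key is the raw child encoding rather than the tag; a sketch under the same assumptions:

```python
# SET OF sorts by the encoded bytes of each child, not by Python value.
from asn1crypto.core import Integer, SetOf


class Ints(SetOf):
    _child_spec = Integer


v = Ints([70000, 2, 300])
# force=True re-dumps every child before the byte-wise sort.
print(v.dump(force=True).hex())
```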
    +
    +class EmbeddedPdv(Sequence):
    +    """
    +    A sequence structure
    +    """
    +
    +    tag = 11
    +
    +
    +class NumericString(AbstractString):
    +    """
    +    Represents a numeric string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 18
    +    _encoding = 'latin1'
    +
    +
    +class PrintableString(AbstractString):
    +    """
    +    Represents a printable string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 19
    +    _encoding = 'latin1'
    +
    +
    +class TeletexString(AbstractString):
    +    """
    +    Represents a teletex string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 20
    +    _encoding = 'teletex'
    +
    +
    +class VideotexString(OctetString):
    +    """
    +    Represents a videotex string from ASN.1 as a Python byte string
    +    """
    +
    +    tag = 21
    +
    +
    +class IA5String(AbstractString):
    +    """
    +    Represents an IA5 string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 22
    +    _encoding = 'ascii'
    +
    +
    +class AbstractTime(AbstractString):
    +    """
    +    Represents a time from ASN.1 as a Python datetime.datetime object
    +    """
    +
    +    @property
    +    def _parsed_time(self):
    +        """
    +        The parsed datetime string.
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +
    +        :return:
    +            A dict with the parsed values
    +        """
    +
    +        string = str_cls(self)
    +
    +        m = self._TIMESTRING_RE.match(string)
    +        if not m:
    +            raise ValueError(unwrap(
    +                '''
    +                Error parsing %s to a %s
    +                ''',
    +                string,
    +                type_name(self),
    +            ))
    +
    +        groups = m.groupdict()
    +
    +        tz = None
    +        if groups['zulu']:
    +            tz = timezone.utc
    +        elif groups['dsign']:
    +            sign = 1 if groups['dsign'] == '+' else -1
    +            tz = create_timezone(sign * timedelta(
    +                hours=int(groups['dhour']),
    +                minutes=int(groups['dminute'] or 0)
    +            ))
    +
    +        if groups['fraction']:
    +            # Compute fraction in microseconds
    +            fract = Fraction(
    +                int(groups['fraction']),
    +                10 ** len(groups['fraction'])
    +            ) * 1000000
    +
    +            if groups['minute'] is None:
    +                fract *= 3600
    +            elif groups['second'] is None:
    +                fract *= 60
    +
    +            fract_usec = int(fract.limit_denominator(1))
    +
    +        else:
    +            fract_usec = 0
    +
    +        return {
    +            'year': int(groups['year']),
    +            'month': int(groups['month']),
    +            'day': int(groups['day']),
    +            'hour': int(groups['hour']),
    +            'minute': int(groups['minute'] or 0),
    +            'second': int(groups['second'] or 0),
    +            'tzinfo': tz,
    +            'fraction': fract_usec,
    +        }
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A datetime.datetime object, asn1crypto.util.extended_datetime object or
    +            None. The datetime object is usually timezone aware. If it's naive, then
    +            it's in the sender's local time; see X.680 sect. 42.3
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            parsed = self._parsed_time
    +
    +            fraction = parsed.pop('fraction', 0)
    +
    +            value = self._get_datetime(parsed)
    +
    +            if fraction:
    +                value += timedelta(microseconds=fraction)
    +
    +            self._native = value
    +
    +        return self._native
    +
    +
    +class UTCTime(AbstractTime):
    +    """
    +    Represents a UTC time from ASN.1 as a timezone aware Python datetime.datetime object
    +    """
    +
    +    tag = 23
    +
    +    # Regular expression for UTCTime as described in X.680 sect. 43 and ISO 8601
+    _TIMESTRING_RE = re.compile(r'''
+        ^
+        # YYMMDD
+        (?P<year>\d{2})
+        (?P<month>\d{2})
+        (?P<day>\d{2})
+
+        # hhmm or hhmmss
+        (?P<hour>\d{2})
+        (?P<minute>\d{2})
+        (?P<second>\d{2})?
+
+        # Matches nothing, needed because GeneralizedTime uses this.
+        (?P<fraction>)
+
+        # Z or [-+]hhmm
+        (?:
+            (?P<zulu>Z)
+            |
+            (?:
+                (?P<dsign>[-+])
+                (?P<dhour>\d{2})
+                (?P<dminute>\d{2})
+            )
+        )
+        $
+    ''', re.X)
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A unicode string or a datetime.datetime object
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if isinstance(value, datetime):
    +            if not value.tzinfo:
    +                raise ValueError('Must be timezone aware')
    +
    +            # Convert value to UTC.
    +            value = value.astimezone(utc_with_dst)
    +
    +            if not 1950 <= value.year <= 2049:
    +                raise ValueError('Year of the UTCTime is not in range [1950, 2049], use GeneralizedTime instead')
    +
    +            value = value.strftime('%y%m%d%H%M%SZ')
    +            if _PY2:
    +                value = value.decode('ascii')
    +
    +        AbstractString.set(self, value)
    +        # Set it to None and let the class take care of converting the next
    +        # time that .native is called
    +        self._native = None
    +
    +    def _get_datetime(self, parsed):
    +        """
    +        Create a datetime object from the parsed time.
    +
    +        :return:
    +            An aware datetime.datetime object
    +        """
    +
    +        # X.680 only specifies that UTCTime is not using a century.
    +        # So "18" could as well mean 2118 or 1318.
    +        # X.509 and CMS specify to use UTCTime for years earlier than 2050.
    +        # Assume that UTCTime is only used for years [1950, 2049].
    +        if parsed['year'] < 50:
    +            parsed['year'] += 2000
    +        else:
    +            parsed['year'] += 1900
    +
    +        return datetime(**parsed)
    +
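The 1950–2049 window enforced in `set` matches the pivot in `_get_datetime`; a minimal sketch, assuming this vendored package is importable:

```python
# Two-digit years pivot at 50: 00-49 -> 20xx, 50-99 -> 19xx.
from asn1crypto.core import UTCTime

print(UTCTime('491231235959Z').native.year)  # 2049
print(UTCTime('500101000000Z').native.year)  # 1950
```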
    +
    +class GeneralizedTime(AbstractTime):
    +    """
    +    Represents a generalized time from ASN.1 as a Python datetime.datetime
    +    object or asn1crypto.util.extended_datetime object in UTC
    +    """
    +
    +    tag = 24
    +
    +    # Regular expression for GeneralizedTime as described in X.680 sect. 42 and ISO 8601
+    _TIMESTRING_RE = re.compile(r'''
+        ^
+        # YYYYMMDD
+        (?P<year>\d{4})
+        (?P<month>\d{2})
+        (?P<day>\d{2})
+
+        # hh or hhmm or hhmmss
+        (?P<hour>\d{2})
+        (?:
+            (?P<minute>\d{2})
+            (?P<second>\d{2})?
+        )?
+
+        # Optional fraction; [.,]dddd (one or more decimals)
+        # If Seconds are given, it's fractions of Seconds.
+        # Else if Minutes are given, it's fractions of Minutes.
+        # Else it's fractions of Hours.
+        (?:
+            [,.]
+            (?P<fraction>\d+)
+        )?
+
+        # Optional timezone. If left out, the time is in local time.
+        # Z or [-+]hh or [-+]hhmm
+        (?:
+            (?P<zulu>Z)
+            |
+            (?:
+                (?P<dsign>[-+])
+                (?P<dhour>\d{2})
+                (?P<dminute>\d{2})?
+            )
+        )?
+        $
+    ''', re.X)
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A unicode string, a datetime.datetime object or an
    +            asn1crypto.util.extended_datetime object
    +
    +        :raises:
    +            ValueError - when an invalid value is passed
    +        """
    +
    +        if isinstance(value, (datetime, extended_datetime)):
    +            if not value.tzinfo:
    +                raise ValueError('Must be timezone aware')
    +
    +            # Convert value to UTC.
    +            value = value.astimezone(utc_with_dst)
    +
    +            if value.microsecond:
    +                fraction = '.' + str(value.microsecond).zfill(6).rstrip('0')
    +            else:
    +                fraction = ''
    +
    +            value = value.strftime('%Y%m%d%H%M%S') + fraction + 'Z'
    +            if _PY2:
    +                value = value.decode('ascii')
    +
    +        AbstractString.set(self, value)
    +        # Set it to None and let the class take care of converting the next
    +        # time that .native is called
    +        self._native = None
    +
    +    def _get_datetime(self, parsed):
    +        """
    +        Create a datetime object from the parsed time.
    +
    +        :return:
    +            A datetime.datetime object or asn1crypto.util.extended_datetime object.
    +            It may or may not be aware.
    +        """
    +
    +        if parsed['year'] == 0:
    +            # datetime does not support year 0. Use extended_datetime instead.
    +            return extended_datetime(**parsed)
    +        else:
    +            return datetime(**parsed)
    +
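The fraction handling in `_parsed_time` scales by whichever unit is the smallest one present in the string; a sketch:

```python
# The fraction is fractions of seconds, minutes or hours depending on
# which time components the string actually carries.
from asn1crypto.core import GeneralizedTime

print(GeneralizedTime('20191231235959.5Z').native)  # ...23:59:59.500000+00:00
print(GeneralizedTime('2019123123.5Z').native)      # ...23:30:00+00:00
```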
    +
    +class GraphicString(AbstractString):
    +    """
    +    Represents a graphic string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 25
    +    # This is technically not correct since this type can contain any charset
    +    _encoding = 'latin1'
    +
    +
    +class VisibleString(AbstractString):
    +    """
    +    Represents a visible string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 26
    +    _encoding = 'latin1'
    +
    +
    +class GeneralString(AbstractString):
    +    """
    +    Represents a general string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 27
    +    # This is technically not correct since this type can contain any charset
    +    _encoding = 'latin1'
    +
    +
    +class UniversalString(AbstractString):
    +    """
    +    Represents a universal string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 28
    +    _encoding = 'utf-32-be'
    +
    +
    +class CharacterString(AbstractString):
    +    """
    +    Represents a character string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 29
    +    # This is technically not correct since this type can contain any charset
    +    _encoding = 'latin1'
    +
    +
    +class BMPString(AbstractString):
    +    """
    +    Represents a BMP string from ASN.1 as a Python unicode string
    +    """
    +
    +    tag = 30
    +    _encoding = 'utf-16-be'
    +
    +
    +def _basic_debug(prefix, self):
    +    """
    +    Prints out basic information about an Asn1Value object. Extracted for reuse
    +    among different classes that customize the debug information.
    +
    +    :param prefix:
    +        A unicode string of spaces to prefix output line with
    +
    +    :param self:
    +        The object to print the debugging information about
    +    """
    +
    +    print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
    +    if self._header:
    +        print('%s  Header: 0x%s' % (prefix, binascii.hexlify(self._header or b'').decode('utf-8')))
    +
    +    has_header = self.method is not None and self.class_ is not None and self.tag is not None
    +    if has_header:
    +        method_name = METHOD_NUM_TO_NAME_MAP.get(self.method)
    +        class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_)
    +
    +    if self.explicit is not None:
    +        for class_, tag in self.explicit:
    +            print(
    +                '%s    %s tag %s (explicitly tagged)' %
    +                (
    +                    prefix,
    +                    CLASS_NUM_TO_NAME_MAP.get(class_),
    +                    tag
    +                )
    +            )
    +        if has_header:
    +            print('%s      %s %s %s' % (prefix, method_name, class_name, self.tag))
    +
    +    elif self.implicit:
    +        if has_header:
    +            print('%s    %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag))
    +
    +    elif has_header:
    +        print('%s    %s %s tag %s' % (prefix, method_name, class_name, self.tag))
    +
    +    if self._trailer:
    +        print('%s  Trailer: 0x%s' % (prefix, binascii.hexlify(self._trailer or b'').decode('utf-8')))
    +
    +    print('%s  Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8')))
    +
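`_basic_debug` is the worker behind the public `debug()` method on values, so its output can be inspected on anything parsed. A sketch, assuming the module-level `load()` helper defined earlier in this file:

```python
# Prints the type name, header bytes, class/method/tag and hex contents.
from asn1crypto.core import load

load(b'\x02\x01\x05').debug()
```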
    +
    +def _tag_type_to_explicit_implicit(params):
    +    """
    +    Converts old-style "tag_type" and "tag" params to "explicit" and "implicit"
    +
    +    :param params:
    +        A dict of parameters to convert from tag_type/tag to explicit/implicit
    +    """
    +
    +    if 'tag_type' in params:
    +        if params['tag_type'] == 'explicit':
    +            params['explicit'] = (params.get('class', 2), params['tag'])
    +        elif params['tag_type'] == 'implicit':
    +            params['implicit'] = (params.get('class', 2), params['tag'])
    +        del params['tag_type']
    +        del params['tag']
    +        if 'class' in params:
    +            del params['class']
    +
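The conversion mutates the params dict in place. Illustrated by calling the helper directly (it is module-private, so this only runs inside this module and is for illustration only):

```python
# Old-style tagging params are rewritten to the new keys.
params = {'tag_type': 'implicit', 'tag': 3}
_tag_type_to_explicit_implicit(params)
print(params)  # {'implicit': (2, 3)} -- class defaults to 2 (context)
```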
    +
    +def _fix_tagging(value, params):
    +    """
    +    Checks if a value is properly tagged based on the spec, and re/untags as
    +    necessary
    +
    +    :param value:
    +        An Asn1Value object
    +
    +    :param params:
    +        A dict of spec params
    +
    +    :return:
    +        An Asn1Value that is properly tagged
    +    """
    +
    +    _tag_type_to_explicit_implicit(params)
    +
    +    retag = False
    +    if 'implicit' not in params:
    +        if value.implicit is not False:
    +            retag = True
    +    else:
    +        if isinstance(params['implicit'], tuple):
    +            class_, tag = params['implicit']
    +        else:
    +            tag = params['implicit']
    +            class_ = 'context'
    +        if value.implicit is False:
    +            retag = True
    +        elif value.class_ != CLASS_NAME_TO_NUM_MAP[class_] or value.tag != tag:
    +            retag = True
    +
    +    if params.get('explicit') != value.explicit:
    +        retag = True
    +
    +    if retag:
    +        return value.retag(params)
    +    return value
    +
    +
    +def _build_id_tuple(params, spec):
    +    """
    +    Builds a 2-element tuple used to identify fields by grabbing the class_
    +    and tag from an Asn1Value class and the params dict being passed to it
    +
    +    :param params:
    +        A dict of params to pass to spec
    +
    +    :param spec:
    +        An Asn1Value class
    +
    +    :return:
    +        A 2-element integer tuple in the form (class_, tag)
    +    """
    +
    +    # Handle situations where the spec is not known at setup time
    +    if spec is None:
    +        return (None, None)
    +
    +    required_class = spec.class_
    +    required_tag = spec.tag
    +
    +    _tag_type_to_explicit_implicit(params)
    +
    +    if 'explicit' in params:
    +        if isinstance(params['explicit'], tuple):
    +            required_class, required_tag = params['explicit']
    +        else:
    +            required_class = 2
    +            required_tag = params['explicit']
    +    elif 'implicit' in params:
    +        if isinstance(params['implicit'], tuple):
    +            required_class, required_tag = params['implicit']
    +        else:
    +            required_class = 2
    +            required_tag = params['implicit']
    +    if required_class is not None and not isinstance(required_class, int_types):
    +        required_class = CLASS_NAME_TO_NUM_MAP[required_class]
    +
    +    required_class = params.get('class_', required_class)
    +    required_tag = params.get('tag', required_tag)
    +
    +    return (required_class, required_tag)
    +
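A sketch of the resulting identity tuples (again calling the private helper directly, for illustration only):

```python
# Implicit tagging replaces the spec's own (class_, tag) identity.
print(_build_id_tuple({}, Integer))               # (0, 2): universal Integer
print(_build_id_tuple({'implicit': 3}, Integer))  # (2, 3): context tag 3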
    +
    +def _int_to_bit_tuple(value, bits):
    +    """
    +    Format value as a tuple of 1s and 0s.
    +
    +    :param value:
    +        A non-negative integer to format
    +
    +    :param bits:
    +        Number of bits in the output
    +
    +    :return:
    +        A tuple of 1s and 0s with bits members.
    +    """
    +
    +    if not value and not bits:
    +        return ()
    +
    +    result = tuple(map(int, format(value, '0{0}b'.format(bits))))
    +    if len(result) != bits:
    +        raise ValueError('Result too large: {0} > {1}'.format(len(result), bits))
    +
    +    return result
    +
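For example (private helper, illustration only):

```python
print(_int_to_bit_tuple(5, 4))  # (0, 1, 0, 1)
print(_int_to_bit_tuple(0, 0))  # ()
```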
    +
    +_UNIVERSAL_SPECS = {
    +    1: Boolean,
    +    2: Integer,
    +    3: BitString,
    +    4: OctetString,
    +    5: Null,
    +    6: ObjectIdentifier,
    +    7: ObjectDescriptor,
    +    8: InstanceOf,
    +    9: Real,
    +    10: Enumerated,
    +    11: EmbeddedPdv,
    +    12: UTF8String,
    +    13: RelativeOid,
    +    16: Sequence,
    +    17: Set,
    +    18: NumericString,
    +    19: PrintableString,
    +    20: TeletexString,
    +    21: VideotexString,
    +    22: IA5String,
    +    23: UTCTime,
    +    24: GeneralizedTime,
    +    25: GraphicString,
    +    26: VisibleString,
    +    27: GeneralString,
    +    28: UniversalString,
    +    29: CharacterString,
    +    30: BMPString
    +}
    +
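This mapping is what lets spec-less parsing choose a class from the universal tag alone. The module-level `load()` helper defined earlier in this file goes through it; a sketch:

```python
# With no spec supplied, the encoded universal tag picks the class.
from asn1crypto.core import Integer, UTF8String, load

assert isinstance(load(b'\x02\x01\x05'), Integer)    # tag 2
assert isinstance(load(b'\x0c\x03abc'), UTF8String)  # tag 12
print(load(b'\x0c\x03abc').native)                   # 'abc'
```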
    +
    +def _build(class_, method, tag, header, contents, trailer, spec=None, spec_params=None, nested_spec=None):
    +    """
    +    Builds an Asn1Value object generically, or using a spec with optional params
    +
    +    :param class_:
    +        An integer representing the ASN.1 class
    +
    +    :param method:
    +        An integer representing the ASN.1 method
    +
    +    :param tag:
    +        An integer representing the ASN.1 tag
    +
    +    :param header:
    +        A byte string of the ASN.1 header (class, method, tag, length)
    +
    +    :param contents:
    +        A byte string of the ASN.1 value
    +
    +    :param trailer:
    +        A byte string of any ASN.1 trailer (only used by indefinite length encodings)
    +
    +    :param spec:
    +        A class derived from Asn1Value that defines what class_ and tag the
    +        value should have, and the semantics of the encoded value. The
    +        return value will be of this type. If omitted, the encoded value
    +        will be decoded using the standard universal tag based on the
    +        encoded tag number.
    +
    +    :param spec_params:
    +        A dict of params to pass to the spec object
    +
    +    :param nested_spec:
    +        For certain Asn1Value classes (such as OctetString and BitString), the
    +        contents can be further parsed and interpreted as another Asn1Value.
    +        This parameter controls the spec for that sub-parsing.
    +
    +    :return:
    +        An object of the type spec, or if not specified, a child of Asn1Value
    +    """
    +
    +    if spec_params is not None:
    +        _tag_type_to_explicit_implicit(spec_params)
    +
    +    if header is None:
    +        return VOID
    +
    +    header_set = False
    +
    +    # If an explicit specification was passed in, make sure it matches
    +    if spec is not None:
    +        # If there is explicit tagging and contents, we have to split
    +        # the header and trailer off before we do the parsing
    +        no_explicit = spec_params and 'no_explicit' in spec_params
    +        if not no_explicit and (spec.explicit or (spec_params and 'explicit' in spec_params)):
    +            if spec_params:
    +                value = spec(**spec_params)
    +            else:
    +                value = spec()
    +            original_explicit = value.explicit
    +            explicit_info = reversed(original_explicit)
    +            parsed_class = class_
    +            parsed_method = method
    +            parsed_tag = tag
    +            to_parse = contents
    +            explicit_header = header
    +            explicit_trailer = trailer or b''
    +            for expected_class, expected_tag in explicit_info:
    +                if parsed_class != expected_class:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Error parsing %s - explicitly-tagged class should have been
    +                        %s, but %s was found
    +                        ''',
    +                        type_name(value),
    +                        CLASS_NUM_TO_NAME_MAP.get(expected_class),
    +                        CLASS_NUM_TO_NAME_MAP.get(parsed_class, parsed_class)
    +                    ))
    +                if parsed_method != 1:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Error parsing %s - explicitly-tagged method should have
    +                        been %s, but %s was found
    +                        ''',
    +                        type_name(value),
    +                        METHOD_NUM_TO_NAME_MAP.get(1),
    +                        METHOD_NUM_TO_NAME_MAP.get(parsed_method, parsed_method)
    +                    ))
    +                if parsed_tag != expected_tag:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Error parsing %s - explicitly-tagged tag should have been
    +                        %s, but %s was found
    +                        ''',
    +                        type_name(value),
    +                        expected_tag,
    +                        parsed_tag
    +                    ))
    +                info, _ = _parse(to_parse, len(to_parse))
    +                parsed_class, parsed_method, parsed_tag, parsed_header, to_parse, parsed_trailer = info
    +
    +                if not isinstance(value, Choice):
    +                    explicit_header += parsed_header
    +                    explicit_trailer = parsed_trailer + explicit_trailer
    +
    +            value = _build(*info, spec=spec, spec_params={'no_explicit': True})
    +            value._header = explicit_header
    +            value._trailer = explicit_trailer
    +            value.explicit = original_explicit
    +            header_set = True
    +        else:
    +            if spec_params:
    +                value = spec(contents=contents, **spec_params)
    +            else:
    +                value = spec(contents=contents)
    +
    +            if spec is Any:
    +                pass
    +
    +            elif isinstance(value, Choice):
    +                value.validate(class_, tag, contents)
    +                try:
    +                    # Force parsing the Choice now
    +                    value.contents = header + value.contents
    +                    header = b''
    +                    value.parse()
    +                except (ValueError, TypeError) as e:
    +                    args = e.args[1:]
    +                    e.args = (e.args[0] + '\n    while parsing %s' % type_name(value),) + args
    +                    raise e
    +
    +            else:
    +                if class_ != value.class_:
    +                    raise ValueError(unwrap(
    +                        '''
    +                        Error parsing %s - class should have been %s, but %s was
    +                        found
    +                        ''',
    +                        type_name(value),
    +                        CLASS_NUM_TO_NAME_MAP.get(value.class_),
    +                        CLASS_NUM_TO_NAME_MAP.get(class_, class_)
    +                    ))
    +                if method != value.method:
    +                    # Allow parsing a primitive method as constructed if the value
    +                    # is indefinite length. This is to allow parsing BER.
    +                    ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
    +                    if not ber_indef or not isinstance(value, Constructable):
    +                        raise ValueError(unwrap(
    +                            '''
    +                            Error parsing %s - method should have been %s, but %s was found
    +                            ''',
    +                            type_name(value),
    +                            METHOD_NUM_TO_NAME_MAP.get(value.method),
    +                            METHOD_NUM_TO_NAME_MAP.get(method, method)
    +                        ))
    +                    else:
    +                        value.method = method
    +                        value._indefinite = True
    +                if tag != value.tag:
    +                    if isinstance(value._bad_tag, tuple):
    +                        is_bad_tag = tag in value._bad_tag
    +                    else:
    +                        is_bad_tag = tag == value._bad_tag
    +                    if not is_bad_tag:
    +                        raise ValueError(unwrap(
    +                            '''
    +                            Error parsing %s - tag should have been %s, but %s was found
    +                            ''',
    +                            type_name(value),
    +                            value.tag,
    +                            tag
    +                        ))
    +
    +    # For explicitly tagged, un-speced parsings, we use a generic container
    +    # since we will be parsing the contents and discarding the outer object
    +    # anyway a little further on
    +    elif spec_params and 'explicit' in spec_params:
    +        original_value = Asn1Value(contents=contents, **spec_params)
    +        original_explicit = original_value.explicit
    +
    +        to_parse = contents
    +        explicit_header = header
    +        explicit_trailer = trailer or b''
    +        for expected_class, expected_tag in reversed(original_explicit):
    +            info, _ = _parse(to_parse, len(to_parse))
    +            _, _, _, parsed_header, to_parse, parsed_trailer = info
    +            explicit_header += parsed_header
    +            explicit_trailer = parsed_trailer + explicit_trailer
    +        value = _build(*info, spec=spec, spec_params={'no_explicit': True})
+        value._header = explicit_header
+        value._trailer = explicit_trailer
    +        value.explicit = original_explicit
    +        header_set = True
    +
    +    # If no spec was specified, allow anything and just process what
    +    # is in the input data
    +    else:
    +        if tag not in _UNIVERSAL_SPECS:
    +            raise ValueError(unwrap(
    +                '''
    +                Unknown element - %s class, %s method, tag %s
    +                ''',
    +                CLASS_NUM_TO_NAME_MAP.get(class_),
    +                METHOD_NUM_TO_NAME_MAP.get(method),
    +                tag
    +            ))
    +
    +        spec = _UNIVERSAL_SPECS[tag]
    +
    +        value = spec(contents=contents, class_=class_)
    +        ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
    +        if ber_indef and isinstance(value, Constructable):
    +            value._indefinite = True
    +        value.method = method
    +
    +    if not header_set:
    +        value._header = header
    +        value._trailer = trailer or b''
    +
    +    # Destroy any default value that our contents have overwritten
    +    value._native = None
    +
    +    if nested_spec:
    +        try:
    +            value.parse(nested_spec)
    +        except (ValueError, TypeError) as e:
    +            args = e.args[1:]
    +            e.args = (e.args[0] + '\n    while parsing %s' % type_name(value),) + args
    +            raise e
    +
    +    return value
    +
    +
    +def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None, strict=False):
    +    """
    +    Parses a byte string generically, or using a spec with optional params
    +
    +    :param encoded_data:
    +        A byte string that contains BER-encoded data
    +
    +    :param pointer:
    +        The index in the byte string to parse from
    +
    +    :param spec:
    +        A class derived from Asn1Value that defines what class_ and tag the
    +        value should have, and the semantics of the encoded value. The
    +        return value will be of this type. If omitted, the encoded value
    +        will be decoded using the standard universal tag based on the
    +        encoded tag number.
    +
    +    :param spec_params:
    +        A dict of params to pass to the spec object
    +
    +    :param strict:
    +        A boolean indicating if trailing data should be forbidden - if so, a
    +        ValueError will be raised when trailing data exists
    +
    +    :return:
    +        A 2-element tuple:
    +         - 0: An object of the type spec, or if not specified, a child of Asn1Value
    +         - 1: An integer indicating how many bytes were consumed
    +    """
    +
    +    encoded_len = len(encoded_data)
    +    info, new_pointer = _parse(encoded_data, encoded_len, pointer)
    +    if strict and new_pointer != pointer + encoded_len:
    +        extra_bytes = pointer + encoded_len - new_pointer
    +        raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
    +    return (_build(*info, spec=spec, spec_params=spec_params), new_pointer)
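The `strict` flag is surfaced through the public loaders; a sketch via `load()`, which wraps `_parse_build`:

```python
# strict=True turns trailing bytes into a ValueError.
from asn1crypto.core import load

load(b'\x02\x01\x05\x00')  # trailing byte silently ignored by default
try:
    load(b'\x02\x01\x05\x00', strict=True)
except ValueError as e:
    print(e)  # Extra data - 1 bytes of trailing data were provided
```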
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/crl.py b/server/www/packages/packages-windows/x86/asn1crypto/crl.py
    new file mode 100644
    index 0000000..84cb168
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/crl.py
    @@ -0,0 +1,536 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for certificate revocation lists (CRL). Exports the
    +following items:
    +
    + - CertificateList()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import hashlib
    +
    +from .algos import SignedDigestAlgorithm
    +from .core import (
    +    Boolean,
    +    Enumerated,
    +    GeneralizedTime,
    +    Integer,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    ParsableOctetString,
    +    Sequence,
    +    SequenceOf,
    +)
    +from .x509 import (
    +    AuthorityInfoAccessSyntax,
    +    AuthorityKeyIdentifier,
    +    CRLDistributionPoints,
    +    DistributionPointName,
    +    GeneralNames,
    +    Name,
    +    ReasonFlags,
    +    Time,
    +)
    +
    +
    +# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
    +
    +
    +class Version(Integer):
    +    _map = {
    +        0: 'v1',
    +        1: 'v2',
    +        2: 'v3',
    +    }
    +
    +
    +class IssuingDistributionPoint(Sequence):
    +    _fields = [
    +        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
    +        ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
    +        ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
    +        ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
    +        ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
    +        ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
    +    ]
    +
    +
    +class TBSCertListExtensionId(ObjectIdentifier):
    +    _map = {
    +        '2.5.29.18': 'issuer_alt_name',
    +        '2.5.29.20': 'crl_number',
    +        '2.5.29.27': 'delta_crl_indicator',
    +        '2.5.29.28': 'issuing_distribution_point',
    +        '2.5.29.35': 'authority_key_identifier',
    +        '2.5.29.46': 'freshest_crl',
    +        '1.3.6.1.5.5.7.1.1': 'authority_information_access',
    +    }
    +
    +
    +class TBSCertListExtension(Sequence):
    +    _fields = [
    +        ('extn_id', TBSCertListExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'issuer_alt_name': GeneralNames,
    +        'crl_number': Integer,
    +        'delta_crl_indicator': Integer,
    +        'issuing_distribution_point': IssuingDistributionPoint,
    +        'authority_key_identifier': AuthorityKeyIdentifier,
    +        'freshest_crl': CRLDistributionPoints,
    +        'authority_information_access': AuthorityInfoAccessSyntax,
    +    }
    +
    +
    +class TBSCertListExtensions(SequenceOf):
    +    _child_spec = TBSCertListExtension
    +
    +
    +class CRLReason(Enumerated):
    +    _map = {
    +        0: 'unspecified',
    +        1: 'key_compromise',
    +        2: 'ca_compromise',
    +        3: 'affiliation_changed',
    +        4: 'superseded',
    +        5: 'cessation_of_operation',
    +        6: 'certificate_hold',
    +        8: 'remove_from_crl',
    +        9: 'privilege_withdrawn',
    +        10: 'aa_compromise',
    +    }
    +
    +    @property
    +    def human_friendly(self):
    +        """
    +        :return:
+            A unicode string with a revocation description that is suitable to
+            show to end-users. Starts with a lower case letter and is phrased in
    +            such a way that it makes sense after the phrase "because of" or
    +            "due to".
    +        """
    +
    +        return {
    +            'unspecified': 'an unspecified reason',
    +            'key_compromise': 'a compromised key',
    +            'ca_compromise': 'the CA being compromised',
    +            'affiliation_changed': 'an affiliation change',
    +            'superseded': 'certificate supersession',
    +            'cessation_of_operation': 'a cessation of operation',
    +            'certificate_hold': 'a certificate hold',
    +            'remove_from_crl': 'removal from the CRL',
+            'privilege_withdrawn': 'privilege withdrawal',
    +            'aa_compromise': 'the AA being compromised',
    +        }[self.native]
    +
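A sketch of the intended phrasing (assuming this vendored package is importable):

```python
# human_friendly values read naturally after "because of".
from asn1crypto.crl import CRLReason

reason = CRLReason('key_compromise')
print('Revoked because of %s.' % reason.human_friendly)
# Revoked because of a compromised key.
```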
    +
    +class CRLEntryExtensionId(ObjectIdentifier):
    +    _map = {
    +        '2.5.29.21': 'crl_reason',
    +        '2.5.29.23': 'hold_instruction_code',
    +        '2.5.29.24': 'invalidity_date',
    +        '2.5.29.29': 'certificate_issuer',
    +    }
    +
    +
    +class CRLEntryExtension(Sequence):
    +    _fields = [
    +        ('extn_id', CRLEntryExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'crl_reason': CRLReason,
    +        'hold_instruction_code': ObjectIdentifier,
    +        'invalidity_date': GeneralizedTime,
    +        'certificate_issuer': GeneralNames,
    +    }
    +
    +
    +class CRLEntryExtensions(SequenceOf):
    +    _child_spec = CRLEntryExtension
    +
    +
    +class RevokedCertificate(Sequence):
    +    _fields = [
    +        ('user_certificate', Integer),
    +        ('revocation_date', Time),
    +        ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _crl_reason_value = None
    +    _invalidity_date_value = None
    +    _certificate_issuer_value = None
    +    _issuer_name = False
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['crl_entry_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
    +        Returns a set of the names (or OID if not a known extension) of the
    +        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def crl_reason_value(self):
    +        """
    +        This extension indicates the reason that a certificate was revoked.
    +
    +        :return:
    +            None or a CRLReason object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._crl_reason_value
    +
    +    @property
    +    def invalidity_date_value(self):
    +        """
    +        This extension indicates the suspected date/time the private key was
    +        compromised or the certificate became invalid. This would usually be
    +        before the revocation date, which is when the CA processed the
    +        revocation.
    +
    +        :return:
    +            None or a GeneralizedTime object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._invalidity_date_value
    +
    +    @property
    +    def certificate_issuer_value(self):
    +        """
    +        This extension indicates the issuer of the certificate in question,
    +        and is used in indirect CRLs. CRL entries without this extension are
    +        for certificates issued from the last seen issuer.
    +
    +        :return:
    +            None or an x509.GeneralNames object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._certificate_issuer_value
    +
    +    @property
    +    def issuer_name(self):
    +        """
    +        :return:
    +            None, or an asn1crypto.x509.Name object for the issuer of the cert
    +        """
    +
    +        if self._issuer_name is False:
    +            self._issuer_name = None
    +            if self.certificate_issuer_value:
    +                for general_name in self.certificate_issuer_value:
    +                    if general_name.name == 'directory_name':
    +                        self._issuer_name = general_name.chosen
    +                        break
    +        return self._issuer_name
    +
    +
    +class RevokedCertificates(SequenceOf):
    +    _child_spec = RevokedCertificate
    +
    +
    +class TbsCertList(Sequence):
    +    _fields = [
    +        ('version', Version, {'optional': True}),
    +        ('signature', SignedDigestAlgorithm),
    +        ('issuer', Name),
    +        ('this_update', Time),
    +        ('next_update', Time, {'optional': True}),
    +        ('revoked_certificates', RevokedCertificates, {'optional': True}),
    +        ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class CertificateList(Sequence):
    +    _fields = [
    +        ('tbs_cert_list', TbsCertList),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _issuer_alt_name_value = None
    +    _crl_number_value = None
    +    _delta_crl_indicator_value = None
    +    _issuing_distribution_point_value = None
    +    _authority_key_identifier_value = None
    +    _freshest_crl_value = None
    +    _authority_information_access_value = None
    +    _issuer_cert_urls = None
    +    _delta_crl_distribution_points = None
    +    _sha1 = None
    +    _sha256 = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['tbs_cert_list']['crl_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
    +        Returns a set of the names (or OID if not a known extension) of the
    +        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def issuer_alt_name_value(self):
    +        """
    +        This extension allows associating one or more alternative names with
    +        the issuer of the CRL.
    +
    +        :return:
    +            None or an x509.GeneralNames object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._issuer_alt_name_value
    +
    +    @property
    +    def crl_number_value(self):
    +        """
    +        This extension adds a monotonically increasing number to the CRL and is
    +        used to distinguish different versions of the CRL.
    +
    +        :return:
    +            None or an Integer object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._crl_number_value
    +
    +    @property
    +    def delta_crl_indicator_value(self):
    +        """
    +        This extension indicates a CRL is a delta CRL, and contains the CRL
    +        number of the base CRL that it is a delta from.
    +
    +        :return:
    +            None or an Integer object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._delta_crl_indicator_value
    +
    +    @property
    +    def issuing_distribution_point_value(self):
    +        """
    +        This extension includes information about what types of revocations
    +        and certificates are part of the CRL.
    +
    +        :return:
    +            None or an IssuingDistributionPoint object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._issuing_distribution_point_value
    +
    +    @property
    +    def authority_key_identifier_value(self):
    +        """
    +        This extension helps in identifying the public key with which to
    +        validate the authenticity of the CRL.
    +
    +        :return:
    +            None or an AuthorityKeyIdentifier object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._authority_key_identifier_value
    +
    +    @property
    +    def freshest_crl_value(self):
    +        """
    +        This extension is used in complete CRLs to indicate where a delta CRL
    +        may be located.
    +
    +        :return:
    +            None or a CRLDistributionPoints object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._freshest_crl_value
    +
    +    @property
    +    def authority_information_access_value(self):
    +        """
    +        This extension is used to provide a URL with which to download the
    +        certificate used to sign this CRL.
    +
    +        :return:
    +            None or an AuthorityInfoAccessSyntax object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._authority_information_access_value
    +
    +    @property
    +    def issuer(self):
    +        """
    +        :return:
    +            An asn1crypto.x509.Name object for the issuer of the CRL
    +        """
    +
    +        return self['tbs_cert_list']['issuer']
    +
    +    @property
    +    def authority_key_identifier(self):
    +        """
    +        :return:
    +            None or a byte string of the key_identifier from the authority key
    +            identifier extension
    +        """
    +
    +        if not self.authority_key_identifier_value:
    +            return None
    +
    +        return self.authority_key_identifier_value['key_identifier'].native
    +
    +    @property
    +    def issuer_cert_urls(self):
    +        """
    +        :return:
    +            A list of unicode strings that are URLs that should contain either
    +            an individual DER-encoded X.509 certificate, or a DER-encoded CMS
    +            message containing multiple certificates
    +        """
    +
    +        if self._issuer_cert_urls is None:
    +            self._issuer_cert_urls = []
    +            if self.authority_information_access_value:
    +                for entry in self.authority_information_access_value:
    +                    if entry['access_method'].native == 'ca_issuers':
    +                        location = entry['access_location']
    +                        if location.name != 'uniform_resource_identifier':
    +                            continue
    +                        url = location.native
    +                        if url.lower()[0:7] == 'http://':
    +                            self._issuer_cert_urls.append(url)
    +        return self._issuer_cert_urls
    +
    +    @property
    +    def delta_crl_distribution_points(self):
    +        """
    +        Returns delta CRL URLs - only applies to complete CRLs
    +
    +        :return:
    +            A list of zero or more DistributionPoint objects
    +        """
    +
    +        if self._delta_crl_distribution_points is None:
    +            self._delta_crl_distribution_points = []
    +
    +            if self.freshest_crl_value is not None:
    +                for distribution_point in self.freshest_crl_value:
    +                    distribution_point_name = distribution_point['distribution_point']
+                    # RFC 5280 indicates a conforming CA should not use the relative form
    +                    if distribution_point_name.name == 'name_relative_to_crl_issuer':
    +                        continue
    +                    # This library is currently only concerned with HTTP-based CRLs
    +                    for general_name in distribution_point_name.chosen:
    +                        if general_name.name == 'uniform_resource_identifier':
    +                            self._delta_crl_distribution_points.append(distribution_point)
    +
    +        return self._delta_crl_distribution_points
    +
    +    @property
    +    def signature(self):
    +        """
    +        :return:
    +            A byte string of the signature
    +        """
    +
    +        return self['signature'].native
    +
    +    @property
    +    def sha1(self):
    +        """
    +        :return:
    +            The SHA1 hash of the DER-encoded bytes of this certificate list
    +        """
    +
    +        if self._sha1 is None:
    +            self._sha1 = hashlib.sha1(self.dump()).digest()
    +        return self._sha1
    +
    +    @property
    +    def sha256(self):
    +        """
    +        :return:
    +            The SHA-256 hash of the DER-encoded bytes of this certificate list
    +        """
    +
    +        if self._sha256 is None:
    +            self._sha256 = hashlib.sha256(self.dump()).digest()
    +        return self._sha256
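A typical consumption pattern for `CertificateList`, sketched with a hypothetical file name; the lazy extension properties return `None` when an extension is absent:

```python
# Walk a DER-encoded CRL (list.crl is a hypothetical input file).
from asn1crypto import crl

with open('list.crl', 'rb') as f:
    cert_list = crl.CertificateList.load(f.read())

print(cert_list.issuer.native)
if cert_list.crl_number_value is not None:
    print(cert_list.crl_number_value.native)

# Assumes the CRL actually lists revoked certificates.
for revoked in cert_list['tbs_cert_list']['revoked_certificates']:
    print(revoked['user_certificate'].native,
          revoked['revocation_date'].native,
          revoked.crl_reason_value)
```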
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/csr.py b/server/www/packages/packages-windows/x86/asn1crypto/csr.py
    new file mode 100644
    index 0000000..7ea2848
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/csr.py
    @@ -0,0 +1,96 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for certificate signing requests (CSR). Exports the
    +following items:
    +
+ - CertificationRequest()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from .algos import SignedDigestAlgorithm
    +from .core import (
    +    Any,
    +    Integer,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    Sequence,
    +    SetOf,
    +)
    +from .keys import PublicKeyInfo
    +from .x509 import DirectoryString, Extensions, Name
    +
    +
    +# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
    +# and https://tools.ietf.org/html/rfc2985
    +
    +
    +class Version(Integer):
    +    _map = {
    +        0: 'v1',
    +    }
    +
    +
    +class CSRAttributeType(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.9.7': 'challenge_password',
    +        '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
    +        '1.2.840.113549.1.9.14': 'extension_request',
    +    }
    +
    +
    +class SetOfDirectoryString(SetOf):
    +    _child_spec = DirectoryString
    +
    +
    +class Attribute(Sequence):
    +    _fields = [
    +        ('type', ObjectIdentifier),
    +        ('values', SetOf, {'spec': Any}),
    +    ]
    +
    +
    +class SetOfAttributes(SetOf):
    +    _child_spec = Attribute
    +
    +
    +class SetOfExtensions(SetOf):
    +    _child_spec = Extensions
    +
    +
    +class CRIAttribute(Sequence):
    +    _fields = [
    +        ('type', CSRAttributeType),
    +        ('values', Any),
    +    ]
    +
    +    _oid_pair = ('type', 'values')
    +    _oid_specs = {
    +        'challenge_password': SetOfDirectoryString,
    +        'extended_certificate_attributes': SetOfAttributes,
    +        'extension_request': SetOfExtensions,
    +    }
    +
    +
    +class CRIAttributes(SetOf):
    +    _child_spec = CRIAttribute
    +
    +
    +class CertificationRequestInfo(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('subject', Name),
    +        ('subject_pk_info', PublicKeyInfo),
    +        ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class CertificationRequest(Sequence):
    +    _fields = [
    +        ('certification_request_info', CertificationRequestInfo),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +    ]
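+
+# Usage sketch (illustrative; `der_bytes` is assumed to hold a DER-encoded
+# CSR):
+#
+#   req = CertificationRequest.load(der_bytes)
+#   subject = req['certification_request_info']['subject'].native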
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/keys.py b/server/www/packages/packages-windows/x86/asn1crypto/keys.py
    new file mode 100644
    index 0000000..599929f
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/keys.py
    @@ -0,0 +1,1279 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for public and private keys. Exports the following items:
    +
    + - DSAPrivateKey()
    + - ECPrivateKey()
    + - EncryptedPrivateKeyInfo()
    + - PrivateKeyInfo()
    + - PublicKeyInfo()
    + - RSAPrivateKey()
    + - RSAPublicKey()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import hashlib
    +import math
    +
    +from ._errors import unwrap, APIException
    +from ._types import type_name, byte_cls
    +from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, RSAESOAEPParams, RSASSAPSSParams
    +from .core import (
    +    Any,
    +    Asn1Value,
    +    BitString,
    +    Choice,
    +    Integer,
    +    IntegerOctetString,
    +    Null,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    OctetString,
    +    ParsableOctetString,
    +    ParsableOctetBitString,
    +    Sequence,
    +    SequenceOf,
    +    SetOf,
    +)
    +from .util import int_from_bytes, int_to_bytes
    +
    +
    +class OtherPrimeInfo(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3447#page-46
    +    """
    +
    +    _fields = [
    +        ('prime', Integer),
    +        ('exponent', Integer),
    +        ('coefficient', Integer),
    +    ]
    +
    +
    +class OtherPrimeInfos(SequenceOf):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3447#page-46
    +    """
    +
    +    _child_spec = OtherPrimeInfo
    +
    +
    +class RSAPrivateKeyVersion(Integer):
    +    """
    +    Original Name: Version
    +    Source: https://tools.ietf.org/html/rfc3447#page-45
    +    """
    +
    +    _map = {
    +        0: 'two-prime',
    +        1: 'multi',
    +    }
    +
    +
    +class RSAPrivateKey(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3447#page-45
    +    """
    +
    +    _fields = [
    +        ('version', RSAPrivateKeyVersion),
    +        ('modulus', Integer),
    +        ('public_exponent', Integer),
    +        ('private_exponent', Integer),
    +        ('prime1', Integer),
    +        ('prime2', Integer),
    +        ('exponent1', Integer),
    +        ('exponent2', Integer),
    +        ('coefficient', Integer),
    +        ('other_prime_infos', OtherPrimeInfos, {'optional': True})
    +    ]
    +
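+# Usage sketch (illustrative; `der` is assumed to hold a PKCS#1-encoded
+# RSA private key):
+#
+#   key = RSAPrivateKey.load(der)
+#   n = key['modulus'].native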
    +
    +class RSAPublicKey(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3447#page-44
    +    """
    +
    +    _fields = [
    +        ('modulus', Integer),
    +        ('public_exponent', Integer)
    +    ]
    +
    +
    +class DSAPrivateKey(Sequence):
    +    """
    +    The ASN.1 structure that OpenSSL uses to store a DSA private key that is
+    not part of a PKCS#8 structure. Reverse engineered from the
+    English-language description on the linked OpenSSL documentation page.
    +
    +    Original Name: None
    +    Source: https://www.openssl.org/docs/apps/dsa.html
    +    """
    +
    +    _fields = [
    +        ('version', Integer),
    +        ('p', Integer),
    +        ('q', Integer),
    +        ('g', Integer),
    +        ('public_key', Integer),
    +        ('private_key', Integer),
    +    ]
    +
    +
    +class _ECPoint():
    +    """
    +    In both PublicKeyInfo and PrivateKeyInfo, the EC public key is a byte
    +    string that is encoded as a bit string. This class adds convenience
    +    methods for converting to and from the byte string to a pair of integers
    +    that are the X and Y coordinates.
    +    """
    +
    +    @classmethod
    +    def from_coords(cls, x, y):
    +        """
    +        Creates an ECPoint object from the X and Y integer coordinates of the
    +        point
    +
    +        :param x:
    +            The X coordinate, as an integer
    +
    +        :param y:
    +            The Y coordinate, as an integer
    +
    +        :return:
    +            An ECPoint object
    +        """
    +
+        # bit_length() avoids the float-precision and power-of-two edge cases
+        # that math.log() hits with large coordinate values
+        x_bytes = int(math.ceil(x.bit_length() / 8.0))
+        y_bytes = int(math.ceil(y.bit_length() / 8.0))
    +
    +        num_bytes = max(x_bytes, y_bytes)
    +
    +        byte_string = b'\x04'
    +        byte_string += int_to_bytes(x, width=num_bytes)
    +        byte_string += int_to_bytes(y, width=num_bytes)
    +
    +        return cls(byte_string)
    +
    +    def to_coords(self):
    +        """
    +        Returns the X and Y coordinates for this EC point, as native Python
    +        integers
    +
    +        :return:
    +            A 2-element tuple containing integers (X, Y)
    +        """
    +
    +        data = self.native
    +        first_byte = data[0:1]
    +
    +        # Uncompressed
    +        if first_byte == b'\x04':
    +            remaining = data[1:]
    +            field_len = len(remaining) // 2
    +            x = int_from_bytes(remaining[0:field_len])
    +            y = int_from_bytes(remaining[field_len:])
    +            return (x, y)
    +
    +        if first_byte not in set([b'\x02', b'\x03']):
    +            raise ValueError(unwrap(
    +                '''
    +                Invalid EC public key - first byte is incorrect
    +                '''
    +            ))
    +
    +        raise ValueError(unwrap(
    +            '''
    +            Compressed representations of EC public keys are not supported due
    +            to patent US6252960
    +            '''
    +        ))
    +
    +
    +class ECPoint(OctetString, _ECPoint):
    +
    +    pass
    +
    +
    +class ECPointBitString(OctetBitString, _ECPoint):
    +
    +    pass
    +
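+# Illustrative round trip for the point helpers above (x and y are assumed
+# positive coordinate integers):
+#
+#   point = ECPoint.from_coords(x, y)   # encodes b'\x04' + X || Y
+#   assert point.to_coords() == (x, y)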
    +
    +class SpecifiedECDomainVersion(Integer):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 104
    +    """
    +    _map = {
    +        1: 'ecdpVer1',
    +        2: 'ecdpVer2',
    +        3: 'ecdpVer3',
    +    }
    +
    +
    +class FieldType(ObjectIdentifier):
    +    """
    +    Original Name: None
    +    Source: http://www.secg.org/sec1-v2.pdf page 101
    +    """
    +
    +    _map = {
    +        '1.2.840.10045.1.1': 'prime_field',
    +        '1.2.840.10045.1.2': 'characteristic_two_field',
    +    }
    +
    +
    +class CharacteristicTwoBasis(ObjectIdentifier):
    +    """
    +    Original Name: None
    +    Source: http://www.secg.org/sec1-v2.pdf page 102
    +    """
    +
    +    _map = {
    +        '1.2.840.10045.1.2.1.1': 'gn_basis',
    +        '1.2.840.10045.1.2.1.2': 'tp_basis',
    +        '1.2.840.10045.1.2.1.3': 'pp_basis',
    +    }
    +
    +
    +class Pentanomial(Sequence):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 102
    +    """
    +
    +    _fields = [
    +        ('k1', Integer),
    +        ('k2', Integer),
    +        ('k3', Integer),
    +    ]
    +
    +
    +class CharacteristicTwo(Sequence):
    +    """
    +    Original Name: Characteristic-two
    +    Source: http://www.secg.org/sec1-v2.pdf page 101
    +    """
    +
    +    _fields = [
    +        ('m', Integer),
    +        ('basis', CharacteristicTwoBasis),
    +        ('parameters', Any),
    +    ]
    +
    +    _oid_pair = ('basis', 'parameters')
    +    _oid_specs = {
    +        'gn_basis': Null,
    +        'tp_basis': Integer,
    +        'pp_basis': Pentanomial,
    +    }
    +
    +
    +class FieldID(Sequence):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 100
    +    """
    +
    +    _fields = [
    +        ('field_type', FieldType),
    +        ('parameters', Any),
    +    ]
    +
    +    _oid_pair = ('field_type', 'parameters')
    +    _oid_specs = {
    +        'prime_field': Integer,
    +        'characteristic_two_field': CharacteristicTwo,
    +    }
    +
    +
    +class Curve(Sequence):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 104
    +    """
    +
    +    _fields = [
    +        ('a', OctetString),
    +        ('b', OctetString),
    +        ('seed', OctetBitString, {'optional': True}),
    +    ]
    +
    +
    +class SpecifiedECDomain(Sequence):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 103
    +    """
    +
    +    _fields = [
    +        ('version', SpecifiedECDomainVersion),
    +        ('field_id', FieldID),
    +        ('curve', Curve),
    +        ('base', ECPoint),
    +        ('order', Integer),
    +        ('cofactor', Integer, {'optional': True}),
    +        ('hash', DigestAlgorithm, {'optional': True}),
    +    ]
    +
    +
    +class NamedCurve(ObjectIdentifier):
    +    """
    +    Various named curves
    +
    +    Original Name: None
    +    Source: https://tools.ietf.org/html/rfc3279#page-23,
    +            https://tools.ietf.org/html/rfc5480#page-5
    +    """
    +
    +    _map = {
    +        # https://tools.ietf.org/html/rfc3279#page-23
    +        '1.2.840.10045.3.0.1': 'c2pnb163v1',
    +        '1.2.840.10045.3.0.2': 'c2pnb163v2',
    +        '1.2.840.10045.3.0.3': 'c2pnb163v3',
    +        '1.2.840.10045.3.0.4': 'c2pnb176w1',
    +        '1.2.840.10045.3.0.5': 'c2tnb191v1',
    +        '1.2.840.10045.3.0.6': 'c2tnb191v2',
    +        '1.2.840.10045.3.0.7': 'c2tnb191v3',
    +        '1.2.840.10045.3.0.8': 'c2onb191v4',
    +        '1.2.840.10045.3.0.9': 'c2onb191v5',
    +        '1.2.840.10045.3.0.10': 'c2pnb208w1',
    +        '1.2.840.10045.3.0.11': 'c2tnb239v1',
    +        '1.2.840.10045.3.0.12': 'c2tnb239v2',
    +        '1.2.840.10045.3.0.13': 'c2tnb239v3',
    +        '1.2.840.10045.3.0.14': 'c2onb239v4',
    +        '1.2.840.10045.3.0.15': 'c2onb239v5',
    +        '1.2.840.10045.3.0.16': 'c2pnb272w1',
    +        '1.2.840.10045.3.0.17': 'c2pnb304w1',
    +        '1.2.840.10045.3.0.18': 'c2tnb359v1',
    +        '1.2.840.10045.3.0.19': 'c2pnb368w1',
    +        '1.2.840.10045.3.0.20': 'c2tnb431r1',
    +        '1.2.840.10045.3.1.2': 'prime192v2',
    +        '1.2.840.10045.3.1.3': 'prime192v3',
    +        '1.2.840.10045.3.1.4': 'prime239v1',
    +        '1.2.840.10045.3.1.5': 'prime239v2',
    +        '1.2.840.10045.3.1.6': 'prime239v3',
    +        # https://tools.ietf.org/html/rfc5480#page-5
    +        # http://www.secg.org/SEC2-Ver-1.0.pdf
    +        '1.2.840.10045.3.1.1': 'secp192r1',
    +        '1.2.840.10045.3.1.7': 'secp256r1',
    +        '1.3.132.0.1': 'sect163k1',
    +        '1.3.132.0.2': 'sect163r1',
    +        '1.3.132.0.3': 'sect239k1',
    +        '1.3.132.0.4': 'sect113r1',
    +        '1.3.132.0.5': 'sect113r2',
    +        '1.3.132.0.6': 'secp112r1',
    +        '1.3.132.0.7': 'secp112r2',
    +        '1.3.132.0.8': 'secp160r1',
    +        '1.3.132.0.9': 'secp160k1',
    +        '1.3.132.0.10': 'secp256k1',
    +        '1.3.132.0.15': 'sect163r2',
    +        '1.3.132.0.16': 'sect283k1',
    +        '1.3.132.0.17': 'sect283r1',
    +        '1.3.132.0.22': 'sect131r1',
    +        '1.3.132.0.23': 'sect131r2',
    +        '1.3.132.0.24': 'sect193r1',
    +        '1.3.132.0.25': 'sect193r2',
    +        '1.3.132.0.26': 'sect233k1',
    +        '1.3.132.0.27': 'sect233r1',
    +        '1.3.132.0.28': 'secp128r1',
    +        '1.3.132.0.29': 'secp128r2',
    +        '1.3.132.0.30': 'secp160r2',
    +        '1.3.132.0.31': 'secp192k1',
    +        '1.3.132.0.32': 'secp224k1',
    +        '1.3.132.0.33': 'secp224r1',
    +        '1.3.132.0.34': 'secp384r1',
    +        '1.3.132.0.35': 'secp521r1',
    +        '1.3.132.0.36': 'sect409k1',
    +        '1.3.132.0.37': 'sect409r1',
    +        '1.3.132.0.38': 'sect571k1',
    +        '1.3.132.0.39': 'sect571r1',
    +        # https://tools.ietf.org/html/rfc5639#section-4.1
    +        '1.3.36.3.3.2.8.1.1.1': 'brainpoolp160r1',
    +        '1.3.36.3.3.2.8.1.1.2': 'brainpoolp160t1',
    +        '1.3.36.3.3.2.8.1.1.3': 'brainpoolp192r1',
    +        '1.3.36.3.3.2.8.1.1.4': 'brainpoolp192t1',
    +        '1.3.36.3.3.2.8.1.1.5': 'brainpoolp224r1',
    +        '1.3.36.3.3.2.8.1.1.6': 'brainpoolp224t1',
    +        '1.3.36.3.3.2.8.1.1.7': 'brainpoolp256r1',
    +        '1.3.36.3.3.2.8.1.1.8': 'brainpoolp256t1',
    +        '1.3.36.3.3.2.8.1.1.9': 'brainpoolp320r1',
    +        '1.3.36.3.3.2.8.1.1.10': 'brainpoolp320t1',
    +        '1.3.36.3.3.2.8.1.1.11': 'brainpoolp384r1',
    +        '1.3.36.3.3.2.8.1.1.12': 'brainpoolp384t1',
    +        '1.3.36.3.3.2.8.1.1.13': 'brainpoolp512r1',
    +        '1.3.36.3.3.2.8.1.1.14': 'brainpoolp512t1',
    +    }
    +
    +    _key_sizes = {
    +        # Order values used to compute these sourced from
    +        # http://cr.openjdk.java.net/~vinnie/7194075/webrev-3/src/share/classes/sun/security/ec/CurveDB.java.html
    +        '1.2.840.10045.3.0.1': 21,
    +        '1.2.840.10045.3.0.2': 21,
    +        '1.2.840.10045.3.0.3': 21,
    +        '1.2.840.10045.3.0.4': 21,
    +        '1.2.840.10045.3.0.5': 24,
    +        '1.2.840.10045.3.0.6': 24,
    +        '1.2.840.10045.3.0.7': 24,
    +        '1.2.840.10045.3.0.8': 24,
    +        '1.2.840.10045.3.0.9': 24,
    +        '1.2.840.10045.3.0.10': 25,
    +        '1.2.840.10045.3.0.11': 30,
    +        '1.2.840.10045.3.0.12': 30,
    +        '1.2.840.10045.3.0.13': 30,
    +        '1.2.840.10045.3.0.14': 30,
    +        '1.2.840.10045.3.0.15': 30,
    +        '1.2.840.10045.3.0.16': 33,
    +        '1.2.840.10045.3.0.17': 37,
    +        '1.2.840.10045.3.0.18': 45,
    +        '1.2.840.10045.3.0.19': 45,
    +        '1.2.840.10045.3.0.20': 53,
    +        '1.2.840.10045.3.1.2': 24,
    +        '1.2.840.10045.3.1.3': 24,
    +        '1.2.840.10045.3.1.4': 30,
    +        '1.2.840.10045.3.1.5': 30,
    +        '1.2.840.10045.3.1.6': 30,
    +        # Order values used to compute these sourced from
    +        # http://www.secg.org/SEC2-Ver-1.0.pdf
    +        # ceil(n.bit_length() / 8)
    +        '1.2.840.10045.3.1.1': 24,
    +        '1.2.840.10045.3.1.7': 32,
    +        '1.3.132.0.1': 21,
    +        '1.3.132.0.2': 21,
    +        '1.3.132.0.3': 30,
    +        '1.3.132.0.4': 15,
    +        '1.3.132.0.5': 15,
    +        '1.3.132.0.6': 14,
    +        '1.3.132.0.7': 14,
    +        '1.3.132.0.8': 21,
    +        '1.3.132.0.9': 21,
    +        '1.3.132.0.10': 32,
    +        '1.3.132.0.15': 21,
    +        '1.3.132.0.16': 36,
    +        '1.3.132.0.17': 36,
    +        '1.3.132.0.22': 17,
    +        '1.3.132.0.23': 17,
    +        '1.3.132.0.24': 25,
    +        '1.3.132.0.25': 25,
    +        '1.3.132.0.26': 29,
    +        '1.3.132.0.27': 30,
    +        '1.3.132.0.28': 16,
    +        '1.3.132.0.29': 16,
    +        '1.3.132.0.30': 21,
    +        '1.3.132.0.31': 24,
    +        '1.3.132.0.32': 29,
    +        '1.3.132.0.33': 28,
    +        '1.3.132.0.34': 48,
    +        '1.3.132.0.35': 66,
    +        '1.3.132.0.36': 51,
    +        '1.3.132.0.37': 52,
    +        '1.3.132.0.38': 72,
    +        '1.3.132.0.39': 72,
    +        # Order values used to compute these sourced from
    +        # https://tools.ietf.org/html/rfc5639#section-3
    +        # ceil(q.bit_length() / 8)
    +        '1.3.36.3.3.2.8.1.1.1': 20,
    +        '1.3.36.3.3.2.8.1.1.2': 20,
    +        '1.3.36.3.3.2.8.1.1.3': 24,
    +        '1.3.36.3.3.2.8.1.1.4': 24,
    +        '1.3.36.3.3.2.8.1.1.5': 28,
    +        '1.3.36.3.3.2.8.1.1.6': 28,
    +        '1.3.36.3.3.2.8.1.1.7': 32,
    +        '1.3.36.3.3.2.8.1.1.8': 32,
    +        '1.3.36.3.3.2.8.1.1.9': 40,
    +        '1.3.36.3.3.2.8.1.1.10': 40,
    +        '1.3.36.3.3.2.8.1.1.11': 48,
    +        '1.3.36.3.3.2.8.1.1.12': 48,
    +        '1.3.36.3.3.2.8.1.1.13': 64,
    +        '1.3.36.3.3.2.8.1.1.14': 64,
    +    }
    +
    +    @classmethod
    +    def register(cls, name, oid, key_size):
    +        """
    +        Registers a new named elliptic curve that is not included in the
    +        default list of named curves
    +
    +        :param name:
    +            A unicode string of the curve name
    +
    +        :param oid:
    +            A unicode string of the dotted format OID
    +
    +        :param key_size:
    +            An integer of the number of bytes the private key should be
    +            encoded to
    +        """
    +
    +        cls._map[oid] = name
    +        if cls._reverse_map is not None:
    +            cls._reverse_map[name] = oid
    +        cls._key_sizes[oid] = key_size
    +
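+# Illustrative use of register() for a curve absent from the table above (the
+# OID and byte length shown are believed correct for SM2's sm2p256v1):
+#
+#   NamedCurve.register('sm2p256v1', '1.2.156.10197.1.301', 32)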
    +
    +class ECDomainParameters(Choice):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 102
    +    """
    +
    +    _alternatives = [
    +        ('specified', SpecifiedECDomain),
    +        ('named', NamedCurve),
    +        ('implicit_ca', Null),
    +    ]
    +
    +    @property
    +    def key_size(self):
    +        if self.name == 'implicit_ca':
    +            raise ValueError(unwrap(
    +                '''
    +                Unable to calculate key_size from ECDomainParameters
    +                that are implicitly defined by the CA key
    +                '''
    +            ))
    +
    +        if self.name == 'specified':
    +            order = self.chosen['order'].native
    +            return math.ceil(math.log(order, 2.0) / 8.0)
    +
    +        oid = self.chosen.dotted
    +        if oid not in NamedCurve._key_sizes:
    +            raise ValueError(unwrap(
    +                '''
+                The asn1crypto.keys.NamedCurve %s does not have a registered key length;
    +                please call asn1crypto.keys.NamedCurve.register()
    +                ''',
    +                repr(oid)
    +            ))
    +        return NamedCurve._key_sizes[oid]
    +
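+# e.g. when the choice holds the named curve 'secp256r1', key_size above
+# evaluates to 32 (illustrative note)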
    +
    +class ECPrivateKeyVersion(Integer):
    +    """
    +    Original Name: None
    +    Source: http://www.secg.org/sec1-v2.pdf page 108
    +    """
    +
    +    _map = {
    +        1: 'ecPrivkeyVer1',
    +    }
    +
    +
    +class ECPrivateKey(Sequence):
    +    """
    +    Source: http://www.secg.org/sec1-v2.pdf page 108
    +    """
    +
    +    _fields = [
    +        ('version', ECPrivateKeyVersion),
    +        ('private_key', IntegerOctetString),
    +        ('parameters', ECDomainParameters, {'explicit': 0, 'optional': True}),
    +        ('public_key', ECPointBitString, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +    # Ensures the key is set to the correct length when encoding
    +    _key_size = None
    +
    +    # This is necessary to ensure the private_key IntegerOctetString is encoded properly
    +    def __setitem__(self, key, value):
    +        res = super(ECPrivateKey, self).__setitem__(key, value)
    +
    +        if key == 'private_key':
    +            if self._key_size is None:
    +                # Infer the key_size from the existing private key if possible
    +                pkey_contents = self['private_key'].contents
    +                if isinstance(pkey_contents, byte_cls) and len(pkey_contents) > 1:
    +                    self.set_key_size(len(self['private_key'].contents))
    +
    +            elif self._key_size is not None:
    +                self._update_key_size()
    +
    +        elif key == 'parameters' and isinstance(self['parameters'], ECDomainParameters) and \
    +                self['parameters'].name != 'implicit_ca':
    +            self.set_key_size(self['parameters'].key_size)
    +
    +        return res
    +
    +    def set_key_size(self, key_size):
    +        """
    +        Sets the key_size to ensure the private key is encoded to the proper length
    +
    +        :param key_size:
    +            An integer byte length to encode the private_key to
    +        """
    +
    +        self._key_size = key_size
    +        self._update_key_size()
    +
    +    def _update_key_size(self):
    +        """
    +        Ensure the private_key explicit encoding width is set
    +        """
    +
    +        if self._key_size is not None and isinstance(self['private_key'], IntegerOctetString):
    +            self['private_key'].set_encoded_width(self._key_size)
    +
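+# For example, a secp256r1 private scalar with high-order zero bytes must
+# still encode to 32 octets; ECPrivateKey.set_key_size(32) preserves that
+# fixed width.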
    +
    +class DSAParams(Sequence):
    +    """
    +    Parameters for a DSA public or private key
    +
    +    Original Name: Dss-Parms
    +    Source: https://tools.ietf.org/html/rfc3279#page-9
    +    """
    +
    +    _fields = [
    +        ('p', Integer),
    +        ('q', Integer),
    +        ('g', Integer),
    +    ]
    +
    +
    +class Attribute(Sequence):
    +    """
    +    Source: https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.501-198811-S!!PDF-E&type=items page 8
    +    """
    +
    +    _fields = [
    +        ('type', ObjectIdentifier),
    +        ('values', SetOf, {'spec': Any}),
    +    ]
    +
    +
    +class Attributes(SetOf):
    +    """
    +    Source: https://tools.ietf.org/html/rfc5208#page-3
    +    """
    +
    +    _child_spec = Attribute
    +
    +
    +class PrivateKeyAlgorithmId(ObjectIdentifier):
    +    """
    +    These OIDs for various public keys are reused when storing private keys
    +    inside of a PKCS#8 structure
    +
    +    Original Name: None
    +    Source: https://tools.ietf.org/html/rfc3279
    +    """
    +
    +    _map = {
    +        # https://tools.ietf.org/html/rfc3279#page-19
    +        '1.2.840.113549.1.1.1': 'rsa',
    +        # https://tools.ietf.org/html/rfc4055#page-8
    +        '1.2.840.113549.1.1.10': 'rsassa_pss',
    +        # https://tools.ietf.org/html/rfc3279#page-18
    +        '1.2.840.10040.4.1': 'dsa',
    +        # https://tools.ietf.org/html/rfc3279#page-13
    +        '1.2.840.10045.2.1': 'ec',
    +    }
    +
    +
    +class PrivateKeyAlgorithm(_ForceNullParameters, Sequence):
    +    """
    +    Original Name: PrivateKeyAlgorithmIdentifier
    +    Source: https://tools.ietf.org/html/rfc5208#page-3
    +    """
    +
    +    _fields = [
    +        ('algorithm', PrivateKeyAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'dsa': DSAParams,
    +        'ec': ECDomainParameters,
    +        'rsassa_pss': RSASSAPSSParams,
    +    }
    +
    +
    +class PrivateKeyInfo(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc5208#page-3
    +    """
    +
    +    _fields = [
    +        ('version', Integer),
    +        ('private_key_algorithm', PrivateKeyAlgorithm),
    +        ('private_key', ParsableOctetString),
    +        ('attributes', Attributes, {'implicit': 0, 'optional': True}),
    +    ]
    +
    +    def _private_key_spec(self):
    +        algorithm = self['private_key_algorithm']['algorithm'].native
    +        return {
    +            'rsa': RSAPrivateKey,
    +            'rsassa_pss': RSAPrivateKey,
    +            'dsa': Integer,
    +            'ec': ECPrivateKey,
    +        }[algorithm]
    +
    +    _spec_callbacks = {
    +        'private_key': _private_key_spec
    +    }
    +
    +    _algorithm = None
    +    _bit_size = None
    +    _public_key = None
    +    _fingerprint = None
    +
    +    @classmethod
    +    def wrap(cls, private_key, algorithm):
    +        """
    +        Wraps a private key in a PrivateKeyInfo structure
    +
    +        :param private_key:
    +            A byte string or Asn1Value object of the private key
    +
    +        :param algorithm:
    +            A unicode string of "rsa", "dsa" or "ec"
    +
    +        :return:
    +            A PrivateKeyInfo object
    +        """
    +
    +        if not isinstance(private_key, byte_cls) and not isinstance(private_key, Asn1Value):
    +            raise TypeError(unwrap(
    +                '''
    +                private_key must be a byte string or Asn1Value, not %s
    +                ''',
    +                type_name(private_key)
    +            ))
    +
    +        if algorithm == 'rsa':
    +            if not isinstance(private_key, RSAPrivateKey):
    +                private_key = RSAPrivateKey.load(private_key)
    +            params = Null()
    +        elif algorithm == 'dsa':
    +            if not isinstance(private_key, DSAPrivateKey):
    +                private_key = DSAPrivateKey.load(private_key)
    +            params = DSAParams()
    +            params['p'] = private_key['p']
    +            params['q'] = private_key['q']
    +            params['g'] = private_key['g']
    +            public_key = private_key['public_key']
    +            private_key = private_key['private_key']
    +        elif algorithm == 'ec':
    +            if not isinstance(private_key, ECPrivateKey):
    +                private_key = ECPrivateKey.load(private_key)
    +            else:
    +                private_key = private_key.copy()
    +            params = private_key['parameters']
    +            del private_key['parameters']
    +        else:
    +            raise ValueError(unwrap(
    +                '''
    +                algorithm must be one of "rsa", "dsa", "ec", not %s
    +                ''',
    +                repr(algorithm)
    +            ))
    +
    +        private_key_algo = PrivateKeyAlgorithm()
    +        private_key_algo['algorithm'] = PrivateKeyAlgorithmId(algorithm)
    +        private_key_algo['parameters'] = params
    +
    +        container = cls()
    +        container._algorithm = algorithm
    +        container['version'] = Integer(0)
    +        container['private_key_algorithm'] = private_key_algo
    +        container['private_key'] = private_key
    +
    +        # Here we save the DSA public key if possible since it is not contained
    +        # within the PKCS#8 structure for a DSA key
    +        if algorithm == 'dsa':
    +            container._public_key = public_key
    +
    +        return container
    +
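+    # Usage sketch (illustrative; `rsa_der` is assumed to hold a DER-encoded
+    # PKCS#1 RSAPrivateKey):
+    #
+    #   info = PrivateKeyInfo.wrap(rsa_der, 'rsa')
+    #   pkcs8_der = info.dump()
+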
    +    # This is necessary to ensure any contained ECPrivateKey is the
    +    # correct size
    +    def __setitem__(self, key, value):
    +        res = super(PrivateKeyInfo, self).__setitem__(key, value)
    +
    +        algorithm = self['private_key_algorithm']
    +
    +        # When possible, use the parameter info to make sure the private key encoding
    +        # retains any necessary leading bytes, instead of them being dropped
    +        if (key == 'private_key_algorithm' or key == 'private_key') and \
    +                algorithm['algorithm'].native == 'ec' and \
    +                isinstance(algorithm['parameters'], ECDomainParameters) and \
    +                algorithm['parameters'].name != 'implicit_ca' and \
    +                isinstance(self['private_key'], ParsableOctetString) and \
    +                isinstance(self['private_key'].parsed, ECPrivateKey):
    +            self['private_key'].parsed.set_key_size(algorithm['parameters'].key_size)
    +
    +        return res
    +
    +    def unwrap(self):
    +        """
    +        Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or
    +        ECPrivateKey object
    +
    +        :return:
    +            An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PrivateKeyInfo().unwrap() has been removed, '
    +            'please use oscrypto.asymmetric.PrivateKey().unwrap() instead')
    +
    +    @property
    +    def curve(self):
    +        """
    +        Returns information about the curve used for an EC key
    +
    +        :raises:
    +            ValueError - when the key is not an EC key
    +
    +        :return:
    +            A two-element tuple, with the first element being a unicode string
    +            of "implicit_ca", "specified" or "named". If the first element is
    +            "implicit_ca", the second is None. If "specified", the second is
    +            an OrderedDict that is the native version of SpecifiedECDomain. If
    +            "named", the second is a unicode string of the curve name.
    +        """
    +
    +        if self.algorithm != 'ec':
    +            raise ValueError(unwrap(
    +                '''
+                Only EC keys have a curve; this key is %s
    +                ''',
    +                self.algorithm.upper()
    +            ))
    +
    +        params = self['private_key_algorithm']['parameters']
    +        chosen = params.chosen
    +
    +        if params.name == 'implicit_ca':
    +            value = None
    +        else:
    +            value = chosen.native
    +
    +        return (params.name, value)
    +
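+    # e.g. a key on P-256 yields ('named', 'secp256r1') from the curve
+    # property above (illustrative note)
+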
    +    @property
    +    def hash_algo(self):
    +        """
    +        Returns the name of the family of hash algorithms used to generate a
    +        DSA key
    +
    +        :raises:
    +            ValueError - when the key is not a DSA key
    +
    +        :return:
    +            A unicode string of "sha1" or "sha2"
    +        """
    +
    +        if self.algorithm != 'dsa':
    +            raise ValueError(unwrap(
    +                '''
+                Only DSA keys are generated using a hash algorithm; this key is
    +                %s
    +                ''',
    +                self.algorithm.upper()
    +            ))
    +
    +        byte_len = math.log(self['private_key_algorithm']['parameters']['q'].native, 2) / 8
    +
    +        return 'sha1' if byte_len <= 20 else 'sha2'
    +
    +    @property
    +    def algorithm(self):
    +        """
    +        :return:
    +            A unicode string of "rsa", "dsa" or "ec"
    +        """
    +
    +        if self._algorithm is None:
    +            self._algorithm = self['private_key_algorithm']['algorithm'].native
    +        return self._algorithm
    +
    +    @property
    +    def bit_size(self):
    +        """
    +        :return:
    +            The bit size of the private key, as an integer
    +        """
    +
    +        if self._bit_size is None:
    +            if self.algorithm == 'rsa':
    +                prime = self['private_key'].parsed['modulus'].native
    +            elif self.algorithm == 'dsa':
    +                prime = self['private_key_algorithm']['parameters']['p'].native
    +            elif self.algorithm == 'ec':
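+                # note: for EC this measures the private scalar itself, so the
+                # result can be smaller than the curve order's bit size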
    +                prime = self['private_key'].parsed['private_key'].native
    +            self._bit_size = int(math.ceil(math.log(prime, 2)))
    +            modulus = self._bit_size % 8
    +            if modulus != 0:
    +                self._bit_size += 8 - modulus
    +        return self._bit_size
    +
    +    @property
    +    def byte_size(self):
    +        """
    +        :return:
    +            The byte size of the private key, as an integer
    +        """
    +
    +        return int(math.ceil(self.bit_size / 8))
    +
    +    @property
    +    def public_key(self):
    +        """
    +        :return:
    +            If an RSA key, an RSAPublicKey object. If a DSA key, an Integer
    +            object. If an EC key, an ECPointBitString object.
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PrivateKeyInfo().public_key has been removed, '
    +            'please use oscrypto.asymmetric.PrivateKey().public_key.unwrap() instead')
    +
    +    @property
    +    def public_key_info(self):
    +        """
    +        :return:
    +            A PublicKeyInfo object derived from this private key.
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PrivateKeyInfo().public_key_info has been removed, '
    +            'please use oscrypto.asymmetric.PrivateKey().public_key.asn1 instead')
    +
    +    @property
    +    def fingerprint(self):
    +        """
    +        Creates a fingerprint that can be compared with a public key to see if
    +        the two form a pair.
    +
    +        This fingerprint is not compatible with fingerprints generated by any
    +        other software.
    +
    +        :return:
    +            A byte string that is a sha256 hash of selected components (based
    +            on the key type)
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PrivateKeyInfo().fingerprint has been removed, '
    +            'please use oscrypto.asymmetric.PrivateKey().fingerprint instead')
    +
    +
    +class EncryptedPrivateKeyInfo(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc5208#page-4
    +    """
    +
    +    _fields = [
    +        ('encryption_algorithm', EncryptionAlgorithm),
    +        ('encrypted_data', OctetString),
    +    ]
    +
    +
    +# These structures are from https://tools.ietf.org/html/rfc3279
    +
    +class ValidationParms(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3279#page-10
    +    """
    +
    +    _fields = [
    +        ('seed', BitString),
    +        ('pgen_counter', Integer),
    +    ]
    +
    +
    +class DomainParameters(Sequence):
    +    """
    +    Source: https://tools.ietf.org/html/rfc3279#page-10
    +    """
    +
    +    _fields = [
    +        ('p', Integer),
    +        ('g', Integer),
    +        ('q', Integer),
    +        ('j', Integer, {'optional': True}),
    +        ('validation_params', ValidationParms, {'optional': True}),
    +    ]
    +
    +
    +class PublicKeyAlgorithmId(ObjectIdentifier):
    +    """
    +    Original Name: None
    +    Source: https://tools.ietf.org/html/rfc3279
    +    """
    +
    +    _map = {
    +        # https://tools.ietf.org/html/rfc3279#page-19
    +        '1.2.840.113549.1.1.1': 'rsa',
    +        # https://tools.ietf.org/html/rfc3447#page-47
    +        '1.2.840.113549.1.1.7': 'rsaes_oaep',
    +        # https://tools.ietf.org/html/rfc4055#page-8
    +        '1.2.840.113549.1.1.10': 'rsassa_pss',
    +        # https://tools.ietf.org/html/rfc3279#page-18
    +        '1.2.840.10040.4.1': 'dsa',
    +        # https://tools.ietf.org/html/rfc3279#page-13
    +        '1.2.840.10045.2.1': 'ec',
    +        # https://tools.ietf.org/html/rfc3279#page-10
    +        '1.2.840.10046.2.1': 'dh',
    +    }
    +
    +
    +class PublicKeyAlgorithm(_ForceNullParameters, Sequence):
    +    """
    +    Original Name: AlgorithmIdentifier
    +    Source: https://tools.ietf.org/html/rfc5280#page-18
    +    """
    +
    +    _fields = [
    +        ('algorithm', PublicKeyAlgorithmId),
    +        ('parameters', Any, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('algorithm', 'parameters')
    +    _oid_specs = {
    +        'dsa': DSAParams,
    +        'ec': ECDomainParameters,
    +        'dh': DomainParameters,
    +        'rsaes_oaep': RSAESOAEPParams,
    +        'rsassa_pss': RSASSAPSSParams,
    +    }
    +
    +
    +class PublicKeyInfo(Sequence):
    +    """
    +    Original Name: SubjectPublicKeyInfo
    +    Source: https://tools.ietf.org/html/rfc5280#page-17
    +    """
    +
    +    _fields = [
    +        ('algorithm', PublicKeyAlgorithm),
    +        ('public_key', ParsableOctetBitString),
    +    ]
    +
    +    def _public_key_spec(self):
    +        algorithm = self['algorithm']['algorithm'].native
    +        return {
    +            'rsa': RSAPublicKey,
    +            'rsaes_oaep': RSAPublicKey,
    +            'rsassa_pss': RSAPublicKey,
    +            'dsa': Integer,
+            # We override the field spec with ECPointBitString so that users
+            # can easily decompose the byte string into the X and Y coordinates
    +            'ec': (ECPointBitString, None),
    +            'dh': Integer,
    +        }[algorithm]
    +
    +    _spec_callbacks = {
    +        'public_key': _public_key_spec
    +    }
    +
    +    _algorithm = None
    +    _bit_size = None
    +    _fingerprint = None
    +    _sha1 = None
    +    _sha256 = None
    +
    +    @classmethod
    +    def wrap(cls, public_key, algorithm):
    +        """
    +        Wraps a public key in a PublicKeyInfo structure
    +
    +        :param public_key:
    +            A byte string or Asn1Value object of the public key
    +
    +        :param algorithm:
    +            A unicode string of "rsa"
    +
    +        :return:
    +            A PublicKeyInfo object
    +        """
    +
    +        if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
    +            raise TypeError(unwrap(
    +                '''
    +                public_key must be a byte string or Asn1Value, not %s
    +                ''',
    +                type_name(public_key)
    +            ))
    +
    +        if algorithm != 'rsa':
    +            raise ValueError(unwrap(
    +                '''
+                algorithm must be "rsa", not %s
    +                ''',
    +                repr(algorithm)
    +            ))
    +
    +        algo = PublicKeyAlgorithm()
    +        algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
    +        algo['parameters'] = Null()
    +
    +        container = cls()
    +        container['algorithm'] = algo
    +        if isinstance(public_key, Asn1Value):
    +            public_key = public_key.untag().dump()
    +        container['public_key'] = ParsableOctetBitString(public_key)
    +
    +        return container
    +
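+    # Usage sketch (illustrative; `rsa_pub_der` is assumed to hold a
+    # DER-encoded PKCS#1 RSAPublicKey):
+    #
+    #   info = PublicKeyInfo.wrap(rsa_pub_der, 'rsa')
+    #   spki_der = info.dump()
+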
    +    def unwrap(self):
    +        """
    +        Unwraps an RSA public key into an RSAPublicKey object. Does not support
    +        DSA or EC public keys since they do not have an unwrapped form.
    +
    +        :return:
    +            An RSAPublicKey object
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PublicKeyInfo().unwrap() has been removed, '
    +            'please use oscrypto.asymmetric.PublicKey().unwrap() instead')
    +
    +    @property
    +    def curve(self):
    +        """
    +        Returns information about the curve used for an EC key
    +
    +        :raises:
    +            ValueError - when the key is not an EC key
    +
    +        :return:
    +            A two-element tuple, with the first element being a unicode string
    +            of "implicit_ca", "specified" or "named". If the first element is
    +            "implicit_ca", the second is None. If "specified", the second is
    +            an OrderedDict that is the native version of SpecifiedECDomain. If
    +            "named", the second is a unicode string of the curve name.
    +        """
    +
    +        if self.algorithm != 'ec':
    +            raise ValueError(unwrap(
    +                '''
+                Only EC keys have a curve; this key is %s
    +                ''',
    +                self.algorithm.upper()
    +            ))
    +
    +        params = self['algorithm']['parameters']
    +        chosen = params.chosen
    +
    +        if params.name == 'implicit_ca':
    +            value = None
    +        else:
    +            value = chosen.native
    +
    +        return (params.name, value)
    +
    +    @property
    +    def hash_algo(self):
    +        """
    +        Returns the name of the family of hash algorithms used to generate a
    +        DSA key
    +
    +        :raises:
    +            ValueError - when the key is not a DSA key
    +
    +        :return:
    +            A unicode string of "sha1" or "sha2" or None if no parameters are
    +            present
    +        """
    +
    +        if self.algorithm != 'dsa':
    +            raise ValueError(unwrap(
    +                '''
+                Only DSA keys are generated using a hash algorithm; this key is
    +                %s
    +                ''',
    +                self.algorithm.upper()
    +            ))
    +
    +        parameters = self['algorithm']['parameters']
    +        if parameters.native is None:
    +            return None
    +
    +        byte_len = math.log(parameters['q'].native, 2) / 8
    +
    +        return 'sha1' if byte_len <= 20 else 'sha2'
    +
    +    @property
    +    def algorithm(self):
    +        """
    +        :return:
    +            A unicode string of "rsa", "dsa" or "ec"
    +        """
    +
    +        if self._algorithm is None:
    +            self._algorithm = self['algorithm']['algorithm'].native
    +        return self._algorithm
    +
    +    @property
    +    def bit_size(self):
    +        """
    +        :return:
    +            The bit size of the public key, as an integer
    +        """
    +
    +        if self._bit_size is None:
    +            if self.algorithm == 'ec':
+                self._bit_size = ((len(self['public_key'].native) - 1) // 2) * 8
    +            else:
    +                if self.algorithm == 'rsa':
    +                    prime = self['public_key'].parsed['modulus'].native
    +                elif self.algorithm == 'dsa':
    +                    prime = self['algorithm']['parameters']['p'].native
    +                self._bit_size = int(math.ceil(math.log(prime, 2)))
    +                modulus = self._bit_size % 8
    +                if modulus != 0:
    +                    self._bit_size += 8 - modulus
    +
    +        return self._bit_size
    +
    +    @property
    +    def byte_size(self):
    +        """
    +        :return:
    +            The byte size of the public key, as an integer
    +        """
    +
    +        return int(math.ceil(self.bit_size / 8))
    +
    +    @property
    +    def sha1(self):
    +        """
    +        :return:
+            The SHA1 hash of the public key bytes (the contents of the
+            subjectPublicKey bit string, as used for RFC 5280 key identifiers)
    +        """
    +
    +        if self._sha1 is None:
    +            self._sha1 = hashlib.sha1(byte_cls(self['public_key'])).digest()
    +        return self._sha1
    +
    +    @property
    +    def sha256(self):
    +        """
    +        :return:
+            The SHA-256 hash of the public key bytes (the contents of the
+            subjectPublicKey bit string)
    +        """
    +
    +        if self._sha256 is None:
    +            self._sha256 = hashlib.sha256(byte_cls(self['public_key'])).digest()
    +        return self._sha256
    +
    +    @property
    +    def fingerprint(self):
    +        """
    +        Creates a fingerprint that can be compared with a private key to see if
    +        the two form a pair.
    +
    +        This fingerprint is not compatible with fingerprints generated by any
    +        other software.
    +
    +        :return:
    +            A byte string that is a sha256 hash of selected components (based
    +            on the key type)
    +        """
    +
    +        raise APIException(
    +            'asn1crypto.keys.PublicKeyInfo().fingerprint has been removed, '
    +            'please use oscrypto.asymmetric.PublicKey().fingerprint instead')
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/ocsp.py b/server/www/packages/packages-windows/x86/asn1crypto/ocsp.py
    new file mode 100644
    index 0000000..91c7fbf
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/ocsp.py
    @@ -0,0 +1,703 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for the online certificate status protocol (OCSP). Exports
    +the following items:
    +
    + - OCSPRequest()
    + - OCSPResponse()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from ._errors import unwrap
    +from .algos import DigestAlgorithm, SignedDigestAlgorithm
    +from .core import (
    +    Boolean,
    +    Choice,
    +    Enumerated,
    +    GeneralizedTime,
    +    IA5String,
    +    Integer,
    +    Null,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    OctetString,
    +    ParsableOctetString,
    +    Sequence,
    +    SequenceOf,
    +)
    +from .crl import AuthorityInfoAccessSyntax, CRLReason
    +from .keys import PublicKeyAlgorithm
    +from .x509 import Certificate, GeneralName, GeneralNames, Name
    +
    +
    +# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
    +
    +
    +class Version(Integer):
    +    _map = {
    +        0: 'v1'
    +    }
    +
    +
    +class CertId(Sequence):
    +    _fields = [
    +        ('hash_algorithm', DigestAlgorithm),
    +        ('issuer_name_hash', OctetString),
    +        ('issuer_key_hash', OctetString),
    +        ('serial_number', Integer),
    +    ]
    +
    +
    +class ServiceLocator(Sequence):
    +    _fields = [
    +        ('issuer', Name),
    +        ('locator', AuthorityInfoAccessSyntax),
    +    ]
    +
    +
    +class RequestExtensionId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1.7': 'service_locator',
    +    }
    +
    +
    +class RequestExtension(Sequence):
    +    _fields = [
    +        ('extn_id', RequestExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'service_locator': ServiceLocator,
    +    }
    +
    +
    +class RequestExtensions(SequenceOf):
    +    _child_spec = RequestExtension
    +
    +
    +class Request(Sequence):
    +    _fields = [
    +        ('req_cert', CertId),
    +        ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _service_locator_value = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['single_request_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
    +        Returns a set of the names (or OID if not a known extension) of the
    +        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def service_locator_value(self):
    +        """
    +        This extension is used when communicating with an OCSP responder that
    +        acts as a proxy for OCSP requests
    +
    +        :return:
    +            None or a ServiceLocator object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._service_locator_value
    +
    +
    +class Requests(SequenceOf):
    +    _child_spec = Request
    +
    +
    +class ResponseType(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
    +    }
    +
    +
    +class AcceptableResponses(SequenceOf):
    +    _child_spec = ResponseType
    +
    +
    +class PreferredSignatureAlgorithm(Sequence):
    +    _fields = [
    +        ('sig_identifier', SignedDigestAlgorithm),
    +        ('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
    +    ]
    +
    +
    +class PreferredSignatureAlgorithms(SequenceOf):
    +    _child_spec = PreferredSignatureAlgorithm
    +
    +
    +class TBSRequestExtensionId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1.2': 'nonce',
    +        '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
    +        '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
    +    }
    +
    +
    +class TBSRequestExtension(Sequence):
    +    _fields = [
    +        ('extn_id', TBSRequestExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'nonce': OctetString,
    +        'acceptable_responses': AcceptableResponses,
    +        'preferred_signature_algorithms': PreferredSignatureAlgorithms,
    +    }
    +
    +
    +class TBSRequestExtensions(SequenceOf):
    +    _child_spec = TBSRequestExtension
    +
    +
    +class TBSRequest(Sequence):
    +    _fields = [
    +        ('version', Version, {'explicit': 0, 'default': 'v1'}),
    +        ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
    +        ('request_list', Requests),
    +        ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class Certificates(SequenceOf):
    +    _child_spec = Certificate
    +
    +
    +class Signature(Sequence):
    +    _fields = [
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class OCSPRequest(Sequence):
    +    _fields = [
    +        ('tbs_request', TBSRequest),
    +        ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _nonce_value = None
    +    _acceptable_responses_value = None
    +    _preferred_signature_algorithms_value = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['tbs_request']['request_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
    +        Returns a set of the names (or OID if not a known extension) of the
    +        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def nonce_value(self):
    +        """
    +        This extension is used to prevent replay attacks by including a unique,
    +        random value with each request/response pair
    +
    +        :return:
    +            None or an OctetString object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._nonce_value
    +
    +    @property
    +    def acceptable_responses_value(self):
    +        """
    +        This extension is used to allow the client and server to communicate
    +        with alternative response formats other than just basic_ocsp_response,
    +        although no other formats are defined in the standard.
    +
    +        :return:
    +            None or an AcceptableResponses object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._acceptable_responses_value
    +
    +    @property
    +    def preferred_signature_algorithms_value(self):
    +        """
    +        This extension is used by the client to define what signature algorithms
    +        are preferred, including both the hash algorithm and the public key
    +        algorithm, with a level of detail down to even the public key algorithm
    +        parameters, such as curve name.
    +
    +        :return:
    +            None or a PreferredSignatureAlgorithms object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._preferred_signature_algorithms_value
    +
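+# Usage sketch (illustrative; `req_der` is assumed to hold a DER-encoded OCSP
+# request):
+#
+#   req = OCSPRequest.load(req_der)
+#   nonce = req.nonce_value.native if req.nonce_value else None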
    +
    +class OCSPResponseStatus(Enumerated):
    +    _map = {
    +        0: 'successful',
    +        1: 'malformed_request',
    +        2: 'internal_error',
    +        3: 'try_later',
    +        5: 'sign_required',
    +        6: 'unauthorized',
    +    }
    +
    +
    +class ResponderId(Choice):
    +    _alternatives = [
    +        ('by_name', Name, {'explicit': 1}),
    +        ('by_key', OctetString, {'explicit': 2}),
    +    ]
    +
    +
    +# Custom class to return a meaningful .native attribute from CertStatus()
    +class StatusGood(Null):
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            None or 'good'
    +        """
    +
    +        if value is not None and value != 'good' and not isinstance(value, Null):
    +            raise ValueError(unwrap(
    +                '''
+                value must be None or "good", not %s
    +                ''',
    +                repr(value)
    +            ))
    +
    +        self.contents = b''
    +
    +    @property
    +    def native(self):
    +        return 'good'
    +
    +
    +# Custom class to return a meaningful .native attribute from CertStatus()
    +class StatusUnknown(Null):
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            None or 'unknown'
    +        """
    +
    +        if value is not None and value != 'unknown' and not isinstance(value, Null):
    +            raise ValueError(unwrap(
    +                '''
+                value must be None or "unknown", not %s
    +                ''',
    +                repr(value)
    +            ))
    +
    +        self.contents = b''
    +
    +    @property
    +    def native(self):
    +        return 'unknown'
    +
    +
    +class RevokedInfo(Sequence):
    +    _fields = [
    +        ('revocation_time', GeneralizedTime),
    +        ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class CertStatus(Choice):
    +    _alternatives = [
    +        ('good', StatusGood, {'implicit': 0}),
    +        ('revoked', RevokedInfo, {'implicit': 1}),
    +        ('unknown', StatusUnknown, {'implicit': 2}),
    +    ]
    +
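+# The StatusGood/StatusUnknown wrappers above give CertStatus a meaningful
+# .native value: 'good', 'unknown', or the RevokedInfo dict when revoked.
+# Illustrative check, assuming `single` is a parsed SingleResponse:
+#
+#   status = single['cert_status']
+#   if status.name == 'revoked':
+#       revoked_at = status.chosen['revocation_time'].native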
    +
    +class CrlId(Sequence):
    +    _fields = [
    +        ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
    +        ('crl_num', Integer, {'explicit': 1, 'optional': True}),
    +        ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class SingleResponseExtensionId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1.3': 'crl',
    +        '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
    +        # These are CRLEntryExtension values from
    +        # https://tools.ietf.org/html/rfc5280
    +        '2.5.29.21': 'crl_reason',
    +        '2.5.29.24': 'invalidity_date',
    +        '2.5.29.29': 'certificate_issuer',
    +        # https://tools.ietf.org/html/rfc6962.html#page-13
    +        '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
    +    }
    +
    +
    +class SingleResponseExtension(Sequence):
    +    _fields = [
    +        ('extn_id', SingleResponseExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'crl': CrlId,
    +        'archive_cutoff': GeneralizedTime,
    +        'crl_reason': CRLReason,
    +        'invalidity_date': GeneralizedTime,
    +        'certificate_issuer': GeneralNames,
    +        'signed_certificate_timestamp_list': OctetString,
    +    }
    +
    +
    +class SingleResponseExtensions(SequenceOf):
    +    _child_spec = SingleResponseExtension
    +
    +
    +class SingleResponse(Sequence):
    +    _fields = [
    +        ('cert_id', CertId),
    +        ('cert_status', CertStatus),
    +        ('this_update', GeneralizedTime),
    +        ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
    +        ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _crl_value = None
    +    _archive_cutoff_value = None
    +    _crl_reason_value = None
    +    _invalidity_date_value = None
    +    _certificate_issuer_value = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['single_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
+        Returns a set of the names (or OIDs, for unknown extensions) of the
+        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def crl_value(self):
    +        """
    +        This extension is used to locate the CRL that a certificate's revocation
    +        is contained within.
    +
    +        :return:
    +            None or a CrlId object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._crl_value
    +
    +    @property
    +    def archive_cutoff_value(self):
    +        """
    +        This extension is used to indicate the date at which an archived
    +        (historical) certificate status entry will no longer be available.
    +
    +        :return:
    +            None or a GeneralizedTime object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._archive_cutoff_value
    +
    +    @property
    +    def crl_reason_value(self):
    +        """
    +        This extension indicates the reason that a certificate was revoked.
    +
    +        :return:
    +            None or a CRLReason object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._crl_reason_value
    +
    +    @property
    +    def invalidity_date_value(self):
    +        """
    +        This extension indicates the suspected date/time the private key was
    +        compromised or the certificate became invalid. This would usually be
    +        before the revocation date, which is when the CA processed the
    +        revocation.
    +
    +        :return:
    +            None or a GeneralizedTime object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._invalidity_date_value
    +
    +    @property
    +    def certificate_issuer_value(self):
    +        """
    +        This extension indicates the issuer of the certificate in question.
    +
    +        :return:
    +            None or an x509.GeneralNames object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._certificate_issuer_value
    +
    +
    +class Responses(SequenceOf):
    +    _child_spec = SingleResponse
    +
    +
    +class ResponseDataExtensionId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1.2': 'nonce',
    +        '1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
    +    }
    +
    +
    +class ResponseDataExtension(Sequence):
    +    _fields = [
    +        ('extn_id', ResponseDataExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'nonce': OctetString,
    +        'extended_revoke': Null,
    +    }
    +
    +
    +class ResponseDataExtensions(SequenceOf):
    +    _child_spec = ResponseDataExtension
    +
    +
    +class ResponseData(Sequence):
    +    _fields = [
    +        ('version', Version, {'explicit': 0, 'default': 'v1'}),
    +        ('responder_id', ResponderId),
    +        ('produced_at', GeneralizedTime),
    +        ('responses', Responses),
    +        ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class BasicOCSPResponse(Sequence):
    +    _fields = [
    +        ('tbs_response_data', ResponseData),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature', OctetBitString),
    +        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +
    +class ResponseBytes(Sequence):
    +    _fields = [
    +        ('response_type', ResponseType),
    +        ('response', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('response_type', 'response')
    +    _oid_specs = {
    +        'basic_ocsp_response': BasicOCSPResponse,
    +    }
    +
    +
    +class OCSPResponse(Sequence):
    +    _fields = [
    +        ('response_status', OCSPResponseStatus),
    +        ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
    +    ]
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _nonce_value = None
    +    _extended_revoke_value = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
+        Returns a set of the names (or OIDs, for unknown extensions) of the
+        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def nonce_value(self):
    +        """
    +        This extension is used to prevent replay attacks on the request/response
    +        exchange
    +
    +        :return:
    +            None or an OctetString object
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._nonce_value
    +
    +    @property
    +    def extended_revoke_value(self):
    +        """
    +        This extension is used to signal that the responder will return a
    +        "revoked" status for non-issued certificates.
    +
    +        :return:
    +            None or a Null object (if present)
    +        """
    +
    +        if self._processed_extensions is False:
    +            self._set_extensions()
    +        return self._extended_revoke_value
    +
    +    @property
    +    def basic_ocsp_response(self):
    +        """
    +        A shortcut into the BasicOCSPResponse sequence
    +
    +        :return:
    +            None or an asn1crypto.ocsp.BasicOCSPResponse object
    +        """
    +
    +        return self['response_bytes']['response'].parsed
    +
    +    @property
    +    def response_data(self):
    +        """
+        A shortcut into the parsed ResponseData sequence
    +
    +        :return:
    +            None or an asn1crypto.ocsp.ResponseData object
    +        """
    +
    +        return self['response_bytes']['response'].parsed['tbs_response_data']
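+
+
+# Illustrative usage sketch (commented out; der_bytes is a hypothetical
+# DER-encoded OCSP response, e.g. the body of an HTTP reply from a responder):
+#
+#   ocsp_response = OCSPResponse.load(der_bytes)
+#   if ocsp_response['response_status'].native == 'successful':
+#       single = ocsp_response.response_data['responses'][0]
+#       print(single['cert_status'].name)  # 'good', 'revoked' or 'unknown'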
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/parser.py b/server/www/packages/packages-windows/x86/asn1crypto/parser.py
    new file mode 100644
    index 0000000..c4f91f6
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/parser.py
    @@ -0,0 +1,285 @@
    +# coding: utf-8
    +
    +"""
    +Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
    +following items:
    +
    + - emit()
    + - parse()
    + - peek()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import sys
    +
    +from ._types import byte_cls, chr_cls, type_name
    +from .util import int_from_bytes, int_to_bytes
    +
    +_PY2 = sys.version_info <= (3,)
    +_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
    +
    +
    +def emit(class_, method, tag, contents):
    +    """
    +    Constructs a byte string of an ASN.1 DER-encoded value
    +
    +    This is typically not useful. Instead, use one of the standard classes from
    +    asn1crypto.core, or construct a new class with specific fields, and call the
    +    .dump() method.
    +
    +    :param class_:
    +        An integer ASN.1 class value: 0 (universal), 1 (application),
    +        2 (context), 3 (private)
    +
    +    :param method:
    +        An integer ASN.1 method value: 0 (primitive), 1 (constructed)
    +
    +    :param tag:
    +        An integer ASN.1 tag value
    +
    +    :param contents:
    +        A byte string of the encoded byte contents
    +
    +    :return:
    +        A byte string of the ASN.1 DER value (header and contents)
    +    """
    +
    +    if not isinstance(class_, int):
    +        raise TypeError('class_ must be an integer, not %s' % type_name(class_))
    +
    +    if class_ < 0 or class_ > 3:
    +        raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
    +
    +    if not isinstance(method, int):
    +        raise TypeError('method must be an integer, not %s' % type_name(method))
    +
    +    if method < 0 or method > 1:
    +        raise ValueError('method must be 0 or 1, not %s' % method)
    +
    +    if not isinstance(tag, int):
    +        raise TypeError('tag must be an integer, not %s' % type_name(tag))
    +
    +    if tag < 0:
+        raise ValueError('tag must be zero or greater, not %s' % tag)
    +
    +    if not isinstance(contents, byte_cls):
    +        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    +
    +    return _dump_header(class_, method, tag, contents) + contents
    +
    +
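+# Illustrative example (commented out): emitting a universal (0), primitive
+# (0) INTEGER (tag 2) with a one-byte value of 1 yields the DER encoding
+# b'\x02\x01\x01':
+#
+#   emit(0, 0, 2, b'\x01')  # b'\x02\x01\x01'
+
+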
    +def parse(contents, strict=False):
    +    """
    +    Parses a byte string of ASN.1 BER/DER-encoded data.
    +
    +    This is typically not useful. Instead, use one of the standard classes from
    +    asn1crypto.core, or construct a new class with specific fields, and call the
    +    .load() class method.
    +
    +    :param contents:
    +        A byte string of BER/DER-encoded data
    +
    +    :param strict:
    +        A boolean indicating if trailing data should be forbidden - if so, a
    +        ValueError will be raised when trailing data exists
    +
    +    :raises:
    +        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
    +        TypeError - when contents is not a byte string
    +
    +    :return:
    +        A 6-element tuple:
    +         - 0: integer class (0 to 3)
    +         - 1: integer method
    +         - 2: integer tag
    +         - 3: byte string header
    +         - 4: byte string content
    +         - 5: byte string trailer
    +    """
    +
    +    if not isinstance(contents, byte_cls):
    +        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    +
    +    contents_len = len(contents)
    +    info, consumed = _parse(contents, contents_len)
    +    if strict and consumed != contents_len:
    +        raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
    +    return info
    +
    +
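+# Illustrative example (commented out): parsing the INTEGER from the emit()
+# example above returns its components as a 6-element tuple:
+#
+#   parse(b'\x02\x01\x01')
+#   # (0, 0, 2, b'\x02\x01', b'\x01', b'')
+
+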
    +def peek(contents):
    +    """
    +    Parses a byte string of ASN.1 BER/DER-encoded data to find the length
    +
    +    This is typically used to look into an encoded value to see how long the
    +    next chunk of ASN.1-encoded data is. Primarily it is useful when a
    +    value is a concatenation of multiple values.
    +
    +    :param contents:
    +        A byte string of BER/DER-encoded data
    +
    +    :raises:
    +        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
    +        TypeError - when contents is not a byte string
    +
    +    :return:
    +        An integer with the number of bytes occupied by the ASN.1 value
    +    """
    +
    +    if not isinstance(contents, byte_cls):
    +        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    +
    +    info, consumed = _parse(contents, len(contents))
    +    return consumed
    +
    +
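+# Illustrative example (commented out): peek() reports how many bytes the
+# first value occupies, which is useful for splitting concatenated values:
+#
+#   peek(b'\x02\x01\x01' + b'\x05\x00')  # 3
+
+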
    +def _parse(encoded_data, data_len, pointer=0, lengths_only=False):
    +    """
    +    Parses a byte string into component parts
    +
    +    :param encoded_data:
    +        A byte string that contains BER-encoded data
    +
    +    :param data_len:
    +        The integer length of the encoded data
    +
    +    :param pointer:
    +        The index in the byte string to parse from
    +
    +    :param lengths_only:
    +        A boolean to cause the call to return a 2-element tuple of the integer
    +        number of bytes in the header and the integer number of bytes in the
    +        contents. Internal use only.
    +
    +    :return:
    +        A 2-element tuple:
    +         - 0: A tuple of (class_, method, tag, header, content, trailer)
    +         - 1: An integer indicating how many bytes were consumed
    +    """
    +
    +    if data_len < pointer + 2:
    +        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer))
    +
    +    start = pointer
    +    first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    +    pointer += 1
    +
    +    tag = first_octet & 31
+    # High tag number form: the tag is base-128 encoded with the 8th bit of
+    # each octet used as a continuation indicator
    +    if tag == 31:
    +        tag = 0
    +        while True:
    +            num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    +            pointer += 1
    +            tag *= 128
    +            tag += num & 127
    +            if num >> 7 == 0:
    +                break
    +
    +    length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    +    pointer += 1
    +
    +    if length_octet >> 7 == 0:
    +        if lengths_only:
    +            return (pointer, pointer + (length_octet & 127))
    +        contents_end = pointer + (length_octet & 127)
    +
    +    else:
    +        length_octets = length_octet & 127
    +        if length_octets:
    +            pointer += length_octets
    +            contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
    +            if lengths_only:
    +                return (pointer, contents_end)
    +
    +        else:
    +            # To properly parse indefinite length values, we need to scan forward
    +            # parsing headers until we find a value with a length of zero. If we
    +            # just scanned looking for \x00\x00, nested indefinite length values
    +            # would not work.
    +            contents_end = pointer
    +            while contents_end < data_len:
    +                sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True)
    +                if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00':
    +                    break
    +            if lengths_only:
    +                return (pointer, contents_end)
    +            if contents_end > data_len:
    +                raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
    +            return (
    +                (
    +                    first_octet >> 6,
    +                    (first_octet >> 5) & 1,
    +                    tag,
    +                    encoded_data[start:pointer],
    +                    encoded_data[pointer:contents_end - 2],
    +                    b'\x00\x00'
    +                ),
    +                contents_end
    +            )
    +
    +    if contents_end > data_len:
    +        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
    +    return (
    +        (
    +            first_octet >> 6,
    +            (first_octet >> 5) & 1,
    +            tag,
    +            encoded_data[start:pointer],
    +            encoded_data[pointer:contents_end],
    +            b''
    +        ),
    +        contents_end
    +    )
    +
    +
    +def _dump_header(class_, method, tag, contents):
    +    """
    +    Constructs the header bytes for an ASN.1 object
    +
    +    :param class_:
    +        An integer ASN.1 class value: 0 (universal), 1 (application),
    +        2 (context), 3 (private)
    +
    +    :param method:
    +        An integer ASN.1 method value: 0 (primitive), 1 (constructed)
    +
    +    :param tag:
    +        An integer ASN.1 tag value
    +
    +    :param contents:
    +        A byte string of the encoded byte contents
    +
    +    :return:
    +        A byte string of the ASN.1 DER header
    +    """
    +
    +    header = b''
    +
    +    id_num = 0
    +    id_num |= class_ << 6
    +    id_num |= method << 5
    +
    +    if tag >= 31:
    +        cont_bit = 0
    +        while tag > 0:
    +            header = chr_cls(cont_bit | (tag & 0x7f)) + header
    +            if not cont_bit:
    +                cont_bit = 0x80
    +            tag = tag >> 7
    +        header = chr_cls(id_num | 31) + header
    +    else:
    +        header += chr_cls(id_num | tag)
    +
    +    length = len(contents)
    +    if length <= 127:
    +        header += chr_cls(length)
    +    else:
    +        length_bytes = int_to_bytes(length)
    +        header += chr_cls(0x80 | len(length_bytes))
    +        header += length_bytes
    +
    +    return header
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/pdf.py b/server/www/packages/packages-windows/x86/asn1crypto/pdf.py
    new file mode 100644
    index 0000000..b72c886
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/pdf.py
    @@ -0,0 +1,84 @@
    +# coding: utf-8
    +
    +"""
+ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
+value parsing to asn1crypto.x509.Extension() and asn1crypto.cms.CMSAttribute().
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from .cms import CMSAttributeType, CMSAttribute
    +from .core import (
    +    Boolean,
    +    Integer,
    +    Null,
    +    ObjectIdentifier,
    +    OctetString,
    +    Sequence,
    +    SequenceOf,
    +    SetOf,
    +)
    +from .crl import CertificateList
    +from .ocsp import OCSPResponse
    +from .x509 import (
    +    Extension,
    +    ExtensionId,
    +    GeneralName,
    +    KeyPurposeId,
    +)
    +
    +
    +class AdobeArchiveRevInfo(Sequence):
    +    _fields = [
    +        ('version', Integer)
    +    ]
    +
    +
    +class AdobeTimestamp(Sequence):
    +    _fields = [
    +        ('version', Integer),
    +        ('location', GeneralName),
    +        ('requires_auth', Boolean, {'optional': True, 'default': False}),
    +    ]
    +
    +
    +class OtherRevInfo(Sequence):
    +    _fields = [
    +        ('type', ObjectIdentifier),
    +        ('value', OctetString),
    +    ]
    +
    +
    +class SequenceOfCertificateList(SequenceOf):
    +    _child_spec = CertificateList
    +
    +
    +class SequenceOfOCSPResponse(SequenceOf):
    +    _child_spec = OCSPResponse
    +
    +
    +class SequenceOfOtherRevInfo(SequenceOf):
    +    _child_spec = OtherRevInfo
    +
    +
    +class RevocationInfoArchival(Sequence):
    +    _fields = [
    +        ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
    +        ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
    +        ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class SetOfRevocationInfoArchival(SetOf):
    +    _child_spec = RevocationInfoArchival
    +
    +
    +ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
    +ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
    +ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
    +Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
    +Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
    +Extension._oid_specs['adobe_ppklite_credential'] = Null
    +KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
    +CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
    +CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
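+
+# Illustrative effect of the registrations above (a note, not upstream code):
+# an x509.Extension whose extn_id is '1.2.840.113583.1.1.9.1' will now parse
+# its extn_value as an AdobeTimestamp instead of an opaque byte string.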
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/pem.py b/server/www/packages/packages-windows/x86/asn1crypto/pem.py
    new file mode 100644
    index 0000000..511ea4b
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/pem.py
    @@ -0,0 +1,222 @@
    +# coding: utf-8
    +
    +"""
    +Encoding DER to PEM and decoding PEM to DER. Exports the following items:
    +
    + - armor()
    + - detect()
    + - unarmor()
    +
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import base64
    +import re
    +import sys
    +
    +from ._errors import unwrap
    +from ._types import type_name as _type_name, str_cls, byte_cls
    +
    +if sys.version_info < (3,):
    +    from cStringIO import StringIO as BytesIO
    +else:
    +    from io import BytesIO
    +
    +
    +def detect(byte_string):
    +    """
    +    Detect if a byte string seems to contain a PEM-encoded block
    +
    +    :param byte_string:
    +        A byte string to look through
    +
    +    :return:
    +        A boolean, indicating if a PEM-encoded block is contained in the byte
    +        string
    +    """
    +
    +    if not isinstance(byte_string, byte_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            byte_string must be a byte string, not %s
    +            ''',
    +            _type_name(byte_string)
    +        ))
    +
    +    return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
    +
    +
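+# Illustrative examples (commented out):
+#
+#   detect(b'-----BEGIN CERTIFICATE-----\n...')  # True
+#   detect(b'\x30\x82\x04\xf1...')               # False (raw DER)
+
+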
    +def armor(type_name, der_bytes, headers=None):
    +    """
    +    Armors a DER-encoded byte string in PEM
    +
    +    :param type_name:
    +        A unicode string that will be capitalized and placed in the header
    +        and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
    +        will appear as "-----BEGIN CERTIFICATE-----" and
    +        "-----END CERTIFICATE-----".
    +
    +    :param der_bytes:
    +        A byte string to be armored
    +
    +    :param headers:
    +        An OrderedDict of the header lines to write after the BEGIN line
    +
    +    :return:
    +        A byte string of the PEM block
    +    """
    +
    +    if not isinstance(der_bytes, byte_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            der_bytes must be a byte string, not %s
    +            ''' % _type_name(der_bytes)
    +        ))
    +
    +    if not isinstance(type_name, str_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            type_name must be a unicode string, not %s
    +            ''',
    +            _type_name(type_name)
    +        ))
    +
    +    type_name = type_name.upper().encode('ascii')
    +
    +    output = BytesIO()
    +    output.write(b'-----BEGIN ')
    +    output.write(type_name)
    +    output.write(b'-----\n')
    +    if headers:
    +        for key in headers:
    +            output.write(key.encode('ascii'))
    +            output.write(b': ')
    +            output.write(headers[key].encode('ascii'))
    +            output.write(b'\n')
    +        output.write(b'\n')
    +    b64_bytes = base64.b64encode(der_bytes)
    +    b64_len = len(b64_bytes)
    +    i = 0
    +    while i < b64_len:
    +        output.write(b64_bytes[i:i + 64])
    +        output.write(b'\n')
    +        i += 64
    +    output.write(b'-----END ')
    +    output.write(type_name)
    +    output.write(b'-----\n')
    +
    +    return output.getvalue()
    +
    +
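+# Illustrative example (commented out; der_bytes is hypothetical DER data):
+#
+#   pem_bytes = armor('CERTIFICATE', der_bytes)
+#   # b'-----BEGIN CERTIFICATE-----\n<base64 lines>\n-----END CERTIFICATE-----\n'
+
+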
    +def _unarmor(pem_bytes):
    +    """
    +    Convert a PEM-encoded byte string into one or more DER-encoded byte strings
    +
    +    :param pem_bytes:
    +        A byte string of the PEM-encoded data
    +
    +    :raises:
    +        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
    +
    +    :return:
    +        A generator of 3-element tuples in the format: (object_type, headers,
    +        der_bytes). The object_type is a unicode string of what is between
    +        "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
    +        "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
    +        in the form "Name: Value" that are right after the begin line.
    +    """
    +
    +    if not isinstance(pem_bytes, byte_cls):
    +        raise TypeError(unwrap(
    +            '''
    +            pem_bytes must be a byte string, not %s
    +            ''',
    +            _type_name(pem_bytes)
    +        ))
    +
    +    # Valid states include: "trash", "headers", "body"
    +    state = 'trash'
    +    headers = {}
    +    base64_data = b''
    +    object_type = None
    +
    +    found_start = False
    +    found_end = False
    +
    +    for line in pem_bytes.splitlines(False):
    +        if line == b'':
    +            continue
    +
    +        if state == "trash":
+            # Look for a starting line since some CA cert bundles show the
+            # cert info in a parsed format above each PEM block
    +            type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
    +            if not type_name_match:
    +                continue
    +            object_type = type_name_match.group(1).decode('ascii')
    +
    +            found_start = True
    +            state = 'headers'
    +            continue
    +
    +        if state == 'headers':
    +            if line.find(b':') == -1:
    +                state = 'body'
    +            else:
    +                decoded_line = line.decode('ascii')
    +                name, value = decoded_line.split(':', 1)
    +                headers[name] = value.strip()
    +                continue
    +
    +        if state == 'body':
    +            if line[0:5] in (b'-----', b'---- '):
    +                der_bytes = base64.b64decode(base64_data)
    +
    +                yield (object_type, headers, der_bytes)
    +
    +                state = 'trash'
    +                headers = {}
    +                base64_data = b''
    +                object_type = None
    +                found_end = True
    +                continue
    +
    +            base64_data += line
    +
    +    if not found_start or not found_end:
    +        raise ValueError(unwrap(
    +            '''
    +            pem_bytes does not appear to contain PEM-encoded data - no
    +            BEGIN/END combination found
    +            '''
    +        ))
    +
    +
    +def unarmor(pem_bytes, multiple=False):
    +    """
    +    Convert a PEM-encoded byte string into a DER-encoded byte string
    +
    +    :param pem_bytes:
    +        A byte string of the PEM-encoded data
    +
    +    :param multiple:
    +        If True, function will return a generator
    +
    +    :raises:
    +        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes
    +
    +    :return:
    +        A 3-element tuple (object_name, headers, der_bytes). The object_name is
    +        a unicode string of what is between "-----BEGIN " and "-----". Examples
    +        include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
    +        dict containing any lines in the form "Name: Value" that are right
    +        after the begin line.
    +    """
    +
    +    generator = _unarmor(pem_bytes)
    +
    +    if not multiple:
    +        return next(generator)
    +
    +    return generator
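+
+
+# Illustrative round trip (commented out; der_bytes is hypothetical DER data):
+#
+#   object_type, headers, der = unarmor(armor('CERTIFICATE', der_bytes))
+#   assert object_type == 'CERTIFICATE' and der == der_bytes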
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/pkcs12.py b/server/www/packages/packages-windows/x86/asn1crypto/pkcs12.py
    new file mode 100644
    index 0000000..7ebcefe
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/pkcs12.py
    @@ -0,0 +1,193 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for PKCS#12 files. Exports the following items:
    +
    + - CertBag()
    + - CrlBag()
    + - Pfx()
    + - SafeBag()
    + - SecretBag()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from .algos import DigestInfo
    +from .cms import ContentInfo, SignedData
    +from .core import (
    +    Any,
    +    BMPString,
    +    Integer,
    +    ObjectIdentifier,
    +    OctetString,
    +    ParsableOctetString,
    +    Sequence,
    +    SequenceOf,
    +    SetOf,
    +)
    +from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
    +from .x509 import Certificate, KeyPurposeId
    +
    +
    +# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
    +
    +class MacData(Sequence):
    +    _fields = [
    +        ('mac', DigestInfo),
    +        ('mac_salt', OctetString),
    +        ('iterations', Integer, {'default': 1}),
    +    ]
    +
    +
    +class Version(Integer):
    +    _map = {
    +        3: 'v3'
    +    }
    +
    +
    +class AttributeType(ObjectIdentifier):
    +    _map = {
    +        # https://tools.ietf.org/html/rfc2985#page-18
    +        '1.2.840.113549.1.9.20': 'friendly_name',
    +        '1.2.840.113549.1.9.21': 'local_key_id',
    +        # https://support.microsoft.com/en-us/kb/287547
+        '1.3.6.1.4.1.311.17.1': 'microsoft_csp_name',
+        '1.3.6.1.4.1.311.17.2': 'microsoft_local_machine_keyset',
+        # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
+        # a set of OIDs representing key usage; the usual value is a SET
+        # containing the single OID 2.5.29.37.0
    +        '2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
    +    }
    +
    +
    +class SetOfAny(SetOf):
    +    _child_spec = Any
    +
    +
    +class SetOfBMPString(SetOf):
    +    _child_spec = BMPString
    +
    +
    +class SetOfOctetString(SetOf):
    +    _child_spec = OctetString
    +
    +
    +class SetOfKeyPurposeId(SetOf):
    +    _child_spec = KeyPurposeId
    +
    +
    +class Attribute(Sequence):
    +    _fields = [
    +        ('type', AttributeType),
    +        ('values', None),
    +    ]
    +
    +    _oid_specs = {
    +        'friendly_name': SetOfBMPString,
    +        'local_key_id': SetOfOctetString,
    +        'microsoft_csp_name': SetOfBMPString,
    +        'trusted_key_usage': SetOfKeyPurposeId,
    +    }
    +
    +    def _values_spec(self):
    +        return self._oid_specs.get(self['type'].native, SetOfAny)
    +
    +    _spec_callbacks = {
    +        'values': _values_spec
    +    }
    +
    +
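+# A note on the callback above (not upstream code): _spec_callbacks picks the
+# spec for 'values' from the attribute 'type' at parse time, so a
+# 'friendly_name' attribute parses its values as SetOfBMPString while
+# unrecognized types fall back to SetOfAny.
+
+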
    +class Attributes(SetOf):
    +    _child_spec = Attribute
    +
    +
    +class Pfx(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('auth_safe', ContentInfo),
    +        ('mac_data', MacData, {'optional': True})
    +    ]
    +
    +    _authenticated_safe = None
    +
    +    @property
    +    def authenticated_safe(self):
    +        if self._authenticated_safe is None:
    +            content = self['auth_safe']['content']
    +            if isinstance(content, SignedData):
    +                content = content['content_info']['content']
    +            self._authenticated_safe = AuthenticatedSafe.load(content.native)
    +        return self._authenticated_safe
    +
    +
    +class AuthenticatedSafe(SequenceOf):
    +    _child_spec = ContentInfo
    +
    +
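+# Illustrative usage sketch (commented out; the file name is hypothetical and
+# encrypted safe contents would still need to be decrypted by the caller):
+#
+#   with open('keystore.p12', 'rb') as f:
+#       pfx = Pfx.load(f.read())
+#   for content_info in pfx.authenticated_safe:
+#       print(content_info['content_type'].native)  # e.g. 'data'
+
+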
    +class BagId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.12.10.1.1': 'key_bag',
    +        '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
    +        '1.2.840.113549.1.12.10.1.3': 'cert_bag',
    +        '1.2.840.113549.1.12.10.1.4': 'crl_bag',
    +        '1.2.840.113549.1.12.10.1.5': 'secret_bag',
    +        '1.2.840.113549.1.12.10.1.6': 'safe_contents',
    +    }
    +
    +
    +class CertId(ObjectIdentifier):
    +    _map = {
    +        '1.2.840.113549.1.9.22.1': 'x509',
    +        '1.2.840.113549.1.9.22.2': 'sdsi',
    +    }
    +
    +
    +class CertBag(Sequence):
    +    _fields = [
    +        ('cert_id', CertId),
    +        ('cert_value', ParsableOctetString, {'explicit': 0}),
    +    ]
    +
    +    _oid_pair = ('cert_id', 'cert_value')
    +    _oid_specs = {
    +        'x509': Certificate,
    +    }
    +
    +
    +class CrlBag(Sequence):
    +    _fields = [
    +        ('crl_id', ObjectIdentifier),
    +        ('crl_value', OctetString, {'explicit': 0}),
    +    ]
    +
    +
    +class SecretBag(Sequence):
    +    _fields = [
    +        ('secret_type_id', ObjectIdentifier),
    +        ('secret_value', OctetString, {'explicit': 0}),
    +    ]
    +
    +
    +class SafeContents(SequenceOf):
    +    pass
    +
    +
    +class SafeBag(Sequence):
    +    _fields = [
    +        ('bag_id', BagId),
    +        ('bag_value', Any, {'explicit': 0}),
    +        ('bag_attributes', Attributes, {'optional': True}),
    +    ]
    +
    +    _oid_pair = ('bag_id', 'bag_value')
    +    _oid_specs = {
    +        'key_bag': PrivateKeyInfo,
    +        'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
    +        'cert_bag': CertBag,
    +        'crl_bag': CrlBag,
    +        'secret_bag': SecretBag,
    +        'safe_contents': SafeContents
    +    }
    +
    +
    +SafeContents._child_spec = SafeBag
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/tsp.py b/server/www/packages/packages-windows/x86/asn1crypto/tsp.py
    new file mode 100644
    index 0000000..bd40810
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/tsp.py
    @@ -0,0 +1,310 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for the time stamp protocol (TSP). Exports the following
    +items:
    +
    + - TimeStampReq()
    + - TimeStampResp()
    +
+Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(), adds
+TimeStampedData() and TSTInfo() support to
+asn1crypto.cms.EncapsulatedContentInfo(), and registers some OIDs and value
+parsers with asn1crypto.cms.CMSAttribute().
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from .algos import DigestAlgorithm
    +from .cms import (
    +    CMSAttribute,
    +    CMSAttributeType,
    +    ContentInfo,
    +    ContentType,
    +    EncapsulatedContentInfo,
    +)
    +from .core import (
    +    Any,
    +    BitString,
    +    Boolean,
    +    Choice,
    +    GeneralizedTime,
    +    IA5String,
    +    Integer,
    +    ObjectIdentifier,
    +    OctetString,
    +    Sequence,
    +    SequenceOf,
    +    SetOf,
    +    UTF8String,
    +)
    +from .crl import CertificateList
    +from .x509 import (
    +    Attributes,
    +    CertificatePolicies,
    +    GeneralName,
    +    GeneralNames,
    +)
    +
    +
    +# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
    +# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
    +# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
    +
    +class Version(Integer):
    +    _map = {
    +        0: 'v0',
    +        1: 'v1',
    +        2: 'v2',
    +        3: 'v3',
    +        4: 'v4',
    +        5: 'v5',
    +    }
    +
    +
    +class MessageImprint(Sequence):
    +    _fields = [
    +        ('hash_algorithm', DigestAlgorithm),
    +        ('hashed_message', OctetString),
    +    ]
    +
    +
    +class Accuracy(Sequence):
    +    _fields = [
    +        ('seconds', Integer, {'optional': True}),
    +        ('millis', Integer, {'implicit': 0, 'optional': True}),
    +        ('micros', Integer, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class Extension(Sequence):
    +    _fields = [
    +        ('extn_id', ObjectIdentifier),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', OctetString),
    +    ]
    +
    +
    +class Extensions(SequenceOf):
    +    _child_spec = Extension
    +
    +
    +class TSTInfo(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('policy', ObjectIdentifier),
    +        ('message_imprint', MessageImprint),
    +        ('serial_number', Integer),
    +        ('gen_time', GeneralizedTime),
    +        ('accuracy', Accuracy, {'optional': True}),
    +        ('ordering', Boolean, {'default': False}),
    +        ('nonce', Integer, {'optional': True}),
    +        ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
    +        ('extensions', Extensions, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class TimeStampReq(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('message_imprint', MessageImprint),
    +        ('req_policy', ObjectIdentifier, {'optional': True}),
    +        ('nonce', Integer, {'optional': True}),
    +        ('cert_req', Boolean, {'default': False}),
    +        ('extensions', Extensions, {'implicit': 0, 'optional': True}),
    +    ]
    +
    +
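+# Illustrative example (commented out): building a minimal RFC 3161 request
+# for a SHA-256 digest; the 32 zero bytes stand in for a real digest:
+#
+#   req = TimeStampReq({
+#       'version': 'v1',
+#       'message_imprint': {
+#           'hash_algorithm': {'algorithm': 'sha256'},
+#           'hashed_message': b'\x00' * 32,
+#       },
+#       'cert_req': True,
+#   })
+#   der_request = req.dump()
+
+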
    +class PKIStatus(Integer):
    +    _map = {
    +        0: 'granted',
    +        1: 'granted_with_mods',
    +        2: 'rejection',
    +        3: 'waiting',
    +        4: 'revocation_warning',
    +        5: 'revocation_notification',
    +    }
    +
    +
    +class PKIFreeText(SequenceOf):
    +    _child_spec = UTF8String
    +
    +
    +class PKIFailureInfo(BitString):
    +    _map = {
    +        0: 'bad_alg',
    +        2: 'bad_request',
    +        5: 'bad_data_format',
    +        14: 'time_not_available',
    +        15: 'unaccepted_policy',
    +        16: 'unaccepted_extensions',
    +        17: 'add_info_not_available',
    +        25: 'system_failure',
    +    }
    +
    +
    +class PKIStatusInfo(Sequence):
    +    _fields = [
    +        ('status', PKIStatus),
    +        ('status_string', PKIFreeText, {'optional': True}),
    +        ('fail_info', PKIFailureInfo, {'optional': True}),
    +    ]
    +
    +
    +class TimeStampResp(Sequence):
    +    _fields = [
    +        ('status', PKIStatusInfo),
    +        ('time_stamp_token', ContentInfo),
    +    ]
    +
    +
    +class MetaData(Sequence):
    +    _fields = [
    +        ('hash_protected', Boolean),
    +        ('file_name', UTF8String, {'optional': True}),
    +        ('media_type', IA5String, {'optional': True}),
    +        ('other_meta_data', Attributes, {'optional': True}),
    +    ]
    +
    +
+# RFC 5544 defines TimeStampAndCRL as a two-field SEQUENCE, so this class
+# derives from Sequence; _fields would have no effect on a SequenceOf.
+class TimeStampAndCRL(Sequence):
    +    _fields = [
    +        ('time_stamp', EncapsulatedContentInfo),
    +        ('crl', CertificateList, {'optional': True}),
    +    ]
    +
    +
    +class TimeStampTokenEvidence(SequenceOf):
    +    _child_spec = TimeStampAndCRL
    +
    +
    +class DigestAlgorithms(SequenceOf):
    +    _child_spec = DigestAlgorithm
    +
    +
    +class EncryptionInfo(Sequence):
    +    _fields = [
    +        ('encryption_info_type', ObjectIdentifier),
    +        ('encryption_info_value', Any),
    +    ]
    +
    +
    +class PartialHashtree(SequenceOf):
    +    _child_spec = OctetString
    +
    +
    +class PartialHashtrees(SequenceOf):
    +    _child_spec = PartialHashtree
    +
    +
    +class ArchiveTimeStamp(Sequence):
    +    _fields = [
    +        ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
    +        ('attributes', Attributes, {'implicit': 1, 'optional': True}),
    +        ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
    +        ('time_stamp', ContentInfo),
    +    ]
    +
    +
    +class ArchiveTimeStampSequence(SequenceOf):
    +    _child_spec = ArchiveTimeStamp
    +
    +
    +class EvidenceRecord(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('digest_algorithms', DigestAlgorithms),
    +        ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
    +        ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
    +        ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
    +    ]
    +
    +
    +class OtherEvidence(Sequence):
    +    _fields = [
    +        ('oe_type', ObjectIdentifier),
    +        ('oe_value', Any),
    +    ]
    +
    +
    +class Evidence(Choice):
    +    _alternatives = [
    +        ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
    +        ('ers_evidence', EvidenceRecord, {'implicit': 1}),
    +        ('other_evidence', OtherEvidence, {'implicit': 2}),
    +    ]
    +
    +
    +class TimeStampedData(Sequence):
    +    _fields = [
    +        ('version', Version),
    +        ('data_uri', IA5String, {'optional': True}),
    +        ('meta_data', MetaData, {'optional': True}),
    +        ('content', OctetString, {'optional': True}),
    +        ('temporal_evidence', Evidence),
    +    ]
    +
    +
    +class IssuerSerial(Sequence):
    +    _fields = [
    +        ('issuer', GeneralNames),
    +        ('serial_number', Integer),
    +    ]
    +
    +
    +class ESSCertID(Sequence):
    +    _fields = [
    +        ('cert_hash', OctetString),
    +        ('issuer_serial', IssuerSerial, {'optional': True}),
    +    ]
    +
    +
    +class ESSCertIDs(SequenceOf):
    +    _child_spec = ESSCertID
    +
    +
    +class SigningCertificate(Sequence):
    +    _fields = [
    +        ('certs', ESSCertIDs),
    +        ('policies', CertificatePolicies, {'optional': True}),
    +    ]
    +
    +
    +class SetOfSigningCertificates(SetOf):
    +    _child_spec = SigningCertificate
    +
    +
    +class ESSCertIDv2(Sequence):
    +    _fields = [
    +        ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
    +        ('cert_hash', OctetString),
    +        ('issuer_serial', IssuerSerial, {'optional': True}),
    +    ]
    +
    +
    +class ESSCertIDv2s(SequenceOf):
    +    _child_spec = ESSCertIDv2
    +
    +
    +class SigningCertificateV2(Sequence):
    +    _fields = [
    +        ('certs', ESSCertIDv2s),
    +        ('policies', CertificatePolicies, {'optional': True}),
    +    ]
    +
    +
    +class SetOfSigningCertificatesV2(SetOf):
    +    _child_spec = SigningCertificateV2
    +
    +
    +EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
    +EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
    +ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
    +ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
    +ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
    +CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
    +CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
    +CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
    +CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/util.py b/server/www/packages/packages-windows/x86/asn1crypto/util.py
    new file mode 100644
    index 0000000..7196897
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/util.py
    @@ -0,0 +1,878 @@
    +# coding: utf-8
    +
    +"""
    +Miscellaneous data helpers, including functions for converting integers to and
    +from bytes and UTC timezone. Exports the following items:
    +
    + - OrderedDict()
    + - int_from_bytes()
    + - int_to_bytes()
    + - timezone.utc
    + - utc_with_dst
    + - create_timezone()
    + - inet_ntop()
    + - inet_pton()
    + - uri_to_iri()
    + - iri_to_uri()
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +import math
    +import sys
    +from datetime import datetime, date, timedelta, tzinfo
    +
    +from ._errors import unwrap
    +from ._iri import iri_to_uri, uri_to_iri  # noqa
    +from ._ordereddict import OrderedDict  # noqa
    +from ._types import type_name
    +
    +if sys.platform == 'win32':
    +    from ._inet import inet_ntop, inet_pton
    +else:
    +    from socket import inet_ntop, inet_pton  # noqa
    +
    +
    +# Python 2
    +if sys.version_info <= (3,):
    +
    +    def int_to_bytes(value, signed=False, width=None):
    +        """
    +        Converts an integer to a byte string
    +
    +        :param value:
    +            The integer to convert
    +
    +        :param signed:
    +            If the byte string should be encoded using two's complement
    +
    +        :param width:
    +            If None, the minimal possible size (but at least 1),
    +            otherwise an integer of the byte width for the return value
    +
    +        :return:
    +            A byte string
    +        """
    +
    +        if value == 0 and width == 0:
    +            return b''
    +
    +        # Handle negatives in two's complement
    +        is_neg = False
    +        if signed and value < 0:
    +            is_neg = True
    +            bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
    +            value = (value + (1 << bits)) % (1 << bits)
    +
    +        hex_str = '%x' % value
    +        if len(hex_str) & 1:
    +            hex_str = '0' + hex_str
    +
    +        output = hex_str.decode('hex')
    +
    +        if signed and not is_neg and ord(output[0:1]) & 0x80:
    +            output = b'\x00' + output
    +
    +        if width is not None:
    +            if len(output) > width:
    +                raise OverflowError('int too big to convert')
    +            if is_neg:
    +                pad_char = b'\xFF'
    +            else:
    +                pad_char = b'\x00'
    +            output = (pad_char * (width - len(output))) + output
    +        elif is_neg and ord(output[0:1]) & 0x80 == 0:
    +            output = b'\xFF' + output
    +
    +        return output
    +
    +    def int_from_bytes(value, signed=False):
    +        """
    +        Converts a byte string to an integer
    +
    +        :param value:
    +            The byte string to convert
    +
    +        :param signed:
    +            If the byte string should be interpreted using two's complement
    +
    +        :return:
    +            An integer
    +        """
    +
    +        if value == b'':
    +            return 0
    +
    +        num = long(value.encode("hex"), 16)  # noqa
    +
    +        if not signed:
    +            return num
    +
    +        # Check for sign bit and handle two's complement
    +        if ord(value[0:1]) & 0x80:
    +            bit_len = len(value) * 8
    +            return num - (1 << bit_len)
    +
    +        return num
    +
    +    class timezone(tzinfo):  # noqa
    +        """
    +        Implements datetime.timezone for py2.
    +        Only full minute offsets are supported.
    +        DST is not supported.
    +        """
    +
    +        def __init__(self, offset, name=None):
    +            """
    +            :param offset:
    +                A timedelta with this timezone's offset from UTC
    +
    +            :param name:
    +                Name of the timezone; if None, generate one.
    +            """
    +
    +            if not timedelta(hours=-24) < offset < timedelta(hours=24):
    +                raise ValueError('Offset must be in [-23:59, 23:59]')
    +
    +            if offset.seconds % 60 or offset.microseconds:
    +                raise ValueError('Offset must be full minutes')
    +
    +            self._offset = offset
    +
    +            if name is not None:
    +                self._name = name
    +            elif not offset:
    +                self._name = 'UTC'
    +            else:
    +                self._name = 'UTC' + _format_offset(offset)
    +
    +        def __eq__(self, other):
    +            """
    +            Compare two timezones
    +
    +            :param other:
    +                The other timezone to compare to
    +
    +            :return:
    +                A boolean
    +            """
    +
    +            if type(other) != timezone:
    +                return False
    +            return self._offset == other._offset
    +
    +        def __getinitargs__(self):
    +            """
    +            Called by tzinfo.__reduce__ to support pickle and copy.
    +
    +            :return:
    +                offset and name, to be used for __init__
    +            """
    +
    +            return self._offset, self._name
    +
    +        def tzname(self, dt):
    +            """
    +            :param dt:
    +                A datetime object; ignored.
    +
    +            :return:
    +                Name of this timezone
    +            """
    +
    +            return self._name
    +
    +        def utcoffset(self, dt):
    +            """
    +            :param dt:
    +                A datetime object; ignored.
    +
    +            :return:
    +                A timedelta object with the offset from UTC
    +            """
    +
    +            return self._offset
    +
    +        def dst(self, dt):
    +            """
    +            :param dt:
    +                A datetime object; ignored.
    +
    +            :return:
    +                Zero timedelta
    +            """
    +
    +            return timedelta(0)
    +
    +    timezone.utc = timezone(timedelta(0))
    +
    +# Python 3
    +else:
    +
    +    from datetime import timezone  # noqa
    +
    +    def int_to_bytes(value, signed=False, width=None):
    +        """
    +        Converts an integer to a byte string
    +
    +        :param value:
    +            The integer to convert
    +
    +        :param signed:
    +            If the byte string should be encoded using two's complement
    +
    +        :param width:
    +            If None, the minimal possible size (but at least 1),
    +            otherwise an integer of the byte width for the return value
    +
    +        :return:
    +            A byte string
    +        """
    +
    +        if width is None:
    +            if signed:
    +                if value < 0:
    +                    bits_required = abs(value + 1).bit_length()
    +                else:
    +                    bits_required = value.bit_length()
    +                if bits_required % 8 == 0:
    +                    bits_required += 1
    +            else:
    +                bits_required = value.bit_length()
    +            width = math.ceil(bits_required / 8) or 1
    +        return value.to_bytes(width, byteorder='big', signed=signed)
    +
    +    def int_from_bytes(value, signed=False):
    +        """
    +        Converts a byte string to an integer
    +
    +        :param value:
    +            The byte string to convert
    +
    +        :param signed:
    +            If the byte string should be interpreted using two's complement
    +
    +        :return:
    +            An integer
    +        """
    +
    +        return int.from_bytes(value, 'big', signed=signed)
    +
    +
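+# Illustrative examples (commented out; results are identical on both major
+# Python versions):
+#
+#   int_to_bytes(255)                     # b'\xff'
+#   int_to_bytes(128, signed=True)        # b'\x00\x80' (sign-bit padding)
+#   int_from_bytes(b'\x80', signed=True)  # -128
+
+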
    +def _format_offset(off):
    +    """
    +    Format a timedelta into "[+-]HH:MM" format or "" for None
    +    """
    +
    +    if off is None:
    +        return ''
    +    mins = off.days * 24 * 60 + off.seconds // 60
    +    sign = '-' if mins < 0 else '+'
    +    return sign + '%02d:%02d' % divmod(abs(mins), 60)
    +
    +
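+# Illustrative example (commented out):
+#
+#   _format_offset(timedelta(hours=-5, minutes=-30))  # '-05:30'
+
+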
    +class _UtcWithDst(tzinfo):
    +    """
    +    Utc class where dst does not return None; required for astimezone
    +    """
    +
    +    def tzname(self, dt):
    +        return 'UTC'
    +
    +    def utcoffset(self, dt):
    +        return timedelta(0)
    +
    +    def dst(self, dt):
    +        return timedelta(0)
    +
    +
    +utc_with_dst = _UtcWithDst()
    +
    +_timezone_cache = {}
    +
    +
    +def create_timezone(offset):
    +    """
    +    Returns a new datetime.timezone object with the given offset.
    +    Uses cached objects if possible.
    +
    +    :param offset:
+        A datetime.timedelta object; it must be a whole number of minutes
+        between -23:59 and +23:59.
    +
    +    :return:
    +        A datetime.timezone object
    +    """
    +
    +    try:
    +        tz = _timezone_cache[offset]
    +    except KeyError:
    +        tz = _timezone_cache[offset] = timezone(offset)
    +    return tz
    +
    +
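+# Illustrative example (commented out): timezone objects are cached, so
+# repeated calls with the same offset return the same instance:
+#
+#   tz = create_timezone(timedelta(hours=5, minutes=30))
+#   create_timezone(timedelta(hours=5, minutes=30)) is tz  # True
+
+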
    +class extended_date(object):
    +    """
    +    A datetime.datetime-like object that represents the year 0. This is just
    +    to handle 0000-01-01 found in some certificates. Python's datetime does
    +    not support year 0.
    +
+    The proleptic Gregorian calendar repeats itself every 400 years, so the
+    simplest way to format year 0 is to substitute year 2000.
    +    """
    +
    +    def __init__(self, year, month, day):
    +        """
    +        :param year:
    +            The integer 0
    +
    +        :param month:
    +            An integer from 1 to 12
    +
    +        :param day:
    +            An integer from 1 to 31
    +        """
    +
    +        if year != 0:
    +            raise ValueError('year must be 0')
    +
    +        self._y2k = date(2000, month, day)
    +
    +    @property
    +    def year(self):
    +        """
    +        :return:
    +            The integer 0
    +        """
    +
    +        return 0
    +
    +    @property
    +    def month(self):
    +        """
    +        :return:
    +            An integer from 1 to 12
    +        """
    +
    +        return self._y2k.month
    +
    +    @property
    +    def day(self):
    +        """
    +        :return:
    +            An integer from 1 to 31
    +        """
    +
    +        return self._y2k.day
    +
    +    def strftime(self, format):
    +        """
    +        Formats the date using strftime()
    +
    +        :param format:
    +            A strftime() format string
    +
    +        :return:
    +            A str, the formatted date as a unicode string
    +            in Python 3 and a byte string in Python 2
    +        """
    +
    +        # Format the date twice, once with year 2000, once with year 4000.
    +        # The only differences in the result will be in the millennium. Find them and replace by zeros.
    +        y2k = self._y2k.strftime(format)
    +        y4k = self._y2k.replace(year=4000).strftime(format)
    +        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
    +
    +    def isoformat(self):
    +        """
    +        Formats the date as %Y-%m-%d
    +
    +        :return:
    +            The date formatted to %Y-%m-%d as a unicode string in Python 3
    +            and a byte string in Python 2
    +        """
    +
    +        return self.strftime('0000-%m-%d')
    +
    +    def replace(self, year=None, month=None, day=None):
    +        """
    +        Returns a new datetime.date or asn1crypto.util.extended_date
    +        object with the specified components replaced
    +
    +        :return:
    +            A datetime.date or asn1crypto.util.extended_date object
    +        """
    +
    +        if year is None:
    +            year = self.year
    +        if month is None:
    +            month = self.month
    +        if day is None:
    +            day = self.day
    +
    +        if year > 0:
    +            cls = date
    +        else:
    +            cls = extended_date
    +
    +        return cls(
    +            year,
    +            month,
    +            day
    +        )
    +
    +    def __str__(self):
    +        """
    +        :return:
    +            A str representing this extended_date, e.g. "0000-01-01"
    +        """
    +
    +        return self.strftime('%Y-%m-%d')
    +
    +    def __eq__(self, other):
    +        """
    +        Compare two extended_date objects
    +
    +        :param other:
    +            The other extended_date to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        # datetime.date object wouldn't compare equal because it can't be year 0
    +        if not isinstance(other, self.__class__):
    +            return False
    +        return self.__cmp__(other) == 0
    +
    +    def __ne__(self, other):
    +        """
    +        Compare two extended_date objects
    +
    +        :param other:
    +            The other extended_date to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        return not self.__eq__(other)
    +
    +    def _comparison_error(self, other):
    +        raise TypeError(unwrap(
    +            '''
    +            An asn1crypto.util.extended_date object can only be compared to
    +            an asn1crypto.util.extended_date or datetime.date object, not %s
    +            ''',
    +            type_name(other)
    +        ))
    +
    +    def __cmp__(self, other):
    +        """
    +        Compare two extended_date or datetime.date objects
    +
    +        :param other:
    +            The other extended_date object to compare to
    +
    +        :return:
    +            An integer smaller than, equal to, or larger than 0
    +        """
    +
    +        # self is year 0, other is >= year 1
    +        if isinstance(other, date):
    +            return -1
    +
    +        if not isinstance(other, self.__class__):
    +            self._comparison_error(other)
    +
    +        if self._y2k < other._y2k:
    +            return -1
    +        if self._y2k > other._y2k:
    +            return 1
    +        return 0
    +
    +    def __lt__(self, other):
    +        return self.__cmp__(other) < 0
    +
    +    def __le__(self, other):
    +        return self.__cmp__(other) <= 0
    +
    +    def __gt__(self, other):
    +        return self.__cmp__(other) > 0
    +
    +    def __ge__(self, other):
    +        return self.__cmp__(other) >= 0
    +
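+# A short sketch of extended_date behavior (derived from the class above):
+#
+#   >>> d = extended_date(0, 3, 1)
+#   >>> d.isoformat()            # '0000-03-01'
+#   >>> d.strftime('%Y/%m/%d')   # '0000/03/01' - the year-2000/year-4000
+#   ...                          # formatting trick zeroes the millennium digit
+#   >>> d < date(1, 1, 1)        # True: year 0 sorts before any datetime.date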
    +
    +class extended_datetime(object):
    +    """
    +    A datetime.datetime-like object that represents the year 0. This is just
    +    to handle 0000-01-01 found in some certificates. Python's datetime does
    +    not support year 0.
    +
+    The proleptic Gregorian calendar repeats itself every 400 years. Therefore,
    +    the simplest way to format is to substitute year 2000.
    +    """
    +
    +    # There are 97 leap days during 400 years.
    +    DAYS_IN_400_YEARS = 400 * 365 + 97
    +    DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS
    +
    +    def __init__(self, year, *args, **kwargs):
    +        """
    +        :param year:
    +            The integer 0
    +
    +        :param args:
    +            Other positional arguments; see datetime.datetime.
    +
    +        :param kwargs:
    +            Other keyword arguments; see datetime.datetime.
    +        """
    +
    +        if year != 0:
    +            raise ValueError('year must be 0')
    +
    +        self._y2k = datetime(2000, *args, **kwargs)
    +
    +    @property
    +    def year(self):
    +        """
    +        :return:
    +            The integer 0
    +        """
    +
    +        return 0
    +
    +    @property
    +    def month(self):
    +        """
    +        :return:
    +            An integer from 1 to 12
    +        """
    +
    +        return self._y2k.month
    +
    +    @property
    +    def day(self):
    +        """
    +        :return:
    +            An integer from 1 to 31
    +        """
    +
    +        return self._y2k.day
    +
    +    @property
    +    def hour(self):
    +        """
    +        :return:
+            An integer from 0 to 23
    +        """
    +
    +        return self._y2k.hour
    +
    +    @property
    +    def minute(self):
    +        """
    +        :return:
+            An integer from 0 to 59
    +        """
    +
    +        return self._y2k.minute
    +
    +    @property
    +    def second(self):
    +        """
    +        :return:
+            An integer from 0 to 59
    +        """
    +
    +        return self._y2k.second
    +
    +    @property
    +    def microsecond(self):
    +        """
    +        :return:
    +            An integer from 0 to 999999
    +        """
    +
    +        return self._y2k.microsecond
    +
    +    @property
    +    def tzinfo(self):
    +        """
    +        :return:
+            If this object is timezone-aware, a datetime.tzinfo object, else None.
    +        """
    +
    +        return self._y2k.tzinfo
    +
    +    def utcoffset(self):
    +        """
    +        :return:
+            If this object is timezone-aware, a datetime.timedelta object, else None.
    +        """
    +
    +        return self._y2k.utcoffset()
    +
    +    def time(self):
    +        """
    +        :return:
    +            A datetime.time object
    +        """
    +
    +        return self._y2k.time()
    +
    +    def date(self):
    +        """
    +        :return:
    +            An asn1crypto.util.extended_date of the date
    +        """
    +
    +        return extended_date(0, self.month, self.day)
    +
    +    def strftime(self, format):
    +        """
    +        Performs strftime(), always returning a str
    +
    +        :param format:
    +            A strftime() format string
    +
    +        :return:
    +            A str of the formatted datetime
    +        """
    +
    +        # Format the datetime twice, once with year 2000, once with year 4000.
    +        # The only differences in the result will be in the millennium. Find them and replace by zeros.
    +        y2k = self._y2k.strftime(format)
    +        y4k = self._y2k.replace(year=4000).strftime(format)
    +        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
    +
    +    def isoformat(self, sep='T'):
    +        """
    +        Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
    +        date and time portions
    +
+        :param sep:
    +            A single character of the separator to place between the date and
    +            time
    +
    +        :return:
    +            The formatted datetime as a unicode string in Python 3 and a byte
    +            string in Python 2
    +        """
    +
    +        s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
    +        if self.microsecond:
    +            s += '.%06d' % self.microsecond
    +        return s + _format_offset(self.utcoffset())
    +
    +    def replace(self, year=None, *args, **kwargs):
    +        """
    +        Returns a new datetime.datetime or asn1crypto.util.extended_datetime
    +        object with the specified components replaced
    +
    +        :param year:
    +            The new year to substitute. None to keep it.
    +
    +        :param args:
    +            Other positional arguments; see datetime.datetime.replace.
    +
    +        :param kwargs:
    +            Other keyword arguments; see datetime.datetime.replace.
    +
    +        :return:
    +            A datetime.datetime or asn1crypto.util.extended_datetime object
    +        """
    +
    +        if year:
    +            return self._y2k.replace(year, *args, **kwargs)
    +
    +        return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))
    +
    +    def astimezone(self, tz):
    +        """
    +        Convert this extended_datetime to another timezone.
    +
    +        :param tz:
    +            A datetime.tzinfo object.
    +
    +        :return:
    +            A new extended_datetime or datetime.datetime object
    +        """
    +
    +        return extended_datetime.from_y2k(self._y2k.astimezone(tz))
    +
    +    def timestamp(self):
    +        """
+        Return the POSIX timestamp. Only supported on Python >= 3.3.
    +
    +        :return:
    +            A float representing the seconds since 1970-01-01 UTC. This will be a negative value.
    +        """
    +
    +        return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400
    +
    +    def __str__(self):
    +        """
    +        :return:
    +            A str representing this extended_datetime, e.g. "0000-01-01 00:00:00.000001-10:00"
    +        """
    +
    +        return self.isoformat(sep=' ')
    +
    +    def __eq__(self, other):
    +        """
    +        Compare two extended_datetime objects
    +
    +        :param other:
    +            The other extended_datetime to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        # Only compare against other datetime or extended_datetime objects
    +        if not isinstance(other, (self.__class__, datetime)):
    +            return False
    +
    +        # Offset-naive and offset-aware datetimes are never the same
    +        if (self.tzinfo is None) != (other.tzinfo is None):
    +            return False
    +
    +        return self.__cmp__(other) == 0
    +
    +    def __ne__(self, other):
    +        """
    +        Compare two extended_datetime objects
    +
    +        :param other:
    +            The other extended_datetime to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        return not self.__eq__(other)
    +
    +    def _comparison_error(self, other):
    +        """
    +        Raises a TypeError about the other object not being suitable for
    +        comparison
    +
    +        :param other:
    +            The object being compared to
    +        """
    +
    +        raise TypeError(unwrap(
    +            '''
    +            An asn1crypto.util.extended_datetime object can only be compared to
    +            an asn1crypto.util.extended_datetime or datetime.datetime object,
    +            not %s
    +            ''',
    +            type_name(other)
    +        ))
    +
    +    def __cmp__(self, other):
    +        """
    +        Compare two extended_datetime or datetime.datetime objects
    +
    +        :param other:
    +            The other extended_datetime or datetime.datetime object to compare to
    +
    +        :return:
    +            An integer smaller than, equal to, or larger than 0
    +        """
    +
    +        if not isinstance(other, (self.__class__, datetime)):
    +            self._comparison_error(other)
    +
    +        if (self.tzinfo is None) != (other.tzinfo is None):
    +            raise TypeError("can't compare offset-naive and offset-aware datetimes")
    +
    +        diff = self - other
    +        zero = timedelta(0)
    +        if diff < zero:
    +            return -1
    +        if diff > zero:
    +            return 1
    +        return 0
    +
    +    def __lt__(self, other):
    +        return self.__cmp__(other) < 0
    +
    +    def __le__(self, other):
    +        return self.__cmp__(other) <= 0
    +
    +    def __gt__(self, other):
    +        return self.__cmp__(other) > 0
    +
    +    def __ge__(self, other):
    +        return self.__cmp__(other) >= 0
    +
    +    def __add__(self, other):
    +        """
    +        Adds a timedelta
    +
    +        :param other:
    +            A datetime.timedelta object to add.
    +
    +        :return:
    +            A new extended_datetime or datetime.datetime object.
    +        """
    +
    +        return extended_datetime.from_y2k(self._y2k + other)
    +
    +    def __sub__(self, other):
    +        """
    +        Subtracts a timedelta or another datetime.
    +
    +        :param other:
    +            A datetime.timedelta or datetime.datetime or extended_datetime object to subtract.
    +
    +        :return:
    +            If a timedelta is passed, a new extended_datetime or datetime.datetime object.
    +            Else a datetime.timedelta object.
    +        """
    +
    +        if isinstance(other, timedelta):
    +            return extended_datetime.from_y2k(self._y2k - other)
    +
    +        if isinstance(other, extended_datetime):
    +            return self._y2k - other._y2k
    +
    +        if isinstance(other, datetime):
    +            return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)
    +
    +        return NotImplemented
    +
    +    def __rsub__(self, other):
    +        return -(self - other)
    +
    +    @classmethod
    +    def from_y2k(cls, value):
    +        """
    +        Revert substitution of year 2000.
    +
    +        :param value:
    +            A datetime.datetime object which is 2000 years in the future.
    +        :return:
    +            A new extended_datetime or datetime.datetime object.
    +        """
    +
    +        year = value.year - 2000
    +
    +        if year > 0:
    +            new_cls = datetime
    +        else:
    +            new_cls = cls
    +
    +        return new_cls(
    +            year,
    +            value.month,
    +            value.day,
    +            value.hour,
    +            value.minute,
    +            value.second,
    +            value.microsecond,
    +            value.tzinfo
    +        )
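+
+# A short sketch of extended_datetime (derived from the class above):
+#
+#   >>> dt = extended_datetime(0, 1, 1)     # midnight, January 1st of year 0
+#   >>> dt.isoformat()                      # '0000-01-01T00:00:00'
+#   >>> dt + timedelta(days=31)             # extended_datetime for 0000-02-01
+#   >>> (dt + timedelta(days=366)).year     # 1 - from_y2k() promotes it to a
+#   ...                                     # plain datetime (year 0 is a leap year)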
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/version.py b/server/www/packages/packages-windows/x86/asn1crypto/version.py
    new file mode 100644
    index 0000000..b7c352c
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/version.py
    @@ -0,0 +1,6 @@
    +# coding: utf-8
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +
    +__version__ = '1.3.0'
    +__version_info__ = (1, 3, 0)
    diff --git a/server/www/packages/packages-windows/x86/asn1crypto/x509.py b/server/www/packages/packages-windows/x86/asn1crypto/x509.py
    new file mode 100644
    index 0000000..2cce9a5
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/asn1crypto/x509.py
    @@ -0,0 +1,3031 @@
    +# coding: utf-8
    +
    +"""
    +ASN.1 type classes for X.509 certificates. Exports the following items:
    +
    + - Attributes()
    + - Certificate()
    + - Extensions()
    + - GeneralName()
    + - GeneralNames()
    + - Name()
    +
    +Other type classes are defined that help compose the types listed above.
    +"""
    +
    +from __future__ import unicode_literals, division, absolute_import, print_function
    +
    +from contextlib import contextmanager
    +from encodings import idna  # noqa
    +import hashlib
    +import re
    +import socket
    +import stringprep
    +import sys
    +import unicodedata
    +
    +from ._errors import unwrap
    +from ._iri import iri_to_uri, uri_to_iri
    +from ._ordereddict import OrderedDict
    +from ._types import type_name, str_cls, bytes_to_list
    +from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm
    +from .core import (
    +    Any,
    +    BitString,
    +    BMPString,
    +    Boolean,
    +    Choice,
    +    Concat,
    +    Enumerated,
    +    GeneralizedTime,
    +    GeneralString,
    +    IA5String,
    +    Integer,
    +    Null,
    +    NumericString,
    +    ObjectIdentifier,
    +    OctetBitString,
    +    OctetString,
    +    ParsableOctetString,
    +    PrintableString,
    +    Sequence,
    +    SequenceOf,
    +    Set,
    +    SetOf,
    +    TeletexString,
    +    UniversalString,
    +    UTCTime,
    +    UTF8String,
    +    VisibleString,
    +    VOID,
    +)
    +from .keys import PublicKeyInfo
    +from .util import int_to_bytes, int_from_bytes, inet_ntop, inet_pton
    +
    +
    +# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
    +# and a few other supplementary sources, mostly due to extra supported
    +# extension and name OIDs
    +
    +
    +class DNSName(IA5String):
    +
    +    _encoding = 'idna'
+    # In the wild we've seen DNS names encoded as UTF8String (tag 12) and
+    # PrintableString (tag 19) instead of IA5String
+    _bad_tag = (12, 19)
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.2
    +
    +        :param other:
    +            Another DNSName object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, DNSName):
    +            return False
    +
    +        return self.__unicode__().lower() == other.__unicode__().lower()
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the DNS name
    +
    +        :param value:
    +            A unicode string
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if value.startswith('.'):
    +            encoded_value = b'.' + value[1:].encode(self._encoding)
    +        else:
    +            encoded_value = value.encode(self._encoding)
    +
    +        self._unicode = value
    +        self.contents = encoded_value
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
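+# Sketch: per the RFC 5280 section 7.2 semantics implemented above, DNS name
+# equality is case-insensitive:
+#
+#   >>> DNSName('Example.COM') == DNSName('example.com')
+#   True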
    +
    +class URI(IA5String):
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the string
    +
    +        :param value:
    +            A unicode string
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        self._unicode = value
    +        self.contents = iri_to_uri(value)
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.4
    +
    +        :param other:
    +            Another URI object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, URI):
    +            return False
    +
    +        return iri_to_uri(self.native, True) == iri_to_uri(other.native, True)
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        if self.contents is None:
    +            return ''
    +        if self._unicode is None:
    +            self._unicode = uri_to_iri(self._merge_chunks())
    +        return self._unicode
    +
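+# Sketch (derived from set() above): non-ASCII characters in the IRI are
+# percent-encoded into the stored URI contents:
+#
+#   >>> URI('https://example.com/\u2713').contents
+#   b'https://example.com/%E2%9C%93'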
    +
    +class EmailAddress(IA5String):
    +
    +    _contents = None
    +
    +    # If the value has gone through the .set() method, thus normalizing it
    +    _normalized = False
    +
    +    # In the wild we've seen this encoded as a UTF8String and PrintableString
    +    _bad_tag = (12, 19)
    +
    +    @property
    +    def contents(self):
    +        """
    +        :return:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        return self._contents
    +
    +    @contents.setter
    +    def contents(self, value):
    +        """
    +        :param value:
    +            A byte string of the DER-encoded contents of the sequence
    +        """
    +
    +        self._normalized = False
    +        self._contents = value
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the string
    +
    +        :param value:
    +            A unicode string
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        if value.find('@') != -1:
    +            mailbox, hostname = value.rsplit('@', 1)
    +            encoded_value = mailbox.encode('ascii') + b'@' + hostname.encode('idna')
    +        else:
    +            encoded_value = value.encode('ascii')
    +
    +        self._normalized = True
    +        self._unicode = value
    +        self.contents = encoded_value
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
+        # We've seen this in the wild as a PrintableString, and since ASCII is a
+        # subset of cp1252, we use the latter for decoding to be more user-friendly
    +        if self._unicode is None:
    +            contents = self._merge_chunks()
    +            if contents.find(b'@') == -1:
    +                self._unicode = contents.decode('cp1252')
    +            else:
    +                mailbox, hostname = contents.rsplit(b'@', 1)
    +                self._unicode = mailbox.decode('cp1252') + '@' + hostname.decode('idna')
    +        return self._unicode
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.5
    +
    +        :param other:
    +            Another EmailAddress object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, EmailAddress):
    +            return False
    +
    +        if not self._normalized:
    +            self.set(self.native)
    +        if not other._normalized:
    +            other.set(other.native)
    +
    +        if self._contents.find(b'@') == -1 or other._contents.find(b'@') == -1:
    +            return self._contents == other._contents
    +
    +        other_mailbox, other_hostname = other._contents.rsplit(b'@', 1)
    +        mailbox, hostname = self._contents.rsplit(b'@', 1)
    +
    +        if mailbox != other_mailbox:
    +            return False
    +
    +        if hostname.lower() != other_hostname.lower():
    +            return False
    +
    +        return True
    +
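+# Sketch of the RFC 5280 section 7.5 comparison above: the hostname compares
+# case-insensitively, the mailbox (local part) case-sensitively:
+#
+#   >>> EmailAddress('will@Example.COM') == EmailAddress('will@example.com')
+#   True
+#   >>> EmailAddress('Will@example.com') == EmailAddress('will@example.com')
+#   False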
    +
    +class IPAddress(OctetString):
    +    def parse(self, spec=None, spec_params=None):
    +        """
    +        This method is not applicable to IP addresses
    +        """
    +
    +        raise ValueError(unwrap(
    +            '''
    +            IP address values can not be parsed
    +            '''
    +        ))
    +
    +    def set(self, value):
    +        """
    +        Sets the value of the object
    +
    +        :param value:
    +            A unicode string containing an IPv4 address, IPv4 address with CIDR,
    +            an IPv6 address or IPv6 address with CIDR
    +        """
    +
    +        if not isinstance(value, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                %s value must be a unicode string, not %s
    +                ''',
    +                type_name(self),
    +                type_name(value)
    +            ))
    +
    +        original_value = value
    +
    +        has_cidr = value.find('/') != -1
    +        cidr = 0
    +        if has_cidr:
    +            parts = value.split('/', 1)
    +            value = parts[0]
    +            cidr = int(parts[1])
    +            if cidr < 0:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value contains a CIDR range less than 0
    +                    ''',
    +                    type_name(self)
    +                ))
    +
    +        if value.find(':') != -1:
    +            family = socket.AF_INET6
    +            if cidr > 128:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value contains a CIDR range bigger than 128, the maximum
    +                    value for an IPv6 address
    +                    ''',
    +                    type_name(self)
    +                ))
    +            cidr_size = 128
    +        else:
    +            family = socket.AF_INET
    +            if cidr > 32:
    +                raise ValueError(unwrap(
    +                    '''
    +                    %s value contains a CIDR range bigger than 32, the maximum
    +                    value for an IPv4 address
    +                    ''',
    +                    type_name(self)
    +                ))
    +            cidr_size = 32
    +
    +        cidr_bytes = b''
    +        if has_cidr:
    +            cidr_mask = '1' * cidr
    +            cidr_mask += '0' * (cidr_size - len(cidr_mask))
    +            cidr_bytes = int_to_bytes(int(cidr_mask, 2))
    +            cidr_bytes = (b'\x00' * ((cidr_size // 8) - len(cidr_bytes))) + cidr_bytes
    +
    +        self._native = original_value
    +        self.contents = inet_pton(family, value) + cidr_bytes
    +        self._bytes = self.contents
    +        self._header = None
    +        if self._trailer != b'':
    +            self._trailer = b''
    +
    +    @property
    +    def native(self):
    +        """
    +        The native Python datatype representation of this value
    +
    +        :return:
    +            A unicode string or None
    +        """
    +
    +        if self.contents is None:
    +            return None
    +
    +        if self._native is None:
    +            byte_string = self.__bytes__()
    +            byte_len = len(byte_string)
    +            value = None
    +            cidr_int = None
    +            if byte_len in set([32, 16]):
    +                value = inet_ntop(socket.AF_INET6, byte_string[0:16])
    +                if byte_len > 16:
    +                    cidr_int = int_from_bytes(byte_string[16:])
    +            elif byte_len in set([8, 4]):
    +                value = inet_ntop(socket.AF_INET, byte_string[0:4])
    +                if byte_len > 4:
    +                    cidr_int = int_from_bytes(byte_string[4:])
    +            if cidr_int is not None:
    +                cidr_bits = '{0:b}'.format(cidr_int)
    +                cidr = len(cidr_bits.rstrip('0'))
    +                value = value + '/' + str_cls(cidr)
    +            self._native = value
    +        return self._native
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        :param other:
    +            Another IPAddress object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, IPAddress):
    +            return False
    +
    +        return self.__bytes__() == other.__bytes__()
    +
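+# Sketch (derived from set()/native above): with CIDR notation the encoded
+# contents are the address bytes followed by the netmask bytes:
+#
+#   >>> ip = IPAddress('192.168.0.0/24')
+#   >>> ip.contents   # b'\xc0\xa8\x00\x00\xff\xff\xff\x00'
+#   >>> ip.native     # '192.168.0.0/24'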
    +
    +class Attribute(Sequence):
    +    _fields = [
    +        ('type', ObjectIdentifier),
    +        ('values', SetOf, {'spec': Any}),
    +    ]
    +
    +
    +class Attributes(SequenceOf):
    +    _child_spec = Attribute
    +
    +
    +class KeyUsage(BitString):
    +    _map = {
    +        0: 'digital_signature',
    +        1: 'non_repudiation',
    +        2: 'key_encipherment',
    +        3: 'data_encipherment',
    +        4: 'key_agreement',
    +        5: 'key_cert_sign',
    +        6: 'crl_sign',
    +        7: 'encipher_only',
    +        8: 'decipher_only',
    +    }
    +
    +
    +class PrivateKeyUsagePeriod(Sequence):
    +    _fields = [
    +        ('not_before', GeneralizedTime, {'implicit': 0, 'optional': True}),
    +        ('not_after', GeneralizedTime, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class NotReallyTeletexString(TeletexString):
    +    """
    +    OpenSSL (and probably some other libraries) puts ISO-8859-1
    +    into TeletexString instead of ITU T.61. We use Windows-1252 when
    +    decoding since it is a superset of ISO-8859-1, and less likely to
    +    cause encoding issues, but we stay strict with encoding to prevent
    +    us from creating bad data.
    +    """
    +
    +    _decoding_encoding = 'cp1252'
    +
    +    def __unicode__(self):
    +        """
    +        :return:
    +            A unicode string
    +        """
    +
    +        if self.contents is None:
    +            return ''
    +        if self._unicode is None:
    +            self._unicode = self._merge_chunks().decode(self._decoding_encoding)
    +        return self._unicode
    +
    +
    +@contextmanager
    +def strict_teletex():
    +    try:
    +        NotReallyTeletexString._decoding_encoding = 'teletex'
    +        yield
    +    finally:
    +        NotReallyTeletexString._decoding_encoding = 'cp1252'
    +
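+# Usage sketch: decode TeletexString values strictly as T.61 instead of the
+# forgiving cp1252 default, only within the block ("cert" here is a
+# hypothetical, already-parsed Certificate):
+#
+#   >>> with strict_teletex():
+#   ...     subject = cert['tbs_certificate']['subject'].native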
    +
    +class DirectoryString(Choice):
    +    _alternatives = [
    +        ('teletex_string', NotReallyTeletexString),
    +        ('printable_string', PrintableString),
    +        ('universal_string', UniversalString),
    +        ('utf8_string', UTF8String),
    +        ('bmp_string', BMPString),
    +        # This is an invalid/bad alternative, but some broken certs use it
    +        ('ia5_string', IA5String),
    +    ]
    +
    +
    +class NameType(ObjectIdentifier):
    +    _map = {
    +        '2.5.4.3': 'common_name',
    +        '2.5.4.4': 'surname',
    +        '2.5.4.5': 'serial_number',
    +        '2.5.4.6': 'country_name',
    +        '2.5.4.7': 'locality_name',
    +        '2.5.4.8': 'state_or_province_name',
    +        '2.5.4.9': 'street_address',
    +        '2.5.4.10': 'organization_name',
    +        '2.5.4.11': 'organizational_unit_name',
    +        '2.5.4.12': 'title',
    +        '2.5.4.15': 'business_category',
    +        '2.5.4.17': 'postal_code',
    +        '2.5.4.20': 'telephone_number',
    +        '2.5.4.41': 'name',
    +        '2.5.4.42': 'given_name',
    +        '2.5.4.43': 'initials',
    +        '2.5.4.44': 'generation_qualifier',
    +        '2.5.4.45': 'unique_identifier',
    +        '2.5.4.46': 'dn_qualifier',
    +        '2.5.4.65': 'pseudonym',
    +        '2.5.4.97': 'organization_identifier',
    +        # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
    +        '2.23.133.2.1': 'tpm_manufacturer',
    +        '2.23.133.2.2': 'tpm_model',
    +        '2.23.133.2.3': 'tpm_version',
    +        '2.23.133.2.4': 'platform_manufacturer',
    +        '2.23.133.2.5': 'platform_model',
    +        '2.23.133.2.6': 'platform_version',
    +        # https://tools.ietf.org/html/rfc2985#page-26
    +        '1.2.840.113549.1.9.1': 'email_address',
    +        # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
    +        '1.3.6.1.4.1.311.60.2.1.1': 'incorporation_locality',
    +        '1.3.6.1.4.1.311.60.2.1.2': 'incorporation_state_or_province',
    +        '1.3.6.1.4.1.311.60.2.1.3': 'incorporation_country',
    +        # https://tools.ietf.org/html/rfc4519#section-2.39
    +        '0.9.2342.19200300.100.1.1': 'user_id',
    +        # https://tools.ietf.org/html/rfc2247#section-4
    +        '0.9.2342.19200300.100.1.25': 'domain_component',
    +        # http://www.alvestrand.no/objectid/0.2.262.1.10.7.20.html
    +        '0.2.262.1.10.7.20': 'name_distinguisher',
    +    }
    +
    +    # This order is largely based on observed order seen in EV certs from
    +    # Symantec and DigiCert. Some of the uncommon name-related fields are
    +    # just placed in what seems like a reasonable order.
    +    preferred_order = [
    +        'incorporation_country',
    +        'incorporation_state_or_province',
    +        'incorporation_locality',
    +        'business_category',
    +        'serial_number',
    +        'country_name',
    +        'postal_code',
    +        'state_or_province_name',
    +        'locality_name',
    +        'street_address',
    +        'organization_name',
    +        'organizational_unit_name',
    +        'title',
    +        'common_name',
    +        'user_id',
    +        'initials',
    +        'generation_qualifier',
    +        'surname',
    +        'given_name',
    +        'name',
    +        'pseudonym',
    +        'dn_qualifier',
    +        'telephone_number',
    +        'email_address',
    +        'domain_component',
    +        'name_distinguisher',
    +        'organization_identifier',
    +        'tpm_manufacturer',
    +        'tpm_model',
    +        'tpm_version',
    +        'platform_manufacturer',
    +        'platform_model',
    +        'platform_version',
    +    ]
    +
    +    @classmethod
    +    def preferred_ordinal(cls, attr_name):
    +        """
    +        Returns an ordering value for a particular attribute key.
    +
    +        Unrecognized attributes and OIDs will be sorted lexically at the end.
    +
    +        :return:
    +            An orderable value.
    +
    +        """
    +
    +        attr_name = cls.map(attr_name)
    +        if attr_name in cls.preferred_order:
    +            ordinal = cls.preferred_order.index(attr_name)
    +        else:
    +            ordinal = len(cls.preferred_order)
    +
    +        return (ordinal, attr_name)
    +
    +    @property
    +    def human_friendly(self):
    +        """
    +        :return:
    +            A human-friendly unicode string to display to users
    +        """
    +
    +        return {
    +            'common_name': 'Common Name',
    +            'surname': 'Surname',
    +            'serial_number': 'Serial Number',
    +            'country_name': 'Country',
    +            'locality_name': 'Locality',
    +            'state_or_province_name': 'State/Province',
    +            'street_address': 'Street Address',
    +            'organization_name': 'Organization',
    +            'organizational_unit_name': 'Organizational Unit',
    +            'title': 'Title',
    +            'business_category': 'Business Category',
    +            'postal_code': 'Postal Code',
    +            'telephone_number': 'Telephone Number',
    +            'name': 'Name',
    +            'given_name': 'Given Name',
    +            'initials': 'Initials',
    +            'generation_qualifier': 'Generation Qualifier',
    +            'unique_identifier': 'Unique Identifier',
    +            'dn_qualifier': 'DN Qualifier',
    +            'pseudonym': 'Pseudonym',
    +            'email_address': 'Email Address',
    +            'incorporation_locality': 'Incorporation Locality',
    +            'incorporation_state_or_province': 'Incorporation State/Province',
    +            'incorporation_country': 'Incorporation Country',
    +            'domain_component': 'Domain Component',
    +            'name_distinguisher': 'Name Distinguisher',
    +            'organization_identifier': 'Organization Identifier',
    +            'tpm_manufacturer': 'TPM Manufacturer',
    +            'tpm_model': 'TPM Model',
    +            'tpm_version': 'TPM Version',
    +            'platform_manufacturer': 'Platform Manufacturer',
    +            'platform_model': 'Platform Model',
    +            'platform_version': 'Platform Version',
    +            'user_id': 'User ID',
    +        }.get(self.native, self.native)
    +
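+# Sketch: preferred_ordinal() returns (index, name) tuples, so known
+# attributes sort by preferred_order and unknown ones sort lexically after:
+#
+#   >>> NameType.preferred_ordinal('common_name')
+#   (13, 'common_name')
+#   >>> NameType('2.5.4.3').human_friendly
+#   'Common Name'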
    +
    +class NameTypeAndValue(Sequence):
    +    _fields = [
    +        ('type', NameType),
    +        ('value', Any),
    +    ]
    +
    +    _oid_pair = ('type', 'value')
    +    _oid_specs = {
    +        'common_name': DirectoryString,
    +        'surname': DirectoryString,
    +        'serial_number': DirectoryString,
    +        'country_name': DirectoryString,
    +        'locality_name': DirectoryString,
    +        'state_or_province_name': DirectoryString,
    +        'street_address': DirectoryString,
    +        'organization_name': DirectoryString,
    +        'organizational_unit_name': DirectoryString,
    +        'title': DirectoryString,
    +        'business_category': DirectoryString,
    +        'postal_code': DirectoryString,
    +        'telephone_number': PrintableString,
    +        'name': DirectoryString,
    +        'given_name': DirectoryString,
    +        'initials': DirectoryString,
    +        'generation_qualifier': DirectoryString,
    +        'unique_identifier': OctetBitString,
    +        'dn_qualifier': DirectoryString,
    +        'pseudonym': DirectoryString,
    +        # https://tools.ietf.org/html/rfc2985#page-26
    +        'email_address': EmailAddress,
    +        # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
    +        'incorporation_locality': DirectoryString,
    +        'incorporation_state_or_province': DirectoryString,
    +        'incorporation_country': DirectoryString,
    +        'domain_component': DNSName,
    +        'name_distinguisher': DirectoryString,
    +        'organization_identifier': DirectoryString,
    +        'tpm_manufacturer': UTF8String,
    +        'tpm_model': UTF8String,
    +        'tpm_version': UTF8String,
    +        'platform_manufacturer': UTF8String,
    +        'platform_model': UTF8String,
    +        'platform_version': UTF8String,
    +        'user_id': DirectoryString,
    +    }
    +
    +    _prepped = None
    +
    +    @property
    +    def prepped_value(self):
    +        """
    +        Returns the value after being processed by the internationalized string
    +        preparation as specified by RFC 5280
    +
    +        :return:
    +            A unicode string
    +        """
    +
    +        if self._prepped is None:
    +            self._prepped = self._ldap_string_prep(self['value'].native)
    +        return self._prepped
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
    +
    +        :param other:
    +            Another NameTypeAndValue object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, NameTypeAndValue):
    +            return False
    +
    +        if other['type'].native != self['type'].native:
    +            return False
    +
    +        return other.prepped_value == self.prepped_value
    +
    +    def _ldap_string_prep(self, string):
    +        """
    +        Implements the internationalized string preparation algorithm from
    +        RFC 4518. https://tools.ietf.org/html/rfc4518#section-2
    +
    +        :param string:
    +            A unicode string to prepare
    +
    +        :return:
    +            A prepared unicode string, ready for comparison
    +        """
    +
    +        # Map step
    +        string = re.sub('[\u00ad\u1806\u034f\u180b-\u180d\ufe0f-\uff00\ufffc]+', '', string)
    +        string = re.sub('[\u0009\u000a\u000b\u000c\u000d\u0085]', ' ', string)
    +        if sys.maxunicode == 0xffff:
    +            # Some installs of Python 2.7 don't support 8-digit unicode escape
    +            # ranges, so we have to break them into pieces
    +            # Original was: \U0001D173-\U0001D17A and \U000E0020-\U000E007F
    +            string = re.sub('\ud834[\udd73-\udd7a]|\udb40[\udc20-\udc7f]|\U000e0001', '', string)
    +        else:
    +            string = re.sub('[\U0001D173-\U0001D17A\U000E0020-\U000E007F\U000e0001]', '', string)
    +        string = re.sub(
    +            '[\u0000-\u0008\u000e-\u001f\u007f-\u0084\u0086-\u009f\u06dd\u070f\u180e\u200c-\u200f'
    +            '\u202a-\u202e\u2060-\u2063\u206a-\u206f\ufeff\ufff9-\ufffb]+',
    +            '',
    +            string
    +        )
    +        string = string.replace('\u200b', '')
    +        string = re.sub('[\u00a0\u1680\u2000-\u200a\u2028-\u2029\u202f\u205f\u3000]', ' ', string)
    +
    +        string = ''.join(map(stringprep.map_table_b2, string))
    +
    +        # Normalize step
    +        string = unicodedata.normalize('NFKC', string)
    +
    +        # Prohibit step
    +        for char in string:
    +            if stringprep.in_table_a1(char):
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain unassigned code points
    +                    '''
    +                ))
    +
    +            if stringprep.in_table_c8(char):
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain change display or
+                    deprecated characters
    +                    '''
    +                ))
    +
    +            if stringprep.in_table_c3(char):
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain private use characters
    +                    '''
    +                ))
    +
    +            if stringprep.in_table_c4(char):
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain non-character code points
    +                    '''
    +                ))
    +
    +            if stringprep.in_table_c5(char):
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain surrogate code points
    +                    '''
    +                ))
    +
    +            if char == '\ufffd':
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name objects may not contain the replacement character
    +                    '''
    +                ))
    +
    +        # Check bidirectional step - here we ensure that we are not mixing
    +        # left-to-right and right-to-left text in the string
    +        has_r_and_al_cat = False
    +        has_l_cat = False
    +        for char in string:
    +            if stringprep.in_table_d1(char):
    +                has_r_and_al_cat = True
    +            elif stringprep.in_table_d2(char):
    +                has_l_cat = True
    +
    +        if has_r_and_al_cat:
    +            first_is_r_and_al = stringprep.in_table_d1(string[0])
    +            last_is_r_and_al = stringprep.in_table_d1(string[-1])
    +
    +            if has_l_cat or not first_is_r_and_al or not last_is_r_and_al:
    +                raise ValueError(unwrap(
    +                    '''
    +                    X.509 Name object contains a malformed bidirectional
    +                    sequence
    +                    '''
    +                ))
    +
    +        # Insignificant space handling step
    +        string = ' ' + re.sub(' +', '  ', string).strip() + ' '
    +
    +        return string
    +
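+# Worked sketch of the prep above: map_table_b2 case-folds, NFKC normalizes,
+# and the final step collapses inner space runs to two spaces while framing
+# the result with single spaces (RFC 4518 insignificant space handling):
+#
+#   >>> NameTypeAndValue()._ldap_string_prep('Will    Bond')
+#   ' will  bond '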
    +
    +class RelativeDistinguishedName(SetOf):
    +    _child_spec = NameTypeAndValue
    +
    +    @property
    +    def hashable(self):
    +        """
    +        :return:
    +            A unicode string that can be used as a dict key or in a set
    +        """
    +
    +        output = []
    +        values = self._get_values(self)
    +        for key in sorted(values.keys()):
    +            output.append('%s: %s' % (key, values[key]))
    +        # Unit separator is used here since the normalization process for
+        # values removes any such character, and the keys are all dotted integers
    +        # or under_score_words
    +        return '\x1F'.join(output)
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
    +
    +        :param other:
    +            Another RelativeDistinguishedName object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, RelativeDistinguishedName):
    +            return False
    +
    +        if len(self) != len(other):
    +            return False
    +
    +        self_types = self._get_types(self)
    +        other_types = self._get_types(other)
    +
    +        if self_types != other_types:
    +            return False
    +
    +        self_values = self._get_values(self)
    +        other_values = self._get_values(other)
    +
    +        for type_name_ in self_types:
    +            if self_values[type_name_] != other_values[type_name_]:
    +                return False
    +
    +        return True
    +
    +    def _get_types(self, rdn):
    +        """
    +        Returns a set of types contained in an RDN
    +
    +        :param rdn:
    +            A RelativeDistinguishedName object
    +
    +        :return:
    +            A set object with unicode strings of NameTypeAndValue type field
    +            values
    +        """
    +
    +        return set([ntv['type'].native for ntv in rdn])
    +
    +    def _get_values(self, rdn):
    +        """
    +        Returns a dict of prepped values contained in an RDN
    +
    +        :param rdn:
    +            A RelativeDistinguishedName object
    +
    +        :return:
    +            A dict object with unicode strings of NameTypeAndValue value field
    +            values that have been prepped for comparison
    +        """
    +
    +        output = {}
+        for ntv in rdn:
+            output[ntv['type'].native] = ntv.prepped_value
    +        return output
    +
    +
    +class RDNSequence(SequenceOf):
    +    _child_spec = RelativeDistinguishedName
    +
    +    @property
    +    def hashable(self):
    +        """
    +        :return:
    +            A unicode string that can be used as a dict key or in a set
    +        """
    +
    +        # Record separator is used here since the normalization process for
+        # values removes any such character, and the keys are all dotted integers
    +        # or under_score_words
    +        return '\x1E'.join(rdn.hashable for rdn in self)
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
    +
    +        :param other:
    +            Another RDNSequence object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, RDNSequence):
    +            return False
    +
    +        if len(self) != len(other):
    +            return False
    +
    +        for index, self_rdn in enumerate(self):
    +            if other[index] != self_rdn:
    +                return False
    +
    +        return True
    +
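+# Note (a sketch of the format): an RDNSequence hashes to 'type: prepped-value'
+# pairs joined by '\x1F' within each RDN, with the RDNs themselves joined by
+# '\x1E' - stable enough to use as a dict key or set member.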
    +
    +class Name(Choice):
    +    _alternatives = [
    +        ('', RDNSequence),
    +    ]
    +
    +    _human_friendly = None
    +    _sha1 = None
    +    _sha256 = None
    +
    +    @classmethod
    +    def build(cls, name_dict, use_printable=False):
    +        """
    +        Creates a Name object from a dict of unicode string keys and values.
    +        The keys should be from NameType._map, or a dotted-integer OID unicode
    +        string.
    +
    +        :param name_dict:
    +            A dict of name information, e.g. {"common_name": "Will Bond",
    +            "country_name": "US", "organization": "Codex Non Sufficit LC"}
    +
    +        :param use_printable:
    +            A bool - if PrintableString should be used for encoding instead of
    +            UTF8String. This is for backwards compatibility with old software.
    +
    +        :return:
    +            An x509.Name object
    +        """
    +
    +        rdns = []
    +        if not use_printable:
    +            encoding_name = 'utf8_string'
    +            encoding_class = UTF8String
    +        else:
    +            encoding_name = 'printable_string'
    +            encoding_class = PrintableString
    +
    +        # Sort the attributes according to NameType.preferred_order
    +        name_dict = OrderedDict(
    +            sorted(
    +                name_dict.items(),
    +                key=lambda item: NameType.preferred_ordinal(item[0])
    +            )
    +        )
    +
    +        for attribute_name, attribute_value in name_dict.items():
    +            attribute_name = NameType.map(attribute_name)
    +            if attribute_name == 'email_address':
    +                value = EmailAddress(attribute_value)
    +            elif attribute_name == 'domain_component':
    +                value = DNSName(attribute_value)
    +            elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
    +                value = DirectoryString(
    +                    name='printable_string',
    +                    value=PrintableString(attribute_value)
    +                )
    +            else:
    +                value = DirectoryString(
    +                    name=encoding_name,
    +                    value=encoding_class(attribute_value)
    +                )
    +
    +            rdns.append(RelativeDistinguishedName([
    +                NameTypeAndValue({
    +                    'type': attribute_name,
    +                    'value': value
    +                })
    +            ]))
    +
    +        return cls(name='', value=RDNSequence(rdns))
    +
    +    @property
    +    def hashable(self):
    +        """
    +        :return:
    +            A unicode string that can be used as a dict key or in a set
    +        """
    +
    +        return self.chosen.hashable
    +
    +    def __len__(self):
    +        return len(self.chosen)
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
    +
    +        :param other:
    +            Another Name object
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if not isinstance(other, Name):
    +            return False
    +        return self.chosen == other.chosen
    +
    +    @property
    +    def native(self):
    +        if self._native is None:
    +            self._native = OrderedDict()
    +            for rdn in self.chosen.native:
    +                for type_val in rdn:
    +                    field_name = type_val['type']
    +                    if field_name in self._native:
    +                        existing = self._native[field_name]
    +                        if not isinstance(existing, list):
    +                            existing = self._native[field_name] = [existing]
    +                        existing.append(type_val['value'])
    +                    else:
    +                        self._native[field_name] = type_val['value']
    +        return self._native
    +
    +    @property
    +    def human_friendly(self):
    +        """
    +        :return:
    +            A human-friendly unicode string containing the parts of the name
    +        """
    +
    +        if self._human_friendly is None:
    +            data = OrderedDict()
    +            last_field = None
    +            for rdn in self.chosen:
    +                for type_val in rdn:
    +                    field_name = type_val['type'].human_friendly
    +                    last_field = field_name
+                    if field_name in data:
+                        existing = data[field_name]
+                        if not isinstance(existing, list):
+                            existing = data[field_name] = [existing]
+                        existing.append(type_val['value'])
+                    else:
+                        data[field_name] = type_val['value']
    +            to_join = []
    +            keys = data.keys()
    +            if last_field == 'Country':
    +                keys = reversed(list(keys))
    +            for key in keys:
    +                value = data[key]
    +                native_value = self._recursive_humanize(value)
    +                to_join.append('%s: %s' % (key, native_value))
    +
    +            has_comma = False
    +            for element in to_join:
    +                if element.find(',') != -1:
    +                    has_comma = True
    +                    break
    +
    +            separator = ', ' if not has_comma else '; '
    +            self._human_friendly = separator.join(to_join[::-1])
    +
    +        return self._human_friendly
    +
    +    def _recursive_humanize(self, value):
    +        """
    +        Recursively serializes data compiled from the RDNSequence
    +
    +        :param value:
    +            An Asn1Value object, or a list of Asn1Value objects
    +
    +        :return:
    +            A unicode string
    +        """
    +
    +        if isinstance(value, list):
+            return ', '.join(
    +                reversed([self._recursive_humanize(sub_value) for sub_value in value])
    +            )
    +        return value.native
    +
    +    @property
    +    def sha1(self):
    +        """
    +        :return:
    +            The SHA1 hash of the DER-encoded bytes of this name
    +        """
    +
    +        if self._sha1 is None:
    +            self._sha1 = hashlib.sha1(self.dump()).digest()
    +        return self._sha1
    +
    +    @property
    +    def sha256(self):
    +        """
    +        :return:
    +            The SHA-256 hash of the DER-encoded bytes of this name
    +        """
    +
    +        if self._sha256 is None:
    +            self._sha256 = hashlib.sha256(self.dump()).digest()
    +        return self._sha256
    +
    +
    +class AnotherName(Sequence):
    +    _fields = [
    +        ('type_id', ObjectIdentifier),
    +        ('value', Any, {'explicit': 0}),
    +    ]
    +
    +
    +class CountryName(Choice):
    +    class_ = 1
    +    tag = 1
    +
    +    _alternatives = [
    +        ('x121_dcc_code', NumericString),
    +        ('iso_3166_alpha2_code', PrintableString),
    +    ]
    +
    +
    +class AdministrationDomainName(Choice):
    +    class_ = 1
    +    tag = 2
    +
    +    _alternatives = [
    +        ('numeric', NumericString),
    +        ('printable', PrintableString),
    +    ]
    +
    +
    +class PrivateDomainName(Choice):
    +    _alternatives = [
    +        ('numeric', NumericString),
    +        ('printable', PrintableString),
    +    ]
    +
    +
    +class PersonalName(Set):
    +    _fields = [
    +        ('surname', PrintableString, {'implicit': 0}),
    +        ('given_name', PrintableString, {'implicit': 1, 'optional': True}),
    +        ('initials', PrintableString, {'implicit': 2, 'optional': True}),
    +        ('generation_qualifier', PrintableString, {'implicit': 3, 'optional': True}),
    +    ]
    +
    +
    +class TeletexPersonalName(Set):
    +    _fields = [
    +        ('surname', TeletexString, {'implicit': 0}),
    +        ('given_name', TeletexString, {'implicit': 1, 'optional': True}),
    +        ('initials', TeletexString, {'implicit': 2, 'optional': True}),
    +        ('generation_qualifier', TeletexString, {'implicit': 3, 'optional': True}),
    +    ]
    +
    +
    +class OrganizationalUnitNames(SequenceOf):
    +    _child_spec = PrintableString
    +
    +
    +class TeletexOrganizationalUnitNames(SequenceOf):
    +    _child_spec = TeletexString
    +
    +
    +class BuiltInStandardAttributes(Sequence):
    +    _fields = [
    +        ('country_name', CountryName, {'optional': True}),
    +        ('administration_domain_name', AdministrationDomainName, {'optional': True}),
    +        ('network_address', NumericString, {'implicit': 0, 'optional': True}),
    +        ('terminal_identifier', PrintableString, {'implicit': 1, 'optional': True}),
    +        ('private_domain_name', PrivateDomainName, {'explicit': 2, 'optional': True}),
    +        ('organization_name', PrintableString, {'implicit': 3, 'optional': True}),
    +        ('numeric_user_identifier', NumericString, {'implicit': 4, 'optional': True}),
    +        ('personal_name', PersonalName, {'implicit': 5, 'optional': True}),
    +        ('organizational_unit_names', OrganizationalUnitNames, {'implicit': 6, 'optional': True}),
    +    ]
    +
    +
    +class BuiltInDomainDefinedAttribute(Sequence):
    +    _fields = [
    +        ('type', PrintableString),
    +        ('value', PrintableString),
    +    ]
    +
    +
    +class BuiltInDomainDefinedAttributes(SequenceOf):
    +    _child_spec = BuiltInDomainDefinedAttribute
    +
    +
    +class TeletexDomainDefinedAttribute(Sequence):
    +    _fields = [
    +        ('type', TeletexString),
    +        ('value', TeletexString),
    +    ]
    +
    +
    +class TeletexDomainDefinedAttributes(SequenceOf):
    +    _child_spec = TeletexDomainDefinedAttribute
    +
    +
    +class PhysicalDeliveryCountryName(Choice):
    +    _alternatives = [
    +        ('x121_dcc_code', NumericString),
    +        ('iso_3166_alpha2_code', PrintableString),
    +    ]
    +
    +
    +class PostalCode(Choice):
    +    _alternatives = [
    +        ('numeric_code', NumericString),
    +        ('printable_code', PrintableString),
    +    ]
    +
    +
    +class PDSParameter(Set):
    +    _fields = [
    +        ('printable_string', PrintableString, {'optional': True}),
    +        ('teletex_string', TeletexString, {'optional': True}),
    +    ]
    +
    +
    +class PrintableAddress(SequenceOf):
    +    _child_spec = PrintableString
    +
    +
    +class UnformattedPostalAddress(Set):
    +    _fields = [
    +        ('printable_address', PrintableAddress, {'optional': True}),
    +        ('teletex_string', TeletexString, {'optional': True}),
    +    ]
    +
    +
    +class E1634Address(Sequence):
    +    _fields = [
    +        ('number', NumericString, {'implicit': 0}),
    +        ('sub_address', NumericString, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class NAddresses(SetOf):
    +    _child_spec = OctetString
    +
    +
    +class PresentationAddress(Sequence):
    +    _fields = [
    +        ('p_selector', OctetString, {'explicit': 0, 'optional': True}),
    +        ('s_selector', OctetString, {'explicit': 1, 'optional': True}),
    +        ('t_selector', OctetString, {'explicit': 2, 'optional': True}),
    +        ('n_addresses', NAddresses, {'explicit': 3}),
    +    ]
    +
    +
    +class ExtendedNetworkAddress(Choice):
    +    _alternatives = [
    +        ('e163_4_address', E1634Address),
    +        ('psap_address', PresentationAddress, {'implicit': 0})
    +    ]
    +
    +
    +class TerminalType(Integer):
    +    _map = {
    +        3: 'telex',
    +        4: 'teletex',
    +        5: 'g3_facsimile',
    +        6: 'g4_facsimile',
    +        7: 'ia5_terminal',
    +        8: 'videotex',
    +    }
    +
    +
    +class ExtensionAttributeType(Integer):
    +    _map = {
    +        1: 'common_name',
    +        2: 'teletex_common_name',
    +        3: 'teletex_organization_name',
    +        4: 'teletex_personal_name',
    +        5: 'teletex_organization_unit_names',
    +        6: 'teletex_domain_defined_attributes',
    +        7: 'pds_name',
    +        8: 'physical_delivery_country_name',
    +        9: 'postal_code',
    +        10: 'physical_delivery_office_name',
    +        11: 'physical_delivery_office_number',
    +        12: 'extension_of_address_components',
    +        13: 'physical_delivery_personal_name',
    +        14: 'physical_delivery_organization_name',
    +        15: 'extension_physical_delivery_address_components',
    +        16: 'unformatted_postal_address',
    +        17: 'street_address',
    +        18: 'post_office_box_address',
    +        19: 'poste_restante_address',
    +        20: 'unique_postal_name',
    +        21: 'local_postal_attributes',
    +        22: 'extended_network_address',
    +        23: 'terminal_type',
    +    }
    +
    +
    +class ExtensionAttribute(Sequence):
    +    _fields = [
    +        ('extension_attribute_type', ExtensionAttributeType, {'implicit': 0}),
    +        ('extension_attribute_value', Any, {'explicit': 1}),
    +    ]
    +
    +    _oid_pair = ('extension_attribute_type', 'extension_attribute_value')
    +    _oid_specs = {
    +        'common_name': PrintableString,
    +        'teletex_common_name': TeletexString,
    +        'teletex_organization_name': TeletexString,
    +        'teletex_personal_name': TeletexPersonalName,
    +        'teletex_organization_unit_names': TeletexOrganizationalUnitNames,
    +        'teletex_domain_defined_attributes': TeletexDomainDefinedAttributes,
    +        'pds_name': PrintableString,
    +        'physical_delivery_country_name': PhysicalDeliveryCountryName,
    +        'postal_code': PostalCode,
    +        'physical_delivery_office_name': PDSParameter,
    +        'physical_delivery_office_number': PDSParameter,
    +        'extension_of_address_components': PDSParameter,
    +        'physical_delivery_personal_name': PDSParameter,
    +        'physical_delivery_organization_name': PDSParameter,
    +        'extension_physical_delivery_address_components': PDSParameter,
    +        'unformatted_postal_address': UnformattedPostalAddress,
    +        'street_address': PDSParameter,
    +        'post_office_box_address': PDSParameter,
    +        'poste_restante_address': PDSParameter,
    +        'unique_postal_name': PDSParameter,
    +        'local_postal_attributes': PDSParameter,
    +        'extended_network_address': ExtendedNetworkAddress,
    +        'terminal_type': TerminalType,
    +    }
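+
+    # `_oid_pair`/`_oid_specs` is the mechanism used throughout this module
+    # for late-binding an Any field: once 'extension_attribute_type' has
+    # been parsed, the wrapped 'extension_attribute_value' is interpreted
+    # using the spec mapped above.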
    +
    +
    +class ExtensionAttributes(SequenceOf):
    +    _child_spec = ExtensionAttribute
    +
    +
    +class ORAddress(Sequence):
    +    _fields = [
    +        ('built_in_standard_attributes', BuiltInStandardAttributes),
    +        ('built_in_domain_defined_attributes', BuiltInDomainDefinedAttributes, {'optional': True}),
    +        ('extension_attributes', ExtensionAttributes, {'optional': True}),
    +    ]
    +
    +
    +class EDIPartyName(Sequence):
    +    _fields = [
    +        ('name_assigner', DirectoryString, {'implicit': 0, 'optional': True}),
    +        ('party_name', DirectoryString, {'implicit': 1}),
    +    ]
    +
    +
    +class GeneralName(Choice):
    +    _alternatives = [
    +        ('other_name', AnotherName, {'implicit': 0}),
    +        ('rfc822_name', EmailAddress, {'implicit': 1}),
    +        ('dns_name', DNSName, {'implicit': 2}),
    +        ('x400_address', ORAddress, {'implicit': 3}),
    +        ('directory_name', Name, {'explicit': 4}),
    +        ('edi_party_name', EDIPartyName, {'implicit': 5}),
    +        ('uniform_resource_identifier', URI, {'implicit': 6}),
    +        ('ip_address', IPAddress, {'implicit': 7}),
    +        ('registered_id', ObjectIdentifier, {'implicit': 8}),
    +    ]
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __eq__(self, other):
    +        """
    +        Does not support other_name, x400_address or edi_party_name
    +
    +        :param other:
    +            The other GeneralName to compare to
    +
    +        :return:
    +            A boolean
    +        """
    +
    +        if self.name in ('other_name', 'x400_address', 'edi_party_name'):
    +            raise ValueError(unwrap(
    +                '''
    +                Comparison is not supported for GeneralName objects of
    +                choice %s
    +                ''',
    +                self.name
    +            ))
    +
    +        if other.name in ('other_name', 'x400_address', 'edi_party_name'):
    +            raise ValueError(unwrap(
    +                '''
    +                Comparison is not supported for GeneralName objects of choice
    +                %s''',
    +                other.name
    +            ))
    +
    +        if self.name != other.name:
    +            return False
    +
    +        return self.chosen == other.chosen
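+
+    # Note: comparing the unsupported choices listed above raises ValueError
+    # rather than returning False, so callers should check `.name` before
+    # comparing untrusted GeneralName values.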
    +
    +
    +class GeneralNames(SequenceOf):
    +    _child_spec = GeneralName
    +
    +
    +class Time(Choice):
    +    _alternatives = [
    +        ('utc_time', UTCTime),
    +        ('general_time', GeneralizedTime),
    +    ]
    +
    +
    +class Validity(Sequence):
    +    _fields = [
    +        ('not_before', Time),
    +        ('not_after', Time),
    +    ]
    +
    +
    +class BasicConstraints(Sequence):
    +    _fields = [
    +        ('ca', Boolean, {'default': False}),
    +        ('path_len_constraint', Integer, {'optional': True}),
    +    ]
    +
    +
    +class AuthorityKeyIdentifier(Sequence):
    +    _fields = [
    +        ('key_identifier', OctetString, {'implicit': 0, 'optional': True}),
    +        ('authority_cert_issuer', GeneralNames, {'implicit': 1, 'optional': True}),
    +        ('authority_cert_serial_number', Integer, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +
    +class DistributionPointName(Choice):
    +    _alternatives = [
    +        ('full_name', GeneralNames, {'implicit': 0}),
    +        ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'implicit': 1}),
    +    ]
    +
    +
    +class ReasonFlags(BitString):
    +    _map = {
    +        0: 'unused',
    +        1: 'key_compromise',
    +        2: 'ca_compromise',
    +        3: 'affiliation_changed',
    +        4: 'superseded',
    +        5: 'cessation_of_operation',
    +        6: 'certificate_hold',
    +        7: 'privilege_withdrawn',
    +        8: 'aa_compromise',
    +    }
    +
    +
    +class GeneralSubtree(Sequence):
    +    _fields = [
    +        ('base', GeneralName),
    +        ('minimum', Integer, {'implicit': 0, 'default': 0}),
    +        ('maximum', Integer, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class GeneralSubtrees(SequenceOf):
    +    _child_spec = GeneralSubtree
    +
    +
    +class NameConstraints(Sequence):
    +    _fields = [
    +        ('permitted_subtrees', GeneralSubtrees, {'implicit': 0, 'optional': True}),
    +        ('excluded_subtrees', GeneralSubtrees, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class DistributionPoint(Sequence):
    +    _fields = [
    +        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
    +        ('reasons', ReasonFlags, {'implicit': 1, 'optional': True}),
    +        ('crl_issuer', GeneralNames, {'implicit': 2, 'optional': True}),
    +    ]
    +
    +    _url = False
    +
    +    @property
    +    def url(self):
    +        """
    +        :return:
    +            None or a unicode string of the distribution point's URL
    +        """
    +
    +        if self._url is False:
    +            self._url = None
    +            name = self['distribution_point']
    +            if name.name != 'full_name':
    +                raise ValueError(unwrap(
    +                    '''
    +                    CRL distribution points that are relative to the issuer are
    +                    not supported
    +                    '''
    +                ))
    +
    +            for general_name in name.chosen:
    +                if general_name.name == 'uniform_resource_identifier':
    +                    url = general_name.native
    +                    if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
    +                        self._url = url
    +                        break
    +
    +        return self._url
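+
+    # Usage sketch (illustrative): for a DistributionPoint `point` taken
+    # from a parsed certificate, `point.url` lazily returns the first
+    # http/https/ldap/ldaps URL, or None if only other name forms are
+    # present.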
    +
    +
    +class CRLDistributionPoints(SequenceOf):
    +    _child_spec = DistributionPoint
    +
    +
    +class DisplayText(Choice):
    +    _alternatives = [
    +        ('ia5_string', IA5String),
    +        ('visible_string', VisibleString),
    +        ('bmp_string', BMPString),
    +        ('utf8_string', UTF8String),
    +    ]
    +
    +
    +class NoticeNumbers(SequenceOf):
    +    _child_spec = Integer
    +
    +
    +class NoticeReference(Sequence):
    +    _fields = [
    +        ('organization', DisplayText),
    +        ('notice_numbers', NoticeNumbers),
    +    ]
    +
    +
    +class UserNotice(Sequence):
    +    _fields = [
    +        ('notice_ref', NoticeReference, {'optional': True}),
    +        ('explicit_text', DisplayText, {'optional': True}),
    +    ]
    +
    +
    +class PolicyQualifierId(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.2.1': 'certification_practice_statement',
    +        '1.3.6.1.5.5.7.2.2': 'user_notice',
    +    }
    +
    +
    +class PolicyQualifierInfo(Sequence):
    +    _fields = [
    +        ('policy_qualifier_id', PolicyQualifierId),
    +        ('qualifier', Any),
    +    ]
    +
    +    _oid_pair = ('policy_qualifier_id', 'qualifier')
    +    _oid_specs = {
    +        'certification_practice_statement': IA5String,
    +        'user_notice': UserNotice,
    +    }
    +
    +
    +class PolicyQualifierInfos(SequenceOf):
    +    _child_spec = PolicyQualifierInfo
    +
    +
    +class PolicyIdentifier(ObjectIdentifier):
    +    _map = {
    +        '2.5.29.32.0': 'any_policy',
    +    }
    +
    +
    +class PolicyInformation(Sequence):
    +    _fields = [
    +        ('policy_identifier', PolicyIdentifier),
    +        ('policy_qualifiers', PolicyQualifierInfos, {'optional': True})
    +    ]
    +
    +
    +class CertificatePolicies(SequenceOf):
    +    _child_spec = PolicyInformation
    +
    +
    +class PolicyMapping(Sequence):
    +    _fields = [
    +        ('issuer_domain_policy', PolicyIdentifier),
    +        ('subject_domain_policy', PolicyIdentifier),
    +    ]
    +
    +
    +class PolicyMappings(SequenceOf):
    +    _child_spec = PolicyMapping
    +
    +
    +class PolicyConstraints(Sequence):
    +    _fields = [
    +        ('require_explicit_policy', Integer, {'implicit': 0, 'optional': True}),
    +        ('inhibit_policy_mapping', Integer, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class KeyPurposeId(ObjectIdentifier):
    +    _map = {
    +        # https://tools.ietf.org/html/rfc5280#page-45
    +        '2.5.29.37.0': 'any_extended_key_usage',
    +        '1.3.6.1.5.5.7.3.1': 'server_auth',
    +        '1.3.6.1.5.5.7.3.2': 'client_auth',
    +        '1.3.6.1.5.5.7.3.3': 'code_signing',
    +        '1.3.6.1.5.5.7.3.4': 'email_protection',
    +        '1.3.6.1.5.5.7.3.5': 'ipsec_end_system',
    +        '1.3.6.1.5.5.7.3.6': 'ipsec_tunnel',
    +        '1.3.6.1.5.5.7.3.7': 'ipsec_user',
    +        '1.3.6.1.5.5.7.3.8': 'time_stamping',
    +        '1.3.6.1.5.5.7.3.9': 'ocsp_signing',
    +        # http://tools.ietf.org/html/rfc3029.html#page-9
    +        '1.3.6.1.5.5.7.3.10': 'dvcs',
    +        # http://tools.ietf.org/html/rfc6268.html#page-16
    +        '1.3.6.1.5.5.7.3.13': 'eap_over_ppp',
    +        '1.3.6.1.5.5.7.3.14': 'eap_over_lan',
    +        # https://tools.ietf.org/html/rfc5055#page-76
    +        '1.3.6.1.5.5.7.3.15': 'scvp_server',
    +        '1.3.6.1.5.5.7.3.16': 'scvp_client',
    +        # https://tools.ietf.org/html/rfc4945#page-31
    +        '1.3.6.1.5.5.7.3.17': 'ipsec_ike',
    +        # https://tools.ietf.org/html/rfc5415#page-38
    +        '1.3.6.1.5.5.7.3.18': 'capwap_ac',
    +        '1.3.6.1.5.5.7.3.19': 'capwap_wtp',
    +        # https://tools.ietf.org/html/rfc5924#page-8
    +        '1.3.6.1.5.5.7.3.20': 'sip_domain',
    +        # https://tools.ietf.org/html/rfc6187#page-7
    +        '1.3.6.1.5.5.7.3.21': 'secure_shell_client',
    +        '1.3.6.1.5.5.7.3.22': 'secure_shell_server',
    +        # https://tools.ietf.org/html/rfc6494#page-7
    +        '1.3.6.1.5.5.7.3.23': 'send_router',
    +        '1.3.6.1.5.5.7.3.24': 'send_proxied_router',
    +        '1.3.6.1.5.5.7.3.25': 'send_owner',
    +        '1.3.6.1.5.5.7.3.26': 'send_proxied_owner',
    +        # https://tools.ietf.org/html/rfc6402#page-10
    +        '1.3.6.1.5.5.7.3.27': 'cmc_ca',
    +        '1.3.6.1.5.5.7.3.28': 'cmc_ra',
    +        '1.3.6.1.5.5.7.3.29': 'cmc_archive',
    +        # https://tools.ietf.org/html/draft-ietf-sidr-bgpsec-pki-profiles-15#page-6
    +        '1.3.6.1.5.5.7.3.30': 'bgpspec_router',
    +        # https://www.ietf.org/proceedings/44/I-D/draft-ietf-ipsec-pki-req-01.txt
    +        '1.3.6.1.5.5.8.2.2': 'ike_intermediate',
    +        # https://msdn.microsoft.com/en-us/library/windows/desktop/aa378132(v=vs.85).aspx
    +        # and https://support.microsoft.com/en-us/kb/287547
    +        '1.3.6.1.4.1.311.10.3.1': 'microsoft_trust_list_signing',
    +        '1.3.6.1.4.1.311.10.3.2': 'microsoft_time_stamp_signing',
    +        '1.3.6.1.4.1.311.10.3.3': 'microsoft_server_gated',
    +        '1.3.6.1.4.1.311.10.3.3.1': 'microsoft_serialized',
    +        '1.3.6.1.4.1.311.10.3.4': 'microsoft_efs',
    +        '1.3.6.1.4.1.311.10.3.4.1': 'microsoft_efs_recovery',
    +        '1.3.6.1.4.1.311.10.3.5': 'microsoft_whql',
    +        '1.3.6.1.4.1.311.10.3.6': 'microsoft_nt5',
    +        '1.3.6.1.4.1.311.10.3.7': 'microsoft_oem_whql',
    +        '1.3.6.1.4.1.311.10.3.8': 'microsoft_embedded_nt',
    +        '1.3.6.1.4.1.311.10.3.9': 'microsoft_root_list_signer',
    +        '1.3.6.1.4.1.311.10.3.10': 'microsoft_qualified_subordination',
    +        '1.3.6.1.4.1.311.10.3.11': 'microsoft_key_recovery',
    +        '1.3.6.1.4.1.311.10.3.12': 'microsoft_document_signing',
    +        '1.3.6.1.4.1.311.10.3.13': 'microsoft_lifetime_signing',
    +        '1.3.6.1.4.1.311.10.3.14': 'microsoft_mobile_device_software',
    +        # https://support.microsoft.com/en-us/help/287547/object-ids-associated-with-microsoft-cryptography
    +        '1.3.6.1.4.1.311.20.2.2': 'microsoft_smart_card_logon',
    +        # https://opensource.apple.com/source
    +        #  - /Security/Security-57031.40.6/Security/libsecurity_keychain/lib/SecPolicy.cpp
    +        #  - /libsecurity_cssm/libsecurity_cssm-36064/lib/oidsalg.c
    +        '1.2.840.113635.100.1.2': 'apple_x509_basic',
    +        '1.2.840.113635.100.1.3': 'apple_ssl',
    +        '1.2.840.113635.100.1.4': 'apple_local_cert_gen',
    +        '1.2.840.113635.100.1.5': 'apple_csr_gen',
    +        '1.2.840.113635.100.1.6': 'apple_revocation_crl',
    +        '1.2.840.113635.100.1.7': 'apple_revocation_ocsp',
    +        '1.2.840.113635.100.1.8': 'apple_smime',
    +        '1.2.840.113635.100.1.9': 'apple_eap',
    +        '1.2.840.113635.100.1.10': 'apple_software_update_signing',
    +        '1.2.840.113635.100.1.11': 'apple_ipsec',
    +        '1.2.840.113635.100.1.12': 'apple_ichat',
    +        '1.2.840.113635.100.1.13': 'apple_resource_signing',
    +        '1.2.840.113635.100.1.14': 'apple_pkinit_client',
    +        '1.2.840.113635.100.1.15': 'apple_pkinit_server',
    +        '1.2.840.113635.100.1.16': 'apple_code_signing',
    +        '1.2.840.113635.100.1.17': 'apple_package_signing',
    +        '1.2.840.113635.100.1.18': 'apple_id_validation',
    +        '1.2.840.113635.100.1.20': 'apple_time_stamping',
    +        '1.2.840.113635.100.1.21': 'apple_revocation',
    +        '1.2.840.113635.100.1.22': 'apple_passbook_signing',
    +        '1.2.840.113635.100.1.23': 'apple_mobile_store',
    +        '1.2.840.113635.100.1.24': 'apple_escrow_service',
    +        '1.2.840.113635.100.1.25': 'apple_profile_signer',
    +        '1.2.840.113635.100.1.26': 'apple_qa_profile_signer',
    +        '1.2.840.113635.100.1.27': 'apple_test_mobile_store',
    +        '1.2.840.113635.100.1.28': 'apple_otapki_signer',
    +        '1.2.840.113635.100.1.29': 'apple_test_otapki_signer',
    +        '1.2.840.113625.100.1.30': 'apple_id_validation_record_signing_policy',
    +        '1.2.840.113625.100.1.31': 'apple_smp_encryption',
    +        '1.2.840.113625.100.1.32': 'apple_test_smp_encryption',
    +        '1.2.840.113635.100.1.33': 'apple_server_authentication',
    +        '1.2.840.113635.100.1.34': 'apple_pcs_escrow_service',
    +        # http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.201-2.pdf
    +        '2.16.840.1.101.3.6.8': 'piv_card_authentication',
    +        '2.16.840.1.101.3.6.7': 'piv_content_signing',
    +        # https://tools.ietf.org/html/rfc4556.html
    +        '1.3.6.1.5.2.3.4': 'pkinit_kpclientauth',
    +        '1.3.6.1.5.2.3.5': 'pkinit_kpkdc',
    +        # https://www.adobe.com/devnet-docs/acrobatetk/tools/DigSig/changes.html
    +        '1.2.840.113583.1.1.5': 'adobe_authentic_documents_trust',
    +        # https://www.idmanagement.gov/wp-content/uploads/sites/1171/uploads/fpki-pivi-cert-profiles.pdf
    +        '2.16.840.1.101.3.8.7': 'fpki_pivi_content_signing'
    +    }
    +
    +
    +class ExtKeyUsageSyntax(SequenceOf):
    +    _child_spec = KeyPurposeId
    +
    +
    +class AccessMethod(ObjectIdentifier):
    +    _map = {
    +        '1.3.6.1.5.5.7.48.1': 'ocsp',
    +        '1.3.6.1.5.5.7.48.2': 'ca_issuers',
    +        '1.3.6.1.5.5.7.48.3': 'time_stamping',
    +        '1.3.6.1.5.5.7.48.5': 'ca_repository',
    +    }
    +
    +
    +class AccessDescription(Sequence):
    +    _fields = [
    +        ('access_method', AccessMethod),
    +        ('access_location', GeneralName),
    +    ]
    +
    +
    +class AuthorityInfoAccessSyntax(SequenceOf):
    +    _child_spec = AccessDescription
    +
    +
    +class SubjectInfoAccessSyntax(SequenceOf):
    +    _child_spec = AccessDescription
    +
    +
    +# https://tools.ietf.org/html/rfc7633
    +class Features(SequenceOf):
    +    _child_spec = Integer
    +
    +
    +class EntrustVersionInfo(Sequence):
    +    _fields = [
    +        ('entrust_vers', GeneralString),
    +        ('entrust_info_flags', BitString)
    +    ]
    +
    +
    +class NetscapeCertificateType(BitString):
    +    _map = {
    +        0: 'ssl_client',
    +        1: 'ssl_server',
    +        2: 'email',
    +        3: 'object_signing',
    +        4: 'reserved',
    +        5: 'ssl_ca',
    +        6: 'email_ca',
    +        7: 'object_signing_ca',
    +    }
    +
    +
    +class Version(Integer):
    +    _map = {
    +        0: 'v1',
    +        1: 'v2',
    +        2: 'v3',
    +    }
    +
    +
    +class TPMSpecification(Sequence):
    +    _fields = [
    +        ('family', UTF8String),
    +        ('level', Integer),
    +        ('revision', Integer),
    +    ]
    +
    +
    +class SetOfTPMSpecification(SetOf):
    +    _child_spec = TPMSpecification
    +
    +
    +class TCGSpecificationVersion(Sequence):
    +    _fields = [
    +        ('major_version', Integer),
    +        ('minor_version', Integer),
    +        ('revision', Integer),
    +    ]
    +
    +
    +class TCGPlatformSpecification(Sequence):
    +    _fields = [
    +        ('version', TCGSpecificationVersion),
    +        ('platform_class', OctetString),
    +    ]
    +
    +
    +class SetOfTCGPlatformSpecification(SetOf):
    +    _child_spec = TCGPlatformSpecification
    +
    +
    +class EKGenerationType(Enumerated):
    +    _map = {
    +        0: 'internal',
    +        1: 'injected',
    +        2: 'internal_revocable',
    +        3: 'injected_revocable',
    +    }
    +
    +
    +class EKGenerationLocation(Enumerated):
    +    _map = {
    +        0: 'tpm_manufacturer',
    +        1: 'platform_manufacturer',
    +        2: 'ek_cert_signer',
    +    }
    +
    +
    +class EKCertificateGenerationLocation(Enumerated):
    +    _map = {
    +        0: 'tpm_manufacturer',
    +        1: 'platform_manufacturer',
    +        2: 'ek_cert_signer',
    +    }
    +
    +
    +class EvaluationAssuranceLevel(Enumerated):
    +    _map = {
    +        1: 'level1',
    +        2: 'level2',
    +        3: 'level3',
    +        4: 'level4',
    +        5: 'level5',
    +        6: 'level6',
    +        7: 'level7',
    +    }
    +
    +
    +class EvaluationStatus(Enumerated):
    +    _map = {
    +        0: 'designed_to_meet',
    +        1: 'evaluation_in_progress',
    +        2: 'evaluation_completed',
    +    }
    +
    +
    +class StrengthOfFunction(Enumerated):
    +    _map = {
    +        0: 'basic',
    +        1: 'medium',
    +        2: 'high',
    +    }
    +
    +
    +class URIReference(Sequence):
    +    _fields = [
    +        ('uniform_resource_identifier', IA5String),
    +        ('hash_algorithm', DigestAlgorithm, {'optional': True}),
    +        ('hash_value', BitString, {'optional': True}),
    +    ]
    +
    +
    +class CommonCriteriaMeasures(Sequence):
    +    _fields = [
    +        ('version', IA5String),
    +        ('assurance_level', EvaluationAssuranceLevel),
    +        ('evaluation_status', EvaluationStatus),
    +        ('plus', Boolean, {'default': False}),
    +        ('strengh_of_function', StrengthOfFunction, {'implicit': 0, 'optional': True}),
    +        ('profile_oid', ObjectIdentifier, {'implicit': 1, 'optional': True}),
    +        ('profile_url', URIReference, {'implicit': 2, 'optional': True}),
    +        ('target_oid', ObjectIdentifier, {'implicit': 3, 'optional': True}),
    +        ('target_uri', URIReference, {'implicit': 4, 'optional': True}),
    +    ]
    +
    +
    +class SecurityLevel(Enumerated):
    +    _map = {
    +        1: 'level1',
    +        2: 'level2',
    +        3: 'level3',
    +        4: 'level4',
    +    }
    +
    +
    +class FIPSLevel(Sequence):
    +    _fields = [
    +        ('version', IA5String),
    +        ('level', SecurityLevel),
    +        ('plus', Boolean, {'default': False}),
    +    ]
    +
    +
    +class TPMSecurityAssertions(Sequence):
    +    _fields = [
    +        ('version', Version, {'default': 'v1'}),
    +        ('field_upgradable', Boolean, {'default': False}),
    +        ('ek_generation_type', EKGenerationType, {'implicit': 0, 'optional': True}),
    +        ('ek_generation_location', EKGenerationLocation, {'implicit': 1, 'optional': True}),
    +        ('ek_certificate_generation_location', EKCertificateGenerationLocation, {'implicit': 2, 'optional': True}),
    +        ('cc_info', CommonCriteriaMeasures, {'implicit': 3, 'optional': True}),
    +        ('fips_level', FIPSLevel, {'implicit': 4, 'optional': True}),
    +        ('iso_9000_certified', Boolean, {'implicit': 5, 'default': False}),
    +        ('iso_9000_uri', IA5String, {'optional': True}),
    +    ]
    +
    +
    +class SetOfTPMSecurityAssertions(SetOf):
    +    _child_spec = TPMSecurityAssertions
    +
    +
    +class SubjectDirectoryAttributeId(ObjectIdentifier):
    +    _map = {
    +        # https://tools.ietf.org/html/rfc2256#page-11
    +        '2.5.4.52': 'supported_algorithms',
    +        # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
    +        '2.23.133.2.16': 'tpm_specification',
    +        '2.23.133.2.17': 'tcg_platform_specification',
    +        '2.23.133.2.18': 'tpm_security_assertions',
    +        # https://tools.ietf.org/html/rfc3739#page-18
    +        '1.3.6.1.5.5.7.9.1': 'pda_date_of_birth',
    +        '1.3.6.1.5.5.7.9.2': 'pda_place_of_birth',
    +        '1.3.6.1.5.5.7.9.3': 'pda_gender',
    +        '1.3.6.1.5.5.7.9.4': 'pda_country_of_citizenship',
    +        '1.3.6.1.5.5.7.9.5': 'pda_country_of_residence',
    +        # https://holtstrom.com/michael/tools/asn1decoder.php
    +        '1.2.840.113533.7.68.29': 'entrust_user_role',
    +    }
    +
    +
    +class SetOfGeneralizedTime(SetOf):
    +    _child_spec = GeneralizedTime
    +
    +
    +class SetOfDirectoryString(SetOf):
    +    _child_spec = DirectoryString
    +
    +
    +class SetOfPrintableString(SetOf):
    +    _child_spec = PrintableString
    +
    +
    +class SupportedAlgorithm(Sequence):
    +    _fields = [
    +        ('algorithm_identifier', AnyAlgorithmIdentifier),
    +        ('intended_usage', KeyUsage, {'explicit': 0, 'optional': True}),
    +        ('intended_certificate_policies', CertificatePolicies, {'explicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class SetOfSupportedAlgorithm(SetOf):
    +    _child_spec = SupportedAlgorithm
    +
    +
    +class SubjectDirectoryAttribute(Sequence):
    +    _fields = [
    +        ('type', SubjectDirectoryAttributeId),
    +        ('values', Any),
    +    ]
    +
    +    _oid_pair = ('type', 'values')
    +    _oid_specs = {
    +        'supported_algorithms': SetOfSupportedAlgorithm,
    +        'tpm_specification': SetOfTPMSpecification,
    +        'tcg_platform_specification': SetOfTCGPlatformSpecification,
    +        'tpm_security_assertions': SetOfTPMSecurityAssertions,
    +        'pda_date_of_birth': SetOfGeneralizedTime,
    +        'pda_place_of_birth': SetOfDirectoryString,
    +        'pda_gender': SetOfPrintableString,
    +        'pda_country_of_citizenship': SetOfPrintableString,
    +        'pda_country_of_residence': SetOfPrintableString,
    +    }
    +
    +    def _values_spec(self):
    +        type_ = self['type'].native
    +        if type_ in self._oid_specs:
    +            return self._oid_specs[type_]
    +        return SetOf
    +
    +    _spec_callbacks = {
    +        'values': _values_spec
    +    }
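+
+    # `_spec_callbacks` defers spec selection to parse time: the spec for
+    # 'values' is computed per instance from the already-parsed 'type'
+    # field, falling back to a generic SetOf for unrecognized attribute OIDs.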
    +
    +
    +class SubjectDirectoryAttributes(SequenceOf):
    +    _child_spec = SubjectDirectoryAttribute
    +
    +
    +class ExtensionId(ObjectIdentifier):
    +    _map = {
    +        '2.5.29.9': 'subject_directory_attributes',
    +        '2.5.29.14': 'key_identifier',
    +        '2.5.29.15': 'key_usage',
    +        '2.5.29.16': 'private_key_usage_period',
    +        '2.5.29.17': 'subject_alt_name',
    +        '2.5.29.18': 'issuer_alt_name',
    +        '2.5.29.19': 'basic_constraints',
    +        '2.5.29.30': 'name_constraints',
    +        '2.5.29.31': 'crl_distribution_points',
    +        '2.5.29.32': 'certificate_policies',
    +        '2.5.29.33': 'policy_mappings',
    +        '2.5.29.35': 'authority_key_identifier',
    +        '2.5.29.36': 'policy_constraints',
    +        '2.5.29.37': 'extended_key_usage',
    +        '2.5.29.46': 'freshest_crl',
    +        '2.5.29.54': 'inhibit_any_policy',
    +        '1.3.6.1.5.5.7.1.1': 'authority_information_access',
    +        '1.3.6.1.5.5.7.1.11': 'subject_information_access',
    +        # https://tools.ietf.org/html/rfc7633
    +        '1.3.6.1.5.5.7.1.24': 'tls_feature',
    +        '1.3.6.1.5.5.7.48.1.5': 'ocsp_no_check',
    +        '1.2.840.113533.7.65.0': 'entrust_version_extension',
    +        '2.16.840.1.113730.1.1': 'netscape_certificate_type',
    +        # https://tools.ietf.org/html/rfc6962.html#page-14
    +        '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list',
    +    }
    +
    +
    +class Extension(Sequence):
    +    _fields = [
    +        ('extn_id', ExtensionId),
    +        ('critical', Boolean, {'default': False}),
    +        ('extn_value', ParsableOctetString),
    +    ]
    +
    +    _oid_pair = ('extn_id', 'extn_value')
    +    _oid_specs = {
    +        'subject_directory_attributes': SubjectDirectoryAttributes,
    +        'key_identifier': OctetString,
    +        'key_usage': KeyUsage,
    +        'private_key_usage_period': PrivateKeyUsagePeriod,
    +        'subject_alt_name': GeneralNames,
    +        'issuer_alt_name': GeneralNames,
    +        'basic_constraints': BasicConstraints,
    +        'name_constraints': NameConstraints,
    +        'crl_distribution_points': CRLDistributionPoints,
    +        'certificate_policies': CertificatePolicies,
    +        'policy_mappings': PolicyMappings,
    +        'authority_key_identifier': AuthorityKeyIdentifier,
    +        'policy_constraints': PolicyConstraints,
    +        'extended_key_usage': ExtKeyUsageSyntax,
    +        'freshest_crl': CRLDistributionPoints,
    +        'inhibit_any_policy': Integer,
    +        'authority_information_access': AuthorityInfoAccessSyntax,
    +        'subject_information_access': SubjectInfoAccessSyntax,
    +        'tls_feature': Features,
    +        'ocsp_no_check': Null,
    +        'entrust_version_extension': EntrustVersionInfo,
    +        'netscape_certificate_type': NetscapeCertificateType,
    +        'signed_certificate_timestamp_list': OctetString,
    +    }
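+
+    # Usage sketch (illustrative): for a parsed Extension `ext`, the decoded
+    # inner structure is available lazily as `ext['extn_value'].parsed`,
+    # using the spec mapped from `ext['extn_id']` above.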
    +
    +
    +class Extensions(SequenceOf):
    +    _child_spec = Extension
    +
    +
    +class TbsCertificate(Sequence):
    +    _fields = [
    +        ('version', Version, {'explicit': 0, 'default': 'v1'}),
    +        ('serial_number', Integer),
    +        ('signature', SignedDigestAlgorithm),
    +        ('issuer', Name),
    +        ('validity', Validity),
    +        ('subject', Name),
    +        ('subject_public_key_info', PublicKeyInfo),
    +        ('issuer_unique_id', OctetBitString, {'implicit': 1, 'optional': True}),
    +        ('subject_unique_id', OctetBitString, {'implicit': 2, 'optional': True}),
    +        ('extensions', Extensions, {'explicit': 3, 'optional': True}),
    +    ]
    +
    +
    +class Certificate(Sequence):
    +    _fields = [
    +        ('tbs_certificate', TbsCertificate),
    +        ('signature_algorithm', SignedDigestAlgorithm),
    +        ('signature_value', OctetBitString),
    +    ]
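+
+    # Usage sketch (illustrative, not part of the upstream code): a DER
+    # certificate can be parsed with the inherited `load()` classmethod:
+    #
+    #   with open('cert.der', 'rb') as f:   # hypothetical file name
+    #       cert = Certificate.load(f.read())
+    #   print(cert.serial_number)
+    #   print(cert.not_valid_before, cert.not_valid_after)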
    +
    +    _processed_extensions = False
    +    _critical_extensions = None
    +    _subject_directory_attributes_value = None
    +    _key_identifier_value = None
    +    _key_usage_value = None
    +    _subject_alt_name_value = None
    +    _issuer_alt_name_value = None
    +    _basic_constraints_value = None
    +    _name_constraints_value = None
    +    _crl_distribution_points_value = None
    +    _certificate_policies_value = None
    +    _policy_mappings_value = None
    +    _authority_key_identifier_value = None
    +    _policy_constraints_value = None
    +    _freshest_crl_value = None
    +    _inhibit_any_policy_value = None
    +    _extended_key_usage_value = None
    +    _authority_information_access_value = None
    +    _subject_information_access_value = None
    +    _private_key_usage_period_value = None
    +    _tls_feature_value = None
    +    _ocsp_no_check_value = None
    +    _issuer_serial = None
    +    _authority_issuer_serial = False
    +    _crl_distribution_points = None
    +    _delta_crl_distribution_points = None
    +    _valid_domains = None
    +    _valid_ips = None
    +    _self_issued = None
    +    _self_signed = None
    +    _sha1 = None
    +    _sha256 = None
    +
    +    def _set_extensions(self):
    +        """
    +        Sets common named extensions to private attributes and creates a list
    +        of critical extensions
    +        """
    +
    +        self._critical_extensions = set()
    +
    +        for extension in self['tbs_certificate']['extensions']:
    +            name = extension['extn_id'].native
    +            attribute_name = '_%s_value' % name
    +            if hasattr(self, attribute_name):
    +                setattr(self, attribute_name, extension['extn_value'].parsed)
    +            if extension['critical'].native:
    +                self._critical_extensions.add(name)
    +
    +        self._processed_extensions = True
    +
    +    @property
    +    def critical_extensions(self):
    +        """
    +        Returns a set of the names (or OID if not a known extension) of the
    +        extensions marked as critical
    +
    +        :return:
    +            A set of unicode strings
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._critical_extensions
    +
    +    @property
    +    def private_key_usage_period_value(self):
    +        """
    +        This extension is used to constrain the period over which the subject
    +        private key may be used
    +
    +        :return:
    +            None or a PrivateKeyUsagePeriod object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._private_key_usage_period_value
    +
    +    @property
    +    def subject_directory_attributes_value(self):
    +        """
    +        This extension is used to contain additional identification attributes
    +        about the subject.
    +
    +        :return:
    +            None or a SubjectDirectoryAttributes object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._subject_directory_attributes_value
    +
    +    @property
    +    def key_identifier_value(self):
    +        """
    +        This extension is used to help in creating certificate validation paths.
+        It contains an identifier that should generally be unique, but is
+        not guaranteed to be.
    +
    +        :return:
    +            None or an OctetString object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._key_identifier_value
    +
    +    @property
    +    def key_usage_value(self):
    +        """
    +        This extension is used to define the purpose of the public key
    +        contained within the certificate.
    +
    +        :return:
    +            None or a KeyUsage
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._key_usage_value
    +
    +    @property
    +    def subject_alt_name_value(self):
    +        """
+        This extension allows for additional names to be associated with the
    +        subject of the certificate. While it may contain a whole host of
    +        possible names, it is usually used to allow certificates to be used
    +        with multiple different domain names.
    +
    +        :return:
    +            None or a GeneralNames object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._subject_alt_name_value
    +
    +    @property
    +    def issuer_alt_name_value(self):
    +        """
    +        This extension allows associating one or more alternative names with
    +        the issuer of the certificate.
    +
    +        :return:
    +            None or an x509.GeneralNames object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._issuer_alt_name_value
    +
    +    @property
    +    def basic_constraints_value(self):
    +        """
    +        This extension is used to determine if the subject of the certificate
+        is a CA, and if so, the maximum number of intermediate CA certs that
+        may follow it before an end-entity certificate is found.
    +
    +        :return:
    +            None or a BasicConstraints object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._basic_constraints_value
    +
    +    @property
    +    def name_constraints_value(self):
    +        """
    +        This extension is used in CA certificates, and is used to limit the
    +        possible names of certificates issued.
    +
    +        :return:
    +            None or a NameConstraints object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._name_constraints_value
    +
    +    @property
    +    def crl_distribution_points_value(self):
    +        """
    +        This extension is used to help in locating the CRL for this certificate.
    +
    +        :return:
    +            None or a CRLDistributionPoints object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._crl_distribution_points_value
    +
    +    @property
    +    def certificate_policies_value(self):
    +        """
    +        This extension defines policies in CA certificates under which
    +        certificates may be issued. In end-entity certificates, the inclusion
    +        of a policy indicates the issuance of the certificate follows the
    +        policy.
    +
    +        :return:
    +            None or a CertificatePolicies object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._certificate_policies_value
    +
    +    @property
    +    def policy_mappings_value(self):
    +        """
    +        This extension allows mapping policy OIDs to other OIDs. This is used
    +        to allow different policies to be treated as equivalent in the process
    +        of validation.
    +
    +        :return:
    +            None or a PolicyMappings object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._policy_mappings_value
    +
    +    @property
    +    def authority_key_identifier_value(self):
    +        """
    +        This extension helps in identifying the public key with which to
    +        validate the authenticity of the certificate.
    +
    +        :return:
    +            None or an AuthorityKeyIdentifier object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._authority_key_identifier_value
    +
    +    @property
    +    def policy_constraints_value(self):
    +        """
    +        This extension is used to control if policy mapping is allowed and
    +        when policies are required.
    +
    +        :return:
    +            None or a PolicyConstraints object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._policy_constraints_value
    +
    +    @property
    +    def freshest_crl_value(self):
    +        """
    +        This extension is used to help locate any available delta CRLs
    +
    +        :return:
+            None or a CRLDistributionPoints object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._freshest_crl_value
    +
    +    @property
    +    def inhibit_any_policy_value(self):
    +        """
+        This extension is used to prevent the special any_policy value from
+        being used to satisfy specific policy requirements
    +
    +        :return:
+            None or an Integer object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._inhibit_any_policy_value
    +
    +    @property
    +    def extended_key_usage_value(self):
    +        """
    +        This extension is used to define additional purposes for the public key
    +        beyond what is contained in the basic constraints.
    +
    +        :return:
    +            None or an ExtKeyUsageSyntax object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._extended_key_usage_value
    +
    +    @property
    +    def authority_information_access_value(self):
    +        """
    +        This extension is used to locate the CA certificate used to sign this
    +        certificate, or the OCSP responder for this certificate.
    +
    +        :return:
    +            None or an AuthorityInfoAccessSyntax object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._authority_information_access_value
    +
    +    @property
    +    def subject_information_access_value(self):
    +        """
    +        This extension is used to access information about the subject of this
    +        certificate.
    +
    +        :return:
    +            None or a SubjectInfoAccessSyntax object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._subject_information_access_value
    +
    +    @property
    +    def tls_feature_value(self):
    +        """
    +        This extension is used to list the TLS features a server must respond
    +        with if a client initiates a request supporting them.
    +
    +        :return:
    +            None or a Features object
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._tls_feature_value
    +
    +    @property
    +    def ocsp_no_check_value(self):
    +        """
    +        This extension is used on certificates of OCSP responders, indicating
    +        that revocation information for the certificate should never need to
    +        be verified, thus preventing possible loops in path validation.
    +
    +        :return:
    +            None or a Null object (if present)
    +        """
    +
    +        if not self._processed_extensions:
    +            self._set_extensions()
    +        return self._ocsp_no_check_value
    +
    +    @property
    +    def signature(self):
    +        """
    +        :return:
    +            A byte string of the signature
    +        """
    +
    +        return self['signature_value'].native
    +
    +    @property
    +    def signature_algo(self):
    +        """
    +        :return:
    +            A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", "ecdsa"
    +        """
    +
    +        return self['signature_algorithm'].signature_algo
    +
    +    @property
    +    def hash_algo(self):
    +        """
    +        :return:
    +            A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
    +            "sha384", "sha512", "sha512_224", "sha512_256"
    +        """
    +
    +        return self['signature_algorithm'].hash_algo
    +
    +    @property
    +    def public_key(self):
    +        """
    +        :return:
    +            The PublicKeyInfo object for this certificate
    +        """
    +
    +        return self['tbs_certificate']['subject_public_key_info']
    +
    +    @property
    +    def subject(self):
    +        """
    +        :return:
    +            The Name object for the subject of this certificate
    +        """
    +
    +        return self['tbs_certificate']['subject']
    +
    +    @property
    +    def issuer(self):
    +        """
    +        :return:
    +            The Name object for the issuer of this certificate
    +        """
    +
    +        return self['tbs_certificate']['issuer']
    +
    +    @property
    +    def serial_number(self):
    +        """
    +        :return:
    +            An integer of the certificate's serial number
    +        """
    +
    +        return self['tbs_certificate']['serial_number'].native
    +
    +    @property
    +    def key_identifier(self):
    +        """
    +        :return:
    +            None or a byte string of the certificate's key identifier from the
    +            key identifier extension
    +        """
    +
    +        if not self.key_identifier_value:
    +            return None
    +
    +        return self.key_identifier_value.native
    +
    +    @property
    +    def issuer_serial(self):
    +        """
    +        :return:
    +            A byte string of the SHA-256 hash of the issuer concatenated with
    +            the ascii character ":", concatenated with the serial number as
    +            an ascii string
    +        """
    +
    +        if self._issuer_serial is None:
    +            self._issuer_serial = self.issuer.sha256 + b':' + str_cls(self.serial_number).encode('ascii')
    +        return self._issuer_serial
    +
    +    @property
    +    def not_valid_after(self):
    +        """
    +        :return:
+            A datetime of the latest time when the certificate is still valid
    +        """
    +        return self['tbs_certificate']['validity']['not_after'].native
    +
    +    @property
    +    def not_valid_before(self):
    +        """
    +        :return:
    +            A datetime of the earliest time when the certificate is valid
    +        """
    +        return self['tbs_certificate']['validity']['not_before'].native
    +
    +    @property
    +    def authority_key_identifier(self):
    +        """
    +        :return:
    +            None or a byte string of the key_identifier from the authority key
    +            identifier extension
    +        """
    +
    +        if not self.authority_key_identifier_value:
    +            return None
    +
    +        return self.authority_key_identifier_value['key_identifier'].native
    +
    +    @property
    +    def authority_issuer_serial(self):
    +        """
    +        :return:
+            None or a byte string of the SHA-256 hash of the issuer from the
    +            authority key identifier extension concatenated with the ascii
    +            character ":", concatenated with the serial number from the
    +            authority key identifier extension as an ascii string
    +        """
    +
    +        if self._authority_issuer_serial is False:
    +            akiv = self.authority_key_identifier_value
    +            if akiv and akiv['authority_cert_issuer'].native:
    +                issuer = self.authority_key_identifier_value['authority_cert_issuer'][0].chosen
    +                # We untag the element since it is tagged via being a choice from GeneralName
    +                issuer = issuer.untag()
    +                authority_serial = self.authority_key_identifier_value['authority_cert_serial_number'].native
    +                self._authority_issuer_serial = issuer.sha256 + b':' + str_cls(authority_serial).encode('ascii')
    +            else:
    +                self._authority_issuer_serial = None
    +        return self._authority_issuer_serial
    +
    +    @property
    +    def crl_distribution_points(self):
    +        """
+        Returns distribution points for complete CRLs - does not include delta CRLs
    +
    +        :return:
    +            A list of zero or more DistributionPoint objects
    +        """
    +
    +        if self._crl_distribution_points is None:
    +            self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
    +        return self._crl_distribution_points
    +
    +    @property
    +    def delta_crl_distribution_points(self):
    +        """
+        Returns distribution points for delta CRLs - does not include complete CRLs
    +
    +        :return:
    +            A list of zero or more DistributionPoint objects
    +        """
    +
    +        if self._delta_crl_distribution_points is None:
    +            self._delta_crl_distribution_points = self._get_http_crl_distribution_points(self.freshest_crl_value)
    +        return self._delta_crl_distribution_points
    +
    +    def _get_http_crl_distribution_points(self, crl_distribution_points):
    +        """
+        Fetches the DistributionPoint objects for non-relative, HTTP CRLs
    +        referenced by the certificate
    +
    +        :param crl_distribution_points:
    +            A CRLDistributionPoints object to grab the DistributionPoints from
    +
    +        :return:
    +            A list of zero or more DistributionPoint objects
    +        """
    +
    +        output = []
    +
    +        if crl_distribution_points is None:
    +            return []
    +
    +        for distribution_point in crl_distribution_points:
    +            distribution_point_name = distribution_point['distribution_point']
    +            if distribution_point_name is VOID:
    +                continue
    +            # RFC 5280 indicates conforming CA should not use the relative form
    +            if distribution_point_name.name == 'name_relative_to_crl_issuer':
    +                continue
    +            # This library is currently only concerned with HTTP-based CRLs
    +            for general_name in distribution_point_name.chosen:
    +                if general_name.name == 'uniform_resource_identifier':
    +                    output.append(distribution_point)
    +
    +        return output
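+
+    # Note: scheme filtering (http/https/ldap/ldaps) happens lazily in
+    # DistributionPoint.url; this helper only requires that some URI entry
+    # exist in the distribution point's full_name.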
    +
    +    @property
    +    def ocsp_urls(self):
    +        """
    +        :return:
    +            A list of zero or more unicode strings of the OCSP URLs for this
    +            cert
    +        """
    +
    +        if not self.authority_information_access_value:
    +            return []
    +
    +        output = []
    +        for entry in self.authority_information_access_value:
    +            if entry['access_method'].native == 'ocsp':
    +                location = entry['access_location']
    +                if location.name != 'uniform_resource_identifier':
    +                    continue
    +                url = location.native
    +                if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
    +                    output.append(url)
    +        return output
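+
+    # Usage sketch (illustrative): `cert.ocsp_urls` returns values such as
+    # ['http://ocsp.example.com'] (hypothetical URL), or an empty list when
+    # the certificate carries no authority information access OCSP entry.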
    +
    +    @property
    +    def valid_domains(self):
    +        """
    +        :return:
    +            A list of unicode strings of valid domain names for the certificate.
    +            Wildcard certificates will have a domain in the form: *.example.com
    +        """
    +
    +        if self._valid_domains is None:
    +            self._valid_domains = []
    +
    +            # For the subject alt name extension, we can look at the name of
    +            # the choice selected since it distinguishes between domain names,
    +            # email addresses, IPs, etc
    +            if self.subject_alt_name_value:
    +                for general_name in self.subject_alt_name_value:
    +                    if general_name.name == 'dns_name' and general_name.native not in self._valid_domains:
    +                        self._valid_domains.append(general_name.native)
    +
    +            # If there was no subject alt name extension, and the common name
    +            # in the subject looks like a domain, that is considered the valid
    +            # list. This is done because according to
    +            # https://tools.ietf.org/html/rfc6125#section-6.4.4, the common
    +            # name should not be used if the subject alt name is present.
    +            else:
    +                pattern = re.compile('^(\\*\\.)?(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-]*[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,}$')
    +                for rdn in self.subject.chosen:
    +                    for name_type_value in rdn:
    +                        if name_type_value['type'].native == 'common_name':
    +                            value = name_type_value['value'].native
    +                            if pattern.match(value):
    +                                self._valid_domains.append(value)
    +
    +        return self._valid_domains
    +
    +    @property
    +    def valid_ips(self):
    +        """
    +        :return:
    +            A list of unicode strings of valid IP addresses for the certificate
    +        """
    +
    +        if self._valid_ips is None:
    +            self._valid_ips = []
    +
    +            if self.subject_alt_name_value:
    +                for general_name in self.subject_alt_name_value:
    +                    if general_name.name == 'ip_address':
    +                        self._valid_ips.append(general_name.native)
    +
    +        return self._valid_ips
    +
    +    @property
    +    def ca(self):
    +        """
+        :return:
    +            A boolean - if the certificate is marked as a CA
    +        """
    +
    +        return self.basic_constraints_value and self.basic_constraints_value['ca'].native
    +
    +    @property
    +    def max_path_length(self):
    +        """
+        :return:
    +            None or an integer of the maximum path length
    +        """
    +
    +        if not self.ca:
    +            return None
    +        return self.basic_constraints_value['path_len_constraint'].native
    +
    +    @property
    +    def self_issued(self):
    +        """
    +        :return:
    +            A boolean - if the certificate is self-issued, as defined by RFC
    +            5280
    +        """
    +
    +        if self._self_issued is None:
    +            self._self_issued = self.subject == self.issuer
    +        return self._self_issued
    +
    +    @property
    +    def self_signed(self):
    +        """
    +        :return:
    +            A unicode string of "no" or "maybe". The "maybe" result will
    +            be returned if the certificate issuer and subject are the same.
    +            If a key identifier and authority key identifier are present,
+            they will need to match, otherwise "no" will be returned.
    +
+            To verify if a certificate is truly self-signed, the signature
    +            will need to be verified. See the certvalidator package for
    +            one possible solution.
    +        """
    +
    +        if self._self_signed is None:
    +            self._self_signed = 'no'
    +            if self.self_issued:
    +                if self.key_identifier:
    +                    if not self.authority_key_identifier:
    +                        self._self_signed = 'maybe'
    +                    elif self.authority_key_identifier == self.key_identifier:
    +                        self._self_signed = 'maybe'
    +                else:
    +                    self._self_signed = 'maybe'
    +        return self._self_signed
    +
    +    @property
    +    def sha1(self):
    +        """
    +        :return:
    +            The SHA-1 hash of the DER-encoded bytes of this complete certificate
    +        """
    +
    +        if self._sha1 is None:
    +            self._sha1 = hashlib.sha1(self.dump()).digest()
    +        return self._sha1
    +
    +    @property
    +    def sha1_fingerprint(self):
    +        """
    +        :return:
    +            A unicode string of the SHA-1 hash, formatted using hex encoding
    +            with a space between each pair of characters, all uppercase
    +        """
    +
    +        return ' '.join('%02X' % c for c in bytes_to_list(self.sha1))
    +
    +    @property
    +    def sha256(self):
    +        """
    +        :return:
    +            The SHA-256 hash of the DER-encoded bytes of this complete
    +            certificate
    +        """
    +
    +        if self._sha256 is None:
    +            self._sha256 = hashlib.sha256(self.dump()).digest()
    +        return self._sha256
    +
    +    @property
    +    def sha256_fingerprint(self):
    +        """
    +        :return:
    +            A unicode string of the SHA-256 hash, formatted using hex encoding
    +            with a space between each pair of characters, all uppercase
    +        """
    +
    +        return ' '.join('%02X' % c for c in bytes_to_list(self.sha256))
    +
    +    def is_valid_domain_ip(self, domain_ip):
    +        """
    +        Check if a domain name or IP address is valid according to the
    +        certificate
    +
    +        :param domain_ip:
    +            A unicode string of a domain name or IP address
    +
    +        :return:
    +            A boolean - if the domain or IP is valid for the certificate
    +        """
    +
    +        if not isinstance(domain_ip, str_cls):
    +            raise TypeError(unwrap(
    +                '''
    +                domain_ip must be a unicode string, not %s
    +                ''',
    +                type_name(domain_ip)
    +            ))
    +
    +        encoded_domain_ip = domain_ip.encode('idna').decode('ascii').lower()
    +
    +        is_ipv6 = encoded_domain_ip.find(':') != -1
    +        is_ipv4 = not is_ipv6 and re.match('^\\d+\\.\\d+\\.\\d+\\.\\d+$', encoded_domain_ip)
    +        is_domain = not is_ipv6 and not is_ipv4
    +
    +        # Handle domain name checks
    +        if is_domain:
    +            if not self.valid_domains:
    +                return False
    +
    +            domain_labels = encoded_domain_ip.split('.')
    +
    +            for valid_domain in self.valid_domains:
    +                encoded_valid_domain = valid_domain.encode('idna').decode('ascii').lower()
    +                valid_domain_labels = encoded_valid_domain.split('.')
    +
    +                # The domain must be equal in label length to match
    +                if len(valid_domain_labels) != len(domain_labels):
    +                    continue
    +
    +                if valid_domain_labels == domain_labels:
    +                    return True
    +
    +                is_wildcard = self._is_wildcard_domain(encoded_valid_domain)
    +                if is_wildcard and self._is_wildcard_match(domain_labels, valid_domain_labels):
    +                    return True
    +
    +            return False
    +
    +        # Handle IP address checks
    +        if not self.valid_ips:
    +            return False
    +
    +        family = socket.AF_INET if is_ipv4 else socket.AF_INET6
    +        normalized_ip = inet_pton(family, encoded_domain_ip)
    +
    +        for valid_ip in self.valid_ips:
    +            valid_family = socket.AF_INET if valid_ip.find('.') != -1 else socket.AF_INET6
    +            normalized_valid_ip = inet_pton(valid_family, valid_ip)
    +
    +            if normalized_valid_ip == normalized_ip:
    +                return True
    +
    +        return False
    +
    +    def _is_wildcard_domain(self, domain):
    +        """
    +        Checks if a domain is a valid wildcard according to
    +        https://tools.ietf.org/html/rfc6125#section-6.4.3
    +
    +        :param domain:
    +            A unicode string of the domain name, where any U-labels from an IDN
    +            have been converted to A-labels
    +
    +        :return:
    +            A boolean - if the domain is a valid wildcard domain
    +        """
    +
    +        # The * character must be present for a wildcard match, and if there is
+        # more than one, it is an invalid wildcard specification
    +        if domain.count('*') != 1:
    +            return False
    +
    +        labels = domain.lower().split('.')
    +
    +        if not labels:
    +            return False
    +
    +        # Wildcards may only appear in the left-most label
    +        if labels[0].find('*') == -1:
    +            return False
    +
    +        # Wildcards may not be embedded in an A-label from an IDN
    +        if labels[0][0:4] == 'xn--':
    +            return False
    +
    +        return True
    +
    +    def _is_wildcard_match(self, domain_labels, valid_domain_labels):
    +        """
    +        Determines if the labels in a domain are a match for labels from a
    +        wildcard valid domain name
    +
    +        :param domain_labels:
    +            A list of unicode strings, with A-label form for IDNs, of the labels
    +            in the domain name to check
    +
    +        :param valid_domain_labels:
    +            A list of unicode strings, with A-label form for IDNs, of the labels
    +            in a wildcard domain pattern
    +
    +        :return:
    +            A boolean - if the domain matches the valid domain
    +        """
    +
    +        first_domain_label = domain_labels[0]
    +        other_domain_labels = domain_labels[1:]
    +
    +        wildcard_label = valid_domain_labels[0]
    +        other_valid_domain_labels = valid_domain_labels[1:]
    +
+        # The wildcard is only allowed in the first label, so if the
+        # subsequent labels are not equal, there is no match
    +        if other_domain_labels != other_valid_domain_labels:
    +            return False
    +
    +        if wildcard_label == '*':
    +            return True
    +
    +        wildcard_regex = re.compile('^' + wildcard_label.replace('*', '.*') + '$')
    +        if wildcard_regex.match(first_domain_label):
    +            return True
    +
    +        return False
    +
    +
    +# The structures are taken from the OpenSSL source file x_x509a.c, and specify
    +# extra information that is added to X.509 certificates to store trust
    +# information about the certificate.
    +
    +class KeyPurposeIdentifiers(SequenceOf):
    +    _child_spec = KeyPurposeId
    +
    +
    +class SequenceOfAlgorithmIdentifiers(SequenceOf):
    +    _child_spec = AlgorithmIdentifier
    +
    +
    +class CertificateAux(Sequence):
    +    _fields = [
    +        ('trust', KeyPurposeIdentifiers, {'optional': True}),
    +        ('reject', KeyPurposeIdentifiers, {'implicit': 0, 'optional': True}),
    +        ('alias', UTF8String, {'optional': True}),
    +        ('keyid', OctetString, {'optional': True}),
    +        ('other', SequenceOfAlgorithmIdentifiers, {'implicit': 1, 'optional': True}),
    +    ]
    +
    +
    +class TrustedCertificate(Concat):
    +    _child_specs = [Certificate, CertificateAux]
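
Note: a minimal usage sketch (not part of the vendored file) for the Certificate helpers added above. It assumes asn1crypto is importable from this package tree and that a DER-encoded certificate exists at the hypothetical path cert.der.

    # Hypothetical sketch: exercising the helper properties defined above.
    from asn1crypto.x509 import Certificate

    with open('cert.der', 'rb') as f:           # hypothetical input file
        cert = Certificate.load(f.read())

    print(cert.sha256_fingerprint)              # uppercase hex pairs, space-separated
    print(cert.valid_domains)                   # e.g. ['example.com', '*.example.com']
    print(cert.is_valid_domain_ip('www.example.com'))  # wildcard-aware True/False
    print(cert.self_signed)                     # 'no' or 'maybe'; signature not verified
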
    diff --git a/server/www/packages/packages-windows/x86/cffi/__init__.py b/server/www/packages/packages-windows/x86/cffi/__init__.py
    new file mode 100644
    index 0000000..709dc8c
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/__init__.py
    @@ -0,0 +1,14 @@
    +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
    +           'FFIError']
    +
    +from .api import FFI
    +from .error import CDefError, FFIError, VerificationError, VerificationMissing
    +from .error import PkgConfigError
    +
    +__version__ = "1.14.0"
    +__version_info__ = (1, 14, 0)
    +
    +# The verifier module file names are based on the CRC32 of a string that
    +# contains the following version number.  It may be older than __version__
    +# if nothing is clearly incompatible.
    +__version_verifier_modules__ = "0.8.6"
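
Note: the version constants above are what FFI.__init__ (api.py, further down in this diff) compares against the compiled _cffi_backend extension. A hedged sketch of doing that same check up front:

    # Sketch: replicate the version check FFI.__init__ performs.
    import cffi
    import _cffi_backend  # compiled extension; assumed to be installed

    if _cffi_backend.__version__ != cffi.__version__:
        raise RuntimeError('cffi %s does not match _cffi_backend %s'
                           % (cffi.__version__, _cffi_backend.__version__))
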
    diff --git a/server/www/packages/packages-windows/x86/cffi/_cffi_errors.h b/server/www/packages/packages-windows/x86/cffi/_cffi_errors.h
    new file mode 100644
    index 0000000..46797b0
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/_cffi_errors.h
    @@ -0,0 +1,147 @@
    +#ifndef CFFI_MESSAGEBOX
    +# ifdef _MSC_VER
    +#  define CFFI_MESSAGEBOX  1
    +# else
    +#  define CFFI_MESSAGEBOX  0
    +# endif
    +#endif
    +
    +
    +#if CFFI_MESSAGEBOX
    +/* Windows only: logic to take the Python-CFFI embedding logic
    +   initialization errors and display them in a background thread
    +   with MessageBox.  The idea is that if the whole program closes
    +   as a result of this problem, then likely it is already a console
    +   program and you can read the stderr output in the console too.
    +   If it is not a console program, then it will likely show its own
    +   dialog to complain, or generally not abruptly close, and for this
    +   case the background thread should stay alive.
    +*/
    +static void *volatile _cffi_bootstrap_text;
    +
    +static PyObject *_cffi_start_error_capture(void)
    +{
    +    PyObject *result = NULL;
    +    PyObject *x, *m, *bi;
    +
    +    if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
    +            (void *)1, NULL) != NULL)
    +        return (PyObject *)1;
    +
    +    m = PyImport_AddModule("_cffi_error_capture");
    +    if (m == NULL)
    +        goto error;
    +
    +    result = PyModule_GetDict(m);
    +    if (result == NULL)
    +        goto error;
    +
    +#if PY_MAJOR_VERSION >= 3
    +    bi = PyImport_ImportModule("builtins");
    +#else
    +    bi = PyImport_ImportModule("__builtin__");
    +#endif
    +    if (bi == NULL)
    +        goto error;
    +    PyDict_SetItemString(result, "__builtins__", bi);
    +    Py_DECREF(bi);
    +
    +    x = PyRun_String(
    +        "import sys\n"
    +        "class FileLike:\n"
    +        "  def write(self, x):\n"
    +        "    try:\n"
    +        "      of.write(x)\n"
    +        "    except: pass\n"
    +        "    self.buf += x\n"
    +        "fl = FileLike()\n"
    +        "fl.buf = ''\n"
    +        "of = sys.stderr\n"
    +        "sys.stderr = fl\n"
    +        "def done():\n"
    +        "  sys.stderr = of\n"
    +        "  return fl.buf\n",   /* make sure the returned value stays alive */
    +        Py_file_input,
    +        result, result);
    +    Py_XDECREF(x);
    +
    + error:
    +    if (PyErr_Occurred())
    +    {
    +        PyErr_WriteUnraisable(Py_None);
    +        PyErr_Clear();
    +    }
    +    return result;
    +}
    +
    +#pragma comment(lib, "user32.lib")
    +
    +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
    +{
    +    Sleep(666);    /* may be interrupted if the whole process is closing */
    +#if PY_MAJOR_VERSION >= 3
    +    MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
    +                L"Python-CFFI error",
    +                MB_OK | MB_ICONERROR);
    +#else
    +    MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
    +                "Python-CFFI error",
    +                MB_OK | MB_ICONERROR);
    +#endif
    +    _cffi_bootstrap_text = NULL;
    +    return 0;
    +}
    +
    +static void _cffi_stop_error_capture(PyObject *ecap)
    +{
    +    PyObject *s;
    +    void *text;
    +
    +    if (ecap == (PyObject *)1)
    +        return;
    +
    +    if (ecap == NULL)
    +        goto error;
    +
    +    s = PyRun_String("done()", Py_eval_input, ecap, ecap);
    +    if (s == NULL)
    +        goto error;
    +
    +    /* Show a dialog box, but in a background thread, and
    +       never show multiple dialog boxes at once. */
    +#if PY_MAJOR_VERSION >= 3
    +    text = PyUnicode_AsWideCharString(s, NULL);
    +#else
    +    text = PyString_AsString(s);
    +#endif
    +
    +    _cffi_bootstrap_text = text;
    +
    +    if (text != NULL)
    +    {
    +        HANDLE h;
    +        h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
    +                         NULL, 0, NULL);
    +        if (h != NULL)
    +            CloseHandle(h);
    +    }
    +    /* decref the string, but it should stay alive as 'fl.buf'
    +       in the small module above.  It will really be freed only if
    +       we later get another similar error.  So it's a leak of at
    +       most one copy of the small module.  That's fine for this
    +       situation which is usually a "fatal error" anyway. */
    +    Py_DECREF(s);
    +    PyErr_Clear();
    +    return;
    +
    +  error:
    +    _cffi_bootstrap_text = NULL;
    +    PyErr_Clear();
    +}
    +
    +#else
    +
    +static PyObject *_cffi_start_error_capture(void) { return NULL; }
    +static void _cffi_stop_error_capture(PyObject *ecap) { }
    +
+#endif
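
Note: the Python snippet embedded in _cffi_start_error_capture() above tees sys.stderr into a buffer so the background MessageBox thread can display it later. A standalone, illustrative Python sketch of that capture pattern:

    # Sketch of the stderr-capture trick used above (illustrative only).
    import sys

    class FileLike:
        def __init__(self, of):
            self.of, self.buf = of, ''
        def write(self, x):
            try:
                self.of.write(x)   # still echo to the real stderr
            except Exception:
                pass
            self.buf += x          # keep a copy for later display

    fl = FileLike(sys.stderr)
    sys.stderr = fl
    try:
        print('boom', file=sys.stderr)
    finally:
        sys.stderr = fl.of         # the done() step in the embedded code
    print('captured:', fl.buf)
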
    diff --git a/server/www/packages/packages-windows/x86/cffi/_cffi_include.h b/server/www/packages/packages-windows/x86/cffi/_cffi_include.h
    new file mode 100644
    index 0000000..a47196a
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/_cffi_include.h
    @@ -0,0 +1,356 @@
    +#define _CFFI_
    +
    +/* We try to define Py_LIMITED_API before including Python.h.
    +
    +   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
    +   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
    +   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
    +   the same works for the other two macros.  Py_DEBUG implies them,
    +   but not the other way around.
    +
    +   Issue #350 is still open: on Windows, the code here causes it to link
    +   with PYTHON36.DLL (for example) instead of PYTHON3.DLL.  A fix was
    +   attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
    +   does not make PYTHON3.DLL available, and so the "correctly" compiled
    +   version would not run inside a virtualenv.  We will re-apply the fix
    +   after virtualenv has been fixed for some time.  For explanation, see
    +   issue #355.  For a workaround if you want PYTHON3.DLL and don't worry
    +   about virtualenv, see issue #350.  See also 'py_limited_api' in
    +   setuptools_ext.py.
    +*/
    +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+#  include <pyconfig.h>
    +#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
    +#    define Py_LIMITED_API
    +#  endif
    +#endif
    +
+#include <Python.h>
    +#ifdef __cplusplus
    +extern "C" {
    +#endif
+#include <stddef.h>
    +#include "parse_c_type.h"
    +
    +/* this block of #ifs should be kept exactly identical between
    +   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
    +   and cffi/_cffi_include.h */
    +#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
    +# if _MSC_VER < 1600   /* MSVC < 2010 */
    +   typedef __int8 int8_t;
    +   typedef __int16 int16_t;
    +   typedef __int32 int32_t;
    +   typedef __int64 int64_t;
    +   typedef unsigned __int8 uint8_t;
    +   typedef unsigned __int16 uint16_t;
    +   typedef unsigned __int32 uint32_t;
    +   typedef unsigned __int64 uint64_t;
    +   typedef __int8 int_least8_t;
    +   typedef __int16 int_least16_t;
    +   typedef __int32 int_least32_t;
    +   typedef __int64 int_least64_t;
    +   typedef unsigned __int8 uint_least8_t;
    +   typedef unsigned __int16 uint_least16_t;
    +   typedef unsigned __int32 uint_least32_t;
    +   typedef unsigned __int64 uint_least64_t;
    +   typedef __int8 int_fast8_t;
    +   typedef __int16 int_fast16_t;
    +   typedef __int32 int_fast32_t;
    +   typedef __int64 int_fast64_t;
    +   typedef unsigned __int8 uint_fast8_t;
    +   typedef unsigned __int16 uint_fast16_t;
    +   typedef unsigned __int32 uint_fast32_t;
    +   typedef unsigned __int64 uint_fast64_t;
    +   typedef __int64 intmax_t;
    +   typedef unsigned __int64 uintmax_t;
    +# else
+#  include <stdint.h>
    +# endif
    +# if _MSC_VER < 1800   /* MSVC < 2013 */
    +#  ifndef __cplusplus
    +    typedef unsigned char _Bool;
    +#  endif
    +# endif
    +#else
+# include <stdint.h>
    +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
    +# endif
    +#endif
    +
    +#ifdef __GNUC__
    +# define _CFFI_UNUSED_FN  __attribute__((unused))
    +#else
    +# define _CFFI_UNUSED_FN  /* nothing */
    +#endif
    +
    +#ifdef __cplusplus
    +# ifndef _Bool
    +   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
    +# endif
    +#endif
    +
    +/**********  CPython-specific section  **********/
    +#ifndef PYPY_VERSION
    +
    +
    +#if PY_MAJOR_VERSION >= 3
    +# define PyInt_FromLong PyLong_FromLong
    +#endif
    +
    +#define _cffi_from_c_double PyFloat_FromDouble
    +#define _cffi_from_c_float PyFloat_FromDouble
    +#define _cffi_from_c_long PyInt_FromLong
    +#define _cffi_from_c_ulong PyLong_FromUnsignedLong
    +#define _cffi_from_c_longlong PyLong_FromLongLong
    +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
    +#define _cffi_from_c__Bool PyBool_FromLong
    +
    +#define _cffi_to_c_double PyFloat_AsDouble
    +#define _cffi_to_c_float PyFloat_AsDouble
    +
    +#define _cffi_from_c_int(x, type)                                        \
    +    (((type)-1) > 0 ? /* unsigned */                                     \
    +        (sizeof(type) < sizeof(long) ?                                   \
    +            PyInt_FromLong((long)x) :                                    \
    +         sizeof(type) == sizeof(long) ?                                  \
    +            PyLong_FromUnsignedLong((unsigned long)x) :                  \
    +            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
    +        (sizeof(type) <= sizeof(long) ?                                  \
    +            PyInt_FromLong((long)x) :                                    \
    +            PyLong_FromLongLong((long long)x)))
    +
    +#define _cffi_to_c_int(o, type)                                          \
    +    ((type)(                                                             \
    +     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
    +                                         : (type)_cffi_to_c_i8(o)) :     \
    +     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
    +                                         : (type)_cffi_to_c_i16(o)) :    \
    +     sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o)       \
    +                                         : (type)_cffi_to_c_i32(o)) :    \
    +     sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o)       \
    +                                         : (type)_cffi_to_c_i64(o)) :    \
    +     (Py_FatalError("unsupported size for type " #type), (type)0)))
    +
    +#define _cffi_to_c_i8                                                    \
    +                 ((int(*)(PyObject *))_cffi_exports[1])
    +#define _cffi_to_c_u8                                                    \
    +                 ((int(*)(PyObject *))_cffi_exports[2])
    +#define _cffi_to_c_i16                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[3])
    +#define _cffi_to_c_u16                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[4])
    +#define _cffi_to_c_i32                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[5])
    +#define _cffi_to_c_u32                                                   \
    +                 ((unsigned int(*)(PyObject *))_cffi_exports[6])
    +#define _cffi_to_c_i64                                                   \
    +                 ((long long(*)(PyObject *))_cffi_exports[7])
    +#define _cffi_to_c_u64                                                   \
    +                 ((unsigned long long(*)(PyObject *))_cffi_exports[8])
    +#define _cffi_to_c_char                                                  \
    +                 ((int(*)(PyObject *))_cffi_exports[9])
    +#define _cffi_from_c_pointer                                             \
    +    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
    +#define _cffi_to_c_pointer                                               \
    +    ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
    +#define _cffi_get_struct_layout                                          \
    +    not used any more
    +#define _cffi_restore_errno                                              \
    +    ((void(*)(void))_cffi_exports[13])
    +#define _cffi_save_errno                                                 \
    +    ((void(*)(void))_cffi_exports[14])
    +#define _cffi_from_c_char                                                \
    +    ((PyObject *(*)(char))_cffi_exports[15])
    +#define _cffi_from_c_deref                                               \
    +    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
    +#define _cffi_to_c                                                       \
    +    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
    +#define _cffi_from_c_struct                                              \
    +    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
    +#define _cffi_to_c_wchar_t                                               \
    +    ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
    +#define _cffi_from_c_wchar_t                                             \
    +    ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
    +#define _cffi_to_c_long_double                                           \
    +    ((long double(*)(PyObject *))_cffi_exports[21])
    +#define _cffi_to_c__Bool                                                 \
    +    ((_Bool(*)(PyObject *))_cffi_exports[22])
    +#define _cffi_prepare_pointer_call_argument                              \
    +    ((Py_ssize_t(*)(struct _cffi_ctypedescr *,                           \
    +                    PyObject *, char **))_cffi_exports[23])
    +#define _cffi_convert_array_from_object                                  \
    +    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
    +#define _CFFI_CPIDX  25
    +#define _cffi_call_python                                                \
    +    ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
    +#define _cffi_to_c_wchar3216_t                                           \
    +    ((int(*)(PyObject *))_cffi_exports[26])
    +#define _cffi_from_c_wchar3216_t                                         \
    +    ((PyObject *(*)(int))_cffi_exports[27])
    +#define _CFFI_NUM_EXPORTS 28
    +
    +struct _cffi_ctypedescr;
    +
    +static void *_cffi_exports[_CFFI_NUM_EXPORTS];
    +
    +#define _cffi_type(index)   (                           \
    +    assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
    +    (struct _cffi_ctypedescr *)_cffi_types[index])
    +
    +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
    +                            const struct _cffi_type_context_s *ctx)
    +{
    +    PyObject *module, *o_arg, *new_module;
    +    void *raw[] = {
    +        (void *)module_name,
    +        (void *)version,
    +        (void *)_cffi_exports,
    +        (void *)ctx,
    +    };
    +
    +    module = PyImport_ImportModule("_cffi_backend");
    +    if (module == NULL)
    +        goto failure;
    +
    +    o_arg = PyLong_FromVoidPtr((void *)raw);
    +    if (o_arg == NULL)
    +        goto failure;
    +
    +    new_module = PyObject_CallMethod(
    +        module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
    +
    +    Py_DECREF(o_arg);
    +    Py_DECREF(module);
    +    return new_module;
    +
    +  failure:
    +    Py_XDECREF(module);
    +    return NULL;
    +}
    +
    +
    +#ifdef HAVE_WCHAR_H
    +typedef wchar_t _cffi_wchar_t;
    +#else
    +typedef uint16_t _cffi_wchar_t;   /* same random pick as _cffi_backend.c */
    +#endif
    +
    +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
    +{
    +    if (sizeof(_cffi_wchar_t) == 2)
    +        return (uint16_t)_cffi_to_c_wchar_t(o);
    +    else
    +        return (uint16_t)_cffi_to_c_wchar3216_t(o);
    +}
    +
    +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
    +{
    +    if (sizeof(_cffi_wchar_t) == 2)
    +        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
    +    else
    +        return _cffi_from_c_wchar3216_t((int)x);
    +}
    +
    +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
    +{
    +    if (sizeof(_cffi_wchar_t) == 4)
    +        return (int)_cffi_to_c_wchar_t(o);
    +    else
    +        return (int)_cffi_to_c_wchar3216_t(o);
    +}
    +
    +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
    +{
    +    if (sizeof(_cffi_wchar_t) == 4)
    +        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
    +    else
    +        return _cffi_from_c_wchar3216_t((int)x);
    +}
    +
    +union _cffi_union_alignment_u {
    +    unsigned char m_char;
    +    unsigned short m_short;
    +    unsigned int m_int;
    +    unsigned long m_long;
    +    unsigned long long m_longlong;
    +    float m_float;
    +    double m_double;
    +    long double m_longdouble;
    +};
    +
    +struct _cffi_freeme_s {
    +    struct _cffi_freeme_s *next;
    +    union _cffi_union_alignment_u alignment;
    +};
    +
    +_CFFI_UNUSED_FN static int
    +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
    +                             char **output_data, Py_ssize_t datasize,
    +                             struct _cffi_freeme_s **freeme)
    +{
    +    char *p;
    +    if (datasize < 0)
    +        return -1;
    +
    +    p = *output_data;
    +    if (p == NULL) {
    +        struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
    +            offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
    +        if (fp == NULL)
    +            return -1;
    +        fp->next = *freeme;
    +        *freeme = fp;
    +        p = *output_data = (char *)&fp->alignment;
    +    }
    +    memset((void *)p, 0, (size_t)datasize);
    +    return _cffi_convert_array_from_object(p, ctptr, arg);
    +}
    +
    +_CFFI_UNUSED_FN static void
    +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
    +{
    +    do {
    +        void *p = (void *)freeme;
    +        freeme = freeme->next;
    +        PyObject_Free(p);
    +    } while (freeme != NULL);
    +}
    +
    +/**********  end CPython-specific section  **********/
    +#else
    +_CFFI_UNUSED_FN
    +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
    +# define _cffi_call_python  _cffi_call_python_org
    +#endif
    +
    +
    +#define _cffi_array_len(array)   (sizeof(array) / sizeof((array)[0]))
    +
    +#define _cffi_prim_int(size, sign)                                      \
    +    ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8  : _CFFI_PRIM_UINT8)  :    \
    +     (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) :    \
    +     (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) :    \
    +     (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) :    \
    +     _CFFI__UNKNOWN_PRIM)
    +
    +#define _cffi_prim_float(size)                                          \
    +    ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT :                       \
    +     (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE :                     \
    +     (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE :       \
    +     _CFFI__UNKNOWN_FLOAT_PRIM)
    +
    +#define _cffi_check_int(got, got_nonpos, expected)      \
    +    ((got_nonpos) == (expected <= 0) &&                 \
    +     (got) == (unsigned long long)expected)
    +
    +#ifdef MS_WIN32
    +# define _cffi_stdcall  __stdcall
    +#else
    +# define _cffi_stdcall  /* nothing */
    +#endif
    +
    +#ifdef __cplusplus
    +}
    +#endif
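
Note: the _cffi_prim_int macro above selects a primitive type code purely from (size, signedness). A Python rendering of that dispatch, with invented names, just to make the table explicit:

    # Illustrative rendering of _cffi_prim_int's (size, sign) dispatch.
    PRIM = {(1, True): 'INT8',  (1, False): 'UINT8',
            (2, True): 'INT16', (2, False): 'UINT16',
            (4, True): 'INT32', (4, False): 'UINT32',
            (8, True): 'INT64', (8, False): 'UINT64'}

    def prim_int(size, signed):
        # unknown sizes fall through, like _CFFI__UNKNOWN_PRIM
        return PRIM.get((size, signed), 'UNKNOWN_PRIM')

    assert prim_int(4, True) == 'INT32'
    assert prim_int(16, True) == 'UNKNOWN_PRIM'
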
    diff --git a/server/www/packages/packages-windows/x86/cffi/_embedding.h b/server/www/packages/packages-windows/x86/cffi/_embedding.h
    new file mode 100644
    index 0000000..1c82286
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/_embedding.h
    @@ -0,0 +1,520 @@
    +
    +/***** Support code for embedding *****/
    +
    +#ifdef __cplusplus
    +extern "C" {
    +#endif
    +
    +
    +#if defined(_WIN32)
    +#  define CFFI_DLLEXPORT  __declspec(dllexport)
    +#elif defined(__GNUC__)
    +#  define CFFI_DLLEXPORT  __attribute__((visibility("default")))
    +#else
    +#  define CFFI_DLLEXPORT  /* nothing */
    +#endif
    +
    +
    +/* There are two global variables of type _cffi_call_python_fnptr:
    +
    +   * _cffi_call_python, which we declare just below, is the one called
    +     by ``extern "Python"`` implementations.
    +
    +   * _cffi_call_python_org, which on CPython is actually part of the
    +     _cffi_exports[] array, is the function pointer copied from
    +     _cffi_backend.
    +
    +   After initialization is complete, both are equal.  However, the
    +   first one remains equal to &_cffi_start_and_call_python until the
    +   very end of initialization, when we are (or should be) sure that
    +   concurrent threads also see a completely initialized world, and
    +   only then is it changed.
    +*/
    +#undef _cffi_call_python
    +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
    +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
    +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
    +
    +
    +#ifndef _MSC_VER
    +   /* --- Assuming a GCC not infinitely old --- */
    +# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
    +# define cffi_write_barrier()          __sync_synchronize()
    +# if !defined(__amd64__) && !defined(__x86_64__) &&   \
    +     !defined(__i386__) && !defined(__i386)
    +#   define cffi_read_barrier()         __sync_synchronize()
    +# else
    +#   define cffi_read_barrier()         (void)0
    +# endif
    +#else
    +   /* --- Windows threads version --- */
+# include <windows.h>
    +# define cffi_compare_and_swap(l,o,n) \
    +                               (InterlockedCompareExchangePointer(l,n,o) == (o))
    +# define cffi_write_barrier()       InterlockedCompareExchange(&_cffi_dummy,0,0)
    +# define cffi_read_barrier()           (void)0
    +static volatile LONG _cffi_dummy;
    +#endif
    +
    +#ifdef WITH_THREAD
    +# ifndef _MSC_VER
+#  include <pthread.h>
    +   static pthread_mutex_t _cffi_embed_startup_lock;
    +# else
    +   static CRITICAL_SECTION _cffi_embed_startup_lock;
    +# endif
    +  static char _cffi_embed_startup_lock_ready = 0;
    +#endif
    +
    +static void _cffi_acquire_reentrant_mutex(void)
    +{
    +    static void *volatile lock = NULL;
    +
    +    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
    +        /* should ideally do a spin loop instruction here, but
    +           hard to do it portably and doesn't really matter I
    +           think: pthread_mutex_init() should be very fast, and
    +           this is only run at start-up anyway. */
    +    }
    +
    +#ifdef WITH_THREAD
    +    if (!_cffi_embed_startup_lock_ready) {
    +# ifndef _MSC_VER
    +        pthread_mutexattr_t attr;
    +        pthread_mutexattr_init(&attr);
    +        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
    +        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
    +# else
    +        InitializeCriticalSection(&_cffi_embed_startup_lock);
    +# endif
    +        _cffi_embed_startup_lock_ready = 1;
    +    }
    +#endif
    +
    +    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
    +        ;
    +
    +#ifndef _MSC_VER
    +    pthread_mutex_lock(&_cffi_embed_startup_lock);
    +#else
    +    EnterCriticalSection(&_cffi_embed_startup_lock);
    +#endif
    +}
    +
    +static void _cffi_release_reentrant_mutex(void)
    +{
    +#ifndef _MSC_VER
    +    pthread_mutex_unlock(&_cffi_embed_startup_lock);
    +#else
    +    LeaveCriticalSection(&_cffi_embed_startup_lock);
    +#endif
    +}
    +
    +
    +/**********  CPython-specific section  **********/
    +#ifndef PYPY_VERSION
    +
    +#include "_cffi_errors.h"
    +
    +
    +#define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]
    +
    +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */
    +
    +static void _cffi_py_initialize(void)
    +{
    +    /* XXX use initsigs=0, which "skips initialization registration of
    +       signal handlers, which might be useful when Python is
    +       embedded" according to the Python docs.  But review and think
    +       if it should be a user-controllable setting.
    +
    +       XXX we should also give a way to write errors to a buffer
    +       instead of to stderr.
    +
    +       XXX if importing 'site' fails, CPython (any version) calls
    +       exit().  Should we try to work around this behavior here?
    +    */
    +    Py_InitializeEx(0);
    +}
    +
    +static int _cffi_initialize_python(void)
    +{
    +    /* This initializes Python, imports _cffi_backend, and then the
    +       present .dll/.so is set up as a CPython C extension module.
    +    */
    +    int result;
    +    PyGILState_STATE state;
    +    PyObject *pycode=NULL, *global_dict=NULL, *x;
    +    PyObject *builtins;
    +
    +    state = PyGILState_Ensure();
    +
    +    /* Call the initxxx() function from the present module.  It will
    +       create and initialize us as a CPython extension module, instead
    +       of letting the startup Python code do it---it might reimport
+       the same .dll/.so and maybe get confused on some platforms.
    +       It might also have troubles locating the .dll/.so again for all
    +       I know.
    +    */
    +    (void)_CFFI_PYTHON_STARTUP_FUNC();
    +    if (PyErr_Occurred())
    +        goto error;
    +
    +    /* Now run the Python code provided to ffi.embedding_init_code().
    +     */
    +    pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+                              "<init code for '" _CFFI_MODULE_NAME "'>",
    +                              Py_file_input);
    +    if (pycode == NULL)
    +        goto error;
    +    global_dict = PyDict_New();
    +    if (global_dict == NULL)
    +        goto error;
    +    builtins = PyEval_GetBuiltins();
    +    if (builtins == NULL)
    +        goto error;
    +    if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
    +        goto error;
    +    x = PyEval_EvalCode(
    +#if PY_MAJOR_VERSION < 3
    +                        (PyCodeObject *)
    +#endif
    +                        pycode, global_dict, global_dict);
    +    if (x == NULL)
    +        goto error;
    +    Py_DECREF(x);
    +
    +    /* Done!  Now if we've been called from
    +       _cffi_start_and_call_python() in an ``extern "Python"``, we can
    +       only hope that the Python code did correctly set up the
    +       corresponding @ffi.def_extern() function.  Otherwise, the
    +       general logic of ``extern "Python"`` functions (inside the
    +       _cffi_backend module) will find that the reference is still
    +       missing and print an error.
    +     */
    +    result = 0;
    + done:
    +    Py_XDECREF(pycode);
    +    Py_XDECREF(global_dict);
    +    PyGILState_Release(state);
    +    return result;
    +
    + error:;
    +    {
    +        /* Print as much information as potentially useful.
    +           Debugging load-time failures with embedding is not fun
    +        */
    +        PyObject *ecap;
    +        PyObject *exception, *v, *tb, *f, *modules, *mod;
    +        PyErr_Fetch(&exception, &v, &tb);
    +        ecap = _cffi_start_error_capture();
    +        f = PySys_GetObject((char *)"stderr");
    +        if (f != NULL && f != Py_None) {
    +            PyFile_WriteString(
    +                "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
    +        }
    +
    +        if (exception != NULL) {
    +            PyErr_NormalizeException(&exception, &v, &tb);
    +            PyErr_Display(exception, v, tb);
    +        }
    +        Py_XDECREF(exception);
    +        Py_XDECREF(v);
    +        Py_XDECREF(tb);
    +
    +        if (f != NULL && f != Py_None) {
    +            PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
    +                               "\ncompiled with cffi version: 1.14.0"
    +                               "\n_cffi_backend module: ", f);
    +            modules = PyImport_GetModuleDict();
    +            mod = PyDict_GetItemString(modules, "_cffi_backend");
    +            if (mod == NULL) {
    +                PyFile_WriteString("not loaded", f);
    +            }
    +            else {
    +                v = PyObject_GetAttrString(mod, "__file__");
    +                PyFile_WriteObject(v, f, 0);
    +                Py_XDECREF(v);
    +            }
    +            PyFile_WriteString("\nsys.path: ", f);
    +            PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
    +            PyFile_WriteString("\n\n", f);
    +        }
    +        _cffi_stop_error_capture(ecap);
    +    }
    +    result = -1;
    +    goto done;
    +}
    +
    +PyAPI_DATA(char *) _PyParser_TokenNames[];  /* from CPython */
    +
    +static int _cffi_carefully_make_gil(void)
    +{
    +    /* This does the basic initialization of Python.  It can be called
    +       completely concurrently from unrelated threads.  It assumes
    +       that we don't hold the GIL before (if it exists), and we don't
    +       hold it afterwards.
    +
    +       (What it really does used to be completely different in Python 2
    +       and Python 3, with the Python 2 solution avoiding the spin-lock
    +       around the Py_InitializeEx() call.  However, after recent changes
    +       to CPython 2.7 (issue #358) it no longer works.  So we use the
    +       Python 3 solution everywhere.)
    +
    +       This initializes Python by calling Py_InitializeEx().
    +       Important: this must not be called concurrently at all.
    +       So we use a global variable as a simple spin lock.  This global
    +       variable must be from 'libpythonX.Y.so', not from this
    +       cffi-based extension module, because it must be shared from
    +       different cffi-based extension modules.
    +
    +       In Python < 3.8, we choose
    +       _PyParser_TokenNames[0] as a completely arbitrary pointer value
    +       that is never written to.  The default is to point to the
    +       string "ENDMARKER".  We change it temporarily to point to the
    +       next character in that string.  (Yes, I know it's REALLY
    +       obscure.)
    +
    +       In Python >= 3.8, this string array is no longer writable, so
    +       instead we pick PyCapsuleType.tp_version_tag.  We can't change
    +       Python < 3.8 because someone might use a mixture of cffi
    +       embedded modules, some of which were compiled before this file
    +       changed.
    +    */
    +
    +#ifdef WITH_THREAD
    +# if PY_VERSION_HEX < 0x03080000
    +    char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
    +    char *old_value, *locked_value;
    +
    +    while (1) {    /* spin loop */
    +        old_value = *lock;
    +        locked_value = old_value + 1;
    +        if (old_value[0] == 'E') {
    +            assert(old_value[1] == 'N');
    +            if (cffi_compare_and_swap(lock, old_value, locked_value))
    +                break;
    +        }
    +        else {
    +            assert(old_value[0] == 'N');
    +            /* should ideally do a spin loop instruction here, but
    +               hard to do it portably and doesn't really matter I
    +               think: PyEval_InitThreads() should be very fast, and
    +               this is only run at start-up anyway. */
    +        }
    +    }
    +# else
    +    int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
    +    int old_value, locked_value;
    +    assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
    +
    +    while (1) {    /* spin loop */
    +        old_value = *lock;
    +        locked_value = -42;
    +        if (old_value == 0) {
    +            if (cffi_compare_and_swap(lock, old_value, locked_value))
    +                break;
    +        }
    +        else {
    +            assert(old_value == locked_value);
    +            /* should ideally do a spin loop instruction here, but
    +               hard to do it portably and doesn't really matter I
    +               think: PyEval_InitThreads() should be very fast, and
    +               this is only run at start-up anyway. */
    +        }
    +    }
    +# endif
    +#endif
    +
    +    /* call Py_InitializeEx() */
    +    if (!Py_IsInitialized()) {
    +        _cffi_py_initialize();
    +        PyEval_InitThreads();
    +        PyEval_SaveThread();  /* release the GIL */
    +        /* the returned tstate must be the one that has been stored into the
    +           autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
    +    }
    +    else {
    +        PyGILState_STATE state = PyGILState_Ensure();
    +        PyEval_InitThreads();
    +        PyGILState_Release(state);
    +    }
    +
    +#ifdef WITH_THREAD
    +    /* release the lock */
    +    while (!cffi_compare_and_swap(lock, locked_value, old_value))
    +        ;
    +#endif
    +
    +    return 0;
    +}
    +
    +/**********  end CPython-specific section  **********/
    +
    +
    +#else
    +
    +
    +/**********  PyPy-specific section  **********/
    +
    +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]);   /* forward */
    +
    +static struct _cffi_pypy_init_s {
    +    const char *name;
    +    void (*func)(const void *[]);
    +    const char *code;
    +} _cffi_pypy_init = {
    +    _CFFI_MODULE_NAME,
    +    (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
    +    _CFFI_PYTHON_STARTUP_CODE,
    +};
    +
    +extern int pypy_carefully_make_gil(const char *);
    +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
    +
    +static int _cffi_carefully_make_gil(void)
    +{
    +    return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
    +}
    +
    +static int _cffi_initialize_python(void)
    +{
    +    return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
    +}
    +
    +/**********  end PyPy-specific section  **********/
    +
    +
    +#endif
    +
    +
    +#ifdef __GNUC__
    +__attribute__((noinline))
    +#endif
    +static _cffi_call_python_fnptr _cffi_start_python(void)
    +{
    +    /* Delicate logic to initialize Python.  This function can be
    +       called multiple times concurrently, e.g. when the process calls
    +       its first ``extern "Python"`` functions in multiple threads at
    +       once.  It can also be called recursively, in which case we must
    +       ignore it.  We also have to consider what occurs if several
    +       different cffi-based extensions reach this code in parallel
    +       threads---it is a different copy of the code, then, and we
    +       can't have any shared global variable unless it comes from
    +       'libpythonX.Y.so'.
    +
    +       Idea:
    +
    +       * _cffi_carefully_make_gil(): "carefully" call
    +         PyEval_InitThreads() (possibly with Py_InitializeEx() first).
    +
    +       * then we use a (local) custom lock to make sure that a call to this
    +         cffi-based extension will wait if another call to the *same*
    +         extension is running the initialization in another thread.
    +         It is reentrant, so that a recursive call will not block, but
    +         only one from a different thread.
    +
    +       * then we grab the GIL and (Python 2) we call Py_InitializeEx().
    +         At this point, concurrent calls to Py_InitializeEx() are not
    +         possible: we have the GIL.
    +
    +       * do the rest of the specific initialization, which may
    +         temporarily release the GIL but not the custom lock.
    +         Only release the custom lock when we are done.
    +    */
    +    static char called = 0;
    +
    +    if (_cffi_carefully_make_gil() != 0)
    +        return NULL;
    +
    +    _cffi_acquire_reentrant_mutex();
    +
    +    /* Here the GIL exists, but we don't have it.  We're only protected
    +       from concurrency by the reentrant mutex. */
    +
    +    /* This file only initializes the embedded module once, the first
    +       time this is called, even if there are subinterpreters. */
    +    if (!called) {
    +        called = 1;  /* invoke _cffi_initialize_python() only once,
    +                        but don't set '_cffi_call_python' right now,
    +                        otherwise concurrent threads won't call
    +                        this function at all (we need them to wait) */
    +        if (_cffi_initialize_python() == 0) {
    +            /* now initialization is finished.  Switch to the fast-path. */
    +
    +            /* We would like nobody to see the new value of
    +               '_cffi_call_python' without also seeing the rest of the
    +               data initialized.  However, this is not possible.  But
    +               the new value of '_cffi_call_python' is the function
    +               'cffi_call_python()' from _cffi_backend.  So:  */
    +            cffi_write_barrier();
    +            /* ^^^ we put a write barrier here, and a corresponding
    +               read barrier at the start of cffi_call_python().  This
    +               ensures that after that read barrier, we see everything
    +               done here before the write barrier.
    +            */
    +
    +            assert(_cffi_call_python_org != NULL);
    +            _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
    +        }
    +        else {
    +            /* initialization failed.  Reset this to NULL, even if it was
    +               already set to some other value.  Future calls to
    +               _cffi_start_python() are still forced to occur, and will
    +               always return NULL from now on. */
    +            _cffi_call_python_org = NULL;
    +        }
    +    }
    +
    +    _cffi_release_reentrant_mutex();
    +
    +    return (_cffi_call_python_fnptr)_cffi_call_python_org;
    +}
    +
    +static
    +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
    +{
    +    _cffi_call_python_fnptr fnptr;
    +    int current_err = errno;
    +#ifdef _MSC_VER
    +    int current_lasterr = GetLastError();
    +#endif
    +    fnptr = _cffi_start_python();
    +    if (fnptr == NULL) {
    +        fprintf(stderr, "function %s() called, but initialization code "
    +                        "failed.  Returning 0.\n", externpy->name);
    +        memset(args, 0, externpy->size_of_result);
    +    }
    +#ifdef _MSC_VER
    +    SetLastError(current_lasterr);
    +#endif
    +    errno = current_err;
    +
    +    if (fnptr != NULL)
    +        fnptr(externpy, args);
    +}
    +
    +
    +/* The cffi_start_python() function makes sure Python is initialized
    +   and our cffi module is set up.  It can be called manually from the
    +   user C code.  The same effect is obtained automatically from any
    +   dll-exported ``extern "Python"`` function.  This function returns
    +   -1 if initialization failed, 0 if all is OK.  */
    +_CFFI_UNUSED_FN
    +static int cffi_start_python(void)
    +{
    +    if (_cffi_call_python == &_cffi_start_and_call_python) {
    +        if (_cffi_start_python() == NULL)
    +            return -1;
    +    }
    +    cffi_read_barrier();
    +    return 0;
    +}
    +
    +#undef cffi_compare_and_swap
    +#undef cffi_write_barrier
    +#undef cffi_read_barrier
    +
    +#ifdef __cplusplus
    +}
    +#endif
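
Note: the embedding logic above amounts to one-time initialization under a reentrant lock, after which a function pointer is swapped to the fast path. A loose Python analogue of _cffi_start_python()'s control flow (purely illustrative; all names invented):

    # Loose analogue: init once under an RLock, then switch to the fast path.
    import threading

    _lock = threading.RLock()   # reentrant, like the pthread/CRITICAL_SECTION lock
    _called = False
    _fast_path = None

    def _initialize():          # stands in for _cffi_initialize_python()
        return 0

    def call_python(*args):     # stands in for _cffi_start_and_call_python()
        global _called, _fast_path
        with _lock:
            if not _called:
                _called = True  # never re-run init, even after a failure
                if _initialize() == 0:
                    _fast_path = lambda *a: ('fast path', a)
        if _fast_path is None:
            raise RuntimeError('initialization failed')
        return _fast_path(*args)
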
    diff --git a/server/www/packages/packages-windows/x86/cffi/api.py b/server/www/packages/packages-windows/x86/cffi/api.py
    new file mode 100644
    index 0000000..10090fe
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/api.py
    @@ -0,0 +1,965 @@
    +import sys, types
    +from .lock import allocate_lock
    +from .error import CDefError
    +from . import model
    +
    +try:
    +    callable
    +except NameError:
    +    # Python 3.1
    +    from collections import Callable
    +    callable = lambda x: isinstance(x, Callable)
    +
    +try:
    +    basestring
    +except NameError:
    +    # Python 3.x
    +    basestring = str
    +
    +_unspecified = object()
    +
    +
    +
    +class FFI(object):
    +    r'''
    +    The main top-level class that you instantiate once, or once per module.
    +
    +    Example usage:
    +
    +        ffi = FFI()
    +        ffi.cdef("""
    +            int printf(const char *, ...);
    +        """)
    +
    +        C = ffi.dlopen(None)   # standard library
    +        -or-
    +        C = ffi.verify()  # use a C compiler: verify the decl above is right
    +
    +        C.printf("hello, %s!\n", ffi.new("char[]", "world"))
    +    '''
    +
    +    def __init__(self, backend=None):
    +        """Create an FFI instance.  The 'backend' argument is used to
    +        select a non-default backend, mostly for tests.
    +        """
    +        if backend is None:
    +            # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
    +            # _cffi_backend.so compiled.
    +            import _cffi_backend as backend
    +            from . import __version__
    +            if backend.__version__ != __version__:
    +                # bad version!  Try to be as explicit as possible.
    +                if hasattr(backend, '__file__'):
    +                    # CPython
    +                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r.  The two versions should be equal; check your installation." % (
    +                        __version__, __file__,
    +                        backend.__version__, backend.__file__))
    +                else:
    +                    # PyPy
    +                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  This interpreter comes with a built-in '_cffi_backend' module, which is version %s.  The two versions should be equal; check your installation." % (
    +                        __version__, __file__, backend.__version__))
    +            # (If you insist you can also try to pass the option
    +            # 'backend=backend_ctypes.CTypesBackend()', but don't
    +            # rely on it!  It's probably not going to work well.)
    +
    +        from . import cparser
    +        self._backend = backend
    +        self._lock = allocate_lock()
    +        self._parser = cparser.Parser()
    +        self._cached_btypes = {}
    +        self._parsed_types = types.ModuleType('parsed_types').__dict__
    +        self._new_types = types.ModuleType('new_types').__dict__
    +        self._function_caches = []
    +        self._libraries = []
    +        self._cdefsources = []
    +        self._included_ffis = []
    +        self._windows_unicode = None
    +        self._init_once_cache = {}
    +        self._cdef_version = None
    +        self._embedding = None
    +        self._typecache = model.get_typecache(backend)
    +        if hasattr(backend, 'set_ffi'):
    +            backend.set_ffi(self)
    +        for name in list(backend.__dict__):
    +            if name.startswith('RTLD_'):
    +                setattr(self, name, getattr(backend, name))
    +        #
    +        with self._lock:
    +            self.BVoidP = self._get_cached_btype(model.voidp_type)
    +            self.BCharA = self._get_cached_btype(model.char_array_type)
    +        if isinstance(backend, types.ModuleType):
    +            # _cffi_backend: attach these constants to the class
    +            if not hasattr(FFI, 'NULL'):
    +                FFI.NULL = self.cast(self.BVoidP, 0)
    +                FFI.CData, FFI.CType = backend._get_types()
    +        else:
    +            # ctypes backend: attach these constants to the instance
    +            self.NULL = self.cast(self.BVoidP, 0)
    +            self.CData, self.CType = backend._get_types()
    +        self.buffer = backend.buffer
    +
    +    def cdef(self, csource, override=False, packed=False, pack=None):
    +        """Parse the given C source.  This registers all declared functions,
    +        types, and global variables.  The functions and global variables can
    +        then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
    +        The types can be used in 'ffi.new()' and other functions.
    +        If 'packed' is specified as True, all structs declared inside this
    +        cdef are packed, i.e. laid out without any field alignment at all.
    +        Alternatively, 'pack' can be a small integer, and requests for
    +        alignment greater than that are ignored (pack=1 is equivalent to
    +        packed=True).
    +        """
    +        self._cdef(csource, override=override, packed=packed, pack=pack)
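    +
    +    # Illustrative usage sketch (editor's note, not upstream cffi code):
    +    # a minimal ABI-mode round trip on POSIX, where dlopen(None) loads
    +    # the standard C library.
    +    #
    +    #     ffi = FFI()
    +    #     ffi.cdef("size_t strlen(const char *);")
    +    #     C = ffi.dlopen(None)
    +    #     assert C.strlen(b"hello") == 5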
    +
    +    def embedding_api(self, csource, packed=False, pack=None):
    +        self._cdef(csource, packed=packed, pack=pack, dllexport=True)
    +        if self._embedding is None:
    +            self._embedding = ''
    +
    +    def _cdef(self, csource, override=False, **options):
    +        if not isinstance(csource, str):    # unicode, on Python 2
    +            if not isinstance(csource, basestring):
    +                raise TypeError("cdef() argument must be a string")
    +            csource = csource.encode('ascii')
    +        with self._lock:
    +            self._cdef_version = object()
    +            self._parser.parse(csource, override=override, **options)
    +            self._cdefsources.append(csource)
    +            if override:
    +                for cache in self._function_caches:
    +                    cache.clear()
    +            finishlist = self._parser._recomplete
    +            if finishlist:
    +                self._parser._recomplete = []
    +                for tp in finishlist:
    +                    tp.finish_backend_type(self, finishlist)
    +
    +    def dlopen(self, name, flags=0):
    +        """Load and return a dynamic library identified by 'name'.
    +        The standard C library can be loaded by passing None.
    +        Note that functions and types declared by 'ffi.cdef()' are not
    +        linked to a particular library, just like C headers; in the
    +        library we only look for the actual (untyped) symbols.
    +        """
    +        if not (isinstance(name, basestring) or
    +                name is None or
    +                isinstance(name, self.CData)):
    +            raise TypeError("dlopen(name): name must be a file name, None, "
    +                            "or an already-opened 'void *' handle")
    +        with self._lock:
    +            lib, function_cache = _make_ffi_library(self, name, flags)
    +            self._function_caches.append(function_cache)
    +            self._libraries.append(lib)
    +        return lib
    +
    +    def dlclose(self, lib):
    +        """Close a library obtained with ffi.dlopen().  After this call,
    +        access to functions or variables from the library will fail
    +        (possibly with a segmentation fault).
    +        """
    +        type(lib).__cffi_close__(lib)
    +
    +    def _typeof_locked(self, cdecl):
    +        # call me with the lock!
    +        key = cdecl
    +        if key in self._parsed_types:
    +            return self._parsed_types[key]
    +        #
    +        if not isinstance(cdecl, str):    # unicode, on Python 2
    +            cdecl = cdecl.encode('ascii')
    +        #
    +        type = self._parser.parse_type(cdecl)
    +        really_a_function_type = type.is_raw_function
    +        if really_a_function_type:
    +            type = type.as_function_pointer()
    +        btype = self._get_cached_btype(type)
    +        result = btype, really_a_function_type
    +        self._parsed_types[key] = result
    +        return result
    +
    +    def _typeof(self, cdecl, consider_function_as_funcptr=False):
    +        # string -> ctype object
    +        try:
    +            result = self._parsed_types[cdecl]
    +        except KeyError:
    +            with self._lock:
    +                result = self._typeof_locked(cdecl)
    +        #
    +        btype, really_a_function_type = result
    +        if really_a_function_type and not consider_function_as_funcptr:
    +            raise CDefError("the type %r is a function type, not a "
    +                            "pointer-to-function type" % (cdecl,))
    +        return btype
    +
    +    def typeof(self, cdecl):
    +        """Parse the C type given as a string and return the
    +        corresponding <ctype> object.
    +        It can also be used on 'cdata' instance to get its C type.
    +        """
    +        if isinstance(cdecl, basestring):
    +            return self._typeof(cdecl)
    +        if isinstance(cdecl, self.CData):
    +            return self._backend.typeof(cdecl)
    +        if isinstance(cdecl, types.BuiltinFunctionType):
    +            res = _builtin_function_type(cdecl)
    +            if res is not None:
    +                return res
    +        if (isinstance(cdecl, types.FunctionType)
    +                and hasattr(cdecl, '_cffi_base_type')):
    +            with self._lock:
    +                return self._get_cached_btype(cdecl._cffi_base_type)
    +        raise TypeError(type(cdecl))
    +
    +    def sizeof(self, cdecl):
    +        """Return the size in bytes of the argument.  It can be a
    +        string naming a C type, or a 'cdata' instance.
    +        """
    +        if isinstance(cdecl, basestring):
    +            BType = self._typeof(cdecl)
    +            return self._backend.sizeof(BType)
    +        else:
    +            return self._backend.sizeof(cdecl)
    +
    +    def alignof(self, cdecl):
    +        """Return the natural alignment size in bytes of the C type
    +        given as a string.
    +        """
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        return self._backend.alignof(cdecl)
    +
    +    def offsetof(self, cdecl, *fields_or_indexes):
    +        """Return the offset of the named field inside the given
    +        structure or array, which must be given as a C type name.
    +        You can give several field names in case of nested structures.
    +        You can also give numeric values which correspond to array
    +        items, in case of an array type.
    +        """
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
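    +
    +    # Illustrative sketch (editor's note, not upstream code); the numeric
    +    # results are ABI-dependent, shown here for a common 32-bit-int ABI:
    +    #
    +    #     ffi.cdef("struct pt { int x; int y; };")
    +    #     ffi.sizeof("struct pt")           # e.g. 8
    +    #     ffi.offsetof("struct pt", "y")    # e.g. 4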
    +
    +    def new(self, cdecl, init=None):
    +        """Allocate an instance according to the specified C type and
    +        return a pointer to it.  The specified C type must be either a
    +        pointer or an array: ``new('X *')`` allocates an X and returns
    +        a pointer to it, whereas ``new('X[n]')`` allocates an array of
    +        n X'es and returns an array referencing it (which works
    +        mostly like a pointer, like in C).  You can also use
    +        ``new('X[]', n)`` to allocate an array of a non-constant
    +        length n.
    +
    +        The memory is initialized following the rules of declaring a
    +        global variable in C: by default it is zero-initialized, but
    +        an explicit initializer can be given which can be used to
    +        fill all or part of the memory.
    +
    +        When the returned <cdata> object goes out of scope, the memory
    +        is freed.  In other words the returned <cdata> object has
    +        ownership of the value of type 'cdecl' that it points to.  This
    +        means that the raw data can be used as long as this object is
    +        kept alive, but must not be used for a longer time.  Be careful
    +        about that when copying the pointer to the memory somewhere
    +        else, e.g. into another structure.
    +        """
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        return self._backend.newp(cdecl, init)
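    +
    +    # Illustrative sketch (editor's note, not upstream code):
    +    #
    +    #     p = ffi.new("int *")         # one zero-initialized int
    +    #     a = ffi.new("int[]", 10)     # array of 10 ints, length given at run time
    +    #     a[3] = 42                    # memory lives as long as 'a' does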
    +
    +    def new_allocator(self, alloc=None, free=None,
    +                      should_clear_after_alloc=True):
    +        """Return a new allocator, i.e. a function that behaves like ffi.new()
    +        but uses the provided low-level 'alloc' and 'free' functions.
    +
    +        'alloc' is called with the size as argument.  If it returns NULL, a
    +        MemoryError is raised.  'free' is called with the result of 'alloc'
    +        as argument.  Both can be either Python function or directly C
    +        functions.  If 'free' is None, then no free function is called.
    +        If both 'alloc' and 'free' are None, the default is used.
    +
    +        If 'should_clear_after_alloc' is set to False, then the memory
    +        returned by 'alloc' is assumed to be already cleared (or you are
    +        fine with garbage); otherwise CFFI will clear it.
    +        """
    +        compiled_ffi = self._backend.FFI()
    +        allocator = compiled_ffi.new_allocator(alloc, free,
    +                                               should_clear_after_alloc)
    +        def allocate(cdecl, init=None):
    +            if isinstance(cdecl, basestring):
    +                cdecl = self._typeof(cdecl)
    +            return allocator(cdecl, init)
    +        return allocate
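    +
    +    # Illustrative sketch (editor's note): route allocations through libc,
    +    # assuming malloc()/free() were declared via ffi.cdef() and 'C' came
    +    # from ffi.dlopen(None):
    +    #
    +    #     alloc = ffi.new_allocator(C.malloc, C.free)
    +    #     buf = alloc("char[]", 256)   # released with C.free when collected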
    +
    +    def cast(self, cdecl, source):
    +        """Similar to a C cast: returns an instance of the named C
    +        type initialized with the given 'source'.  The source is
    +        cast between integers or pointers of any type.
    +        """
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        return self._backend.cast(cdecl, source)
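    +
    +    # Illustrative sketch (editor's note, not upstream code):
    +    #
    +    #     int(ffi.cast("int", 3.99))   # truncates like a C cast -> 3
    +    #     ffi.cast("char *", 0)        # a NULL-valued 'char *'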
    +
    +    def string(self, cdata, maxlen=-1):
    +        """Return a Python string (or unicode string) from the 'cdata'.
    +        If 'cdata' is a pointer or array of characters or bytes, returns
    +        the null-terminated string.  The returned string extends until
    +        the first null character, or at most 'maxlen' characters.  If
    +        'cdata' is an array then 'maxlen' defaults to its length.
    +
    +        If 'cdata' is a pointer or array of wchar_t, returns a unicode
    +        string following the same rules.
    +
    +        If 'cdata' is a single character or byte or a wchar_t, returns
    +        it as a string or unicode string.
    +
    +        If 'cdata' is an enum, returns the value of the enumerator as a
    +        string, or 'NUMBER' if the value is out of range.
    +        """
    +        return self._backend.string(cdata, maxlen)
    +
    +    def unpack(self, cdata, length):
    +        """Unpack an array of C data of the given length,
    +        returning a Python string/unicode/list.
    +
    +        If 'cdata' is a pointer to 'char', returns a byte string.
    +        It does not stop at the first null.  This is equivalent to:
    +        ffi.buffer(cdata, length)[:]
    +
    +        If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
    +        'length' is measured in wchar_t's; it is not the size in bytes.
    +
    +        If 'cdata' is a pointer to anything else, returns a list of
    +        'length' items.  This is a faster equivalent to:
    +        [cdata[i] for i in range(length)]
    +        """
    +        return self._backend.unpack(cdata, length)
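    +
    +    # Illustrative sketch (editor's note): string() stops at the first
    +    # null byte, unpack() does not.
    +    #
    +    #     buf = ffi.new("char[]", b"ab\x00cd")
    +    #     ffi.string(buf)              # -> b'ab'
    +    #     ffi.unpack(buf, 5)           # -> b'ab\x00cd'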
    +
    +   #def buffer(self, cdata, size=-1):
    +   #    """Return a read-write buffer object that references the raw C data
    +   #    pointed to by the given 'cdata'.  The 'cdata' must be a pointer or
    +   #    an array.  Can be passed to functions expecting a buffer, or directly
    +   #    manipulated with:
    +   #
    +   #        buf[:]          get a copy of it in a regular string, or
    +   #        buf[idx]        as a single character
    +   #        buf[:] = ...
    +   #        buf[idx] = ...  change the content
    +   #    """
    +   #    note that 'buffer' is a type, set on this instance by __init__
    +
    +    def from_buffer(self, cdecl, python_buffer=_unspecified,
    +                    require_writable=False):
    +        """Return a cdata of the given type pointing to the data of the
    +        given Python object, which must support the buffer interface.
    +        Note that this is not meant to be used on the built-in types
    +        str or unicode (you can build 'char[]' arrays explicitly)
    +        but only on objects containing large quantities of raw data
    +        in some other format, like 'array.array' or numpy arrays.
    +
    +        The first argument is optional and defaults to 'char[]'.
    +        """
    +        if python_buffer is _unspecified:
    +            cdecl, python_buffer = self.BCharA, cdecl
    +        elif isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        return self._backend.from_buffer(cdecl, python_buffer,
    +                                         require_writable)
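    +
    +    # Illustrative sketch (editor's note): view an array.array's raw data
    +    # without copying; keep 'a' alive for as long as 'p' is used.
    +    #
    +    #     import array
    +    #     a = array.array('i', [1, 2, 3])
    +    #     p = ffi.from_buffer("int[]", a)
    +    #     p[0]                         # -> 1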
    +
    +    def memmove(self, dest, src, n):
    +        """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
    +
    +        Like the C function memmove(), the memory areas may overlap;
    +        apart from that it behaves like the C function memcpy().
    +
    +        'src' can be any cdata ptr or array, or any Python buffer object.
    +        'dest' can be any cdata ptr or array, or a writable Python buffer
    +        object.  The size to copy, 'n', is always measured in bytes.
    +
    +        Unlike other methods, this one supports all Python buffers including
    +        byte strings and bytearrays---but it still does not support
    +        non-contiguous buffers.
    +        """
    +        return self._backend.memmove(dest, src, n)
    +
    +    def callback(self, cdecl, python_callable=None, error=None, onerror=None):
    +        """Return a callback object or a decorator making such a
    +        callback object.  'cdecl' must name a C function pointer type.
    +        The callback invokes the specified 'python_callable' (which may
    +        be provided either directly or via a decorator).  Important: the
    +        callback object must be manually kept alive for as long as the
    +        callback may be invoked from the C level.
    +        """
    +        def callback_decorator_wrap(python_callable):
    +            if not callable(python_callable):
    +                raise TypeError("the 'python_callable' argument "
    +                                "is not callable")
    +            return self._backend.callback(cdecl, python_callable,
    +                                          error, onerror)
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
    +        if python_callable is None:
    +            return callback_decorator_wrap                # decorator mode
    +        else:
    +            return callback_decorator_wrap(python_callable)  # direct mode
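    +
    +    # Illustrative sketch (editor's note), decorator form; the returned
    +    # object must stay referenced for as long as C code may invoke it:
    +    #
    +    #     @ffi.callback("int(int, int)")
    +    #     def add(x, y):
    +    #         return x + y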
    +
    +    def getctype(self, cdecl, replace_with=''):
    +        """Return a string giving the C type 'cdecl', which may be itself
    +        a string or a <ctype> object.  If 'replace_with' is given, it gives
    +        extra text to append (or insert for more complicated C types), like
    +        a variable name, or '*' to actually get the C type 'pointer-to-cdecl'.
    +        """
    +        if isinstance(cdecl, basestring):
    +            cdecl = self._typeof(cdecl)
    +        replace_with = replace_with.strip()
    +        if (replace_with.startswith('*')
    +                and '&[' in self._backend.getcname(cdecl, '&')):
    +            replace_with = '(%s)' % replace_with
    +        elif replace_with and not replace_with[0] in '[(':
    +            replace_with = ' ' + replace_with
    +        return self._backend.getcname(cdecl, replace_with)
    +
    +    def gc(self, cdata, destructor, size=0):
    +        """Return a new cdata object that points to the same
    +        data.  Later, when this new cdata object is garbage-collected,
    +        'destructor(old_cdata_object)' will be called.
    +
    +        The optional 'size' gives an estimate of the size, used to
    +        trigger the garbage collection more eagerly.  So far only used
    +        on PyPy.  It tells the GC that the returned object keeps alive
    +        roughly 'size' bytes of external memory.
    +        """
    +        return self._backend.gcp(cdata, destructor, size)
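    +
    +    # Illustrative sketch (editor's note), assuming C.malloc/C.free were
    +    # obtained from a dlopen()'ed libc as above:
    +    #
    +    #     raw = ffi.gc(C.malloc(100), C.free)   # freed when 'raw' is collected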
    +
    +    def _get_cached_btype(self, type):
    +        assert self._lock.acquire(False) is False
    +        # call me with the lock!
    +        try:
    +            BType = self._cached_btypes[type]
    +        except KeyError:
    +            finishlist = []
    +            BType = type.get_cached_btype(self, finishlist)
    +            for type in finishlist:
    +                type.finish_backend_type(self, finishlist)
    +        return BType
    +
    +    def verify(self, source='', tmpdir=None, **kwargs):
    +        """Verify that the current ffi signatures compile on this
    +        machine, and return a dynamic library object.  The dynamic
    +        library can be used to call functions and access global
    +        variables declared in this 'ffi'.  The library is compiled
    +        by the C compiler: it gives you C-level API compatibility
    +        (including calling macros).  This is unlike 'ffi.dlopen()',
    +        which requires binary compatibility in the signatures.
    +        """
    +        from .verifier import Verifier, _caller_dir_pycache
    +        #
    +        # If set_unicode(True) was called, insert the UNICODE and
    +        # _UNICODE macro declarations
    +        if self._windows_unicode:
    +            self._apply_windows_unicode(kwargs)
    +        #
    +        # Set the tmpdir here, and not in Verifier.__init__: it picks
    +        # up the caller's directory, which we want to be the caller of
    +        # ffi.verify(), as opposed to the caller of Verifier().
    +        tmpdir = tmpdir or _caller_dir_pycache()
    +        #
    +        # Make a Verifier() and use it to load the library.
    +        self.verifier = Verifier(self, source, tmpdir, **kwargs)
    +        lib = self.verifier.load_library()
    +        #
    +        # Save the loaded library for keep-alive purposes, even
    +        # if the caller doesn't keep it alive itself (it should).
    +        self._libraries.append(lib)
    +        return lib
    +
    +    def _get_errno(self):
    +        return self._backend.get_errno()
    +    def _set_errno(self, errno):
    +        self._backend.set_errno(errno)
    +    errno = property(_get_errno, _set_errno, None,
    +                     "the value of 'errno' from/to the C calls")
    +
    +    def getwinerror(self, code=-1):
    +        return self._backend.getwinerror(code)
    +
    +    def _pointer_to(self, ctype):
    +        with self._lock:
    +            return model.pointer_cache(self, ctype)
    +
    +    def addressof(self, cdata, *fields_or_indexes):
    +        """Return the address of a .
    +        If 'fields_or_indexes' are given, returns the address of that
    +        field or array item in the structure or array, recursively in
    +        case of nested structures.
    +        """
    +        try:
    +            ctype = self._backend.typeof(cdata)
    +        except TypeError:
    +            if '__addressof__' in type(cdata).__dict__:
    +                return type(cdata).__addressof__(cdata, *fields_or_indexes)
    +            raise
    +        if fields_or_indexes:
    +            ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
    +        else:
    +            if ctype.kind == "pointer":
    +                raise TypeError("addressof(pointer)")
    +            offset = 0
    +        ctypeptr = self._pointer_to(ctype)
    +        return self._backend.rawaddressof(ctypeptr, cdata, offset)
    +
    +    def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
    +        ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
    +        for field1 in fields_or_indexes:
    +            ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
    +            offset += offset1
    +        return ctype, offset
    +
    +    def include(self, ffi_to_include):
    +        """Includes the typedefs, structs, unions and enums defined
    +        in another FFI instance.  Usage is similar to a #include in C,
    +        where a part of the program might include types defined in
    +        another part for its own usage.  Note that the include()
    +        method has no effect on functions, constants and global
    +        variables, which must anyway be accessed directly from the
    +        lib object returned by the original FFI instance.
    +        """
    +        if not isinstance(ffi_to_include, FFI):
    +            raise TypeError("ffi.include() expects an argument that is also of"
    +                            " type cffi.FFI, not %r" % (
    +                                type(ffi_to_include).__name__,))
    +        if ffi_to_include is self:
    +            raise ValueError("self.include(self)")
    +        with ffi_to_include._lock:
    +            with self._lock:
    +                self._parser.include(ffi_to_include._parser)
    +                self._cdefsources.append('[')
    +                self._cdefsources.extend(ffi_to_include._cdefsources)
    +                self._cdefsources.append(']')
    +                self._included_ffis.append(ffi_to_include)
    +
    +    def new_handle(self, x):
    +        return self._backend.newp_handle(self.BVoidP, x)
    +
    +    def from_handle(self, x):
    +        return self._backend.from_handle(x)
    +
    +    def release(self, x):
    +        self._backend.release(x)
    +
    +    def set_unicode(self, enabled_flag):
    +        """Windows: if 'enabled_flag' is True, enable the UNICODE and
    +        _UNICODE defines in C, and declare the types like TCHAR and LPCTSTR
    +        to be (pointers to) wchar_t.  If 'enabled_flag' is False,
    +        declare these types to be (pointers to) plain 8-bit characters.
    +        This is mostly for backward compatibility; you usually want True.
    +        """
    +        if self._windows_unicode is not None:
    +            raise ValueError("set_unicode() can only be called once")
    +        enabled_flag = bool(enabled_flag)
    +        if enabled_flag:
    +            self.cdef("typedef wchar_t TBYTE;"
    +                      "typedef wchar_t TCHAR;"
    +                      "typedef const wchar_t *LPCTSTR;"
    +                      "typedef const wchar_t *PCTSTR;"
    +                      "typedef wchar_t *LPTSTR;"
    +                      "typedef wchar_t *PTSTR;"
    +                      "typedef TBYTE *PTBYTE;"
    +                      "typedef TCHAR *PTCHAR;")
    +        else:
    +            self.cdef("typedef char TBYTE;"
    +                      "typedef char TCHAR;"
    +                      "typedef const char *LPCTSTR;"
    +                      "typedef const char *PCTSTR;"
    +                      "typedef char *LPTSTR;"
    +                      "typedef char *PTSTR;"
    +                      "typedef TBYTE *PTBYTE;"
    +                      "typedef TCHAR *PTCHAR;")
    +        self._windows_unicode = enabled_flag
    +
    +    def _apply_windows_unicode(self, kwds):
    +        defmacros = kwds.get('define_macros', ())
    +        if not isinstance(defmacros, (list, tuple)):
    +            raise TypeError("'define_macros' must be a list or tuple")
    +        defmacros = list(defmacros) + [('UNICODE', '1'),
    +                                       ('_UNICODE', '1')]
    +        kwds['define_macros'] = defmacros
    +
    +    def _apply_embedding_fix(self, kwds):
    +        # must include an argument like "-lpython2.7" for the compiler
    +        def ensure(key, value):
    +            lst = kwds.setdefault(key, [])
    +            if value not in lst:
    +                lst.append(value)
    +        #
    +        if '__pypy__' in sys.builtin_module_names:
    +            import os
    +            if sys.platform == "win32":
    +                # we need 'libpypy-c.lib'.  Current distributions of
    +                # pypy (>= 4.1) contain it as 'libs/python27.lib'.
    +                pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
    +                if hasattr(sys, 'prefix'):
    +                    ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
    +            else:
    +                # we need 'libpypy-c.{so,dylib}', which should be by
    +                # default located in 'sys.prefix/bin' for installed
    +                # systems.
    +                if sys.version_info < (3,):
    +                    pythonlib = "pypy-c"
    +                else:
    +                    pythonlib = "pypy3-c"
    +                if hasattr(sys, 'prefix'):
    +                    ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
    +            # On uninstalled pypy's, the libpypy-c is typically found in
    +            # .../pypy/goal/.
    +            if hasattr(sys, 'prefix'):
    +                ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
    +        else:
    +            if sys.platform == "win32":
    +                template = "python%d%d"
    +                if hasattr(sys, 'gettotalrefcount'):
    +                    template += '_d'
    +            else:
    +                try:
    +                    import sysconfig
    +                except ImportError:    # 2.6
    +                    from distutils import sysconfig
    +                template = "python%d.%d"
    +                if sysconfig.get_config_var('DEBUG_EXT'):
    +                    template += sysconfig.get_config_var('DEBUG_EXT')
    +            pythonlib = (template %
    +                    (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
    +            if hasattr(sys, 'abiflags'):
    +                pythonlib += sys.abiflags
    +        ensure('libraries', pythonlib)
    +        if sys.platform == "win32":
    +            ensure('extra_link_args', '/MANIFEST')
    +
    +    def set_source(self, module_name, source, source_extension='.c', **kwds):
    +        import os
    +        if hasattr(self, '_assigned_source'):
    +            raise ValueError("set_source() cannot be called several times "
    +                             "per ffi object")
    +        if not isinstance(module_name, basestring):
    +            raise TypeError("'module_name' must be a string")
    +        if os.sep in module_name or (os.altsep and os.altsep in module_name):
    +            raise ValueError("'module_name' must not contain '/': use a dotted "
    +                             "name to make a 'package.module' location")
    +        self._assigned_source = (str(module_name), source,
    +                                 source_extension, kwds)
    +
    +    def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
    +                             source_extension='.c', **kwds):
    +        from . import pkgconfig
    +        if not isinstance(pkgconfig_libs, list):
    +            raise TypeError("the pkgconfig_libs argument must be a list "
    +                            "of package names")
    +        kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
    +        pkgconfig.merge_flags(kwds, kwds2)
    +        self.set_source(module_name, source, source_extension, **kwds)
    +
    +    def distutils_extension(self, tmpdir='build', verbose=True):
    +        from distutils.dir_util import mkpath
    +        from .recompiler import recompile
    +        #
    +        if not hasattr(self, '_assigned_source'):
    +            if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
    +                return self.verifier.get_extension()
    +            raise ValueError("set_source() must be called before"
    +                             " distutils_extension()")
    +        module_name, source, source_extension, kwds = self._assigned_source
    +        if source is None:
    +            raise TypeError("distutils_extension() is only for C extension "
    +                            "modules, not for dlopen()-style pure Python "
    +                            "modules")
    +        mkpath(tmpdir)
    +        ext, updated = recompile(self, module_name,
    +                                 source, tmpdir=tmpdir, extradir=tmpdir,
    +                                 source_extension=source_extension,
    +                                 call_c_compiler=False, **kwds)
    +        if verbose:
    +            if updated:
    +                sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
    +            else:
    +                sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
    +        return ext
    +
    +    def emit_c_code(self, filename):
    +        from .recompiler import recompile
    +        #
    +        if not hasattr(self, '_assigned_source'):
    +            raise ValueError("set_source() must be called before emit_c_code()")
    +        module_name, source, source_extension, kwds = self._assigned_source
    +        if source is None:
    +            raise TypeError("emit_c_code() is only for C extension modules, "
    +                            "not for dlopen()-style pure Python modules")
    +        recompile(self, module_name, source,
    +                  c_file=filename, call_c_compiler=False, **kwds)
    +
    +    def emit_python_code(self, filename):
    +        from .recompiler import recompile
    +        #
    +        if not hasattr(self, '_assigned_source'):
    +            raise ValueError("set_source() must be called before emit_c_code()")
    +        module_name, source, source_extension, kwds = self._assigned_source
    +        if source is not None:
    +            raise TypeError("emit_python_code() is only for dlopen()-style "
    +                            "pure Python modules, not for C extension modules")
    +        recompile(self, module_name, source,
    +                  c_file=filename, call_c_compiler=False, **kwds)
    +
    +    def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
    +        """The 'target' argument gives the final file name of the
    +        compiled DLL.  Use '*' to force distutils' choice, suitable for
    +        regular CPython C API modules.  Use a file name ending in '.*'
    +        to ask for the system's default extension for dynamic libraries
    +        (.so/.dll/.dylib).
    +
    +        The default is '*' when building a non-embedded C API extension,
    +        and (module_name + '.*') when building an embedded library.
    +        """
    +        from .recompiler import recompile
    +        #
    +        if not hasattr(self, '_assigned_source'):
    +            raise ValueError("set_source() must be called before compile()")
    +        module_name, source, source_extension, kwds = self._assigned_source
    +        return recompile(self, module_name, source, tmpdir=tmpdir,
    +                         target=target, source_extension=source_extension,
    +                         compiler_verbose=verbose, debug=debug, **kwds)
    +
    +    def init_once(self, func, tag):
    +        # Read _init_once_cache[tag], which is either (False, lock) if
    +        # we're calling the function now in some thread, or (True, result).
    +        # Don't call setdefault() in most cases, to avoid allocating and
    +        # immediately freeing a lock; but still use setdefault() to avoid
    +        # races.
    +        try:
    +            x = self._init_once_cache[tag]
    +        except KeyError:
    +            x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
    +        # Common case: we got (True, result), so we return the result.
    +        if x[0]:
    +            return x[1]
    +        # Else, it's a lock.  Acquire it to serialize the following tests.
    +        with x[1]:
    +            # Read again from _init_once_cache the current status.
    +            x = self._init_once_cache[tag]
    +            if x[0]:
    +                return x[1]
    +            # Call the function and store the result back.
    +            result = func()
    +            self._init_once_cache[tag] = (True, result)
    +        return result
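    +
    +    # Illustrative sketch (editor's note): '_setup' is a hypothetical
    +    # one-time initializer; concurrent callers block until it finishes
    +    # and then all receive the same cached result.
    +    #
    +    #     handle = ffi.init_once(_setup, "my-module-setup")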
    +
    +    def embedding_init_code(self, pysource):
    +        if self._embedding:
    +            raise ValueError("embedding_init_code() can only be called once")
    +        # fix 'pysource' before it gets dumped into the C file:
    +        # - remove empty lines at the beginning, so it starts at "line 1"
    +        # - dedent, if all non-empty lines are indented
    +        # - check for SyntaxErrors
    +        import re
    +        match = re.match(r'\s*\n', pysource)
    +        if match:
    +            pysource = pysource[match.end():]
    +        lines = pysource.splitlines() or ['']
    +        prefix = re.match(r'\s*', lines[0]).group()
    +        for i in range(1, len(lines)):
    +            line = lines[i]
    +            if line.rstrip():
    +                while not line.startswith(prefix):
    +                    prefix = prefix[:-1]
    +        i = len(prefix)
    +        lines = [line[i:]+'\n' for line in lines]
    +        pysource = ''.join(lines)
    +        #
    +        compile(pysource, "cffi_init", "exec")
    +        #
    +        self._embedding = pysource
    +
    +    def def_extern(self, *args, **kwds):
    +        raise ValueError("ffi.def_extern() is only available on API-mode FFI "
    +                         "objects")
    +
    +    def list_types(self):
    +        """Returns the user type names known to this FFI instance.
    +        This returns a tuple containing three lists of names:
    +        (typedef_names, names_of_structs, names_of_unions)
    +        """
    +        typedefs = []
    +        structs = []
    +        unions = []
    +        for key in self._parser._declarations:
    +            if key.startswith('typedef '):
    +                typedefs.append(key[8:])
    +            elif key.startswith('struct '):
    +                structs.append(key[7:])
    +            elif key.startswith('union '):
    +                unions.append(key[6:])
    +        typedefs.sort()
    +        structs.sort()
    +        unions.sort()
    +        return (typedefs, structs, unions)
    +
    +
    +def _load_backend_lib(backend, name, flags):
    +    import os
    +    if not isinstance(name, basestring):
    +        if sys.platform != "win32" or name is not None:
    +            return backend.load_library(name, flags)
    +        name = "c"    # Windows: load_library(None) fails, but this works
    +                      # on Python 2 (backward compatibility hack only)
    +    first_error = None
    +    if '.' in name or '/' in name or os.sep in name:
    +        try:
    +            return backend.load_library(name, flags)
    +        except OSError as e:
    +            first_error = e
    +    import ctypes.util
    +    path = ctypes.util.find_library(name)
    +    if path is None:
    +        if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
    +            raise OSError("dlopen(None) cannot work on Windows for Python 3 "
    +                          "(see http://bugs.python.org/issue23606)")
    +        msg = ("ctypes.util.find_library() did not manage "
    +               "to locate a library called %r" % (name,))
    +        if first_error is not None:
    +            msg = "%s.  Additionally, %s" % (first_error, msg)
    +        raise OSError(msg)
    +    return backend.load_library(path, flags)
    +
    +def _make_ffi_library(ffi, libname, flags):
    +    backend = ffi._backend
    +    backendlib = _load_backend_lib(backend, libname, flags)
    +    #
    +    def accessor_function(name):
    +        key = 'function ' + name
    +        tp, _ = ffi._parser._declarations[key]
    +        BType = ffi._get_cached_btype(tp)
    +        value = backendlib.load_function(BType, name)
    +        library.__dict__[name] = value
    +    #
    +    def accessor_variable(name):
    +        key = 'variable ' + name
    +        tp, _ = ffi._parser._declarations[key]
    +        BType = ffi._get_cached_btype(tp)
    +        read_variable = backendlib.read_variable
    +        write_variable = backendlib.write_variable
    +        setattr(FFILibrary, name, property(
    +            lambda self: read_variable(BType, name),
    +            lambda self, value: write_variable(BType, name, value)))
    +    #
    +    def addressof_var(name):
    +        try:
    +            return addr_variables[name]
    +        except KeyError:
    +            with ffi._lock:
    +                if name not in addr_variables:
    +                    key = 'variable ' + name
    +                    tp, _ = ffi._parser._declarations[key]
    +                    BType = ffi._get_cached_btype(tp)
    +                    if BType.kind != 'array':
    +                        BType = model.pointer_cache(ffi, BType)
    +                    p = backendlib.load_function(BType, name)
    +                    addr_variables[name] = p
    +            return addr_variables[name]
    +    #
    +    def accessor_constant(name):
    +        raise NotImplementedError("non-integer constant '%s' cannot be "
    +                                  "accessed from a dlopen() library" % (name,))
    +    #
    +    def accessor_int_constant(name):
    +        library.__dict__[name] = ffi._parser._int_constants[name]
    +    #
    +    accessors = {}
    +    accessors_version = [False]
    +    addr_variables = {}
    +    #
    +    def update_accessors():
    +        if accessors_version[0] is ffi._cdef_version:
    +            return
    +        #
    +        for key, (tp, _) in ffi._parser._declarations.items():
    +            if not isinstance(tp, model.EnumType):
    +                tag, name = key.split(' ', 1)
    +                if tag == 'function':
    +                    accessors[name] = accessor_function
    +                elif tag == 'variable':
    +                    accessors[name] = accessor_variable
    +                elif tag == 'constant':
    +                    accessors[name] = accessor_constant
    +            else:
    +                for i, enumname in enumerate(tp.enumerators):
    +                    def accessor_enum(name, tp=tp, i=i):
    +                        tp.check_not_partial()
    +                        library.__dict__[name] = tp.enumvalues[i]
    +                    accessors[enumname] = accessor_enum
    +        for name in ffi._parser._int_constants:
    +            accessors.setdefault(name, accessor_int_constant)
    +        accessors_version[0] = ffi._cdef_version
    +    #
    +    def make_accessor(name):
    +        with ffi._lock:
    +            if name in library.__dict__ or name in FFILibrary.__dict__:
    +                return    # added by another thread while waiting for the lock
    +            if name not in accessors:
    +                update_accessors()
    +                if name not in accessors:
    +                    raise AttributeError(name)
    +            accessors[name](name)
    +    #
    +    class FFILibrary(object):
    +        def __getattr__(self, name):
    +            make_accessor(name)
    +            return getattr(self, name)
    +        def __setattr__(self, name, value):
    +            try:
    +                property = getattr(self.__class__, name)
    +            except AttributeError:
    +                make_accessor(name)
    +                setattr(self, name, value)
    +            else:
    +                property.__set__(self, value)
    +        def __dir__(self):
    +            with ffi._lock:
    +                update_accessors()
    +                return accessors.keys()
    +        def __addressof__(self, name):
    +            if name in library.__dict__:
    +                return library.__dict__[name]
    +            if name in FFILibrary.__dict__:
    +                return addressof_var(name)
    +            make_accessor(name)
    +            if name in library.__dict__:
    +                return library.__dict__[name]
    +            if name in FFILibrary.__dict__:
    +                return addressof_var(name)
    +            raise AttributeError("cffi library has no function or "
    +                                 "global variable named '%s'" % (name,))
    +        def __cffi_close__(self):
    +            backendlib.close_lib()
    +            self.__dict__.clear()
    +    #
    +    if isinstance(libname, basestring):
    +        try:
    +            if not isinstance(libname, str):    # unicode, on Python 2
    +                libname = libname.encode('utf-8')
    +            FFILibrary.__name__ = 'FFILibrary_%s' % libname
    +        except UnicodeError:
    +            pass
    +    library = FFILibrary()
    +    return library, library.__dict__
    +
    +def _builtin_function_type(func):
    +    # a hack to make at least ffi.typeof(builtin_function) work,
    +    # if the builtin function was obtained by 'vengine_cpy'.
    +    import sys
    +    try:
    +        module = sys.modules[func.__module__]
    +        ffi = module._cffi_original_ffi
    +        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
    +        tp = types_of_builtin_funcs[func]
    +    except (KeyError, AttributeError, TypeError):
    +        return None
    +    else:
    +        with ffi._lock:
    +            return ffi._get_cached_btype(tp)
    diff --git a/server/www/packages/packages-windows/x86/cffi/backend_ctypes.py b/server/www/packages/packages-windows/x86/cffi/backend_ctypes.py
    new file mode 100644
    index 0000000..3368a2a
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/backend_ctypes.py
    @@ -0,0 +1,1121 @@
    +import ctypes, ctypes.util, operator, sys
    +from . import model
    +
    +if sys.version_info < (3,):
    +    bytechr = chr
    +else:
    +    unicode = str
    +    long = int
    +    xrange = range
    +    bytechr = lambda num: bytes([num])
    +
    +class CTypesType(type):
    +    pass
    +
    +class CTypesData(object):
    +    __metaclass__ = CTypesType
    +    __slots__ = ['__weakref__']
    +    __name__ = ''
    +
    +    def __init__(self, *args):
    +        raise TypeError("cannot instantiate %r" % (self.__class__,))
    +
    +    @classmethod
    +    def _newp(cls, init):
    +        raise TypeError("expected a pointer or array ctype, got '%s'"
    +                        % (cls._get_c_name(),))
    +
    +    @staticmethod
    +    def _to_ctypes(value):
    +        raise TypeError
    +
    +    @classmethod
    +    def _arg_to_ctypes(cls, *value):
    +        try:
    +            ctype = cls._ctype
    +        except AttributeError:
    +            raise TypeError("cannot create an instance of %r" % (cls,))
    +        if value:
    +            res = cls._to_ctypes(*value)
    +            if not isinstance(res, ctype):
    +                res = cls._ctype(res)
    +        else:
    +            res = cls._ctype()
    +        return res
    +
    +    @classmethod
    +    def _create_ctype_obj(cls, init):
    +        if init is None:
    +            return cls._arg_to_ctypes()
    +        else:
    +            return cls._arg_to_ctypes(init)
    +
    +    @staticmethod
    +    def _from_ctypes(ctypes_value):
    +        raise TypeError
    +
    +    @classmethod
    +    def _get_c_name(cls, replace_with=''):
    +        return cls._reftypename.replace(' &', replace_with)
    +
    +    @classmethod
    +    def _fix_class(cls):
    +        cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
    +        cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
    +        cls.__module__ = 'ffi'
    +
    +    def _get_own_repr(self):
    +        raise NotImplementedError
    +
    +    def _addr_repr(self, address):
    +        if address == 0:
    +            return 'NULL'
    +        else:
    +            if address < 0:
    +                address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
    +            return '0x%x' % address
    +
    +    def __repr__(self, c_name=None):
    +        own = self._get_own_repr()
    +        return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
    +
    +    def _convert_to_address(self, BClass):
    +        if BClass is None:
    +            raise TypeError("cannot convert %r to an address" % (
    +                self._get_c_name(),))
    +        else:
    +            raise TypeError("cannot convert %r to %r" % (
    +                self._get_c_name(), BClass._get_c_name()))
    +
    +    @classmethod
    +    def _get_size(cls):
    +        return ctypes.sizeof(cls._ctype)
    +
    +    def _get_size_of_instance(self):
    +        return ctypes.sizeof(self._ctype)
    +
    +    @classmethod
    +    def _cast_from(cls, source):
    +        raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
    +
    +    def _cast_to_integer(self):
    +        return self._convert_to_address(None)
    +
    +    @classmethod
    +    def _alignment(cls):
    +        return ctypes.alignment(cls._ctype)
    +
    +    def __iter__(self):
    +        raise TypeError("cdata %r does not support iteration" % (
    +            self._get_c_name()),)
    +
    +    def _make_cmp(name):
    +        cmpfunc = getattr(operator, name)
    +        def cmp(self, other):
    +            v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
    +            w_is_ptr = (isinstance(other, CTypesData) and
    +                           not isinstance(other, CTypesGenericPrimitive))
    +            if v_is_ptr and w_is_ptr:
    +                return cmpfunc(self._convert_to_address(None),
    +                               other._convert_to_address(None))
    +            elif v_is_ptr or w_is_ptr:
    +                return NotImplemented
    +            else:
    +                if isinstance(self, CTypesGenericPrimitive):
    +                    self = self._value
    +                if isinstance(other, CTypesGenericPrimitive):
    +                    other = other._value
    +                return cmpfunc(self, other)
    +        cmp.func_name = name
    +        return cmp
    +
    +    __eq__ = _make_cmp('__eq__')
    +    __ne__ = _make_cmp('__ne__')
    +    __lt__ = _make_cmp('__lt__')
    +    __le__ = _make_cmp('__le__')
    +    __gt__ = _make_cmp('__gt__')
    +    __ge__ = _make_cmp('__ge__')
    +
    +    def __hash__(self):
    +        return hash(self._convert_to_address(None))
    +
    +    def _to_string(self, maxlen):
    +        raise TypeError("string(): %r" % (self,))
    +
    +
    +class CTypesGenericPrimitive(CTypesData):
    +    __slots__ = []
    +
    +    def __hash__(self):
    +        return hash(self._value)
    +
    +    def _get_own_repr(self):
    +        return repr(self._from_ctypes(self._value))
    +
    +
    +class CTypesGenericArray(CTypesData):
    +    __slots__ = []
    +
    +    @classmethod
    +    def _newp(cls, init):
    +        return cls(init)
    +
    +    def __iter__(self):
    +        for i in xrange(len(self)):
    +            yield self[i]
    +
    +    def _get_own_repr(self):
    +        return self._addr_repr(ctypes.addressof(self._blob))
    +
    +
    +class CTypesGenericPtr(CTypesData):
    +    __slots__ = ['_address', '_as_ctype_ptr']
    +    _automatic_casts = False
    +    kind = "pointer"
    +
    +    @classmethod
    +    def _newp(cls, init):
    +        return cls(init)
    +
    +    @classmethod
    +    def _cast_from(cls, source):
    +        if source is None:
    +            address = 0
    +        elif isinstance(source, CTypesData):
    +            address = source._cast_to_integer()
    +        elif isinstance(source, (int, long)):
    +            address = source
    +        else:
    +            raise TypeError("bad type for cast to %r: %r" %
    +                            (cls, type(source).__name__))
    +        return cls._new_pointer_at(address)
    +
    +    @classmethod
    +    def _new_pointer_at(cls, address):
    +        self = cls.__new__(cls)
    +        self._address = address
    +        self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
    +        return self
    +
    +    def _get_own_repr(self):
    +        try:
    +            return self._addr_repr(self._address)
    +        except AttributeError:
    +            return '???'
    +
    +    def _cast_to_integer(self):
    +        return self._address
    +
    +    def __nonzero__(self):
    +        return bool(self._address)
    +    __bool__ = __nonzero__
    +
    +    @classmethod
    +    def _to_ctypes(cls, value):
    +        if not isinstance(value, CTypesData):
    +            raise TypeError("unexpected %s object" % type(value).__name__)
    +        address = value._convert_to_address(cls)
    +        return ctypes.cast(address, cls._ctype)
    +
    +    @classmethod
    +    def _from_ctypes(cls, ctypes_ptr):
    +        address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
    +        return cls._new_pointer_at(address)
    +
    +    @classmethod
    +    def _initialize(cls, ctypes_ptr, value):
    +        if value:
    +            ctypes_ptr.contents = cls._to_ctypes(value).contents
    +
    +    def _convert_to_address(self, BClass):
    +        if (BClass in (self.__class__, None) or BClass._automatic_casts
    +            or self._automatic_casts):
    +            return self._address
    +        else:
    +            return CTypesData._convert_to_address(self, BClass)
    +
    +
    +class CTypesBaseStructOrUnion(CTypesData):
    +    __slots__ = ['_blob']
    +
    +    @classmethod
    +    def _create_ctype_obj(cls, init):
    +        # may be overridden
    +        raise TypeError("cannot instantiate opaque type %s" % (cls,))
    +
    +    def _get_own_repr(self):
    +        return self._addr_repr(ctypes.addressof(self._blob))
    +
    +    @classmethod
    +    def _offsetof(cls, fieldname):
    +        return getattr(cls._ctype, fieldname).offset
    +
    +    def _convert_to_address(self, BClass):
    +        if getattr(BClass, '_BItem', None) is self.__class__:
    +            return ctypes.addressof(self._blob)
    +        else:
    +            return CTypesData._convert_to_address(self, BClass)
    +
    +    @classmethod
    +    def _from_ctypes(cls, ctypes_struct_or_union):
    +        self = cls.__new__(cls)
    +        self._blob = ctypes_struct_or_union
    +        return self
    +
    +    @classmethod
    +    def _to_ctypes(cls, value):
    +        return value._blob
    +
    +    def __repr__(self, c_name=None):
    +        return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
    +
    +
    +class CTypesBackend(object):
    +
    +    PRIMITIVE_TYPES = {
    +        'char': ctypes.c_char,
    +        'short': ctypes.c_short,
    +        'int': ctypes.c_int,
    +        'long': ctypes.c_long,
    +        'long long': ctypes.c_longlong,
    +        'signed char': ctypes.c_byte,
    +        'unsigned char': ctypes.c_ubyte,
    +        'unsigned short': ctypes.c_ushort,
    +        'unsigned int': ctypes.c_uint,
    +        'unsigned long': ctypes.c_ulong,
    +        'unsigned long long': ctypes.c_ulonglong,
    +        'float': ctypes.c_float,
    +        'double': ctypes.c_double,
    +        '_Bool': ctypes.c_bool,
    +        }
    +
    +    for _name in ['unsigned long long', 'unsigned long',
    +                  'unsigned int', 'unsigned short', 'unsigned char']:
    +        _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
    +        PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
    +        if _size == ctypes.sizeof(ctypes.c_void_p):
    +            PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
    +        if _size == ctypes.sizeof(ctypes.c_size_t):
    +            PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
    +
    +    for _name in ['long long', 'long', 'int', 'short', 'signed char']:
    +        _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
    +        PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
    +        if _size == ctypes.sizeof(ctypes.c_void_p):
    +            PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
    +            PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
    +        if _size == ctypes.sizeof(ctypes.c_size_t):
    +            PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
    +
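    +    # Editor's note (illustrative): on a typical LP64 Linux build the
    +    # loops above alias e.g. 'int32_t' to ctypes.c_int and 'intptr_t' to
    +    # ctypes.c_long; the exact mapping is platform-dependent by design.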
    +
    +    def __init__(self):
    +        self.RTLD_LAZY = 0   # not supported anyway by ctypes
    +        self.RTLD_NOW  = 0
    +        self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
    +        self.RTLD_LOCAL = ctypes.RTLD_LOCAL
    +
    +    def set_ffi(self, ffi):
    +        self.ffi = ffi
    +
    +    def _get_types(self):
    +        return CTypesData, CTypesType
    +
    +    def load_library(self, path, flags=0):
    +        cdll = ctypes.CDLL(path, flags)
    +        return CTypesLibrary(self, cdll)
    +
    +    def new_void_type(self):
    +        class CTypesVoid(CTypesData):
    +            __slots__ = []
    +            _reftypename = 'void &'
    +            @staticmethod
    +            def _from_ctypes(novalue):
    +                return None
    +            @staticmethod
    +            def _to_ctypes(novalue):
    +                if novalue is not None:
    +                    raise TypeError("None expected, got %s object" %
    +                                    (type(novalue).__name__,))
    +                return None
    +        CTypesVoid._fix_class()
    +        return CTypesVoid
    +
    +    def new_primitive_type(self, name):
    +        if name == 'wchar_t':
    +            raise NotImplementedError(name)
    +        ctype = self.PRIMITIVE_TYPES[name]
    +        if name == 'char':
    +            kind = 'char'
    +        elif name in ('float', 'double'):
    +            kind = 'float'
    +        else:
    +            if name in ('signed char', 'unsigned char'):
    +                kind = 'byte'
    +            elif name == '_Bool':
    +                kind = 'bool'
    +            else:
    +                kind = 'int'
    +            is_signed = (ctype(-1).value == -1)
    +        #
    +        def _cast_source_to_int(source):
    +            if isinstance(source, (int, long, float)):
    +                source = int(source)
    +            elif isinstance(source, CTypesData):
    +                source = source._cast_to_integer()
    +            elif isinstance(source, bytes):
    +                source = ord(source)
    +            elif source is None:
    +                source = 0
    +            else:
    +                raise TypeError("bad type for cast to %r: %r" %
    +                                (CTypesPrimitive, type(source).__name__))
    +            return source
    +        #
    +        kind1 = kind
    +        class CTypesPrimitive(CTypesGenericPrimitive):
    +            __slots__ = ['_value']
    +            _ctype = ctype
    +            _reftypename = '%s &' % name
    +            kind = kind1
    +
    +            def __init__(self, value):
    +                self._value = value
    +
    +            @staticmethod
    +            def _create_ctype_obj(init):
    +                if init is None:
    +                    return ctype()
    +                return ctype(CTypesPrimitive._to_ctypes(init))
    +
    +            if kind == 'int' or kind == 'byte':
    +                @classmethod
    +                def _cast_from(cls, source):
    +                    source = _cast_source_to_int(source)
    +                    source = ctype(source).value     # cast within range
    +                    return cls(source)
    +                def __int__(self):
    +                    return self._value
    +
    +            if kind == 'bool':
    +                @classmethod
    +                def _cast_from(cls, source):
    +                    if not isinstance(source, (int, long, float)):
    +                        source = _cast_source_to_int(source)
    +                    return cls(bool(source))
    +                def __int__(self):
    +                    return int(self._value)
    +
    +            if kind == 'char':
    +                @classmethod
    +                def _cast_from(cls, source):
    +                    source = _cast_source_to_int(source)
    +                    source = bytechr(source & 0xFF)
    +                    return cls(source)
    +                def __int__(self):
    +                    return ord(self._value)
    +
    +            if kind == 'float':
    +                @classmethod
    +                def _cast_from(cls, source):
    +                    if isinstance(source, float):
    +                        pass
    +                    elif isinstance(source, CTypesGenericPrimitive):
    +                        if hasattr(source, '__float__'):
    +                            source = float(source)
    +                        else:
    +                            source = int(source)
    +                    else:
    +                        source = _cast_source_to_int(source)
    +                    source = ctype(source).value     # fix precision
    +                    return cls(source)
    +                def __int__(self):
    +                    return int(self._value)
    +                def __float__(self):
    +                    return self._value
    +
    +            _cast_to_integer = __int__
    +
    +            if kind == 'int' or kind == 'byte' or kind == 'bool':
    +                @staticmethod
    +                def _to_ctypes(x):
    +                    if not isinstance(x, (int, long)):
    +                        if isinstance(x, CTypesData):
    +                            x = int(x)
    +                        else:
    +                            raise TypeError("integer expected, got %s" %
    +                                            type(x).__name__)
    +                    if ctype(x).value != x:
    +                        if not is_signed and x < 0:
    +                            raise OverflowError("%s: negative integer" % name)
    +                        else:
    +                            raise OverflowError("%s: integer out of bounds"
    +                                                % name)
    +                    return x
    +
    +            if kind == 'char':
    +                @staticmethod
    +                def _to_ctypes(x):
    +                    if isinstance(x, bytes) and len(x) == 1:
    +                        return x
+                    if isinstance(x, CTypesPrimitive):    # <CData <char>>
    +                        return x._value
    +                    raise TypeError("character expected, got %s" %
    +                                    type(x).__name__)
    +                def __nonzero__(self):
    +                    return ord(self._value) != 0
    +            else:
    +                def __nonzero__(self):
    +                    return self._value != 0
    +            __bool__ = __nonzero__
    +
    +            if kind == 'float':
    +                @staticmethod
    +                def _to_ctypes(x):
    +                    if not isinstance(x, (int, long, float, CTypesData)):
    +                        raise TypeError("float expected, got %s" %
    +                                        type(x).__name__)
    +                    return ctype(x).value
    +
    +            @staticmethod
    +            def _from_ctypes(value):
    +                return getattr(value, 'value', value)
    +
    +            @staticmethod
    +            def _initialize(blob, init):
    +                blob.value = CTypesPrimitive._to_ctypes(init)
    +
    +            if kind == 'char':
    +                def _to_string(self, maxlen):
    +                    return self._value
    +            if kind == 'byte':
    +                def _to_string(self, maxlen):
    +                    return chr(self._value & 0xff)
    +        #
    +        CTypesPrimitive._fix_class()
    +        return CTypesPrimitive
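+
+    # Illustrative sketch (names hypothetical, not upstream):
+    #     BInt = backend.new_primitive_type('int')
+    #     BInt._cast_from(3.9)._value   # -> 3: truncated, then range-wrapped
+    #     int(BInt(42))                 # -> 42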
    +
    +    def new_pointer_type(self, BItem):
    +        getbtype = self.ffi._get_cached_btype
    +        if BItem is getbtype(model.PrimitiveType('char')):
    +            kind = 'charp'
    +        elif BItem in (getbtype(model.PrimitiveType('signed char')),
    +                       getbtype(model.PrimitiveType('unsigned char'))):
    +            kind = 'bytep'
    +        elif BItem is getbtype(model.void_type):
    +            kind = 'voidp'
    +        else:
    +            kind = 'generic'
    +        #
    +        class CTypesPtr(CTypesGenericPtr):
    +            __slots__ = ['_own']
    +            if kind == 'charp':
    +                __slots__ += ['__as_strbuf']
    +            _BItem = BItem
    +            if hasattr(BItem, '_ctype'):
    +                _ctype = ctypes.POINTER(BItem._ctype)
    +                _bitem_size = ctypes.sizeof(BItem._ctype)
    +            else:
    +                _ctype = ctypes.c_void_p
    +            if issubclass(BItem, CTypesGenericArray):
    +                _reftypename = BItem._get_c_name('(* &)')
    +            else:
    +                _reftypename = BItem._get_c_name(' * &')
    +
    +            def __init__(self, init):
    +                ctypeobj = BItem._create_ctype_obj(init)
    +                if kind == 'charp':
    +                    self.__as_strbuf = ctypes.create_string_buffer(
    +                        ctypeobj.value + b'\x00')
    +                    self._as_ctype_ptr = ctypes.cast(
    +                        self.__as_strbuf, self._ctype)
    +                else:
    +                    self._as_ctype_ptr = ctypes.pointer(ctypeobj)
    +                self._address = ctypes.cast(self._as_ctype_ptr,
    +                                            ctypes.c_void_p).value
    +                self._own = True
    +
    +            def __add__(self, other):
    +                if isinstance(other, (int, long)):
    +                    return self._new_pointer_at(self._address +
    +                                                other * self._bitem_size)
    +                else:
    +                    return NotImplemented
    +
    +            def __sub__(self, other):
    +                if isinstance(other, (int, long)):
    +                    return self._new_pointer_at(self._address -
    +                                                other * self._bitem_size)
    +                elif type(self) is type(other):
    +                    return (self._address - other._address) // self._bitem_size
    +                else:
    +                    return NotImplemented
    +
    +            def __getitem__(self, index):
    +                if getattr(self, '_own', False) and index != 0:
    +                    raise IndexError
    +                return BItem._from_ctypes(self._as_ctype_ptr[index])
    +
    +            def __setitem__(self, index, value):
    +                self._as_ctype_ptr[index] = BItem._to_ctypes(value)
    +
    +            if kind == 'charp' or kind == 'voidp':
    +                @classmethod
    +                def _arg_to_ctypes(cls, *value):
    +                    if value and isinstance(value[0], bytes):
    +                        return ctypes.c_char_p(value[0])
    +                    else:
    +                        return super(CTypesPtr, cls)._arg_to_ctypes(*value)
    +
    +            if kind == 'charp' or kind == 'bytep':
    +                def _to_string(self, maxlen):
    +                    if maxlen < 0:
    +                        maxlen = sys.maxsize
    +                    p = ctypes.cast(self._as_ctype_ptr,
    +                                    ctypes.POINTER(ctypes.c_char))
    +                    n = 0
    +                    while n < maxlen and p[n] != b'\x00':
    +                        n += 1
    +                    return b''.join([p[i] for i in range(n)])
    +
    +            def _get_own_repr(self):
    +                if getattr(self, '_own', False):
    +                    return 'owning %d bytes' % (
    +                        ctypes.sizeof(self._as_ctype_ptr.contents),)
    +                return super(CTypesPtr, self)._get_own_repr()
    +        #
    +        if (BItem is self.ffi._get_cached_btype(model.void_type) or
    +            BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
    +            CTypesPtr._automatic_casts = True
    +        #
    +        CTypesPtr._fix_class()
    +        return CTypesPtr
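+
+    # Illustrative sketch (names hypothetical, not upstream):
+    #     BIntP = backend.new_pointer_type(BInt)
+    #     p = BIntP(42)     # owns a freshly allocated int set to 42
+    #     p[0]              # -> 42
+    #     (p + 1) - p       # -> 1: arithmetic counts items, not bytes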
    +
    +    def new_array_type(self, CTypesPtr, length):
    +        if length is None:
    +            brackets = ' &[]'
    +        else:
    +            brackets = ' &[%d]' % length
    +        BItem = CTypesPtr._BItem
    +        getbtype = self.ffi._get_cached_btype
    +        if BItem is getbtype(model.PrimitiveType('char')):
    +            kind = 'char'
    +        elif BItem in (getbtype(model.PrimitiveType('signed char')),
    +                       getbtype(model.PrimitiveType('unsigned char'))):
    +            kind = 'byte'
    +        else:
    +            kind = 'generic'
    +        #
    +        class CTypesArray(CTypesGenericArray):
    +            __slots__ = ['_blob', '_own']
    +            if length is not None:
    +                _ctype = BItem._ctype * length
    +            else:
    +                __slots__.append('_ctype')
    +            _reftypename = BItem._get_c_name(brackets)
    +            _declared_length = length
    +            _CTPtr = CTypesPtr
    +
    +            def __init__(self, init):
    +                if length is None:
    +                    if isinstance(init, (int, long)):
    +                        len1 = init
    +                        init = None
    +                    elif kind == 'char' and isinstance(init, bytes):
    +                        len1 = len(init) + 1    # extra null
    +                    else:
    +                        init = tuple(init)
    +                        len1 = len(init)
    +                    self._ctype = BItem._ctype * len1
    +                self._blob = self._ctype()
    +                self._own = True
    +                if init is not None:
    +                    self._initialize(self._blob, init)
    +
    +            @staticmethod
    +            def _initialize(blob, init):
    +                if isinstance(init, bytes):
    +                    init = [init[i:i+1] for i in range(len(init))]
    +                else:
    +                    if isinstance(init, CTypesGenericArray):
    +                        if (len(init) != len(blob) or
    +                            not isinstance(init, CTypesArray)):
    +                            raise TypeError("length/type mismatch: %s" % (init,))
    +                    init = tuple(init)
    +                if len(init) > len(blob):
    +                    raise IndexError("too many initializers")
    +                addr = ctypes.cast(blob, ctypes.c_void_p).value
    +                PTR = ctypes.POINTER(BItem._ctype)
    +                itemsize = ctypes.sizeof(BItem._ctype)
    +                for i, value in enumerate(init):
    +                    p = ctypes.cast(addr + i * itemsize, PTR)
    +                    BItem._initialize(p.contents, value)
    +
    +            def __len__(self):
    +                return len(self._blob)
    +
    +            def __getitem__(self, index):
    +                if not (0 <= index < len(self._blob)):
    +                    raise IndexError
    +                return BItem._from_ctypes(self._blob[index])
    +
    +            def __setitem__(self, index, value):
    +                if not (0 <= index < len(self._blob)):
    +                    raise IndexError
    +                self._blob[index] = BItem._to_ctypes(value)
    +
    +            if kind == 'char' or kind == 'byte':
    +                def _to_string(self, maxlen):
    +                    if maxlen < 0:
    +                        maxlen = len(self._blob)
    +                    p = ctypes.cast(self._blob,
    +                                    ctypes.POINTER(ctypes.c_char))
    +                    n = 0
    +                    while n < maxlen and p[n] != b'\x00':
    +                        n += 1
    +                    return b''.join([p[i] for i in range(n)])
    +
    +            def _get_own_repr(self):
    +                if getattr(self, '_own', False):
    +                    return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
    +                return super(CTypesArray, self)._get_own_repr()
    +
    +            def _convert_to_address(self, BClass):
    +                if BClass in (CTypesPtr, None) or BClass._automatic_casts:
    +                    return ctypes.addressof(self._blob)
    +                else:
    +                    return CTypesData._convert_to_address(self, BClass)
    +
    +            @staticmethod
    +            def _from_ctypes(ctypes_array):
    +                self = CTypesArray.__new__(CTypesArray)
    +                self._blob = ctypes_array
    +                return self
    +
    +            @staticmethod
    +            def _arg_to_ctypes(value):
    +                return CTypesPtr._arg_to_ctypes(value)
    +
    +            def __add__(self, other):
    +                if isinstance(other, (int, long)):
    +                    return CTypesPtr._new_pointer_at(
    +                        ctypes.addressof(self._blob) +
    +                        other * ctypes.sizeof(BItem._ctype))
    +                else:
    +                    return NotImplemented
    +
    +            @classmethod
    +            def _cast_from(cls, source):
    +                raise NotImplementedError("casting to %r" % (
    +                    cls._get_c_name(),))
    +        #
    +        CTypesArray._fix_class()
    +        return CTypesArray
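+
+    # Illustrative sketch (not upstream): with BCharP built from the 'char'
+    # primitive,
+    #     BCharA = backend.new_array_type(BCharP, None)   # open array
+    #     a = BCharA(b'hi')        # allocates len+1 items for the final NUL
+    #     a._to_string(-1)         # -> b'hi'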
    +
    +    def _new_struct_or_union(self, kind, name, base_ctypes_class):
    +        #
    +        class struct_or_union(base_ctypes_class):
    +            pass
    +        struct_or_union.__name__ = '%s_%s' % (kind, name)
    +        kind1 = kind
    +        #
    +        class CTypesStructOrUnion(CTypesBaseStructOrUnion):
    +            __slots__ = ['_blob']
    +            _ctype = struct_or_union
    +            _reftypename = '%s &' % (name,)
    +            _kind = kind = kind1
    +        #
    +        CTypesStructOrUnion._fix_class()
    +        return CTypesStructOrUnion
    +
    +    def new_struct_type(self, name):
    +        return self._new_struct_or_union('struct', name, ctypes.Structure)
    +
    +    def new_union_type(self, name):
    +        return self._new_struct_or_union('union', name, ctypes.Union)
    +
    +    def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
    +                                 totalsize=-1, totalalignment=-1, sflags=0,
    +                                 pack=0):
    +        if totalsize >= 0 or totalalignment >= 0:
    +            raise NotImplementedError("the ctypes backend of CFFI does not support "
    +                                      "structures completed by verify(); please "
    +                                      "compile and install the _cffi_backend module.")
    +        struct_or_union = CTypesStructOrUnion._ctype
    +        fnames = [fname for (fname, BField, bitsize) in fields]
    +        btypes = [BField for (fname, BField, bitsize) in fields]
    +        bitfields = [bitsize for (fname, BField, bitsize) in fields]
    +        #
    +        bfield_types = {}
    +        cfields = []
    +        for (fname, BField, bitsize) in fields:
    +            if bitsize < 0:
    +                cfields.append((fname, BField._ctype))
    +                bfield_types[fname] = BField
    +            else:
    +                cfields.append((fname, BField._ctype, bitsize))
    +                bfield_types[fname] = Ellipsis
    +        if sflags & 8:
    +            struct_or_union._pack_ = 1
    +        elif pack:
    +            struct_or_union._pack_ = pack
    +        struct_or_union._fields_ = cfields
    +        CTypesStructOrUnion._bfield_types = bfield_types
    +        #
    +        @staticmethod
    +        def _create_ctype_obj(init):
    +            result = struct_or_union()
    +            if init is not None:
    +                initialize(result, init)
    +            return result
    +        CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
    +        #
    +        def initialize(blob, init):
    +            if is_union:
    +                if len(init) > 1:
+                    raise ValueError("union initializer: %d items given, but "
+                                     "only one supported (use a dict if needed)"
+                                     % (len(init),))
    +            if not isinstance(init, dict):
    +                if isinstance(init, (bytes, unicode)):
    +                    raise TypeError("union initializer: got a str")
    +                init = tuple(init)
    +                if len(init) > len(fnames):
    +                    raise ValueError("too many values for %s initializer" %
    +                                     CTypesStructOrUnion._get_c_name())
    +                init = dict(zip(fnames, init))
    +            addr = ctypes.addressof(blob)
    +            for fname, value in init.items():
    +                BField, bitsize = name2fieldtype[fname]
    +                assert bitsize < 0, \
    +                       "not implemented: initializer with bit fields"
    +                offset = CTypesStructOrUnion._offsetof(fname)
    +                PTR = ctypes.POINTER(BField._ctype)
    +                p = ctypes.cast(addr + offset, PTR)
    +                BField._initialize(p.contents, value)
    +        is_union = CTypesStructOrUnion._kind == 'union'
    +        name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
    +        #
    +        for fname, BField, bitsize in fields:
    +            if fname == '':
    +                raise NotImplementedError("nested anonymous structs/unions")
    +            if hasattr(CTypesStructOrUnion, fname):
    +                raise ValueError("the field name %r conflicts in "
    +                                 "the ctypes backend" % fname)
    +            if bitsize < 0:
    +                def getter(self, fname=fname, BField=BField,
    +                           offset=CTypesStructOrUnion._offsetof(fname),
    +                           PTR=ctypes.POINTER(BField._ctype)):
    +                    addr = ctypes.addressof(self._blob)
    +                    p = ctypes.cast(addr + offset, PTR)
    +                    return BField._from_ctypes(p.contents)
    +                def setter(self, value, fname=fname, BField=BField):
    +                    setattr(self._blob, fname, BField._to_ctypes(value))
    +                #
    +                if issubclass(BField, CTypesGenericArray):
    +                    setter = None
    +                    if BField._declared_length == 0:
    +                        def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
    +                                   offset=CTypesStructOrUnion._offsetof(fname),
    +                                   PTR=ctypes.POINTER(BField._ctype)):
    +                            addr = ctypes.addressof(self._blob)
    +                            p = ctypes.cast(addr + offset, PTR)
    +                            return BFieldPtr._from_ctypes(p)
    +                #
    +            else:
    +                def getter(self, fname=fname, BField=BField):
    +                    return BField._from_ctypes(getattr(self._blob, fname))
    +                def setter(self, value, fname=fname, BField=BField):
    +                    # xxx obscure workaround
    +                    value = BField._to_ctypes(value)
    +                    oldvalue = getattr(self._blob, fname)
    +                    setattr(self._blob, fname, value)
    +                    if value != getattr(self._blob, fname):
    +                        setattr(self._blob, fname, oldvalue)
    +                        raise OverflowError("value too large for bitfield")
    +            setattr(CTypesStructOrUnion, fname, property(getter, setter))
    +        #
    +        CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
    +        for fname in fnames:
    +            if hasattr(CTypesPtr, fname):
    +                raise ValueError("the field name %r conflicts in "
    +                                 "the ctypes backend" % fname)
    +            def getter(self, fname=fname):
    +                return getattr(self[0], fname)
    +            def setter(self, value, fname=fname):
    +                setattr(self[0], fname, value)
    +            setattr(CTypesPtr, fname, property(getter, setter))
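+        # Net effect (illustrative, not upstream): for a completed
+        # 'struct point { int x, y; }', instances expose .x/.y properties
+        # backed by the ctypes Structure, and the cached 'struct point *'
+        # class gains matching properties forwarding to self[0].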
    +
    +    def new_function_type(self, BArgs, BResult, has_varargs):
    +        nameargs = [BArg._get_c_name() for BArg in BArgs]
    +        if has_varargs:
    +            nameargs.append('...')
    +        nameargs = ', '.join(nameargs)
    +        #
    +        class CTypesFunctionPtr(CTypesGenericPtr):
    +            __slots__ = ['_own_callback', '_name']
    +            _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
    +                                      *[BArg._ctype for BArg in BArgs],
    +                                      use_errno=True)
    +            _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
    +
    +            def __init__(self, init, error=None):
    +                # create a callback to the Python callable init()
    +                import traceback
    +                assert not has_varargs, "varargs not supported for callbacks"
    +                if getattr(BResult, '_ctype', None) is not None:
    +                    error = BResult._from_ctypes(
    +                        BResult._create_ctype_obj(error))
    +                else:
    +                    error = None
    +                def callback(*args):
    +                    args2 = []
    +                    for arg, BArg in zip(args, BArgs):
    +                        args2.append(BArg._from_ctypes(arg))
    +                    try:
    +                        res2 = init(*args2)
    +                        res2 = BResult._to_ctypes(res2)
    +                    except:
    +                        traceback.print_exc()
    +                        res2 = error
    +                    if issubclass(BResult, CTypesGenericPtr):
    +                        if res2:
    +                            res2 = ctypes.cast(res2, ctypes.c_void_p).value
    +                                # .value: http://bugs.python.org/issue1574593
    +                        else:
    +                            res2 = None
    +                    #print repr(res2)
    +                    return res2
    +                if issubclass(BResult, CTypesGenericPtr):
    +                    # The only pointers callbacks can return are void*s:
    +                    # http://bugs.python.org/issue5710
    +                    callback_ctype = ctypes.CFUNCTYPE(
    +                        ctypes.c_void_p,
    +                        *[BArg._ctype for BArg in BArgs],
    +                        use_errno=True)
    +                else:
    +                    callback_ctype = CTypesFunctionPtr._ctype
    +                self._as_ctype_ptr = callback_ctype(callback)
    +                self._address = ctypes.cast(self._as_ctype_ptr,
    +                                            ctypes.c_void_p).value
    +                self._own_callback = init
    +
    +            @staticmethod
    +            def _initialize(ctypes_ptr, value):
    +                if value:
+                    raise NotImplementedError("ctypes backend: not supported: "
+                                              "initializers for function pointers")
    +
    +            def __repr__(self):
    +                c_name = getattr(self, '_name', None)
    +                if c_name:
    +                    i = self._reftypename.index('(* &)')
    +                    if self._reftypename[i-1] not in ' )*':
    +                        c_name = ' ' + c_name
    +                    c_name = self._reftypename.replace('(* &)', c_name)
    +                return CTypesData.__repr__(self, c_name)
    +
    +            def _get_own_repr(self):
    +                if getattr(self, '_own_callback', None) is not None:
    +                    return 'calling %r' % (self._own_callback,)
    +                return super(CTypesFunctionPtr, self)._get_own_repr()
    +
    +            def __call__(self, *args):
    +                if has_varargs:
    +                    assert len(args) >= len(BArgs)
    +                    extraargs = args[len(BArgs):]
    +                    args = args[:len(BArgs)]
    +                else:
    +                    assert len(args) == len(BArgs)
    +                ctypes_args = []
    +                for arg, BArg in zip(args, BArgs):
    +                    ctypes_args.append(BArg._arg_to_ctypes(arg))
    +                if has_varargs:
    +                    for i, arg in enumerate(extraargs):
    +                        if arg is None:
    +                            ctypes_args.append(ctypes.c_void_p(0))  # NULL
    +                            continue
    +                        if not isinstance(arg, CTypesData):
    +                            raise TypeError(
    +                                "argument %d passed in the variadic part "
    +                                "needs to be a cdata object (got %s)" %
    +                                (1 + len(BArgs) + i, type(arg).__name__))
    +                        ctypes_args.append(arg._arg_to_ctypes(arg))
    +                result = self._as_ctype_ptr(*ctypes_args)
    +                return BResult._from_ctypes(result)
    +        #
    +        CTypesFunctionPtr._fix_class()
    +        return CTypesFunctionPtr
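+
+    # Illustrative sketch (names hypothetical, not upstream):
+    #     BFunc = backend.new_function_type((BInt,), BInt, False)
+    #     cb = BFunc(lambda x: x + 1)   # wraps the callable in a CFUNCTYPE
+    #     cb(41)                        # -> 42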
    +
    +    def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
    +        assert isinstance(name, str)
    +        reverse_mapping = dict(zip(reversed(enumvalues),
    +                                   reversed(enumerators)))
    +        #
    +        class CTypesEnum(CTypesInt):
    +            __slots__ = []
    +            _reftypename = '%s &' % name
    +
    +            def _get_own_repr(self):
    +                value = self._value
    +                try:
    +                    return '%d: %s' % (value, reverse_mapping[value])
    +                except KeyError:
    +                    return str(value)
    +
    +            def _to_string(self, maxlen):
    +                value = self._value
    +                try:
    +                    return reverse_mapping[value]
    +                except KeyError:
    +                    return str(value)
    +        #
    +        CTypesEnum._fix_class()
    +        return CTypesEnum
    +
    +    def get_errno(self):
    +        return ctypes.get_errno()
    +
    +    def set_errno(self, value):
    +        ctypes.set_errno(value)
    +
    +    def string(self, b, maxlen=-1):
    +        return b._to_string(maxlen)
    +
    +    def buffer(self, bptr, size=-1):
    +        raise NotImplementedError("buffer() with ctypes backend")
    +
    +    def sizeof(self, cdata_or_BType):
    +        if isinstance(cdata_or_BType, CTypesData):
    +            return cdata_or_BType._get_size_of_instance()
    +        else:
    +            assert issubclass(cdata_or_BType, CTypesData)
    +            return cdata_or_BType._get_size()
    +
    +    def alignof(self, BType):
    +        assert issubclass(BType, CTypesData)
    +        return BType._alignment()
    +
    +    def newp(self, BType, source):
    +        if not issubclass(BType, CTypesData):
    +            raise TypeError
    +        return BType._newp(source)
    +
    +    def cast(self, BType, source):
    +        return BType._cast_from(source)
    +
    +    def callback(self, BType, source, error, onerror):
    +        assert onerror is None   # XXX not implemented
    +        return BType(source, error)
    +
    +    _weakref_cache_ref = None
    +
    +    def gcp(self, cdata, destructor, size=0):
    +        if self._weakref_cache_ref is None:
    +            import weakref
    +            class MyRef(weakref.ref):
    +                def __eq__(self, other):
    +                    myref = self()
    +                    return self is other or (
    +                        myref is not None and myref is other())
    +                def __ne__(self, other):
    +                    return not (self == other)
    +                def __hash__(self):
    +                    try:
    +                        return self._hash
    +                    except AttributeError:
    +                        self._hash = hash(self())
    +                        return self._hash
    +            self._weakref_cache_ref = {}, MyRef
    +        weak_cache, MyRef = self._weakref_cache_ref
    +
    +        if destructor is None:
    +            try:
    +                del weak_cache[MyRef(cdata)]
    +            except KeyError:
+                raise TypeError("Can remove destructor only on an object "
    +                                "previously returned by ffi.gc()")
    +            return None
    +
    +        def remove(k):
    +            cdata, destructor = weak_cache.pop(k, (None, None))
    +            if destructor is not None:
    +                destructor(cdata)
    +
    +        new_cdata = self.cast(self.typeof(cdata), cdata)
    +        assert new_cdata is not cdata
    +        weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
    +        return new_cdata
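+
+    # Illustrative note (not upstream): this is the ctypes emulation of
+    # ffi.gc(). It returns a cast-copy of 'cdata' and keys a weakref on the
+    # copy; when that copy is garbage-collected, remove() invokes
+    # destructor(cdata).  Passing destructor=None cancels a previous one.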
    +
    +    typeof = type
    +
    +    def getcname(self, BType, replace_with):
    +        return BType._get_c_name(replace_with)
    +
    +    def typeoffsetof(self, BType, fieldname, num=0):
    +        if isinstance(fieldname, str):
    +            if num == 0 and issubclass(BType, CTypesGenericPtr):
    +                BType = BType._BItem
    +            if not issubclass(BType, CTypesBaseStructOrUnion):
    +                raise TypeError("expected a struct or union ctype")
    +            BField = BType._bfield_types[fieldname]
    +            if BField is Ellipsis:
    +                raise TypeError("not supported for bitfields")
    +            return (BField, BType._offsetof(fieldname))
    +        elif isinstance(fieldname, (int, long)):
    +            if issubclass(BType, CTypesGenericArray):
    +                BType = BType._CTPtr
    +            if not issubclass(BType, CTypesGenericPtr):
    +                raise TypeError("expected an array or ptr ctype")
    +            BItem = BType._BItem
    +            offset = BItem._get_size() * fieldname
    +            if offset > sys.maxsize:
    +                raise OverflowError
    +            return (BItem, offset)
    +        else:
    +            raise TypeError(type(fieldname))
    +
    +    def rawaddressof(self, BTypePtr, cdata, offset=None):
    +        if isinstance(cdata, CTypesBaseStructOrUnion):
    +            ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
    +        elif isinstance(cdata, CTypesGenericPtr):
    +            if offset is None or not issubclass(type(cdata)._BItem,
    +                                                CTypesBaseStructOrUnion):
    +                raise TypeError("unexpected cdata type")
    +            ptr = type(cdata)._to_ctypes(cdata)
    +        elif isinstance(cdata, CTypesGenericArray):
    +            ptr = type(cdata)._to_ctypes(cdata)
    +        else:
+            raise TypeError("expected a <cdata 'struct-or-union'>")
    +        if offset:
    +            ptr = ctypes.cast(
    +                ctypes.c_void_p(
    +                    ctypes.cast(ptr, ctypes.c_void_p).value + offset),
    +                type(ptr))
    +        return BTypePtr._from_ctypes(ptr)
    +
    +
    +class CTypesLibrary(object):
    +
    +    def __init__(self, backend, cdll):
    +        self.backend = backend
    +        self.cdll = cdll
    +
    +    def load_function(self, BType, name):
    +        c_func = getattr(self.cdll, name)
    +        funcobj = BType._from_ctypes(c_func)
    +        funcobj._name = name
    +        return funcobj
    +
    +    def read_variable(self, BType, name):
    +        try:
    +            ctypes_obj = BType._ctype.in_dll(self.cdll, name)
    +        except AttributeError as e:
    +            raise NotImplementedError(e)
    +        return BType._from_ctypes(ctypes_obj)
    +
    +    def write_variable(self, BType, name, value):
    +        new_ctypes_obj = BType._to_ctypes(value)
    +        ctypes_obj = BType._ctype.in_dll(self.cdll, name)
    +        ctypes.memmove(ctypes.addressof(ctypes_obj),
    +                       ctypes.addressof(new_ctypes_obj),
    +                       ctypes.sizeof(BType._ctype))
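+
+# Illustrative usage sketch (not upstream; 'backend' is an instance of the
+# backend class above, BFunc a hypothetical function-pointer type):
+#     lib = backend.load_library('libm.so.6')
+#     pow = lib.load_function(BFunc, 'pow')   # a named CTypesFunctionPtr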
    diff --git a/server/www/packages/packages-windows/x86/cffi/cffi_opcode.py b/server/www/packages/packages-windows/x86/cffi/cffi_opcode.py
    new file mode 100644
    index 0000000..b26ccc9
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/cffi_opcode.py
    @@ -0,0 +1,187 @@
    +from .error import VerificationError
    +
    +class CffiOp(object):
    +    def __init__(self, op, arg):
    +        self.op = op
    +        self.arg = arg
    +
    +    def as_c_expr(self):
    +        if self.op is None:
    +            assert isinstance(self.arg, str)
    +            return '(_cffi_opcode_t)(%s)' % (self.arg,)
    +        classname = CLASS_NAME[self.op]
    +        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
    +
    +    def as_python_bytes(self):
    +        if self.op is None and self.arg.isdigit():
    +            value = int(self.arg)     # non-negative: '-' not in self.arg
    +            if value >= 2**31:
    +                raise OverflowError("cannot emit %r: limited to 2**31-1"
    +                                    % (self.arg,))
    +            return format_four_bytes(value)
    +        if isinstance(self.arg, str):
    +            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
    +        return format_four_bytes((self.arg << 8) | self.op)
    +
    +    def __str__(self):
    +        classname = CLASS_NAME.get(self.op, self.op)
    +        return '(%s %s)' % (classname, self.arg)
    +
    +def format_four_bytes(num):
    +    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
    +        (num >> 24) & 0xFF,
    +        (num >> 16) & 0xFF,
    +        (num >>  8) & 0xFF,
    +        (num      ) & 0xFF)
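+
+# Worked example (illustrative, not upstream): with OP_POINTER == 3 below,
+# CffiOp(OP_POINTER, 5).as_python_bytes() packs (5 << 8) | 3 == 0x00000503
+# and returns the escaped string '\x00\x00\x05\x03'.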
    +
    +OP_PRIMITIVE       = 1
    +OP_POINTER         = 3
    +OP_ARRAY           = 5
    +OP_OPEN_ARRAY      = 7
    +OP_STRUCT_UNION    = 9
    +OP_ENUM            = 11
    +OP_FUNCTION        = 13
    +OP_FUNCTION_END    = 15
    +OP_NOOP            = 17
    +OP_BITFIELD        = 19
    +OP_TYPENAME        = 21
    +OP_CPYTHON_BLTN_V  = 23   # varargs
    +OP_CPYTHON_BLTN_N  = 25   # noargs
    +OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
    +OP_CONSTANT        = 29
    +OP_CONSTANT_INT    = 31
    +OP_GLOBAL_VAR      = 33
    +OP_DLOPEN_FUNC     = 35
    +OP_DLOPEN_CONST    = 37
    +OP_GLOBAL_VAR_F    = 39
    +OP_EXTERN_PYTHON   = 41
    +
    +PRIM_VOID          = 0
    +PRIM_BOOL          = 1
    +PRIM_CHAR          = 2
    +PRIM_SCHAR         = 3
    +PRIM_UCHAR         = 4
    +PRIM_SHORT         = 5
    +PRIM_USHORT        = 6
    +PRIM_INT           = 7
    +PRIM_UINT          = 8
    +PRIM_LONG          = 9
    +PRIM_ULONG         = 10
    +PRIM_LONGLONG      = 11
    +PRIM_ULONGLONG     = 12
    +PRIM_FLOAT         = 13
    +PRIM_DOUBLE        = 14
    +PRIM_LONGDOUBLE    = 15
    +
    +PRIM_WCHAR         = 16
    +PRIM_INT8          = 17
    +PRIM_UINT8         = 18
    +PRIM_INT16         = 19
    +PRIM_UINT16        = 20
    +PRIM_INT32         = 21
    +PRIM_UINT32        = 22
    +PRIM_INT64         = 23
    +PRIM_UINT64        = 24
    +PRIM_INTPTR        = 25
    +PRIM_UINTPTR       = 26
    +PRIM_PTRDIFF       = 27
    +PRIM_SIZE          = 28
    +PRIM_SSIZE         = 29
    +PRIM_INT_LEAST8    = 30
    +PRIM_UINT_LEAST8   = 31
    +PRIM_INT_LEAST16   = 32
    +PRIM_UINT_LEAST16  = 33
    +PRIM_INT_LEAST32   = 34
    +PRIM_UINT_LEAST32  = 35
    +PRIM_INT_LEAST64   = 36
    +PRIM_UINT_LEAST64  = 37
    +PRIM_INT_FAST8     = 38
    +PRIM_UINT_FAST8    = 39
    +PRIM_INT_FAST16    = 40
    +PRIM_UINT_FAST16   = 41
    +PRIM_INT_FAST32    = 42
    +PRIM_UINT_FAST32   = 43
    +PRIM_INT_FAST64    = 44
    +PRIM_UINT_FAST64   = 45
    +PRIM_INTMAX        = 46
    +PRIM_UINTMAX       = 47
    +PRIM_FLOATCOMPLEX  = 48
    +PRIM_DOUBLECOMPLEX = 49
    +PRIM_CHAR16        = 50
    +PRIM_CHAR32        = 51
    +
    +_NUM_PRIM          = 52
    +_UNKNOWN_PRIM          = -1
    +_UNKNOWN_FLOAT_PRIM    = -2
    +_UNKNOWN_LONG_DOUBLE   = -3
    +
    +_IO_FILE_STRUCT        = -1
    +
    +PRIMITIVE_TO_INDEX = {
    +    'char':               PRIM_CHAR,
    +    'short':              PRIM_SHORT,
    +    'int':                PRIM_INT,
    +    'long':               PRIM_LONG,
    +    'long long':          PRIM_LONGLONG,
    +    'signed char':        PRIM_SCHAR,
    +    'unsigned char':      PRIM_UCHAR,
    +    'unsigned short':     PRIM_USHORT,
    +    'unsigned int':       PRIM_UINT,
    +    'unsigned long':      PRIM_ULONG,
    +    'unsigned long long': PRIM_ULONGLONG,
    +    'float':              PRIM_FLOAT,
    +    'double':             PRIM_DOUBLE,
    +    'long double':        PRIM_LONGDOUBLE,
    +    'float _Complex':     PRIM_FLOATCOMPLEX,
    +    'double _Complex':    PRIM_DOUBLECOMPLEX,
    +    '_Bool':              PRIM_BOOL,
    +    'wchar_t':            PRIM_WCHAR,
    +    'char16_t':           PRIM_CHAR16,
    +    'char32_t':           PRIM_CHAR32,
    +    'int8_t':             PRIM_INT8,
    +    'uint8_t':            PRIM_UINT8,
    +    'int16_t':            PRIM_INT16,
    +    'uint16_t':           PRIM_UINT16,
    +    'int32_t':            PRIM_INT32,
    +    'uint32_t':           PRIM_UINT32,
    +    'int64_t':            PRIM_INT64,
    +    'uint64_t':           PRIM_UINT64,
    +    'intptr_t':           PRIM_INTPTR,
    +    'uintptr_t':          PRIM_UINTPTR,
    +    'ptrdiff_t':          PRIM_PTRDIFF,
    +    'size_t':             PRIM_SIZE,
    +    'ssize_t':            PRIM_SSIZE,
    +    'int_least8_t':       PRIM_INT_LEAST8,
    +    'uint_least8_t':      PRIM_UINT_LEAST8,
    +    'int_least16_t':      PRIM_INT_LEAST16,
    +    'uint_least16_t':     PRIM_UINT_LEAST16,
    +    'int_least32_t':      PRIM_INT_LEAST32,
    +    'uint_least32_t':     PRIM_UINT_LEAST32,
    +    'int_least64_t':      PRIM_INT_LEAST64,
    +    'uint_least64_t':     PRIM_UINT_LEAST64,
    +    'int_fast8_t':        PRIM_INT_FAST8,
    +    'uint_fast8_t':       PRIM_UINT_FAST8,
    +    'int_fast16_t':       PRIM_INT_FAST16,
    +    'uint_fast16_t':      PRIM_UINT_FAST16,
    +    'int_fast32_t':       PRIM_INT_FAST32,
    +    'uint_fast32_t':      PRIM_UINT_FAST32,
    +    'int_fast64_t':       PRIM_INT_FAST64,
    +    'uint_fast64_t':      PRIM_UINT_FAST64,
    +    'intmax_t':           PRIM_INTMAX,
    +    'uintmax_t':          PRIM_UINTMAX,
    +    }
    +
    +F_UNION         = 0x01
    +F_CHECK_FIELDS  = 0x02
    +F_PACKED        = 0x04
    +F_EXTERNAL      = 0x08
    +F_OPAQUE        = 0x10
    +
    +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
    +                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
    +                             'F_EXTERNAL', 'F_OPAQUE']])
    +
    +CLASS_NAME = {}
    +for _name, _value in list(globals().items()):
    +    if _name.startswith('OP_') and isinstance(_value, int):
    +        CLASS_NAME[_value] = _name[3:]
    diff --git a/server/www/packages/packages-windows/x86/cffi/commontypes.py b/server/www/packages/packages-windows/x86/cffi/commontypes.py
    new file mode 100644
    index 0000000..e5045ee
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/commontypes.py
    @@ -0,0 +1,80 @@
    +import sys
    +from . import model
    +from .error import FFIError
    +
    +
    +COMMON_TYPES = {}
    +
    +try:
    +    # fetch "bool" and all simple Windows types
    +    from _cffi_backend import _get_common_types
    +    _get_common_types(COMMON_TYPES)
    +except ImportError:
    +    pass
    +
    +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
    +COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above
    +
    +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    +    if _type.endswith('_t'):
    +        COMMON_TYPES[_type] = _type
    +del _type
    +
    +_CACHE = {}
    +
    +def resolve_common_type(parser, commontype):
    +    try:
    +        return _CACHE[commontype]
    +    except KeyError:
    +        cdecl = COMMON_TYPES.get(commontype, commontype)
    +        if not isinstance(cdecl, str):
    +            result, quals = cdecl, 0    # cdecl is already a BaseType
    +        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    +            result, quals = model.PrimitiveType(cdecl), 0
    +        elif cdecl == 'set-unicode-needed':
    +            raise FFIError("The Windows type %r is only available after "
    +                           "you call ffi.set_unicode()" % (commontype,))
    +        else:
    +            if commontype == cdecl:
    +                raise FFIError(
    +                    "Unsupported type: %r.  Please look at "
    +        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
    +                    "and file an issue if you think this type should really "
    +                    "be supported." % (commontype,))
    +            result, quals = parser.parse_type_and_quals(cdecl)   # recursive
    +
    +        assert isinstance(result, model.BaseTypeByIdentity)
    +        _CACHE[commontype] = result, quals
    +        return result, quals
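+
+# Illustrative examples (not upstream): resolve_common_type(parser,
+# 'uint32_t') returns (model.PrimitiveType('uint32_t'), 0) and caches it;
+# an unrecognized name falls through to parser.parse_type_and_quals().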
    +
    +
    +# ____________________________________________________________
    +# extra types for Windows (most of them are in commontypes.c)
    +
    +
    +def win_common_types():
    +    return {
    +        "UNICODE_STRING": model.StructType(
    +            "_UNICODE_STRING",
    +            ["Length",
    +             "MaximumLength",
    +             "Buffer"],
    +            [model.PrimitiveType("unsigned short"),
    +             model.PrimitiveType("unsigned short"),
    +             model.PointerType(model.PrimitiveType("wchar_t"))],
    +            [-1, -1, -1]),
    +        "PUNICODE_STRING": "UNICODE_STRING *",
    +        "PCUNICODE_STRING": "const UNICODE_STRING *",
    +
    +        "TBYTE": "set-unicode-needed",
    +        "TCHAR": "set-unicode-needed",
    +        "LPCTSTR": "set-unicode-needed",
    +        "PCTSTR": "set-unicode-needed",
    +        "LPTSTR": "set-unicode-needed",
    +        "PTSTR": "set-unicode-needed",
    +        "PTBYTE": "set-unicode-needed",
    +        "PTCHAR": "set-unicode-needed",
    +        }
    +
    +if sys.platform == 'win32':
    +    COMMON_TYPES.update(win_common_types())
    diff --git a/server/www/packages/packages-windows/x86/cffi/cparser.py b/server/www/packages/packages-windows/x86/cffi/cparser.py
    new file mode 100644
    index 0000000..50f2494
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/cparser.py
    @@ -0,0 +1,963 @@
    +from . import model
    +from .commontypes import COMMON_TYPES, resolve_common_type
    +from .error import FFIError, CDefError
    +try:
    +    from . import _pycparser as pycparser
    +except ImportError:
    +    import pycparser
    +import weakref, re, sys
    +
    +try:
    +    if sys.version_info < (3,):
    +        import thread as _thread
    +    else:
    +        import _thread
    +    lock = _thread.allocate_lock()
    +except ImportError:
    +    lock = None
    +
    +def _workaround_for_static_import_finders():
    +    # Issue #392: packaging tools like cx_Freeze can not find these
    +    # because pycparser uses exec dynamic import.  This is an obscure
    +    # workaround.  This function is never called.
    +    import pycparser.yacctab
    +    import pycparser.lextab
    +
+CDEF_SOURCE_STRING = "<cdef source string>"
    +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
    +                        re.DOTALL | re.MULTILINE)
    +_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
    +                        r"\b((?:[^\n\\]|\\.)*?)$",
    +                        re.DOTALL | re.MULTILINE)
    +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
    +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
    +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
    +_r_words = re.compile(r"\w+|\S")
    +_parser_cache = None
    +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
    +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
    +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
    +_r_cdecl = re.compile(r"\b__cdecl\b")
    +_r_extern_python = re.compile(r'\bextern\s*"'
    +                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
    +_r_star_const_space = re.compile(       # matches "* const "
    +    r"[*]\s*((const|volatile|restrict)\b\s*)+")
    +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
    +                              r"\.\.\.")
    +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
    +
    +def _get_parser():
    +    global _parser_cache
    +    if _parser_cache is None:
    +        _parser_cache = pycparser.CParser()
    +    return _parser_cache
    +
    +def _workaround_for_old_pycparser(csource):
    +    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
    +    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
    +    # for "char***(*const)".  This means we can't tell the difference
    +    # afterwards.  But "char(*const(***))" gives us the right syntax
    +    # tree.  The issue only occurs if there are several stars in
    +    # sequence with no parenthesis inbetween, just possibly qualifiers.
    +    # Attempt to fix it by adding some parentheses in the source: each
    +    # time we see "* const" or "* const *", we add an opening
    +    # parenthesis before each star---the hard part is figuring out where
    +    # to close them.
    +    parts = []
    +    while True:
    +        match = _r_star_const_space.search(csource)
    +        if not match:
    +            break
    +        #print repr(''.join(parts)+csource), '=>',
    +        parts.append(csource[:match.start()])
    +        parts.append('('); closing = ')'
    +        parts.append(match.group())   # e.g. "* const "
    +        endpos = match.end()
    +        if csource.startswith('*', endpos):
    +            parts.append('('); closing += ')'
    +        level = 0
    +        i = endpos
    +        while i < len(csource):
    +            c = csource[i]
    +            if c == '(':
    +                level += 1
    +            elif c == ')':
    +                if level == 0:
    +                    break
    +                level -= 1
    +            elif c in ',;=':
    +                if level == 0:
    +                    break
    +            i += 1
    +        csource = csource[endpos:i] + closing + csource[i:]
    +        #print repr(''.join(parts)+csource)
    +    parts.append(csource)
    +    return ''.join(parts)
    +
    +def _preprocess_extern_python(csource):
    +    # input: `extern "Python" int foo(int);` or
    +    #        `extern "Python" { int foo(int); }`
    +    # output:
    +    #     void __cffi_extern_python_start;
    +    #     int foo(int);
    +    #     void __cffi_extern_python_stop;
    +    #
    +    # input: `extern "Python+C" int foo(int);`
    +    # output:
    +    #     void __cffi_extern_python_plus_c_start;
    +    #     int foo(int);
    +    #     void __cffi_extern_python_stop;
    +    parts = []
    +    while True:
    +        match = _r_extern_python.search(csource)
    +        if not match:
    +            break
    +        endpos = match.end() - 1
    +        #print
    +        #print ''.join(parts)+csource
    +        #print '=>'
    +        parts.append(csource[:match.start()])
    +        if 'C' in match.group(1):
    +            parts.append('void __cffi_extern_python_plus_c_start; ')
    +        else:
    +            parts.append('void __cffi_extern_python_start; ')
    +        if csource[endpos] == '{':
    +            # grouping variant
    +            closing = csource.find('}', endpos)
    +            if closing < 0:
    +                raise CDefError("'extern \"Python\" {': no '}' found")
    +            if csource.find('{', endpos + 1, closing) >= 0:
    +                raise NotImplementedError("cannot use { } inside a block "
    +                                          "'extern \"Python\" { ... }'")
    +            parts.append(csource[endpos+1:closing])
    +            csource = csource[closing+1:]
    +        else:
    +            # non-grouping variant
    +            semicolon = csource.find(';', endpos)
    +            if semicolon < 0:
    +                raise CDefError("'extern \"Python\": no ';' found")
    +            parts.append(csource[endpos:semicolon+1])
    +            csource = csource[semicolon+1:]
    +        parts.append(' void __cffi_extern_python_stop;')
    +        #print ''.join(parts)+csource
    +        #print
    +    parts.append(csource)
    +    return ''.join(parts)
    +
    +def _warn_for_string_literal(csource):
    +    if '"' not in csource:
    +        return
    +    for line in csource.splitlines():
    +        if '"' in line and not line.lstrip().startswith('#'):
    +            import warnings
    +            warnings.warn("String literal found in cdef() or type source. "
    +                          "String literals are ignored here, but you should "
    +                          "remove them anyway because some character sequences "
    +                          "confuse pre-parsing.")
    +            break
    +
    +def _warn_for_non_extern_non_static_global_variable(decl):
    +    if not decl.storage:
    +        import warnings
    +        warnings.warn("Global variable '%s' in cdef(): for consistency "
    +                      "with C it should have a storage class specifier "
    +                      "(usually 'extern')" % (decl.name,))
    +
    +def _preprocess(csource):
+    # Remove comments.  NOTE: this only works because the cdef() section
+    # should not contain any string literals!
    +    csource = _r_comment.sub(' ', csource)
    +    # Remove the "#define FOO x" lines
    +    macros = {}
    +    for match in _r_define.finditer(csource):
    +        macroname, macrovalue = match.groups()
    +        macrovalue = macrovalue.replace('\\\n', '').strip()
    +        macros[macroname] = macrovalue
    +    csource = _r_define.sub('', csource)
    +    #
    +    if pycparser.__version__ < '2.14':
    +        csource = _workaround_for_old_pycparser(csource)
    +    #
    +    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
    +    # It doesn't make sense for the return type of a function to be
    +    # "volatile volatile const", so we abuse it to detect __stdcall...
    +    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
    +    # syntax, so we place the "volatile" before the opening parenthesis.
    +    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
    +    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
    +    csource = _r_cdecl.sub(' ', csource)
    +    #
    +    # Replace `extern "Python"` with start/end markers
    +    csource = _preprocess_extern_python(csource)
    +    #
    +    # Now there should not be any string literal left; warn if we get one
    +    _warn_for_string_literal(csource)
    +    #
    +    # Replace "[...]" with "[__dotdotdotarray__]"
    +    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
    +    #
    +    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
    +    # occur only at the end of enums; at the end of structs we have "...;}"
    +    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
    +    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
    +    # giving an unknown value.
    +    matches = list(_r_partial_enum.finditer(csource))
    +    for number, match in enumerate(reversed(matches)):
    +        p = match.start()
    +        if csource[p] == '=':
    +            p2 = csource.find('...', p, match.end())
    +            assert p2 > p
    +            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
    +                                                 csource[p2+3:])
    +        else:
    +            assert csource[p:p+3] == '...'
    +            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
    +                                                 csource[p+3:])
    +    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
    +    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
    +    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
    +    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
    +    # Replace all remaining "..." with the same name, "__dotdotdot__",
    +    # which is declared with a typedef for the purpose of C parsing.
    +    return csource.replace('...', ' __dotdotdot__ '), macros
    +
    +def _common_type_names(csource):
    +    # Look in the source for what looks like usages of types from the
    +    # list of common types.  A "usage" is approximated here as the
    +    # appearance of the word, minus a "definition" of the type, which
    +    # is the last word in a "typedef" statement.  Approximative only
    +    # but should be fine for all the common types.
    +    look_for_words = set(COMMON_TYPES)
    +    look_for_words.add(';')
    +    look_for_words.add(',')
    +    look_for_words.add('(')
    +    look_for_words.add(')')
    +    look_for_words.add('typedef')
    +    words_used = set()
    +    is_typedef = False
    +    paren = 0
    +    previous_word = ''
    +    for word in _r_words.findall(csource):
    +        if word in look_for_words:
    +            if word == ';':
    +                if is_typedef:
    +                    words_used.discard(previous_word)
    +                    look_for_words.discard(previous_word)
    +                    is_typedef = False
    +            elif word == 'typedef':
    +                is_typedef = True
    +                paren = 0
    +            elif word == '(':
    +                paren += 1
    +            elif word == ')':
    +                paren -= 1
    +            elif word == ',':
    +                if is_typedef and paren == 0:
    +                    words_used.discard(previous_word)
    +                    look_for_words.discard(previous_word)
    +            else:   # word in COMMON_TYPES
    +                words_used.add(word)
    +        previous_word = word
    +    return words_used
    +
    +
    +class Parser(object):
    +
    +    def __init__(self):
    +        self._declarations = {}
    +        self._included_declarations = set()
    +        self._anonymous_counter = 0
    +        self._structnode2type = weakref.WeakKeyDictionary()
    +        self._options = {}
    +        self._int_constants = {}
    +        self._recomplete = []
    +        self._uses_new_feature = None
    +
    +    def _parse(self, csource):
    +        csource, macros = _preprocess(csource)
    +        # XXX: for more efficiency we would need to poke into the
    +        # internals of CParser...  the following registers the
    +        # typedefs, because their presence or absence influences the
    +        # parsing itself (but what they are typedef'ed to plays no role)
    +        ctn = _common_type_names(csource)
    +        typenames = []
    +        for name in sorted(self._declarations):
    +            if name.startswith('typedef '):
    +                name = name[8:]
    +                typenames.append(name)
    +                ctn.discard(name)
    +        typenames += sorted(ctn)
    +        #
    +        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
    +        for typename in typenames:
    +            csourcelines.append('typedef int %s;' % typename)
    +        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
    +                            ' __dotdotdot__;')
    +        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
    +        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
    +        csourcelines.append(csource)
    +        fullcsource = '\n'.join(csourcelines)
    +        if lock is not None:
    +            lock.acquire()     # pycparser is not thread-safe...
    +        try:
    +            ast = _get_parser().parse(fullcsource)
    +        except pycparser.c_parser.ParseError as e:
    +            self.convert_pycparser_error(e, csource)
    +        finally:
    +            if lock is not None:
    +                lock.release()
    +        # csource will be used to find buggy source text
    +        return ast, macros, csource
    +
    +    def _convert_pycparser_error(self, e, csource):
    +        # xxx look for ":NUM:" at the start of str(e)
    +        # and interpret that as a line number.  This will not work if
    +        # the user gives explicit ``# NUM "FILE"`` directives.
    +        line = None
    +        msg = str(e)
    +        match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
    +        if match:
    +            linenum = int(match.group(1), 10)
    +            csourcelines = csource.splitlines()
    +            if 1 <= linenum <= len(csourcelines):
    +                line = csourcelines[linenum-1]
    +        return line
    +
    +    def convert_pycparser_error(self, e, csource):
    +        line = self._convert_pycparser_error(e, csource)
    +
    +        msg = str(e)
    +        if line:
    +            msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
    +        else:
    +            msg = 'parse error\n%s' % (msg,)
    +        raise CDefError(msg)
    +
    +    def parse(self, csource, override=False, packed=False, pack=None,
    +                    dllexport=False):
    +        if packed:
    +            if packed != True:
    +                raise ValueError("'packed' should be False or True; use "
    +                                 "'pack' to give another value")
    +            if pack:
    +                raise ValueError("cannot give both 'pack' and 'packed'")
    +            pack = 1
    +        elif pack:
    +            if pack & (pack - 1):
    +                raise ValueError("'pack' must be a power of two, not %r" %
    +                    (pack,))
    +        else:
    +            pack = 0
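+        # At this point: parse(csrc, packed=True) gives pack == 1
+        # (fully packed, SF_PACKED); parse(csrc, pack=N) keeps N, a
+        # power of two; plain parse(csrc) gives pack == 0 (natural
+        # alignment).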
    +        prev_options = self._options
    +        try:
    +            self._options = {'override': override,
    +                             'packed': pack,
    +                             'dllexport': dllexport}
    +            self._internal_parse(csource)
    +        finally:
    +            self._options = prev_options
    +
    +    def _internal_parse(self, csource):
    +        ast, macros, csource = self._parse(csource)
    +        # add the macros
    +        self._process_macros(macros)
    +        # find the first "__dotdotdot__" and use that as a separator
    +        # between the repeated typedefs and the real csource
    +        iterator = iter(ast.ext)
    +        for decl in iterator:
    +            if decl.name == '__dotdotdot__':
    +                break
    +        else:
    +            assert 0
    +        current_decl = None
    +        #
    +        try:
    +            self._inside_extern_python = '__cffi_extern_python_stop'
    +            for decl in iterator:
    +                current_decl = decl
    +                if isinstance(decl, pycparser.c_ast.Decl):
    +                    self._parse_decl(decl)
    +                elif isinstance(decl, pycparser.c_ast.Typedef):
    +                    if not decl.name:
    +                        raise CDefError("typedef does not declare any name",
    +                                        decl)
    +                    quals = 0
    +                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
    +                            decl.type.type.names[-1].startswith('__dotdotdot')):
    +                        realtype = self._get_unknown_type(decl)
    +                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
    +                          isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
    +                          isinstance(decl.type.type.type,
    +                                     pycparser.c_ast.IdentifierType) and
    +                          decl.type.type.type.names[-1].startswith('__dotdotdot')):
    +                        realtype = self._get_unknown_ptr_type(decl)
    +                    else:
    +                        realtype, quals = self._get_type_and_quals(
    +                            decl.type, name=decl.name, partial_length_ok=True)
    +                    self._declare('typedef ' + decl.name, realtype, quals=quals)
    +                elif decl.__class__.__name__ == 'Pragma':
    +                    pass    # skip pragma, only in pycparser 2.15
    +                else:
    +                    raise CDefError("unexpected <%s>: this construct is valid "
    +                                    "C but not valid in cdef()" %
    +                                    decl.__class__.__name__, decl)
    +        except CDefError as e:
    +            if len(e.args) == 1:
    +                e.args = e.args + (current_decl,)
    +            raise
    +        except FFIError as e:
    +            msg = self._convert_pycparser_error(e, csource)
    +            if msg:
    +                e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
    +            raise
    +
    +    def _add_constants(self, key, val):
    +        if key in self._int_constants:
    +            if self._int_constants[key] == val:
    +                return     # ignore identical double declarations
    +            raise FFIError(
    +                "multiple declarations of constant: %s" % (key,))
    +        self._int_constants[key] = val
    +
    +    def _add_integer_constant(self, name, int_str):
    +        int_str = int_str.lower().rstrip("ul")
    +        neg = int_str.startswith('-')
    +        if neg:
    +            int_str = int_str[1:]
    +        # "010" is not valid oct in py3
    +        if (int_str.startswith("0") and int_str != '0'
    +                and not int_str.startswith("0x")):
    +            int_str = "0o" + int_str[1:]
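+        # e.g. C's "010" (octal) becomes "0o10", so int(int_str, 0) == 8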
    +        pyvalue = int(int_str, 0)
    +        if neg:
    +            pyvalue = -pyvalue
    +        self._add_constants(name, pyvalue)
    +        self._declare('macro ' + name, pyvalue)
    +
    +    def _process_macros(self, macros):
    +        for key, value in macros.items():
    +            value = value.strip()
    +            if _r_int_literal.match(value):
    +                self._add_integer_constant(key, value)
    +            elif value == '...':
    +                self._declare('macro ' + key, value)
    +            else:
    +                raise CDefError(
    +                    'only supports one of the following syntax:\n'
    +                    '  #define %s ...     (literally dot-dot-dot)\n'
    +                    '  #define %s NUMBER  (with NUMBER an integer'
    +                                    ' constant, decimal/hex/octal)\n'
    +                    'got:\n'
    +                    '  #define %s %s'
    +                    % (key, key, key, value))
    +
    +    def _declare_function(self, tp, quals, decl):
    +        tp = self._get_type_pointer(tp, quals)
    +        if self._options.get('dllexport'):
    +            tag = 'dllexport_python '
    +        elif self._inside_extern_python == '__cffi_extern_python_start':
    +            tag = 'extern_python '
    +        elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
    +            tag = 'extern_python_plus_c '
    +        else:
    +            tag = 'function '
    +        self._declare(tag + decl.name, tp)
    +
    +    def _parse_decl(self, decl):
    +        node = decl.type
    +        if isinstance(node, pycparser.c_ast.FuncDecl):
    +            tp, quals = self._get_type_and_quals(node, name=decl.name)
    +            assert isinstance(tp, model.RawFunctionType)
    +            self._declare_function(tp, quals, decl)
    +        else:
    +            if isinstance(node, pycparser.c_ast.Struct):
    +                self._get_struct_union_enum_type('struct', node)
    +            elif isinstance(node, pycparser.c_ast.Union):
    +                self._get_struct_union_enum_type('union', node)
    +            elif isinstance(node, pycparser.c_ast.Enum):
    +                self._get_struct_union_enum_type('enum', node)
    +            elif not decl.name:
    +                raise CDefError("construct does not declare any variable",
    +                                decl)
    +            #
    +            if decl.name:
    +                tp, quals = self._get_type_and_quals(node,
    +                                                     partial_length_ok=True)
    +                if tp.is_raw_function:
    +                    self._declare_function(tp, quals, decl)
    +                elif (tp.is_integer_type() and
    +                        hasattr(decl, 'init') and
    +                        hasattr(decl.init, 'value') and
    +                        _r_int_literal.match(decl.init.value)):
    +                    self._add_integer_constant(decl.name, decl.init.value)
    +                elif (tp.is_integer_type() and
    +                        isinstance(decl.init, pycparser.c_ast.UnaryOp) and
    +                        decl.init.op == '-' and
    +                        hasattr(decl.init.expr, 'value') and
    +                        _r_int_literal.match(decl.init.expr.value)):
    +                    self._add_integer_constant(decl.name,
    +                                               '-' + decl.init.expr.value)
    +                elif (tp is model.void_type and
    +                      decl.name.startswith('__cffi_extern_python_')):
    +                    # hack: `extern "Python"` in the C source is replaced
    +                    # with "void __cffi_extern_python_start;" and
    +                    # "void __cffi_extern_python_stop;"
    +                    self._inside_extern_python = decl.name
    +                else:
+                    if self._inside_extern_python != '__cffi_extern_python_stop':
    +                        raise CDefError(
    +                            "cannot declare constants or "
    +                            "variables with 'extern \"Python\"'")
    +                    if (quals & model.Q_CONST) and not tp.is_array_type:
    +                        self._declare('constant ' + decl.name, tp, quals=quals)
    +                    else:
    +                        _warn_for_non_extern_non_static_global_variable(decl)
    +                        self._declare('variable ' + decl.name, tp, quals=quals)
    +
    +    def parse_type(self, cdecl):
    +        return self.parse_type_and_quals(cdecl)[0]
    +
    +    def parse_type_and_quals(self, cdecl):
    +        ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
    +        assert not macros
    +        exprnode = ast.ext[-1].type.args.params[0]
    +        if isinstance(exprnode, pycparser.c_ast.ID):
    +            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
    +        return self._get_type_and_quals(exprnode.type)
    +
    +    def _declare(self, name, obj, included=False, quals=0):
    +        if name in self._declarations:
    +            prevobj, prevquals = self._declarations[name]
    +            if prevobj is obj and prevquals == quals:
    +                return
    +            if not self._options.get('override'):
    +                raise FFIError(
    +                    "multiple declarations of %s (for interactive usage, "
    +                    "try cdef(xx, override=True))" % (name,))
    +        assert '__dotdotdot__' not in name.split()
    +        self._declarations[name] = (obj, quals)
    +        if included:
    +            self._included_declarations.add(obj)
    +
    +    def _extract_quals(self, type):
    +        quals = 0
    +        if isinstance(type, (pycparser.c_ast.TypeDecl,
    +                             pycparser.c_ast.PtrDecl)):
    +            if 'const' in type.quals:
    +                quals |= model.Q_CONST
    +            if 'volatile' in type.quals:
    +                quals |= model.Q_VOLATILE
    +            if 'restrict' in type.quals:
    +                quals |= model.Q_RESTRICT
    +        return quals
    +
    +    def _get_type_pointer(self, type, quals, declname=None):
    +        if isinstance(type, model.RawFunctionType):
    +            return type.as_function_pointer()
    +        if (isinstance(type, model.StructOrUnionOrEnum) and
    +                type.name.startswith('$') and type.name[1:].isdigit() and
    +                type.forcename is None and declname is not None):
    +            return model.NamedPointerType(type, declname, quals)
    +        return model.PointerType(type, quals)
    +
    +    def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
+        # first, dereference typedefs; if we have it already parsed, we're good
    +        if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
    +            isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
    +            len(typenode.type.names) == 1 and
    +            ('typedef ' + typenode.type.names[0]) in self._declarations):
    +            tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
    +            quals |= self._extract_quals(typenode)
    +            return tp, quals
    +        #
    +        if isinstance(typenode, pycparser.c_ast.ArrayDecl):
    +            # array type
    +            if typenode.dim is None:
    +                length = None
    +            else:
    +                length = self._parse_constant(
    +                    typenode.dim, partial_length_ok=partial_length_ok)
    +            tp, quals = self._get_type_and_quals(typenode.type,
    +                                partial_length_ok=partial_length_ok)
    +            return model.ArrayType(tp, length), quals
    +        #
    +        if isinstance(typenode, pycparser.c_ast.PtrDecl):
    +            # pointer type
    +            itemtype, itemquals = self._get_type_and_quals(typenode.type)
    +            tp = self._get_type_pointer(itemtype, itemquals, declname=name)
    +            quals = self._extract_quals(typenode)
    +            return tp, quals
    +        #
    +        if isinstance(typenode, pycparser.c_ast.TypeDecl):
    +            quals = self._extract_quals(typenode)
    +            type = typenode.type
    +            if isinstance(type, pycparser.c_ast.IdentifierType):
    +                # assume a primitive type.  get it from .names, but reduce
    +                # synonyms to a single chosen combination
    +                names = list(type.names)
    +                if names != ['signed', 'char']:    # keep this unmodified
    +                    prefixes = {}
    +                    while names:
    +                        name = names[0]
    +                        if name in ('short', 'long', 'signed', 'unsigned'):
    +                            prefixes[name] = prefixes.get(name, 0) + 1
    +                            del names[0]
    +                        else:
    +                            break
    +                    # ignore the 'signed' prefix below, and reorder the others
    +                    newnames = []
    +                    for prefix in ('unsigned', 'short', 'long'):
    +                        for i in range(prefixes.get(prefix, 0)):
    +                            newnames.append(prefix)
    +                    if not names:
    +                        names = ['int']    # implicitly
    +                    if names == ['int']:   # but kill it if 'short' or 'long'
    +                        if 'short' in prefixes or 'long' in prefixes:
    +                            names = []
    +                    names = newnames + names
    +                ident = ' '.join(names)
    +                if ident == 'void':
    +                    return model.void_type, quals
    +                if ident == '__dotdotdot__':
    +                    raise FFIError(':%d: bad usage of "..."' %
    +                            typenode.coord.line)
    +                tp0, quals0 = resolve_common_type(self, ident)
    +                return tp0, (quals | quals0)
    +            #
    +            if isinstance(type, pycparser.c_ast.Struct):
    +                # 'struct foobar'
    +                tp = self._get_struct_union_enum_type('struct', type, name)
    +                return tp, quals
    +            #
    +            if isinstance(type, pycparser.c_ast.Union):
    +                # 'union foobar'
    +                tp = self._get_struct_union_enum_type('union', type, name)
    +                return tp, quals
    +            #
    +            if isinstance(type, pycparser.c_ast.Enum):
    +                # 'enum foobar'
    +                tp = self._get_struct_union_enum_type('enum', type, name)
    +                return tp, quals
    +        #
    +        if isinstance(typenode, pycparser.c_ast.FuncDecl):
    +            # a function type
    +            return self._parse_function_type(typenode, name), 0
    +        #
    +        # nested anonymous structs or unions end up here
    +        if isinstance(typenode, pycparser.c_ast.Struct):
    +            return self._get_struct_union_enum_type('struct', typenode, name,
    +                                                    nested=True), 0
    +        if isinstance(typenode, pycparser.c_ast.Union):
    +            return self._get_struct_union_enum_type('union', typenode, name,
    +                                                    nested=True), 0
    +        #
    +        raise FFIError(":%d: bad or unsupported type declaration" %
    +                typenode.coord.line)
    +
    +    def _parse_function_type(self, typenode, funcname=None):
    +        params = list(getattr(typenode.args, 'params', []))
    +        for i, arg in enumerate(params):
    +            if not hasattr(arg, 'type'):
    +                raise CDefError("%s arg %d: unknown type '%s'"
    +                    " (if you meant to use the old C syntax of giving"
    +                    " untyped arguments, it is not supported)"
    +                    % (funcname or 'in expression', i + 1,
    +                       getattr(arg, 'name', '?')))
    +        ellipsis = (
    +            len(params) > 0 and
    +            isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
    +            isinstance(params[-1].type.type,
    +                       pycparser.c_ast.IdentifierType) and
    +            params[-1].type.type.names == ['__dotdotdot__'])
    +        if ellipsis:
    +            params.pop()
    +            if not params:
    +                raise CDefError(
    +                    "%s: a function with only '(...)' as argument"
    +                    " is not correct C" % (funcname or 'in expression'))
    +        args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
    +                for argdeclnode in params]
    +        if not ellipsis and args == [model.void_type]:
    +            args = []
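+        # "int f(void);" parses as a single 'void' argument and is
+        # normalized here to an empty argument list.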
    +        result, quals = self._get_type_and_quals(typenode.type)
+        # the 'quals' on the result type are ignored.  HACK: we abuse them
    +        # to detect __stdcall functions: we textually replace "__stdcall"
    +        # with "volatile volatile const" above.
    +        abi = None
    +        if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
    +            if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
    +                abi = '__stdcall'
    +        return model.RawFunctionType(tuple(args), result, ellipsis, abi)
    +
    +    def _as_func_arg(self, type, quals):
    +        if isinstance(type, model.ArrayType):
    +            return model.PointerType(type.item, quals)
    +        elif isinstance(type, model.RawFunctionType):
    +            return type.as_function_pointer()
    +        else:
    +            return type
    +
    +    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
    +        # First, a level of caching on the exact 'type' node of the AST.
    +        # This is obscure, but needed because pycparser "unrolls" declarations
    +        # such as "typedef struct { } foo_t, *foo_p" and we end up with
    +        # an AST that is not a tree, but a DAG, with the "type" node of the
    +        # two branches foo_t and foo_p of the trees being the same node.
    +        # It's a bit silly but detecting "DAG-ness" in the AST tree seems
    +        # to be the only way to distinguish this case from two independent
    +        # structs.  See test_struct_with_two_usages.
    +        try:
    +            return self._structnode2type[type]
    +        except KeyError:
    +            pass
    +        #
    +        # Note that this must handle parsing "struct foo" any number of
    +        # times and always return the same StructType object.  Additionally,
    +        # one of these times (not necessarily the first), the fields of
    +        # the struct can be specified with "struct foo { ...fields... }".
    +        # If no name is given, then we have to create a new anonymous struct
    +        # with no caching; in this case, the fields are either specified
    +        # right now or never.
    +        #
    +        force_name = name
    +        name = type.name
    +        #
    +        # get the type or create it if needed
    +        if name is None:
    +            # 'force_name' is used to guess a more readable name for
    +            # anonymous structs, for the common case "typedef struct { } foo".
    +            if force_name is not None:
    +                explicit_name = '$%s' % force_name
    +            else:
    +                self._anonymous_counter += 1
    +                explicit_name = '$%d' % self._anonymous_counter
    +            tp = None
    +        else:
    +            explicit_name = name
    +            key = '%s %s' % (kind, name)
    +            tp, _ = self._declarations.get(key, (None, None))
    +        #
    +        if tp is None:
    +            if kind == 'struct':
    +                tp = model.StructType(explicit_name, None, None, None)
    +            elif kind == 'union':
    +                tp = model.UnionType(explicit_name, None, None, None)
    +            elif kind == 'enum':
    +                if explicit_name == '__dotdotdot__':
    +                    raise CDefError("Enums cannot be declared with ...")
    +                tp = self._build_enum_type(explicit_name, type.values)
    +            else:
    +                raise AssertionError("kind = %r" % (kind,))
    +            if name is not None:
    +                self._declare(key, tp)
    +        else:
    +            if kind == 'enum' and type.values is not None:
    +                raise NotImplementedError(
    +                    "enum %s: the '{}' declaration should appear on the first "
    +                    "time the enum is mentioned, not later" % explicit_name)
    +        if not tp.forcename:
    +            tp.force_the_name(force_name)
    +        if tp.forcename and '$' in tp.name:
    +            self._declare('anonymous %s' % tp.forcename, tp)
    +        #
    +        self._structnode2type[type] = tp
    +        #
    +        # enums: done here
    +        if kind == 'enum':
    +            return tp
    +        #
    +        # is there a 'type.decls'?  If yes, then this is the place in the
    +        # C sources that declare the fields.  If no, then just return the
    +        # existing type, possibly still incomplete.
    +        if type.decls is None:
    +            return tp
    +        #
    +        if tp.fldnames is not None:
    +            raise CDefError("duplicate declaration of struct %s" % name)
    +        fldnames = []
    +        fldtypes = []
    +        fldbitsize = []
    +        fldquals = []
    +        for decl in type.decls:
    +            if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
    +                    ''.join(decl.type.names) == '__dotdotdot__'):
    +                # XXX pycparser is inconsistent: 'names' should be a list
    +                # of strings, but is sometimes just one string.  Use
    +                # str.join() as a way to cope with both.
    +                self._make_partial(tp, nested)
    +                continue
    +            if decl.bitsize is None:
    +                bitsize = -1
    +            else:
    +                bitsize = self._parse_constant(decl.bitsize)
    +            self._partial_length = False
    +            type, fqual = self._get_type_and_quals(decl.type,
    +                                                   partial_length_ok=True)
    +            if self._partial_length:
    +                self._make_partial(tp, nested)
    +            if isinstance(type, model.StructType) and type.partial:
    +                self._make_partial(tp, nested)
    +            fldnames.append(decl.name or '')
    +            fldtypes.append(type)
    +            fldbitsize.append(bitsize)
    +            fldquals.append(fqual)
    +        tp.fldnames = tuple(fldnames)
    +        tp.fldtypes = tuple(fldtypes)
    +        tp.fldbitsize = tuple(fldbitsize)
    +        tp.fldquals = tuple(fldquals)
    +        if fldbitsize != [-1] * len(fldbitsize):
    +            if isinstance(tp, model.StructType) and tp.partial:
    +                raise NotImplementedError("%s: using both bitfields and '...;'"
    +                                          % (tp,))
    +        tp.packed = self._options.get('packed')
    +        if tp.completed:    # must be re-completed: it is not opaque any more
    +            tp.completed = 0
    +            self._recomplete.append(tp)
    +        return tp
    +
    +    def _make_partial(self, tp, nested):
    +        if not isinstance(tp, model.StructOrUnion):
    +            raise CDefError("%s cannot be partial" % (tp,))
    +        if not tp.has_c_name() and not nested:
+            raise NotImplementedError("%s is partial but has no C name" % (tp,))
    +        tp.partial = True
    +
    +    def _parse_constant(self, exprnode, partial_length_ok=False):
    +        # for now, limited to expressions that are an immediate number
    +        # or positive/negative number
    +        if isinstance(exprnode, pycparser.c_ast.Constant):
    +            s = exprnode.value
    +            if '0' <= s[0] <= '9':
    +                s = s.rstrip('uUlL')
    +                try:
    +                    if s.startswith('0'):
    +                        return int(s, 8)
    +                    else:
    +                        return int(s, 10)
    +                except ValueError:
    +                    if len(s) > 1:
    +                        if s.lower()[0:2] == '0x':
    +                            return int(s, 16)
    +                        elif s.lower()[0:2] == '0b':
    +                            return int(s, 2)
    +                raise CDefError("invalid constant %r" % (s,))
    +            elif s[0] == "'" and s[-1] == "'" and (
    +                    len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
    +                return ord(s[-2])
    +            else:
    +                raise CDefError("invalid constant %r" % (s,))
    +        #
    +        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
    +                exprnode.op == '+'):
    +            return self._parse_constant(exprnode.expr)
    +        #
    +        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
    +                exprnode.op == '-'):
    +            return -self._parse_constant(exprnode.expr)
    +        # load previously defined int constant
    +        if (isinstance(exprnode, pycparser.c_ast.ID) and
    +                exprnode.name in self._int_constants):
    +            return self._int_constants[exprnode.name]
    +        #
    +        if (isinstance(exprnode, pycparser.c_ast.ID) and
    +                    exprnode.name == '__dotdotdotarray__'):
    +            if partial_length_ok:
    +                self._partial_length = True
    +                return '...'
    +            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
    +                           "the actual array length in this context"
    +                           % exprnode.coord.line)
    +        #
    +        if isinstance(exprnode, pycparser.c_ast.BinaryOp):
    +            left = self._parse_constant(exprnode.left)
    +            right = self._parse_constant(exprnode.right)
    +            if exprnode.op == '+':
    +                return left + right
    +            elif exprnode.op == '-':
    +                return left - right
    +            elif exprnode.op == '*':
    +                return left * right
    +            elif exprnode.op == '/':
    +                return self._c_div(left, right)
    +            elif exprnode.op == '%':
    +                return left - self._c_div(left, right) * right
    +            elif exprnode.op == '<<':
    +                return left << right
    +            elif exprnode.op == '>>':
    +                return left >> right
    +            elif exprnode.op == '&':
    +                return left & right
    +            elif exprnode.op == '|':
    +                return left | right
    +            elif exprnode.op == '^':
    +                return left ^ right
    +        #
    +        raise FFIError(":%d: unsupported expression: expected a "
    +                       "simple numeric constant" % exprnode.coord.line)
    +
    +    def _c_div(self, a, b):
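+        # C's '/' truncates toward zero, while Python's '//' floors:
+        # compensate when the signs differ and there is a remainder,
+        # e.g. _c_div(-7, 2) == -3 whereas -7 // 2 == -4.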
    +        result = a // b
    +        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
    +            result += 1
    +        return result
    +
    +    def _build_enum_type(self, explicit_name, decls):
    +        if decls is not None:
    +            partial = False
    +            enumerators = []
    +            enumvalues = []
    +            nextenumvalue = 0
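+            # e.g. "enum e { A, B=5, C, ... };" yields enumerators
+            # ('A', 'B', 'C'), values (0, 5, 6) and partial=True (the
+            # '...' was renamed to '__dotdotdot0__' during preprocessing)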
    +            for enum in decls.enumerators:
    +                if _r_enum_dotdotdot.match(enum.name):
    +                    partial = True
    +                    continue
    +                if enum.value is not None:
    +                    nextenumvalue = self._parse_constant(enum.value)
    +                enumerators.append(enum.name)
    +                enumvalues.append(nextenumvalue)
    +                self._add_constants(enum.name, nextenumvalue)
    +                nextenumvalue += 1
    +            enumerators = tuple(enumerators)
    +            enumvalues = tuple(enumvalues)
    +            tp = model.EnumType(explicit_name, enumerators, enumvalues)
    +            tp.partial = partial
    +        else:   # opaque enum
    +            tp = model.EnumType(explicit_name, (), ())
    +        return tp
    +
    +    def include(self, other):
    +        for name, (tp, quals) in other._declarations.items():
    +            if name.startswith('anonymous $enum_$'):
    +                continue   # fix for test_anonymous_enum_include
    +            kind = name.split(' ', 1)[0]
    +            if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
    +                self._declare(name, tp, included=True, quals=quals)
    +        for k, v in other._int_constants.items():
    +            self._add_constants(k, v)
    +
    +    def _get_unknown_type(self, decl):
    +        typenames = decl.type.type.names
    +        if typenames == ['__dotdotdot__']:
    +            return model.unknown_type(decl.name)
    +
    +        if typenames == ['__dotdotdotint__']:
    +            if self._uses_new_feature is None:
    +                self._uses_new_feature = "'typedef int... %s'" % decl.name
    +            return model.UnknownIntegerType(decl.name)
    +
    +        if typenames == ['__dotdotdotfloat__']:
    +            # note: not for 'long double' so far
    +            if self._uses_new_feature is None:
    +                self._uses_new_feature = "'typedef float... %s'" % decl.name
    +            return model.UnknownFloatType(decl.name)
    +
    +        raise FFIError(':%d: unsupported usage of "..." in typedef'
    +                       % decl.coord.line)
    +
    +    def _get_unknown_ptr_type(self, decl):
    +        if decl.type.type.type.names == ['__dotdotdot__']:
    +            return model.unknown_ptr_type(decl.name)
    +        raise FFIError(':%d: unsupported usage of "..." in typedef'
    +                       % decl.coord.line)
    diff --git a/server/www/packages/packages-windows/x86/cffi/error.py b/server/www/packages/packages-windows/x86/cffi/error.py
    new file mode 100644
    index 0000000..0f8f406
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/error.py
    @@ -0,0 +1,31 @@
    +
    +class FFIError(Exception):
    +    __module__ = 'cffi'
    +
    +class CDefError(Exception):
    +    __module__ = 'cffi'
    +    def __str__(self):
    +        try:
    +            current_decl = self.args[1]
    +            filename = current_decl.coord.file
    +            linenum = current_decl.coord.line
    +            prefix = '%s:%d: ' % (filename, linenum)
    +        except (AttributeError, TypeError, IndexError):
    +            prefix = ''
    +        return '%s%s' % (prefix, self.args[0])
    +
    +class VerificationError(Exception):
    +    """ An error raised when verification fails
    +    """
    +    __module__ = 'cffi'
    +
    +class VerificationMissing(Exception):
    +    """ An error raised when incomplete structures are passed into
    +    cdef, but no verification has been done
    +    """
    +    __module__ = 'cffi'
    +
    +class PkgConfigError(Exception):
    +    """ An error raised for missing modules in pkg-config
    +    """
    +    __module__ = 'cffi'
    diff --git a/server/www/packages/packages-windows/x86/cffi/ffiplatform.py b/server/www/packages/packages-windows/x86/cffi/ffiplatform.py
    new file mode 100644
    index 0000000..0feab6e
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/ffiplatform.py
    @@ -0,0 +1,127 @@
    +import sys, os
    +from .error import VerificationError
    +
    +
    +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
    +                      'extra_objects', 'depends']
    +
    +def get_extension(srcfilename, modname, sources=(), **kwds):
    +    _hack_at_distutils()
    +    from distutils.core import Extension
    +    allsources = [srcfilename]
    +    for src in sources:
    +        allsources.append(os.path.normpath(src))
    +    return Extension(name=modname, sources=allsources, **kwds)
    +
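+# A minimal usage sketch with hypothetical file/module names: given a
+# generated '_foo.c' implementing the extension module '_foo',
+#     ext = get_extension('_foo.c', '_foo')
+#     sofile = compile('/tmp/build', ext)  # absolute path of the built module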
    +def compile(tmpdir, ext, compiler_verbose=0, debug=None):
    +    """Compile a C extension module using distutils."""
    +
    +    _hack_at_distutils()
    +    saved_environ = os.environ.copy()
    +    try:
    +        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
    +        outputfilename = os.path.abspath(outputfilename)
    +    finally:
+        # workaround for a distutils bug where some env vars can
+        # become longer and longer every time distutils is used
    +        for key, value in saved_environ.items():
    +            if os.environ.get(key) != value:
    +                os.environ[key] = value
    +    return outputfilename
    +
    +def _build(tmpdir, ext, compiler_verbose=0, debug=None):
    +    # XXX compact but horrible :-(
    +    from distutils.core import Distribution
    +    import distutils.errors, distutils.log
    +    #
    +    dist = Distribution({'ext_modules': [ext]})
    +    dist.parse_config_files()
    +    options = dist.get_option_dict('build_ext')
    +    if debug is None:
    +        debug = sys.flags.debug
    +    options['debug'] = ('ffiplatform', debug)
    +    options['force'] = ('ffiplatform', True)
    +    options['build_lib'] = ('ffiplatform', tmpdir)
    +    options['build_temp'] = ('ffiplatform', tmpdir)
    +    #
    +    try:
    +        old_level = distutils.log.set_threshold(0) or 0
    +        try:
    +            distutils.log.set_verbosity(compiler_verbose)
    +            dist.run_command('build_ext')
    +            cmd_obj = dist.get_command_obj('build_ext')
    +            [soname] = cmd_obj.get_outputs()
    +        finally:
    +            distutils.log.set_threshold(old_level)
    +    except (distutils.errors.CompileError,
    +            distutils.errors.LinkError) as e:
    +        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    +    #
    +    return soname
    +
    +try:
    +    from os.path import samefile
    +except ImportError:
    +    def samefile(f1, f2):
    +        return os.path.abspath(f1) == os.path.abspath(f2)
    +
    +def maybe_relative_path(path):
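+    # Walk upward from 'path'; if some ancestor directory turns out to be
+    # the current directory, return the remainder relative to it, e.g.
+    # with cwd '/home/user', '/home/user/proj/x.c' -> 'proj/x.c'.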
    +    if not os.path.isabs(path):
    +        return path      # already relative
    +    dir = path
    +    names = []
    +    while True:
    +        prevdir = dir
    +        dir, name = os.path.split(prevdir)
    +        if dir == prevdir or not dir:
    +            return path     # failed to make it relative
    +        names.append(name)
    +        try:
    +            if samefile(dir, os.curdir):
    +                names.reverse()
    +                return os.path.join(*names)
    +        except OSError:
    +            pass
    +
    +# ____________________________________________________________
    +
    +try:
    +    int_or_long = (int, long)
    +    import cStringIO
    +except NameError:
    +    int_or_long = int      # Python 3
    +    import io as cStringIO
    +
    +def _flatten(x, f):
    +    if isinstance(x, str):
    +        f.write('%ds%s' % (len(x), x))
    +    elif isinstance(x, dict):
    +        keys = sorted(x.keys())
    +        f.write('%dd' % len(keys))
    +        for key in keys:
    +            _flatten(key, f)
    +            _flatten(x[key], f)
    +    elif isinstance(x, (list, tuple)):
    +        f.write('%dl' % len(x))
    +        for value in x:
    +            _flatten(value, f)
    +    elif isinstance(x, int_or_long):
    +        f.write('%di' % (x,))
    +    else:
    +        raise TypeError(
    +            "the keywords to verify() contains unsupported object %r" % (x,))
    +
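+# Serialize the keyword arguments into a stable string key, e.g.
+# flatten({'a': [1, 'xy']}) == '1d1sa2l1i2sxy'.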
    +def flatten(x):
    +    f = cStringIO.StringIO()
    +    _flatten(x, f)
    +    return f.getvalue()
    +
    +def _hack_at_distutils():
    +    # Windows-only workaround for some configurations: see
    +    # https://bugs.python.org/issue23246 (Python 2.7 with 
    +    # a specific MS compiler suite download)
    +    if sys.platform == "win32":
    +        try:
    +            import setuptools    # for side-effects, patches distutils
    +        except ImportError:
    +            pass
    diff --git a/server/www/packages/packages-windows/x86/cffi/lock.py b/server/www/packages/packages-windows/x86/cffi/lock.py
    new file mode 100644
    index 0000000..2e40ed8
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/lock.py
    @@ -0,0 +1,30 @@
    +import sys
    +
    +if sys.version_info < (3,):
    +    try:
    +        from thread import allocate_lock
    +    except ImportError:
    +        from dummy_thread import allocate_lock
    +else:
    +    try:
    +        from _thread import allocate_lock
    +    except ImportError:
    +        from _dummy_thread import allocate_lock
    +
    +
    +##import sys
    +##l1 = allocate_lock
    +
    +##class allocate_lock(object):
    +##    def __init__(self):
    +##        self._real = l1()
    +##    def __enter__(self):
    +##        for i in range(4, 0, -1):
    +##            print sys._getframe(i).f_code
    +##        print
    +##        return self._real.__enter__()
    +##    def __exit__(self, *args):
    +##        return self._real.__exit__(*args)
    +##    def acquire(self, f):
    +##        assert f is False
    +##        return self._real.acquire(f)
    diff --git a/server/www/packages/packages-windows/x86/cffi/model.py b/server/www/packages/packages-windows/x86/cffi/model.py
    new file mode 100644
    index 0000000..7ee92a0
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/model.py
    @@ -0,0 +1,614 @@
    +import types
    +import weakref
    +
    +from .lock import allocate_lock
    +from .error import CDefError, VerificationError, VerificationMissing
    +
    +# type qualifiers
    +Q_CONST    = 0x01
    +Q_RESTRICT = 0x02
    +Q_VOLATILE = 0x04
    +
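+# Prepend the textual qualifiers selected in 'quals' to a declarator
+# fragment, e.g. qualify(Q_CONST | Q_VOLATILE, '*&') == ' volatile const *&'.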
    +def qualify(quals, replace_with):
    +    if quals & Q_CONST:
    +        replace_with = ' const ' + replace_with.lstrip()
    +    if quals & Q_VOLATILE:
    +        replace_with = ' volatile ' + replace_with.lstrip()
    +    if quals & Q_RESTRICT:
    +        # It seems that __restrict is supported by gcc and msvc.
    +        # If you hit some different compiler, add a #define in
    +        # _cffi_include.h for it (and in its copies, documented there)
    +        replace_with = ' __restrict ' + replace_with.lstrip()
    +    return replace_with
    +
    +
    +class BaseTypeByIdentity(object):
    +    is_array_type = False
    +    is_raw_function = False
    +
    +    def get_c_name(self, replace_with='', context='a C file', quals=0):
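+        # 'c_name_with_marker' contains exactly one '&' marking where a
+        # declarator name would go: e.g. for an 'int *', the marker string
+        # is 'int *&', so get_c_name('x') -> 'int * x' and get_c_name() -> 'int *'.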
    +        result = self.c_name_with_marker
    +        assert result.count('&') == 1
    +        # some logic duplication with ffi.getctype()... :-(
    +        replace_with = replace_with.strip()
    +        if replace_with:
    +            if replace_with.startswith('*') and '&[' in result:
    +                replace_with = '(%s)' % replace_with
    +            elif not replace_with[0] in '[(':
    +                replace_with = ' ' + replace_with
    +        replace_with = qualify(quals, replace_with)
    +        result = result.replace('&', replace_with)
    +        if '$' in result:
    +            raise VerificationError(
    +                "cannot generate '%s' in %s: unknown type name"
    +                % (self._get_c_name(), context))
    +        return result
    +
    +    def _get_c_name(self):
    +        return self.c_name_with_marker.replace('&', '')
    +
    +    def has_c_name(self):
    +        return '$' not in self._get_c_name()
    +
    +    def is_integer_type(self):
    +        return False
    +
    +    def get_cached_btype(self, ffi, finishlist, can_delay=False):
    +        try:
    +            BType = ffi._cached_btypes[self]
    +        except KeyError:
    +            BType = self.build_backend_type(ffi, finishlist)
    +            BType2 = ffi._cached_btypes.setdefault(self, BType)
    +            assert BType2 is BType
    +        return BType
    +
    +    def __repr__(self):
    +        return '<%s>' % (self._get_c_name(),)
    +
    +    def _get_items(self):
    +        return [(name, getattr(self, name)) for name in self._attrs_]
    +
    +
    +class BaseType(BaseTypeByIdentity):
    +
    +    def __eq__(self, other):
    +        return (self.__class__ == other.__class__ and
    +                self._get_items() == other._get_items())
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __hash__(self):
    +        return hash((self.__class__, tuple(self._get_items())))
    +
    +
    +class VoidType(BaseType):
    +    _attrs_ = ()
    +
    +    def __init__(self):
    +        self.c_name_with_marker = 'void&'
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        return global_cache(self, ffi, 'new_void_type')
    +
    +void_type = VoidType()
    +
    +
    +class BasePrimitiveType(BaseType):
    +    def is_complex_type(self):
    +        return False
    +
    +
    +class PrimitiveType(BasePrimitiveType):
    +    _attrs_ = ('name',)
    +
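+    # maps each primitive type name to a kind code: 'c' = char-like,
+    # 'i' = integer, 'f' = float, 'j' = complex (see is_*_type() below)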
    +    ALL_PRIMITIVE_TYPES = {
    +        'char':               'c',
    +        'short':              'i',
    +        'int':                'i',
    +        'long':               'i',
    +        'long long':          'i',
    +        'signed char':        'i',
    +        'unsigned char':      'i',
    +        'unsigned short':     'i',
    +        'unsigned int':       'i',
    +        'unsigned long':      'i',
    +        'unsigned long long': 'i',
    +        'float':              'f',
    +        'double':             'f',
    +        'long double':        'f',
    +        'float _Complex':     'j',
    +        'double _Complex':    'j',
    +        '_Bool':              'i',
    +        # the following types are not primitive in the C sense
    +        'wchar_t':            'c',
    +        'char16_t':           'c',
    +        'char32_t':           'c',
    +        'int8_t':             'i',
    +        'uint8_t':            'i',
    +        'int16_t':            'i',
    +        'uint16_t':           'i',
    +        'int32_t':            'i',
    +        'uint32_t':           'i',
    +        'int64_t':            'i',
    +        'uint64_t':           'i',
    +        'int_least8_t':       'i',
    +        'uint_least8_t':      'i',
    +        'int_least16_t':      'i',
    +        'uint_least16_t':     'i',
    +        'int_least32_t':      'i',
    +        'uint_least32_t':     'i',
    +        'int_least64_t':      'i',
    +        'uint_least64_t':     'i',
    +        'int_fast8_t':        'i',
    +        'uint_fast8_t':       'i',
    +        'int_fast16_t':       'i',
    +        'uint_fast16_t':      'i',
    +        'int_fast32_t':       'i',
    +        'uint_fast32_t':      'i',
    +        'int_fast64_t':       'i',
    +        'uint_fast64_t':      'i',
    +        'intptr_t':           'i',
    +        'uintptr_t':          'i',
    +        'intmax_t':           'i',
    +        'uintmax_t':          'i',
    +        'ptrdiff_t':          'i',
    +        'size_t':             'i',
    +        'ssize_t':            'i',
    +        }
    +
    +    def __init__(self, name):
    +        assert name in self.ALL_PRIMITIVE_TYPES
    +        self.name = name
    +        self.c_name_with_marker = name + '&'
    +
    +    def is_char_type(self):
    +        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    +    def is_integer_type(self):
    +        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    +    def is_float_type(self):
    +        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
    +    def is_complex_type(self):
    +        return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        return global_cache(self, ffi, 'new_primitive_type', self.name)
    +
    +
    +class UnknownIntegerType(BasePrimitiveType):
    +    _attrs_ = ('name',)
    +
    +    def __init__(self, name):
    +        self.name = name
    +        self.c_name_with_marker = name + '&'
    +
    +    def is_integer_type(self):
    +        return True
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        raise NotImplementedError("integer type '%s' can only be used after "
    +                                  "compilation" % self.name)
    +
    +class UnknownFloatType(BasePrimitiveType):
    +    _attrs_ = ('name', )
    +
    +    def __init__(self, name):
    +        self.name = name
    +        self.c_name_with_marker = name + '&'
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        raise NotImplementedError("float type '%s' can only be used after "
    +                                  "compilation" % self.name)
    +
    +
    +class BaseFunctionType(BaseType):
    +    _attrs_ = ('args', 'result', 'ellipsis', 'abi')
    +
    +    def __init__(self, args, result, ellipsis, abi=None):
    +        self.args = args
    +        self.result = result
    +        self.ellipsis = ellipsis
    +        self.abi = abi
    +        #
    +        reprargs = [arg._get_c_name() for arg in self.args]
    +        if self.ellipsis:
    +            reprargs.append('...')
    +        reprargs = reprargs or ['void']
    +        replace_with = self._base_pattern % (', '.join(reprargs),)
    +        if abi is not None:
    +            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
    +        self.c_name_with_marker = (
    +            self.result.c_name_with_marker.replace('&', replace_with))
    +
    +
    +class RawFunctionType(BaseFunctionType):
    +    # Corresponds to a C type like 'int(int)', which is the C type of
    +    # a function, but not a pointer-to-function.  The backend has no
    +    # notion of such a type; it's used temporarily by parsing.
    +    _base_pattern = '(&)(%s)'
    +    is_raw_function = True
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        raise CDefError("cannot render the type %r: it is a function "
    +                        "type, not a pointer-to-function type" % (self,))
    +
    +    def as_function_pointer(self):
    +        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
    +
    +
    +class FunctionPtrType(BaseFunctionType):
    +    _base_pattern = '(*&)(%s)'
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        result = self.result.get_cached_btype(ffi, finishlist)
    +        args = []
    +        for tp in self.args:
    +            args.append(tp.get_cached_btype(ffi, finishlist))
    +        abi_args = ()
    +        if self.abi == "__stdcall":
    +            if not self.ellipsis:    # __stdcall ignored for variadic funcs
    +                try:
    +                    abi_args = (ffi._backend.FFI_STDCALL,)
    +                except AttributeError:
    +                    pass
    +        return global_cache(self, ffi, 'new_function_type',
    +                            tuple(args), result, self.ellipsis, *abi_args)
    +
    +    def as_raw_function(self):
    +        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
    +
    +
    +class PointerType(BaseType):
    +    _attrs_ = ('totype', 'quals')
    +
    +    def __init__(self, totype, quals=0):
    +        self.totype = totype
    +        self.quals = quals
    +        extra = qualify(quals, " *&")
    +        if totype.is_array_type:
    +            extra = "(%s)" % (extra.lstrip(),)
    +        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
    +        return global_cache(self, ffi, 'new_pointer_type', BItem)
    +
    +voidp_type = PointerType(void_type)
    +
    +def ConstPointerType(totype):
    +    return PointerType(totype, Q_CONST)
    +
    +const_voidp_type = ConstPointerType(void_type)
    +
    +
    +class NamedPointerType(PointerType):
    +    _attrs_ = ('totype', 'name')
    +
    +    def __init__(self, totype, name, quals=0):
    +        PointerType.__init__(self, totype, quals)
    +        self.name = name
    +        self.c_name_with_marker = name + '&'
    +
    +
    +class ArrayType(BaseType):
    +    _attrs_ = ('item', 'length')
    +    is_array_type = True
    +
    +    def __init__(self, item, length):
    +        self.item = item
    +        self.length = length
    +        #
    +        if length is None:
    +            brackets = '&[]'
    +        elif length == '...':
    +            brackets = '&[/*...*/]'
    +        else:
    +            brackets = '&[%s]' % length
    +        self.c_name_with_marker = (
    +            self.item.c_name_with_marker.replace('&', brackets))
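+        # e.g. ArrayType(PrimitiveType('int'), 10) has c_name_with_marker
+        # 'int&[10]', which renders as "int x[10]" via get_c_name('x')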
    +
    +    def resolve_length(self, newlength):
    +        return ArrayType(self.item, newlength)
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        if self.length == '...':
    +            raise CDefError("cannot render the type %r: unknown length" %
    +                            (self,))
    +        self.item.get_cached_btype(ffi, finishlist)   # force the item BType
    +        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
    +        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
    +
    +char_array_type = ArrayType(PrimitiveType('char'), None)
    +
    +
    +class StructOrUnionOrEnum(BaseTypeByIdentity):
    +    _attrs_ = ('name',)
    +    forcename = None
    +
    +    def build_c_name_with_marker(self):
    +        name = self.forcename or '%s %s' % (self.kind, self.name)
    +        self.c_name_with_marker = name + '&'
    +
    +    def force_the_name(self, forcename):
    +        self.forcename = forcename
    +        self.build_c_name_with_marker()
    +
    +    def get_official_name(self):
    +        assert self.c_name_with_marker.endswith('&')
    +        return self.c_name_with_marker[:-1]
    +
    +
    +class StructOrUnion(StructOrUnionOrEnum):
    +    fixedlayout = None
    +    completed = 0
    +    partial = False
    +    packed = 0
    +
    +    def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
    +        self.name = name
    +        self.fldnames = fldnames
    +        self.fldtypes = fldtypes
    +        self.fldbitsize = fldbitsize
    +        self.fldquals = fldquals
    +        self.build_c_name_with_marker()
    +
    +    def anonymous_struct_fields(self):
    +        if self.fldtypes is not None:
    +            for name, type in zip(self.fldnames, self.fldtypes):
    +                if name == '' and isinstance(type, StructOrUnion):
    +                    yield type
    +
    +    def enumfields(self, expand_anonymous_struct_union=True):
    +        fldquals = self.fldquals
    +        if fldquals is None:
    +            fldquals = (0,) * len(self.fldnames)
    +        for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
    +                                              self.fldbitsize, fldquals):
    +            if (name == '' and isinstance(type, StructOrUnion)
    +                    and expand_anonymous_struct_union):
    +                # nested anonymous struct/union
    +                for result in type.enumfields():
    +                    yield result
    +            else:
    +                yield (name, type, bitsize, quals)
    +
    +    def force_flatten(self):
    +        # force the struct or union to have a declaration that lists
    +        # directly all fields returned by enumfields(), flattening
    +        # nested anonymous structs/unions.
    +        names = []
    +        types = []
    +        bitsizes = []
    +        fldquals = []
    +        for name, type, bitsize, quals in self.enumfields():
    +            names.append(name)
    +            types.append(type)
    +            bitsizes.append(bitsize)
    +            fldquals.append(quals)
    +        self.fldnames = tuple(names)
    +        self.fldtypes = tuple(types)
    +        self.fldbitsize = tuple(bitsizes)
    +        self.fldquals = tuple(fldquals)
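+
+    # Illustrative sketch (not part of upstream cffi): for a declaration like
+    #     struct s { int a; union { int b; float c; }; };
+    # the anonymous union is stored under the field name '', so enumfields()
+    # recurses into it and yields the fields 'a', 'b' and 'c' directly, while
+    # force_flatten() rewrites fldnames/fldtypes/fldbitsize/fldquals to that
+    # flattened view.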
    +
    +    def get_cached_btype(self, ffi, finishlist, can_delay=False):
    +        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
    +                                                     can_delay)
    +        if not can_delay:
    +            self.finish_backend_type(ffi, finishlist)
    +        return BType
    +
    +    def finish_backend_type(self, ffi, finishlist):
    +        if self.completed:
    +            if self.completed != 2:
    +                raise NotImplementedError("recursive structure declaration "
    +                                          "for '%s'" % (self.name,))
    +            return
    +        BType = ffi._cached_btypes[self]
    +        #
    +        self.completed = 1
    +        #
    +        if self.fldtypes is None:
    +            pass    # not completing it: it's an opaque struct
    +            #
    +        elif self.fixedlayout is None:
    +            fldtypes = [tp.get_cached_btype(ffi, finishlist)
    +                        for tp in self.fldtypes]
    +            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
    +            extra_flags = ()
    +            if self.packed:
    +                if self.packed == 1:
    +                    extra_flags = (8,)    # SF_PACKED
    +                else:
    +                    extra_flags = (0, self.packed)
    +            ffi._backend.complete_struct_or_union(BType, lst, self,
    +                                                  -1, -1, *extra_flags)
    +            #
    +        else:
    +            fldtypes = []
    +            fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
    +            for i in range(len(self.fldnames)):
    +                fsize = fieldsize[i]
    +                ftype = self.fldtypes[i]
    +                #
    +                if isinstance(ftype, ArrayType) and ftype.length == '...':
    +                    # fix the length to match the total size
    +                    BItemType = ftype.item.get_cached_btype(ffi, finishlist)
    +                    nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
    +                    if nrest != 0:
    +                        self._verification_error(
    +                            "field '%s.%s' has a bogus size?" % (
    +                            self.name, self.fldnames[i] or '{}'))
    +                    ftype = ftype.resolve_length(nlen)
    +                    self.fldtypes = (self.fldtypes[:i] + (ftype,) +
    +                                     self.fldtypes[i+1:])
    +                #
    +                BFieldType = ftype.get_cached_btype(ffi, finishlist)
    +                if isinstance(ftype, ArrayType) and ftype.length is None:
    +                    assert fsize == 0
    +                else:
    +                    bitemsize = ffi.sizeof(BFieldType)
    +                    if bitemsize != fsize:
    +                        self._verification_error(
    +                            "field '%s.%s' is declared as %d bytes, but is "
    +                            "really %d bytes" % (self.name,
    +                                                 self.fldnames[i] or '{}',
    +                                                 bitemsize, fsize))
    +                fldtypes.append(BFieldType)
    +            #
    +            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
    +            ffi._backend.complete_struct_or_union(BType, lst, self,
    +                                                  totalsize, totalalignment)
    +        self.completed = 2
    +
    +    def _verification_error(self, msg):
    +        raise VerificationError(msg)
    +
    +    def check_not_partial(self):
    +        if self.partial and self.fixedlayout is None:
    +            raise VerificationMissing(self._get_c_name())
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        self.check_not_partial()
    +        finishlist.append(self)
    +        #
    +        return global_cache(self, ffi, 'new_%s_type' % self.kind,
    +                            self.get_official_name(), key=self)
    +
    +
    +class StructType(StructOrUnion):
    +    kind = 'struct'
    +
    +
    +class UnionType(StructOrUnion):
    +    kind = 'union'
    +
    +
    +class EnumType(StructOrUnionOrEnum):
    +    kind = 'enum'
    +    partial = False
    +    partial_resolved = False
    +
    +    def __init__(self, name, enumerators, enumvalues, baseinttype=None):
    +        self.name = name
    +        self.enumerators = enumerators
    +        self.enumvalues = enumvalues
    +        self.baseinttype = baseinttype
    +        self.build_c_name_with_marker()
    +
    +    def force_the_name(self, forcename):
    +        StructOrUnionOrEnum.force_the_name(self, forcename)
    +        if self.forcename is None:
    +            name = self.get_official_name()
    +            self.forcename = '$' + name.replace(' ', '_')
    +
    +    def check_not_partial(self):
    +        if self.partial and not self.partial_resolved:
    +            raise VerificationMissing(self._get_c_name())
    +
    +    def build_backend_type(self, ffi, finishlist):
    +        self.check_not_partial()
    +        base_btype = self.build_baseinttype(ffi, finishlist)
    +        return global_cache(self, ffi, 'new_enum_type',
    +                            self.get_official_name(),
    +                            self.enumerators, self.enumvalues,
    +                            base_btype, key=self)
    +
    +    def build_baseinttype(self, ffi, finishlist):
    +        if self.baseinttype is not None:
    +            return self.baseinttype.get_cached_btype(ffi, finishlist)
    +        #
    +        if self.enumvalues:
    +            smallest_value = min(self.enumvalues)
    +            largest_value = max(self.enumvalues)
    +        else:
    +            import warnings
    +            try:
+                # XXX!  The goal is to ensure that the warning issued by
+                # warnings.warn() below is not suppressed.  We want to see
+                # it again every time this point is reached.
    +                __warningregistry__.clear()
    +            except NameError:
    +                pass
    +            warnings.warn("%r has no values explicitly defined; "
    +                          "guessing that it is equivalent to 'unsigned int'"
    +                          % self._get_c_name())
    +            smallest_value = largest_value = 0
    +        if smallest_value < 0:   # needs a signed type
    +            sign = 1
    +            candidate1 = PrimitiveType("int")
    +            candidate2 = PrimitiveType("long")
    +        else:
    +            sign = 0
    +            candidate1 = PrimitiveType("unsigned int")
    +            candidate2 = PrimitiveType("unsigned long")
    +        btype1 = candidate1.get_cached_btype(ffi, finishlist)
    +        btype2 = candidate2.get_cached_btype(ffi, finishlist)
    +        size1 = ffi.sizeof(btype1)
    +        size2 = ffi.sizeof(btype2)
    +        if (smallest_value >= ((-1) << (8*size1-1)) and
    +            largest_value < (1 << (8*size1-sign))):
    +            return btype1
    +        if (smallest_value >= ((-1) << (8*size2-1)) and
    +            largest_value < (1 << (8*size2-sign))):
    +            return btype2
    +        raise CDefError("%s values don't all fit into either 'long' "
    +                        "or 'unsigned long'" % self._get_c_name())
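+
+    # Illustrative sketch (not part of upstream cffi): on a platform where
+    # 'int' is 4 bytes, an enum with values {-1, 200} satisfies
+    #     -1 >= -(1 << 31)  and  200 < (1 << 31)
+    # so build_baseinttype() returns the btype for 'int'; values that fit in
+    # neither 'long' nor 'unsigned long' raise CDefError instead.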
    +
    +def unknown_type(name, structname=None):
    +    if structname is None:
    +        structname = '$%s' % name
    +    tp = StructType(structname, None, None, None)
    +    tp.force_the_name(name)
    +    tp.origin = "unknown_type"
    +    return tp
    +
    +def unknown_ptr_type(name, structname=None):
    +    if structname is None:
    +        structname = '$$%s' % name
    +    tp = StructType(structname, None, None, None)
    +    return NamedPointerType(tp, name)
    +
    +
    +global_lock = allocate_lock()
    +_typecache_cffi_backend = weakref.WeakValueDictionary()
    +
    +def get_typecache(backend):
    +    # returns _typecache_cffi_backend if backend is the _cffi_backend
    +    # module, or type(backend).__typecache if backend is an instance of
    +    # CTypesBackend (or some FakeBackend class during tests)
    +    if isinstance(backend, types.ModuleType):
    +        return _typecache_cffi_backend
    +    with global_lock:
    +        if not hasattr(type(backend), '__typecache'):
    +            type(backend).__typecache = weakref.WeakValueDictionary()
    +        return type(backend).__typecache
    +
    +def global_cache(srctype, ffi, funcname, *args, **kwds):
    +    key = kwds.pop('key', (funcname, args))
    +    assert not kwds
    +    try:
    +        return ffi._typecache[key]
    +    except KeyError:
    +        pass
    +    try:
    +        res = getattr(ffi._backend, funcname)(*args)
    +    except NotImplementedError as e:
    +        raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
    +    # note that setdefault() on WeakValueDictionary is not atomic
    +    # and contains a rare bug (http://bugs.python.org/issue19542);
    +    # we have to use a lock and do it ourselves
    +    cache = ffi._typecache
    +    with global_lock:
    +        res1 = cache.get(key)
    +        if res1 is None:
    +            cache[key] = res
    +            return res
    +        else:
    +            return res1
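+
+# Illustrative sketch (not part of upstream cffi): the default cache key is
+# (funcname, args), so two calls such as
+#     global_cache(tp, ffi, 'new_pointer_type', BItem)
+# with the same BItem return the very same backend object from ffi._typecache,
+# whichever model type requested it.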
    +
    +def pointer_cache(ffi, BType):
    +    return global_cache('?', ffi, 'new_pointer_type', BType)
    +
    +def attach_exception_info(e, name):
    +    if e.args and type(e.args[0]) is str:
    +        e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
    diff --git a/server/www/packages/packages-windows/x86/cffi/parse_c_type.h b/server/www/packages/packages-windows/x86/cffi/parse_c_type.h
    new file mode 100644
    index 0000000..ea1aa24
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/parse_c_type.h
    @@ -0,0 +1,181 @@
    +
    +/* This part is from file 'cffi/parse_c_type.h'.  It is copied at the
    +   beginning of C sources generated by CFFI's ffi.set_source(). */
    +
    +typedef void *_cffi_opcode_t;
    +
    +#define _CFFI_OP(opcode, arg)   (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
    +#define _CFFI_GETOP(cffi_opcode)    ((unsigned char)(uintptr_t)cffi_opcode)
    +#define _CFFI_GETARG(cffi_opcode)   (((intptr_t)cffi_opcode) >> 8)
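+
+/* Illustrative sketch (not part of upstream cffi): the low byte of an opcode
+   holds the operation and the upper bits hold its argument, e.g.
+       _CFFI_OP(_CFFI_OP_POINTER, 42)   ==>  (void *)((42 << 8) | 3)
+   so that _CFFI_GETOP() recovers 3 and _CFFI_GETARG() recovers 42. */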
    +
    +#define _CFFI_OP_PRIMITIVE       1
    +#define _CFFI_OP_POINTER         3
    +#define _CFFI_OP_ARRAY           5
    +#define _CFFI_OP_OPEN_ARRAY      7
    +#define _CFFI_OP_STRUCT_UNION    9
    +#define _CFFI_OP_ENUM           11
    +#define _CFFI_OP_FUNCTION       13
    +#define _CFFI_OP_FUNCTION_END   15
    +#define _CFFI_OP_NOOP           17
    +#define _CFFI_OP_BITFIELD       19
    +#define _CFFI_OP_TYPENAME       21
    +#define _CFFI_OP_CPYTHON_BLTN_V 23   // varargs
    +#define _CFFI_OP_CPYTHON_BLTN_N 25   // noargs
    +#define _CFFI_OP_CPYTHON_BLTN_O 27   // O  (i.e. a single arg)
    +#define _CFFI_OP_CONSTANT       29
    +#define _CFFI_OP_CONSTANT_INT   31
    +#define _CFFI_OP_GLOBAL_VAR     33
    +#define _CFFI_OP_DLOPEN_FUNC    35
    +#define _CFFI_OP_DLOPEN_CONST   37
    +#define _CFFI_OP_GLOBAL_VAR_F   39
    +#define _CFFI_OP_EXTERN_PYTHON  41
    +
    +#define _CFFI_PRIM_VOID          0
    +#define _CFFI_PRIM_BOOL          1
    +#define _CFFI_PRIM_CHAR          2
    +#define _CFFI_PRIM_SCHAR         3
    +#define _CFFI_PRIM_UCHAR         4
    +#define _CFFI_PRIM_SHORT         5
    +#define _CFFI_PRIM_USHORT        6
    +#define _CFFI_PRIM_INT           7
    +#define _CFFI_PRIM_UINT          8
    +#define _CFFI_PRIM_LONG          9
    +#define _CFFI_PRIM_ULONG        10
    +#define _CFFI_PRIM_LONGLONG     11
    +#define _CFFI_PRIM_ULONGLONG    12
    +#define _CFFI_PRIM_FLOAT        13
    +#define _CFFI_PRIM_DOUBLE       14
    +#define _CFFI_PRIM_LONGDOUBLE   15
    +
    +#define _CFFI_PRIM_WCHAR        16
    +#define _CFFI_PRIM_INT8         17
    +#define _CFFI_PRIM_UINT8        18
    +#define _CFFI_PRIM_INT16        19
    +#define _CFFI_PRIM_UINT16       20
    +#define _CFFI_PRIM_INT32        21
    +#define _CFFI_PRIM_UINT32       22
    +#define _CFFI_PRIM_INT64        23
    +#define _CFFI_PRIM_UINT64       24
    +#define _CFFI_PRIM_INTPTR       25
    +#define _CFFI_PRIM_UINTPTR      26
    +#define _CFFI_PRIM_PTRDIFF      27
    +#define _CFFI_PRIM_SIZE         28
    +#define _CFFI_PRIM_SSIZE        29
    +#define _CFFI_PRIM_INT_LEAST8   30
    +#define _CFFI_PRIM_UINT_LEAST8  31
    +#define _CFFI_PRIM_INT_LEAST16  32
    +#define _CFFI_PRIM_UINT_LEAST16 33
    +#define _CFFI_PRIM_INT_LEAST32  34
    +#define _CFFI_PRIM_UINT_LEAST32 35
    +#define _CFFI_PRIM_INT_LEAST64  36
    +#define _CFFI_PRIM_UINT_LEAST64 37
    +#define _CFFI_PRIM_INT_FAST8    38
    +#define _CFFI_PRIM_UINT_FAST8   39
    +#define _CFFI_PRIM_INT_FAST16   40
    +#define _CFFI_PRIM_UINT_FAST16  41
    +#define _CFFI_PRIM_INT_FAST32   42
    +#define _CFFI_PRIM_UINT_FAST32  43
    +#define _CFFI_PRIM_INT_FAST64   44
    +#define _CFFI_PRIM_UINT_FAST64  45
    +#define _CFFI_PRIM_INTMAX       46
    +#define _CFFI_PRIM_UINTMAX      47
    +#define _CFFI_PRIM_FLOATCOMPLEX 48
    +#define _CFFI_PRIM_DOUBLECOMPLEX 49
    +#define _CFFI_PRIM_CHAR16       50
    +#define _CFFI_PRIM_CHAR32       51
    +
    +#define _CFFI__NUM_PRIM         52
    +#define _CFFI__UNKNOWN_PRIM           (-1)
    +#define _CFFI__UNKNOWN_FLOAT_PRIM     (-2)
    +#define _CFFI__UNKNOWN_LONG_DOUBLE    (-3)
    +
    +#define _CFFI__IO_FILE_STRUCT         (-1)
    +
    +
    +struct _cffi_global_s {
    +    const char *name;
    +    void *address;
    +    _cffi_opcode_t type_op;
    +    void *size_or_direct_fn;  // OP_GLOBAL_VAR: size, or 0 if unknown
    +                              // OP_CPYTHON_BLTN_*: addr of direct function
    +};
    +
    +struct _cffi_getconst_s {
    +    unsigned long long value;
    +    const struct _cffi_type_context_s *ctx;
    +    int gindex;
    +};
    +
    +struct _cffi_struct_union_s {
    +    const char *name;
    +    int type_index;          // -> _cffi_types, on a OP_STRUCT_UNION
    +    int flags;               // _CFFI_F_* flags below
    +    size_t size;
    +    int alignment;
    +    int first_field_index;   // -> _cffi_fields array
    +    int num_fields;
    +};
    +#define _CFFI_F_UNION         0x01   // is a union, not a struct
    +#define _CFFI_F_CHECK_FIELDS  0x02   // complain if fields are not in the
    +                                     // "standard layout" or if some are missing
    +#define _CFFI_F_PACKED        0x04   // for CHECK_FIELDS, assume a packed struct
    +#define _CFFI_F_EXTERNAL      0x08   // in some other ffi.include()
    +#define _CFFI_F_OPAQUE        0x10   // opaque
    +
    +struct _cffi_field_s {
    +    const char *name;
    +    size_t field_offset;
    +    size_t field_size;
    +    _cffi_opcode_t field_type_op;
    +};
    +
    +struct _cffi_enum_s {
    +    const char *name;
    +    int type_index;          // -> _cffi_types, on a OP_ENUM
    +    int type_prim;           // _CFFI_PRIM_xxx
    +    const char *enumerators; // comma-delimited string
    +};
    +
    +struct _cffi_typename_s {
    +    const char *name;
    +    int type_index;   /* if opaque, points to a possibly artificial
    +                         OP_STRUCT which is itself opaque */
    +};
    +
    +struct _cffi_type_context_s {
    +    _cffi_opcode_t *types;
    +    const struct _cffi_global_s *globals;
    +    const struct _cffi_field_s *fields;
    +    const struct _cffi_struct_union_s *struct_unions;
    +    const struct _cffi_enum_s *enums;
    +    const struct _cffi_typename_s *typenames;
    +    int num_globals;
    +    int num_struct_unions;
    +    int num_enums;
    +    int num_typenames;
    +    const char *const *includes;
    +    int num_types;
    +    int flags;      /* future extension */
    +};
    +
    +struct _cffi_parse_info_s {
    +    const struct _cffi_type_context_s *ctx;
    +    _cffi_opcode_t *output;
    +    unsigned int output_size;
    +    size_t error_location;
    +    const char *error_message;
    +};
    +
    +struct _cffi_externpy_s {
    +    const char *name;
    +    size_t size_of_result;
    +    void *reserved1, *reserved2;
    +};
    +
    +#ifdef _CFFI_INTERNAL
    +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
    +static int search_in_globals(const struct _cffi_type_context_s *ctx,
    +                             const char *search, size_t search_len);
    +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
    +                                   const char *search, size_t search_len);
    +#endif
    diff --git a/server/www/packages/packages-windows/x86/cffi/pkgconfig.py b/server/www/packages/packages-windows/x86/cffi/pkgconfig.py
    new file mode 100644
    index 0000000..89708a5
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/pkgconfig.py
    @@ -0,0 +1,121 @@
    +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
    +import sys, os, subprocess
    +
    +from .error import PkgConfigError
    +
    +
    +def merge_flags(cfg1, cfg2):
    +    """Merge values from cffi config flags cfg2 to cf1
    +
    +    Example:
    +        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
    +        {"libraries": ["one", "two"]}
    +    """
    +    for key, value in cfg2.items():
    +        if key not in cfg1:
    +            cfg1[key] = value
    +        else:
    +            if not isinstance(cfg1[key], list):
    +                raise TypeError("cfg1[%r] should be a list of strings" % (key,))
    +            if not isinstance(value, list):
    +                raise TypeError("cfg2[%r] should be a list of strings" % (key,))
    +            cfg1[key].extend(value)
    +    return cfg1
    +
    +
    +def call(libname, flag, encoding=sys.getfilesystemencoding()):
    +    """Calls pkg-config and returns the output if found
    +    """
    +    a = ["pkg-config", "--print-errors"]
    +    a.append(flag)
    +    a.append(libname)
    +    try:
    +        pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    +    except EnvironmentError as e:
    +        raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
    +
    +    bout, berr = pc.communicate()
    +    if pc.returncode != 0:
    +        try:
    +            berr = berr.decode(encoding)
    +        except Exception:
    +            pass
    +        raise PkgConfigError(berr.strip())
    +
    +    if sys.version_info >= (3,) and not isinstance(bout, str):   # Python 3.x
    +        try:
    +            bout = bout.decode(encoding)
    +        except UnicodeDecodeError:
    +            raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
    +                                 "be decoded with encoding %r:\n%r" %
    +                                 (flag, libname, encoding, bout))
    +
    +    if os.altsep != '\\' and '\\' in bout:
    +        raise PkgConfigError("pkg-config %s %s returned an unsupported "
    +                             "backslash-escaped output:\n%r" %
    +                             (flag, libname, bout))
    +    return bout
    +
    +
    +def flags_from_pkgconfig(libs):
    +    r"""Return compiler line flags for FFI.set_source based on pkg-config output
    +
    +    Usage
    +        ...
    +        ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
    +
+    If pkg-config is installed on the build machine, then the arguments
+    include_dirs, library_dirs, libraries, define_macros, extra_compile_args
+    and extra_link_args are extended with the output of pkg-config for libfoo
+    and libbar.
    +
    +    Raises PkgConfigError in case the pkg-config call fails.
    +    """
    +
    +    def get_include_dirs(string):
    +        return [x[2:] for x in string.split() if x.startswith("-I")]
    +
    +    def get_library_dirs(string):
    +        return [x[2:] for x in string.split() if x.startswith("-L")]
    +
    +    def get_libraries(string):
    +        return [x[2:] for x in string.split() if x.startswith("-l")]
    +
    +    # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
    +    def get_macros(string):
    +        def _macro(x):
    +            x = x[2:]    # drop "-D"
    +            if '=' in x:
    +                return tuple(x.split("=", 1))  # "-Dfoo=bar" => ("foo", "bar")
    +            else:
    +                return (x, None)               # "-Dfoo" => ("foo", None)
    +        return [_macro(x) for x in string.split() if x.startswith("-D")]
    +
    +    def get_other_cflags(string):
    +        return [x for x in string.split() if not x.startswith("-I") and
    +                                             not x.startswith("-D")]
    +
    +    def get_other_libs(string):
    +        return [x for x in string.split() if not x.startswith("-L") and
    +                                             not x.startswith("-l")]
    +
    +    # return kwargs for given libname
    +    def kwargs(libname):
    +        fse = sys.getfilesystemencoding()
    +        all_cflags = call(libname, "--cflags")
    +        all_libs = call(libname, "--libs")
    +        return {
    +            "include_dirs": get_include_dirs(all_cflags),
    +            "library_dirs": get_library_dirs(all_libs),
    +            "libraries": get_libraries(all_libs),
    +            "define_macros": get_macros(all_cflags),
    +            "extra_compile_args": get_other_cflags(all_cflags),
    +            "extra_link_args": get_other_libs(all_libs),
    +            }
    +
    +    # merge all arguments together
    +    ret = {}
    +    for libname in libs:
    +        lib_flags = kwargs(libname)
    +        merge_flags(ret, lib_flags)
    +    return ret
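+
+# Illustrative sketch (not part of upstream cffi): assuming pkg-config reports
+# "-I/usr/include/foo -DFOO=1" for --cflags and "-L/opt/foo/lib -lfoo -pthread"
+# for --libs, flags_from_pkgconfig(["foo"]) would return
+#     {"include_dirs": ["/usr/include/foo"],
+#      "library_dirs": ["/opt/foo/lib"],
+#      "libraries": ["foo"],
+#      "define_macros": [("FOO", "1")],
+#      "extra_compile_args": [],
+#      "extra_link_args": ["-pthread"]}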
    diff --git a/server/www/packages/packages-windows/x86/cffi/recompiler.py b/server/www/packages/packages-windows/x86/cffi/recompiler.py
    new file mode 100644
    index 0000000..2811493
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/recompiler.py
    @@ -0,0 +1,1552 @@
    +import os, sys, io
    +from . import ffiplatform, model
    +from .error import VerificationError
    +from .cffi_opcode import *
    +
    +VERSION_BASE = 0x2601
    +VERSION_EMBEDDED = 0x2701
    +VERSION_CHAR16CHAR32 = 0x2801
    +
    +
    +class GlobalExpr:
    +    def __init__(self, name, address, type_op, size=0, check_value=0):
    +        self.name = name
    +        self.address = address
    +        self.type_op = type_op
    +        self.size = size
    +        self.check_value = check_value
    +
    +    def as_c_expr(self):
    +        return '  { "%s", (void *)%s, %s, (void *)%s },' % (
    +            self.name, self.address, self.type_op.as_c_expr(), self.size)
    +
    +    def as_python_expr(self):
    +        return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name,
    +                               self.check_value)
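+
+    # Illustrative sketch (not part of upstream cffi): for name='foo',
+    # address='&foo' and size=0, as_c_expr() emits the table row
+    #     { "foo", (void *)&foo, <type_op>, (void *)0 },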
    +
    +class FieldExpr:
    +    def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
    +        self.name = name
    +        self.field_offset = field_offset
    +        self.field_size = field_size
    +        self.fbitsize = fbitsize
    +        self.field_type_op = field_type_op
    +
    +    def as_c_expr(self):
    +        spaces = " " * len(self.name)
    +        return ('  { "%s", %s,\n' % (self.name, self.field_offset) +
    +                '     %s   %s,\n' % (spaces, self.field_size) +
    +                '     %s   %s },' % (spaces, self.field_type_op.as_c_expr()))
    +
    +    def as_python_expr(self):
    +        raise NotImplementedError
    +
    +    def as_field_python_expr(self):
    +        if self.field_type_op.op == OP_NOOP:
    +            size_expr = ''
    +        elif self.field_type_op.op == OP_BITFIELD:
    +            size_expr = format_four_bytes(self.fbitsize)
    +        else:
    +            raise NotImplementedError
    +        return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
    +                              size_expr,
    +                              self.name)
    +
    +class StructUnionExpr:
    +    def __init__(self, name, type_index, flags, size, alignment, comment,
    +                 first_field_index, c_fields):
    +        self.name = name
    +        self.type_index = type_index
    +        self.flags = flags
    +        self.size = size
    +        self.alignment = alignment
    +        self.comment = comment
    +        self.first_field_index = first_field_index
    +        self.c_fields = c_fields
    +
    +    def as_c_expr(self):
    +        return ('  { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
    +                + '\n    %s, %s, ' % (self.size, self.alignment)
    +                + '%d, %d ' % (self.first_field_index, len(self.c_fields))
    +                + ('/* %s */ ' % self.comment if self.comment else '')
    +                + '},')
    +
    +    def as_python_expr(self):
    +        flags = eval(self.flags, G_FLAGS)
    +        fields_expr = [c_field.as_field_python_expr()
    +                       for c_field in self.c_fields]
    +        return "(b'%s%s%s',%s)" % (
    +            format_four_bytes(self.type_index),
    +            format_four_bytes(flags),
    +            self.name,
    +            ','.join(fields_expr))
    +
    +class EnumExpr:
    +    def __init__(self, name, type_index, size, signed, allenums):
    +        self.name = name
    +        self.type_index = type_index
    +        self.size = size
    +        self.signed = signed
    +        self.allenums = allenums
    +
    +    def as_c_expr(self):
    +        return ('  { "%s", %d, _cffi_prim_int(%s, %s),\n'
    +                '    "%s" },' % (self.name, self.type_index,
    +                                 self.size, self.signed, self.allenums))
    +
    +    def as_python_expr(self):
    +        prim_index = {
    +            (1, 0): PRIM_UINT8,  (1, 1):  PRIM_INT8,
    +            (2, 0): PRIM_UINT16, (2, 1):  PRIM_INT16,
    +            (4, 0): PRIM_UINT32, (4, 1):  PRIM_INT32,
    +            (8, 0): PRIM_UINT64, (8, 1):  PRIM_INT64,
    +            }[self.size, self.signed]
    +        return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
    +                                     format_four_bytes(prim_index),
    +                                     self.name, self.allenums)
    +
    +class TypenameExpr:
    +    def __init__(self, name, type_index):
    +        self.name = name
    +        self.type_index = type_index
    +
    +    def as_c_expr(self):
    +        return '  { "%s", %d },' % (self.name, self.type_index)
    +
    +    def as_python_expr(self):
    +        return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
    +
    +
    +# ____________________________________________________________
    +
    +
    +class Recompiler:
    +    _num_externpy = 0
    +
    +    def __init__(self, ffi, module_name, target_is_python=False):
    +        self.ffi = ffi
    +        self.module_name = module_name
    +        self.target_is_python = target_is_python
    +        self._version = VERSION_BASE
    +
    +    def needs_version(self, ver):
    +        self._version = max(self._version, ver)
    +
    +    def collect_type_table(self):
    +        self._typesdict = {}
    +        self._generate("collecttype")
    +        #
    +        all_decls = sorted(self._typesdict, key=str)
    +        #
    +        # prepare all FUNCTION bytecode sequences first
    +        self.cffi_types = []
    +        for tp in all_decls:
    +            if tp.is_raw_function:
    +                assert self._typesdict[tp] is None
    +                self._typesdict[tp] = len(self.cffi_types)
    +                self.cffi_types.append(tp)     # placeholder
    +                for tp1 in tp.args:
    +                    assert isinstance(tp1, (model.VoidType,
    +                                            model.BasePrimitiveType,
    +                                            model.PointerType,
    +                                            model.StructOrUnionOrEnum,
    +                                            model.FunctionPtrType))
    +                    if self._typesdict[tp1] is None:
    +                        self._typesdict[tp1] = len(self.cffi_types)
    +                    self.cffi_types.append(tp1)   # placeholder
    +                self.cffi_types.append('END')     # placeholder
    +        #
    +        # prepare all OTHER bytecode sequences
    +        for tp in all_decls:
    +            if not tp.is_raw_function and self._typesdict[tp] is None:
    +                self._typesdict[tp] = len(self.cffi_types)
    +                self.cffi_types.append(tp)        # placeholder
    +                if tp.is_array_type and tp.length is not None:
    +                    self.cffi_types.append('LEN') # placeholder
    +        assert None not in self._typesdict.values()
    +        #
    +        # collect all structs and unions and enums
    +        self._struct_unions = {}
    +        self._enums = {}
    +        for tp in all_decls:
    +            if isinstance(tp, model.StructOrUnion):
    +                self._struct_unions[tp] = None
    +            elif isinstance(tp, model.EnumType):
    +                self._enums[tp] = None
    +        for i, tp in enumerate(sorted(self._struct_unions,
    +                                      key=lambda tp: tp.name)):
    +            self._struct_unions[tp] = i
    +        for i, tp in enumerate(sorted(self._enums,
    +                                      key=lambda tp: tp.name)):
    +            self._enums[tp] = i
    +        #
    +        # emit all bytecode sequences now
    +        for tp in all_decls:
    +            method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__)
    +            method(tp, self._typesdict[tp])
    +        #
    +        # consistency check
    +        for op in self.cffi_types:
    +            assert isinstance(op, CffiOp)
    +        self.cffi_types = tuple(self.cffi_types)    # don't change any more
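+
+    # Illustrative sketch (not part of upstream cffi): for a cdef like
+    #     int f(long);
+    # the raw function type first contributes [<f placeholder>, <long>, 'END']
+    # to self.cffi_types; the _emit_bytecode_* pass then replaces every
+    # placeholder with a real CffiOp.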
    +
    +    def _do_collect_type(self, tp):
    +        if not isinstance(tp, model.BaseTypeByIdentity):
    +            if isinstance(tp, tuple):
    +                for x in tp:
    +                    self._do_collect_type(x)
    +            return
    +        if tp not in self._typesdict:
    +            self._typesdict[tp] = None
    +            if isinstance(tp, model.FunctionPtrType):
    +                self._do_collect_type(tp.as_raw_function())
    +            elif isinstance(tp, model.StructOrUnion):
    +                if tp.fldtypes is not None and (
    +                        tp not in self.ffi._parser._included_declarations):
    +                    for name1, tp1, _, _ in tp.enumfields():
    +                        self._do_collect_type(self._field_type(tp, name1, tp1))
    +            else:
    +                for _, x in tp._get_items():
    +                    self._do_collect_type(x)
    +
    +    def _generate(self, step_name):
    +        lst = self.ffi._parser._declarations.items()
    +        for name, (tp, quals) in sorted(lst):
    +            kind, realname = name.split(' ', 1)
    +            try:
    +                method = getattr(self, '_generate_cpy_%s_%s' % (kind,
    +                                                                step_name))
    +            except AttributeError:
    +                raise VerificationError(
    +                    "not implemented in recompile(): %r" % name)
    +            try:
    +                self._current_quals = quals
    +                method(tp, realname)
    +            except Exception as e:
    +                model.attach_exception_info(e, name)
    +                raise
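+
+    # Illustrative sketch (not part of upstream cffi): a declaration stored
+    # under the key 'function foo' dispatches, during the "decl" step, to
+    # self._generate_cpy_function_decl(tp, 'foo').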
    +
    +    # ----------
    +
    +    ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
    +
    +    def collect_step_tables(self):
    +        # collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
    +        self._lsts = {}
    +        for step_name in self.ALL_STEPS:
    +            self._lsts[step_name] = []
    +        self._seen_struct_unions = set()
    +        self._generate("ctx")
    +        self._add_missing_struct_unions()
    +        #
    +        for step_name in self.ALL_STEPS:
    +            lst = self._lsts[step_name]
    +            if step_name != "field":
    +                lst.sort(key=lambda entry: entry.name)
    +            self._lsts[step_name] = tuple(lst)    # don't change any more
    +        #
    +        # check for a possible internal inconsistency: _cffi_struct_unions
    +        # should have been generated with exactly self._struct_unions
    +        lst = self._lsts["struct_union"]
    +        for tp, i in self._struct_unions.items():
    +            assert i < len(lst)
    +            assert lst[i].name == tp.name
    +        assert len(lst) == len(self._struct_unions)
    +        # same with enums
    +        lst = self._lsts["enum"]
    +        for tp, i in self._enums.items():
    +            assert i < len(lst)
    +            assert lst[i].name == tp.name
    +        assert len(lst) == len(self._enums)
    +
    +    # ----------
    +
    +    def _prnt(self, what=''):
    +        self._f.write(what + '\n')
    +
    +    def write_source_to_f(self, f, preamble):
    +        if self.target_is_python:
    +            assert preamble is None
    +            self.write_py_source_to_f(f)
    +        else:
    +            assert preamble is not None
    +            self.write_c_source_to_f(f, preamble)
    +
    +    def _rel_readlines(self, filename):
    +        g = open(os.path.join(os.path.dirname(__file__), filename), 'r')
    +        lines = g.readlines()
    +        g.close()
    +        return lines
    +
    +    def write_c_source_to_f(self, f, preamble):
    +        self._f = f
    +        prnt = self._prnt
    +        if self.ffi._embedding is not None:
    +            prnt('#define _CFFI_USE_EMBEDDING')
    +        #
    +        # first the '#include' (actually done by inlining the file's content)
    +        lines = self._rel_readlines('_cffi_include.h')
    +        i = lines.index('#include "parse_c_type.h"\n')
    +        lines[i:i+1] = self._rel_readlines('parse_c_type.h')
    +        prnt(''.join(lines))
    +        #
    +        # if we have ffi._embedding != None, we give it here as a macro
    +        # and include an extra file
    +        base_module_name = self.module_name.split('.')[-1]
    +        if self.ffi._embedding is not None:
    +            prnt('#define _CFFI_MODULE_NAME  "%s"' % (self.module_name,))
    +            prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
    +            self._print_string_literal_in_array(self.ffi._embedding)
    +            prnt('0 };')
    +            prnt('#ifdef PYPY_VERSION')
    +            prnt('# define _CFFI_PYTHON_STARTUP_FUNC  _cffi_pypyinit_%s' % (
    +                base_module_name,))
    +            prnt('#elif PY_MAJOR_VERSION >= 3')
    +            prnt('# define _CFFI_PYTHON_STARTUP_FUNC  PyInit_%s' % (
    +                base_module_name,))
    +            prnt('#else')
    +            prnt('# define _CFFI_PYTHON_STARTUP_FUNC  init%s' % (
    +                base_module_name,))
    +            prnt('#endif')
    +            lines = self._rel_readlines('_embedding.h')
    +            i = lines.index('#include "_cffi_errors.h"\n')
    +            lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
    +            prnt(''.join(lines))
    +            self.needs_version(VERSION_EMBEDDED)
    +        #
    +        # then paste the C source given by the user, verbatim.
    +        prnt('/************************************************************/')
    +        prnt()
    +        prnt(preamble)
    +        prnt()
    +        prnt('/************************************************************/')
    +        prnt()
    +        #
    +        # the declaration of '_cffi_types'
    +        prnt('static void *_cffi_types[] = {')
    +        typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
    +        for i, op in enumerate(self.cffi_types):
    +            comment = ''
    +            if i in typeindex2type:
    +                comment = ' // ' + typeindex2type[i]._get_c_name()
    +            prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment))
    +        if not self.cffi_types:
    +            prnt('  0')
    +        prnt('};')
    +        prnt()
    +        #
    +        # call generate_cpy_xxx_decl(), for every xxx found from
    +        # ffi._parser._declarations.  This generates all the functions.
    +        self._seen_constants = set()
    +        self._generate("decl")
    +        #
    +        # the declaration of '_cffi_globals' and '_cffi_typenames'
    +        nums = {}
    +        for step_name in self.ALL_STEPS:
    +            lst = self._lsts[step_name]
    +            nums[step_name] = len(lst)
    +            if nums[step_name] > 0:
    +                prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % (
    +                    step_name, step_name))
    +                for entry in lst:
    +                    prnt(entry.as_c_expr())
    +                prnt('};')
    +                prnt()
    +        #
    +        # the declaration of '_cffi_includes'
    +        if self.ffi._included_ffis:
    +            prnt('static const char * const _cffi_includes[] = {')
    +            for ffi_to_include in self.ffi._included_ffis:
    +                try:
    +                    included_module_name, included_source = (
    +                        ffi_to_include._assigned_source[:2])
    +                except AttributeError:
    +                    raise VerificationError(
    +                        "ffi object %r includes %r, but the latter has not "
    +                        "been prepared with set_source()" % (
    +                            self.ffi, ffi_to_include,))
    +                if included_source is None:
    +                    raise VerificationError(
    +                        "not implemented yet: ffi.include() of a Python-based "
    +                        "ffi inside a C-based ffi")
    +                prnt('  "%s",' % (included_module_name,))
    +            prnt('  NULL')
    +            prnt('};')
    +            prnt()
    +        #
    +        # the declaration of '_cffi_type_context'
    +        prnt('static const struct _cffi_type_context_s _cffi_type_context = {')
    +        prnt('  _cffi_types,')
    +        for step_name in self.ALL_STEPS:
    +            if nums[step_name] > 0:
    +                prnt('  _cffi_%ss,' % step_name)
    +            else:
    +                prnt('  NULL,  /* no %ss */' % step_name)
    +        for step_name in self.ALL_STEPS:
    +            if step_name != "field":
    +                prnt('  %d,  /* num_%ss */' % (nums[step_name], step_name))
    +        if self.ffi._included_ffis:
    +            prnt('  _cffi_includes,')
    +        else:
    +            prnt('  NULL,  /* no includes */')
    +        prnt('  %d,  /* num_types */' % (len(self.cffi_types),))
    +        flags = 0
    +        if self._num_externpy:
    +            flags |= 1     # set to mean that we use extern "Python"
    +        prnt('  %d,  /* flags */' % flags)
    +        prnt('};')
    +        prnt()
    +        #
    +        # the init function
    +        prnt('#ifdef __GNUC__')
    +        prnt('#  pragma GCC visibility push(default)  /* for -fvisibility= */')
    +        prnt('#endif')
    +        prnt()
    +        prnt('#ifdef PYPY_VERSION')
    +        prnt('PyMODINIT_FUNC')
    +        prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
    +        prnt('{')
    +        if self._num_externpy:
    +            prnt('    if (((intptr_t)p[0]) >= 0x0A03) {')
    +            prnt('        _cffi_call_python_org = '
    +                 '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
    +            prnt('    }')
    +        prnt('    p[0] = (const void *)0x%x;' % self._version)
    +        prnt('    p[1] = &_cffi_type_context;')
    +        prnt('#if PY_MAJOR_VERSION >= 3')
    +        prnt('    return NULL;')
    +        prnt('#endif')
    +        prnt('}')
    +        # on Windows, distutils insists on putting init_cffi_xyz in
    +        # 'export_symbols', so instead of fighting it, just give up and
    +        # give it one
    +        prnt('#  ifdef _MSC_VER')
    +        prnt('     PyMODINIT_FUNC')
    +        prnt('#  if PY_MAJOR_VERSION >= 3')
    +        prnt('     PyInit_%s(void) { return NULL; }' % (base_module_name,))
    +        prnt('#  else')
    +        prnt('     init%s(void) { }' % (base_module_name,))
    +        prnt('#  endif')
    +        prnt('#  endif')
    +        prnt('#elif PY_MAJOR_VERSION >= 3')
    +        prnt('PyMODINIT_FUNC')
    +        prnt('PyInit_%s(void)' % (base_module_name,))
    +        prnt('{')
    +        prnt('  return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
    +            self.module_name, self._version))
    +        prnt('}')
    +        prnt('#else')
    +        prnt('PyMODINIT_FUNC')
    +        prnt('init%s(void)' % (base_module_name,))
    +        prnt('{')
    +        prnt('  _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
    +            self.module_name, self._version))
    +        prnt('}')
    +        prnt('#endif')
    +        prnt()
    +        prnt('#ifdef __GNUC__')
    +        prnt('#  pragma GCC visibility pop')
    +        prnt('#endif')
    +        self._version = None
    +
    +    def _to_py(self, x):
    +        if isinstance(x, str):
    +            return "b'%s'" % (x,)
    +        if isinstance(x, (list, tuple)):
    +            rep = [self._to_py(item) for item in x]
    +            if len(rep) == 1:
    +                rep.append('')
    +            return "(%s)" % (','.join(rep),)
+        return x.as_python_expr()  # Py2: unicode unexpected; Py3: bytes unexpected
    +
    +    def write_py_source_to_f(self, f):
    +        self._f = f
    +        prnt = self._prnt
    +        #
    +        # header
    +        prnt("# auto-generated file")
    +        prnt("import _cffi_backend")
    +        #
    +        # the 'import' of the included ffis
    +        num_includes = len(self.ffi._included_ffis or ())
    +        for i in range(num_includes):
    +            ffi_to_include = self.ffi._included_ffis[i]
    +            try:
    +                included_module_name, included_source = (
    +                    ffi_to_include._assigned_source[:2])
    +            except AttributeError:
    +                raise VerificationError(
    +                    "ffi object %r includes %r, but the latter has not "
    +                    "been prepared with set_source()" % (
    +                        self.ffi, ffi_to_include,))
    +            if included_source is not None:
    +                raise VerificationError(
    +                    "not implemented yet: ffi.include() of a C-based "
    +                    "ffi inside a Python-based ffi")
    +            prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
    +        prnt()
    +        prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
    +        prnt("    _version = 0x%x," % (self._version,))
    +        self._version = None
    +        #
    +        # the '_types' keyword argument
    +        self.cffi_types = tuple(self.cffi_types)    # don't change any more
    +        types_lst = [op.as_python_bytes() for op in self.cffi_types]
    +        prnt('    _types = %s,' % (self._to_py(''.join(types_lst)),))
    +        typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
    +        #
    +        # the keyword arguments from ALL_STEPS
    +        for step_name in self.ALL_STEPS:
    +            lst = self._lsts[step_name]
    +            if len(lst) > 0 and step_name != "field":
    +                prnt('    _%ss = %s,' % (step_name, self._to_py(lst)))
    +        #
    +        # the '_includes' keyword argument
    +        if num_includes > 0:
    +            prnt('    _includes = (%s,),' % (
    +                ', '.join(['_ffi%d' % i for i in range(num_includes)]),))
    +        #
    +        # the footer
    +        prnt(')')
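+
+    # Illustrative sketch (not part of upstream cffi): the generated
+    # pure-Python module looks roughly like
+    #     # auto-generated file
+    #     import _cffi_backend
+    #     ffi = _cffi_backend.FFI('_example',
+    #         _version = 0x2601,
+    #         _types = b'...',
+    #     )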
    +
    +    # ----------
    +
    +    def _gettypenum(self, type):
    +        # a KeyError here is a bug.  please report it! :-)
    +        return self._typesdict[type]
    +
    +    def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
    +        extraarg = ''
    +        if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
    +            if tp.is_integer_type() and tp.name != '_Bool':
    +                converter = '_cffi_to_c_int'
    +                extraarg = ', %s' % tp.name
    +            elif isinstance(tp, model.UnknownFloatType):
    +                # don't check with is_float_type(): it may be a 'long
+                # double' here, and _cffi_to_c_double would lose precision
    +                converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
    +            else:
    +                cname = tp.get_c_name('')
    +                converter = '(%s)_cffi_to_c_%s' % (cname,
    +                                                   tp.name.replace(' ', '_'))
    +                if cname in ('char16_t', 'char32_t'):
    +                    self.needs_version(VERSION_CHAR16CHAR32)
    +            errvalue = '-1'
    +        #
    +        elif isinstance(tp, model.PointerType):
    +            self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
    +                                                    tovar, errcode)
    +            return
    +        #
    +        elif (isinstance(tp, model.StructOrUnionOrEnum) or
    +              isinstance(tp, model.BasePrimitiveType)):
    +            # a struct (not a struct pointer) as a function argument;
    +            # or, a complex (the same code works)
    +            self._prnt('  if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
    +                      % (tovar, self._gettypenum(tp), fromvar))
    +            self._prnt('    %s;' % errcode)
    +            return
    +        #
    +        elif isinstance(tp, model.FunctionPtrType):
    +            converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
    +            extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
    +            errvalue = 'NULL'
    +        #
    +        else:
    +            raise NotImplementedError(tp)
    +        #
    +        self._prnt('  %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
    +        self._prnt('  if (%s == (%s)%s && PyErr_Occurred())' % (
    +            tovar, tp.get_c_name(''), errvalue))
    +        self._prnt('    %s;' % errcode)
    +
    +    def _extra_local_variables(self, tp, localvars, freelines):
    +        if isinstance(tp, model.PointerType):
    +            localvars.add('Py_ssize_t datasize')
    +            localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
    +            freelines.add('if (large_args_free != NULL)'
    +                          ' _cffi_free_array_arguments(large_args_free);')
    +
    +    def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
    +        self._prnt('  datasize = _cffi_prepare_pointer_call_argument(')
    +        self._prnt('      _cffi_type(%d), %s, (char **)&%s);' % (
    +            self._gettypenum(tp), fromvar, tovar))
    +        self._prnt('  if (datasize != 0) {')
    +        self._prnt('    %s = ((size_t)datasize) <= 640 ? '
    +                   '(%s)alloca((size_t)datasize) : NULL;' % (
    +            tovar, tp.get_c_name('')))
    +        self._prnt('    if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
    +                   '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
    +        self._prnt('            datasize, &large_args_free) < 0)')
    +        self._prnt('      %s;' % errcode)
    +        self._prnt('  }')
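+
+    # Illustrative sketch (not part of upstream cffi): the generated C keeps
+    # pointer/array arguments of at most 640 bytes on the stack via alloca()
+    # and leaves larger ones to _cffi_convert_array_argument(), which
+    # allocates them on the heap and records them in large_args_free for
+    # later release.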
    +
    +    def _convert_expr_from_c(self, tp, var, context):
    +        if isinstance(tp, model.BasePrimitiveType):
    +            if tp.is_integer_type() and tp.name != '_Bool':
    +                return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
    +            elif isinstance(tp, model.UnknownFloatType):
    +                return '_cffi_from_c_double(%s)' % (var,)
    +            elif tp.name != 'long double' and not tp.is_complex_type():
    +                cname = tp.name.replace(' ', '_')
    +                if cname in ('char16_t', 'char32_t'):
    +                    self.needs_version(VERSION_CHAR16CHAR32)
    +                return '_cffi_from_c_%s(%s)' % (cname, var)
    +            else:
    +                return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
    +                    var, self._gettypenum(tp))
    +        elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
    +            return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        elif isinstance(tp, model.ArrayType):
    +            return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(model.PointerType(tp.item)))
    +        elif isinstance(tp, model.StructOrUnion):
    +            if tp.fldnames is None:
    +                raise TypeError("'%s' is used as %s, but is opaque" % (
    +                    tp._get_c_name(), context))
    +            return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        elif isinstance(tp, model.EnumType):
    +            return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        else:
    +            raise NotImplementedError(tp)
    +
    +    # ----------
    +    # typedefs
    +
    +    def _typedef_type(self, tp, name):
    +        return self._global_type(tp, "(*(%s *)0)" % (name,))
    +
    +    def _generate_cpy_typedef_collecttype(self, tp, name):
    +        self._do_collect_type(self._typedef_type(tp, name))
    +
    +    def _generate_cpy_typedef_decl(self, tp, name):
    +        pass
    +
    +    def _typedef_ctx(self, tp, name):
    +        type_index = self._typesdict[tp]
    +        self._lsts["typename"].append(TypenameExpr(name, type_index))
    +
    +    def _generate_cpy_typedef_ctx(self, tp, name):
    +        tp = self._typedef_type(tp, name)
    +        self._typedef_ctx(tp, name)
    +        if getattr(tp, "origin", None) == "unknown_type":
    +            self._struct_ctx(tp, tp.name, approxname=None)
    +        elif isinstance(tp, model.NamedPointerType):
    +            self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name,
    +                             named_ptr=tp)
    +
    +    # ----------
    +    # function declarations
    +
    +    def _generate_cpy_function_collecttype(self, tp, name):
    +        self._do_collect_type(tp.as_raw_function())
    +        if tp.ellipsis and not self.target_is_python:
    +            self._do_collect_type(tp)
    +
    +    def _generate_cpy_function_decl(self, tp, name):
    +        assert not self.target_is_python
    +        assert isinstance(tp, model.FunctionPtrType)
    +        if tp.ellipsis:
    +            # cannot support vararg functions better than this: check for its
    +            # exact type (including the fixed arguments), and build it as a
    +            # constant function pointer (no CPython wrapper)
    +            self._generate_cpy_constant_decl(tp, name)
    +            return
    +        prnt = self._prnt
    +        numargs = len(tp.args)
    +        if numargs == 0:
    +            argname = 'noarg'
    +        elif numargs == 1:
    +            argname = 'arg0'
    +        else:
    +            argname = 'args'
    +        #
    +        # ------------------------------
    +        # the 'd' version of the function, only for addressof(lib, 'func')
    +        arguments = []
    +        call_arguments = []
    +        context = 'argument of %s' % name
    +        for i, type in enumerate(tp.args):
    +            arguments.append(type.get_c_name(' x%d' % i, context))
    +            call_arguments.append('x%d' % i)
    +        repr_arguments = ', '.join(arguments)
    +        repr_arguments = repr_arguments or 'void'
    +        if tp.abi:
    +            abi = tp.abi + ' '
    +        else:
    +            abi = ''
    +        name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments)
    +        prnt('static %s' % (tp.result.get_c_name(name_and_arguments),))
    +        prnt('{')
    +        call_arguments = ', '.join(call_arguments)
    +        result_code = 'return '
    +        if isinstance(tp.result, model.VoidType):
    +            result_code = ''
    +        prnt('  %s%s(%s);' % (result_code, name, call_arguments))
    +        prnt('}')
    +        #
    +        prnt('#ifndef PYPY_VERSION')        # ------------------------------
    +        #
    +        prnt('static PyObject *')
    +        prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
    +        prnt('{')
    +        #
    +        context = 'argument of %s' % name
    +        for i, type in enumerate(tp.args):
    +            arg = type.get_c_name(' x%d' % i, context)
    +            prnt('  %s;' % arg)
    +        #
    +        localvars = set()
    +        freelines = set()
    +        for type in tp.args:
    +            self._extra_local_variables(type, localvars, freelines)
    +        for decl in sorted(localvars):
    +            prnt('  %s;' % (decl,))
    +        #
    +        if not isinstance(tp.result, model.VoidType):
    +            result_code = 'result = '
    +            context = 'result of %s' % name
    +            result_decl = '  %s;' % tp.result.get_c_name(' result', context)
    +            prnt(result_decl)
    +            prnt('  PyObject *pyresult;')
    +        else:
    +            result_decl = None
    +            result_code = ''
    +        #
    +        if len(tp.args) > 1:
    +            rng = range(len(tp.args))
    +            for i in rng:
    +                prnt('  PyObject *arg%d;' % i)
    +            prnt()
    +            prnt('  if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
    +                name, len(rng), len(rng),
    +                ', '.join(['&arg%d' % i for i in rng])))
    +            prnt('    return NULL;')
    +        prnt()
    +        #
    +        for i, type in enumerate(tp.args):
    +            self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
    +                                       'return NULL')
    +            prnt()
    +        #
    +        prnt('  Py_BEGIN_ALLOW_THREADS')
    +        prnt('  _cffi_restore_errno();')
    +        call_arguments = ['x%d' % i for i in range(len(tp.args))]
    +        call_arguments = ', '.join(call_arguments)
    +        prnt('  { %s%s(%s); }' % (result_code, name, call_arguments))
    +        prnt('  _cffi_save_errno();')
    +        prnt('  Py_END_ALLOW_THREADS')
    +        prnt()
    +        #
    +        prnt('  (void)self; /* unused */')
    +        if numargs == 0:
    +            prnt('  (void)noarg; /* unused */')
    +        if result_code:
    +            prnt('  pyresult = %s;' %
    +                 self._convert_expr_from_c(tp.result, 'result', 'result type'))
    +            for freeline in freelines:
    +                prnt('  ' + freeline)
    +            prnt('  return pyresult;')
    +        else:
    +            for freeline in freelines:
    +                prnt('  ' + freeline)
    +            prnt('  Py_INCREF(Py_None);')
    +            prnt('  return Py_None;')
    +        prnt('}')
    +        #
    +        prnt('#else')        # ------------------------------
    +        #
    +        # the PyPy version: need to replace struct/union arguments with
    +        # pointers, and if the result is a struct/union, insert a first
    +        # arg that is a pointer to the result.  We also do that for
    +        # complex args and return type.
    +        def need_indirection(type):
    +            return (isinstance(type, model.StructOrUnion) or
    +                    (isinstance(type, model.PrimitiveType) and
    +                     type.is_complex_type()))
    +        difference = False
    +        arguments = []
    +        call_arguments = []
    +        context = 'argument of %s' % name
    +        for i, type in enumerate(tp.args):
    +            indirection = ''
    +            if need_indirection(type):
    +                indirection = '*'
    +                difference = True
    +            arg = type.get_c_name(' %sx%d' % (indirection, i), context)
    +            arguments.append(arg)
    +            call_arguments.append('%sx%d' % (indirection, i))
    +        tp_result = tp.result
    +        if need_indirection(tp_result):
    +            context = 'result of %s' % name
    +            arg = tp_result.get_c_name(' *result', context)
    +            arguments.insert(0, arg)
    +            tp_result = model.void_type
    +            result_decl = None
    +            result_code = '*result = '
    +            difference = True
    +        if difference:
    +            repr_arguments = ', '.join(arguments)
    +            repr_arguments = repr_arguments or 'void'
    +            name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name,
    +                                                       repr_arguments)
    +            prnt('static %s' % (tp_result.get_c_name(name_and_arguments),))
    +            prnt('{')
    +            if result_decl:
    +                prnt(result_decl)
    +            call_arguments = ', '.join(call_arguments)
    +            prnt('  { %s%s(%s); }' % (result_code, name, call_arguments))
    +            if result_decl:
    +                prnt('  return result;')
    +            prnt('}')
    +        else:
    +            prnt('#  define _cffi_f_%s _cffi_d_%s' % (name, name))
    +        #
    +        prnt('#endif')        # ------------------------------
    +        prnt()
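+
+    # A rough sketch of what the method above emits, assuming a cdef of
+    # "int foo(int);" (abridged; the exact conversion helpers depend on
+    # the argument and result types):
+    #
+    #   static int _cffi_d_foo(int x0) { return foo(x0); }
+    #
+    #   static PyObject *_cffi_f_foo(PyObject *self, PyObject *arg0)
+    #   {
+    #     int x0;
+    #     int result;
+    #     PyObject *pyresult;
+    #     x0 = _cffi_to_c_int(arg0, int);
+    #     if (x0 == (int)-1 && PyErr_Occurred())
+    #       return NULL;
+    #     Py_BEGIN_ALLOW_THREADS
+    #     _cffi_restore_errno();
+    #     { result = foo(x0); }
+    #     _cffi_save_errno();
+    #     Py_END_ALLOW_THREADS
+    #     (void)self; /* unused */
+    #     pyresult = _cffi_from_c_int(result, int);
+    #     return pyresult;
+    #   }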
    +
    +    def _generate_cpy_function_ctx(self, tp, name):
    +        if tp.ellipsis and not self.target_is_python:
    +            self._generate_cpy_constant_ctx(tp, name)
    +            return
    +        type_index = self._typesdict[tp.as_raw_function()]
    +        numargs = len(tp.args)
    +        if self.target_is_python:
    +            meth_kind = OP_DLOPEN_FUNC
    +        elif numargs == 0:
    +            meth_kind = OP_CPYTHON_BLTN_N   # 'METH_NOARGS'
    +        elif numargs == 1:
    +            meth_kind = OP_CPYTHON_BLTN_O   # 'METH_O'
    +        else:
    +            meth_kind = OP_CPYTHON_BLTN_V   # 'METH_VARARGS'
    +        self._lsts["global"].append(
    +            GlobalExpr(name, '_cffi_f_%s' % name,
    +                       CffiOp(meth_kind, type_index),
    +                       size='_cffi_d_%s' % name))
    +
    +    # ----------
    +    # named structs or unions
    +
    +    def _field_type(self, tp_struct, field_name, tp_field):
    +        if isinstance(tp_field, model.ArrayType):
    +            actual_length = tp_field.length
    +            if actual_length == '...':
    +                ptr_struct_name = tp_struct.get_c_name('*')
    +                actual_length = '_cffi_array_len(((%s)0)->%s)' % (
    +                    ptr_struct_name, field_name)
    +            tp_item = self._field_type(tp_struct, '%s[0]' % field_name,
    +                                       tp_field.item)
    +            tp_field = model.ArrayType(tp_item, actual_length)
    +        return tp_field
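+
+    # For illustration (a sketch, assuming a cdef containing
+    # "struct s { int a[...]; };"): the method above replaces the '...'
+    # length of field 'a' with the compile-time expression
+    #   _cffi_array_len(((struct s *)0)->a)
+    # so the actual length is recovered by the C compiler.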
    +
    +    def _struct_collecttype(self, tp):
    +        self._do_collect_type(tp)
    +        if self.target_is_python:
    +            # also requires nested anon struct/unions in ABI mode, recursively
    +            for fldtype in tp.anonymous_struct_fields():
    +                self._struct_collecttype(fldtype)
    +
    +    def _struct_decl(self, tp, cname, approxname):
    +        if tp.fldtypes is None:
    +            return
    +        prnt = self._prnt
    +        checkfuncname = '_cffi_checkfld_%s' % (approxname,)
    +        prnt('_CFFI_UNUSED_FN')
    +        prnt('static void %s(%s *p)' % (checkfuncname, cname))
    +        prnt('{')
    +        prnt('  /* only to generate compile-time warnings or errors */')
    +        prnt('  (void)p;')
    +        for fname, ftype, fbitsize, fqual in tp.enumfields():
    +            try:
    +                if ftype.is_integer_type() or fbitsize >= 0:
    +                    # accept all integers, but complain on float or double
    +                    if fname != '':
    +                        prnt("  (void)((p->%s) | 0);  /* check that '%s.%s' is "
    +                             "an integer */" % (fname, cname, fname))
    +                    continue
    +                # only accept exactly the type declared, except that '[]'
    +                # is interpreted as a '*' and so will match any array length.
    +                # (It would also match '*', but that's harder to detect...)
    +                while (isinstance(ftype, model.ArrayType)
    +                       and (ftype.length is None or ftype.length == '...')):
    +                    ftype = ftype.item
    +                    fname = fname + '[0]'
    +                prnt('  { %s = &p->%s; (void)tmp; }' % (
    +                    ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
    +                    fname))
    +            except VerificationError as e:
    +                prnt('  /* %s */' % str(e))   # cannot verify it, ignore
    +        prnt('}')
    +        prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
    +        prnt()
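+
+    # Sketch of the check function emitted above, assuming a cdef of
+    # "struct point { int x, y; };":
+    #
+    #   _CFFI_UNUSED_FN
+    #   static void _cffi_checkfld_struct_point(struct point *p)
+    #   {
+    #     /* only to generate compile-time warnings or errors */
+    #     (void)p;
+    #     (void)((p->x) | 0);  /* check that 'struct point.x' is an integer */
+    #     (void)((p->y) | 0);  /* check that 'struct point.y' is an integer */
+    #   }
+    #   struct _cffi_align_struct_point { char x; struct point y; };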
    +
    +    def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
    +        type_index = self._typesdict[tp]
    +        reason_for_not_expanding = None
    +        flags = []
    +        if isinstance(tp, model.UnionType):
    +            flags.append("_CFFI_F_UNION")
    +        if tp.fldtypes is None:
    +            flags.append("_CFFI_F_OPAQUE")
    +            reason_for_not_expanding = "opaque"
    +        if (tp not in self.ffi._parser._included_declarations and
    +                (named_ptr is None or
    +                 named_ptr not in self.ffi._parser._included_declarations)):
    +            if tp.fldtypes is None:
    +                pass    # opaque
    +            elif tp.partial or any(tp.anonymous_struct_fields()):
    +                pass    # field layout obtained silently from the C compiler
    +            else:
    +                flags.append("_CFFI_F_CHECK_FIELDS")
    +            if tp.packed:
    +                if tp.packed > 1:
    +                    raise NotImplementedError(
    +                        "%r is declared with 'pack=%r'; only 0 or 1 are "
    +                        "supported in API mode (try to use \"...;\", which "
    +                        "does not require a 'pack' declaration)" %
    +                        (tp, tp.packed))
    +                flags.append("_CFFI_F_PACKED")
    +        else:
    +            flags.append("_CFFI_F_EXTERNAL")
    +            reason_for_not_expanding = "external"
    +        flags = '|'.join(flags) or '0'
    +        c_fields = []
    +        if reason_for_not_expanding is None:
    +            expand_anonymous_struct_union = not self.target_is_python
    +            enumfields = list(tp.enumfields(expand_anonymous_struct_union))
    +            for fldname, fldtype, fbitsize, fqual in enumfields:
    +                fldtype = self._field_type(tp, fldname, fldtype)
    +                self._check_not_opaque(fldtype,
    +                                       "field '%s.%s'" % (tp.name, fldname))
    +                # cname is None for _add_missing_struct_unions() only
    +                op = OP_NOOP
    +                if fbitsize >= 0:
    +                    op = OP_BITFIELD
    +                    size = '%d /* bits */' % fbitsize
    +                elif cname is None or (
    +                        isinstance(fldtype, model.ArrayType) and
    +                        fldtype.length is None):
    +                    size = '(size_t)-1'
    +                else:
    +                    size = 'sizeof(((%s)0)->%s)' % (
    +                        tp.get_c_name('*') if named_ptr is None
    +                                           else named_ptr.name,
    +                        fldname)
    +                if cname is None or fbitsize >= 0:
    +                    offset = '(size_t)-1'
    +                elif named_ptr is not None:
    +                    offset = '((char *)&((%s)0)->%s) - (char *)0' % (
    +                        named_ptr.name, fldname)
    +                else:
    +                    offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname)
    +                c_fields.append(
    +                    FieldExpr(fldname, offset, size, fbitsize,
    +                              CffiOp(op, self._typesdict[fldtype])))
    +            first_field_index = len(self._lsts["field"])
    +            self._lsts["field"].extend(c_fields)
    +            #
    +            if cname is None:  # unknown name, for _add_missing_struct_unions
    +                size = '(size_t)-2'
    +                align = -2
    +                comment = "unnamed"
    +            else:
    +                if named_ptr is not None:
    +                    size = 'sizeof(*(%s)0)' % (named_ptr.name,)
    +                    align = '-1 /* unknown alignment */'
    +                else:
    +                    size = 'sizeof(%s)' % (cname,)
    +                    align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,)
    +                comment = None
    +        else:
    +            size = '(size_t)-1'
    +            align = -1
    +            first_field_index = -1
    +            comment = reason_for_not_expanding
    +        self._lsts["struct_union"].append(
    +            StructUnionExpr(tp.name, type_index, flags, size, align, comment,
    +                            first_field_index, c_fields))
    +        self._seen_struct_unions.add(tp)
    +
    +    def _check_not_opaque(self, tp, location):
    +        while isinstance(tp, model.ArrayType):
    +            tp = tp.item
    +        if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
    +            raise TypeError(
    +                "%s is of an opaque type (not declared in cdef())" % location)
    +
    +    def _add_missing_struct_unions(self):
    +        # not very nice, but some struct declarations might be missing
    +        # because they don't have any known C name.  Check that they are
    +        # not partial (we can't complete or verify them!) and emit them
    +        # anonymously.
    +        lst = list(self._struct_unions.items())
    +        lst.sort(key=lambda tp_order: tp_order[1])
    +        for tp, order in lst:
    +            if tp not in self._seen_struct_unions:
    +                if tp.partial:
    +                    raise NotImplementedError("internal inconsistency: %r is "
    +                                              "partial but was not seen at "
    +                                              "this point" % (tp,))
    +                if tp.name.startswith('$') and tp.name[1:].isdigit():
    +                    approxname = tp.name[1:]
    +                elif tp.name == '_IO_FILE' and tp.forcename == 'FILE':
    +                    approxname = 'FILE'
    +                    self._typedef_ctx(tp, 'FILE')
    +                else:
    +                    raise NotImplementedError("internal inconsistency: %r" %
    +                                              (tp,))
    +                self._struct_ctx(tp, None, approxname)
    +
    +    def _generate_cpy_struct_collecttype(self, tp, name):
    +        self._struct_collecttype(tp)
    +    _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
    +
    +    def _struct_names(self, tp):
    +        cname = tp.get_c_name('')
    +        if ' ' in cname:
    +            return cname, cname.replace(' ', '_')
    +        else:
    +            return cname, '_' + cname
    +
    +    def _generate_cpy_struct_decl(self, tp, name):
    +        self._struct_decl(tp, *self._struct_names(tp))
    +    _generate_cpy_union_decl = _generate_cpy_struct_decl
    +
    +    def _generate_cpy_struct_ctx(self, tp, name):
    +        self._struct_ctx(tp, *self._struct_names(tp))
    +    _generate_cpy_union_ctx = _generate_cpy_struct_ctx
    +
    +    # ----------
    +    # 'anonymous' declarations.  These are produced for anonymous structs
    +    # or unions; the 'name' is obtained by a typedef.
    +
    +    def _generate_cpy_anonymous_collecttype(self, tp, name):
    +        if isinstance(tp, model.EnumType):
    +            self._generate_cpy_enum_collecttype(tp, name)
    +        else:
    +            self._struct_collecttype(tp)
    +
    +    def _generate_cpy_anonymous_decl(self, tp, name):
    +        if isinstance(tp, model.EnumType):
    +            self._generate_cpy_enum_decl(tp)
    +        else:
    +            self._struct_decl(tp, name, 'typedef_' + name)
    +
    +    def _generate_cpy_anonymous_ctx(self, tp, name):
    +        if isinstance(tp, model.EnumType):
    +            self._enum_ctx(tp, name)
    +        else:
    +            self._struct_ctx(tp, name, 'typedef_' + name)
    +
    +    # ----------
    +    # constants, declared with "static const ..."
    +
    +    def _generate_cpy_const(self, is_int, name, tp=None, category='const',
    +                            check_value=None):
    +        if (category, name) in self._seen_constants:
    +            raise VerificationError(
    +                "duplicate declaration of %s '%s'" % (category, name))
    +        self._seen_constants.add((category, name))
    +        #
    +        prnt = self._prnt
    +        funcname = '_cffi_%s_%s' % (category, name)
    +        if is_int:
    +            prnt('static int %s(unsigned long long *o)' % funcname)
    +            prnt('{')
    +            prnt('  int n = (%s) <= 0;' % (name,))
    +            prnt('  *o = (unsigned long long)((%s) | 0);'
    +                 '  /* check that %s is an integer */' % (name, name))
    +            if check_value is not None:
    +                if check_value > 0:
    +                    check_value = '%dU' % (check_value,)
    +                prnt('  if (!_cffi_check_int(*o, n, %s))' % (check_value,))
    +                prnt('    n |= 2;')
    +            prnt('  return n;')
    +            prnt('}')
    +        else:
    +            assert check_value is None
    +            prnt('static void %s(char *o)' % funcname)
    +            prnt('{')
    +            prnt('  *(%s)o = %s;' % (tp.get_c_name('*'), name))
    +            prnt('}')
    +        prnt()
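+
+    # Sketch, assuming an integer constant "static const int FOO;" in
+    # the cdef (and no check_value): the helper emitted above is
+    #
+    #   static int _cffi_const_FOO(unsigned long long *o)
+    #   {
+    #     int n = (FOO) <= 0;
+    #     *o = (unsigned long long)((FOO) | 0);  /* check that FOO is an integer */
+    #     return n;
+    #   }
+    #
+    # where 'n' records the sign, so the caller can rebuild the signed
+    # value from the unsigned long long.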
    +
    +    def _generate_cpy_constant_collecttype(self, tp, name):
    +        is_int = tp.is_integer_type()
    +        if not is_int or self.target_is_python:
    +            self._do_collect_type(tp)
    +
    +    def _generate_cpy_constant_decl(self, tp, name):
    +        is_int = tp.is_integer_type()
    +        self._generate_cpy_const(is_int, name, tp)
    +
    +    def _generate_cpy_constant_ctx(self, tp, name):
    +        if not self.target_is_python and tp.is_integer_type():
    +            type_op = CffiOp(OP_CONSTANT_INT, -1)
    +        else:
    +            if self.target_is_python:
    +                const_kind = OP_DLOPEN_CONST
    +            else:
    +                const_kind = OP_CONSTANT
    +            type_index = self._typesdict[tp]
    +            type_op = CffiOp(const_kind, type_index)
    +        self._lsts["global"].append(
    +            GlobalExpr(name, '_cffi_const_%s' % name, type_op))
    +
    +    # ----------
    +    # enums
    +
    +    def _generate_cpy_enum_collecttype(self, tp, name):
    +        self._do_collect_type(tp)
    +
    +    def _generate_cpy_enum_decl(self, tp, name=None):
    +        for enumerator in tp.enumerators:
    +            self._generate_cpy_const(True, enumerator)
    +
    +    def _enum_ctx(self, tp, cname):
    +        type_index = self._typesdict[tp]
    +        type_op = CffiOp(OP_ENUM, -1)
    +        if self.target_is_python:
    +            tp.check_not_partial()
    +        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
    +            self._lsts["global"].append(
    +                GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op,
    +                           check_value=enumvalue))
    +        #
    +        if cname is not None and '$' not in cname and not self.target_is_python:
    +            size = "sizeof(%s)" % cname
    +            signed = "((%s)-1) <= 0" % cname
    +        else:
    +            basetp = tp.build_baseinttype(self.ffi, [])
    +            size = self.ffi.sizeof(basetp)
    +            signed = int(int(self.ffi.cast(basetp, -1)) < 0)
    +        allenums = ",".join(tp.enumerators)
    +        self._lsts["enum"].append(
    +            EnumExpr(tp.name, type_index, size, signed, allenums))
    +
    +    def _generate_cpy_enum_ctx(self, tp, name):
    +        self._enum_ctx(tp, tp._get_c_name())
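+
+    # Sketch, assuming "enum color { RED, GREEN };" in the cdef: each
+    # enumerator gets a _cffi_const_RED/_cffi_const_GREEN helper (via
+    # _generate_cpy_enum_decl above) plus a GlobalExpr carrying its
+    # expected value, and a single EnumExpr with allenums "RED,GREEN"
+    # describes the enum itself, its size and signedness taken from
+    # sizeof(enum color) when the name is usable in C.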
    +
    +    # ----------
    +    # macros: for now only for integers
    +
    +    def _generate_cpy_macro_collecttype(self, tp, name):
    +        pass
    +
    +    def _generate_cpy_macro_decl(self, tp, name):
    +        if tp == '...':
    +            check_value = None
    +        else:
    +            check_value = tp     # an integer
    +        self._generate_cpy_const(True, name, check_value=check_value)
    +
    +    def _generate_cpy_macro_ctx(self, tp, name):
    +        if tp == '...':
    +            if self.target_is_python:
    +                raise VerificationError(
    +                    "cannot use the syntax '...' in '#define %s ...' when "
    +                    "using the ABI mode" % (name,))
    +            check_value = None
    +        else:
    +            check_value = tp     # an integer
    +        type_op = CffiOp(OP_CONSTANT_INT, -1)
    +        self._lsts["global"].append(
    +            GlobalExpr(name, '_cffi_const_%s' % name, type_op,
    +                       check_value=check_value))
    +
    +    # ----------
    +    # global variables
    +
    +    def _global_type(self, tp, global_name):
    +        if isinstance(tp, model.ArrayType):
    +            actual_length = tp.length
    +            if actual_length == '...':
    +                actual_length = '_cffi_array_len(%s)' % (global_name,)
    +            tp_item = self._global_type(tp.item, '%s[0]' % global_name)
    +            tp = model.ArrayType(tp_item, actual_length)
    +        return tp
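+
+    # For example (a sketch, assuming "int g[...];" in the cdef): the
+    # '...' length becomes the compile-time expression
+    # _cffi_array_len(g), in the same spirit as _field_type() above.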
    +
    +    def _generate_cpy_variable_collecttype(self, tp, name):
    +        self._do_collect_type(self._global_type(tp, name))
    +
    +    def _generate_cpy_variable_decl(self, tp, name):
    +        prnt = self._prnt
    +        tp = self._global_type(tp, name)
    +        if isinstance(tp, model.ArrayType) and tp.length is None:
    +            tp = tp.item
    +            ampersand = ''
    +        else:
    +            ampersand = '&'
+        # This code assumes that a cast from "tp *" to "void *" is a
    +        # no-op, i.e. a function that returns a "tp *" can be called
    +        # as if it returned a "void *".  This should be generally true
    +        # on any modern machine.  The only exception to that rule (on
    +        # uncommon architectures, and as far as I can tell) might be
    +        # if 'tp' were a function type, but that is not possible here.
    +        # (If 'tp' is a function _pointer_ type, then casts from "fn_t
    +        # **" to "void *" are again no-ops, as far as I can tell.)
    +        decl = '*_cffi_var_%s(void)' % (name,)
    +        prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
    +        prnt('{')
    +        prnt('  return %s(%s);' % (ampersand, name))
    +        prnt('}')
    +        prnt()
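+
+    # Sketch of the accessor emitted above, assuming "int g;" in the
+    # cdef:
+    #
+    #   static int *_cffi_var_g(void)
+    #   {
+    #     return &(g);
+    #   }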
    +
    +    def _generate_cpy_variable_ctx(self, tp, name):
    +        tp = self._global_type(tp, name)
    +        type_index = self._typesdict[tp]
    +        if self.target_is_python:
    +            op = OP_GLOBAL_VAR
    +        else:
    +            op = OP_GLOBAL_VAR_F
    +        self._lsts["global"].append(
    +            GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
    +
    +    # ----------
    +    # extern "Python"
    +
    +    def _generate_cpy_extern_python_collecttype(self, tp, name):
    +        assert isinstance(tp, model.FunctionPtrType)
    +        self._do_collect_type(tp)
    +    _generate_cpy_dllexport_python_collecttype = \
    +      _generate_cpy_extern_python_plus_c_collecttype = \
    +      _generate_cpy_extern_python_collecttype
    +
    +    def _extern_python_decl(self, tp, name, tag_and_space):
    +        prnt = self._prnt
    +        if isinstance(tp.result, model.VoidType):
    +            size_of_result = '0'
    +        else:
    +            context = 'result of %s' % name
    +            size_of_result = '(int)sizeof(%s)' % (
    +                tp.result.get_c_name('', context),)
    +        prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
    +        prnt('  { "%s.%s", %s, 0, 0 };' % (
    +            self.module_name, name, size_of_result))
    +        prnt()
    +        #
    +        arguments = []
    +        context = 'argument of %s' % name
    +        for i, type in enumerate(tp.args):
    +            arg = type.get_c_name(' a%d' % i, context)
    +            arguments.append(arg)
    +        #
    +        repr_arguments = ', '.join(arguments)
    +        repr_arguments = repr_arguments or 'void'
    +        name_and_arguments = '%s(%s)' % (name, repr_arguments)
    +        if tp.abi == "__stdcall":
    +            name_and_arguments = '_cffi_stdcall ' + name_and_arguments
    +        #
    +        def may_need_128_bits(tp):
    +            return (isinstance(tp, model.PrimitiveType) and
    +                    tp.name == 'long double')
    +        #
    +        size_of_a = max(len(tp.args)*8, 8)
    +        if may_need_128_bits(tp.result):
    +            size_of_a = max(size_of_a, 16)
    +        if isinstance(tp.result, model.StructOrUnion):
    +            size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
    +                tp.result.get_c_name(''), size_of_a,
    +                tp.result.get_c_name(''), size_of_a)
    +        prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
    +        prnt('{')
    +        prnt('  char a[%s];' % size_of_a)
    +        prnt('  char *p = a;')
    +        for i, type in enumerate(tp.args):
    +            arg = 'a%d' % i
    +            if (isinstance(type, model.StructOrUnion) or
    +                    may_need_128_bits(type)):
    +                arg = '&' + arg
    +                type = model.PointerType(type)
    +            prnt('  *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg))
    +        prnt('  _cffi_call_python(&_cffi_externpy__%s, p);' % name)
    +        if not isinstance(tp.result, model.VoidType):
    +            prnt('  return *(%s)p;' % (tp.result.get_c_name('*'),))
    +        prnt('}')
    +        prnt()
    +        self._num_externpy += 1
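+
+    # Sketch of the trampoline emitted above, assuming
+    # 'extern "Python" int cb(int);' in a module named "mymod":
+    #
+    #   static struct _cffi_externpy_s _cffi_externpy__cb =
+    #     { "mymod.cb", (int)sizeof(int), 0, 0 };
+    #
+    #   static int cb(int a0)
+    #   {
+    #     char a[8];
+    #     char *p = a;
+    #     *(int *)(p + 0) = a0;
+    #     _cffi_call_python(&_cffi_externpy__cb, p);
+    #     return *(int *)p;
+    #   }
+    #
+    # Arguments and the result share the 8-bytes-per-slot buffer 'a'.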
    +
    +    def _generate_cpy_extern_python_decl(self, tp, name):
    +        self._extern_python_decl(tp, name, 'static ')
    +
    +    def _generate_cpy_dllexport_python_decl(self, tp, name):
    +        self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
    +
    +    def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
    +        self._extern_python_decl(tp, name, '')
    +
    +    def _generate_cpy_extern_python_ctx(self, tp, name):
    +        if self.target_is_python:
    +            raise VerificationError(
    +                "cannot use 'extern \"Python\"' in the ABI mode")
    +        if tp.ellipsis:
    +            raise NotImplementedError("a vararg function is extern \"Python\"")
    +        type_index = self._typesdict[tp]
    +        type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
    +        self._lsts["global"].append(
    +            GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))
    +
    +    _generate_cpy_dllexport_python_ctx = \
    +      _generate_cpy_extern_python_plus_c_ctx = \
    +      _generate_cpy_extern_python_ctx
    +
    +    def _print_string_literal_in_array(self, s):
    +        prnt = self._prnt
    +        prnt('// # NB. this is not a string because of a size limit in MSVC')
    +        for line in s.splitlines(True):
    +            prnt(('// ' + line).rstrip())
    +            printed_line = ''
    +            for c in line:
    +                if len(printed_line) >= 76:
    +                    prnt(printed_line)
    +                    printed_line = ''
    +                printed_line += '%d,' % (ord(c),)
    +            prnt(printed_line)
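+
+    # For example, calling the method above with s = "hi\n" prints:
+    #
+    #   // # NB. this is not a string because of a size limit in MSVC
+    #   // hi
+    #   104,105,10,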
    +
    +    # ----------
    +    # emitting the opcodes for individual types
    +
    +    def _emit_bytecode_VoidType(self, tp, index):
    +        self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
    +
    +    def _emit_bytecode_PrimitiveType(self, tp, index):
    +        prim_index = PRIMITIVE_TO_INDEX[tp.name]
    +        self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
    +
    +    def _emit_bytecode_UnknownIntegerType(self, tp, index):
    +        s = ('_cffi_prim_int(sizeof(%s), (\n'
    +             '           ((%s)-1) | 0 /* check that %s is an integer type */\n'
    +             '         ) <= 0)' % (tp.name, tp.name, tp.name))
    +        self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
    +
    +    def _emit_bytecode_UnknownFloatType(self, tp, index):
    +        s = ('_cffi_prim_float(sizeof(%s) *\n'
    +             '           (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n'
    +             '         )' % (tp.name, tp.name))
    +        self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
    +
    +    def _emit_bytecode_RawFunctionType(self, tp, index):
    +        self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
    +        index += 1
    +        for tp1 in tp.args:
    +            realindex = self._typesdict[tp1]
    +            if index != realindex:
    +                if isinstance(tp1, model.PrimitiveType):
    +                    self._emit_bytecode_PrimitiveType(tp1, index)
    +                else:
    +                    self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
    +            index += 1
    +        flags = int(tp.ellipsis)
    +        if tp.abi is not None:
    +            if tp.abi == '__stdcall':
    +                flags |= 2
    +            else:
    +                raise NotImplementedError("abi=%r" % (tp.abi,))
    +        self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
    +
    +    def _emit_bytecode_PointerType(self, tp, index):
    +        self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])
    +
    +    _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
    +    _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
    +
    +    def _emit_bytecode_FunctionPtrType(self, tp, index):
    +        raw = tp.as_raw_function()
    +        self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
    +
    +    def _emit_bytecode_ArrayType(self, tp, index):
    +        item_index = self._typesdict[tp.item]
    +        if tp.length is None:
    +            self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
    +        elif tp.length == '...':
    +            raise VerificationError(
    +                "type %s badly placed: the '...' array length can only be "
    +                "used on global arrays or on fields of structures" % (
    +                    str(tp).replace('/*...*/', '...'),))
    +        else:
    +            assert self.cffi_types[index + 1] == 'LEN'
    +            self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
    +            self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
    +
    +    def _emit_bytecode_StructType(self, tp, index):
    +        struct_index = self._struct_unions[tp]
    +        self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
    +    _emit_bytecode_UnionType = _emit_bytecode_StructType
    +
    +    def _emit_bytecode_EnumType(self, tp, index):
    +        enum_index = self._enums[tp]
    +        self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
    +
    +
    +if sys.version_info >= (3,):
    +    NativeIO = io.StringIO
    +else:
    +    class NativeIO(io.BytesIO):
    +        def write(self, s):
    +            if isinstance(s, unicode):
    +                s = s.encode('ascii')
    +            super(NativeIO, self).write(s)
    +
    +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
    +    if verbose:
    +        print("generating %s" % (target_file,))
    +    recompiler = Recompiler(ffi, module_name,
    +                            target_is_python=(preamble is None))
    +    recompiler.collect_type_table()
    +    recompiler.collect_step_tables()
    +    f = NativeIO()
    +    recompiler.write_source_to_f(f, preamble)
    +    output = f.getvalue()
    +    try:
    +        with open(target_file, 'r') as f1:
    +            if f1.read(len(output) + 1) != output:
    +                raise IOError
    +        if verbose:
    +            print("(already up-to-date)")
    +        return False     # already up-to-date
    +    except IOError:
    +        tmp_file = '%s.~%d' % (target_file, os.getpid())
    +        with open(tmp_file, 'w') as f1:
    +            f1.write(output)
    +        try:
    +            os.rename(tmp_file, target_file)
    +        except OSError:
    +            os.unlink(target_file)
    +            os.rename(tmp_file, target_file)
    +        return True
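+
+# The temp-file-plus-rename dance above keeps the replacement of
+# 'target_file' close to atomic; the unlink() fallback covers platforms
+# (notably Windows) where os.rename() refuses to overwrite an existing
+# file.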
    +
    +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
    +    assert preamble is not None
    +    return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
    +                                verbose)
    +
    +def make_py_source(ffi, module_name, target_py_file, verbose=False):
    +    return _make_c_or_py_source(ffi, module_name, None, target_py_file,
    +                                verbose)
    +
    +def _modname_to_file(outputdir, modname, extension):
    +    parts = modname.split('.')
    +    try:
    +        os.makedirs(os.path.join(outputdir, *parts[:-1]))
    +    except OSError:
    +        pass
    +    parts[-1] += extension
    +    return os.path.join(outputdir, *parts), parts
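+
+# For example (on POSIX): _modname_to_file('out', 'pkg.mod', '.c')
+# creates the directory 'out/pkg' if needed and returns
+# ('out/pkg/mod.c', ['pkg', 'mod.c']).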
    +
    +
    +# Aaargh.  Distutils is not tested at all for the purpose of compiling
    +# DLLs that are not extension modules.  Here are some hacks to work
    +# around that, in the _patch_for_*() functions...
    +
    +def _patch_meth(patchlist, cls, name, new_meth):
    +    old = getattr(cls, name)
    +    patchlist.append((cls, name, old))
    +    setattr(cls, name, new_meth)
    +    return old
    +
    +def _unpatch_meths(patchlist):
    +    for cls, name, old_meth in reversed(patchlist):
    +        setattr(cls, name, old_meth)
    +
    +def _patch_for_embedding(patchlist):
    +    if sys.platform == 'win32':
    +        # we must not remove the manifest when building for embedding!
    +        from distutils.msvc9compiler import MSVCCompiler
    +        _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref',
    +                    lambda self, manifest_file: manifest_file)
    +
    +    if sys.platform == 'darwin':
    +        # we must not make a '-bundle', but a '-dynamiclib' instead
    +        from distutils.ccompiler import CCompiler
    +        def my_link_shared_object(self, *args, **kwds):
    +            if '-bundle' in self.linker_so:
    +                self.linker_so = list(self.linker_so)
    +                i = self.linker_so.index('-bundle')
    +                self.linker_so[i] = '-dynamiclib'
    +            return old_link_shared_object(self, *args, **kwds)
    +        old_link_shared_object = _patch_meth(patchlist, CCompiler,
    +                                             'link_shared_object',
    +                                             my_link_shared_object)
    +
    +def _patch_for_target(patchlist, target):
    +    from distutils.command.build_ext import build_ext
    +    # if 'target' is different from '*', we need to patch some internal
    +    # method to just return this 'target' value, instead of having it
    +    # built from module_name
    +    if target.endswith('.*'):
    +        target = target[:-2]
    +        if sys.platform == 'win32':
    +            target += '.dll'
    +        elif sys.platform == 'darwin':
    +            target += '.dylib'
    +        else:
    +            target += '.so'
    +    _patch_meth(patchlist, build_ext, 'get_ext_filename',
    +                lambda self, ext_name: target)
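+
+# For example, target='foo.*' becomes 'foo.dll' on Windows, 'foo.dylib'
+# on macOS and 'foo.so' elsewhere, and get_ext_filename() is patched to
+# return that name unchanged.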
    +
    +
    +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
    +              c_file=None, source_extension='.c', extradir=None,
    +              compiler_verbose=1, target=None, debug=None, **kwds):
    +    if not isinstance(module_name, str):
    +        module_name = module_name.encode('ascii')
    +    if ffi._windows_unicode:
    +        ffi._apply_windows_unicode(kwds)
    +    if preamble is not None:
    +        embedding = (ffi._embedding is not None)
    +        if embedding:
    +            ffi._apply_embedding_fix(kwds)
    +        if c_file is None:
    +            c_file, parts = _modname_to_file(tmpdir, module_name,
    +                                             source_extension)
    +            if extradir:
    +                parts = [extradir] + parts
    +            ext_c_file = os.path.join(*parts)
    +        else:
    +            ext_c_file = c_file
    +        #
    +        if target is None:
    +            if embedding:
    +                target = '%s.*' % module_name
    +            else:
    +                target = '*'
    +        #
    +        ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
    +        updated = make_c_source(ffi, module_name, preamble, c_file,
    +                                verbose=compiler_verbose)
    +        if call_c_compiler:
    +            patchlist = []
    +            cwd = os.getcwd()
    +            try:
    +                if embedding:
    +                    _patch_for_embedding(patchlist)
    +                if target != '*':
    +                    _patch_for_target(patchlist, target)
    +                if compiler_verbose:
    +                    if tmpdir == '.':
    +                        msg = 'the current directory is'
    +                    else:
    +                        msg = 'setting the current directory to'
    +                    print('%s %r' % (msg, os.path.abspath(tmpdir)))
    +                os.chdir(tmpdir)
    +                outputfilename = ffiplatform.compile('.', ext,
    +                                                     compiler_verbose, debug)
    +            finally:
    +                os.chdir(cwd)
    +                _unpatch_meths(patchlist)
    +            return outputfilename
    +        else:
    +            return ext, updated
    +    else:
    +        if c_file is None:
    +            c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
    +        updated = make_py_source(ffi, module_name, c_file,
    +                                 verbose=compiler_verbose)
    +        if call_c_compiler:
    +            return c_file
    +        else:
    +            return None, updated
    +
    diff --git a/server/www/packages/packages-windows/x86/cffi/setuptools_ext.py b/server/www/packages/packages-windows/x86/cffi/setuptools_ext.py
    new file mode 100644
    index 0000000..b380f86
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/setuptools_ext.py
    @@ -0,0 +1,217 @@
    +import os
    +import sys
    +
    +try:
    +    basestring
    +except NameError:
    +    # Python 3.x
    +    basestring = str
    +
    +def error(msg):
    +    from distutils.errors import DistutilsSetupError
    +    raise DistutilsSetupError(msg)
    +
    +
    +def execfile(filename, glob):
    +    # We use execfile() (here rewritten for Python 3) instead of
    +    # __import__() to load the build script.  The problem with
    +    # a normal import is that in some packages, the intermediate
    +    # __init__.py files may already try to import the file that
    +    # we are generating.
    +    with open(filename) as f:
    +        src = f.read()
    +    src += '\n'      # Python 2.6 compatibility
    +    code = compile(src, filename, 'exec')
    +    exec(code, glob, glob)
    +
    +
    +def add_cffi_module(dist, mod_spec):
    +    from cffi.api import FFI
    +
    +    if not isinstance(mod_spec, basestring):
    +        error("argument to 'cffi_modules=...' must be a str or a list of str,"
    +              " not %r" % (type(mod_spec).__name__,))
    +    mod_spec = str(mod_spec)
    +    try:
    +        build_file_name, ffi_var_name = mod_spec.split(':')
    +    except ValueError:
    +        error("%r must be of the form 'path/build.py:ffi_variable'" %
    +              (mod_spec,))
    +    if not os.path.exists(build_file_name):
    +        ext = ''
    +        rewritten = build_file_name.replace('.', '/') + '.py'
    +        if os.path.exists(rewritten):
    +            ext = ' (rewrite cffi_modules to [%r])' % (
    +                rewritten + ':' + ffi_var_name,)
    +        error("%r does not name an existing file%s" % (build_file_name, ext))
    +
    +    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    +    execfile(build_file_name, mod_vars)
    +
    +    try:
    +        ffi = mod_vars[ffi_var_name]
    +    except KeyError:
    +        error("%r: object %r not found in module" % (mod_spec,
    +                                                     ffi_var_name))
    +    if not isinstance(ffi, FFI):
    +        ffi = ffi()      # maybe it's a function instead of directly an ffi
    +    if not isinstance(ffi, FFI):
    +        error("%r is not an FFI instance (got %r)" % (mod_spec,
    +                                                      type(ffi).__name__))
    +    if not hasattr(ffi, '_assigned_source'):
    +        error("%r: the set_source() method was not called" % (mod_spec,))
    +    module_name, source, source_extension, kwds = ffi._assigned_source
    +    if ffi._windows_unicode:
    +        kwds = kwds.copy()
    +        ffi._apply_windows_unicode(kwds)
    +
    +    if source is None:
    +        _add_py_module(dist, ffi, module_name)
    +    else:
    +        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
    +
    +def _set_py_limited_api(Extension, kwds):
    +    """
    +    Add py_limited_api to kwds if setuptools >= 26 is in use.
    +    Do not alter the setting if it already exists.
    +    Setuptools takes care of ignoring the flag on Python 2 and PyPy.
    +
    +    CPython itself should ignore the flag in a debugging version
    +    (by not listing .abi3.so in the extensions it supports), but
    +    it doesn't so far, creating troubles.  That's why we check
    +    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
    +    of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
    +
    +    On Windows, with CPython <= 3.4, it's better not to use py_limited_api
    +    because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
    +    For now we'll skip py_limited_api on all Windows versions to avoid an
    +    inconsistent mess.
    +    """
    +    if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
    +            and sys.platform != 'win32'):
    +        import setuptools
    +        try:
    +            setuptools_major_version = int(setuptools.__version__.partition('.')[0])
    +            if setuptools_major_version >= 26:
    +                kwds['py_limited_api'] = True
    +        except ValueError:  # certain development versions of setuptools
    +            # If we don't know the version number of setuptools, we
    +            # try to set 'py_limited_api' anyway.  At worst, we get a
    +            # warning.
    +            kwds['py_limited_api'] = True
    +    return kwds
    +
    +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    +    from distutils.core import Extension
    +    # We are a setuptools extension. Need this build_ext for py_limited_api.
    +    from setuptools.command.build_ext import build_ext
    +    from distutils.dir_util import mkpath
    +    from distutils import log
    +    from cffi import recompiler
    +
    +    allsources = ['$PLACEHOLDER']
    +    allsources.extend(kwds.pop('sources', []))
    +    kwds = _set_py_limited_api(Extension, kwds)
    +    ext = Extension(name=module_name, sources=allsources, **kwds)
    +
    +    def make_mod(tmpdir, pre_run=None):
    +        c_file = os.path.join(tmpdir, module_name + source_extension)
    +        log.info("generating cffi module %r" % c_file)
    +        mkpath(tmpdir)
    +        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
    +        # arguments just before we turn the ffi into C code.  To use it,
    +        # subclass the 'distutils.command.build_ext.build_ext' class and
    +        # add a method 'def pre_run(self, ext, ffi)'.
    +        if pre_run is not None:
    +            pre_run(ext, ffi)
    +        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
    +        if not updated:
    +            log.info("already up-to-date")
    +        return c_file
    +
    +    if dist.ext_modules is None:
    +        dist.ext_modules = []
    +    dist.ext_modules.append(ext)
    +
    +    base_class = dist.cmdclass.get('build_ext', build_ext)
    +    class build_ext_make_mod(base_class):
    +        def run(self):
    +            if ext.sources[0] == '$PLACEHOLDER':
    +                pre_run = getattr(self, 'pre_run', None)
    +                ext.sources[0] = make_mod(self.build_temp, pre_run)
    +            base_class.run(self)
    +    dist.cmdclass['build_ext'] = build_ext_make_mod
    +    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    +    # classes.  Even in this case the 'build_ext' command should be
    +    # run once; but just in case, the logic above does nothing if
    +    # called again.
    +
    +
    +def _add_py_module(dist, ffi, module_name):
    +    from distutils.dir_util import mkpath
    +    from setuptools.command.build_py import build_py
    +    from setuptools.command.build_ext import build_ext
    +    from distutils import log
    +    from cffi import recompiler
    +
    +    def generate_mod(py_file):
    +        log.info("generating cffi module %r" % py_file)
    +        mkpath(os.path.dirname(py_file))
    +        updated = recompiler.make_py_source(ffi, module_name, py_file)
    +        if not updated:
    +            log.info("already up-to-date")
    +
    +    base_class = dist.cmdclass.get('build_py', build_py)
    +    class build_py_make_mod(base_class):
    +        def run(self):
    +            base_class.run(self)
    +            module_path = module_name.split('.')
    +            module_path[-1] += '.py'
    +            generate_mod(os.path.join(self.build_lib, *module_path))
    +        def get_source_files(self):
    +            # This is called from 'setup.py sdist' only.  Exclude
+            # the generated .py module in this case.
    +            saved_py_modules = self.py_modules
    +            try:
    +                if saved_py_modules:
    +                    self.py_modules = [m for m in saved_py_modules
    +                                         if m != module_name]
    +                return base_class.get_source_files(self)
    +            finally:
    +                self.py_modules = saved_py_modules
    +    dist.cmdclass['build_py'] = build_py_make_mod
    +
    +    # distutils and setuptools have no notion I could find of a
    +    # generated python module.  If we don't add module_name to
    +    # dist.py_modules, then things mostly work but there are some
    +    # combination of options (--root and --record) that will miss
    +    # the module.  So we add it here, which gives a few apparently
    +    # harmless warnings about not finding the file outside the
    +    # build directory.
    +    # Then we need to hack more in get_source_files(); see above.
    +    if dist.py_modules is None:
    +        dist.py_modules = []
    +    dist.py_modules.append(module_name)
    +
    +    # the following is only for "build_ext -i"
    +    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    +    class build_ext_make_mod(base_class_2):
    +        def run(self):
    +            base_class_2.run(self)
    +            if self.inplace:
    +                # from get_ext_fullpath() in distutils/command/build_ext.py
    +                module_path = module_name.split('.')
    +                package = '.'.join(module_path[:-1])
    +                build_py = self.get_finalized_command('build_py')
    +                package_dir = build_py.get_package_dir(package)
    +                file_name = module_path[-1] + '.py'
    +                generate_mod(os.path.join(package_dir, file_name))
    +    dist.cmdclass['build_ext'] = build_ext_make_mod
    +
    +def cffi_modules(dist, attr, value):
    +    assert attr == 'cffi_modules'
    +    if isinstance(value, basestring):
    +        value = [value]
    +
    +    for cffi_module in value:
    +        add_cffi_module(dist, cffi_module)
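+
+# Typical usage (a sketch): this function is registered as a setuptools
+# entry point, so a setup.py can simply say
+#
+#   setup(..., cffi_modules=["package/foo_build.py:ffibuilder"], ...)
+#
+# and each "path:ffi_variable" spec is routed through add_cffi_module().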
    diff --git a/server/www/packages/packages-windows/x86/cffi/vengine_cpy.py b/server/www/packages/packages-windows/x86/cffi/vengine_cpy.py
    new file mode 100644
    index 0000000..ca50ac4
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/vengine_cpy.py
    @@ -0,0 +1,1076 @@
    +#
    +# DEPRECATED: implementation for ffi.verify()
    +#
    +import sys, imp
    +from . import model
    +from .error import VerificationError
    +
    +
    +class VCPythonEngine(object):
    +    _class_key = 'x'
    +    _gen_python_module = True
    +
    +    def __init__(self, verifier):
    +        self.verifier = verifier
    +        self.ffi = verifier.ffi
    +        self._struct_pending_verification = {}
    +        self._types_of_builtin_functions = {}
    +
    +    def patch_extension_kwds(self, kwds):
    +        pass
    +
    +    def find_module(self, module_name, path, so_suffixes):
    +        try:
    +            f, filename, descr = imp.find_module(module_name, path)
    +        except ImportError:
    +            return None
    +        if f is not None:
    +            f.close()
    +        # Note that after a setuptools installation, there are both .py
    +        # and .so files with the same basename.  The code here relies on
    +        # imp.find_module() locating the .so in priority.
    +        if descr[0] not in so_suffixes:
    +            return None
    +        return filename
    +
    +    def collect_types(self):
    +        self._typesdict = {}
    +        self._generate("collecttype")
    +
    +    def _prnt(self, what=''):
    +        self._f.write(what + '\n')
    +
    +    def _gettypenum(self, type):
    +        # a KeyError here is a bug.  please report it! :-)
    +        return self._typesdict[type]
    +
    +    def _do_collect_type(self, tp):
    +        if ((not isinstance(tp, model.PrimitiveType)
    +             or tp.name == 'long double')
    +                and tp not in self._typesdict):
    +            num = len(self._typesdict)
    +            self._typesdict[tp] = num
    +
    +    def write_source_to_f(self):
    +        self.collect_types()
    +        #
    +        # The new module will have a _cffi_setup() function that receives
    +        # objects from the ffi world, and that calls some setup code in
    +        # the module.  This setup code is split in several independent
    +        # functions, e.g. one per constant.  The functions are "chained"
    +        # by ending in a tail call to each other.
    +        #
    +        # This is further split in two chained lists, depending on if we
    +        # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects.  This is needed because we
    +        # need the values of the enum constants in order to build the
+        # <ctype> objects that we may have to pass to _cffi_setup().
    +        #
+        # The following two 'chained_list_constants' items contain
    +        # the head of these two chained lists, as a string that gives the
    +        # call to do, if any.
    +        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
    +        #
    +        prnt = self._prnt
+        # first paste a standard set of lines that are mostly '#define'
    +        prnt(cffimod_header)
    +        prnt()
    +        # then paste the C source given by the user, verbatim.
    +        prnt(self.verifier.preamble)
    +        prnt()
    +        #
    +        # call generate_cpy_xxx_decl(), for every xxx found from
    +        # ffi._parser._declarations.  This generates all the functions.
    +        self._generate("decl")
    +        #
    +        # implement the function _cffi_setup_custom() as calling the
    +        # head of the chained list.
    +        self._generate_setup_custom()
    +        prnt()
    +        #
    +        # produce the method table, including the entries for the
    +        # generated Python->C function wrappers, which are done
    +        # by generate_cpy_function_method().
    +        prnt('static PyMethodDef _cffi_methods[] = {')
    +        self._generate("method")
    +        prnt('  {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
    +        prnt('  {NULL, NULL, 0, NULL}    /* Sentinel */')
    +        prnt('};')
    +        prnt()
    +        #
    +        # standard init.
    +        modname = self.verifier.get_module_name()
    +        constants = self._chained_list_constants[False]
    +        prnt('#if PY_MAJOR_VERSION >= 3')
    +        prnt()
    +        prnt('static struct PyModuleDef _cffi_module_def = {')
    +        prnt('  PyModuleDef_HEAD_INIT,')
    +        prnt('  "%s",' % modname)
    +        prnt('  NULL,')
    +        prnt('  -1,')
    +        prnt('  _cffi_methods,')
    +        prnt('  NULL, NULL, NULL, NULL')
    +        prnt('};')
    +        prnt()
    +        prnt('PyMODINIT_FUNC')
    +        prnt('PyInit_%s(void)' % modname)
    +        prnt('{')
    +        prnt('  PyObject *lib;')
    +        prnt('  lib = PyModule_Create(&_cffi_module_def);')
    +        prnt('  if (lib == NULL)')
    +        prnt('    return NULL;')
    +        prnt('  if (%s < 0 || _cffi_init() < 0) {' % (constants,))
    +        prnt('    Py_DECREF(lib);')
    +        prnt('    return NULL;')
    +        prnt('  }')
    +        prnt('  return lib;')
    +        prnt('}')
    +        prnt()
    +        prnt('#else')
    +        prnt()
    +        prnt('PyMODINIT_FUNC')
    +        prnt('init%s(void)' % modname)
    +        prnt('{')
    +        prnt('  PyObject *lib;')
    +        prnt('  lib = Py_InitModule("%s", _cffi_methods);' % modname)
    +        prnt('  if (lib == NULL)')
    +        prnt('    return;')
    +        prnt('  if (%s < 0 || _cffi_init() < 0)' % (constants,))
    +        prnt('    return;')
    +        prnt('  return;')
    +        prnt('}')
    +        prnt()
    +        prnt('#endif')
    +
    +    def load_library(self, flags=None):
    +        # XXX review all usages of 'self' here!
    +        # import it as a new extension module
    +        imp.acquire_lock()
    +        try:
    +            if hasattr(sys, "getdlopenflags"):
    +                previous_flags = sys.getdlopenflags()
    +            try:
    +                if hasattr(sys, "setdlopenflags") and flags is not None:
    +                    sys.setdlopenflags(flags)
    +                module = imp.load_dynamic(self.verifier.get_module_name(),
    +                                          self.verifier.modulefilename)
    +            except ImportError as e:
    +                error = "importing %r: %s" % (self.verifier.modulefilename, e)
    +                raise VerificationError(error)
    +            finally:
    +                if hasattr(sys, "setdlopenflags"):
    +                    sys.setdlopenflags(previous_flags)
    +        finally:
    +            imp.release_lock()
    +        #
    +        # call loading_cpy_struct() to get the struct layout inferred by
    +        # the C compiler
    +        self._load(module, 'loading')
    +        #
+        # the C code will need the <ctype> objects.  Collect them in
    +        # order in a list.
    +        revmapping = dict([(value, key)
    +                           for (key, value) in self._typesdict.items()])
    +        lst = [revmapping[i] for i in range(len(revmapping))]
    +        lst = list(map(self.ffi._get_cached_btype, lst))
    +        #
    +        # build the FFILibrary class and instance and call _cffi_setup().
    +        # this will set up some fields like '_cffi_types', and only then
    +        # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as <cdata> if they are
    +        # pointers, and store them as attributes on the 'library' object.
    +        class FFILibrary(object):
    +            _cffi_python_module = module
    +            _cffi_ffi = self.ffi
    +            _cffi_dir = []
    +            def __dir__(self):
    +                return FFILibrary._cffi_dir + list(self.__dict__)
    +        library = FFILibrary()
    +        if module._cffi_setup(lst, VerificationError, library):
    +            import warnings
    +            warnings.warn("reimporting %r might overwrite older definitions"
    +                          % (self.verifier.get_module_name()))
    +        #
    +        # finally, call the loaded_cpy_xxx() functions.  This will perform
    +        # the final adjustments, like copying the Python->C wrapper
    +        # functions from the module to the 'library' object, and setting
    +        # up the FFILibrary class with properties for the global C variables.
    +        self._load(module, 'loaded', library=library)
    +        module._cffi_original_ffi = self.ffi
    +        module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
    +        return library
    +
    +    def _get_declarations(self):
    +        lst = [(key, tp) for (key, (tp, qual)) in
    +                                self.ffi._parser._declarations.items()]
    +        lst.sort()
    +        return lst
    +
    +    def _generate(self, step_name):
    +        for name, tp in self._get_declarations():
    +            kind, realname = name.split(' ', 1)
    +            try:
    +                method = getattr(self, '_generate_cpy_%s_%s' % (kind,
    +                                                                step_name))
    +            except AttributeError:
    +                raise VerificationError(
    +                    "not implemented in verify(): %r" % name)
    +            try:
    +                method(tp, realname)
    +            except Exception as e:
    +                model.attach_exception_info(e, name)
    +                raise
    +
    +    def _load(self, module, step_name, **kwds):
    +        for name, tp in self._get_declarations():
    +            kind, realname = name.split(' ', 1)
    +            method = getattr(self, '_%s_cpy_%s' % (step_name, kind))
    +            try:
    +                method(tp, realname, module, **kwds)
    +            except Exception as e:
    +                model.attach_exception_info(e, name)
    +                raise
    +
    +    def _generate_nothing(self, tp, name):
    +        pass
    +
    +    def _loaded_noop(self, tp, name, module, **kwds):
    +        pass
    +
    +    # ----------
    +
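+    # Emits C code converting the PyObject 'fromvar' into the C variable
+    # 'tovar' of type 'tp', executing 'errcode' on failure: integers go
+    # through _cffi_to_c_int, pointers and arrays through the ptr-or-array
+    # path below, and by-value structs/unions through _cffi_to_c.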
    +    def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
    +        extraarg = ''
    +        if isinstance(tp, model.PrimitiveType):
    +            if tp.is_integer_type() and tp.name != '_Bool':
    +                converter = '_cffi_to_c_int'
    +                extraarg = ', %s' % tp.name
    +            else:
    +                converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
    +                                                   tp.name.replace(' ', '_'))
    +            errvalue = '-1'
    +        #
    +        elif isinstance(tp, model.PointerType):
    +            self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
    +                                                    tovar, errcode)
    +            return
    +        #
    +        elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
    +            # a struct (not a struct pointer) as a function argument
    +            self._prnt('  if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
    +                      % (tovar, self._gettypenum(tp), fromvar))
    +            self._prnt('    %s;' % errcode)
    +            return
    +        #
    +        elif isinstance(tp, model.FunctionPtrType):
    +            converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
    +            extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
    +            errvalue = 'NULL'
    +        #
    +        else:
    +            raise NotImplementedError(tp)
    +        #
    +        self._prnt('  %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
    +        self._prnt('  if (%s == (%s)%s && PyErr_Occurred())' % (
    +            tovar, tp.get_c_name(''), errvalue))
    +        self._prnt('    %s;' % errcode)
    +
    +    def _extra_local_variables(self, tp, localvars, freelines):
    +        if isinstance(tp, model.PointerType):
    +            localvars.add('Py_ssize_t datasize')
    +            localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
    +            freelines.add('if (large_args_free != NULL)'
    +                          ' _cffi_free_array_arguments(large_args_free);')
    +
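+    # Pointer/array arguments of at most 640 bytes are copied onto the C
+    # stack with alloca(); larger ones are heap-allocated by
+    # _cffi_convert_array_argument() and released after the call via the
+    # 'large_args_free' chain declared in _extra_local_variables().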
    +    def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
    +        self._prnt('  datasize = _cffi_prepare_pointer_call_argument(')
    +        self._prnt('      _cffi_type(%d), %s, (char **)&%s);' % (
    +            self._gettypenum(tp), fromvar, tovar))
    +        self._prnt('  if (datasize != 0) {')
    +        self._prnt('    %s = ((size_t)datasize) <= 640 ? '
    +                   'alloca((size_t)datasize) : NULL;' % (tovar,))
    +        self._prnt('    if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
    +                   '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
    +        self._prnt('            datasize, &large_args_free) < 0)')
    +        self._prnt('      %s;' % errcode)
    +        self._prnt('  }')
    +
    +    def _convert_expr_from_c(self, tp, var, context):
    +        if isinstance(tp, model.PrimitiveType):
    +            if tp.is_integer_type() and tp.name != '_Bool':
    +                return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
    +            elif tp.name != 'long double':
    +                return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
    +            else:
    +                return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
    +                    var, self._gettypenum(tp))
    +        elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
    +            return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        elif isinstance(tp, model.ArrayType):
    +            return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(model.PointerType(tp.item)))
    +        elif isinstance(tp, model.StructOrUnion):
    +            if tp.fldnames is None:
    +                raise TypeError("'%s' is used as %s, but is opaque" % (
    +                    tp._get_c_name(), context))
    +            return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        elif isinstance(tp, model.EnumType):
    +            return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
    +                var, self._gettypenum(tp))
    +        else:
    +            raise NotImplementedError(tp)
    +
    +    # ----------
    +    # typedefs: generates no code so far
    +
    +    _generate_cpy_typedef_collecttype = _generate_nothing
    +    _generate_cpy_typedef_decl   = _generate_nothing
    +    _generate_cpy_typedef_method = _generate_nothing
    +    _loading_cpy_typedef         = _loaded_noop
    +    _loaded_cpy_typedef          = _loaded_noop
    +
    +    # ----------
    +    # function declarations
    +
    +    def _generate_cpy_function_collecttype(self, tp, name):
    +        assert isinstance(tp, model.FunctionPtrType)
    +        if tp.ellipsis:
    +            self._do_collect_type(tp)
    +        else:
    +            # don't call _do_collect_type(tp) in this common case,
    +            # otherwise test_autofilled_struct_as_argument fails
    +            for type in tp.args:
    +                self._do_collect_type(type)
    +            self._do_collect_type(tp.result)
    +
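+    # Writes the CPython wrapper '_cffi_f_<name>' for a declared function:
+    # it parses the Python arguments, converts them to C, calls the real
+    # function with the GIL released (Py_BEGIN/END_ALLOW_THREADS), and
+    # converts the result back to a Python object.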
    +    def _generate_cpy_function_decl(self, tp, name):
    +        assert isinstance(tp, model.FunctionPtrType)
    +        if tp.ellipsis:
    +            # cannot support vararg functions better than this: check for its
    +            # exact type (including the fixed arguments), and build it as a
    +            # constant function pointer (no CPython wrapper)
    +            self._generate_cpy_const(False, name, tp)
    +            return
    +        prnt = self._prnt
    +        numargs = len(tp.args)
    +        if numargs == 0:
    +            argname = 'noarg'
    +        elif numargs == 1:
    +            argname = 'arg0'
    +        else:
    +            argname = 'args'
    +        prnt('static PyObject *')
    +        prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
    +        prnt('{')
    +        #
    +        context = 'argument of %s' % name
    +        for i, type in enumerate(tp.args):
    +            prnt('  %s;' % type.get_c_name(' x%d' % i, context))
    +        #
    +        localvars = set()
    +        freelines = set()
    +        for type in tp.args:
    +            self._extra_local_variables(type, localvars, freelines)
    +        for decl in sorted(localvars):
    +            prnt('  %s;' % (decl,))
    +        #
    +        if not isinstance(tp.result, model.VoidType):
    +            result_code = 'result = '
    +            context = 'result of %s' % name
    +            prnt('  %s;' % tp.result.get_c_name(' result', context))
    +            prnt('  PyObject *pyresult;')
    +        else:
    +            result_code = ''
    +        #
    +        if len(tp.args) > 1:
    +            rng = range(len(tp.args))
    +            for i in rng:
    +                prnt('  PyObject *arg%d;' % i)
    +            prnt()
    +            prnt('  if (!PyArg_ParseTuple(args, "%s:%s", %s))' % (
    +                'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng])))
    +            prnt('    return NULL;')
    +        prnt()
    +        #
    +        for i, type in enumerate(tp.args):
    +            self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
    +                                       'return NULL')
    +            prnt()
    +        #
    +        prnt('  Py_BEGIN_ALLOW_THREADS')
    +        prnt('  _cffi_restore_errno();')
    +        prnt('  { %s%s(%s); }' % (
    +            result_code, name,
    +            ', '.join(['x%d' % i for i in range(len(tp.args))])))
    +        prnt('  _cffi_save_errno();')
    +        prnt('  Py_END_ALLOW_THREADS')
    +        prnt()
    +        #
    +        prnt('  (void)self; /* unused */')
    +        if numargs == 0:
    +            prnt('  (void)noarg; /* unused */')
    +        if result_code:
    +            prnt('  pyresult = %s;' %
    +                 self._convert_expr_from_c(tp.result, 'result', 'result type'))
    +            for freeline in freelines:
    +                prnt('  ' + freeline)
    +            prnt('  return pyresult;')
    +        else:
    +            for freeline in freelines:
    +                prnt('  ' + freeline)
    +            prnt('  Py_INCREF(Py_None);')
    +            prnt('  return Py_None;')
    +        prnt('}')
    +        prnt()
    +
    +    def _generate_cpy_function_method(self, tp, name):
    +        if tp.ellipsis:
    +            return
    +        numargs = len(tp.args)
    +        if numargs == 0:
    +            meth = 'METH_NOARGS'
    +        elif numargs == 1:
    +            meth = 'METH_O'
    +        else:
    +            meth = 'METH_VARARGS'
    +        self._prnt('  {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
    +
    +    _loading_cpy_function = _loaded_noop
    +
    +    def _loaded_cpy_function(self, tp, name, module, library):
    +        if tp.ellipsis:
    +            return
    +        func = getattr(module, name)
    +        setattr(library, name, func)
    +        self._types_of_builtin_functions[func] = tp
    +
    +    # ----------
    +    # named structs
    +
    +    _generate_cpy_struct_collecttype = _generate_nothing
    +    def _generate_cpy_struct_decl(self, tp, name):
    +        assert name == tp.name
    +        self._generate_struct_or_union_decl(tp, 'struct', name)
    +    def _generate_cpy_struct_method(self, tp, name):
    +        self._generate_struct_or_union_method(tp, 'struct', name)
    +    def _loading_cpy_struct(self, tp, name, module):
    +        self._loading_struct_or_union(tp, 'struct', name, module)
    +    def _loaded_cpy_struct(self, tp, name, module, **kwds):
    +        self._loaded_struct_or_union(tp)
    +
    +    _generate_cpy_union_collecttype = _generate_nothing
    +    def _generate_cpy_union_decl(self, tp, name):
    +        assert name == tp.name
    +        self._generate_struct_or_union_decl(tp, 'union', name)
    +    def _generate_cpy_union_method(self, tp, name):
    +        self._generate_struct_or_union_method(tp, 'union', name)
    +    def _loading_cpy_union(self, tp, name, module):
    +        self._loading_struct_or_union(tp, 'union', name, module)
    +    def _loaded_cpy_union(self, tp, name, module, **kwds):
    +        self._loaded_struct_or_union(tp)
    +
    +    def _generate_struct_or_union_decl(self, tp, prefix, name):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
    +        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    +        cname = ('%s %s' % (prefix, name)).strip()
    +        #
    +        prnt = self._prnt
    +        prnt('static void %s(%s *p)' % (checkfuncname, cname))
    +        prnt('{')
    +        prnt('  /* only to generate compile-time warnings or errors */')
    +        prnt('  (void)p;')
    +        for fname, ftype, fbitsize, fqual in tp.enumfields():
    +            if (isinstance(ftype, model.PrimitiveType)
    +                and ftype.is_integer_type()) or fbitsize >= 0:
    +                # accept all integers, but complain on float or double
    +                prnt('  (void)((p->%s) << 1);' % fname)
    +            else:
    +                # only accept exactly the type declared.
    +                try:
    +                    prnt('  { %s = &p->%s; (void)tmp; }' % (
    +                        ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
    +                        fname))
    +                except VerificationError as e:
    +                    prnt('  /* %s */' % str(e))   # cannot verify it, ignore
    +        prnt('}')
    +        prnt('static PyObject *')
    +        prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,))
    +        prnt('{')
    +        prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
    +        prnt('  static Py_ssize_t nums[] = {')
    +        prnt('    sizeof(%s),' % cname)
    +        prnt('    offsetof(struct _cffi_aligncheck, y),')
    +        for fname, ftype, fbitsize, fqual in tp.enumfields():
    +            if fbitsize >= 0:
    +                continue      # xxx ignore fbitsize for now
    +            prnt('    offsetof(%s, %s),' % (cname, fname))
    +            if isinstance(ftype, model.ArrayType) and ftype.length is None:
    +                prnt('    0,  /* %s */' % ftype._get_c_name())
    +            else:
    +                prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
    +        prnt('    -1')
    +        prnt('  };')
    +        prnt('  (void)self; /* unused */')
    +        prnt('  (void)noarg; /* unused */')
    +        prnt('  return _cffi_get_struct_layout(nums);')
    +        prnt('  /* the next line is not executed, but compiled */')
    +        prnt('  %s(0);' % (checkfuncname,))
    +        prnt('}')
    +        prnt()
    +
    +    def _generate_struct_or_union_method(self, tp, prefix, name):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    +        self._prnt('  {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
    +                                                         layoutfuncname))
    +
    +    def _loading_struct_or_union(self, tp, prefix, name, module):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    +        #
    +        function = getattr(module, layoutfuncname)
    +        layout = function()
    +        if isinstance(tp, model.StructOrUnion) and tp.partial:
    +            # use the function()'s sizes and offsets to guide the
    +            # layout of the struct
    +            totalsize = layout[0]
    +            totalalignment = layout[1]
    +            fieldofs = layout[2::2]
    +            fieldsize = layout[3::2]
    +            tp.force_flatten()
    +            assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
    +            tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
    +        else:
    +            cname = ('%s %s' % (prefix, name)).strip()
    +            self._struct_pending_verification[tp] = layout, cname
    +
    +    def _loaded_struct_or_union(self, tp):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        self.ffi._get_cached_btype(tp)   # force 'fixedlayout' to be considered
    +
    +        if tp in self._struct_pending_verification:
    +            # check that the layout sizes and offsets match the real ones
    +            def check(realvalue, expectedvalue, msg):
    +                if realvalue != expectedvalue:
    +                    raise VerificationError(
    +                        "%s (we have %d, but C compiler says %d)"
    +                        % (msg, expectedvalue, realvalue))
    +            ffi = self.ffi
    +            BStruct = ffi._get_cached_btype(tp)
    +            layout, cname = self._struct_pending_verification.pop(tp)
    +            check(layout[0], ffi.sizeof(BStruct), "wrong total size")
    +            check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
    +            i = 2
    +            for fname, ftype, fbitsize, fqual in tp.enumfields():
    +                if fbitsize >= 0:
    +                    continue        # xxx ignore fbitsize for now
    +                check(layout[i], ffi.offsetof(BStruct, fname),
    +                      "wrong offset for field %r" % (fname,))
    +                if layout[i+1] != 0:
    +                    BField = ffi._get_cached_btype(ftype)
    +                    check(layout[i+1], ffi.sizeof(BField),
    +                          "wrong size for field %r" % (fname,))
    +                i += 2
    +            assert i == len(layout)
    +
    +    # ----------
    +    # 'anonymous' declarations.  These are produced for anonymous structs
    +    # or unions; the 'name' is obtained by a typedef.
    +
    +    _generate_cpy_anonymous_collecttype = _generate_nothing
    +
    +    def _generate_cpy_anonymous_decl(self, tp, name):
    +        if isinstance(tp, model.EnumType):
    +            self._generate_cpy_enum_decl(tp, name, '')
    +        else:
    +            self._generate_struct_or_union_decl(tp, '', name)
    +
    +    def _generate_cpy_anonymous_method(self, tp, name):
    +        if not isinstance(tp, model.EnumType):
    +            self._generate_struct_or_union_method(tp, '', name)
    +
    +    def _loading_cpy_anonymous(self, tp, name, module):
    +        if isinstance(tp, model.EnumType):
    +            self._loading_cpy_enum(tp, name, module)
    +        else:
    +            self._loading_struct_or_union(tp, '', name, module)
    +
    +    def _loaded_cpy_anonymous(self, tp, name, module, **kwds):
    +        if isinstance(tp, model.EnumType):
    +            self._loaded_cpy_enum(tp, name, module, **kwds)
    +        else:
    +            self._loaded_struct_or_union(tp)
    +
    +    # ----------
    +    # constants, likely declared with '#define'
    +
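+    # Each generated '_cffi_<category>_<name>(lib)' sets one attribute on
+    # 'lib' and then tail-calls the previously generated one through
+    # _chained_list_constants, so invoking the head of the chain
+    # initializes every constant in turn.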
    +    def _generate_cpy_const(self, is_int, name, tp=None, category='const',
    +                            vartp=None, delayed=True, size_too=False,
    +                            check_value=None):
    +        prnt = self._prnt
    +        funcname = '_cffi_%s_%s' % (category, name)
    +        prnt('static int %s(PyObject *lib)' % funcname)
    +        prnt('{')
    +        prnt('  PyObject *o;')
    +        prnt('  int res;')
    +        if not is_int:
    +            prnt('  %s;' % (vartp or tp).get_c_name(' i', name))
    +        else:
    +            assert category == 'const'
    +        #
    +        if check_value is not None:
    +            self._check_int_constant_value(name, check_value)
    +        #
    +        if not is_int:
    +            if category == 'var':
    +                realexpr = '&' + name
    +            else:
    +                realexpr = name
    +            prnt('  i = (%s);' % (realexpr,))
    +            prnt('  o = %s;' % (self._convert_expr_from_c(tp, 'i',
    +                                                          'variable type'),))
    +            assert delayed
    +        else:
    +            prnt('  o = _cffi_from_c_int_const(%s);' % name)
    +        prnt('  if (o == NULL)')
    +        prnt('    return -1;')
    +        if size_too:
    +            prnt('  {')
    +            prnt('    PyObject *o1 = o;')
    +            prnt('    o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));'
    +                 % (name,))
    +            prnt('    Py_DECREF(o1);')
    +            prnt('    if (o == NULL)')
    +            prnt('      return -1;')
    +            prnt('  }')
    +        prnt('  res = PyObject_SetAttrString(lib, "%s", o);' % name)
    +        prnt('  Py_DECREF(o);')
    +        prnt('  if (res < 0)')
    +        prnt('    return -1;')
    +        prnt('  return %s;' % self._chained_list_constants[delayed])
    +        self._chained_list_constants[delayed] = funcname + '(lib)'
    +        prnt('}')
    +        prnt()
    +
    +    def _generate_cpy_constant_collecttype(self, tp, name):
    +        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    +        if not is_int:
    +            self._do_collect_type(tp)
    +
    +    def _generate_cpy_constant_decl(self, tp, name):
    +        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    +        self._generate_cpy_const(is_int, name, tp)
    +
    +    _generate_cpy_constant_method = _generate_nothing
    +    _loading_cpy_constant = _loaded_noop
    +    _loaded_cpy_constant  = _loaded_noop
    +
    +    # ----------
    +    # enums
    +
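+    # Emits a run-time check that the C-level value of 'name' equals
+    # 'value'; the test is split on the sign of 'value' so that constants
+    # above LONG_MAX still compare correctly as unsigned long.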
    +    def _check_int_constant_value(self, name, value, err_prefix=''):
    +        prnt = self._prnt
    +        if value <= 0:
    +            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
    +                name, name, value))
    +        else:
    +            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
    +                name, name, value))
    +        prnt('    char buf[64];')
    +        prnt('    if ((%s) <= 0)' % name)
    +        prnt('        snprintf(buf, 63, "%%ld", (long)(%s));' % name)
    +        prnt('    else')
    +        prnt('        snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
    +             name)
    +        prnt('    PyErr_Format(_cffi_VerificationError,')
    +        prnt('                 "%s%s has the real value %s, not %s",')
    +        prnt('                 "%s", "%s", buf, "%d");' % (
    +            err_prefix, name, value))
    +        prnt('    return -1;')
    +        prnt('  }')
    +
    +    def _enum_funcname(self, prefix, name):
    +        # "$enum_$1" => "___D_enum____D_1"
    +        name = name.replace('$', '___D_')
    +        return '_cffi_e_%s_%s' % (prefix, name)
    +
    +    def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
    +        if tp.partial:
    +            for enumerator in tp.enumerators:
    +                self._generate_cpy_const(True, enumerator, delayed=False)
    +            return
    +        #
    +        funcname = self._enum_funcname(prefix, name)
    +        prnt = self._prnt
    +        prnt('static int %s(PyObject *lib)' % funcname)
    +        prnt('{')
    +        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
    +            self._check_int_constant_value(enumerator, enumvalue,
    +                                           "enum %s: " % name)
    +        prnt('  return %s;' % self._chained_list_constants[True])
    +        self._chained_list_constants[True] = funcname + '(lib)'
    +        prnt('}')
    +        prnt()
    +
    +    _generate_cpy_enum_collecttype = _generate_nothing
    +    _generate_cpy_enum_method = _generate_nothing
    +
    +    def _loading_cpy_enum(self, tp, name, module):
    +        if tp.partial:
    +            enumvalues = [getattr(module, enumerator)
    +                          for enumerator in tp.enumerators]
    +            tp.enumvalues = tuple(enumvalues)
    +            tp.partial_resolved = True
    +
    +    def _loaded_cpy_enum(self, tp, name, module, library):
    +        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
    +            setattr(library, enumerator, enumvalue)
    +
    +    # ----------
    +    # macros: for now only for integers
    +
    +    def _generate_cpy_macro_decl(self, tp, name):
    +        if tp == '...':
    +            check_value = None
    +        else:
    +            check_value = tp     # an integer
    +        self._generate_cpy_const(True, name, check_value=check_value)
    +
    +    _generate_cpy_macro_collecttype = _generate_nothing
    +    _generate_cpy_macro_method = _generate_nothing
    +    _loading_cpy_macro = _loaded_noop
    +    _loaded_cpy_macro  = _loaded_noop
    +
    +    # ----------
    +    # global variables
    +
    +    def _generate_cpy_variable_collecttype(self, tp, name):
    +        if isinstance(tp, model.ArrayType):
    +            tp_ptr = model.PointerType(tp.item)
    +        else:
    +            tp_ptr = model.PointerType(tp)
    +        self._do_collect_type(tp_ptr)
    +
    +    def _generate_cpy_variable_decl(self, tp, name):
    +        if isinstance(tp, model.ArrayType):
    +            tp_ptr = model.PointerType(tp.item)
    +            self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
    +                                     size_too = (tp.length == '...'))
    +        else:
    +            tp_ptr = model.PointerType(tp)
    +            self._generate_cpy_const(False, name, tp_ptr, category='var')
    +
    +    _generate_cpy_variable_method = _generate_nothing
    +    _loading_cpy_variable = _loaded_noop
    +
    +    def _loaded_cpy_variable(self, tp, name, module, library):
    +        value = getattr(library, name)
    +        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
    +                                              # sense that "a=..." is forbidden
    +            if tp.length == '...':
    +                assert isinstance(value, tuple)
    +                (value, size) = value
    +                BItemType = self.ffi._get_cached_btype(tp.item)
    +                length, rest = divmod(size, self.ffi.sizeof(BItemType))
    +                if rest != 0:
    +                    raise VerificationError(
    +                        "bad size: %r does not seem to be an array of %s" %
    +                        (name, tp.item))
    +                tp = tp.resolve_length(length)
+            # 'value' is a <cdata 'type *'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
    +            if tp.length is not None:
    +                BArray = self.ffi._get_cached_btype(tp)
    +                value = self.ffi.cast(BArray, value)
    +                setattr(library, name, value)
    +            return
    +        # remove ptr= from the library instance, and replace
    +        # it by a property on the class, which reads/writes into ptr[0].
    +        ptr = value
    +        delattr(library, name)
    +        def getter(library):
    +            return ptr[0]
    +        def setter(library, value):
    +            ptr[0] = value
    +        setattr(type(library), name, property(getter, setter))
    +        type(library)._cffi_dir.append(name)
    +
    +    # ----------
    +
    +    def _generate_setup_custom(self):
    +        prnt = self._prnt
    +        prnt('static int _cffi_setup_custom(PyObject *lib)')
    +        prnt('{')
    +        prnt('  return %s;' % self._chained_list_constants[True])
    +        prnt('}')
    +
    +cffimod_header = r'''
+#include <Python.h>
+#include <stddef.h>
    +
    +/* this block of #ifs should be kept exactly identical between
    +   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
    +   and cffi/_cffi_include.h */
    +#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
    +# if _MSC_VER < 1600   /* MSVC < 2010 */
    +   typedef __int8 int8_t;
    +   typedef __int16 int16_t;
    +   typedef __int32 int32_t;
    +   typedef __int64 int64_t;
    +   typedef unsigned __int8 uint8_t;
    +   typedef unsigned __int16 uint16_t;
    +   typedef unsigned __int32 uint32_t;
    +   typedef unsigned __int64 uint64_t;
    +   typedef __int8 int_least8_t;
    +   typedef __int16 int_least16_t;
    +   typedef __int32 int_least32_t;
    +   typedef __int64 int_least64_t;
    +   typedef unsigned __int8 uint_least8_t;
    +   typedef unsigned __int16 uint_least16_t;
    +   typedef unsigned __int32 uint_least32_t;
    +   typedef unsigned __int64 uint_least64_t;
    +   typedef __int8 int_fast8_t;
    +   typedef __int16 int_fast16_t;
    +   typedef __int32 int_fast32_t;
    +   typedef __int64 int_fast64_t;
    +   typedef unsigned __int8 uint_fast8_t;
    +   typedef unsigned __int16 uint_fast16_t;
    +   typedef unsigned __int32 uint_fast32_t;
    +   typedef unsigned __int64 uint_fast64_t;
    +   typedef __int64 intmax_t;
    +   typedef unsigned __int64 uintmax_t;
    +# else
+#  include <stdint.h>
    +# endif
    +# if _MSC_VER < 1800   /* MSVC < 2013 */
    +#  ifndef __cplusplus
    +    typedef unsigned char _Bool;
    +#  endif
    +# endif
    +#else
+# include <stdint.h>
    +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
    +# endif
    +#endif
    +
    +#if PY_MAJOR_VERSION < 3
    +# undef PyCapsule_CheckExact
    +# undef PyCapsule_GetPointer
    +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
    +# define PyCapsule_GetPointer(capsule, name) \
    +    (PyCObject_AsVoidPtr(capsule))
    +#endif
    +
    +#if PY_MAJOR_VERSION >= 3
    +# define PyInt_FromLong PyLong_FromLong
    +#endif
    +
    +#define _cffi_from_c_double PyFloat_FromDouble
    +#define _cffi_from_c_float PyFloat_FromDouble
    +#define _cffi_from_c_long PyInt_FromLong
    +#define _cffi_from_c_ulong PyLong_FromUnsignedLong
    +#define _cffi_from_c_longlong PyLong_FromLongLong
    +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
    +#define _cffi_from_c__Bool PyBool_FromLong
    +
    +#define _cffi_to_c_double PyFloat_AsDouble
    +#define _cffi_to_c_float PyFloat_AsDouble
    +
    +#define _cffi_from_c_int_const(x)                                        \
    +    (((x) > 0) ?                                                         \
    +        ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ?      \
    +            PyInt_FromLong((long)(x)) :                                  \
    +            PyLong_FromUnsignedLongLong((unsigned long long)(x)) :       \
    +        ((long long)(x) >= (long long)LONG_MIN) ?                        \
    +            PyInt_FromLong((long)(x)) :                                  \
    +            PyLong_FromLongLong((long long)(x)))
    +
    +#define _cffi_from_c_int(x, type)                                        \
    +    (((type)-1) > 0 ? /* unsigned */                                     \
    +        (sizeof(type) < sizeof(long) ?                                   \
    +            PyInt_FromLong((long)x) :                                    \
    +         sizeof(type) == sizeof(long) ?                                  \
    +            PyLong_FromUnsignedLong((unsigned long)x) :                  \
    +            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
    +        (sizeof(type) <= sizeof(long) ?                                  \
    +            PyInt_FromLong((long)x) :                                    \
    +            PyLong_FromLongLong((long long)x)))
    +
    +#define _cffi_to_c_int(o, type)                                          \
    +    ((type)(                                                             \
    +     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
    +                                         : (type)_cffi_to_c_i8(o)) :     \
    +     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
    +                                         : (type)_cffi_to_c_i16(o)) :    \
    +     sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o)       \
    +                                         : (type)_cffi_to_c_i32(o)) :    \
    +     sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o)       \
    +                                         : (type)_cffi_to_c_i64(o)) :    \
    +     (Py_FatalError("unsupported size for type " #type), (type)0)))
    +
    +#define _cffi_to_c_i8                                                    \
    +                 ((int(*)(PyObject *))_cffi_exports[1])
    +#define _cffi_to_c_u8                                                    \
    +                 ((int(*)(PyObject *))_cffi_exports[2])
    +#define _cffi_to_c_i16                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[3])
    +#define _cffi_to_c_u16                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[4])
    +#define _cffi_to_c_i32                                                   \
    +                 ((int(*)(PyObject *))_cffi_exports[5])
    +#define _cffi_to_c_u32                                                   \
    +                 ((unsigned int(*)(PyObject *))_cffi_exports[6])
    +#define _cffi_to_c_i64                                                   \
    +                 ((long long(*)(PyObject *))_cffi_exports[7])
    +#define _cffi_to_c_u64                                                   \
    +                 ((unsigned long long(*)(PyObject *))_cffi_exports[8])
    +#define _cffi_to_c_char                                                  \
    +                 ((int(*)(PyObject *))_cffi_exports[9])
    +#define _cffi_from_c_pointer                                             \
    +    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
    +#define _cffi_to_c_pointer                                               \
    +    ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
    +#define _cffi_get_struct_layout                                          \
    +    ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12])
    +#define _cffi_restore_errno                                              \
    +    ((void(*)(void))_cffi_exports[13])
    +#define _cffi_save_errno                                                 \
    +    ((void(*)(void))_cffi_exports[14])
    +#define _cffi_from_c_char                                                \
    +    ((PyObject *(*)(char))_cffi_exports[15])
    +#define _cffi_from_c_deref                                               \
    +    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
    +#define _cffi_to_c                                                       \
    +    ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
    +#define _cffi_from_c_struct                                              \
    +    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
    +#define _cffi_to_c_wchar_t                                               \
    +    ((wchar_t(*)(PyObject *))_cffi_exports[19])
    +#define _cffi_from_c_wchar_t                                             \
    +    ((PyObject *(*)(wchar_t))_cffi_exports[20])
    +#define _cffi_to_c_long_double                                           \
    +    ((long double(*)(PyObject *))_cffi_exports[21])
    +#define _cffi_to_c__Bool                                                 \
    +    ((_Bool(*)(PyObject *))_cffi_exports[22])
    +#define _cffi_prepare_pointer_call_argument                              \
    +    ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
    +#define _cffi_convert_array_from_object                                  \
    +    ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
    +#define _CFFI_NUM_EXPORTS 25
    +
    +typedef struct _ctypedescr CTypeDescrObject;
    +
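+/* Entry-point table filled in by _cffi_init() below: it is copied out of
+   the "_C_API" capsule exported by _cffi_backend, and the _cffi_to_c_* /
+   _cffi_from_c_* macros above index into it. */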
    +static void *_cffi_exports[_CFFI_NUM_EXPORTS];
    +static PyObject *_cffi_types, *_cffi_VerificationError;
    +
    +static int _cffi_setup_custom(PyObject *lib);   /* forward */
    +
    +static PyObject *_cffi_setup(PyObject *self, PyObject *args)
    +{
    +    PyObject *library;
    +    int was_alive = (_cffi_types != NULL);
    +    (void)self; /* unused */
    +    if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
    +                                       &library))
    +        return NULL;
    +    Py_INCREF(_cffi_types);
    +    Py_INCREF(_cffi_VerificationError);
    +    if (_cffi_setup_custom(library) < 0)
    +        return NULL;
    +    return PyBool_FromLong(was_alive);
    +}
    +
    +union _cffi_union_alignment_u {
    +    unsigned char m_char;
    +    unsigned short m_short;
    +    unsigned int m_int;
    +    unsigned long m_long;
    +    unsigned long long m_longlong;
    +    float m_float;
    +    double m_double;
    +    long double m_longdouble;
    +};
    +
    +struct _cffi_freeme_s {
    +    struct _cffi_freeme_s *next;
    +    union _cffi_union_alignment_u alignment;
    +};
    +
    +#ifdef __GNUC__
    +  __attribute__((unused))
    +#endif
    +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg,
    +                                        char **output_data, Py_ssize_t datasize,
    +                                        struct _cffi_freeme_s **freeme)
    +{
    +    char *p;
    +    if (datasize < 0)
    +        return -1;
    +
    +    p = *output_data;
    +    if (p == NULL) {
    +        struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
    +            offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
    +        if (fp == NULL)
    +            return -1;
    +        fp->next = *freeme;
    +        *freeme = fp;
    +        p = *output_data = (char *)&fp->alignment;
    +    }
    +    memset((void *)p, 0, (size_t)datasize);
    +    return _cffi_convert_array_from_object(p, ctptr, arg);
    +}
    +
    +#ifdef __GNUC__
    +  __attribute__((unused))
    +#endif
    +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
    +{
    +    do {
    +        void *p = (void *)freeme;
    +        freeme = freeme->next;
    +        PyObject_Free(p);
    +    } while (freeme != NULL);
    +}
    +
    +static int _cffi_init(void)
    +{
    +    PyObject *module, *c_api_object = NULL;
    +
    +    module = PyImport_ImportModule("_cffi_backend");
    +    if (module == NULL)
    +        goto failure;
    +
    +    c_api_object = PyObject_GetAttrString(module, "_C_API");
    +    if (c_api_object == NULL)
    +        goto failure;
    +    if (!PyCapsule_CheckExact(c_api_object)) {
    +        PyErr_SetNone(PyExc_ImportError);
    +        goto failure;
    +    }
    +    memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
    +           _CFFI_NUM_EXPORTS * sizeof(void *));
    +
    +    Py_DECREF(module);
    +    Py_DECREF(c_api_object);
    +    return 0;
    +
    +  failure:
    +    Py_XDECREF(module);
    +    Py_XDECREF(c_api_object);
    +    return -1;
    +}
    +
    +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
    +
    +/**********/
    +'''
    diff --git a/server/www/packages/packages-windows/x86/cffi/vengine_gen.py b/server/www/packages/packages-windows/x86/cffi/vengine_gen.py
    new file mode 100644
    index 0000000..e84c3d5
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/vengine_gen.py
    @@ -0,0 +1,675 @@
    +#
    +# DEPRECATED: implementation for ffi.verify()
    +#
    +import sys, os
    +import types
    +
    +from . import model
    +from .error import VerificationError
    +
    +
    +class VGenericEngine(object):
    +    _class_key = 'g'
    +    _gen_python_module = False
    +
    +    def __init__(self, verifier):
    +        self.verifier = verifier
    +        self.ffi = verifier.ffi
    +        self.export_symbols = []
    +        self._struct_pending_verification = {}
    +
    +    def patch_extension_kwds(self, kwds):
    +        # add 'export_symbols' to the dictionary.  Note that we add the
    +        # list before filling it.  When we fill it, it will thus also show
    +        # up in kwds['export_symbols'].
    +        kwds.setdefault('export_symbols', self.export_symbols)
    +
    +    def find_module(self, module_name, path, so_suffixes):
    +        for so_suffix in so_suffixes:
    +            basename = module_name + so_suffix
    +            if path is None:
    +                path = sys.path
    +            for dirname in path:
    +                filename = os.path.join(dirname, basename)
    +                if os.path.isfile(filename):
    +                    return filename
    +
    +    def collect_types(self):
    +        pass      # not needed in the generic engine
    +
    +    def _prnt(self, what=''):
    +        self._f.write(what + '\n')
    +
    +    def write_source_to_f(self):
    +        prnt = self._prnt
    +        # first paste some standard set of lines that are mostly '#include'
    +        prnt(cffimod_header)
    +        # then paste the C source given by the user, verbatim.
    +        prnt(self.verifier.preamble)
    +        #
    +        # call generate_gen_xxx_decl(), for every xxx found from
    +        # ffi._parser._declarations.  This generates all the functions.
    +        self._generate('decl')
    +        #
    +        # on Windows, distutils insists on putting init_cffi_xyz in
    +        # 'export_symbols', so instead of fighting it, just give up and
    +        # give it one
    +        if sys.platform == 'win32':
    +            if sys.version_info >= (3,):
    +                prefix = 'PyInit_'
    +            else:
    +                prefix = 'init'
    +            modname = self.verifier.get_module_name()
    +            prnt("void %s%s(void) { }\n" % (prefix, modname))
    +
    +    def load_library(self, flags=0):
    +        # import it with the CFFI backend
    +        backend = self.ffi._backend
    +        # needs to make a path that contains '/', on Posix
    +        filename = os.path.join(os.curdir, self.verifier.modulefilename)
    +        module = backend.load_library(filename, flags)
    +        #
    +        # call loading_gen_struct() to get the struct layout inferred by
    +        # the C compiler
    +        self._load(module, 'loading')
    +
    +        # build the FFILibrary class and instance, this is a module subclass
    +        # because modules are expected to have usually-constant-attributes and
    +        # in PyPy this means the JIT is able to treat attributes as constant,
    +        # which we want.
    +        class FFILibrary(types.ModuleType):
    +            _cffi_generic_module = module
    +            _cffi_ffi = self.ffi
    +            _cffi_dir = []
    +            def __dir__(self):
    +                return FFILibrary._cffi_dir
    +        library = FFILibrary("")
    +        #
    +        # finally, call the loaded_gen_xxx() functions.  This will set
    +        # up the 'library' object.
    +        self._load(module, 'loaded', library=library)
    +        return library
    +
    +    def _get_declarations(self):
    +        lst = [(key, tp) for (key, (tp, qual)) in
    +                                self.ffi._parser._declarations.items()]
    +        lst.sort()
    +        return lst
    +
    +    def _generate(self, step_name):
    +        for name, tp in self._get_declarations():
    +            kind, realname = name.split(' ', 1)
    +            try:
    +                method = getattr(self, '_generate_gen_%s_%s' % (kind,
    +                                                                step_name))
    +            except AttributeError:
    +                raise VerificationError(
    +                    "not implemented in verify(): %r" % name)
    +            try:
    +                method(tp, realname)
    +            except Exception as e:
    +                model.attach_exception_info(e, name)
    +                raise
    +
    +    def _load(self, module, step_name, **kwds):
    +        for name, tp in self._get_declarations():
    +            kind, realname = name.split(' ', 1)
    +            method = getattr(self, '_%s_gen_%s' % (step_name, kind))
    +            try:
    +                method(tp, realname, module, **kwds)
    +            except Exception as e:
    +                model.attach_exception_info(e, name)
    +                raise
    +
    +    def _generate_nothing(self, tp, name):
    +        pass
    +
    +    def _loaded_noop(self, tp, name, module, **kwds):
    +        pass
    +
    +    # ----------
    +    # typedefs: generates no code so far
    +
    +    _generate_gen_typedef_decl   = _generate_nothing
    +    _loading_gen_typedef         = _loaded_noop
    +    _loaded_gen_typedef          = _loaded_noop
    +
    +    # ----------
    +    # function declarations
    +
    +    def _generate_gen_function_decl(self, tp, name):
    +        assert isinstance(tp, model.FunctionPtrType)
    +        if tp.ellipsis:
    +            # cannot support vararg functions better than this: check for its
    +            # exact type (including the fixed arguments), and build it as a
    +            # constant function pointer (no _cffi_f_%s wrapper)
    +            self._generate_gen_const(False, name, tp)
    +            return
    +        prnt = self._prnt
    +        numargs = len(tp.args)
    +        argnames = []
    +        for i, type in enumerate(tp.args):
    +            indirection = ''
    +            if isinstance(type, model.StructOrUnion):
    +                indirection = '*'
    +            argnames.append('%sx%d' % (indirection, i))
    +        context = 'argument of %s' % name
    +        arglist = [type.get_c_name(' %s' % arg, context)
    +                   for type, arg in zip(tp.args, argnames)]
    +        tpresult = tp.result
    +        if isinstance(tpresult, model.StructOrUnion):
    +            arglist.insert(0, tpresult.get_c_name(' *r', context))
    +            tpresult = model.void_type
    +        arglist = ', '.join(arglist) or 'void'
    +        wrappername = '_cffi_f_%s' % name
    +        self.export_symbols.append(wrappername)
    +        if tp.abi:
    +            abi = tp.abi + ' '
    +        else:
    +            abi = ''
    +        funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
    +        context = 'result of %s' % name
    +        prnt(tpresult.get_c_name(funcdecl, context))
    +        prnt('{')
    +        #
    +        if isinstance(tp.result, model.StructOrUnion):
    +            result_code = '*r = '
    +        elif not isinstance(tp.result, model.VoidType):
    +            result_code = 'return '
    +        else:
    +            result_code = ''
    +        prnt('  %s%s(%s);' % (result_code, name, ', '.join(argnames)))
    +        prnt('}')
    +        prnt()
    +
    +    _loading_gen_function = _loaded_noop
    +
    +    def _loaded_gen_function(self, tp, name, module, library):
    +        assert isinstance(tp, model.FunctionPtrType)
    +        if tp.ellipsis:
    +            newfunction = self._load_constant(False, tp, name, module)
    +        else:
    +            indirections = []
    +            base_tp = tp
    +            if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
    +                    or isinstance(tp.result, model.StructOrUnion)):
    +                indirect_args = []
    +                for i, typ in enumerate(tp.args):
    +                    if isinstance(typ, model.StructOrUnion):
    +                        typ = model.PointerType(typ)
    +                        indirections.append((i, typ))
    +                    indirect_args.append(typ)
    +                indirect_result = tp.result
    +                if isinstance(indirect_result, model.StructOrUnion):
    +                    if indirect_result.fldtypes is None:
    +                        raise TypeError("'%s' is used as result type, "
    +                                        "but is opaque" % (
    +                                            indirect_result._get_c_name(),))
    +                    indirect_result = model.PointerType(indirect_result)
    +                    indirect_args.insert(0, indirect_result)
    +                    indirections.insert(0, ("result", indirect_result))
    +                    indirect_result = model.void_type
    +                tp = model.FunctionPtrType(tuple(indirect_args),
    +                                           indirect_result, tp.ellipsis)
    +            BFunc = self.ffi._get_cached_btype(tp)
    +            wrappername = '_cffi_f_%s' % name
    +            newfunction = module.load_function(BFunc, wrappername)
    +            for i, typ in indirections:
    +                newfunction = self._make_struct_wrapper(newfunction, i, typ,
    +                                                        base_tp)
    +        setattr(library, name, newfunction)
    +        type(library)._cffi_dir.append(name)
    +
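+    # Structs/unions passed or returned by value cannot go through the
+    # dlopen()-based backend directly, so the generated C wrapper takes and
+    # returns pointers instead; this Python-side wrapper allocates the
+    # temporaries and dereferences the result.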
    +    def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
    +        backend = self.ffi._backend
    +        BType = self.ffi._get_cached_btype(tp)
    +        if i == "result":
    +            ffi = self.ffi
    +            def newfunc(*args):
    +                res = ffi.new(BType)
    +                oldfunc(res, *args)
    +                return res[0]
    +        else:
    +            def newfunc(*args):
    +                args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
    +                return oldfunc(*args)
    +        newfunc._cffi_base_type = base_tp
    +        return newfunc
    +
    +    # ----------
    +    # named structs
    +
    +    def _generate_gen_struct_decl(self, tp, name):
    +        assert name == tp.name
    +        self._generate_struct_or_union_decl(tp, 'struct', name)
    +
    +    def _loading_gen_struct(self, tp, name, module):
    +        self._loading_struct_or_union(tp, 'struct', name, module)
    +
    +    def _loaded_gen_struct(self, tp, name, module, **kwds):
    +        self._loaded_struct_or_union(tp)
    +
    +    def _generate_gen_union_decl(self, tp, name):
    +        assert name == tp.name
    +        self._generate_struct_or_union_decl(tp, 'union', name)
    +
    +    def _loading_gen_union(self, tp, name, module):
    +        self._loading_struct_or_union(tp, 'union', name, module)
    +
    +    def _loaded_gen_union(self, tp, name, module, **kwds):
    +        self._loaded_struct_or_union(tp)
    +
    +    def _generate_struct_or_union_decl(self, tp, prefix, name):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
    +        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    +        cname = ('%s %s' % (prefix, name)).strip()
    +        #
    +        prnt = self._prnt
    +        prnt('static void %s(%s *p)' % (checkfuncname, cname))
    +        prnt('{')
    +        prnt('  /* only to generate compile-time warnings or errors */')
    +        prnt('  (void)p;')
    +        for fname, ftype, fbitsize, fqual in tp.enumfields():
    +            if (isinstance(ftype, model.PrimitiveType)
    +                and ftype.is_integer_type()) or fbitsize >= 0:
    +                # accept all integers, but complain on float or double
    +                prnt('  (void)((p->%s) << 1);' % fname)
    +            else:
    +                # only accept exactly the type declared.
    +                try:
    +                    prnt('  { %s = &p->%s; (void)tmp; }' % (
    +                        ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
    +                        fname))
    +                except VerificationError as e:
    +                    prnt('  /* %s */' % str(e))   # cannot verify it, ignore
    +        prnt('}')
    +        self.export_symbols.append(layoutfuncname)
    +        prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
    +        prnt('{')
    +        prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
    +        prnt('  static intptr_t nums[] = {')
    +        prnt('    sizeof(%s),' % cname)
    +        prnt('    offsetof(struct _cffi_aligncheck, y),')
    +        for fname, ftype, fbitsize, fqual in tp.enumfields():
    +            if fbitsize >= 0:
    +                continue      # xxx ignore fbitsize for now
    +            prnt('    offsetof(%s, %s),' % (cname, fname))
    +            if isinstance(ftype, model.ArrayType) and ftype.length is None:
    +                prnt('    0,  /* %s */' % ftype._get_c_name())
    +            else:
    +                prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
    +        prnt('    -1')
    +        prnt('  };')
    +        prnt('  return nums[i];')
    +        prnt('  /* the next line is not executed, but compiled */')
    +        prnt('  %s(0);' % (checkfuncname,))
    +        prnt('}')
    +        prnt()
    +
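+    # The generated '_cffi_layout_*' function returns nums[i] for each call
+    # until it yields -1, so the loop below rebuilds the layout list one
+    # element at a time: total size, total alignment, then an (offset, size)
+    # pair per field.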
    +    def _loading_struct_or_union(self, tp, prefix, name, module):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    +        #
    +        BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
    +        function = module.load_function(BFunc, layoutfuncname)
    +        layout = []
    +        num = 0
    +        while True:
    +            x = function(num)
    +            if x < 0: break
    +            layout.append(x)
    +            num += 1
    +        if isinstance(tp, model.StructOrUnion) and tp.partial:
    +            # use the function()'s sizes and offsets to guide the
    +            # layout of the struct
    +            totalsize = layout[0]
    +            totalalignment = layout[1]
    +            fieldofs = layout[2::2]
    +            fieldsize = layout[3::2]
    +            tp.force_flatten()
    +            assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
    +            tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
    +        else:
    +            cname = ('%s %s' % (prefix, name)).strip()
    +            self._struct_pending_verification[tp] = layout, cname
    +
    +    def _loaded_struct_or_union(self, tp):
    +        if tp.fldnames is None:
    +            return     # nothing to do with opaque structs
    +        self.ffi._get_cached_btype(tp)   # force 'fixedlayout' to be considered
    +
    +        if tp in self._struct_pending_verification:
    +            # check that the layout sizes and offsets match the real ones
    +            def check(realvalue, expectedvalue, msg):
    +                if realvalue != expectedvalue:
    +                    raise VerificationError(
    +                        "%s (we have %d, but C compiler says %d)"
    +                        % (msg, expectedvalue, realvalue))
    +            ffi = self.ffi
    +            BStruct = ffi._get_cached_btype(tp)
    +            layout, cname = self._struct_pending_verification.pop(tp)
    +            check(layout[0], ffi.sizeof(BStruct), "wrong total size")
    +            check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
    +            i = 2
    +            for fname, ftype, fbitsize, fqual in tp.enumfields():
    +                if fbitsize >= 0:
    +                    continue        # xxx ignore fbitsize for now
    +                check(layout[i], ffi.offsetof(BStruct, fname),
    +                      "wrong offset for field %r" % (fname,))
    +                if layout[i+1] != 0:
    +                    BField = ffi._get_cached_btype(ftype)
    +                    check(layout[i+1], ffi.sizeof(BField),
    +                          "wrong size for field %r" % (fname,))
    +                i += 2
    +            assert i == len(layout)
    +
    +    # ----------
    +    # 'anonymous' declarations.  These are produced for anonymous structs
    +    # or unions; the 'name' is obtained by a typedef.
    +
    +    def _generate_gen_anonymous_decl(self, tp, name):
    +        if isinstance(tp, model.EnumType):
    +            self._generate_gen_enum_decl(tp, name, '')
    +        else:
    +            self._generate_struct_or_union_decl(tp, '', name)
    +
    +    def _loading_gen_anonymous(self, tp, name, module):
    +        if isinstance(tp, model.EnumType):
    +            self._loading_gen_enum(tp, name, module, '')
    +        else:
    +            self._loading_struct_or_union(tp, '', name, module)
    +
    +    def _loaded_gen_anonymous(self, tp, name, module, **kwds):
    +        if isinstance(tp, model.EnumType):
    +            self._loaded_gen_enum(tp, name, module, **kwds)
    +        else:
    +            self._loaded_struct_or_union(tp)
    +
    +    # ----------
    +    # constants, likely declared with '#define'
    +
    +    def _generate_gen_const(self, is_int, name, tp=None, category='const',
    +                            check_value=None):
    +        prnt = self._prnt
    +        funcname = '_cffi_%s_%s' % (category, name)
    +        self.export_symbols.append(funcname)
    +        if check_value is not None:
    +            assert is_int
    +            assert category == 'const'
    +            prnt('int %s(char *out_error)' % funcname)
    +            prnt('{')
    +            self._check_int_constant_value(name, check_value)
    +            prnt('  return 0;')
    +            prnt('}')
    +        elif is_int:
    +            assert category == 'const'
    +            prnt('int %s(long long *out_value)' % funcname)
    +            prnt('{')
    +            prnt('  *out_value = (long long)(%s);' % (name,))
    +            prnt('  return (%s) <= 0;' % (name,))
    +            prnt('}')
    +        else:
    +            assert tp is not None
    +            assert check_value is None
    +            if category == 'var':
    +                ampersand = '&'
    +            else:
    +                ampersand = ''
    +            extra = ''
    +            if category == 'const' and isinstance(tp, model.StructOrUnion):
    +                extra = 'const *'
    +                ampersand = '&'
    +            prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
    +            prnt('{')
    +            prnt('  return (%s%s);' % (ampersand, name))
    +            prnt('}')
    +        prnt()
    +
    +    def _generate_gen_constant_decl(self, tp, name):
    +        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    +        self._generate_gen_const(is_int, name, tp)
    +
    +    _loading_gen_constant = _loaded_noop
    +
    +    def _load_constant(self, is_int, tp, name, module, check_value=None):
    +        funcname = '_cffi_const_%s' % name
    +        if check_value is not None:
    +            assert is_int
    +            self._load_known_int_constant(module, funcname)
    +            value = check_value
    +        elif is_int:
    +            BType = self.ffi._typeof_locked("long long*")[0]
    +            BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
    +            function = module.load_function(BFunc, funcname)
    +            p = self.ffi.new(BType)
    +            negative = function(p)
    +            value = int(p[0])
    +            if value < 0 and not negative:
    +                BLongLong = self.ffi._typeof_locked("long long")[0]
    +                value += (1 << (8*self.ffi.sizeof(BLongLong)))
    +        else:
    +            assert check_value is None
    +            fntypeextra = '(*)(void)'
    +            if isinstance(tp, model.StructOrUnion):
    +                fntypeextra = '*' + fntypeextra
    +            BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
    +            function = module.load_function(BFunc, funcname)
    +            value = function()
    +            if isinstance(tp, model.StructOrUnion):
    +                value = value[0]
    +        return value
    +
    +    def _loaded_gen_constant(self, tp, name, module, library):
    +        is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    +        value = self._load_constant(is_int, tp, name, module)
    +        setattr(library, name, value)
    +        type(library)._cffi_dir.append(name)
    +
    +    # ----------
    +    # enums
    +
    +    def _check_int_constant_value(self, name, value):
    +        prnt = self._prnt
    +        if value <= 0:
    +            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
    +                name, name, value))
    +        else:
    +            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
    +                name, name, value))
    +        prnt('    char buf[64];')
    +        prnt('    if ((%s) <= 0)' % name)
    +        prnt('        sprintf(buf, "%%ld", (long)(%s));' % name)
    +        prnt('    else')
    +        prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
    +             name)
    +        prnt('    sprintf(out_error, "%s has the real value %s, not %s",')
    +        prnt('            "%s", buf, "%d");' % (name[:100], value))
    +        prnt('    return -1;')
    +        prnt('  }')
    +
    +    def _load_known_int_constant(self, module, funcname):
    +        BType = self.ffi._typeof_locked("char[]")[0]
    +        BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
    +        function = module.load_function(BFunc, funcname)
    +        p = self.ffi.new(BType, 256)
    +        if function(p) < 0:
    +            error = self.ffi.string(p)
    +            if sys.version_info >= (3,):
    +                error = str(error, 'utf-8')
    +            raise VerificationError(error)
    +
    +    def _enum_funcname(self, prefix, name):
    +        # "$enum_$1" => "___D_enum____D_1"
    +        name = name.replace('$', '___D_')
    +        return '_cffi_e_%s_%s' % (prefix, name)
    +
    +    def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
    +        if tp.partial:
    +            for enumerator in tp.enumerators:
    +                self._generate_gen_const(True, enumerator)
    +            return
    +        #
    +        funcname = self._enum_funcname(prefix, name)
    +        self.export_symbols.append(funcname)
    +        prnt = self._prnt
    +        prnt('int %s(char *out_error)' % funcname)
    +        prnt('{')
    +        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
    +            self._check_int_constant_value(enumerator, enumvalue)
    +        prnt('  return 0;')
    +        prnt('}')
    +        prnt()
    +
    +    def _loading_gen_enum(self, tp, name, module, prefix='enum'):
    +        if tp.partial:
    +            enumvalues = [self._load_constant(True, tp, enumerator, module)
    +                          for enumerator in tp.enumerators]
    +            tp.enumvalues = tuple(enumvalues)
    +            tp.partial_resolved = True
    +        else:
    +            funcname = self._enum_funcname(prefix, name)
    +            self._load_known_int_constant(module, funcname)
    +
    +    def _loaded_gen_enum(self, tp, name, module, library):
    +        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
    +            setattr(library, enumerator, enumvalue)
    +            type(library)._cffi_dir.append(enumerator)
    +
    +    # ----------
    +    # macros: for now only for integers
    +
    +    def _generate_gen_macro_decl(self, tp, name):
    +        if tp == '...':
    +            check_value = None
    +        else:
    +            check_value = tp     # an integer
    +        self._generate_gen_const(True, name, check_value=check_value)
    +
    +    _loading_gen_macro = _loaded_noop
    +
    +    def _loaded_gen_macro(self, tp, name, module, library):
    +        if tp == '...':
    +            check_value = None
    +        else:
    +            check_value = tp     # an integer
    +        value = self._load_constant(True, tp, name, module,
    +                                    check_value=check_value)
    +        setattr(library, name, value)
    +        type(library)._cffi_dir.append(name)
    +
    +    # ----------
    +    # global variables
    +
    +    def _generate_gen_variable_decl(self, tp, name):
    +        if isinstance(tp, model.ArrayType):
    +            if tp.length == '...':
    +                prnt = self._prnt
    +                funcname = '_cffi_sizeof_%s' % (name,)
    +                self.export_symbols.append(funcname)
    +                prnt("size_t %s(void)" % funcname)
    +                prnt("{")
    +                prnt("  return sizeof(%s);" % (name,))
    +                prnt("}")
    +            tp_ptr = model.PointerType(tp.item)
    +            self._generate_gen_const(False, name, tp_ptr)
    +        else:
    +            tp_ptr = model.PointerType(tp)
    +            self._generate_gen_const(False, name, tp_ptr, category='var')
    +
    +    _loading_gen_variable = _loaded_noop
    +
    +    def _loaded_gen_variable(self, tp, name, module, library):
    +        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
    +                                              # sense that "a=..." is forbidden
    +            if tp.length == '...':
    +                funcname = '_cffi_sizeof_%s' % (name,)
    +                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
    +                function = module.load_function(BFunc, funcname)
    +                size = function()
    +                BItemType = self.ffi._get_cached_btype(tp.item)
    +                length, rest = divmod(size, self.ffi.sizeof(BItemType))
    +                if rest != 0:
    +                    raise VerificationError(
    +                        "bad size: %r does not seem to be an array of %s" %
    +                        (name, tp.item))
    +                tp = tp.resolve_length(length)
    +            tp_ptr = model.PointerType(tp.item)
    +            value = self._load_constant(False, tp_ptr, name, module)
+            # 'value' is a <cdata 'type *'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
    +            if tp.length is not None:
    +                BArray = self.ffi._get_cached_btype(tp)
    +                value = self.ffi.cast(BArray, value)
    +            setattr(library, name, value)
    +            type(library)._cffi_dir.append(name)
    +            return
    +        # remove ptr= from the library instance, and replace
    +        # it by a property on the class, which reads/writes into ptr[0].
    +        funcname = '_cffi_var_%s' % name
    +        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
    +        function = module.load_function(BFunc, funcname)
    +        ptr = function()
    +        def getter(library):
    +            return ptr[0]
    +        def setter(library, value):
    +            ptr[0] = value
    +        setattr(type(library), name, property(getter, setter))
    +        type(library)._cffi_dir.append(name)
    +
    +cffimod_header = r'''
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdarg.h>
+#include <errno.h>
+#include <sys/types.h>   /* XXX for ssize_t on some platforms */
    +
    +/* this block of #ifs should be kept exactly identical between
    +   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
    +   and cffi/_cffi_include.h */
    +#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
    +# if _MSC_VER < 1600   /* MSVC < 2010 */
    +   typedef __int8 int8_t;
    +   typedef __int16 int16_t;
    +   typedef __int32 int32_t;
    +   typedef __int64 int64_t;
    +   typedef unsigned __int8 uint8_t;
    +   typedef unsigned __int16 uint16_t;
    +   typedef unsigned __int32 uint32_t;
    +   typedef unsigned __int64 uint64_t;
    +   typedef __int8 int_least8_t;
    +   typedef __int16 int_least16_t;
    +   typedef __int32 int_least32_t;
    +   typedef __int64 int_least64_t;
    +   typedef unsigned __int8 uint_least8_t;
    +   typedef unsigned __int16 uint_least16_t;
    +   typedef unsigned __int32 uint_least32_t;
    +   typedef unsigned __int64 uint_least64_t;
    +   typedef __int8 int_fast8_t;
    +   typedef __int16 int_fast16_t;
    +   typedef __int32 int_fast32_t;
    +   typedef __int64 int_fast64_t;
    +   typedef unsigned __int8 uint_fast8_t;
    +   typedef unsigned __int16 uint_fast16_t;
    +   typedef unsigned __int32 uint_fast32_t;
    +   typedef unsigned __int64 uint_fast64_t;
    +   typedef __int64 intmax_t;
    +   typedef unsigned __int64 uintmax_t;
    +# else
+#  include <stdint.h>
    +# endif
    +# if _MSC_VER < 1800   /* MSVC < 2013 */
    +#  ifndef __cplusplus
    +    typedef unsigned char _Bool;
    +#  endif
    +# endif
    +#else
+# include <stdint.h>
    +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
    +# endif
    +#endif
    +'''
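The loader side of this engine (`_loading_struct_or_union` above) reads each generated `_cffi_layout_*` helper as a flat array protocol: index 0 is the struct's total size, index 1 its alignment, then one (offset, size) pair per field, terminated by -1. A minimal pure-Python sketch of that walk; `fake` is a hypothetical stand-in for the compiled C helper, with values assuming a `struct { char a; int b; }` on a typical ABI:

```python
def walk_layout(layout_fn):
    # Mirror _loading_struct_or_union: call with 0, 1, 2, ... until -1.
    layout = []
    i = 0
    while True:
        x = layout_fn(i)
        if x < 0:
            break
        layout.append(x)
        i += 1
    totalsize, totalalignment = layout[0], layout[1]
    fields = list(zip(layout[2::2], layout[3::2]))  # (offset, size) pairs
    return totalsize, totalalignment, fields

# Hypothetical C helper output for struct { char a; int b; }:
# size 8, alignment 4, 'a' at offset 0 (1 byte), 'b' at offset 4 (4 bytes).
fake = [8, 4, 0, 1, 4, 4, -1]
print(walk_layout(lambda i: fake[i]))   # (8, 4, [(0, 1), (4, 4)])
```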
    diff --git a/server/www/packages/packages-windows/x86/cffi/verifier.py b/server/www/packages/packages-windows/x86/cffi/verifier.py
    new file mode 100644
    index 0000000..c674e9f
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cffi/verifier.py
    @@ -0,0 +1,306 @@
    +#
    +# DEPRECATED: implementation for ffi.verify()
    +#
    +import sys, os, binascii, shutil, io
    +from . import __version_verifier_modules__
    +from . import ffiplatform
    +from .error import VerificationError
    +
    +if sys.version_info >= (3, 3):
    +    import importlib.machinery
    +    def _extension_suffixes():
    +        return importlib.machinery.EXTENSION_SUFFIXES[:]
    +else:
    +    import imp
    +    def _extension_suffixes():
    +        return [suffix for suffix, _, type in imp.get_suffixes()
    +                if type == imp.C_EXTENSION]
    +
    +
    +if sys.version_info >= (3,):
    +    NativeIO = io.StringIO
    +else:
    +    class NativeIO(io.BytesIO):
    +        def write(self, s):
    +            if isinstance(s, unicode):
    +                s = s.encode('ascii')
    +            super(NativeIO, self).write(s)
    +
    +
    +class Verifier(object):
    +
    +    def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
    +                 ext_package=None, tag='', force_generic_engine=False,
    +                 source_extension='.c', flags=None, relative_to=None, **kwds):
    +        if ffi._parser._uses_new_feature:
    +            raise VerificationError(
    +                "feature not supported with ffi.verify(), but only "
    +                "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
    +        self.ffi = ffi
    +        self.preamble = preamble
    +        if not modulename:
    +            flattened_kwds = ffiplatform.flatten(kwds)
    +        vengine_class = _locate_engine_class(ffi, force_generic_engine)
    +        self._vengine = vengine_class(self)
    +        self._vengine.patch_extension_kwds(kwds)
    +        self.flags = flags
    +        self.kwds = self.make_relative_to(kwds, relative_to)
    +        #
    +        if modulename:
    +            if tag:
    +                raise TypeError("can't specify both 'modulename' and 'tag'")
    +        else:
    +            key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
    +                               preamble, flattened_kwds] +
    +                              ffi._cdefsources)
    +            if sys.version_info >= (3,):
    +                key = key.encode('utf-8')
    +            k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
    +            k1 = k1.lstrip('0x').rstrip('L')
    +            k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
    +            k2 = k2.lstrip('0').rstrip('L')
    +            modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
    +                                              k1, k2)
    +        suffix = _get_so_suffixes()[0]
    +        self.tmpdir = tmpdir or _caller_dir_pycache()
    +        self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
    +        self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
    +        self.ext_package = ext_package
    +        self._has_source = False
    +        self._has_module = False
    +
    +    def write_source(self, file=None):
    +        """Write the C source code.  It is produced in 'self.sourcefilename',
    +        which can be tweaked beforehand."""
    +        with self.ffi._lock:
    +            if self._has_source and file is None:
    +                raise VerificationError(
    +                    "source code already written")
    +            self._write_source(file)
    +
    +    def compile_module(self):
    +        """Write the C source code (if not done already) and compile it.
    +        This produces a dynamic link library in 'self.modulefilename'."""
    +        with self.ffi._lock:
    +            if self._has_module:
    +                raise VerificationError("module already compiled")
    +            if not self._has_source:
    +                self._write_source()
    +            self._compile_module()
    +
    +    def load_library(self):
    +        """Get a C module from this Verifier instance.
    +        Returns an instance of a FFILibrary class that behaves like the
    +        objects returned by ffi.dlopen(), but that delegates all
    +        operations to the C module.  If necessary, the C code is written
    +        and compiled first.
    +        """
    +        with self.ffi._lock:
    +            if not self._has_module:
    +                self._locate_module()
    +                if not self._has_module:
    +                    if not self._has_source:
    +                        self._write_source()
    +                    self._compile_module()
    +            return self._load_library()
    +
    +    def get_module_name(self):
    +        basename = os.path.basename(self.modulefilename)
    +        # kill both the .so extension and the other .'s, as introduced
    +        # by Python 3: 'basename.cpython-33m.so'
    +        basename = basename.split('.', 1)[0]
    +        # and the _d added in Python 2 debug builds --- but try to be
    +        # conservative and not kill a legitimate _d
    +        if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
    +            basename = basename[:-2]
    +        return basename
    +
    +    def get_extension(self):
    +        ffiplatform._hack_at_distutils() # backward compatibility hack
    +        if not self._has_source:
    +            with self.ffi._lock:
    +                if not self._has_source:
    +                    self._write_source()
    +        sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
    +        modname = self.get_module_name()
    +        return ffiplatform.get_extension(sourcename, modname, **self.kwds)
    +
    +    def generates_python_module(self):
    +        return self._vengine._gen_python_module
    +
    +    def make_relative_to(self, kwds, relative_to):
    +        if relative_to and os.path.dirname(relative_to):
    +            dirname = os.path.dirname(relative_to)
    +            kwds = kwds.copy()
    +            for key in ffiplatform.LIST_OF_FILE_NAMES:
    +                if key in kwds:
    +                    lst = kwds[key]
    +                    if not isinstance(lst, (list, tuple)):
    +                        raise TypeError("keyword '%s' should be a list or tuple"
    +                                        % (key,))
    +                    lst = [os.path.join(dirname, fn) for fn in lst]
    +                    kwds[key] = lst
    +        return kwds
    +
    +    # ----------
    +
    +    def _locate_module(self):
    +        if not os.path.isfile(self.modulefilename):
    +            if self.ext_package:
    +                try:
    +                    pkg = __import__(self.ext_package, None, None, ['__doc__'])
    +                except ImportError:
    +                    return      # cannot import the package itself, give up
    +                    # (e.g. it might be called differently before installation)
    +                path = pkg.__path__
    +            else:
    +                path = None
    +            filename = self._vengine.find_module(self.get_module_name(), path,
    +                                                 _get_so_suffixes())
    +            if filename is None:
    +                return
    +            self.modulefilename = filename
    +        self._vengine.collect_types()
    +        self._has_module = True
    +
    +    def _write_source_to(self, file):
    +        self._vengine._f = file
    +        try:
    +            self._vengine.write_source_to_f()
    +        finally:
    +            del self._vengine._f
    +
    +    def _write_source(self, file=None):
    +        if file is not None:
    +            self._write_source_to(file)
    +        else:
    +            # Write our source file to an in memory file.
    +            f = NativeIO()
    +            self._write_source_to(f)
    +            source_data = f.getvalue()
    +
    +            # Determine if this matches the current file
    +            if os.path.exists(self.sourcefilename):
    +                with open(self.sourcefilename, "r") as fp:
    +                    needs_written = not (fp.read() == source_data)
    +            else:
    +                needs_written = True
    +
    +            # Actually write the file out if it doesn't match
    +            if needs_written:
    +                _ensure_dir(self.sourcefilename)
    +                with open(self.sourcefilename, "w") as fp:
    +                    fp.write(source_data)
    +
    +            # Set this flag
    +            self._has_source = True
    +
    +    def _compile_module(self):
    +        # compile this C source
    +        tmpdir = os.path.dirname(self.sourcefilename)
    +        outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
    +        try:
    +            same = ffiplatform.samefile(outputfilename, self.modulefilename)
    +        except OSError:
    +            same = False
    +        if not same:
    +            _ensure_dir(self.modulefilename)
    +            shutil.move(outputfilename, self.modulefilename)
    +        self._has_module = True
    +
    +    def _load_library(self):
    +        assert self._has_module
    +        if self.flags is not None:
    +            return self._vengine.load_library(self.flags)
    +        else:
    +            return self._vengine.load_library()
    +
    +# ____________________________________________________________
    +
    +_FORCE_GENERIC_ENGINE = False      # for tests
    +
    +def _locate_engine_class(ffi, force_generic_engine):
    +    if _FORCE_GENERIC_ENGINE:
    +        force_generic_engine = True
    +    if not force_generic_engine:
    +        if '__pypy__' in sys.builtin_module_names:
    +            force_generic_engine = True
    +        else:
    +            try:
    +                import _cffi_backend
    +            except ImportError:
    +                _cffi_backend = '?'
    +            if ffi._backend is not _cffi_backend:
    +                force_generic_engine = True
    +    if force_generic_engine:
    +        from . import vengine_gen
    +        return vengine_gen.VGenericEngine
    +    else:
    +        from . import vengine_cpy
    +        return vengine_cpy.VCPythonEngine
    +
    +# ____________________________________________________________
    +
    +_TMPDIR = None
    +
    +def _caller_dir_pycache():
    +    if _TMPDIR:
    +        return _TMPDIR
    +    result = os.environ.get('CFFI_TMPDIR')
    +    if result:
    +        return result
    +    filename = sys._getframe(2).f_code.co_filename
    +    return os.path.abspath(os.path.join(os.path.dirname(filename),
    +                           '__pycache__'))
    +
    +def set_tmpdir(dirname):
    +    """Set the temporary directory to use instead of __pycache__."""
    +    global _TMPDIR
    +    _TMPDIR = dirname
    +
    +def cleanup_tmpdir(tmpdir=None, keep_so=False):
    +    """Clean up the temporary directory by removing all files in it
    +    called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
    +    tmpdir = tmpdir or _caller_dir_pycache()
    +    try:
    +        filelist = os.listdir(tmpdir)
    +    except OSError:
    +        return
    +    if keep_so:
    +        suffix = '.c'   # only remove .c files
    +    else:
    +        suffix = _get_so_suffixes()[0].lower()
    +    for fn in filelist:
    +        if fn.lower().startswith('_cffi_') and (
    +                fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
    +            try:
    +                os.unlink(os.path.join(tmpdir, fn))
    +            except OSError:
    +                pass
    +    clean_dir = [os.path.join(tmpdir, 'build')]
    +    for dir in clean_dir:
    +        try:
    +            for fn in os.listdir(dir):
    +                fn = os.path.join(dir, fn)
    +                if os.path.isdir(fn):
    +                    clean_dir.append(fn)
    +                else:
    +                    os.unlink(fn)
    +        except OSError:
    +            pass
    +
    +def _get_so_suffixes():
    +    suffixes = _extension_suffixes()
    +    if not suffixes:
    +        # bah, no C_EXTENSION available.  Occurs on pypy without cpyext
    +        if sys.platform == 'win32':
    +            suffixes = [".pyd"]
    +        else:
    +            suffixes = [".so"]
    +
    +    return suffixes
    +
    +def _ensure_dir(filename):
    +    dirname = os.path.dirname(filename)
    +    if dirname and not os.path.isdir(dirname):
    +        os.makedirs(dirname)
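End to end, the Verifier above is what powers the deprecated `ffi.verify()` call: it hashes the cdef sources and preamble into a module name, writes the C source, compiles it, and loads the result. A minimal usage sketch (requires a working C compiler; the cached module lands in `__pycache__` unless `set_tmpdir()` or `CFFI_TMPDIR` says otherwise):

```python
import cffi

ffi = cffi.FFI()
ffi.cdef("int add(int, int);")
# verify() builds and caches a tiny extension module via Verifier.
lib = ffi.verify("int add(int a, int b) { return a + b; }")
assert lib.add(2, 3) == 5
```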
    diff --git a/server/www/packages/packages-windows/x86/cryptography/__about__.py b/server/www/packages/packages-windows/x86/cryptography/__about__.py
    index a99f58f..218b238 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/__about__.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/__about__.py
    @@ -14,10 +14,10 @@ __summary__ = ("cryptography is a package which provides cryptographic recipes"
                    " and primitives to Python developers.")
     __uri__ = "https://github.com/pyca/cryptography"
     
    -__version__ = "2.3.1"
    +__version__ = "2.9.2"
     
     __author__ = "The cryptography developers"
     __email__ = "cryptography-dev@python.org"
     
     __license__ = "BSD or Apache License, Version 2.0"
    -__copyright__ = "Copyright 2013-2017 {0}".format(__author__)
    +__copyright__ = "Copyright 2013-2019 {}".format(__author__)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/exceptions.py b/server/www/packages/packages-windows/x86/cryptography/exceptions.py
    index 648cf9d..1d52d7d 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/exceptions.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/exceptions.py
    @@ -19,6 +19,7 @@ class _Reasons(Enum):
         UNSUPPORTED_X509 = 8
         UNSUPPORTED_EXCHANGE_ALGORITHM = 9
         UNSUPPORTED_DIFFIE_HELLMAN = 10
    +    UNSUPPORTED_MAC = 11
     
     
     class UnsupportedAlgorithm(Exception):
    diff --git a/server/www/packages/packages-windows/x86/cryptography/fernet.py b/server/www/packages/packages-windows/x86/cryptography/fernet.py
    index ac2dd0b..b990def 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/fernet.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/fernet.py
    @@ -12,6 +12,7 @@ import time
     
     import six
     
    +from cryptography import utils
     from cryptography.exceptions import InvalidSignature
     from cryptography.hazmat.backends import default_backend
     from cryptography.hazmat.primitives import hashes, padding
    @@ -51,8 +52,7 @@ class Fernet(object):
             return self._encrypt_from_parts(data, current_time, iv)
     
         def _encrypt_from_parts(self, data, current_time, iv):
    -        if not isinstance(data, bytes):
    -            raise TypeError("data must be bytes.")
    +        utils._check_bytes("data", data)
     
             padder = padding.PKCS7(algorithms.AES.block_size).padder()
             padded_data = padder.update(data) + padder.finalize()
    @@ -82,9 +82,7 @@ class Fernet(object):
     
         @staticmethod
         def _get_unverified_token_data(token):
    -        if not isinstance(token, bytes):
    -            raise TypeError("token must be bytes.")
    -
    +        utils._check_bytes("token", token)
             try:
                 data = base64.urlsafe_b64decode(token)
             except (TypeError, binascii.Error):
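The two hunks above replace open-coded isinstance checks with `utils._check_bytes`. For reference, that helper is a small type guard along these lines (a sketch, not necessarily the verbatim implementation):

```python
def _check_bytes(name, value):
    # Raise a uniform TypeError for non-bytes arguments.
    if not isinstance(value, bytes):
        raise TypeError("{} must be bytes".format(name))
```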
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/_der.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/_der.py
    new file mode 100644
    index 0000000..51518d6
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/_der.py
    @@ -0,0 +1,156 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import six
    +
    +from cryptography.utils import int_from_bytes, int_to_bytes
    +
    +
    +# This module contains a lightweight DER encoder and decoder. See X.690 for the
    +# specification. This module intentionally does not implement the more complex
    +# BER encoding, only DER.
    +#
    +# Note this implementation treats an element's constructed bit as part of the
    +# tag. This is fine for DER, where the bit is always computable from the type.
    +
    +
    +CONSTRUCTED = 0x20
    +CONTEXT_SPECIFIC = 0x80
    +
    +INTEGER = 0x02
    +BIT_STRING = 0x03
    +OCTET_STRING = 0x04
    +NULL = 0x05
    +OBJECT_IDENTIFIER = 0x06
    +SEQUENCE = 0x10 | CONSTRUCTED
    +SET = 0x11 | CONSTRUCTED
    +PRINTABLE_STRING = 0x13
    +UTC_TIME = 0x17
    +GENERALIZED_TIME = 0x18
    +
    +
    +class DERReader(object):
    +    def __init__(self, data):
    +        self.data = memoryview(data)
    +
    +    def __enter__(self):
    +        return self
    +
    +    def __exit__(self, exc_type, exc_value, tb):
    +        if exc_value is None:
    +            self.check_empty()
    +
    +    def is_empty(self):
    +        return len(self.data) == 0
    +
    +    def check_empty(self):
    +        if not self.is_empty():
    +            raise ValueError("Invalid DER input: trailing data")
    +
    +    def read_byte(self):
    +        if len(self.data) < 1:
    +            raise ValueError("Invalid DER input: insufficient data")
    +        ret = six.indexbytes(self.data, 0)
    +        self.data = self.data[1:]
    +        return ret
    +
    +    def read_bytes(self, n):
    +        if len(self.data) < n:
    +            raise ValueError("Invalid DER input: insufficient data")
    +        ret = self.data[:n]
    +        self.data = self.data[n:]
    +        return ret
    +
    +    def read_any_element(self):
    +        tag = self.read_byte()
    +        # Tag numbers 31 or higher are stored in multiple bytes. No supported
    +        # ASN.1 types use such tags, so reject these.
    +        if tag & 0x1f == 0x1f:
    +            raise ValueError("Invalid DER input: unexpected high tag number")
    +        length_byte = self.read_byte()
    +        if length_byte & 0x80 == 0:
    +            # If the high bit is clear, the first length byte is the length.
    +            length = length_byte
    +        else:
    +            # If the high bit is set, the first length byte encodes the length
    +            # of the length.
    +            length_byte &= 0x7f
    +            if length_byte == 0:
    +                raise ValueError(
    +                    "Invalid DER input: indefinite length form is not allowed "
    +                    "in DER"
    +                )
    +            length = 0
    +            for i in range(length_byte):
    +                length <<= 8
    +                length |= self.read_byte()
    +                if length == 0:
    +                    raise ValueError(
    +                        "Invalid DER input: length was not minimally-encoded"
    +                    )
    +            if length < 0x80:
    +                # If the length could have been encoded in short form, it must
    +                # not use long form.
    +                raise ValueError(
    +                    "Invalid DER input: length was not minimally-encoded"
    +                )
    +        body = self.read_bytes(length)
    +        return tag, DERReader(body)
    +
    +    def read_element(self, expected_tag):
    +        tag, body = self.read_any_element()
    +        if tag != expected_tag:
    +            raise ValueError("Invalid DER input: unexpected tag")
    +        return body
    +
    +    def read_single_element(self, expected_tag):
    +        with self:
    +            return self.read_element(expected_tag)
    +
    +    def read_optional_element(self, expected_tag):
    +        if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag:
    +            return self.read_element(expected_tag)
    +        return None
    +
    +    def as_integer(self):
    +        if len(self.data) == 0:
    +            raise ValueError("Invalid DER input: empty integer contents")
    +        first = six.indexbytes(self.data, 0)
    +        if first & 0x80 == 0x80:
    +            raise ValueError("Negative DER integers are not supported")
    +        # The first 9 bits must not all be zero or all be ones. Otherwise, the
    +        # encoding should have been one byte shorter.
    +        if len(self.data) > 1:
    +            second = six.indexbytes(self.data, 1)
    +            if first == 0 and second & 0x80 == 0:
    +                raise ValueError(
    +                    "Invalid DER input: integer not minimally-encoded"
    +                )
    +        return int_from_bytes(self.data, "big")
    +
    +
    +def encode_der_integer(x):
    +    if not isinstance(x, six.integer_types):
    +        raise ValueError("Value must be an integer")
    +    if x < 0:
    +        raise ValueError("Negative integers are not supported")
    +    n = x.bit_length() // 8 + 1
    +    return int_to_bytes(x, n)
    +
    +
    +def encode_der(tag, *children):
    +    length = 0
    +    for child in children:
    +        length += len(child)
    +    chunks = [six.int2byte(tag)]
    +    if length < 0x80:
    +        chunks.append(six.int2byte(length))
    +    else:
    +        length_bytes = int_to_bytes(length)
    +        chunks.append(six.int2byte(0x80 | len(length_bytes)))
    +        chunks.append(length_bytes)
    +    chunks.extend(children)
    +    return b"".join(chunks)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/_oid.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/_oid.py
    new file mode 100644
    index 0000000..f98912f
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/_oid.py
    @@ -0,0 +1,72 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +from cryptography import utils
    +
    +
    +class ObjectIdentifier(object):
    +    def __init__(self, dotted_string):
    +        self._dotted_string = dotted_string
    +
    +        nodes = self._dotted_string.split(".")
    +        intnodes = []
    +
+        # There must be at least 2 nodes; the first node must be 0..2; and
+        # if the first node is less than 2, the second node must be in the
+        # range 0..39.  All nodes must be non-negative integers.
    +        for node in nodes:
    +            try:
    +                node_value = int(node, 10)
    +            except ValueError:
    +                raise ValueError(
    +                    "Malformed OID: %s (non-integer nodes)" % (
    +                        self._dotted_string))
    +            if node_value < 0:
    +                raise ValueError(
    +                    "Malformed OID: %s (negative-integer nodes)" % (
    +                        self._dotted_string))
    +            intnodes.append(node_value)
    +
    +        if len(nodes) < 2:
    +            raise ValueError(
    +                "Malformed OID: %s (insufficient number of nodes)" % (
    +                    self._dotted_string))
    +
    +        if intnodes[0] > 2:
    +            raise ValueError(
    +                "Malformed OID: %s (first node outside valid range)" % (
    +                    self._dotted_string))
    +
    +        if intnodes[0] < 2 and intnodes[1] >= 40:
    +            raise ValueError(
    +                "Malformed OID: %s (second node outside valid range)" % (
    +                    self._dotted_string))
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, ObjectIdentifier):
    +            return NotImplemented
    +
    +        return self.dotted_string == other.dotted_string
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __repr__(self):
    +        return "".format(
    +            self.dotted_string,
    +            self._name
    +        )
    +
    +    def __hash__(self):
    +        return hash(self.dotted_string)
    +
    +    @property
    +    def _name(self):
    +        # Lazy import to avoid an import cycle
    +        from cryptography.x509.oid import _OID_NAMES
    +        return _OID_NAMES.get(self, "Unknown OID")
    +
    +    dotted_string = utils.read_only_property("_dotted_string")
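The class is re-exported publicly as `cryptography.x509.ObjectIdentifier`, and the checks above reject malformed dotted strings. A quick illustration:

```python
from cryptography.x509 import ObjectIdentifier

ObjectIdentifier("2.5.4.3")            # id-at-commonName: valid
for bad in ("1", "3.1.2", "1.40.1", "1.2.x"):
    try:
        ObjectIdentifier(bad)
    except ValueError as exc:
        print(exc)   # each string violates one of the rules above
```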
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/interfaces.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/interfaces.py
    index 0a476b9..20f4164 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/interfaces.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/interfaces.py
    @@ -57,7 +57,7 @@ class HMACBackend(object):
         @abc.abstractmethod
         def create_hmac_ctx(self, key, algorithm):
             """
    -        Create a MACContext for calculating a message authentication code.
    +        Create a context for calculating a message authentication code.
             """
     
     
    @@ -72,7 +72,7 @@ class CMACBackend(object):
         @abc.abstractmethod
         def create_cmac_ctx(self, algorithm):
             """
    -        Create a MACContext for calculating a message authentication code.
    +        Create a context for calculating a message authentication code.
             """
     
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/aead.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/aead.py
    index 9cec3e2..0cad15c 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/aead.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/aead.py
    @@ -18,10 +18,10 @@ def _aead_cipher_name(cipher):
         if isinstance(cipher, ChaCha20Poly1305):
             return b"chacha20-poly1305"
         elif isinstance(cipher, AESCCM):
    -        return "aes-{0}-ccm".format(len(cipher._key) * 8).encode("ascii")
    +        return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii")
         else:
             assert isinstance(cipher, AESGCM)
    -        return "aes-{0}-gcm".format(len(cipher._key) * 8).encode("ascii")
    +        return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii")
     
     
     def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
    @@ -49,17 +49,20 @@ def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
                 ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
             )
             backend.openssl_assert(res != 0)
    -    else:
    +    elif cipher_name.endswith(b"-ccm"):
             res = backend._lib.EVP_CIPHER_CTX_ctrl(
                 ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
             )
    +        backend.openssl_assert(res != 0)
     
    +    nonce_ptr = backend._ffi.from_buffer(nonce)
    +    key_ptr = backend._ffi.from_buffer(key)
         res = backend._lib.EVP_CipherInit_ex(
             ctx,
             backend._ffi.NULL,
             backend._ffi.NULL,
    -        key,
    -        nonce,
    +        key_ptr,
    +        nonce_ptr,
             int(operation == _ENCRYPT)
         )
         backend.openssl_assert(res != 0)
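The `from_buffer` changes above let the AEAD setup accept any buffer-like key and nonce. Through the public API this code path is exercised like so (standard AESGCM usage, shown only to situate the hunk):

```python
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=128)
nonce = os.urandom(12)              # 96-bit nonce is the recommended size
ct = AESGCM(key).encrypt(nonce, b"secret", b"associated data")
assert AESGCM(key).decrypt(nonce, ct, b"associated data") == b"secret"
```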
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/backend.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/backend.py
    index af14bfa..96fa9ff 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/backend.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/backend.py
    @@ -5,16 +5,19 @@
     from __future__ import absolute_import, division, print_function
     
     import base64
    -import calendar
     import collections
     import contextlib
     import itertools
     from contextlib import contextmanager
     
     import six
    +from six.moves import range
     
     from cryptography import utils, x509
     from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
    +from cryptography.hazmat._der import (
    +    INTEGER, NULL, SEQUENCE, encode_der, encode_der_integer
    +)
     from cryptography.hazmat.backends.interfaces import (
         CMACBackend, CipherBackend, DERSerializationBackend, DHBackend, DSABackend,
         EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
    @@ -23,7 +26,9 @@ from cryptography.hazmat.backends.interfaces import (
     from cryptography.hazmat.backends.openssl import aead
     from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
     from cryptography.hazmat.backends.openssl.cmac import _CMACContext
    -from cryptography.hazmat.backends.openssl.decode_asn1 import _Integers
    +from cryptography.hazmat.backends.openssl.decode_asn1 import (
    +    _CRL_ENTRY_REASON_ENUM_TO_CODE
    +)
     from cryptography.hazmat.backends.openssl.dh import (
         _DHParameters, _DHPrivateKey, _DHPublicKey, _dh_params_dup
     )
    @@ -33,26 +38,45 @@ from cryptography.hazmat.backends.openssl.dsa import (
     from cryptography.hazmat.backends.openssl.ec import (
         _EllipticCurvePrivateKey, _EllipticCurvePublicKey
     )
    +from cryptography.hazmat.backends.openssl.ed25519 import (
    +    _Ed25519PrivateKey, _Ed25519PublicKey
    +)
    +from cryptography.hazmat.backends.openssl.ed448 import (
    +    _ED448_KEY_SIZE, _Ed448PrivateKey, _Ed448PublicKey
    +)
     from cryptography.hazmat.backends.openssl.encode_asn1 import (
         _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS,
         _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS,
    +    _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
    +    _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
         _encode_asn1_int_gc, _encode_asn1_str_gc, _encode_name_gc, _txt2obj_gc,
     )
     from cryptography.hazmat.backends.openssl.hashes import _HashContext
     from cryptography.hazmat.backends.openssl.hmac import _HMACContext
    +from cryptography.hazmat.backends.openssl.ocsp import (
    +    _OCSPRequest, _OCSPResponse
    +)
    +from cryptography.hazmat.backends.openssl.poly1305 import (
    +    _POLY1305_KEY_SIZE, _Poly1305Context
    +)
     from cryptography.hazmat.backends.openssl.rsa import (
         _RSAPrivateKey, _RSAPublicKey
     )
     from cryptography.hazmat.backends.openssl.x25519 import (
         _X25519PrivateKey, _X25519PublicKey
     )
    +from cryptography.hazmat.backends.openssl.x448 import (
    +    _X448PrivateKey, _X448PublicKey
    +)
     from cryptography.hazmat.backends.openssl.x509 import (
         _Certificate, _CertificateRevocationList,
         _CertificateSigningRequest, _RevokedCertificate
     )
     from cryptography.hazmat.bindings.openssl import binding
     from cryptography.hazmat.primitives import hashes, serialization
    -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
    +from cryptography.hazmat.primitives.asymmetric import (
    +    dsa, ec, ed25519, ed448, rsa
    +)
     from cryptography.hazmat.primitives.asymmetric.padding import (
         MGF1, OAEP, PKCS1v15, PSS
     )
    @@ -63,11 +87,18 @@ from cryptography.hazmat.primitives.ciphers.modes import (
         CBC, CFB, CFB8, CTR, ECB, GCM, OFB, XTS
     )
     from cryptography.hazmat.primitives.kdf import scrypt
    +from cryptography.hazmat.primitives.serialization import ssh
    +from cryptography.x509 import ocsp
     
     
     _MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
     
     
    +# Not actually supported, just used as a marker for some serialization tests.
    +class _RC2(object):
    +    pass
    +
    +
     @utils.register_interface(CipherBackend)
     @utils.register_interface(CMACBackend)
     @utils.register_interface(DERSerializationBackend)
    @@ -105,21 +136,23 @@ class Backend(object):
             return binding._openssl_assert(self._lib, ok)
     
         def activate_builtin_random(self):
    -        # Obtain a new structural reference.
    -        e = self._lib.ENGINE_get_default_RAND()
    -        if e != self._ffi.NULL:
    -            self._lib.ENGINE_unregister_RAND(e)
    -            # Reset the RNG to use the new engine.
    -            self._lib.RAND_cleanup()
    -            # decrement the structural reference from get_default_RAND
    -            res = self._lib.ENGINE_finish(e)
    -            self.openssl_assert(res == 1)
    +        if self._lib.Cryptography_HAS_ENGINE:
    +            # Obtain a new structural reference.
    +            e = self._lib.ENGINE_get_default_RAND()
    +            if e != self._ffi.NULL:
    +                self._lib.ENGINE_unregister_RAND(e)
    +                # Reset the RNG to use the built-in.
    +                res = self._lib.RAND_set_rand_method(self._ffi.NULL)
    +                self.openssl_assert(res == 1)
    +                # decrement the structural reference from get_default_RAND
    +                res = self._lib.ENGINE_finish(e)
    +                self.openssl_assert(res == 1)
     
         @contextlib.contextmanager
         def _get_osurandom_engine(self):
             # Fetches an engine by id and returns it. This creates a structural
             # reference.
    -        e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id)
    +        e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id)
             self.openssl_assert(e != self._ffi.NULL)
             # Initialize the engine for use. This adds a functional reference.
             res = self._lib.ENGINE_init(e)
    @@ -136,14 +169,16 @@ class Backend(object):
                 self.openssl_assert(res == 1)
     
         def activate_osrandom_engine(self):
    -        # Unregister and free the current engine.
    -        self.activate_builtin_random()
    -        with self._get_osurandom_engine() as e:
    -            # Set the engine as the default RAND provider.
    -            res = self._lib.ENGINE_set_default_RAND(e)
    +        if self._lib.Cryptography_HAS_ENGINE:
    +            # Unregister and free the current engine.
    +            self.activate_builtin_random()
    +            with self._get_osurandom_engine() as e:
    +                # Set the engine as the default RAND provider.
    +                res = self._lib.ENGINE_set_default_RAND(e)
    +                self.openssl_assert(res == 1)
    +            # Reset the RNG to use the engine
    +            res = self._lib.RAND_set_rand_method(self._ffi.NULL)
                 self.openssl_assert(res == 1)
    -        # Reset the RNG to use the new engine.
    -        self._lib.RAND_cleanup()
     
         def osrandom_engine_implementation(self):
             buf = self._ffi.new("char[]", 64)
    @@ -171,20 +206,25 @@ class Backend(object):
         def create_hmac_ctx(self, key, algorithm):
             return _HMACContext(self, key, algorithm)
     
    -    def _build_openssl_digest_name(self, algorithm):
    +    def _evp_md_from_algorithm(self, algorithm):
             if algorithm.name == "blake2b" or algorithm.name == "blake2s":
    -            alg = "{0}{1}".format(
    +            alg = "{}{}".format(
                     algorithm.name, algorithm.digest_size * 8
                 ).encode("ascii")
             else:
                 alg = algorithm.name.encode("ascii")
     
    -        return alg
    +        evp_md = self._lib.EVP_get_digestbyname(alg)
    +        return evp_md
    +
    +    def _evp_md_non_null_from_algorithm(self, algorithm):
    +        evp_md = self._evp_md_from_algorithm(algorithm)
    +        self.openssl_assert(evp_md != self._ffi.NULL)
    +        return evp_md
     
         def hash_supported(self, algorithm):
    -        name = self._build_openssl_digest_name(algorithm)
    -        digest = self._lib.EVP_get_digestbyname(name)
    -        return digest != self._ffi.NULL
    +        evp_md = self._evp_md_from_algorithm(algorithm)
    +        return evp_md != self._ffi.NULL
     
         def hmac_supported(self, algorithm):
             return self.hash_supported(algorithm)
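The refactor above centralizes the EVP_MD lookup in `_evp_md_from_algorithm`, including the blake2 name mangling (algorithm name plus digest size in bits, e.g. `blake2b512`). Through the public hashes API that lookup is hit like this:

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

# BLAKE2b is only defined for a 64-byte digest in this API.
h = hashes.Hash(hashes.BLAKE2b(64), backend=default_backend())
h.update(b"data")
print(h.finalize().hex())
```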
    @@ -202,7 +242,7 @@ class Backend(object):
     
         def register_cipher_adapter(self, cipher_cls, mode_cls, adapter):
             if (cipher_cls, mode_cls) in self._cipher_registry:
    -            raise ValueError("Duplicate registration for: {0} {1}.".format(
    +            raise ValueError("Duplicate registration for: {} {}.".format(
                     cipher_cls, mode_cls)
                 )
             self._cipher_registry[cipher_cls, mode_cls] = adapter
    @@ -257,6 +297,10 @@ class Backend(object):
                 type(None),
                 GetCipherByName("rc4")
             )
    +        # We don't actually support RC2, this is just used by some tests.
    +        self.register_cipher_adapter(
    +            _RC2, type(None), GetCipherByName("rc2")
    +        )
             self.register_cipher_adapter(
                 ChaCha20,
                 type(None),
    @@ -276,11 +320,10 @@ class Backend(object):
         def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
                                key_material):
             buf = self._ffi.new("unsigned char[]", length)
    -        evp_md = self._lib.EVP_get_digestbyname(
    -            algorithm.name.encode("ascii"))
    -        self.openssl_assert(evp_md != self._ffi.NULL)
    +        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
    +        key_material_ptr = self._ffi.from_buffer(key_material)
             res = self._lib.PKCS5_PBKDF2_HMAC(
    -            key_material,
    +            key_material_ptr,
                 len(key_material),
                 salt,
                 len(salt),
    @@ -305,7 +348,10 @@ class Backend(object):
                 bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
                 # A zero length means the BN has value 0
                 self.openssl_assert(bin_len >= 0)
    -            return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
    +            val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
    +            if self._lib.BN_is_negative(bn):
    +                val = -val
    +            return val
             else:
                 # Under Python 2 the best we can do is hex()
                 hex_cdata = self._lib.BN_bn2hex(bn)
    @@ -433,13 +479,13 @@ class Backend(object):
             The char* is the storage for the BIO and it must stay alive until the
             BIO is finished with.
             """
    -        data_char_p = self._ffi.new("char[]", data)
    +        data_ptr = self._ffi.from_buffer(data)
             bio = self._lib.BIO_new_mem_buf(
    -            data_char_p, len(data)
    +            data_ptr, len(data)
             )
             self.openssl_assert(bio != self._ffi.NULL)
     
    -        return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p)
    +        return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
     
         def _create_mem_bio_gc(self):
             """
    @@ -491,6 +537,18 @@ class Backend(object):
                 self.openssl_assert(dh_cdata != self._ffi.NULL)
                 dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
                 return _DHPrivateKey(self, dh_cdata, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
    +            # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
    +            return _Ed25519PrivateKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
    +            # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
    +            return _X448PrivateKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
    +            # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
    +            return _X25519PrivateKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
    +            # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
    +            return _Ed448PrivateKey(self, evp_pkey)
             else:
                 raise UnsupportedAlgorithm("Unsupported key type.")
     
    @@ -522,6 +580,18 @@ class Backend(object):
                 self.openssl_assert(dh_cdata != self._ffi.NULL)
                 dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
                 return _DHPublicKey(self, dh_cdata, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
    +            # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
    +            return _Ed25519PublicKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
    +            # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
    +            return _X448PublicKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
    +            # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
    +            return _X25519PublicKey(self, evp_pkey)
    +        elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+            # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
    +            return _Ed448PublicKey(self, evp_pkey)
             else:
                 raise UnsupportedAlgorithm("Unsupported key type.")
     
    @@ -663,10 +733,18 @@ class Backend(object):
             return _CMACContext(self, algorithm)
     
         def create_x509_csr(self, builder, private_key, algorithm):
    -        if not isinstance(algorithm, hashes.HashAlgorithm):
    -            raise TypeError('Algorithm must be a registered hash algorithm.')
    +        if not isinstance(builder, x509.CertificateSigningRequestBuilder):
    +            raise TypeError('Builder type mismatch.')
     
    -        if (
    +        if isinstance(private_key,
    +                      (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)):
    +            if algorithm is not None:
    +                raise ValueError(
    +                    "algorithm must be None when signing via ed25519 or ed448"
    +                )
    +        elif not isinstance(algorithm, hashes.HashAlgorithm):
    +            raise TypeError('Algorithm must be a registered hash algorithm.')
    +        elif (
                 isinstance(algorithm, hashes.MD5) and not
                 isinstance(private_key, rsa.RSAPrivateKey)
             ):
    @@ -675,10 +753,7 @@ class Backend(object):
                 )
     
             # Resolve the signature algorithm.
    -        evp_md = self._lib.EVP_get_digestbyname(
    -            algorithm.name.encode('ascii')
    -        )
    -        self.openssl_assert(evp_md != self._ffi.NULL)
    +        evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
     
             # Create an empty request.
             x509_req = self._lib.X509_REQ_new()
    @@ -706,10 +781,15 @@ class Backend(object):
             sk_extension = self._lib.sk_X509_EXTENSION_new_null()
             self.openssl_assert(sk_extension != self._ffi.NULL)
             sk_extension = self._ffi.gc(
    -            sk_extension, self._lib.sk_X509_EXTENSION_free
    +            sk_extension,
    +            lambda x: self._lib.sk_X509_EXTENSION_pop_free(
    +                x, self._ffi.addressof(
    +                    self._lib._original_lib, "X509_EXTENSION_free"
    +                )
    +            )
             )
    -        # gc is not necessary for CSRs, as sk_X509_EXTENSION_free
    -        # will release all the X509_EXTENSIONs.
    +        # Don't GC individual extensions because the memory is owned by
    +        # sk_extensions and will be freed along with it.
             self._create_x509_extensions(
                 extensions=builder._extensions,
                 handlers=_EXTENSION_ENCODE_HANDLERS,
    @@ -740,7 +820,13 @@ class Backend(object):
         def create_x509_certificate(self, builder, private_key, algorithm):
             if not isinstance(builder, x509.CertificateBuilder):
                 raise TypeError('Builder type mismatch.')
    -        if not isinstance(algorithm, hashes.HashAlgorithm):
    +        if isinstance(private_key,
    +                      (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)):
    +            if algorithm is not None:
    +                raise ValueError(
    +                    "algorithm must be None when signing via ed25519 or ed448"
    +                )
    +        elif not isinstance(algorithm, hashes.HashAlgorithm):
                 raise TypeError('Algorithm must be a registered hash algorithm.')
     
             if (
    @@ -748,14 +834,11 @@ class Backend(object):
                 isinstance(private_key, rsa.RSAPrivateKey)
             ):
                 raise ValueError(
    -                "MD5 is not a supported hash algorithm for EC/DSA certificates"
    +                "MD5 is only (reluctantly) supported for RSA certificates"
                 )
     
             # Resolve the signature algorithm.
    -        evp_md = self._lib.EVP_get_digestbyname(
    -            algorithm.name.encode('ascii')
    -        )
    -        self.openssl_assert(evp_md != self._ffi.NULL)
    +        evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
     
             # Create an empty certificate.
             x509_cert = self._lib.X509_new()
    @@ -783,20 +866,14 @@ class Backend(object):
             self.openssl_assert(res == 1)
     
             # Set the "not before" time.
    -        res = self._lib.ASN1_TIME_set(
    -            self._lib.X509_get_notBefore(x509_cert),
    -            calendar.timegm(builder._not_valid_before.timetuple())
    +        self._set_asn1_time(
    +            self._lib.X509_getm_notBefore(x509_cert), builder._not_valid_before
             )
    -        if res == self._ffi.NULL:
    -            self._raise_time_set_error()
     
             # Set the "not after" time.
    -        res = self._lib.ASN1_TIME_set(
    -            self._lib.X509_get_notAfter(x509_cert),
    -            calendar.timegm(builder._not_valid_after.timetuple())
    +        self._set_asn1_time(
    +            self._lib.X509_getm_notAfter(x509_cert), builder._not_valid_after
             )
    -        if res == self._ffi.NULL:
    -            self._raise_time_set_error()
     
             # Add extensions.
             self._create_x509_extensions(
    @@ -829,23 +906,39 @@ class Backend(object):
     
             return _Certificate(self, x509_cert)
     
    -    def _raise_time_set_error(self):
    -        errors = self._consume_errors()
    -        self.openssl_assert(
    -            errors[0]._lib_reason_match(
    -                self._lib.ERR_LIB_ASN1,
    -                self._lib.ASN1_R_ERROR_GETTING_TIME
    -            )
    -        )
    -        raise ValueError(
    -            "Invalid time. This error can occur if you set a time too far in "
    -            "the future on Windows."
    -        )
    +    def _evp_md_x509_null_if_eddsa(self, private_key, algorithm):
    +        if isinstance(private_key,
    +                      (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)):
    +            # OpenSSL requires us to pass NULL for EVP_MD for ed25519/ed448
    +            return self._ffi.NULL
    +        else:
    +            return self._evp_md_non_null_from_algorithm(algorithm)
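
The _evp_md_x509_null_if_eddsa helper above and the new isinstance checks in the builders encode the same rule: for Ed25519/Ed448 the digest is fixed by the algorithm itself, so OpenSSL wants a NULL EVP_MD and callers must pass algorithm=None. A sketch against the public x509 API, assuming a build linked to OpenSSL 1.1.1+ (the subject name is illustrative):

    import datetime
    from cryptography import x509
    from cryptography.x509.oid import NameOID
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ed25519

    key = ed25519.Ed25519PrivateKey.generate()
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example")])
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime(2019, 1, 1))
        .not_valid_after(datetime.datetime(2029, 1, 1))
        # algorithm must be None when signing via ed25519 or ed448
        .sign(key, None, default_backend())
    )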
    +
    +    def _set_asn1_time(self, asn1_time, time):
    +        if time.year >= 2050:
    +            asn1_str = time.strftime('%Y%m%d%H%M%SZ').encode('ascii')
    +        else:
    +            asn1_str = time.strftime('%y%m%d%H%M%SZ').encode('ascii')
    +        res = self._lib.ASN1_TIME_set_string(asn1_time, asn1_str)
    +        self.openssl_assert(res == 1)
    +
    +    def _create_asn1_time(self, time):
    +        asn1_time = self._lib.ASN1_TIME_new()
    +        self.openssl_assert(asn1_time != self._ffi.NULL)
    +        asn1_time = self._ffi.gc(asn1_time, self._lib.ASN1_TIME_free)
    +        self._set_asn1_time(asn1_time, time)
    +        return asn1_time
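
The 2050 cutoff in _set_asn1_time follows RFC 5280: validity times before 2050 are encoded as UTCTime (two-digit year) and later ones as GeneralizedTime (four-digit year). A worked example of the strings handed to ASN1_TIME_set_string:

    import datetime

    def asn1_time_str(time):
        if time.year >= 2050:
            return time.strftime('%Y%m%d%H%M%SZ')  # GeneralizedTime
        return time.strftime('%y%m%d%H%M%SZ')      # UTCTime

    assert asn1_time_str(datetime.datetime(2019, 6, 1, 12, 0)) == '190601120000Z'
    assert asn1_time_str(datetime.datetime(2050, 1, 1)) == '20500101000000Z'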
     
         def create_x509_crl(self, builder, private_key, algorithm):
             if not isinstance(builder, x509.CertificateRevocationListBuilder):
                 raise TypeError('Builder type mismatch.')
    -        if not isinstance(algorithm, hashes.HashAlgorithm):
    +        if isinstance(private_key,
    +                      (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)):
    +            if algorithm is not None:
    +                raise ValueError(
    +                    "algorithm must be None when signing via ed25519 or ed448"
    +                )
    +        elif not isinstance(algorithm, hashes.HashAlgorithm):
                 raise TypeError('Algorithm must be a registered hash algorithm.')
     
             if (
    @@ -856,10 +949,7 @@ class Backend(object):
                     "MD5 is not a supported hash algorithm for EC/DSA CRLs"
                 )
     
    -        evp_md = self._lib.EVP_get_digestbyname(
    -            algorithm.name.encode('ascii')
    -        )
    -        self.openssl_assert(evp_md != self._ffi.NULL)
    +        evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
     
             # Create an empty CRL.
             x509_crl = self._lib.X509_CRL_new()
    @@ -876,20 +966,12 @@ class Backend(object):
             self.openssl_assert(res == 1)
     
             # Set the last update time.
    -        last_update = self._lib.ASN1_TIME_set(
    -            self._ffi.NULL, calendar.timegm(builder._last_update.timetuple())
    -        )
    -        self.openssl_assert(last_update != self._ffi.NULL)
    -        last_update = self._ffi.gc(last_update, self._lib.ASN1_TIME_free)
    +        last_update = self._create_asn1_time(builder._last_update)
             res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update)
             self.openssl_assert(res == 1)
     
             # Set the next update time.
    -        next_update = self._lib.ASN1_TIME_set(
    -            self._ffi.NULL, calendar.timegm(builder._next_update.timetuple())
    -        )
    -        self.openssl_assert(next_update != self._ffi.NULL)
    -        next_update = self._ffi.gc(next_update, self._lib.ASN1_TIME_free)
    +        next_update = self._create_asn1_time(builder._next_update)
             res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update)
             self.openssl_assert(res == 1)
     
    @@ -951,20 +1033,27 @@ class Backend(object):
     
         def _create_x509_extension(self, handlers, extension):
             if isinstance(extension.value, x509.UnrecognizedExtension):
    -            value = _encode_asn1_str_gc(
    -                self, extension.value.value, len(extension.value.value)
    -            )
    +            value = _encode_asn1_str_gc(self, extension.value.value)
                 return self._create_raw_x509_extension(extension, value)
             elif isinstance(extension.value, x509.TLSFeature):
    -            asn1 = _Integers([x.value for x in extension.value]).dump()
    -            value = _encode_asn1_str_gc(self, asn1, len(asn1))
    +            asn1 = encode_der(
    +                SEQUENCE,
    +                *[
    +                    encode_der(INTEGER, encode_der_integer(x.value))
    +                    for x in extension.value
    +                ]
    +            )
    +            value = _encode_asn1_str_gc(self, asn1)
    +            return self._create_raw_x509_extension(extension, value)
    +        elif isinstance(extension.value, x509.PrecertPoison):
    +            value = _encode_asn1_str_gc(self, encode_der(NULL))
                 return self._create_raw_x509_extension(extension, value)
             else:
                 try:
                     encode = handlers[extension.oid]
                 except KeyError:
                     raise NotImplementedError(
    -                    'Extension not supported: {0}'.format(extension.oid)
    +                    'Extension not supported: {}'.format(extension.oid)
                     )
     
                 ext_struct = encode(self, extension.value)
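
The TLSFeature encoding above is small enough to verify by hand; for the common status_request value (OCSP must-staple, feature number 5) the DER is SEQUENCE { INTEGER 5 }. A sketch using the same private cryptography.hazmat._der helpers this diff imports:

    from cryptography.hazmat._der import (
        INTEGER, SEQUENCE, encode_der, encode_der_integer
    )

    # INTEGER 5 -> 02 01 05; wrapping SEQUENCE -> 30 03 02 01 05
    asn1 = encode_der(SEQUENCE, encode_der(INTEGER, encode_der_integer(5)))
    assert asn1 == b"\x30\x03\x02\x01\x05"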
    @@ -988,12 +1077,7 @@ class Backend(object):
                 x509_revoked, serial_number
             )
             self.openssl_assert(res == 1)
    -        rev_date = self._lib.ASN1_TIME_set(
    -            self._ffi.NULL,
    -            calendar.timegm(builder._revocation_date.timetuple())
    -        )
    -        self.openssl_assert(rev_date != self._ffi.NULL)
    -        rev_date = self._ffi.gc(rev_date, self._lib.ASN1_TIME_free)
    +        rev_date = self._create_asn1_time(builder._revocation_date)
             res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date)
             self.openssl_assert(res == 1)
             # add CRL entry extensions
    @@ -1135,7 +1219,10 @@ class Backend(object):
             )
             if x509 == self._ffi.NULL:
                 self._consume_errors()
    -            raise ValueError("Unable to load certificate")
    +            raise ValueError(
    +                "Unable to load certificate. See https://cryptography.io/en/la"
    +                "test/faq/#why-can-t-i-import-my-pem-file for more details."
    +            )
     
             x509 = self._ffi.gc(x509, self._lib.X509_free)
             return _Certificate(self, x509)
    @@ -1157,7 +1244,10 @@ class Backend(object):
             )
             if x509_crl == self._ffi.NULL:
                 self._consume_errors()
    -            raise ValueError("Unable to load CRL")
    +            raise ValueError(
    +                "Unable to load CRL. See https://cryptography.io/en/la"
    +                "test/faq/#why-can-t-i-import-my-pem-file for more details."
    +            )
     
             x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
             return _CertificateRevocationList(self, x509_crl)
    @@ -1179,7 +1269,10 @@ class Backend(object):
             )
             if x509_req == self._ffi.NULL:
                 self._consume_errors()
    -            raise ValueError("Unable to load request")
    +            raise ValueError(
    +                "Unable to load request. See https://cryptography.io/en/la"
    +                "test/faq/#why-can-t-i-import-my-pem-file for more details."
    +            )
     
             x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
             return _CertificateSigningRequest(self, x509_req)
    @@ -1197,13 +1290,11 @@ class Backend(object):
         def _load_key(self, openssl_read_func, convert_func, data, password):
             mem_bio = self._bytes_to_bio(data)
     
    -        if password is not None and not isinstance(password, bytes):
    -            raise TypeError("Password must be bytes")
    -
             userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
             if password is not None:
    -            password_buf = self._ffi.new("char []", password)
    -            userdata.password = password_buf
    +            utils._check_byteslike("password", password)
    +            password_ptr = self._ffi.from_buffer(password)
    +            userdata.password = password_ptr
                 userdata.length = len(password)
     
             evp_pkey = openssl_read_func(
    @@ -1226,7 +1317,7 @@ class Backend(object):
                     else:
                         assert userdata.error == -2
                         raise ValueError(
    -                        "Passwords longer than {0} bytes are not supported "
    +                        "Passwords longer than {} bytes are not supported "
                             "by this backend.".format(userdata.maxsize - 1)
                         )
                 else:
    @@ -1328,11 +1419,7 @@ class Backend(object):
             """
     
             if self.elliptic_curve_supported(curve):
    -            curve_nid = self._elliptic_curve_to_nid(curve)
    -
    -            ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
    -            self.openssl_assert(ec_cdata != self._ffi.NULL)
    -            ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
    +            ec_cdata = self._ec_key_new_by_curve(curve)
     
                 res = self._lib.EC_KEY_generate_key(ec_cdata)
                 self.openssl_assert(res == 1)
    @@ -1342,18 +1429,14 @@ class Backend(object):
                 return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
             else:
                 raise UnsupportedAlgorithm(
    -                "Backend object does not support {0}.".format(curve.name),
    +                "Backend object does not support {}.".format(curve.name),
                     _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
                 )
     
         def load_elliptic_curve_private_numbers(self, numbers):
             public = numbers.public_numbers
     
    -        curve_nid = self._elliptic_curve_to_nid(public.curve)
    -
    -        ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
    -        self.openssl_assert(ec_cdata != self._ffi.NULL)
    -        ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
    +        ec_cdata = self._ec_key_new_by_curve(public.curve)
     
             private_value = self._ffi.gc(
                 self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
    @@ -1369,24 +1452,35 @@ class Backend(object):
             return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
     
         def load_elliptic_curve_public_numbers(self, numbers):
    -        curve_nid = self._elliptic_curve_to_nid(numbers.curve)
    -
    -        ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
    -        self.openssl_assert(ec_cdata != self._ffi.NULL)
    -        ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
    -
    +        ec_cdata = self._ec_key_new_by_curve(numbers.curve)
             ec_cdata = self._ec_key_set_public_key_affine_coordinates(
                 ec_cdata, numbers.x, numbers.y)
             evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
     
             return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
     
    -    def derive_elliptic_curve_private_key(self, private_value, curve):
    -        curve_nid = self._elliptic_curve_to_nid(curve)
    +    def load_elliptic_curve_public_bytes(self, curve, point_bytes):
    +        ec_cdata = self._ec_key_new_by_curve(curve)
    +        group = self._lib.EC_KEY_get0_group(ec_cdata)
    +        self.openssl_assert(group != self._ffi.NULL)
    +        point = self._lib.EC_POINT_new(group)
    +        self.openssl_assert(point != self._ffi.NULL)
    +        point = self._ffi.gc(point, self._lib.EC_POINT_free)
    +        with self._tmp_bn_ctx() as bn_ctx:
    +            res = self._lib.EC_POINT_oct2point(
    +                group, point, point_bytes, len(point_bytes), bn_ctx
    +            )
    +            if res != 1:
    +                self._consume_errors()
    +                raise ValueError("Invalid public bytes for the given curve")
     
    -        ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
    -        self.openssl_assert(ec_cdata != self._ffi.NULL)
    -        ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
    +        res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
    +        self.openssl_assert(res == 1)
    +        evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
    +        return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
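
load_elliptic_curve_public_bytes is the backend half of the public from_encoded_point() constructor; a round-trip sketch through the X9.62 uncompressed point encoding it consumes:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import ec

    priv = ec.generate_private_key(ec.SECP256R1(), default_backend())
    point = priv.public_key().public_bytes(
        serialization.Encoding.X962,
        serialization.PublicFormat.UncompressedPoint,
    )
    # Routed to load_elliptic_curve_public_bytes(); a malformed point
    # raises ValueError("Invalid public bytes for the given curve").
    pub = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), point)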
    +
    +    def derive_elliptic_curve_private_key(self, private_value, curve):
    +        ec_cdata = self._ec_key_new_by_curve(curve)
     
             get_func, group = self._ec_key_determine_group_get_func(ec_cdata)
     
    @@ -1419,6 +1513,149 @@ class Backend(object):
     
             return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
     
    +    def _ec_key_new_by_curve(self, curve):
    +        curve_nid = self._elliptic_curve_to_nid(curve)
    +        ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
    +        self.openssl_assert(ec_cdata != self._ffi.NULL)
    +        return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
    +
    +    def load_der_ocsp_request(self, data):
    +        mem_bio = self._bytes_to_bio(data)
    +        request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL)
    +        if request == self._ffi.NULL:
    +            self._consume_errors()
    +            raise ValueError("Unable to load OCSP request")
    +
    +        request = self._ffi.gc(request, self._lib.OCSP_REQUEST_free)
    +        return _OCSPRequest(self, request)
    +
    +    def load_der_ocsp_response(self, data):
    +        mem_bio = self._bytes_to_bio(data)
    +        response = self._lib.d2i_OCSP_RESPONSE_bio(mem_bio.bio, self._ffi.NULL)
    +        if response == self._ffi.NULL:
    +            self._consume_errors()
    +            raise ValueError("Unable to load OCSP response")
    +
    +        response = self._ffi.gc(response, self._lib.OCSP_RESPONSE_free)
    +        return _OCSPResponse(self, response)
    +
    +    def create_ocsp_request(self, builder):
    +        ocsp_req = self._lib.OCSP_REQUEST_new()
    +        self.openssl_assert(ocsp_req != self._ffi.NULL)
    +        ocsp_req = self._ffi.gc(ocsp_req, self._lib.OCSP_REQUEST_free)
    +        cert, issuer, algorithm = builder._request
    +        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
    +        certid = self._lib.OCSP_cert_to_id(
    +            evp_md, cert._x509, issuer._x509
    +        )
    +        self.openssl_assert(certid != self._ffi.NULL)
    +        onereq = self._lib.OCSP_request_add0_id(ocsp_req, certid)
    +        self.openssl_assert(onereq != self._ffi.NULL)
    +        self._create_x509_extensions(
    +            extensions=builder._extensions,
    +            handlers=_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
    +            x509_obj=ocsp_req,
    +            add_func=self._lib.OCSP_REQUEST_add_ext,
    +            gc=True,
    +        )
    +        return _OCSPRequest(self, ocsp_req)
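
create_ocsp_request is reached through the public ocsp.OCSPRequestBuilder API. A sketch, where cert.pem and issuer.pem are placeholder inputs:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    backend = default_backend()
    with open("cert.pem", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read(), backend)
    with open("issuer.pem", "rb") as f:
        issuer = x509.load_pem_x509_certificate(f.read(), backend)

    req = ocsp.OCSPRequestBuilder().add_certificate(
        cert, issuer, hashes.SHA1()
    ).build()
    der = req.public_bytes(serialization.Encoding.DER)  # POST to the responder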
    +
    +    def _create_ocsp_basic_response(self, builder, private_key, algorithm):
    +        basic = self._lib.OCSP_BASICRESP_new()
    +        self.openssl_assert(basic != self._ffi.NULL)
    +        basic = self._ffi.gc(basic, self._lib.OCSP_BASICRESP_free)
    +        evp_md = self._evp_md_non_null_from_algorithm(
    +            builder._response._algorithm
    +        )
    +        certid = self._lib.OCSP_cert_to_id(
    +            evp_md, builder._response._cert._x509,
    +            builder._response._issuer._x509
    +        )
    +        self.openssl_assert(certid != self._ffi.NULL)
    +        certid = self._ffi.gc(certid, self._lib.OCSP_CERTID_free)
    +        if builder._response._revocation_reason is None:
    +            reason = -1
    +        else:
    +            reason = _CRL_ENTRY_REASON_ENUM_TO_CODE[
    +                builder._response._revocation_reason
    +            ]
    +        if builder._response._revocation_time is None:
    +            rev_time = self._ffi.NULL
    +        else:
    +            rev_time = self._create_asn1_time(
    +                builder._response._revocation_time
    +            )
    +
    +        next_update = self._ffi.NULL
    +        if builder._response._next_update is not None:
    +            next_update = self._create_asn1_time(
    +                builder._response._next_update
    +            )
    +
    +        this_update = self._create_asn1_time(builder._response._this_update)
    +
    +        res = self._lib.OCSP_basic_add1_status(
    +            basic,
    +            certid,
    +            builder._response._cert_status.value,
    +            reason,
    +            rev_time,
    +            this_update,
    +            next_update
    +        )
    +        self.openssl_assert(res != self._ffi.NULL)
    +        # okay, now sign the basic structure
    +        evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
    +        responder_cert, responder_encoding = builder._responder_id
    +        flags = self._lib.OCSP_NOCERTS
    +        if responder_encoding is ocsp.OCSPResponderEncoding.HASH:
    +            flags |= self._lib.OCSP_RESPID_KEY
    +
    +        if builder._certs is not None:
    +            for cert in builder._certs:
    +                res = self._lib.OCSP_basic_add1_cert(basic, cert._x509)
    +                self.openssl_assert(res == 1)
    +
    +        self._create_x509_extensions(
    +            extensions=builder._extensions,
    +            handlers=_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
    +            x509_obj=basic,
    +            add_func=self._lib.OCSP_BASICRESP_add_ext,
    +            gc=True,
    +        )
    +
    +        res = self._lib.OCSP_basic_sign(
    +            basic, responder_cert._x509, private_key._evp_pkey,
    +            evp_md, self._ffi.NULL, flags
    +        )
    +        if res != 1:
    +            errors = self._consume_errors()
    +            self.openssl_assert(
    +                errors[0]._lib_reason_match(
    +                    self._lib.ERR_LIB_X509,
    +                    self._lib.X509_R_KEY_VALUES_MISMATCH
    +                )
    +            )
    +            raise ValueError("responder_cert must be signed by private_key")
    +
    +        return basic
    +
    +    def create_ocsp_response(self, response_status, builder, private_key,
    +                             algorithm):
    +        if response_status is ocsp.OCSPResponseStatus.SUCCESSFUL:
    +            basic = self._create_ocsp_basic_response(
    +                builder, private_key, algorithm
    +            )
    +        else:
    +            basic = self._ffi.NULL
    +
    +        ocsp_resp = self._lib.OCSP_response_create(
    +            response_status.value, basic
    +        )
    +        self.openssl_assert(ocsp_resp != self._ffi.NULL)
    +        ocsp_resp = self._ffi.gc(ocsp_resp, self._lib.OCSP_RESPONSE_free)
    +        return _OCSPResponse(self, ocsp_resp)
    +
         def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
             return (
                 self.elliptic_curve_supported(curve) and
    @@ -1446,7 +1683,7 @@ class Backend(object):
             curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
             if curve_nid == self._lib.NID_undef:
                 raise UnsupportedAlgorithm(
    -                "{0} is not a supported elliptic curve".format(curve.name),
    +                "{} is not a supported elliptic curve".format(curve.name),
                     _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
                 )
             return curve_nid
    @@ -1517,6 +1754,20 @@ class Backend(object):
                     "format must be an item from the PrivateFormat enum"
                 )
     
    +        # X9.62 encoding is only valid for EC public keys
    +        if encoding is serialization.Encoding.X962:
    +            raise ValueError("X9.62 format is only valid for EC public keys")
    +
    +        # Raw format and encoding are only valid for X25519, Ed25519, X448, and
    +        # Ed448 keys. We capture those cases before this method is called so if
    +        # we see those enum values here it means the caller has passed them to
    +        # a key that doesn't support raw type
    +        if format is serialization.PrivateFormat.Raw:
    +            raise ValueError("raw format is invalid with this key or encoding")
    +
    +        if encoding is serialization.Encoding.Raw:
    +            raise ValueError("raw encoding is invalid with this key or format")
    +
             if not isinstance(encryption_algorithm,
                               serialization.KeySerializationEncryption):
                 raise TypeError(
    @@ -1576,7 +1827,7 @@ class Backend(object):
                     write_bio = self._lib.i2d_PKCS8PrivateKey_bio
                     key = evp_pkey
             else:
    -            raise TypeError("encoding must be an item from the Encoding enum")
    +            raise TypeError("encoding must be Encoding.PEM or Encoding.DER")
     
             bio = self._create_mem_bio_gc()
             res = write_bio(
    @@ -1609,6 +1860,23 @@ class Backend(object):
             if not isinstance(encoding, serialization.Encoding):
                 raise TypeError("encoding must be an item from the Encoding enum")
     
    +        # Compressed/UncompressedPoint are only valid for EC keys and those
    +        # cases are handled by the ECPublicKey public_bytes method before this
    +        # method is called
    +        if format in (serialization.PublicFormat.UncompressedPoint,
    +                      serialization.PublicFormat.CompressedPoint):
    +            raise ValueError("Point formats are not valid for this key type")
    +
    +        # Raw format and encoding are only valid for X25519, Ed25519, X448, and
    +        # Ed448 keys. We capture those cases before this method is called so if
    +        # we see those enum values here it means the caller has passed them to
    +        # a key that doesn't support raw type
    +        if format is serialization.PublicFormat.Raw:
    +            raise ValueError("raw format is invalid with this key or encoding")
    +
    +        if encoding is serialization.Encoding.Raw:
    +            raise ValueError("raw encoding is invalid with this key or format")
    +
             if (
                 format is serialization.PublicFormat.OpenSSH or
                 encoding is serialization.Encoding.OpenSSH
    @@ -1653,22 +1921,28 @@ class Backend(object):
             if isinstance(key, rsa.RSAPublicKey):
                 public_numbers = key.public_numbers()
                 return b"ssh-rsa " + base64.b64encode(
    -                serialization._ssh_write_string(b"ssh-rsa") +
    -                serialization._ssh_write_mpint(public_numbers.e) +
    -                serialization._ssh_write_mpint(public_numbers.n)
    +                ssh._ssh_write_string(b"ssh-rsa") +
    +                ssh._ssh_write_mpint(public_numbers.e) +
    +                ssh._ssh_write_mpint(public_numbers.n)
                 )
             elif isinstance(key, dsa.DSAPublicKey):
                 public_numbers = key.public_numbers()
                 parameter_numbers = public_numbers.parameter_numbers
                 return b"ssh-dss " + base64.b64encode(
    -                serialization._ssh_write_string(b"ssh-dss") +
    -                serialization._ssh_write_mpint(parameter_numbers.p) +
    -                serialization._ssh_write_mpint(parameter_numbers.q) +
    -                serialization._ssh_write_mpint(parameter_numbers.g) +
    -                serialization._ssh_write_mpint(public_numbers.y)
    +                ssh._ssh_write_string(b"ssh-dss") +
    +                ssh._ssh_write_mpint(parameter_numbers.p) +
    +                ssh._ssh_write_mpint(parameter_numbers.q) +
    +                ssh._ssh_write_mpint(parameter_numbers.g) +
    +                ssh._ssh_write_mpint(public_numbers.y)
                 )
    -        else:
    -            assert isinstance(key, ec.EllipticCurvePublicKey)
    +        elif isinstance(key, ed25519.Ed25519PublicKey):
    +            raw_bytes = key.public_bytes(serialization.Encoding.Raw,
    +                                         serialization.PublicFormat.Raw)
    +            return b"ssh-ed25519 " + base64.b64encode(
    +                ssh._ssh_write_string(b"ssh-ed25519") +
    +                ssh._ssh_write_string(raw_bytes)
    +            )
    +        elif isinstance(key, ec.EllipticCurvePublicKey):
                 public_numbers = key.public_numbers()
                 try:
                     curve_name = {
    @@ -1681,10 +1955,19 @@ class Backend(object):
                         "Only SECP256R1, SECP384R1, and SECP521R1 curves are "
                         "supported by the SSH public key format"
                     )
    +
    +            point = key.public_bytes(
    +                serialization.Encoding.X962,
    +                serialization.PublicFormat.UncompressedPoint
    +            )
                 return b"ecdsa-sha2-" + curve_name + b" " + base64.b64encode(
    -                serialization._ssh_write_string(b"ecdsa-sha2-" + curve_name) +
    -                serialization._ssh_write_string(curve_name) +
    -                serialization._ssh_write_string(public_numbers.encode_point())
    +                ssh._ssh_write_string(b"ecdsa-sha2-" + curve_name) +
    +                ssh._ssh_write_string(curve_name) +
    +                ssh._ssh_write_string(point)
    +            )
    +        else:
    +            raise ValueError(
    +                "OpenSSH encoding is not supported for this key type"
                 )
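
Every branch above emits the RFC 4251 wire format: length-prefixed strings and sign-safe mpints, base64-wrapped behind a key-type prefix. A hypothetical reimplementation of the two helpers referenced above as ssh._ssh_write_string / ssh._ssh_write_mpint, for illustration only:

    import struct

    def ssh_write_string(data):
        # string: uint32 big-endian length prefix, then the raw bytes
        return struct.pack(">I", len(data)) + data

    def ssh_write_mpint(value):
        # mpint: minimal big-endian bytes; prepend 0x00 if the high bit
        # is set so the value is not read as negative
        data = value.to_bytes((value.bit_length() + 7) // 8, "big")
        if data and data[0] & 0x80:
            data = b"\x00" + data
        return ssh_write_string(data)

    # b"ssh-ed25519" is 11 bytes, so it serializes with prefix 00 00 00 0b
    assert ssh_write_string(b"ssh-ed25519")[:4] == b"\x00\x00\x00\x0b"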
     
         def _parameter_bytes(self, encoding, format, cdata):
    @@ -1888,6 +2171,11 @@ class Backend(object):
             return self._ffi.buffer(pp[0], res)[:]
     
         def x25519_load_public_bytes(self, data):
    +        # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
    +        # switch this to EVP_PKEY_new_raw_public_key
    +        if len(data) != 32:
    +            raise ValueError("An X25519 public key is 32 bytes long")
    +
             evp_pkey = self._create_evp_pkey_gc()
             res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519)
             backend.openssl_assert(res == 1)
    @@ -1898,6 +2186,9 @@ class Backend(object):
             return _X25519PublicKey(self, evp_pkey)
     
         def x25519_load_private_bytes(self, data):
    +        # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
    +        # switch this to EVP_PKEY_new_raw_private_key and drop the
    +        # zeroed_bytearray garbage.
             # OpenSSL only has facilities for loading PKCS8 formatted private
             # keys using the algorithm identifiers specified in
             # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09.
    @@ -1911,9 +2202,16 @@ class Backend(object):
             # Of course there's a bit more complexity. In reality OCTET STRING
             # contains an OCTET STRING of length 32! So the last two bytes here
             # are \x04\x20, which is an OCTET STRING of length 32.
    +        if len(data) != 32:
    +            raise ValueError("An X25519 private key is 32 bytes long")
    +
             pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 '
    -        bio = self._bytes_to_bio(pkcs8_prefix + data)
    -        evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL)
    +        with self._zeroed_bytearray(48) as ba:
    +            ba[0:16] = pkcs8_prefix
    +            ba[16:] = data
    +            bio = self._bytes_to_bio(ba)
    +            evp_pkey = backend._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL)
    +
             self.openssl_assert(evp_pkey != self._ffi.NULL)
             evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
             self.openssl_assert(
    @@ -1921,14 +2219,10 @@ class Backend(object):
             )
             return _X25519PrivateKey(self, evp_pkey)
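
The 16-byte pkcs8_prefix above is the fixed DER header of a PKCS#8 PrivateKeyInfo carrying the id-X25519 OID (1.3.101.110); with the 32 raw key bytes appended it forms the 48-byte buffer built in the zeroed bytearray. Byte by byte:

    # 30 2e                 SEQUENCE, 46 bytes follow (14 header + 32 key)
    # 02 01 00              INTEGER 0 (PrivateKeyInfo version)
    # 30 05 06 03 2b 65 6e  AlgorithmIdentifier { OID 1.3.101.110 }
    # 04 22                 OCTET STRING, 34 bytes
    # 04 20                 ...holding an OCTET STRING of the 32 key bytes
    pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 '
    assert len(pkcs8_prefix) == 16
    assert pkcs8_prefix[0] == 0x30 and pkcs8_prefix[1] == 0x2e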
     
    -    def x25519_generate_key(self):
    -        evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(
    -            self._lib.NID_X25519, self._ffi.NULL
    -        )
    +    def _evp_pkey_keygen_gc(self, nid):
    +        evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL)
             self.openssl_assert(evp_pkey_ctx != self._ffi.NULL)
    -        evp_pkey_ctx = self._ffi.gc(
    -            evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free
    -        )
    +        evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free)
             res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx)
             self.openssl_assert(res == 1)
             evp_ppkey = self._ffi.new("EVP_PKEY **")
    @@ -1936,18 +2230,143 @@ class Backend(object):
             self.openssl_assert(res == 1)
             self.openssl_assert(evp_ppkey[0] != self._ffi.NULL)
             evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free)
    +        return evp_pkey
    +
    +    def x25519_generate_key(self):
    +        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519)
             return _X25519PrivateKey(self, evp_pkey)
     
         def x25519_supported(self):
             return self._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER
     
    +    def x448_load_public_bytes(self, data):
    +        if len(data) != 56:
    +            raise ValueError("An X448 public key is 56 bytes long")
    +
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
    +            self._lib.NID_X448, self._ffi.NULL, data, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +        return _X448PublicKey(self, evp_pkey)
    +
    +    def x448_load_private_bytes(self, data):
    +        if len(data) != 56:
    +            raise ValueError("An X448 private key is 56 bytes long")
    +
    +        data_ptr = self._ffi.from_buffer(data)
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
    +            self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +        return _X448PrivateKey(self, evp_pkey)
    +
    +    def x448_generate_key(self):
    +        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448)
    +        return _X448PrivateKey(self, evp_pkey)
    +
    +    def x448_supported(self):
    +        return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111
    +
    +    def ed25519_supported(self):
    +        return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
    +
    +    def ed25519_load_public_bytes(self, data):
    +        utils._check_bytes("data", data)
    +
    +        if len(data) != ed25519._ED25519_KEY_SIZE:
    +            raise ValueError("An Ed25519 public key is 32 bytes long")
    +
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
    +            self._lib.NID_ED25519, self._ffi.NULL, data, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +
    +        return _Ed25519PublicKey(self, evp_pkey)
    +
    +    def ed25519_load_private_bytes(self, data):
    +        if len(data) != ed25519._ED25519_KEY_SIZE:
    +            raise ValueError("An Ed25519 private key is 32 bytes long")
    +
    +        utils._check_byteslike("data", data)
    +        data_ptr = self._ffi.from_buffer(data)
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
    +            self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +
    +        return _Ed25519PrivateKey(self, evp_pkey)
    +
    +    def ed25519_generate_key(self):
    +        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519)
    +        return _Ed25519PrivateKey(self, evp_pkey)
    +
    +    def ed448_supported(self):
    +        return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
    +
    +    def ed448_load_public_bytes(self, data):
    +        utils._check_bytes("data", data)
    +        if len(data) != _ED448_KEY_SIZE:
    +            raise ValueError("An Ed448 public key is 57 bytes long")
    +
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
    +            self._lib.NID_ED448, self._ffi.NULL, data, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +
    +        return _Ed448PublicKey(self, evp_pkey)
    +
    +    def ed448_load_private_bytes(self, data):
    +        utils._check_byteslike("data", data)
    +        if len(data) != _ED448_KEY_SIZE:
    +            raise ValueError("An Ed448 private key is 57 bytes long")
    +
    +        data_ptr = self._ffi.from_buffer(data)
    +        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
    +            self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data)
    +        )
    +        self.openssl_assert(evp_pkey != self._ffi.NULL)
    +        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
    +
    +        return _Ed448PrivateKey(self, evp_pkey)
    +
    +    def ed448_generate_key(self):
    +        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448)
    +        return _Ed448PrivateKey(self, evp_pkey)
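
These raw load/generate paths back the public Ed25519/Ed448 key classes; per ed25519_supported/ed448_supported above they need OpenSSL 1.1.1b or newer at runtime. A usage sketch:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import ed25519

    key = ed25519.Ed25519PrivateKey.generate()
    raw = key.private_bytes(
        serialization.Encoding.Raw,
        serialization.PrivateFormat.Raw,
        serialization.NoEncryption(),
    )
    same = ed25519.Ed25519PrivateKey.from_private_bytes(raw)  # 32 bytes exactly
    sig = same.sign(b"payload")
    key.public_key().verify(sig, b"payload")  # raises InvalidSignature on tamper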
    +
         def derive_scrypt(self, key_material, salt, length, n, r, p):
             buf = self._ffi.new("unsigned char[]", length)
    +        key_material_ptr = self._ffi.from_buffer(key_material)
             res = self._lib.EVP_PBE_scrypt(
    -            key_material, len(key_material), salt, len(salt), n, r, p,
    +            key_material_ptr, len(key_material), salt, len(salt), n, r, p,
                 scrypt._MEM_LIMIT, buf, length
             )
    -        self.openssl_assert(res == 1)
    +        if res != 1:
    +            errors = self._consume_errors()
    +            if not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111:
    +                # This error is only added to the stack in 1.1.1+
    +                self.openssl_assert(
    +                    errors[0]._lib_reason_match(
    +                        self._lib.ERR_LIB_EVP,
    +                        self._lib.ERR_R_MALLOC_FAILURE
    +                    ) or
    +                    errors[0]._lib_reason_match(
    +                        self._lib.ERR_LIB_EVP,
    +                        self._lib.EVP_R_MEMORY_LIMIT_EXCEEDED
    +                    )
    +                )
    +
    +            # memory required formula explained here:
    +            # https://blog.filippo.io/the-scrypt-parameters/
    +            min_memory = 128 * n * r // (1024**2)
    +            raise MemoryError(
    +                "Not enough memory to derive key. These parameters require"
    +                " {} MB of memory.".format(min_memory)
    +            )
             return self._ffi.buffer(buf)[:]
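
The MemoryError message uses the 128 * n * r estimate from the linked post. A worked example with commonly recommended interactive parameters (n=2**14, r=8; these particular values are an assumption of the example):

    n, r = 2 ** 14, 8
    min_memory_mb = 128 * n * r // (1024 ** 2)
    assert min_memory_mb == 16  # 128 * 16384 * 8 bytes = 16 MiB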
     
         def aead_cipher_supported(self, cipher):
    @@ -1956,6 +2375,105 @@ class Backend(object):
                 self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL
             )
     
    +    @contextlib.contextmanager
    +    def _zeroed_bytearray(self, length):
    +        """
    +        This method creates a bytearray, which we copy data into (hopefully
    +        also from a mutable buffer that can be dynamically erased!), and then
    +        zero when we're done.
    +        """
    +        ba = bytearray(length)
    +        try:
    +            yield ba
    +        finally:
    +            self._zero_data(ba, length)
    +
    +    def _zero_data(self, data, length):
    +        # We clear things this way because at the moment we're not
    +        # sure of a better way that can guarantee it overwrites the
    +        # memory of a bytearray and doesn't just replace the underlying char *.
    +        for i in range(length):
    +            data[i] = 0
    +
    +    @contextlib.contextmanager
    +    def _zeroed_null_terminated_buf(self, data):
    +        """
    +        This method takes bytes, which can be a bytestring or a mutable
    +        buffer like a bytearray, and yields a null-terminated version of that
    +        data. This is required because PKCS12_parse doesn't take a length with
    +        its password char * and ffi.from_buffer doesn't provide null
    +        termination. So, to support zeroing the data via bytearray we
    +        need to build this ridiculous construct that copies the memory, but
    +        zeroes it after use.
    +        """
    +        if data is None:
    +            yield self._ffi.NULL
    +        else:
    +            data_len = len(data)
    +            buf = self._ffi.new("char[]", data_len + 1)
    +            self._ffi.memmove(buf, data, data_len)
    +            try:
    +                yield buf
    +            finally:
    +                # Cast to a uint8_t * so we can assign by integer
    +                self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
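
Both helpers exist because Python bytes objects are immutable and may be interned; only a bytearray (or a cffi buffer) can actually be overwritten in place, which is why _zero_data assigns byte by byte instead of rebinding the name. The contract in miniature:

    secret = bytearray(b"hunter2")
    for i in range(len(secret)):
        secret[i] = 0  # overwrite in place; do not rebind the variable
    assert secret == bytearray(len(secret))  # same object, now all zeroes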
    +
    +    def load_key_and_certificates_from_pkcs12(self, data, password):
    +        if password is not None:
    +            utils._check_byteslike("password", password)
    +
    +        bio = self._bytes_to_bio(data)
    +        p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
    +        if p12 == self._ffi.NULL:
    +            self._consume_errors()
    +            raise ValueError("Could not deserialize PKCS12 data")
    +
    +        p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
    +        evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
    +        x509_ptr = self._ffi.new("X509 **")
    +        sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
    +        with self._zeroed_null_terminated_buf(password) as password_buf:
    +            res = self._lib.PKCS12_parse(
    +                p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
    +            )
    +
    +        if res == 0:
    +            self._consume_errors()
    +            raise ValueError("Invalid password or PKCS12 data")
    +
    +        cert = None
    +        key = None
    +        additional_certificates = []
    +
    +        if evp_pkey_ptr[0] != self._ffi.NULL:
    +            evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
    +            key = self._evp_pkey_to_private_key(evp_pkey)
    +
    +        if x509_ptr[0] != self._ffi.NULL:
    +            x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
    +            cert = _Certificate(self, x509)
    +
    +        if sk_x509_ptr[0] != self._ffi.NULL:
    +            sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
    +            num = self._lib.sk_X509_num(sk_x509_ptr[0])
    +            for i in range(num):
    +                x509 = self._lib.sk_X509_value(sk_x509, i)
    +                x509 = self._ffi.gc(x509, self._lib.X509_free)
    +                self.openssl_assert(x509 != self._ffi.NULL)
    +                additional_certificates.append(_Certificate(self, x509))
    +
    +        return (key, cert, additional_certificates)
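
This parser is exposed publicly as pkcs12.load_key_and_certificates. A sketch, where bundle.p12 and the password are placeholders:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import pkcs12

    with open("bundle.p12", "rb") as f:
        key, cert, extra_certs = pkcs12.load_key_and_certificates(
            f.read(), b"password", default_backend()
        )
    # key/cert may each be None if the bundle omits them; extra_certs
    # is the (possibly empty) list of additional certificates.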
    +
    +    def poly1305_supported(self):
    +        return self._lib.Cryptography_HAS_POLY1305 == 1
    +
    +    def create_poly1305_ctx(self, key):
    +        utils._check_byteslike("key", key)
    +        if len(key) != _POLY1305_KEY_SIZE:
    +            raise ValueError("A poly1305 key is 32 bytes long")
    +
    +        return _Poly1305Context(self, key)
    +
     
     class GetCipherByName(object):
         def __init__(self, fmt):
    @@ -1967,7 +2485,7 @@ class GetCipherByName(object):
     
     
     def _get_xts_cipher(backend, cipher, mode):
    -    cipher_name = "aes-{0}-xts".format(cipher.key_size // 2)
    +    cipher_name = "aes-{}-xts".format(cipher.key_size // 2)
         return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
     
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ciphers.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ciphers.py
    index e0ee06e..94b48f5 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ciphers.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ciphers.py
    @@ -40,7 +40,7 @@ class _CipherContext(object):
                 adapter = registry[type(cipher), type(mode)]
             except KeyError:
                 raise UnsupportedAlgorithm(
    -                "cipher {0} in {1} mode is not supported "
    +                "cipher {} in {} mode is not supported "
                     "by this backend.".format(
                         cipher.name, mode.name if mode else mode),
                     _Reasons.UNSUPPORTED_CIPHER
    @@ -48,21 +48,25 @@ class _CipherContext(object):
     
             evp_cipher = adapter(self._backend, cipher, mode)
             if evp_cipher == self._backend._ffi.NULL:
    -            raise UnsupportedAlgorithm(
    -                "cipher {0} in {1} mode is not supported "
    -                "by this backend.".format(
    -                    cipher.name, mode.name if mode else mode),
    -                _Reasons.UNSUPPORTED_CIPHER
    -            )
    +            msg = "cipher {0.name} ".format(cipher)
    +            if mode is not None:
    +                msg += "in {0.name} mode ".format(mode)
    +            msg += (
    +                "is not supported by this backend (Your version of OpenSSL "
    +                "may be too old. Current version: {}.)"
    +            ).format(self._backend.openssl_version_text())
    +            raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
     
             if isinstance(mode, modes.ModeWithInitializationVector):
    -            iv_nonce = mode.initialization_vector
    +            iv_nonce = self._backend._ffi.from_buffer(
    +                mode.initialization_vector
    +            )
             elif isinstance(mode, modes.ModeWithTweak):
    -            iv_nonce = mode.tweak
    +            iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
             elif isinstance(mode, modes.ModeWithNonce):
    -            iv_nonce = mode.nonce
    +            iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
             elif isinstance(cipher, modes.ModeWithNonce):
    -            iv_nonce = cipher.nonce
    +            iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
             else:
                 iv_nonce = self._backend._ffi.NULL
             # begin init with cipher and operation type
    @@ -105,7 +109,7 @@ class _CipherContext(object):
                 ctx,
                 self._backend._ffi.NULL,
                 self._backend._ffi.NULL,
    -            cipher.key,
    +            self._backend._ffi.from_buffer(cipher.key),
                 iv_nonce,
                 operation
             )
    @@ -123,7 +127,7 @@ class _CipherContext(object):
         def update_into(self, data, buf):
             if len(buf) < (len(data) + self._block_size_bytes - 1):
                 raise ValueError(
    -                "buffer must be at least {0} bytes for this "
    +                "buffer must be at least {} bytes for this "
                     "payload".format(len(data) + self._block_size_bytes - 1)
                 )
     
    @@ -131,8 +135,10 @@ class _CipherContext(object):
                 "unsigned char *", self._backend._ffi.from_buffer(buf)
             )
             outlen = self._backend._ffi.new("int *")
    -        res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen,
    -                                                  data, len(data))
    +        res = self._backend._lib.EVP_CipherUpdate(
    +            self._ctx, buf, outlen,
    +            self._backend._ffi.from_buffer(data), len(data)
    +        )
             self._backend.openssl_assert(res != 0)
             return outlen[0]
     
    @@ -201,7 +207,7 @@ class _CipherContext(object):
                 )
             if len(tag) < self._mode._min_tag_length:
                 raise ValueError(
    -                "Authentication tag must be {0} bytes or longer.".format(
    +                "Authentication tag must be {} bytes or longer.".format(
                         self._mode._min_tag_length)
                 )
             res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
    @@ -215,7 +221,8 @@ class _CipherContext(object):
         def authenticate_additional_data(self, data):
             outlen = self._backend._ffi.new("int *")
             res = self._backend._lib.EVP_CipherUpdate(
    -            self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
    +            self._ctx, self._backend._ffi.NULL, outlen,
    +            self._backend._ffi.from_buffer(data), len(data)
             )
             self._backend.openssl_assert(res != 0)
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/cmac.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/cmac.py
    index e20f66d..d4d46f5 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/cmac.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/cmac.py
    @@ -9,11 +9,10 @@ from cryptography import utils
     from cryptography.exceptions import (
         InvalidSignature, UnsupportedAlgorithm, _Reasons
     )
    -from cryptography.hazmat.primitives import constant_time, mac
    +from cryptography.hazmat.primitives import constant_time
     from cryptography.hazmat.primitives.ciphers.modes import CBC
     
     
    -@utils.register_interface(mac.MACContext)
     class _CMACContext(object):
         def __init__(self, backend, algorithm, ctx=None):
             if not backend.cmac_algorithm_supported(algorithm):
    @@ -36,8 +35,9 @@ class _CMACContext(object):
                 self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
                 ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
     
    +            key_ptr = self._backend._ffi.from_buffer(self._key)
                 res = self._backend._lib.CMAC_Init(
    -                ctx, self._key, len(self._key),
    +                ctx, key_ptr, len(self._key),
                     evp_cipher, self._backend._ffi.NULL
                 )
                 self._backend.openssl_assert(res == 1)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/decode_asn1.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/decode_asn1.py
    index 31fb8cf..7639e68 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/decode_asn1.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/decode_asn1.py
    @@ -7,20 +7,18 @@ from __future__ import absolute_import, division, print_function
     import datetime
     import ipaddress
     
    -from asn1crypto.core import Integer, SequenceOf
    +import six
     
     from cryptography import x509
    +from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE
     from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
     from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
     from cryptography.x509.oid import (
    -    CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID
    +    CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID,
    +    OCSPExtensionOID,
     )
     
     
    -class _Integers(SequenceOf):
    -    _child_spec = Integer
    -
    -
     def _obj2txt(backend, obj):
         # Set to 80 on the recommendation of
         # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
    @@ -66,7 +64,7 @@ def _decode_x509_name(backend, x509_name):
             attribute = _decode_x509_name_entry(backend, entry)
             set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry)
             if set_id != prev_set_id:
    -            attributes.append(set([attribute]))
    +            attributes.append({attribute})
             else:
                 # is in the same RDN a previous entry
                 attributes[-1].add(attribute)
    @@ -132,7 +130,7 @@ def _decode_general_name(backend, gn):
                 if "1" in bits[prefix:]:
                     raise ValueError("Invalid netmask")
     
    -            ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
    +            ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix))
             else:
                 ip = ipaddress.ip_address(data)
     
    @@ -157,7 +155,7 @@ def _decode_general_name(backend, gn):
         else:
             # x400Address or ediPartyName
             raise x509.UnsupportedGeneralNameType(
    -            "{0} is not a supported type".format(
    +            "{} is not a supported type".format(
                     x509._GENERAL_NAMES.get(gn.type, gn.type)
                 ),
                 gn.type
    @@ -199,21 +197,37 @@ class _X509ExtensionParser(object):
                 )
                 if oid in seen_oids:
                     raise x509.DuplicateExtension(
    -                    "Duplicate {0} extension found".format(oid), oid
    +                    "Duplicate {} extension found".format(oid), oid
                     )
     
    -            # This OID is only supported in OpenSSL 1.1.0+ but we want
    -            # to support it in all versions of OpenSSL so we decode it
    +            # These OIDs are only supported in OpenSSL 1.1.0+ but we want
    +            # to support them in all versions of OpenSSL so we decode them
                 # ourselves.
                 if oid == ExtensionOID.TLS_FEATURE:
    +                # The extension contents are a SEQUENCE OF INTEGERs.
                     data = backend._lib.X509_EXTENSION_get_data(ext)
    -                parsed = _Integers.load(_asn1_string_to_bytes(backend, data))
    +                data_bytes = _asn1_string_to_bytes(backend, data)
    +                features = DERReader(data_bytes).read_single_element(SEQUENCE)
    +                parsed = []
    +                while not features.is_empty():
    +                    parsed.append(features.read_element(INTEGER).as_integer())
    +                # Map the features to their enum value.
                     value = x509.TLSFeature(
    -                    [_TLS_FEATURE_TYPE_TO_ENUM[x.native] for x in parsed]
    +                    [_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed]
                     )
                     extensions.append(x509.Extension(oid, critical, value))
                     seen_oids.add(oid)
                     continue
    +            elif oid == ExtensionOID.PRECERT_POISON:
    +                data = backend._lib.X509_EXTENSION_get_data(ext)
    +                # The contents of the extension must be an ASN.1 NULL.
    +                reader = DERReader(_asn1_string_to_bytes(backend, data))
    +                reader.read_single_element(NULL).check_empty()
    +                extensions.append(x509.Extension(
    +                    oid, critical, x509.PrecertPoison()
    +                ))
    +                seen_oids.add(oid)
    +                continue
     
                 try:
                     handler = self.handlers[oid]
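
For reference, the DERReader-based TLSFeature parsing above walks a SEQUENCE OF INTEGER one TLV at a time. A minimal Python 3 sketch of that walk, assuming short-form lengths only (real DER also allows long-form lengths, which the library's reader handles):

    def parse_der_sequence_of_integers(data):
        assert data[0] == 0x30           # SEQUENCE tag
        length = data[1]                 # short-form length, for brevity
        body, pos, values = data[2:2 + length], 0, []
        while pos < len(body):
            assert body[pos] == 0x02     # INTEGER tag
            n = body[pos + 1]
            values.append(int.from_bytes(body[pos + 2:pos + 2 + n], "big"))
            pos += 2 + n
        return values

    # b"\x30\x03\x02\x01\x05" is SEQUENCE { INTEGER 5 }, the
    # status_request TLS feature.
    assert parse_der_sequence_of_integers(b"\x30\x03\x02\x01\x05") == [5]
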
    @@ -231,7 +245,7 @@ class _X509ExtensionParser(object):
                     if ext_data == backend._ffi.NULL:
                         backend._consume_errors()
                         raise ValueError(
    -                        "The {0} extension is invalid and can't be "
    +                        "The {} extension is invalid and can't be "
                             "parsed".format(oid)
                         )
     
    @@ -365,7 +379,14 @@ def _decode_authority_key_identifier(backend, akid):
     
     def _decode_authority_information_access(backend, aia):
         aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
    -    aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
    +    aia = backend._ffi.gc(
    +        aia,
    +        lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
    +            x, backend._ffi.addressof(
    +                backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
    +            )
    +        )
    +    )
         num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
         access_descriptions = []
         for i in range(num):
    @@ -450,6 +471,30 @@ def _decode_general_subtrees(backend, stack_subtrees):
         return subtrees
     
     
    +def _decode_issuing_dist_point(backend, idp):
    +    idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp)
    +    idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
    +    if idp.distpoint != backend._ffi.NULL:
    +        full_name, relative_name = _decode_distpoint(backend, idp.distpoint)
    +    else:
    +        full_name = None
    +        relative_name = None
    +
    +    only_user = idp.onlyuser == 255
    +    only_ca = idp.onlyCA == 255
    +    indirect_crl = idp.indirectCRL == 255
    +    only_attr = idp.onlyattr == 255
    +    if idp.onlysomereasons != backend._ffi.NULL:
    +        only_some_reasons = _decode_reasons(backend, idp.onlysomereasons)
    +    else:
    +        only_some_reasons = None
    +
    +    return x509.IssuingDistributionPoint(
    +        full_name, relative_name, only_user, only_ca, only_some_reasons,
    +        indirect_crl, only_attr
    +    )
    +
    +
     def _decode_policy_constraints(backend, pc):
         pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
         pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
    @@ -498,44 +543,7 @@ def _decode_dist_points(backend, cdps):
             reasons = None
             cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
             if cdp.reasons != backend._ffi.NULL:
    -            # We will check each bit from RFC 5280
    -            # ReasonFlags ::= BIT STRING {
    -            #      unused                  (0),
    -            #      keyCompromise           (1),
    -            #      cACompromise            (2),
    -            #      affiliationChanged      (3),
    -            #      superseded              (4),
    -            #      cessationOfOperation    (5),
    -            #      certificateHold         (6),
    -            #      privilegeWithdrawn      (7),
    -            #      aACompromise            (8) }
    -            reasons = []
    -            get_bit = backend._lib.ASN1_BIT_STRING_get_bit
    -            if get_bit(cdp.reasons, 1):
    -                reasons.append(x509.ReasonFlags.key_compromise)
    -
    -            if get_bit(cdp.reasons, 2):
    -                reasons.append(x509.ReasonFlags.ca_compromise)
    -
    -            if get_bit(cdp.reasons, 3):
    -                reasons.append(x509.ReasonFlags.affiliation_changed)
    -
    -            if get_bit(cdp.reasons, 4):
    -                reasons.append(x509.ReasonFlags.superseded)
    -
    -            if get_bit(cdp.reasons, 5):
    -                reasons.append(x509.ReasonFlags.cessation_of_operation)
    -
    -            if get_bit(cdp.reasons, 6):
    -                reasons.append(x509.ReasonFlags.certificate_hold)
    -
    -            if get_bit(cdp.reasons, 7):
    -                reasons.append(x509.ReasonFlags.privilege_withdrawn)
    -
    -            if get_bit(cdp.reasons, 8):
    -                reasons.append(x509.ReasonFlags.aa_compromise)
    -
    -            reasons = frozenset(reasons)
    +            reasons = _decode_reasons(backend, cdp.reasons)
     
             if cdp.CRLissuer != backend._ffi.NULL:
                 crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
    @@ -543,32 +551,9 @@ def _decode_dist_points(backend, cdps):
             # Certificates may have a crl_issuer/reasons and no distribution
             # point so make sure it's not null.
             if cdp.distpoint != backend._ffi.NULL:
    -            # Type 0 is fullName, there is no #define for it in the code.
    -            if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
    -                full_name = _decode_general_names(
    -                    backend, cdp.distpoint.name.fullname
    -                )
    -            # OpenSSL code doesn't test for a specific type for
    -            # relativename, everything that isn't fullname is considered
    -            # relativename.  Per RFC 5280:
    -            #
    -            # DistributionPointName ::= CHOICE {
    -            #      fullName                [0]      GeneralNames,
    -            #      nameRelativeToCRLIssuer [1]      RelativeDistinguishedName }
    -            else:
    -                rns = cdp.distpoint.name.relativename
    -                rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
    -                attributes = set()
    -                for i in range(rnum):
    -                    rn = backend._lib.sk_X509_NAME_ENTRY_value(
    -                        rns, i
    -                    )
    -                    backend.openssl_assert(rn != backend._ffi.NULL)
    -                    attributes.add(
    -                        _decode_x509_name_entry(backend, rn)
    -                    )
    -
    -                relative_name = x509.RelativeDistinguishedName(attributes)
    +            full_name, relative_name = _decode_distpoint(
    +                backend, cdp.distpoint
    +            )
     
             dist_points.append(
                 x509.DistributionPoint(
    @@ -579,6 +564,67 @@ def _decode_dist_points(backend, cdps):
         return dist_points
     
     
    +# ReasonFlags ::= BIT STRING {
    +#      unused                  (0),
    +#      keyCompromise           (1),
    +#      cACompromise            (2),
    +#      affiliationChanged      (3),
    +#      superseded              (4),
    +#      cessationOfOperation    (5),
    +#      certificateHold         (6),
    +#      privilegeWithdrawn      (7),
    +#      aACompromise            (8) }
    +_REASON_BIT_MAPPING = {
    +    1: x509.ReasonFlags.key_compromise,
    +    2: x509.ReasonFlags.ca_compromise,
    +    3: x509.ReasonFlags.affiliation_changed,
    +    4: x509.ReasonFlags.superseded,
    +    5: x509.ReasonFlags.cessation_of_operation,
    +    6: x509.ReasonFlags.certificate_hold,
    +    7: x509.ReasonFlags.privilege_withdrawn,
    +    8: x509.ReasonFlags.aa_compromise,
    +}
    +
    +
    +def _decode_reasons(backend, reasons):
+    # Check each bit position defined by RFC 5280.
    +    enum_reasons = []
    +    for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
    +        if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
    +            enum_reasons.append(reason)
    +
    +    return frozenset(enum_reasons)
    +
    +
    +def _decode_distpoint(backend, distpoint):
    +    if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
    +        full_name = _decode_general_names(backend, distpoint.name.fullname)
    +        return full_name, None
    +
    +    # OpenSSL code doesn't test for a specific type for
+    # relativename; everything that isn't fullname is considered
    +    # relativename.  Per RFC 5280:
    +    #
    +    # DistributionPointName ::= CHOICE {
    +    #      fullName                [0]      GeneralNames,
    +    #      nameRelativeToCRLIssuer [1]      RelativeDistinguishedName }
    +    rns = distpoint.name.relativename
    +    rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
    +    attributes = set()
    +    for i in range(rnum):
    +        rn = backend._lib.sk_X509_NAME_ENTRY_value(
    +            rns, i
    +        )
    +        backend.openssl_assert(rn != backend._ffi.NULL)
    +        attributes.add(
    +            _decode_x509_name_entry(backend, rn)
    +        )
    +
    +    relative_name = x509.RelativeDistinguishedName(attributes)
    +
    +    return None, relative_name
    +
    +
     def _decode_crl_distribution_points(backend, cdps):
         dist_points = _decode_dist_points(backend, cdps)
         return x509.CRLDistributionPoints(dist_points)
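
The refactored _decode_reasons simply tests each RFC 5280 bit position against _REASON_BIT_MAPPING. The same idea applied to a plain integer bitmask (a sketch only: the real code queries an OpenSSL ASN1_BIT_STRING via ASN1_BIT_STRING_get_bit):

    REASON_BITS = {
        1: "key_compromise", 2: "ca_compromise", 3: "affiliation_changed",
        4: "superseded", 5: "cessation_of_operation", 6: "certificate_hold",
        7: "privilege_withdrawn", 8: "aa_compromise",
    }

    def decode_reason_bits(bitmask):
        # Bit 0 ("unused") is never mapped, matching _REASON_BIT_MAPPING.
        return frozenset(
            name for bit, name in REASON_BITS.items() if bitmask & (1 << bit)
        )

    assert decode_reason_bits(0b110) == {"key_compromise", "ca_compromise"}
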
    @@ -659,7 +705,7 @@ def _decode_crl_reason(backend, enum):
         try:
             return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
         except KeyError:
    -        raise ValueError("Unsupported reason code: {0}".format(code))
    +        raise ValueError("Unsupported reason code: {}".format(code))
     
     
     def _decode_invalidity_date(backend, inv_date):
    @@ -719,7 +765,7 @@ def _asn1_string_to_utf8(backend, asn1_string):
         res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
         if res == -1:
             raise ValueError(
    -            "Unsupported ASN1 string type. Type: {0}".format(asn1_string.type)
    +            "Unsupported ASN1 string type. Type: {}".format(asn1_string.type)
             )
     
         backend.openssl_assert(buf[0] != backend._ffi.NULL)
    @@ -754,6 +800,12 @@ def _parse_asn1_generalized_time(backend, generalized_time):
         return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
     
     
    +def _decode_nonce(backend, nonce):
    +    nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
    +    nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
    +    return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
    +
    +
     _EXTENSION_HANDLERS_NO_SCT = {
         ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
         ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
    @@ -793,8 +845,22 @@ _CRL_EXTENSION_HANDLERS = {
         ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
             _decode_authority_information_access
         ),
    +    ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point,
    +    ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
     }
     
    +_OCSP_REQ_EXTENSION_HANDLERS = {
    +    OCSPExtensionOID.NONCE: _decode_nonce,
    +}
    +
    +_OCSP_BASICRESP_EXTENSION_HANDLERS = {
    +    OCSPExtensionOID.NONCE: _decode_nonce,
    +}
    +
    +# All revoked extensions are valid single response extensions, see:
    +# https://tools.ietf.org/html/rfc6960#section-4.4.5
    +_OCSP_SINGLERESP_EXTENSION_HANDLERS = _REVOKED_EXTENSION_HANDLERS.copy()
    +
     _CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser(
         ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
         get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
    @@ -824,3 +890,21 @@ _CRL_EXTENSION_PARSER = _X509ExtensionParser(
         get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
         handlers=_CRL_EXTENSION_HANDLERS,
     )
    +
    +_OCSP_REQ_EXT_PARSER = _X509ExtensionParser(
    +    ext_count=lambda backend, x: backend._lib.OCSP_REQUEST_get_ext_count(x),
    +    get_ext=lambda backend, x, i: backend._lib.OCSP_REQUEST_get_ext(x, i),
    +    handlers=_OCSP_REQ_EXTENSION_HANDLERS,
    +)
    +
    +_OCSP_BASICRESP_EXT_PARSER = _X509ExtensionParser(
    +    ext_count=lambda backend, x: backend._lib.OCSP_BASICRESP_get_ext_count(x),
    +    get_ext=lambda backend, x, i: backend._lib.OCSP_BASICRESP_get_ext(x, i),
    +    handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS,
    +)
    +
    +_OCSP_SINGLERESP_EXT_PARSER = _X509ExtensionParser(
    +    ext_count=lambda backend, x: backend._lib.OCSP_SINGLERESP_get_ext_count(x),
    +    get_ext=lambda backend, x, i: backend._lib.OCSP_SINGLERESP_get_ext(x, i),
    +    handlers=_OCSP_SINGLERESP_EXTENSION_HANDLERS,
    +)
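
The three OCSP parsers above reuse _X509ExtensionParser by injecting type-specific accessors, so one generic walker serves requests, basic responses, and single responses. A stripped-down sketch of that dependency-injection pattern (ExtensionWalker is hypothetical; the real class also dispatches per-OID handlers and rejects duplicates):

    class ExtensionWalker(object):
        def __init__(self, ext_count, get_ext):
            self._ext_count = ext_count
            self._get_ext = get_ext

        def parse(self, obj):
            return [self._get_ext(obj, i) for i in range(self._ext_count(obj))]

    # One walker definition serves any indexable container:
    walker = ExtensionWalker(ext_count=len, get_ext=lambda o, i: o[i])
    assert walker.parse(("nonce", "crl_reason")) == ["nonce", "crl_reason"]
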
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/dsa.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/dsa.py
    index 48886e4..de61f08 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/dsa.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/dsa.py
    @@ -211,8 +211,7 @@ class _DSAPublicKey(object):
     
         def verifier(self, signature, signature_algorithm):
             _warn_sign_verify_deprecated()
    -        if not isinstance(signature, bytes):
    -            raise TypeError("signature must be bytes.")
    +        utils._check_bytes("signature", signature)
     
             _check_not_prehashed(signature_algorithm)
             return _DSAVerificationContext(
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ec.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ec.py
    index 69da234..3d8681b 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ec.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ec.py
    @@ -34,7 +34,19 @@ def _ec_key_curve_sn(backend, ec_key):
         # an error for now.
         if nid == backend._lib.NID_undef:
             raise NotImplementedError(
    -            "ECDSA certificates with unnamed curves are unsupported "
    +            "ECDSA keys with unnamed curves are unsupported "
    +            "at this time"
    +        )
    +
    +    # This is like the above check, but it also catches the case where you
    +    # explicitly encoded a curve with the same parameters as a named curve.
    +    # Don't do that.
    +    if (
    +        backend._lib.CRYPTOGRAPHY_OPENSSL_110_OR_GREATER and
    +        backend._lib.EC_GROUP_get_asn1_flag(group) == 0
    +    ):
    +        raise NotImplementedError(
    +            "ECDSA keys with unnamed curves are unsupported "
                 "at this time"
             )
     
    @@ -62,7 +74,7 @@ def _sn_to_elliptic_curve(backend, sn):
             return ec._CURVE_TYPES[sn]()
         except KeyError:
             raise UnsupportedAlgorithm(
    -            "{0} is not a supported elliptic curve".format(sn),
    +            "{} is not a supported elliptic curve".format(sn),
                 _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
             )
     
    @@ -127,12 +139,12 @@ class _ECDSAVerificationContext(object):
     class _EllipticCurvePrivateKey(object):
         def __init__(self, backend, ec_key_cdata, evp_pkey):
             self._backend = backend
    -        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
             self._ec_key = ec_key_cdata
             self._evp_pkey = evp_pkey
     
             sn = _ec_key_curve_sn(backend, ec_key_cdata)
             self._curve = _sn_to_elliptic_curve(backend, sn)
    +        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
     
         curve = utils.read_only_property("_curve")
     
    @@ -229,12 +241,12 @@ class _EllipticCurvePrivateKey(object):
     class _EllipticCurvePublicKey(object):
         def __init__(self, backend, ec_key_cdata, evp_pkey):
             self._backend = backend
    -        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
             self._ec_key = ec_key_cdata
             self._evp_pkey = evp_pkey
     
             sn = _ec_key_curve_sn(backend, ec_key_cdata)
             self._curve = _sn_to_elliptic_curve(backend, sn)
    +        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
     
         curve = utils.read_only_property("_curve")
     
    @@ -244,8 +256,7 @@ class _EllipticCurvePublicKey(object):
     
         def verifier(self, signature, signature_algorithm):
             _warn_sign_verify_deprecated()
    -        if not isinstance(signature, bytes):
    -            raise TypeError("signature must be bytes.")
    +        utils._check_bytes("signature", signature)
     
             _check_signature_algorithm(signature_algorithm)
             _check_not_prehashed(signature_algorithm.algorithm)
    @@ -276,19 +287,62 @@ class _EllipticCurvePublicKey(object):
                 curve=self._curve
             )
     
    +    def _encode_point(self, format):
    +        if format is serialization.PublicFormat.CompressedPoint:
    +            conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
    +        else:
    +            assert format is serialization.PublicFormat.UncompressedPoint
    +            conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
    +
    +        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
    +        self._backend.openssl_assert(group != self._backend._ffi.NULL)
    +        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
    +        self._backend.openssl_assert(point != self._backend._ffi.NULL)
    +        with self._backend._tmp_bn_ctx() as bn_ctx:
    +            buflen = self._backend._lib.EC_POINT_point2oct(
    +                group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
    +            )
    +            self._backend.openssl_assert(buflen > 0)
    +            buf = self._backend._ffi.new("char[]", buflen)
    +            res = self._backend._lib.EC_POINT_point2oct(
    +                group, point, conversion, buf, buflen, bn_ctx
    +            )
    +            self._backend.openssl_assert(buflen == res)
    +
    +        return self._backend._ffi.buffer(buf)[:]
    +
         def public_bytes(self, encoding, format):
             if format is serialization.PublicFormat.PKCS1:
                 raise ValueError(
                     "EC public keys do not support PKCS1 serialization"
                 )
     
    -        return self._backend._public_key_bytes(
    -            encoding,
    -            format,
    -            self,
    -            self._evp_pkey,
    -            None
    -        )
    +        if (
    +            encoding is serialization.Encoding.X962 or
    +            format is serialization.PublicFormat.CompressedPoint or
    +            format is serialization.PublicFormat.UncompressedPoint
    +        ):
    +            if (
    +                encoding is not serialization.Encoding.X962 or
    +                format not in (
    +                    serialization.PublicFormat.CompressedPoint,
    +                    serialization.PublicFormat.UncompressedPoint
    +                )
    +            ):
    +                raise ValueError(
    +                    "X962 encoding must be used with CompressedPoint or "
    +                    "UncompressedPoint format"
    +                )
    +
    +            return self._encode_point(format)
    +        else:
    +            return self._backend._public_key_bytes(
    +                encoding,
    +                format,
    +                self,
    +                self._evp_pkey,
    +                None
    +            )
     
         def verify(self, signature, data, signature_algorithm):
             _check_signature_algorithm(signature_algorithm)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed25519.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed25519.py
    new file mode 100644
    index 0000000..f38f11d
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed25519.py
    @@ -0,0 +1,151 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +from cryptography import exceptions, utils
    +from cryptography.hazmat.primitives import serialization
    +from cryptography.hazmat.primitives.asymmetric.ed25519 import (
    +    Ed25519PrivateKey, Ed25519PublicKey, _ED25519_KEY_SIZE, _ED25519_SIG_SIZE
    +)
    +
    +
    +@utils.register_interface(Ed25519PublicKey)
    +class _Ed25519PublicKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_bytes(self, encoding, format):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                encoding is not serialization.Encoding.Raw or
    +                format is not serialization.PublicFormat.Raw
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw"
    +                )
    +
    +            return self._raw_public_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PublicFormat.SubjectPublicKeyInfo
    +        ):
    +            raise ValueError(
    +                "format must be SubjectPublicKeyInfo when encoding is PEM or "
    +                "DER"
    +            )
    +
    +        return self._backend._public_key_bytes(
    +            encoding, format, self, self._evp_pkey, None
    +        )
    +
    +    def _raw_public_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
    +
    +    def verify(self, signature, data):
    +        evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
    +        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
    +        evp_md_ctx = self._backend._ffi.gc(
    +            evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
    +        )
    +        res = self._backend._lib.EVP_DigestVerifyInit(
    +            evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            self._backend._ffi.NULL, self._evp_pkey
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        res = self._backend._lib.EVP_DigestVerify(
    +            evp_md_ctx, signature, len(signature), data, len(data)
    +        )
    +        if res != 1:
    +            self._backend._consume_errors()
    +            raise exceptions.InvalidSignature
    +
    +
    +@utils.register_interface(Ed25519PrivateKey)
    +class _Ed25519PrivateKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_key(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
    +        public_bytes = self._backend._ffi.buffer(buf)[:]
    +        return self._backend.ed25519_load_public_bytes(public_bytes)
    +
    +    def sign(self, data):
    +        evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
    +        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
    +        evp_md_ctx = self._backend._ffi.gc(
    +            evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
    +        )
    +        res = self._backend._lib.EVP_DigestSignInit(
    +            evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            self._backend._ffi.NULL, self._evp_pkey
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", len(buf))
    +        res = self._backend._lib.EVP_DigestSign(
    +            evp_md_ctx, buf, buflen, data, len(data)
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE)
    +        return self._backend._ffi.buffer(buf, buflen[0])[:]
    +
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        if (
    +            encoding is serialization.Encoding.Raw or
+            format is serialization.PrivateFormat.Raw
    +        ):
    +            if (
    +                format is not serialization.PrivateFormat.Raw or
    +                encoding is not serialization.Encoding.Raw or not
    +                isinstance(encryption_algorithm, serialization.NoEncryption)
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw "
    +                    "and encryption_algorithm must be NoEncryption()"
    +                )
    +
    +            return self._raw_private_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PrivateFormat.PKCS8
    +        ):
    +            raise ValueError(
    +                "format must be PKCS8 when encoding is PEM or DER"
    +            )
    +
    +        return self._backend._private_key_bytes(
    +            encoding, format, encryption_algorithm, self._evp_pkey, None
    +        )
    +
    +    def _raw_private_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
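
Both key classes in this new file enforce the same pairing rule: Raw encoding and Raw format must be requested together, never mixed with PEM/DER or SubjectPublicKeyInfo/PKCS8. A toy sketch of that rule, with strings standing in for the serialization enums:

    def check_raw(encoding, fmt):
        wants_raw = encoding == "Raw" or fmt == "Raw"
        if wants_raw and not (encoding == "Raw" and fmt == "Raw"):
            raise ValueError(
                "When using Raw both encoding and format must be Raw"
            )
        return wants_raw

    assert check_raw("Raw", "Raw") is True                     # raw path taken
    assert check_raw("PEM", "SubjectPublicKeyInfo") is False   # normal path
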
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed448.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed448.py
    new file mode 100644
    index 0000000..f541f05
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ed448.py
    @@ -0,0 +1,154 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +from cryptography import exceptions, utils
    +from cryptography.hazmat.primitives import serialization
    +from cryptography.hazmat.primitives.asymmetric.ed448 import (
    +    Ed448PrivateKey, Ed448PublicKey
    +)
    +
    +_ED448_KEY_SIZE = 57
    +_ED448_SIG_SIZE = 114
    +
    +
    +@utils.register_interface(Ed448PublicKey)
    +class _Ed448PublicKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_bytes(self, encoding, format):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                encoding is not serialization.Encoding.Raw or
    +                format is not serialization.PublicFormat.Raw
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw"
    +                )
    +
    +            return self._raw_public_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PublicFormat.SubjectPublicKeyInfo
    +        ):
    +            raise ValueError(
    +                "format must be SubjectPublicKeyInfo when encoding is PEM or "
    +                "DER"
    +            )
    +
    +        return self._backend._public_key_bytes(
    +            encoding, format, self, self._evp_pkey, None
    +        )
    +
    +    def _raw_public_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
    +
    +    def verify(self, signature, data):
    +        evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
    +        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
    +        evp_md_ctx = self._backend._ffi.gc(
    +            evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
    +        )
    +        res = self._backend._lib.EVP_DigestVerifyInit(
    +            evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            self._backend._ffi.NULL, self._evp_pkey
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        res = self._backend._lib.EVP_DigestVerify(
    +            evp_md_ctx, signature, len(signature), data, len(data)
    +        )
    +        if res != 1:
    +            self._backend._consume_errors()
    +            raise exceptions.InvalidSignature
    +
    +
    +@utils.register_interface(Ed448PrivateKey)
    +class _Ed448PrivateKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_key(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
    +        public_bytes = self._backend._ffi.buffer(buf)[:]
    +        return self._backend.ed448_load_public_bytes(public_bytes)
    +
    +    def sign(self, data):
    +        evp_md_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
    +        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
    +        evp_md_ctx = self._backend._ffi.gc(
    +            evp_md_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
    +        )
    +        res = self._backend._lib.EVP_DigestSignInit(
    +            evp_md_ctx, self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            self._backend._ffi.NULL, self._evp_pkey
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", len(buf))
    +        res = self._backend._lib.EVP_DigestSign(
    +            evp_md_ctx, buf, buflen, data, len(data)
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE)
    +        return self._backend._ffi.buffer(buf, buflen[0])[:]
    +
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        if (
    +            encoding is serialization.Encoding.Raw or
+            format is serialization.PrivateFormat.Raw
    +        ):
    +            if (
    +                format is not serialization.PrivateFormat.Raw or
    +                encoding is not serialization.Encoding.Raw or not
    +                isinstance(encryption_algorithm, serialization.NoEncryption)
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw "
    +                    "and encryption_algorithm must be NoEncryption()"
    +                )
    +
    +            return self._raw_private_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PrivateFormat.PKCS8
    +        ):
    +            raise ValueError(
    +                "format must be PKCS8 when encoding is PEM or DER"
    +            )
    +
    +        return self._backend._private_key_bytes(
    +            encoding, format, encryption_algorithm, self._evp_pkey, None
    +        )
    +
    +    def _raw_private_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
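
A hedged usage sketch of the backend this file implements, via the library's public API (assumes a cryptography build with Ed448 support on an OpenSSL 1.1.1+ backend; the sizes match the _ED448_* constants above):

    from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey

    key = Ed448PrivateKey.generate()
    signature = key.sign(b"payload")
    assert len(signature) == 114                     # _ED448_SIG_SIZE
    key.public_key().verify(signature, b"payload")   # raises InvalidSignature
                                                     # on a bad signature
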
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/encode_asn1.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/encode_asn1.py
    index a2c7ed7..ca35f0e 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/encode_asn1.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/encode_asn1.py
    @@ -15,7 +15,9 @@ from cryptography.hazmat.backends.openssl.decode_asn1 import (
         _DISTPOINT_TYPE_RELATIVENAME
     )
     from cryptography.x509.name import _ASN1Type
    -from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID
    +from cryptography.x509.oid import (
    +    CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID,
    +)
     
     
     def _encode_asn1_int(backend, x):
    @@ -44,12 +46,12 @@ def _encode_asn1_int_gc(backend, x):
         return i
     
     
    -def _encode_asn1_str(backend, data, length):
    +def _encode_asn1_str(backend, data):
         """
         Create an ASN1_OCTET_STRING from a Python byte string.
         """
         s = backend._lib.ASN1_OCTET_STRING_new()
    -    res = backend._lib.ASN1_OCTET_STRING_set(s, data, length)
    +    res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
         backend.openssl_assert(res == 1)
         return s
     
    @@ -68,8 +70,8 @@ def _encode_asn1_utf8_str(backend, string):
         return s
     
     
    -def _encode_asn1_str_gc(backend, data, length):
    -    s = _encode_asn1_str(backend, data, length)
    +def _encode_asn1_str_gc(backend, data):
    +    s = _encode_asn1_str(backend, data)
         s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
         return s
     
    @@ -112,13 +114,15 @@ def _encode_sk_name_entry(backend, attributes):
         for attribute in attributes:
             name_entry = _encode_name_entry(backend, attribute)
             res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
    -        backend.openssl_assert(res == 1)
    +        backend.openssl_assert(res >= 1)
         return stack
     
     
     def _encode_name_entry(backend, attribute):
         if attribute._type is _ASN1Type.BMPString:
             value = attribute.value.encode('utf_16_be')
    +    elif attribute._type is _ASN1Type.UniversalString:
    +        value = attribute.value.encode('utf_32_be')
         else:
             value = attribute.value.encode('utf8')
     
    @@ -134,6 +138,28 @@ def _encode_crl_number_delta_crl_indicator(backend, ext):
         return _encode_asn1_int_gc(backend, ext.crl_number)
     
     
    +def _encode_issuing_dist_point(backend, ext):
    +    idp = backend._lib.ISSUING_DIST_POINT_new()
    +    backend.openssl_assert(idp != backend._ffi.NULL)
    +    idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
    +    idp.onlyuser = 255 if ext.only_contains_user_certs else 0
    +    idp.onlyCA = 255 if ext.only_contains_ca_certs else 0
    +    idp.indirectCRL = 255 if ext.indirect_crl else 0
    +    idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0
    +    if ext.only_some_reasons:
    +        idp.onlysomereasons = _encode_reasonflags(
    +            backend, ext.only_some_reasons
    +        )
    +
    +    if ext.full_name:
    +        idp.distpoint = _encode_full_name(backend, ext.full_name)
    +
    +    if ext.relative_name:
    +        idp.distpoint = _encode_relative_name(backend, ext.relative_name)
    +
    +    return idp
    +
    +
     def _encode_crl_reason(backend, crl_reason):
         asn1enum = backend._lib.ASN1_ENUMERATED_new()
         backend.openssl_assert(asn1enum != backend._ffi.NULL)
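
The 255/0 writes in _encode_issuing_dist_point follow OpenSSL's ASN1_BOOLEAN convention (TRUE is 0xff), which is also why _decode_issuing_dist_point reads the flags back with `== 255`. As a sketch:

    def to_asn1_boolean(value):
        return 255 if value else 0   # OpenSSL ASN1_BOOLEAN: TRUE = 0xff

    def from_asn1_boolean(raw):
        return raw == 255

    assert from_asn1_boolean(to_asn1_boolean(True)) is True
    assert from_asn1_boolean(to_asn1_boolean(False)) is False
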
    @@ -184,7 +210,6 @@ def _encode_certificate_policies(backend, certificate_policies):
                         pqi.d.cpsuri = _encode_asn1_str(
                             backend,
                             qualifier.encode("ascii"),
    -                        len(qualifier.encode("ascii"))
                         )
                     else:
                         assert isinstance(qualifier, x509.UserNotice)
    @@ -245,11 +270,8 @@ def _txt2obj_gc(backend, name):
     
     
     def _encode_ocsp_nocheck(backend, ext):
    -    """
    -    The OCSP No Check extension is defined as a null ASN.1 value embedded in
    -    an ASN.1 string.
    -    """
    -    return _encode_asn1_str_gc(backend, b"\x05\x00", 2)
    +    # Doesn't need to be GC'd
    +    return backend._lib.ASN1_NULL_new()
     
     
     def _encode_key_usage(backend, key_usage):
    @@ -292,7 +314,6 @@ def _encode_authority_key_identifier(backend, authority_keyid):
             akid.keyid = _encode_asn1_str(
                 backend,
                 authority_keyid.key_identifier,
    -            len(authority_keyid.key_identifier)
             )
     
         if authority_keyid.authority_cert_issuer is not None:
    @@ -326,16 +347,22 @@ def _encode_authority_information_access(backend, authority_info_access):
         aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
         backend.openssl_assert(aia != backend._ffi.NULL)
         aia = backend._ffi.gc(
    -        aia, backend._lib.sk_ACCESS_DESCRIPTION_free
    +        aia,
    +        lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
    +            x, backend._ffi.addressof(
    +                backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
    +            )
    +        )
         )
         for access_description in authority_info_access:
             ad = backend._lib.ACCESS_DESCRIPTION_new()
             method = _txt2obj(
                 backend, access_description.access_method.dotted_string
             )
    -        gn = _encode_general_name(backend, access_description.access_location)
    +        _encode_general_name_preallocated(
    +            backend, access_description.access_location, ad.location
    +        )
             ad.method = method
    -        ad.location = gn
             res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
             backend.openssl_assert(res >= 1)
     
    @@ -362,12 +389,17 @@ def _encode_alt_name(backend, san):
     
     
     def _encode_subject_key_identifier(backend, ski):
    -    return _encode_asn1_str_gc(backend, ski.digest, len(ski.digest))
    +    return _encode_asn1_str_gc(backend, ski.digest)
     
     
     def _encode_general_name(backend, name):
    +    gn = backend._lib.GENERAL_NAME_new()
    +    _encode_general_name_preallocated(backend, name, gn)
    +    return gn
    +
    +
    +def _encode_general_name_preallocated(backend, name, gn):
         if isinstance(name, x509.DNSName):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             gn.type = backend._lib.GEN_DNS
     
    @@ -381,7 +413,6 @@ def _encode_general_name(backend, name):
             backend.openssl_assert(res == 1)
             gn.d.dNSName = ia5
         elif isinstance(name, x509.RegisteredID):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             gn.type = backend._lib.GEN_RID
             obj = backend._lib.OBJ_txt2obj(
    @@ -390,13 +421,11 @@ def _encode_general_name(backend, name):
             backend.openssl_assert(obj != backend._ffi.NULL)
             gn.d.registeredID = obj
         elif isinstance(name, x509.DirectoryName):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             dir_name = _encode_name(backend, name.value)
             gn.type = backend._lib.GEN_DIRNAME
             gn.d.directoryName = dir_name
         elif isinstance(name, x509.IPAddress):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             if isinstance(name.value, ipaddress.IPv4Network):
                 packed = (
    @@ -410,11 +439,10 @@ def _encode_general_name(backend, name):
                 )
             else:
                 packed = name.value.packed
    -        ipaddr = _encode_asn1_str(backend, packed, len(packed))
    +        ipaddr = _encode_asn1_str(backend, packed)
             gn.type = backend._lib.GEN_IPADD
             gn.d.iPAddress = ipaddr
         elif isinstance(name, x509.OtherName):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             other_name = backend._lib.OTHERNAME_new()
             backend.openssl_assert(other_name != backend._ffi.NULL)
    @@ -437,30 +465,26 @@ def _encode_general_name(backend, name):
             gn.type = backend._lib.GEN_OTHERNAME
             gn.d.otherName = other_name
         elif isinstance(name, x509.RFC822Name):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             # ia5strings are supposed to be ITU T.50 but to allow round-tripping
             # of broken certs that encode utf8 we'll encode utf8 here too.
             data = name.value.encode("utf8")
    -        asn1_str = _encode_asn1_str(backend, data, len(data))
    +        asn1_str = _encode_asn1_str(backend, data)
             gn.type = backend._lib.GEN_EMAIL
             gn.d.rfc822Name = asn1_str
         elif isinstance(name, x509.UniformResourceIdentifier):
    -        gn = backend._lib.GENERAL_NAME_new()
             backend.openssl_assert(gn != backend._ffi.NULL)
             # ia5strings are supposed to be ITU T.50 but to allow round-tripping
             # of broken certs that encode utf8 we'll encode utf8 here too.
             data = name.value.encode("utf8")
    -        asn1_str = _encode_asn1_str(backend, data, len(data))
    +        asn1_str = _encode_asn1_str(backend, data)
             gn.type = backend._lib.GEN_URI
             gn.d.uniformResourceIdentifier = asn1_str
         else:
             raise ValueError(
    -            "{0} is an unknown GeneralName type".format(name)
    +            "{} is an unknown GeneralName type".format(name)
             )
     
    -    return gn
    -
     
     def _encode_extended_key_usage(backend, extended_key_usage):
         eku = backend._lib.sk_ASN1_OBJECT_new_null()
    @@ -485,6 +509,34 @@ _CRLREASONFLAGS = {
     }
     
     
    +def _encode_reasonflags(backend, reasons):
    +    bitmask = backend._lib.ASN1_BIT_STRING_new()
    +    backend.openssl_assert(bitmask != backend._ffi.NULL)
    +    for reason in reasons:
    +        res = backend._lib.ASN1_BIT_STRING_set_bit(
    +            bitmask, _CRLREASONFLAGS[reason], 1
    +        )
    +        backend.openssl_assert(res == 1)
    +
    +    return bitmask
    +
    +
    +def _encode_full_name(backend, full_name):
    +    dpn = backend._lib.DIST_POINT_NAME_new()
    +    backend.openssl_assert(dpn != backend._ffi.NULL)
    +    dpn.type = _DISTPOINT_TYPE_FULLNAME
    +    dpn.name.fullname = _encode_general_names(backend, full_name)
    +    return dpn
    +
    +
    +def _encode_relative_name(backend, relative_name):
    +    dpn = backend._lib.DIST_POINT_NAME_new()
    +    backend.openssl_assert(dpn != backend._ffi.NULL)
    +    dpn.type = _DISTPOINT_TYPE_RELATIVENAME
    +    dpn.name.relativename = _encode_sk_name_entry(backend, relative_name)
    +    return dpn
    +
    +
     def _encode_cdps_freshest_crl(backend, cdps):
         cdp = backend._lib.sk_DIST_POINT_new_null()
         cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
    @@ -493,30 +545,13 @@ def _encode_cdps_freshest_crl(backend, cdps):
             backend.openssl_assert(dp != backend._ffi.NULL)
     
             if point.reasons:
    -            bitmask = backend._lib.ASN1_BIT_STRING_new()
    -            backend.openssl_assert(bitmask != backend._ffi.NULL)
    -            dp.reasons = bitmask
    -            for reason in point.reasons:
    -                res = backend._lib.ASN1_BIT_STRING_set_bit(
    -                    bitmask, _CRLREASONFLAGS[reason], 1
    -                )
    -                backend.openssl_assert(res == 1)
    +            dp.reasons = _encode_reasonflags(backend, point.reasons)
     
             if point.full_name:
    -            dpn = backend._lib.DIST_POINT_NAME_new()
    -            backend.openssl_assert(dpn != backend._ffi.NULL)
    -            dpn.type = _DISTPOINT_TYPE_FULLNAME
    -            dpn.name.fullname = _encode_general_names(backend, point.full_name)
    -            dp.distpoint = dpn
    +            dp.distpoint = _encode_full_name(backend, point.full_name)
     
             if point.relative_name:
    -            dpn = backend._lib.DIST_POINT_NAME_new()
    -            backend.openssl_assert(dpn != backend._ffi.NULL)
    -            dpn.type = _DISTPOINT_TYPE_RELATIVENAME
    -            relativename = _encode_sk_name_entry(backend, point.relative_name)
    -            backend.openssl_assert(relativename != backend._ffi.NULL)
    -            dpn.name.relativename = relativename
    -            dp.distpoint = dpn
    +            dp.distpoint = _encode_relative_name(backend, point.relative_name)
     
             if point.crl_issuer:
                 dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
    @@ -574,6 +609,10 @@ def _encode_general_subtree(backend, subtrees):
             return general_subtrees
     
     
    +def _encode_nonce(backend, nonce):
    +    return _encode_asn1_str_gc(backend, nonce.nonce)
    +
    +
     _EXTENSION_ENCODE_HANDLERS = {
         ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
         ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
    @@ -602,6 +641,8 @@ _CRL_EXTENSION_ENCODE_HANDLERS = {
         ),
         ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
         ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
    +    ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point,
    +    ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
     }
     
     _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
    @@ -609,3 +650,11 @@ _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
         CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
         CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
     }
    +
    +_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
    +    OCSPExtensionOID.NONCE: _encode_nonce,
    +}
    +
    +_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
    +    OCSPExtensionOID.NONCE: _encode_nonce,
    +}
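
The _encode_ocsp_nocheck change above swaps a hand-rolled two-byte string for a real ASN1_NULL object; both serialize to the same DER, tag 0x05 with zero length. A minimal TLV sketch (short-form lengths only, for illustration):

    def der_tlv(tag, contents=b""):
        # Minimal DER tag-length-value builder.
        assert len(contents) < 0x80
        return bytes([tag, len(contents)]) + contents

    assert der_tlv(0x05) == b"\x05\x00"               # NULL, as OCSPNoCheck
    assert der_tlv(0x04, b"\xab") == b"\x04\x01\xab"  # one-byte OCTET STRING
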
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hashes.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hashes.py
    index 92ea53b..7f9d840 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hashes.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hashes.py
    @@ -22,12 +22,11 @@ class _HashContext(object):
                 ctx = self._backend._ffi.gc(
                     ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
                 )
    -            name = self._backend._build_openssl_digest_name(algorithm)
    -            evp_md = self._backend._lib.EVP_get_digestbyname(name)
    +            evp_md = self._backend._evp_md_from_algorithm(algorithm)
                 if evp_md == self._backend._ffi.NULL:
                     raise UnsupportedAlgorithm(
    -                    "{0} is not a supported hash on this backend.".format(
    -                        name),
    +                    "{} is not a supported hash on this backend.".format(
    +                        algorithm.name),
                         _Reasons.UNSUPPORTED_HASH
                     )
                 res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
    @@ -48,14 +47,32 @@ class _HashContext(object):
             return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
     
         def update(self, data):
    -        res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
    +        data_ptr = self._backend._ffi.from_buffer(data)
    +        res = self._backend._lib.EVP_DigestUpdate(
    +            self._ctx, data_ptr, len(data)
    +        )
             self._backend.openssl_assert(res != 0)
     
         def finalize(self):
    +        if isinstance(self.algorithm, hashes.ExtendableOutputFunction):
    +            # extendable output functions use a different finalize
    +            return self._finalize_xof()
    +        else:
    +            buf = self._backend._ffi.new("unsigned char[]",
    +                                         self._backend._lib.EVP_MAX_MD_SIZE)
    +            outlen = self._backend._ffi.new("unsigned int *")
    +            res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
    +            self._backend.openssl_assert(res != 0)
    +            self._backend.openssl_assert(
    +                outlen[0] == self.algorithm.digest_size
    +            )
    +            return self._backend._ffi.buffer(buf)[:outlen[0]]
    +
    +    def _finalize_xof(self):
             buf = self._backend._ffi.new("unsigned char[]",
    -                                     self._backend._lib.EVP_MAX_MD_SIZE)
    -        outlen = self._backend._ffi.new("unsigned int *")
    -        res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
    +                                     self.algorithm.digest_size)
    +        res = self._backend._lib.EVP_DigestFinalXOF(
    +            self._ctx, buf, self.algorithm.digest_size
    +        )
             self._backend.openssl_assert(res != 0)
    -        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
    -        return self._backend._ffi.buffer(buf)[:outlen[0]]
    +        return self._backend._ffi.buffer(buf)[:self.algorithm.digest_size]
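
Extendable-output functions (SHAKE) take the desired output length at finalization time rather than having a fixed digest size, which is why the backend grows a separate _finalize_xof path calling EVP_DigestFinalXOF. The stdlib shows the same shape (Python 3.6+):

    import hashlib

    xof = hashlib.shake_128()
    xof.update(b"data")
    assert len(xof.digest(32)) == 32   # the caller picks the output size
    assert len(xof.digest(64)) == 64   # same state, longer output
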
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hmac.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hmac.py
    index 3577f47..2e09cbc 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hmac.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/hmac.py
    @@ -9,10 +9,9 @@ from cryptography import utils
     from cryptography.exceptions import (
         InvalidSignature, UnsupportedAlgorithm, _Reasons
     )
    -from cryptography.hazmat.primitives import constant_time, hashes, mac
    +from cryptography.hazmat.primitives import constant_time, hashes
     
     
    -@utils.register_interface(mac.MACContext)
     @utils.register_interface(hashes.HashContext)
     class _HMACContext(object):
         def __init__(self, backend, key, algorithm, ctx=None):
    @@ -25,15 +24,16 @@ class _HMACContext(object):
                 ctx = self._backend._ffi.gc(
                     ctx, self._backend._lib.Cryptography_HMAC_CTX_free
                 )
    -            name = self._backend._build_openssl_digest_name(algorithm)
    -            evp_md = self._backend._lib.EVP_get_digestbyname(name)
    +            evp_md = self._backend._evp_md_from_algorithm(algorithm)
                 if evp_md == self._backend._ffi.NULL:
                     raise UnsupportedAlgorithm(
    -                    "{0} is not a supported hash on this backend".format(name),
    +                    "{} is not a supported hash on this backend".format(
    +                        algorithm.name),
                         _Reasons.UNSUPPORTED_HASH
                     )
    +            key_ptr = self._backend._ffi.from_buffer(key)
                 res = self._backend._lib.HMAC_Init_ex(
    -                ctx, key, len(key), evp_md, self._backend._ffi.NULL
    +                ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL
                 )
                 self._backend.openssl_assert(res != 0)
     
    @@ -55,7 +55,8 @@ class _HMACContext(object):
             )
     
         def update(self, data):
    -        res = self._backend._lib.HMAC_Update(self._ctx, data, len(data))
    +        data_ptr = self._backend._ffi.from_buffer(data)
    +        res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data))
             self._backend.openssl_assert(res != 0)
     
         def finalize(self):
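
Routing the key and data through ffi.from_buffer lets the backend accept any buffer-protocol object, not just bytes. The stdlib hmac module demonstrates the intended behavior:

    import hashlib
    import hmac

    mac_bytes = hmac.new(b"key", b"message", hashlib.sha256).digest()
    mac_views = hmac.new(bytearray(b"key"), memoryview(b"message"),
                         hashlib.sha256).digest()
    assert mac_bytes == mac_views   # bytes, bytearray and memoryview all work
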
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ocsp.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ocsp.py
    new file mode 100644
    index 0000000..e42565e
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/ocsp.py
    @@ -0,0 +1,389 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import functools
    +
    +from cryptography import utils, x509
    +from cryptography.exceptions import UnsupportedAlgorithm
    +from cryptography.hazmat.backends.openssl.decode_asn1 import (
    +    _CRL_ENTRY_REASON_CODE_TO_ENUM, _OCSP_BASICRESP_EXT_PARSER,
    +    _OCSP_REQ_EXT_PARSER, _OCSP_SINGLERESP_EXT_PARSER,
    +    _asn1_integer_to_int,
    +    _asn1_string_to_bytes, _decode_x509_name, _obj2txt,
    +    _parse_asn1_generalized_time,
    +)
    +from cryptography.hazmat.backends.openssl.x509 import _Certificate
    +from cryptography.hazmat.primitives import serialization
    +from cryptography.x509.ocsp import (
    +    OCSPCertStatus, OCSPRequest, OCSPResponse, OCSPResponseStatus,
    +    _CERT_STATUS_TO_ENUM, _OIDS_TO_HASH, _RESPONSE_STATUS_TO_ENUM,
    +)
    +
    +
    +def _requires_successful_response(func):
    +    @functools.wraps(func)
    +    def wrapper(self, *args):
    +        if self.response_status != OCSPResponseStatus.SUCCESSFUL:
    +            raise ValueError(
    +                "OCSP response status is not successful so the property "
    +                "has no value"
    +            )
    +        else:
    +            return func(self, *args)
    +
    +    return wrapper
    +
    +
    +def _issuer_key_hash(backend, cert_id):
    +    key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
    +    res = backend._lib.OCSP_id_get0_info(
    +        backend._ffi.NULL, backend._ffi.NULL,
    +        key_hash, backend._ffi.NULL, cert_id
    +    )
    +    backend.openssl_assert(res == 1)
    +    backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
    +    return _asn1_string_to_bytes(backend, key_hash[0])
    +
    +
    +def _issuer_name_hash(backend, cert_id):
    +    name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
    +    res = backend._lib.OCSP_id_get0_info(
    +        name_hash, backend._ffi.NULL,
    +        backend._ffi.NULL, backend._ffi.NULL, cert_id
    +    )
    +    backend.openssl_assert(res == 1)
    +    backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
    +    return _asn1_string_to_bytes(backend, name_hash[0])
    +
    +
    +def _serial_number(backend, cert_id):
    +    num = backend._ffi.new("ASN1_INTEGER **")
    +    res = backend._lib.OCSP_id_get0_info(
    +        backend._ffi.NULL, backend._ffi.NULL,
    +        backend._ffi.NULL, num, cert_id
    +    )
    +    backend.openssl_assert(res == 1)
    +    backend.openssl_assert(num[0] != backend._ffi.NULL)
    +    return _asn1_integer_to_int(backend, num[0])
    +
    +
    +def _hash_algorithm(backend, cert_id):
    +    asn1obj = backend._ffi.new("ASN1_OBJECT **")
    +    res = backend._lib.OCSP_id_get0_info(
    +        backend._ffi.NULL, asn1obj,
    +        backend._ffi.NULL, backend._ffi.NULL, cert_id
    +    )
    +    backend.openssl_assert(res == 1)
    +    backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
    +    oid = _obj2txt(backend, asn1obj[0])
    +    try:
    +        return _OIDS_TO_HASH[oid]
    +    except KeyError:
    +        raise UnsupportedAlgorithm(
    +            "Signature algorithm OID: {} not recognized".format(oid)
    +        )
    +
    +
    +@utils.register_interface(OCSPResponse)
    +class _OCSPResponse(object):
    +    def __init__(self, backend, ocsp_response):
    +        self._backend = backend
    +        self._ocsp_response = ocsp_response
    +        status = self._backend._lib.OCSP_response_status(self._ocsp_response)
    +        self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
    +        self._status = _RESPONSE_STATUS_TO_ENUM[status]
    +        if self._status is OCSPResponseStatus.SUCCESSFUL:
    +            basic = self._backend._lib.OCSP_response_get1_basic(
    +                self._ocsp_response
    +            )
    +            self._backend.openssl_assert(basic != self._backend._ffi.NULL)
    +            self._basic = self._backend._ffi.gc(
    +                basic, self._backend._lib.OCSP_BASICRESP_free
    +            )
    +            self._backend.openssl_assert(
    +                self._backend._lib.OCSP_resp_count(self._basic) == 1
    +            )
    +            self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
    +            self._backend.openssl_assert(
    +                self._single != self._backend._ffi.NULL
    +            )
    +            self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
    +                self._single
    +            )
    +            self._backend.openssl_assert(
    +                self._cert_id != self._backend._ffi.NULL
    +            )
    +
    +    response_status = utils.read_only_property("_status")
    +
    +    @property
    +    @_requires_successful_response
    +    def signature_algorithm_oid(self):
    +        alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
    +        self._backend.openssl_assert(alg != self._backend._ffi.NULL)
    +        oid = _obj2txt(self._backend, alg.algorithm)
    +        return x509.ObjectIdentifier(oid)
    +
    +    @property
    +    @_requires_successful_response
    +    def signature_hash_algorithm(self):
    +        oid = self.signature_algorithm_oid
    +        try:
    +            return x509._SIG_OIDS_TO_HASH[oid]
    +        except KeyError:
    +            raise UnsupportedAlgorithm(
    +                "Signature algorithm OID:{} not recognized".format(oid)
    +            )
    +
    +    @property
    +    @_requires_successful_response
    +    def signature(self):
    +        sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
    +        self._backend.openssl_assert(sig != self._backend._ffi.NULL)
    +        return _asn1_string_to_bytes(self._backend, sig)
    +
    +    @property
    +    @_requires_successful_response
    +    def tbs_response_bytes(self):
    +        respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
    +        self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
    +        pp = self._backend._ffi.new("unsigned char **")
    +        res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
    +        self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
    +        pp = self._backend._ffi.gc(
    +            pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
    +        )
    +        self._backend.openssl_assert(res > 0)
    +        return self._backend._ffi.buffer(pp[0], res)[:]
    +
    +    @property
    +    @_requires_successful_response
    +    def certificates(self):
    +        sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
    +        num = self._backend._lib.sk_X509_num(sk_x509)
    +        certs = []
    +        for i in range(num):
    +            x509 = self._backend._lib.sk_X509_value(sk_x509, i)
    +            self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
    +            cert = _Certificate(self._backend, x509)
    +            # We need to keep the OCSP response that the certificate came from
    +            # alive until the Certificate object itself goes out of scope, so
    +            # we give it a private reference.
    +            cert._ocsp_resp = self
    +            certs.append(cert)
    +
    +        return certs
    +
    +    @property
    +    @_requires_successful_response
    +    def responder_key_hash(self):
    +        _, asn1_string = self._responder_key_name()
    +        if asn1_string == self._backend._ffi.NULL:
    +            return None
    +        else:
    +            return _asn1_string_to_bytes(self._backend, asn1_string)
    +
    +    @property
    +    @_requires_successful_response
    +    def responder_name(self):
    +        x509_name, _ = self._responder_key_name()
    +        if x509_name == self._backend._ffi.NULL:
    +            return None
    +        else:
    +            return _decode_x509_name(self._backend, x509_name)
    +
    +    def _responder_key_name(self):
    +        asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
    +        x509_name = self._backend._ffi.new("X509_NAME **")
    +        res = self._backend._lib.OCSP_resp_get0_id(
    +            self._basic, asn1_string, x509_name
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        return x509_name[0], asn1_string[0]
    +
    +    @property
    +    @_requires_successful_response
    +    def produced_at(self):
    +        produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
    +            self._basic
    +        )
    +        return _parse_asn1_generalized_time(self._backend, produced_at)
    +
    +    @property
    +    @_requires_successful_response
    +    def certificate_status(self):
    +        status = self._backend._lib.OCSP_single_get0_status(
    +            self._single,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +        )
    +        self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
    +        return _CERT_STATUS_TO_ENUM[status]
    +
    +    @property
    +    @_requires_successful_response
    +    def revocation_time(self):
    +        if self.certificate_status is not OCSPCertStatus.REVOKED:
    +            return None
    +
    +        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
    +        self._backend._lib.OCSP_single_get0_status(
    +            self._single,
    +            self._backend._ffi.NULL,
    +            asn1_time,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +        )
    +        self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
    +        return _parse_asn1_generalized_time(self._backend, asn1_time[0])
    +
    +    @property
    +    @_requires_successful_response
    +    def revocation_reason(self):
    +        if self.certificate_status is not OCSPCertStatus.REVOKED:
    +            return None
    +
    +        reason_ptr = self._backend._ffi.new("int *")
    +        self._backend._lib.OCSP_single_get0_status(
    +            self._single,
    +            reason_ptr,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +        )
    +        # If no reason is encoded OpenSSL returns -1
    +        if reason_ptr[0] == -1:
    +            return None
    +        else:
    +            self._backend.openssl_assert(
    +                reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
    +            )
    +            return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]
    +
    +    @property
    +    @_requires_successful_response
    +    def this_update(self):
    +        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
    +        self._backend._lib.OCSP_single_get0_status(
    +            self._single,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            asn1_time,
    +            self._backend._ffi.NULL,
    +        )
    +        self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
    +        return _parse_asn1_generalized_time(self._backend, asn1_time[0])
    +
    +    @property
    +    @_requires_successful_response
    +    def next_update(self):
    +        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
    +        self._backend._lib.OCSP_single_get0_status(
    +            self._single,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            self._backend._ffi.NULL,
    +            asn1_time,
    +        )
    +        if asn1_time[0] != self._backend._ffi.NULL:
    +            return _parse_asn1_generalized_time(self._backend, asn1_time[0])
    +        else:
    +            return None
    +
    +    @property
    +    @_requires_successful_response
    +    def issuer_key_hash(self):
    +        return _issuer_key_hash(self._backend, self._cert_id)
    +
    +    @property
    +    @_requires_successful_response
    +    def issuer_name_hash(self):
    +        return _issuer_name_hash(self._backend, self._cert_id)
    +
    +    @property
    +    @_requires_successful_response
    +    def hash_algorithm(self):
    +        return _hash_algorithm(self._backend, self._cert_id)
    +
    +    @property
    +    @_requires_successful_response
    +    def serial_number(self):
    +        return _serial_number(self._backend, self._cert_id)
    +
    +    @utils.cached_property
    +    @_requires_successful_response
    +    def extensions(self):
    +        return _OCSP_BASICRESP_EXT_PARSER.parse(self._backend, self._basic)
    +
    +    @utils.cached_property
    +    @_requires_successful_response
    +    def single_extensions(self):
    +        return _OCSP_SINGLERESP_EXT_PARSER.parse(
    +            self._backend, self._single
    +        )
    +
    +    def public_bytes(self, encoding):
    +        if encoding is not serialization.Encoding.DER:
    +            raise ValueError(
    +                "The only allowed encoding value is Encoding.DER"
    +            )
    +
    +        bio = self._backend._create_mem_bio_gc()
    +        res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
    +            bio, self._ocsp_response
    +        )
    +        self._backend.openssl_assert(res > 0)
    +        return self._backend._read_mem_bio(bio)
    +
    +
    +@utils.register_interface(OCSPRequest)
    +class _OCSPRequest(object):
    +    def __init__(self, backend, ocsp_request):
    +        if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
    +            raise NotImplementedError(
    +                'OCSP request contains more than one request'
    +            )
    +        self._backend = backend
    +        self._ocsp_request = ocsp_request
    +        self._request = self._backend._lib.OCSP_request_onereq_get0(
    +            self._ocsp_request, 0
    +        )
    +        self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
    +        self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
    +        self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)
    +
    +    @property
    +    def issuer_key_hash(self):
    +        return _issuer_key_hash(self._backend, self._cert_id)
    +
    +    @property
    +    def issuer_name_hash(self):
    +        return _issuer_name_hash(self._backend, self._cert_id)
    +
    +    @property
    +    def serial_number(self):
    +        return _serial_number(self._backend, self._cert_id)
    +
    +    @property
    +    def hash_algorithm(self):
    +        return _hash_algorithm(self._backend, self._cert_id)
    +
    +    @utils.cached_property
    +    def extensions(self):
    +        return _OCSP_REQ_EXT_PARSER.parse(self._backend, self._ocsp_request)
    +
    +    def public_bytes(self, encoding):
    +        if encoding is not serialization.Encoding.DER:
    +            raise ValueError(
    +                "The only allowed encoding value is Encoding.DER"
    +            )
    +
    +        bio = self._backend._create_mem_bio_gc()
    +        res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
    +        self._backend.openssl_assert(res > 0)
    +        return self._backend._read_mem_bio(bio)
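
The comment inside `certificates` describes a lifetime-management pattern these bindings rely on: an object that borrows OpenSSL-owned memory keeps a private reference to its owner so the owner cannot be garbage-collected first. A minimal, runnable sketch of the same idea with hypothetical names (`OwnedBuffer`, `BorrowedView`):

    class OwnedBuffer(object):
        def __init__(self, data):
            self._data = bytearray(data)  # stands in for OpenSSL-owned memory

    class BorrowedView(object):
        def __init__(self, owner):
            self._view = memoryview(owner._data)
            self._owner = owner           # keep-alive: owner outlives the view

    buf = OwnedBuffer(b"ocsp-response-bytes")
    view = BorrowedView(buf)
    del buf                               # safe: view._owner still holds it
    assert view._view.tobytes() == b"ocsp-response-bytes"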
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/poly1305.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/poly1305.py
    new file mode 100644
    index 0000000..25448dd
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/poly1305.py
    @@ -0,0 +1,60 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +
    +from cryptography.exceptions import InvalidSignature
    +from cryptography.hazmat.primitives import constant_time
    +
    +
    +_POLY1305_TAG_SIZE = 16
    +_POLY1305_KEY_SIZE = 32
    +
    +
    +class _Poly1305Context(object):
    +    def __init__(self, backend, key):
    +        self._backend = backend
    +
    +        key_ptr = self._backend._ffi.from_buffer(key)
    +        # This function copies the key into OpenSSL-owned memory so we don't
    +        # need to retain it ourselves
    +        evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key(
    +            self._backend._lib.NID_poly1305,
    +            self._backend._ffi.NULL, key_ptr, len(key)
    +        )
    +        self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
    +        self._evp_pkey = self._backend._ffi.gc(
    +            evp_pkey, self._backend._lib.EVP_PKEY_free
    +        )
    +        ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
    +        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
    +        self._ctx = self._backend._ffi.gc(
    +            ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
    +        )
    +        res = self._backend._lib.EVP_DigestSignInit(
    +            self._ctx, self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            self._backend._ffi.NULL, self._evp_pkey
    +        )
    +        self._backend.openssl_assert(res == 1)
    +
    +    def update(self, data):
    +        data_ptr = self._backend._ffi.from_buffer(data)
    +        res = self._backend._lib.EVP_DigestSignUpdate(
    +            self._ctx, data_ptr, len(data)
    +        )
    +        self._backend.openssl_assert(res != 0)
    +
    +    def finalize(self):
    +        buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE)
    +        outlen = self._backend._ffi.new("size_t *")
    +        res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen)
    +        self._backend.openssl_assert(res != 0)
    +        self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE)
    +        return self._backend._ffi.buffer(buf)[:outlen[0]]
    +
    +    def verify(self, tag):
    +        mac = self.finalize()
    +        if not constant_time.bytes_eq(mac, tag):
    +            raise InvalidSignature("Value did not match computed tag.")
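
This context backs the high-level `cryptography.hazmat.primitives.poly1305.Poly1305` class. A hedged usage sketch, assuming a build linked against OpenSSL 1.1.1+ (older builds raise `UnsupportedAlgorithm`); a Poly1305 key is 32 bytes and must never authenticate two different messages:

    import os
    from cryptography.hazmat.primitives import poly1305

    key = os.urandom(32)                     # one-time key
    p = poly1305.Poly1305(key)
    p.update(b"message to authenticate")
    tag = p.finalize()                       # 16-byte tag

    verifier = poly1305.Poly1305(key)
    verifier.update(b"message to authenticate")
    verifier.verify(tag)                     # raises InvalidSignature on mismatch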
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/rsa.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/rsa.py
    index 9a7bfaa..3e4c2fd 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/rsa.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/rsa.py
    @@ -59,7 +59,7 @@ def _enc_dec_rsa(backend, key, data, padding):
     
         else:
             raise UnsupportedAlgorithm(
    -            "{0} is not supported by this backend.".format(
    +            "{} is not supported by this backend.".format(
                     padding.name
                 ),
                 _Reasons.UNSUPPORTED_PADDING
    @@ -92,14 +92,11 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
             isinstance(padding, OAEP) and
             backend._lib.Cryptography_HAS_RSA_OAEP_MD
         ):
    -        mgf1_md = backend._lib.EVP_get_digestbyname(
    -            padding._mgf._algorithm.name.encode("ascii"))
    -        backend.openssl_assert(mgf1_md != backend._ffi.NULL)
    +        mgf1_md = backend._evp_md_non_null_from_algorithm(
    +            padding._mgf._algorithm)
             res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
             backend.openssl_assert(res > 0)
    -        oaep_md = backend._lib.EVP_get_digestbyname(
    -            padding._algorithm.name.encode("ascii"))
    -        backend.openssl_assert(oaep_md != backend._ffi.NULL)
    +        oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
             res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
             backend.openssl_assert(res > 0)
     
    @@ -130,10 +127,11 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
     def _handle_rsa_enc_dec_error(backend, key):
         errors = backend._consume_errors()
         backend.openssl_assert(errors)
    -    assert errors[0].lib == backend._lib.ERR_LIB_RSA
    +    backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA)
         if isinstance(key, _RSAPublicKey):
    -        assert (errors[0].reason ==
    -                backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
    +        backend.openssl_assert(
    +            errors[0].reason == backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE
    +        )
             raise ValueError(
                 "Data too long for key size. Encrypt less data or use a "
                 "larger key size."
    @@ -151,7 +149,7 @@ def _handle_rsa_enc_dec_error(backend, key):
             if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
                 decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
     
    -        assert errors[0].reason in decoding_errors
    +        backend.openssl_assert(errors[0].reason in decoding_errors)
             raise ValueError("Decryption failed.")
     
     
    @@ -180,7 +178,7 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm):
             padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
         else:
             raise UnsupportedAlgorithm(
    -            "{0} is not supported by this backend.".format(padding.name),
    +            "{} is not supported by this backend.".format(padding.name),
                 _Reasons.UNSUPPORTED_PADDING
             )
     
    @@ -189,15 +187,21 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm):
     
     def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
         padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
    -    evp_md = backend._lib.EVP_get_digestbyname(algorithm.name.encode("ascii"))
    -    backend.openssl_assert(evp_md != backend._ffi.NULL)
    +    evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
         pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
         backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
         pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
         res = init_func(pkey_ctx)
         backend.openssl_assert(res == 1)
         res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
    -    backend.openssl_assert(res > 0)
    +    if res == 0:
    +        backend._consume_errors()
    +        raise UnsupportedAlgorithm(
    +            "{} is not supported by this backend for RSA signing.".format(
    +                algorithm.name
    +            ),
    +            _Reasons.UNSUPPORTED_HASH
    +        )
         res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
         backend.openssl_assert(res > 0)
         if isinstance(padding, PSS):
    @@ -206,10 +210,8 @@ def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
             )
             backend.openssl_assert(res > 0)
     
    -        mgf1_md = backend._lib.EVP_get_digestbyname(
    -            padding._mgf._algorithm.name.encode("ascii")
    -        )
    -        backend.openssl_assert(mgf1_md != backend._ffi.NULL)
    +        mgf1_md = backend._evp_md_non_null_from_algorithm(
    +            padding._mgf._algorithm)
             res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
             backend.openssl_assert(res > 0)
     
    @@ -235,17 +237,19 @@ def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
             pkey_ctx, buf, buflen, data, len(data))
         if res != 1:
             errors = backend._consume_errors()
    -        assert errors[0].lib == backend._lib.ERR_LIB_RSA
    -        reason = None
    -        if (errors[0].reason ==
    -                backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
    +        backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA)
    +        if (
    +            errors[0].reason ==
    +            backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE
    +        ):
                 reason = ("Salt length too long for key size. Try using "
                           "MAX_LENGTH instead.")
             else:
    -            assert (errors[0].reason ==
    -                    backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
    +            backend.openssl_assert(
    +                errors[0].reason ==
    +                backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY
    +            )
                 reason = "Digest too large for key size. Use a larger key."
    -        assert reason is not None
             raise ValueError(reason)
     
         return backend._ffi.buffer(buf)[:]
    @@ -433,8 +437,7 @@ class _RSAPublicKey(object):
     
         def verifier(self, signature, padding, algorithm):
             _warn_sign_verify_deprecated()
    -        if not isinstance(signature, bytes):
    -            raise TypeError("signature must be bytes.")
    +        utils._check_bytes("signature", signature)
     
             _check_not_prehashed(algorithm)
             return _RSAVerificationContext(
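
The new branch in `_rsa_sig_setup` turns a failed `EVP_PKEY_CTX_set_signature_md` into `UnsupportedAlgorithm` instead of an assertion failure, so callers can handle it. A hedged sketch of what that looks like from the public API; whether the except branch actually triggers depends on the linked OpenSSL build:

    from cryptography.exceptions import UnsupportedAlgorithm
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    try:
        signature = key.sign(b"data", padding.PKCS1v15(), hashes.SHA512_256())
    except UnsupportedAlgorithm:
        signature = None  # digest rejected for RSA signing on this build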
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/utils.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/utils.py
    index 05d0fe5..ee472c0 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/utils.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/utils.py
    @@ -11,6 +11,30 @@ from cryptography.hazmat.primitives import hashes
     from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
     
     
    +def _evp_pkey_derive(backend, evp_pkey, peer_public_key):
    +    ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
    +    backend.openssl_assert(ctx != backend._ffi.NULL)
    +    ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
    +    res = backend._lib.EVP_PKEY_derive_init(ctx)
    +    backend.openssl_assert(res == 1)
    +    res = backend._lib.EVP_PKEY_derive_set_peer(
    +        ctx, peer_public_key._evp_pkey
    +    )
    +    backend.openssl_assert(res == 1)
    +    keylen = backend._ffi.new("size_t *")
    +    res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
    +    backend.openssl_assert(res == 1)
    +    backend.openssl_assert(keylen[0] > 0)
    +    buf = backend._ffi.new("unsigned char[]", keylen[0])
    +    res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
    +    if res != 1:
    +        raise ValueError(
    +            "Null shared key derived from public/private pair."
    +        )
    +
    +    return backend._ffi.buffer(buf, keylen[0])[:]
    +
    +
     def _calculate_digest_and_algorithm(backend, data, algorithm):
         if not isinstance(algorithm, Prehashed):
             hash_ctx = hashes.Hash(algorithm, backend)
    @@ -40,6 +64,6 @@ def _warn_sign_verify_deprecated():
         warnings.warn(
             "signer and verifier have been deprecated. Please use sign "
             "and verify instead.",
    -        utils.PersistentlyDeprecated,
    +        utils.PersistentlyDeprecated2017,
             stacklevel=3
         )
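
`_evp_pkey_derive` follows OpenSSL's usual two-pass out-buffer idiom: the first `EVP_PKEY_derive` call passes a NULL buffer so the library only reports the required length; the second call fills a buffer of exactly that size. A runnable pure-Python analogue of the idiom, with a hypothetical `read_into` standing in for the C call:

    def read_into(source, buf):
        # hypothetical C-style API: fills buf when given, always returns length
        if buf is not None:
            buf[:len(source)] = source
        return len(source)

    source = b"shared-secret"
    needed = read_into(source, None)   # pass 1: query required size
    out = bytearray(needed)            # allocate exactly that much
    read_into(source, out)             # pass 2: fill the buffer
    assert bytes(out) == source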
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x25519.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x25519.py
    index 983ece6..8708834 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x25519.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x25519.py
    @@ -5,18 +5,51 @@
     from __future__ import absolute_import, division, print_function
     
     from cryptography import utils
    +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
    +from cryptography.hazmat.primitives import serialization
     from cryptography.hazmat.primitives.asymmetric.x25519 import (
         X25519PrivateKey, X25519PublicKey
     )
     
     
    +_X25519_KEY_SIZE = 32
    +
    +
     @utils.register_interface(X25519PublicKey)
     class _X25519PublicKey(object):
         def __init__(self, backend, evp_pkey):
             self._backend = backend
             self._evp_pkey = evp_pkey
     
    -    def public_bytes(self):
    +    def public_bytes(self, encoding, format):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                encoding is not serialization.Encoding.Raw or
    +                format is not serialization.PublicFormat.Raw
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw"
    +                )
    +
    +            return self._raw_public_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PublicFormat.SubjectPublicKeyInfo
    +        ):
    +            raise ValueError(
    +                "format must be SubjectPublicKeyInfo when encoding is PEM or "
    +                "DER"
    +            )
    +
    +        return self._backend._public_key_bytes(
    +            encoding, format, self, self._evp_pkey, None
    +        )
    +
    +    def _raw_public_bytes(self):
             ucharpp = self._backend._ffi.new("unsigned char **")
             res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
                 self._evp_pkey, ucharpp
    @@ -52,28 +85,51 @@ class _X25519PrivateKey(object):
             if not isinstance(peer_public_key, X25519PublicKey):
                 raise TypeError("peer_public_key must be X25519PublicKey.")
     
    -        ctx = self._backend._lib.EVP_PKEY_CTX_new(
    -            self._evp_pkey, self._backend._ffi.NULL
    +        return _evp_pkey_derive(
    +            self._backend, self._evp_pkey, peer_public_key
             )
    -        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
    -        ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free)
    -        res = self._backend._lib.EVP_PKEY_derive_init(ctx)
    -        self._backend.openssl_assert(res == 1)
    -        res = self._backend._lib.EVP_PKEY_derive_set_peer(
    -            ctx, peer_public_key._evp_pkey
    -        )
    -        self._backend.openssl_assert(res == 1)
    -        keylen = self._backend._ffi.new("size_t *")
    -        res = self._backend._lib.EVP_PKEY_derive(
    -            ctx, self._backend._ffi.NULL, keylen
    -        )
    -        self._backend.openssl_assert(res == 1)
    -        self._backend.openssl_assert(keylen[0] > 0)
    -        buf = self._backend._ffi.new("unsigned char[]", keylen[0])
    -        res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
    -        if res != 1:
    +
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                format is not serialization.PrivateFormat.Raw or
    +                encoding is not serialization.Encoding.Raw or not
    +                isinstance(encryption_algorithm, serialization.NoEncryption)
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw "
    +                    "and encryption_algorithm must be NoEncryption()"
    +                )
    +
    +            return self._raw_private_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PrivateFormat.PKCS8
    +        ):
                 raise ValueError(
    -                "Null shared key derived from public/private pair."
    +                "format must be PKCS8 when encoding is PEM or DER"
                 )
     
    -        return self._backend._ffi.buffer(buf, keylen[0])[:]
    +        return self._backend._private_key_bytes(
    +            encoding, format, encryption_algorithm, self._evp_pkey, None
    +        )
    +
    +    def _raw_private_bytes(self):
    +        # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
     +        # switch this to EVP_PKEY_get_raw_private_key
    +        # The trick we use here is serializing to a PKCS8 key and just
    +        # using the last 32 bytes, which is the key itself.
    +        bio = self._backend._create_mem_bio_gc()
    +        res = self._backend._lib.i2d_PKCS8PrivateKey_bio(
    +            bio, self._evp_pkey,
    +            self._backend._ffi.NULL, self._backend._ffi.NULL,
    +            0, self._backend._ffi.NULL, self._backend._ffi.NULL
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        pkcs8 = self._backend._read_mem_bio(bio)
    +        self._backend.openssl_assert(len(pkcs8) == 48)
    +        return pkcs8[-_X25519_KEY_SIZE:]
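
These methods back the public `x25519` API. A hedged usage sketch (OpenSSL 1.1.0+ builds) showing the Raw/Raw serialization path implemented by `_raw_public_bytes` above, plus a complete key exchange:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import x25519

    alice = x25519.X25519PrivateKey.generate()
    bob = x25519.X25519PrivateKey.generate()

    alice_pub = alice.public_key().public_bytes(
        serialization.Encoding.Raw, serialization.PublicFormat.Raw
    )
    assert len(alice_pub) == 32        # the 32-byte raw form

    shared1 = alice.exchange(bob.public_key())
    shared2 = bob.exchange(alice.public_key())
    assert shared1 == shared2          # both sides derive the same secret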
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x448.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x448.py
    new file mode 100644
    index 0000000..fe0dcd9
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x448.py
    @@ -0,0 +1,123 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +from cryptography import utils
    +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
    +from cryptography.hazmat.primitives import serialization
    +from cryptography.hazmat.primitives.asymmetric.x448 import (
    +    X448PrivateKey, X448PublicKey
    +)
    +
    +_X448_KEY_SIZE = 56
    +
    +
    +@utils.register_interface(X448PublicKey)
    +class _X448PublicKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_bytes(self, encoding, format):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                encoding is not serialization.Encoding.Raw or
    +                format is not serialization.PublicFormat.Raw
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw"
    +                )
    +
    +            return self._raw_public_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PublicFormat.SubjectPublicKeyInfo
    +        ):
    +            raise ValueError(
    +                "format must be SubjectPublicKeyInfo when encoding is PEM or "
    +                "DER"
    +            )
    +
    +        return self._backend._public_key_bytes(
    +            encoding, format, self, self._evp_pkey, None
    +        )
    +
    +    def _raw_public_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
    +
    +
    +@utils.register_interface(X448PrivateKey)
    +class _X448PrivateKey(object):
    +    def __init__(self, backend, evp_pkey):
    +        self._backend = backend
    +        self._evp_pkey = evp_pkey
    +
    +    def public_key(self):
    +        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
    +        return self._backend.x448_load_public_bytes(buf)
    +
    +    def exchange(self, peer_public_key):
    +        if not isinstance(peer_public_key, X448PublicKey):
    +            raise TypeError("peer_public_key must be X448PublicKey.")
    +
    +        return _evp_pkey_derive(
    +            self._backend, self._evp_pkey, peer_public_key
    +        )
    +
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        if (
    +            encoding is serialization.Encoding.Raw or
    +            format is serialization.PublicFormat.Raw
    +        ):
    +            if (
    +                format is not serialization.PrivateFormat.Raw or
    +                encoding is not serialization.Encoding.Raw or not
    +                isinstance(encryption_algorithm, serialization.NoEncryption)
    +            ):
    +                raise ValueError(
    +                    "When using Raw both encoding and format must be Raw "
    +                    "and encryption_algorithm must be NoEncryption()"
    +                )
    +
    +            return self._raw_private_bytes()
    +
    +        if (
    +            encoding in serialization._PEM_DER and
    +            format is not serialization.PrivateFormat.PKCS8
    +        ):
    +            raise ValueError(
    +                "format must be PKCS8 when encoding is PEM or DER"
    +            )
    +
    +        return self._backend._private_key_bytes(
    +            encoding, format, encryption_algorithm, self._evp_pkey, None
    +        )
    +
    +    def _raw_private_bytes(self):
    +        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
    +        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
    +        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
    +            self._evp_pkey, buf, buflen
    +        )
    +        self._backend.openssl_assert(res == 1)
    +        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
    +        return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
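
Unlike the x25519 code, which keeps a PKCS#8 workaround for OpenSSL 1.1.0, x448 support only exists on 1.1.1+ and can use the raw-key accessors directly. A hedged round-trip sketch of the 56-byte raw serialization implemented above:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import x448

    priv = x448.X448PrivateKey.generate()
    raw = priv.private_bytes(
        serialization.Encoding.Raw,
        serialization.PrivateFormat.Raw,
        serialization.NoEncryption(),
    )
    assert len(raw) == 56              # x448 keys are 56 bytes, not 32

    raw_pub = priv.public_key().public_bytes(
        serialization.Encoding.Raw, serialization.PublicFormat.Raw
    )
    again = x448.X448PrivateKey.from_private_bytes(raw)  # lossless round-trip
    assert again.public_key().public_bytes(
        serialization.Encoding.Raw, serialization.PublicFormat.Raw
    ) == raw_pub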
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x509.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x509.py
    index b870eeb..efbb179 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x509.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/backends/openssl/x509.py
    @@ -6,7 +6,6 @@ from __future__ import absolute_import, division, print_function
     
     import datetime
     import operator
    -import warnings
     
     from cryptography import utils, x509
     from cryptography.exceptions import UnsupportedAlgorithm
    @@ -30,7 +29,7 @@ class _Certificate(object):
             self._x509 = x509
     
         def __repr__(self):
    -        return "".format(self.subject)
    +        return "".format(self.subject)
     
         def __eq__(self, other):
             if not isinstance(other, x509.Certificate):
    @@ -59,18 +58,9 @@ class _Certificate(object):
                 return x509.Version.v3
             else:
                 raise x509.InvalidVersion(
    -                "{0} is not a valid X509 version".format(version), version
    +                "{} is not a valid X509 version".format(version), version
                 )
     
    -    @property
    -    def serial(self):
    -        warnings.warn(
    -            "Certificate serial is deprecated, use serial_number instead.",
    -            utils.PersistentlyDeprecated,
    -            stacklevel=2
    -        )
    -        return self.serial_number
    -
         @property
         def serial_number(self):
             asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
    @@ -90,12 +80,12 @@ class _Certificate(object):
     
         @property
         def not_valid_before(self):
    -        asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
    +        asn1_time = self._backend._lib.X509_getm_notBefore(self._x509)
             return _parse_asn1_time(self._backend, asn1_time)
     
         @property
         def not_valid_after(self):
    -        asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
    +        asn1_time = self._backend._lib.X509_getm_notAfter(self._x509)
             return _parse_asn1_time(self._backend, asn1_time)
     
         @property
    @@ -117,7 +107,7 @@ class _Certificate(object):
                 return x509._SIG_OIDS_TO_HASH[oid]
             except KeyError:
                 raise UnsupportedAlgorithm(
    -                "Signature algorithm OID:{0} not recognized".format(oid)
    +                "Signature algorithm OID:{} not recognized".format(oid)
                 )
     
         @property
    @@ -238,11 +228,21 @@ class _CertificateRevocationList(object):
             h.update(der)
             return h.finalize()
     
    +    @utils.cached_property
    +    def _sorted_crl(self):
    +        # X509_CRL_get0_by_serial sorts in place, which breaks a variety of
    +        # things we don't want to break (like iteration and the signature).
    +        # Let's dupe it and sort that instead.
    +        dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
    +        self._backend.openssl_assert(dup != self._backend._ffi.NULL)
    +        dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
    +        return dup
    +
         def get_revoked_certificate_by_serial_number(self, serial_number):
             revoked = self._backend._ffi.new("X509_REVOKED **")
             asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
             res = self._backend._lib.X509_CRL_get0_by_serial(
    -            self._x509_crl, revoked, asn1_int
    +            self._sorted_crl, revoked, asn1_int
             )
             if res == 0:
                 return None
    @@ -251,7 +251,7 @@ class _CertificateRevocationList(object):
                     revoked[0] != self._backend._ffi.NULL
                 )
                 return _RevokedCertificate(
    -                self._backend, self._x509_crl, revoked[0]
    +                self._backend, self._sorted_crl, revoked[0]
                 )
     
         @property
    @@ -261,7 +261,7 @@ class _CertificateRevocationList(object):
                 return x509._SIG_OIDS_TO_HASH[oid]
             except KeyError:
                 raise UnsupportedAlgorithm(
    -                "Signature algorithm OID:{0} not recognized".format(oid)
    +                "Signature algorithm OID:{} not recognized".format(oid)
                 )
     
         @property
    @@ -413,7 +413,7 @@ class _CertificateSigningRequest(object):
                 return x509._SIG_OIDS_TO_HASH[oid]
             except KeyError:
                 raise UnsupportedAlgorithm(
    -                "Signature algorithm OID:{0} not recognized".format(oid)
    +                "Signature algorithm OID:{} not recognized".format(oid)
                 )
     
         @property
    @@ -429,6 +429,14 @@ class _CertificateSigningRequest(object):
         @utils.cached_property
         def extensions(self):
             x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
    +        x509_exts = self._backend._ffi.gc(
    +            x509_exts,
    +            lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
    +                x, self._backend._ffi.addressof(
    +                    self._backend._lib._original_lib, "X509_EXTENSION_free"
    +                )
    +            )
    +        )
             return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
     
         def public_bytes(self, encoding):
    @@ -516,3 +524,23 @@ class _SignedCertificateTimestamp(object):
             # we only have precerts.
             assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
             return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
    +
    +    @property
    +    def _signature(self):
    +        ptrptr = self._backend._ffi.new("unsigned char **")
    +        res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
    +        self._backend.openssl_assert(res > 0)
    +        self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
    +        return self._backend._ffi.buffer(ptrptr[0], res)[:]
    +
    +    def __hash__(self):
    +        return hash(self._signature)
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, _SignedCertificateTimestamp):
    +            return NotImplemented
    +
    +        return self._signature == other._signature
    +
    +    def __ne__(self, other):
    +        return not self == other
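
`_sorted_crl` works around `X509_CRL_get0_by_serial` sorting the CRL in place: lookups run against a cached duplicate, leaving the original entry order (which iteration and the signature depend on) untouched. The same copy-before-mutating-query idea as a runnable plain-Python sketch:

    import bisect

    class Catalog(object):
        def __init__(self, items):
            self._items = list(items)    # insertion order matters to callers
            self._sorted = None

        def _sorted_items(self):
            if self._sorted is None:     # cached, like utils.cached_property
                self._sorted = sorted(self._items)
            return self._sorted

        def contains(self, item):
            s = self._sorted_items()     # query the copy, not the original
            i = bisect.bisect_left(s, item)
            return i < len(s) and s[i] == item

    c = Catalog([3, 1, 2])
    assert c.contains(2) and c._items == [3, 1, 2]   # original order preserved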
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_constant_time.cp37-win32.pyd b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_constant_time.cp37-win32.pyd
    index 7e80078..172e77c 100644
    Binary files a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_constant_time.cp37-win32.pyd and b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_constant_time.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_openssl.cp37-win32.pyd b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_openssl.cp37-win32.pyd
    index 06ec31e..6c9cb89 100644
    Binary files a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_openssl.cp37-win32.pyd and b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_openssl.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_padding.cp37-win32.pyd b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_padding.cp37-win32.pyd
    index 72ae813..4966c42 100644
    Binary files a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_padding.cp37-win32.pyd and b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/_padding.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/_conditional.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/_conditional.py
    index b3e4e8b..a293fb0 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/_conditional.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/_conditional.py
    @@ -98,6 +98,19 @@ def cryptography_has_102_verification_params():
             "X509_VERIFY_PARAM_set1_ip",
             "X509_VERIFY_PARAM_set1_ip_asc",
             "X509_VERIFY_PARAM_set_hostflags",
    +        "SSL_get0_param",
    +        "SSL_CTX_get0_param",
    +        "X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT",
    +        "X509_CHECK_FLAG_NO_WILDCARDS",
    +        "X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS",
    +        "X509_CHECK_FLAG_MULTI_LABEL_WILDCARDS",
    +        "X509_CHECK_FLAG_SINGLE_LABEL_SUBDOMAINS"
    +    ]
    +
    +
    +def cryptography_has_110_verification_params():
    +    return [
    +        "X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"
         ]
     
     
    @@ -138,11 +151,7 @@ def cryptography_has_tls_st():
     
     def cryptography_has_locking_callbacks():
         return [
    -        "CRYPTO_LOCK",
    -        "CRYPTO_UNLOCK",
    -        "CRYPTO_READ",
    -        "CRYPTO_LOCK_SSL",
    -        "CRYPTO_lock",
    +        "Cryptography_setup_ssl_threads",
         ]
     
     
    @@ -181,11 +190,19 @@ def cryptography_has_sct():
             "SCT_get_version",
             "SCT_get_log_entry_type",
             "SCT_get0_log_id",
    +        "SCT_get0_signature",
             "SCT_get_timestamp",
             "SCT_set_source",
             "sk_SCT_num",
             "sk_SCT_value",
             "SCT_LIST_free",
    +        "sk_SCT_push",
    +        "sk_SCT_new_null",
    +        "SCT_new",
    +        "SCT_set1_log_id",
    +        "SCT_set_timestamp",
    +        "SCT_set_version",
    +        "SCT_set_log_entry_type",
         ]
     
     
    @@ -203,6 +220,47 @@ def cryptography_has_x25519():
         ]
     
     
    +def cryptography_has_x448():
    +    return [
    +        "EVP_PKEY_X448",
    +        "NID_X448",
    +    ]
    +
    +
    +def cryptography_has_ed448():
    +    return [
    +        "EVP_PKEY_ED448",
    +        "NID_ED448",
    +    ]
    +
    +
    +def cryptography_has_ed25519():
    +    return [
    +        "NID_ED25519",
    +        "EVP_PKEY_ED25519",
    +    ]
    +
    +
    +def cryptography_has_poly1305():
    +    return [
    +        "NID_poly1305",
    +        "EVP_PKEY_POLY1305",
    +    ]
    +
    +
    +def cryptography_has_oneshot_evp_digest_sign_verify():
    +    return [
    +        "EVP_DigestSign",
    +        "EVP_DigestVerify",
    +    ]
    +
    +
    +def cryptography_has_evp_digestfinal_xof():
    +    return [
    +        "EVP_DigestFinalXOF",
    +    ]
    +
    +
     def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
         return [
             "EVP_PKEY_get1_tls_encodedpoint",
    @@ -212,7 +270,7 @@ def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
     
     def cryptography_has_fips():
         return [
    -        "FIPS_set_mode",
    +        "FIPS_mode_set",
             "FIPS_mode",
         ]
     
    @@ -246,6 +304,67 @@ def cryptography_has_openssl_cleanup():
         ]
     
     
    +def cryptography_has_cipher_details():
    +    return [
    +        "SSL_CIPHER_is_aead",
    +        "SSL_CIPHER_get_cipher_nid",
    +        "SSL_CIPHER_get_digest_nid",
    +        "SSL_CIPHER_get_kx_nid",
    +        "SSL_CIPHER_get_auth_nid",
    +    ]
    +
    +
    +def cryptography_has_tlsv13():
    +    return [
    +        "SSL_OP_NO_TLSv1_3",
    +        "SSL_VERIFY_POST_HANDSHAKE",
    +        "SSL_CTX_set_ciphersuites",
    +        "SSL_verify_client_post_handshake",
    +        "SSL_CTX_set_post_handshake_auth",
    +        "SSL_set_post_handshake_auth",
    +        "SSL_SESSION_get_max_early_data",
    +        "SSL_write_early_data",
    +        "SSL_read_early_data",
    +        "SSL_CTX_set_max_early_data",
    +    ]
    +
    +
    +def cryptography_has_raw_key():
    +    return [
    +        "EVP_PKEY_new_raw_private_key",
    +        "EVP_PKEY_new_raw_public_key",
    +        "EVP_PKEY_get_raw_private_key",
    +        "EVP_PKEY_get_raw_public_key",
    +    ]
    +
    +
    +def cryptography_has_evp_r_memory_limit_exceeded():
    +    return [
    +        "EVP_R_MEMORY_LIMIT_EXCEEDED",
    +    ]
    +
    +
    +def cryptography_has_engine():
    +    return [
    +        "ENGINE_by_id",
    +        "ENGINE_init",
    +        "ENGINE_finish",
    +        "ENGINE_get_default_RAND",
    +        "ENGINE_set_default_RAND",
    +        "ENGINE_unregister_RAND",
    +        "ENGINE_ctrl_cmd",
    +        "ENGINE_free",
    +        "ENGINE_get_name",
    +        "Cryptography_add_osrandom_engine",
    +    ]
    +
    +
    +def cryptography_has_verified_chain():
    +    return [
    +        "SSL_get0_verified_chain",
    +    ]
    +
    +
     # This is a mapping of
     # {condition: function-returning-names-dependent-on-that-condition} so we can
     # loop over them and delete unsupported names at runtime. It will be removed
    @@ -270,6 +389,9 @@ CONDITIONAL_NAMES = {
         "Cryptography_HAS_102_VERIFICATION_PARAMS": (
             cryptography_has_102_verification_params
         ),
    +    "Cryptography_HAS_110_VERIFICATION_PARAMS": (
    +        cryptography_has_110_verification_params
    +    ),
         "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": (
             cryptography_has_x509_v_flag_trusted_first
         ),
    @@ -291,6 +413,13 @@ CONDITIONAL_NAMES = {
             cryptography_has_x509_store_ctx_get_issuer
         ),
         "Cryptography_HAS_X25519": cryptography_has_x25519,
    +    "Cryptography_HAS_X448": cryptography_has_x448,
    +    "Cryptography_HAS_ED448": cryptography_has_ed448,
    +    "Cryptography_HAS_ED25519": cryptography_has_ed25519,
    +    "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
    +    "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": (
    +        cryptography_has_oneshot_evp_digest_sign_verify
    +    ),
         "Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
             cryptography_has_evp_pkey_get_set_tls_encodedpoint
         ),
    @@ -299,4 +428,15 @@ CONDITIONAL_NAMES = {
         "Cryptography_HAS_PSK": cryptography_has_psk,
         "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
         "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
    +    "Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details,
    +    "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13,
    +    "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
    +    "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
    +        cryptography_has_evp_digestfinal_xof
    +    ),
    +    "Cryptography_HAS_EVP_R_MEMORY_LIMIT_EXCEEDED": (
    +        cryptography_has_evp_r_memory_limit_exceeded
    +    ),
    +    "Cryptography_HAS_ENGINE": cryptography_has_engine,
    +    "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
     }
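
As the comment above the mapping says, `CONDITIONAL_NAMES` maps a feature flag to a function returning the binding names that exist only when that flag is true; at import time the bindings walk this map and delete names the linked OpenSSL lacks. A hedged miniature of that pruning loop, with hypothetical flags and names:

    CONDITIONAL = {
        "HAS_FEATURE_X": lambda: ["feature_x_call", "FEATURE_X_FLAG"],
        "HAS_FEATURE_Y": lambda: ["feature_y_call"],
    }

    def prune(namespace, flags):
        # drop every name whose guarding condition is false, mirroring how
        # unsupported OpenSSL symbols are stripped from the bindings at runtime
        for condition, names in CONDITIONAL.items():
            if not flags.get(condition, False):
                for name in names():
                    namespace.pop(name, None)

    ns = {"feature_x_call": object(), "FEATURE_X_FLAG": 1, "feature_y_call": object()}
    prune(ns, {"HAS_FEATURE_X": True, "HAS_FEATURE_Y": False})
    assert "feature_x_call" in ns and "feature_y_call" not in ns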
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/binding.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/binding.py
    index 81cf547..1e0f34c 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/binding.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/bindings/openssl/binding.py
    @@ -5,9 +5,12 @@
     from __future__ import absolute_import, division, print_function
     
     import collections
    +import os
     import threading
     import types
    +import warnings
     
    +import cryptography
     from cryptography import utils
     from cryptography.exceptions import InternalError
     from cryptography.hazmat.bindings._openssl import ffi, lib
    @@ -113,10 +116,9 @@ class Binding(object):
             # reliably clear the error queue. Once we clear it here we will
             # error on any subsequent unexpected item in the stack.
             cls.lib.ERR_clear_error()
    -        cls._osrandom_engine_id = cls.lib.Cryptography_osrandom_engine_id
    -        cls._osrandom_engine_name = cls.lib.Cryptography_osrandom_engine_name
    -        result = cls.lib.Cryptography_add_osrandom_engine()
    -        _openssl_assert(cls.lib, result in (1, 2))
    +        if cls.lib.Cryptography_HAS_ENGINE:
    +            result = cls.lib.Cryptography_add_osrandom_engine()
    +            _openssl_assert(cls.lib, result in (1, 2))
     
         @classmethod
         def _ensure_ffi_initialized(cls):
    @@ -140,7 +142,8 @@ class Binding(object):
                 # the setup for this.
                 __import__("_ssl")
     
    -            if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
    +            if (not cls.lib.Cryptography_HAS_LOCKING_CALLBACKS or
    +                    cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL):
                     return
     
                 # If nothing else has setup a locking callback already, we set up
    @@ -149,9 +152,54 @@ class Binding(object):
                 _openssl_assert(cls.lib, res == 1)
     
     
    +def _verify_openssl_version(lib):
    +    if (
    +        lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
    +        not lib.CRYPTOGRAPHY_IS_LIBRESSL
    +    ):
    +        if os.environ.get("CRYPTOGRAPHY_ALLOW_OPENSSL_101"):
    +            warnings.warn(
    +                "OpenSSL version 1.0.1 is no longer supported by the OpenSSL "
    +                "project, please upgrade. The next version of cryptography "
    +                "will completely remove support for it.",
    +                utils.CryptographyDeprecationWarning
    +            )
    +        else:
    +            raise RuntimeError(
    +                "You are linking against OpenSSL 1.0.1, which is no longer "
    +                "supported by the OpenSSL project. You need to upgrade to a "
    +                "newer version of OpenSSL."
    +            )
    +
    +
    +def _verify_package_version(version):
    +    # Occasionally we run into situations where the version of the Python
    +    # package does not match the version of the shared object that is loaded.
    +    # This may occur in environments where multiple versions of cryptography
    +    # are installed and available in the python path. To avoid errors cropping
    +    # up later this code checks that the currently imported package and the
    +    # shared object that were loaded have the same version and raise an
    +    # ImportError if they do not
    +    so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION)
    +    if version.encode("ascii") != so_package_version:
    +        raise ImportError(
    +            "The version of cryptography does not match the loaded "
    +            "shared object. This can happen if you have multiple copies of "
    +            "cryptography installed in your Python path. Please try creating "
    +            "a new virtual environment to resolve this issue. "
    +            "Loaded python version: {}, shared object version: {}".format(
    +                version, so_package_version
    +            )
    +        )
    +
    +
    +_verify_package_version(cryptography.__version__)
    +
     # OpenSSL is not thread safe until the locks are initialized. We call this
     # method in module scope so that it executes with the import lock. On
     # Pythons < 3.4 this import lock is a global lock, which can prevent a race
     # condition registering the OpenSSL locks. On Python 3.4+ the import lock
     # is per module so this approach will not work.
     Binding.init_static_locks()
    +
    +_verify_openssl_version(Binding.lib)
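The two import-time guards added above change how the module fails: a mismatch between the Python package version and the loaded shared object now raises ImportError immediately, and linking against OpenSSL 1.0.1 raises RuntimeError unless an escape-hatch environment variable is set. A minimal sketch of the opt-in path, using the environment variable name from the hunk (everything else here is illustrative):

```python
import os

# Must be set before cryptography is imported: _verify_openssl_version()
# runs at module import time, right after Binding.init_static_locks().
os.environ["CRYPTOGRAPHY_ALLOW_OPENSSL_101"] = "1"

import cryptography.hazmat.bindings.openssl.binding  # noqa: E402
# On an OpenSSL 1.0.1 build this now emits CryptographyDeprecationWarning
# instead of raising RuntimeError.
```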
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ec.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ec.py
    index 6cbfcab..eef922d 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ec.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ec.py
    @@ -5,10 +5,34 @@
     from __future__ import absolute_import, division, print_function
     
     import abc
    +import warnings
     
     import six
     
     from cryptography import utils
    +from cryptography.hazmat._oid import ObjectIdentifier
    +
    +
    +class EllipticCurveOID(object):
    +    SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
    +    SECP224R1 = ObjectIdentifier("1.3.132.0.33")
    +    SECP256K1 = ObjectIdentifier("1.3.132.0.10")
    +    SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
    +    SECP384R1 = ObjectIdentifier("1.3.132.0.34")
    +    SECP521R1 = ObjectIdentifier("1.3.132.0.35")
    +    BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
    +    BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
    +    BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
    +    SECT163K1 = ObjectIdentifier("1.3.132.0.1")
    +    SECT163R2 = ObjectIdentifier("1.3.132.0.15")
    +    SECT233K1 = ObjectIdentifier("1.3.132.0.26")
    +    SECT233R1 = ObjectIdentifier("1.3.132.0.27")
    +    SECT283K1 = ObjectIdentifier("1.3.132.0.16")
    +    SECT283R1 = ObjectIdentifier("1.3.132.0.17")
    +    SECT409K1 = ObjectIdentifier("1.3.132.0.36")
    +    SECT409R1 = ObjectIdentifier("1.3.132.0.37")
    +    SECT571K1 = ObjectIdentifier("1.3.132.0.38")
    +    SECT571R1 = ObjectIdentifier("1.3.132.0.39")
     
     
     @six.add_metaclass(abc.ABCMeta)
    @@ -68,7 +92,7 @@ class EllipticCurvePrivateKey(object):
             Bit size of a secret scalar for the curve.
             """
     
    -    @abc.abstractproperty
    +    @abc.abstractmethod
         def sign(self, data, signature_algorithm):
             """
             Signs the data
    @@ -128,6 +152,22 @@ class EllipticCurvePublicKey(object):
             Verifies the signature of the data.
             """
     
    +    @classmethod
    +    def from_encoded_point(cls, curve, data):
    +        utils._check_bytes("data", data)
    +
    +        if not isinstance(curve, EllipticCurve):
    +            raise TypeError("curve must be an EllipticCurve instance")
    +
    +        if len(data) == 0:
    +            raise ValueError("data must not be an empty byte string")
    +
    +        if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]:
    +            raise ValueError("Unsupported elliptic curve point type")
    +
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        return backend.load_elliptic_curve_public_bytes(curve, data)
    +
     
     EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
     
    @@ -319,6 +359,14 @@ class EllipticCurvePublicNumbers(object):
             return backend.load_elliptic_curve_public_numbers(self)
     
         def encode_point(self):
    +        warnings.warn(
    +            "encode_point has been deprecated on EllipticCurvePublicNumbers"
    +            " and will be removed in a future version. Please use "
    +            "EllipticCurvePublicKey.public_bytes to obtain both "
    +            "compressed and uncompressed point encoding.",
    +            utils.PersistentlyDeprecated2019,
    +            stacklevel=2,
    +        )
             # key_size is in bits. Convert to bytes and round up
             byte_length = (self.curve.key_size + 7) // 8
             return (
    @@ -331,6 +379,14 @@ class EllipticCurvePublicNumbers(object):
             if not isinstance(curve, EllipticCurve):
                 raise TypeError("curve must be an EllipticCurve instance")
     
    +        warnings.warn(
    +            "Support for unsafe construction of public numbers from "
    +            "encoded data will be removed in a future version. "
    +            "Please use EllipticCurvePublicKey.from_encoded_point",
    +            utils.PersistentlyDeprecated2019,
    +            stacklevel=2,
    +        )
    +
             if data.startswith(b'\x04'):
                 # key_size is in bits. Convert to bytes and round up
                 byte_length = (curve.key_size + 7) // 8
    @@ -409,3 +465,36 @@ class EllipticCurvePrivateNumbers(object):
     
     class ECDH(object):
         pass
    +
    +
    +_OID_TO_CURVE = {
    +    EllipticCurveOID.SECP192R1: SECP192R1,
    +    EllipticCurveOID.SECP224R1: SECP224R1,
    +    EllipticCurveOID.SECP256K1: SECP256K1,
    +    EllipticCurveOID.SECP256R1: SECP256R1,
    +    EllipticCurveOID.SECP384R1: SECP384R1,
    +    EllipticCurveOID.SECP521R1: SECP521R1,
    +    EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
    +    EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
    +    EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
    +    EllipticCurveOID.SECT163K1: SECT163K1,
    +    EllipticCurveOID.SECT163R2: SECT163R2,
    +    EllipticCurveOID.SECT233K1: SECT233K1,
    +    EllipticCurveOID.SECT233R1: SECT233R1,
    +    EllipticCurveOID.SECT283K1: SECT283K1,
    +    EllipticCurveOID.SECT283R1: SECT283R1,
    +    EllipticCurveOID.SECT409K1: SECT409K1,
    +    EllipticCurveOID.SECT409R1: SECT409R1,
    +    EllipticCurveOID.SECT571K1: SECT571K1,
    +    EllipticCurveOID.SECT571R1: SECT571R1,
    +}
    +
    +
    +def get_curve_for_oid(oid):
    +    try:
    +        return _OID_TO_CURVE[oid]
    +    except KeyError:
    +        raise LookupError(
    +            "The provided object identifier has no matching elliptic "
    +            "curve class"
    +        )
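Taken together, the ec.py hunks deprecate the point-encoding helpers on EllipticCurvePublicNumbers in favour of key-level APIs and add OID lookups. A round-trip sketch using the new entry points; the X962/serialization names come from the same cryptography release and are assumptions here, not part of this diff:

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
point = private_key.public_key().public_bytes(
    serialization.Encoding.X962,
    serialization.PublicFormat.UncompressedPoint,  # 0x04-prefixed, as checked above
)

# Rebuild the public key from the encoded point (replaces the deprecated
# EllipticCurvePublicNumbers.from_encoded_point).
public_key = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), point)

# New OID helper added by this hunk.
assert ec.get_curve_for_oid(ec.EllipticCurveOID.SECP256R1) is ec.SECP256R1
```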
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed25519.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed25519.py
    new file mode 100644
    index 0000000..d89445f
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed25519.py
    @@ -0,0 +1,84 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import abc
    +
    +import six
    +
    +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
    +
    +
    +_ED25519_KEY_SIZE = 32
    +_ED25519_SIG_SIZE = 64
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class Ed25519PublicKey(object):
    +    @classmethod
    +    def from_public_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed25519_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed25519 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +
    +        return backend.ed25519_load_public_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_bytes(self, encoding, format):
    +        """
    +        The serialized bytes of the public key.
    +        """
    +
    +    @abc.abstractmethod
    +    def verify(self, signature, data):
    +        """
    +        Verify the signature.
    +        """
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class Ed25519PrivateKey(object):
    +    @classmethod
    +    def generate(cls):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed25519_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed25519 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +
    +        return backend.ed25519_generate_key()
    +
    +    @classmethod
    +    def from_private_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed25519_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed25519 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +
    +        return backend.ed25519_load_private_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_key(self):
    +        """
    +        The Ed25519PublicKey derived from the private key.
    +        """
    +
    +    @abc.abstractmethod
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        """
    +        The serialized bytes of the private key.
    +        """
    +
    +    @abc.abstractmethod
    +    def sign(self, data):
    +        """
    +        Signs the data.
    +        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed448.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed448.py
    new file mode 100644
    index 0000000..939157a
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/ed448.py
    @@ -0,0 +1,79 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import abc
    +
    +import six
    +
    +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class Ed448PublicKey(object):
    +    @classmethod
    +    def from_public_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed448_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +
    +        return backend.ed448_load_public_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_bytes(self, encoding, format):
    +        """
    +        The serialized bytes of the public key.
    +        """
    +
    +    @abc.abstractmethod
    +    def verify(self, signature, data):
    +        """
    +        Verify the signature.
    +        """
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class Ed448PrivateKey(object):
    +    @classmethod
    +    def generate(cls):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed448_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +        return backend.ed448_generate_key()
    +
    +    @classmethod
    +    def from_private_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.ed448_supported():
    +            raise UnsupportedAlgorithm(
    +                "ed448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
    +            )
    +
    +        return backend.ed448_load_private_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_key(self):
    +        """
    +        The Ed448PublicKey derived from the private key.
    +        """
    +
    +    @abc.abstractmethod
    +    def sign(self, data):
    +        """
    +        Signs the data.
    +        """
    +
    +    @abc.abstractmethod
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        """
    +        The serialized bytes of the private key.
    +        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/rsa.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/rsa.py
    index 27db671..f20cdf9 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/rsa.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/rsa.py
    @@ -184,12 +184,12 @@ def _modinv(e, m):
         """
         Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
         """
    -    x1, y1, x2, y2 = 1, 0, 0, 1
    +    x1, x2 = 1, 0
         a, b = e, m
         while b > 0:
             q, r = divmod(a, b)
    -        xn, yn = x1 - q * x2, y1 - q * y2
    -        a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
    +        xn = x1 - q * x2
    +        a, b, x1, x2 = b, r, x2, xn
         return x1 % m
     
     
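The _modinv rewrite drops the y-sequence of the extended Euclidean algorithm, which was computed but never read; the return value is unchanged. A toy check of the simplified loop (the numbers are illustrative, not from the diff):

```python
def modinv(e, m):
    # Same iteration as the patched _modinv, tracking only the x-sequence.
    x1, x2 = 1, 0
    a, b = e, m
    while b > 0:
        q, r = divmod(a, b)
        xn = x1 - q * x2
        a, b, x1, x2 = b, r, x2, xn
    return x1 % m

phi = 3120  # totient of the toy RSA modulus 61 * 53
d = modinv(65537, phi)
assert (d * 65537) % phi == 1
```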
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/utils.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/utils.py
    index ef1e7eb..14d2abe 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/utils.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/utils.py
    @@ -4,49 +4,26 @@
     
     from __future__ import absolute_import, division, print_function
     
    -import warnings
    -
    -from asn1crypto.algos import DSASignature
    -
    -import six
    -
     from cryptography import utils
    +from cryptography.hazmat._der import (
    +    DERReader, INTEGER, SEQUENCE, encode_der, encode_der_integer
    +)
     from cryptography.hazmat.primitives import hashes
     
     
    -def decode_rfc6979_signature(signature):
    -    warnings.warn(
    -        "decode_rfc6979_signature is deprecated and will "
    -        "be removed in a future version, use decode_dss_signature instead.",
    -        utils.PersistentlyDeprecated,
    -        stacklevel=2
    -    )
    -    return decode_dss_signature(signature)
    -
    -
     def decode_dss_signature(signature):
    -    data = DSASignature.load(signature, strict=True).native
    -    return data['r'], data['s']
    -
    -
    -def encode_rfc6979_signature(r, s):
    -    warnings.warn(
    -        "encode_rfc6979_signature is deprecated and will "
    -        "be removed in a future version, use encode_dss_signature instead.",
    -        utils.PersistentlyDeprecated,
    -        stacklevel=2
    -    )
    -    return encode_dss_signature(r, s)
    +    with DERReader(signature).read_single_element(SEQUENCE) as seq:
    +        r = seq.read_element(INTEGER).as_integer()
    +        s = seq.read_element(INTEGER).as_integer()
    +        return r, s
     
     
     def encode_dss_signature(r, s):
    -    if (
    -        not isinstance(r, six.integer_types) or
    -        not isinstance(s, six.integer_types)
    -    ):
    -        raise ValueError("Both r and s must be integers")
    -
    -    return DSASignature({'r': r, 's': s}).dump()
    +    return encode_der(
    +        SEQUENCE,
    +        encode_der(INTEGER, encode_der_integer(r)),
    +        encode_der(INTEGER, encode_der_integer(s)),
    +    )
     
     
     class Prehashed(object):
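decode_dss_signature and encode_dss_signature now use the in-tree DER reader instead of asn1crypto (removing that dependency), and the long-deprecated *_rfc6979_signature aliases are gone. The public behaviour is a simple round-trip:

```python
from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature, encode_dss_signature,
)

der = encode_dss_signature(12345, 67890)   # DER SEQUENCE of two INTEGERs
r, s = decode_dss_signature(der)
assert (r, s) == (12345, 67890)
```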
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x25519.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x25519.py
    index 5c4652a..61a95ff 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x25519.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x25519.py
    @@ -21,11 +21,14 @@ class X25519PublicKey(object):
                     "X25519 is not supported by this version of OpenSSL.",
                     _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
                 )
    +
             return backend.x25519_load_public_bytes(data)
     
         @abc.abstractmethod
    -    def public_bytes(self):
    -        pass
    +    def public_bytes(self, encoding, format):
    +        """
    +        The serialized bytes of the public key.
    +        """
     
     
     @six.add_metaclass(abc.ABCMeta)
    @@ -41,14 +44,30 @@ class X25519PrivateKey(object):
             return backend.x25519_generate_key()
     
         @classmethod
    -    def _from_private_bytes(cls, data):
    +    def from_private_bytes(cls, data):
             from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.x25519_supported():
    +            raise UnsupportedAlgorithm(
    +                "X25519 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
    +            )
    +
             return backend.x25519_load_private_bytes(data)
     
         @abc.abstractmethod
         def public_key(self):
    -        pass
    +        """
    +        The serialized bytes of the public key.
    +        """
    +
    +    @abc.abstractmethod
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        """
    +        The serialized bytes of the private key.
    +        """
     
         @abc.abstractmethod
         def exchange(self, peer_public_key):
    -        pass
    +        """
    +        Performs a key exchange operation using the provided peer's public key.
    +        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x448.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x448.py
    new file mode 100644
    index 0000000..475e678
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/asymmetric/x448.py
    @@ -0,0 +1,73 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import abc
    +
    +import six
    +
    +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class X448PublicKey(object):
    +    @classmethod
    +    def from_public_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.x448_supported():
    +            raise UnsupportedAlgorithm(
    +                "X448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
    +            )
    +
    +        return backend.x448_load_public_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_bytes(self, encoding, format):
    +        """
    +        The serialized bytes of the public key.
    +        """
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class X448PrivateKey(object):
    +    @classmethod
    +    def generate(cls):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.x448_supported():
    +            raise UnsupportedAlgorithm(
    +                "X448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
    +            )
    +        return backend.x448_generate_key()
    +
    +    @classmethod
    +    def from_private_bytes(cls, data):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.x448_supported():
    +            raise UnsupportedAlgorithm(
    +                "X448 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
    +            )
    +
    +        return backend.x448_load_private_bytes(data)
    +
    +    @abc.abstractmethod
    +    def public_key(self):
    +        """
    +        The serialized bytes of the public key.
    +        """
    +
    +    @abc.abstractmethod
    +    def private_bytes(self, encoding, format, encryption_algorithm):
    +        """
    +        The serialized bytes of the private key.
    +        """
    +
    +    @abc.abstractmethod
    +    def exchange(self, peer_public_key):
    +        """
    +        Performs a key exchange operation using the provided peer's public key.
    +        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/aead.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/aead.py
    index e519765..a20a80f 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/aead.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/aead.py
    @@ -20,7 +20,7 @@ class ChaCha20Poly1305(object):
                     "ChaCha20Poly1305 is not supported by this version of OpenSSL",
                     exceptions._Reasons.UNSUPPORTED_CIPHER
                 )
    -        utils._check_bytes("key", key)
    +        utils._check_byteslike("key", key)
     
             if len(key) != 32:
                 raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
    @@ -56,7 +56,7 @@ class ChaCha20Poly1305(object):
             )
     
         def _check_params(self, nonce, data, associated_data):
    -        utils._check_bytes("nonce", nonce)
    +        utils._check_byteslike("nonce", nonce)
             utils._check_bytes("data", data)
             utils._check_bytes("associated_data", associated_data)
             if len(nonce) != 12:
    @@ -67,7 +67,7 @@ class AESCCM(object):
         _MAX_SIZE = 2 ** 32
     
         def __init__(self, key, tag_length=16):
    -        utils._check_bytes("key", key)
    +        utils._check_byteslike("key", key)
             if len(key) not in (16, 24, 32):
                 raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
     
    @@ -75,7 +75,7 @@ class AESCCM(object):
             if not isinstance(tag_length, int):
                 raise TypeError("tag_length must be an integer")
     
    -        if tag_length not in (4, 6, 8, 12, 14, 16):
    +        if tag_length not in (4, 6, 8, 10, 12, 14, 16):
                 raise ValueError("Invalid tag_length")
     
             self._tag_length = tag_length
    @@ -126,10 +126,10 @@ class AESCCM(object):
             # https://tools.ietf.org/html/rfc3610#section-2.1
             l_val = 15 - len(nonce)
             if 2 ** (8 * l_val) < data_len:
    -            raise ValueError("Nonce too long for data")
    +            raise ValueError("Data too long for nonce")
     
         def _check_params(self, nonce, data, associated_data):
    -        utils._check_bytes("nonce", nonce)
    +        utils._check_byteslike("nonce", nonce)
             utils._check_bytes("data", data)
             utils._check_bytes("associated_data", associated_data)
             if not 7 <= len(nonce) <= 13:
    @@ -140,7 +140,7 @@ class AESGCM(object):
         _MAX_SIZE = 2 ** 32
     
         def __init__(self, key):
    -        utils._check_bytes("key", key)
    +        utils._check_byteslike("key", key)
             if len(key) not in (16, 24, 32):
                 raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
     
    @@ -181,7 +181,7 @@ class AESGCM(object):
             )
     
         def _check_params(self, nonce, data, associated_data):
    -        utils._check_bytes("nonce", nonce)
    +        utils._check_byteslike("nonce", nonce)
             utils._check_bytes("data", data)
             utils._check_bytes("associated_data", associated_data)
             if len(nonce) == 0:
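The aead.py hunks relax the key and nonce checks from strict bytes to bytes-like, allow the CCM tag length 10 permitted by RFC 3610, and fix the inverted "Nonce too long for data" message. Typical AESGCM usage is unchanged; the memoryview below is only to illustrate the relaxed check and assumes the backend accepts bytes-like keys end to end:

```python
import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(memoryview(key))           # bytes-like now passes _check_byteslike
nonce = os.urandom(12)
ct = aesgcm.encrypt(nonce, b"secret", b"associated data")
assert aesgcm.decrypt(nonce, ct, b"associated data") == b"secret"
```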
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/algorithms.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/algorithms.py
    index 68a9e33..f4d5160 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/algorithms.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/algorithms.py
    @@ -13,11 +13,11 @@ from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce
     
     def _verify_key_size(algorithm, key):
         # Verify that the key is instance of bytes
    -    utils._check_bytes("key", key)
    +    utils._check_byteslike("key", key)
     
         # Verify that the key size matches the expected key size
         if len(key) * 8 not in algorithm.key_sizes:
    -        raise ValueError("Invalid key size ({0}) for {1}.".format(
    +        raise ValueError("Invalid key size ({}) for {}.".format(
                 len(key) * 8, algorithm.name
             ))
         return key
    @@ -153,8 +153,7 @@ class ChaCha20(object):
     
         def __init__(self, key, nonce):
             self.key = _verify_key_size(self, key)
    -        if not isinstance(nonce, bytes):
    -            raise TypeError("nonce must be bytes")
    +        utils._check_byteslike("nonce", nonce)
     
             if len(nonce) != 16:
                 raise ValueError("nonce must be 128-bits (16 bytes)")
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/base.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/base.py
    index f857041..4d5f8d6 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/base.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/base.py
    @@ -179,7 +179,7 @@ class _AEADCipherContext(object):
             self._bytes_processed += data_size
             if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
                 raise ValueError(
    -                "{0} has a maximum encrypted byte limit of {1}".format(
    +                "{} has a maximum encrypted byte limit of {}".format(
                         self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
                     )
                 )
    @@ -217,7 +217,7 @@ class _AEADCipherContext(object):
             self._aad_bytes_processed += len(data)
             if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
                 raise ValueError(
    -                "{0} has a maximum AAD byte limit of {1}".format(
    +                "{} has a maximum AAD byte limit of {}".format(
                         self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
                     )
                 )
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/modes.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/modes.py
    index e82c1a8..78fa1c4 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/modes.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/ciphers/modes.py
    @@ -72,7 +72,7 @@ def _check_aes_key_length(self, algorithm):
     
     def _check_iv_length(self, algorithm):
         if len(self.initialization_vector) * 8 != algorithm.block_size:
    -        raise ValueError("Invalid IV size ({0}) for {1}.".format(
    +        raise ValueError("Invalid IV size ({}) for {}.".format(
                 len(self.initialization_vector), self.name
             ))
     
    @@ -88,9 +88,7 @@ class CBC(object):
         name = "CBC"
     
         def __init__(self, initialization_vector):
    -        if not isinstance(initialization_vector, bytes):
    -            raise TypeError("initialization_vector must be bytes")
    -
    +        utils._check_byteslike("initialization_vector", initialization_vector)
             self._initialization_vector = initialization_vector
     
         initialization_vector = utils.read_only_property("_initialization_vector")
    @@ -103,8 +101,7 @@ class XTS(object):
         name = "XTS"
     
         def __init__(self, tweak):
    -        if not isinstance(tweak, bytes):
    -            raise TypeError("tweak must be bytes")
    +        utils._check_byteslike("tweak", tweak)
     
             if len(tweak) != 16:
                 raise ValueError("tweak must be 128-bits (16 bytes)")
    @@ -134,9 +131,7 @@ class OFB(object):
         name = "OFB"
     
         def __init__(self, initialization_vector):
    -        if not isinstance(initialization_vector, bytes):
    -            raise TypeError("initialization_vector must be bytes")
    -
    +        utils._check_byteslike("initialization_vector", initialization_vector)
             self._initialization_vector = initialization_vector
     
         initialization_vector = utils.read_only_property("_initialization_vector")
    @@ -149,9 +144,7 @@ class CFB(object):
         name = "CFB"
     
         def __init__(self, initialization_vector):
    -        if not isinstance(initialization_vector, bytes):
    -            raise TypeError("initialization_vector must be bytes")
    -
    +        utils._check_byteslike("initialization_vector", initialization_vector)
             self._initialization_vector = initialization_vector
     
         initialization_vector = utils.read_only_property("_initialization_vector")
    @@ -164,9 +157,7 @@ class CFB8(object):
         name = "CFB8"
     
         def __init__(self, initialization_vector):
    -        if not isinstance(initialization_vector, bytes):
    -            raise TypeError("initialization_vector must be bytes")
    -
    +        utils._check_byteslike("initialization_vector", initialization_vector)
             self._initialization_vector = initialization_vector
     
         initialization_vector = utils.read_only_property("_initialization_vector")
    @@ -179,9 +170,7 @@ class CTR(object):
         name = "CTR"
     
         def __init__(self, nonce):
    -        if not isinstance(nonce, bytes):
    -            raise TypeError("nonce must be bytes")
    -
    +        utils._check_byteslike("nonce", nonce)
             self._nonce = nonce
     
         nonce = utils.read_only_property("_nonce")
    @@ -189,7 +178,7 @@ class CTR(object):
         def validate_for_algorithm(self, algorithm):
             _check_aes_key_length(self, algorithm)
             if len(self.nonce) * 8 != algorithm.block_size:
    -            raise ValueError("Invalid nonce size ({0}) for {1}.".format(
    +            raise ValueError("Invalid nonce size ({}) for {}.".format(
                     len(self.nonce), self.name
                 ))
     
    @@ -206,19 +195,17 @@ class GCM(object):
             # len(initialization_vector) must in [1, 2 ** 64), but it's impossible
             # to actually construct a bytes object that large, so we don't check
             # for it
    -        if not isinstance(initialization_vector, bytes):
    -            raise TypeError("initialization_vector must be bytes")
    +        utils._check_byteslike("initialization_vector", initialization_vector)
             if len(initialization_vector) == 0:
                 raise ValueError("initialization_vector must be at least 1 byte")
             self._initialization_vector = initialization_vector
             if tag is not None:
    -            if not isinstance(tag, bytes):
    -                raise TypeError("tag must be bytes or None")
    +            utils._check_bytes("tag", tag)
                 if min_tag_length < 4:
                     raise ValueError("min_tag_length must be >= 4")
                 if len(tag) < min_tag_length:
                     raise ValueError(
    -                    "Authentication tag must be {0} bytes or longer.".format(
    +                    "Authentication tag must be {} bytes or longer.".format(
                             min_tag_length)
                     )
             self._tag = tag
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/cmac.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/cmac.py
    index 77537f0..95a8d97 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/cmac.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/cmac.py
    @@ -9,10 +9,9 @@ from cryptography.exceptions import (
         AlreadyFinalized, UnsupportedAlgorithm, _Reasons
     )
     from cryptography.hazmat.backends.interfaces import CMACBackend
    -from cryptography.hazmat.primitives import ciphers, mac
    +from cryptography.hazmat.primitives import ciphers
     
     
    -@utils.register_interface(mac.MACContext)
     class CMAC(object):
         def __init__(self, algorithm, backend, ctx=None):
             if not isinstance(backend, CMACBackend):
    @@ -36,8 +35,8 @@ class CMAC(object):
         def update(self, data):
             if self._ctx is None:
                 raise AlreadyFinalized("Context was already finalized.")
    -        if not isinstance(data, bytes):
    -            raise TypeError("data must be bytes.")
    +
    +        utils._check_bytes("data", data)
             self._ctx.update(data)
     
         def finalize(self):
    @@ -48,8 +47,7 @@ class CMAC(object):
             return digest
     
         def verify(self, signature):
    -        if not isinstance(signature, bytes):
    -            raise TypeError("signature must be bytes.")
    +        utils._check_bytes("signature", signature)
             if self._ctx is None:
                 raise AlreadyFinalized("Context was already finalized.")
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/constant_time.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/constant_time.py
    index 0e987ea..35ceafe 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/constant_time.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/constant_time.py
    @@ -21,9 +21,9 @@ if hasattr(hmac, "compare_digest"):
     else:
         warnings.warn(
             "Support for your Python version is deprecated. The next version of "
    -        "cryptography will remove support. Please upgrade to a 2.7.x "
    -        "release that supports hmac.compare_digest as soon as possible.",
    -        utils.DeprecatedIn23,
    +        "cryptography will remove support. Please upgrade to a release "
    +        "(2.7.7+) that supports hmac.compare_digest as soon as possible.",
    +        utils.PersistentlyDeprecated2018,
         )
     
         def bytes_eq(a, b):
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hashes.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hashes.py
    index 3f3aadd..9be2b60 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hashes.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hashes.py
    @@ -57,6 +57,13 @@ class HashContext(object):
             """
     
     
    +@six.add_metaclass(abc.ABCMeta)
    +class ExtendableOutputFunction(object):
    +    """
    +    An interface for extendable output functions.
    +    """
    +
    +
     @utils.register_interface(HashContext)
     class Hash(object):
         def __init__(self, algorithm, backend, ctx=None):
    @@ -82,8 +89,7 @@ class Hash(object):
         def update(self, data):
             if self._ctx is None:
                 raise AlreadyFinalized("Context was already finalized.")
    -        if not isinstance(data, bytes):
    -            raise TypeError("data must be bytes.")
    +        utils._check_byteslike("data", data)
             self._ctx.update(data)
     
         def copy(self):
    @@ -108,6 +114,20 @@ class SHA1(object):
         block_size = 64
     
     
    +@utils.register_interface(HashAlgorithm)
    +class SHA512_224(object):  # noqa: N801
    +    name = "sha512-224"
    +    digest_size = 28
    +    block_size = 128
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +class SHA512_256(object):  # noqa: N801
    +    name = "sha512-256"
    +    digest_size = 32
    +    block_size = 128
    +
    +
     @utils.register_interface(HashAlgorithm)
     class SHA224(object):
         name = "sha224"
    @@ -136,6 +156,64 @@ class SHA512(object):
         block_size = 128
     
     
    +@utils.register_interface(HashAlgorithm)
    +class SHA3_224(object):  # noqa: N801
    +    name = "sha3-224"
    +    digest_size = 28
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +class SHA3_256(object):  # noqa: N801
    +    name = "sha3-256"
    +    digest_size = 32
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +class SHA3_384(object):  # noqa: N801
    +    name = "sha3-384"
    +    digest_size = 48
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +class SHA3_512(object):  # noqa: N801
    +    name = "sha3-512"
    +    digest_size = 64
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +@utils.register_interface(ExtendableOutputFunction)
    +class SHAKE128(object):
    +    name = "shake128"
    +
    +    def __init__(self, digest_size):
    +        if not isinstance(digest_size, six.integer_types):
    +            raise TypeError("digest_size must be an integer")
    +
    +        if digest_size < 1:
    +            raise ValueError("digest_size must be a positive integer")
    +
    +        self._digest_size = digest_size
    +
    +    digest_size = utils.read_only_property("_digest_size")
    +
    +
    +@utils.register_interface(HashAlgorithm)
    +@utils.register_interface(ExtendableOutputFunction)
    +class SHAKE256(object):
    +    name = "shake256"
    +
    +    def __init__(self, digest_size):
    +        if not isinstance(digest_size, six.integer_types):
    +            raise TypeError("digest_size must be an integer")
    +
    +        if digest_size < 1:
    +            raise ValueError("digest_size must be a positive integer")
    +
    +        self._digest_size = digest_size
    +
    +    digest_size = utils.read_only_property("_digest_size")
    +
    +
     @utils.register_interface(HashAlgorithm)
     class MD5(object):
         name = "md5"
    @@ -151,13 +229,9 @@ class BLAKE2b(object):
         block_size = 128
     
         def __init__(self, digest_size):
    -        if (
    -            digest_size > self._max_digest_size or
    -            digest_size < self._min_digest_size
    -        ):
    -            raise ValueError("Digest size must be {0}-{1}".format(
    -                self._min_digest_size, self._max_digest_size)
    -            )
    +
    +        if digest_size != 64:
    +            raise ValueError("Digest size must be 64")
     
             self._digest_size = digest_size
     
    @@ -172,13 +246,9 @@ class BLAKE2s(object):
         _min_digest_size = 1
     
         def __init__(self, digest_size):
    -        if (
    -            digest_size > self._max_digest_size or
    -            digest_size < self._min_digest_size
    -        ):
    -            raise ValueError("Digest size must be {0}-{1}".format(
    -                self._min_digest_size, self._max_digest_size)
    -            )
    +
    +        if digest_size != 32:
    +            raise ValueError("Digest size must be 32")
     
             self._digest_size = digest_size
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hmac.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hmac.py
    index 2e9a4e2..9eceeac 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hmac.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/hmac.py
    @@ -9,10 +9,9 @@ from cryptography.exceptions import (
         AlreadyFinalized, UnsupportedAlgorithm, _Reasons
     )
     from cryptography.hazmat.backends.interfaces import HMACBackend
    -from cryptography.hazmat.primitives import hashes, mac
    +from cryptography.hazmat.primitives import hashes
     
     
    -@utils.register_interface(mac.MACContext)
     @utils.register_interface(hashes.HashContext)
     class HMAC(object):
         def __init__(self, key, algorithm, backend, ctx=None):
    @@ -38,8 +37,7 @@ class HMAC(object):
         def update(self, data):
             if self._ctx is None:
                 raise AlreadyFinalized("Context was already finalized.")
    -        if not isinstance(data, bytes):
    -            raise TypeError("data must be bytes.")
    +        utils._check_byteslike("data", data)
             self._ctx.update(data)
     
         def copy(self):
    @@ -60,8 +58,7 @@ class HMAC(object):
             return digest
     
         def verify(self, signature):
    -        if not isinstance(signature, bytes):
    -            raise TypeError("signature must be bytes.")
    +        utils._check_bytes("signature", signature)
             if self._ctx is None:
                 raise AlreadyFinalized("Context was already finalized.")
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/concatkdf.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/concatkdf.py
    index c6399e4..7cb6385 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/concatkdf.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/concatkdf.py
    @@ -24,17 +24,15 @@ def _common_args_checks(algorithm, length, otherinfo):
         max_length = algorithm.digest_size * (2 ** 32 - 1)
         if length > max_length:
             raise ValueError(
    -            "Can not derive keys larger than {0} bits.".format(
    +            "Can not derive keys larger than {} bits.".format(
                     max_length
                 ))
    -    if not (otherinfo is None or isinstance(otherinfo, bytes)):
    -        raise TypeError("otherinfo must be bytes.")
    +    if otherinfo is not None:
    +        utils._check_bytes("otherinfo", otherinfo)
     
     
     def _concatkdf_derive(key_material, length, auxfn, otherinfo):
    -    if not isinstance(key_material, bytes):
    -        raise TypeError("key_material must be bytes.")
    -
    +    utils._check_byteslike("key_material", key_material)
         output = [b""]
         outlen = 0
         counter = 1
    @@ -96,10 +94,11 @@ class ConcatKDFHMAC(object):
             if self._otherinfo is None:
                 self._otherinfo = b""
     
    -        if not (salt is None or isinstance(salt, bytes)):
    -            raise TypeError("salt must be bytes.")
             if salt is None:
                 salt = b"\x00" * algorithm.block_size
    +        else:
    +            utils._check_bytes("salt", salt)
    +
             self._salt = salt
     
             if not isinstance(backend, HMACBackend):
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/hkdf.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/hkdf.py
    index 917b4e9..01f0f28 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/hkdf.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/hkdf.py
    @@ -26,11 +26,10 @@ class HKDF(object):
     
             self._algorithm = algorithm
     
    -        if not (salt is None or isinstance(salt, bytes)):
    -            raise TypeError("salt must be bytes.")
    -
             if salt is None:
                 salt = b"\x00" * self._algorithm.digest_size
    +        else:
    +            utils._check_bytes("salt", salt)
     
             self._salt = salt
     
    @@ -44,9 +43,7 @@ class HKDF(object):
             return h.finalize()
     
         def derive(self, key_material):
    -        if not isinstance(key_material, bytes):
    -            raise TypeError("key_material must be bytes.")
    -
    +        utils._check_byteslike("key_material", key_material)
             return self._hkdf_expand.derive(self._extract(key_material))
     
         def verify(self, key_material, expected_key):
    @@ -71,17 +68,16 @@ class HKDFExpand(object):
     
             if length > max_length:
                 raise ValueError(
    -                "Can not derive keys larger than {0} octets.".format(
    +                "Can not derive keys larger than {} octets.".format(
                         max_length
                     ))
     
             self._length = length
     
    -        if not (info is None or isinstance(info, bytes)):
    -            raise TypeError("info must be bytes.")
    -
             if info is None:
                 info = b""
    +        else:
    +            utils._check_bytes("info", info)
     
             self._info = info
     
    @@ -102,9 +98,7 @@ class HKDFExpand(object):
             return b"".join(output)[:self._length]
     
         def derive(self, key_material):
    -        if not isinstance(key_material, bytes):
    -            raise TypeError("key_material must be bytes.")
    -
    +        utils._check_byteslike("key_material", key_material)
             if self._used:
                 raise AlreadyFinalized
     
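The KDF hunks are all the same shape: salt and info remain strict bytes, key material becomes bytes-like, and the format strings drop their explicit {0} indices. Standard HKDF usage for reference:

```python
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

hkdf = HKDF(
    algorithm=hashes.SHA256(),
    length=32,
    salt=os.urandom(16),
    info=b"handshake data",
    backend=default_backend(),
)
key = hkdf.derive(bytearray(b"input key material"))  # bytes-like now accepted
```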
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/kbkdf.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/kbkdf.py
    index 14de56e..56783a8 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/kbkdf.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/kbkdf.py
    @@ -73,10 +73,8 @@ class KBKDFHMAC(object):
             if context is None:
                 context = b''
     
    -        if (not isinstance(label, bytes) or
    -                not isinstance(context, bytes)):
    -            raise TypeError('label and context must be of type bytes')
    -
    +        utils._check_bytes("label", label)
    +        utils._check_bytes("context", context)
             self._algorithm = algorithm
             self._mode = mode
             self._length = length
    @@ -102,8 +100,7 @@ class KBKDFHMAC(object):
             if self._used:
                 raise AlreadyFinalized
     
    -        if not isinstance(key_material, bytes):
    -            raise TypeError('key_material must be bytes')
    +        utils._check_byteslike("key_material", key_material)
             self._used = True
     
             # inverse floor division (equivalent to ceiling)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/pbkdf2.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/pbkdf2.py
    index f8ce7a3..07d8ac6 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/pbkdf2.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/pbkdf2.py
    @@ -24,15 +24,14 @@ class PBKDF2HMAC(object):
     
             if not backend.pbkdf2_hmac_supported(algorithm):
                 raise UnsupportedAlgorithm(
    -                "{0} is not supported for PBKDF2 by this backend.".format(
    +                "{} is not supported for PBKDF2 by this backend.".format(
                         algorithm.name),
                     _Reasons.UNSUPPORTED_HASH
                 )
             self._used = False
             self._algorithm = algorithm
             self._length = length
    -        if not isinstance(salt, bytes):
    -            raise TypeError("salt must be bytes.")
    +        utils._check_bytes("salt", salt)
             self._salt = salt
             self._iterations = iterations
             self._backend = backend
    @@ -42,8 +41,7 @@ class PBKDF2HMAC(object):
                 raise AlreadyFinalized("PBKDF2 instances can only be used once.")
             self._used = True
     
    -        if not isinstance(key_material, bytes):
    -            raise TypeError("key_material must be bytes.")
    +        utils._check_byteslike("key_material", key_material)
             return self._backend.derive_pbkdf2_hmac(
                 self._algorithm,
                 self._length,
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/scrypt.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/scrypt.py
    index 77dcf9a..df9745e 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/scrypt.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/scrypt.py
    @@ -30,9 +30,7 @@ class Scrypt(object):
                 )
     
             self._length = length
    -        if not isinstance(salt, bytes):
    -            raise TypeError("salt must be bytes.")
    -
    +        utils._check_bytes("salt", salt)
             if n < 2 or (n & (n - 1)) != 0:
                 raise ValueError("n must be greater than 1 and be a power of 2.")
     
    @@ -54,8 +52,7 @@ class Scrypt(object):
                 raise AlreadyFinalized("Scrypt instances can only be used once.")
             self._used = True
     
    -        if not isinstance(key_material, bytes):
    -            raise TypeError("key_material must be bytes.")
    +        utils._check_byteslike("key_material", key_material)
             return self._backend.derive_scrypt(
                 key_material, self._salt, self._length, self._n, self._r, self._p
             )
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/x963kdf.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/x963kdf.py
    index 83789b3..9eb50b0 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/x963kdf.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/kdf/x963kdf.py
    @@ -26,9 +26,10 @@ class X963KDF(object):
             max_len = algorithm.digest_size * (2 ** 32 - 1)
             if length > max_len:
                 raise ValueError(
    -                "Can not derive keys larger than {0} bits.".format(max_len))
    -        if not (sharedinfo is None or isinstance(sharedinfo, bytes)):
    -            raise TypeError("sharedinfo must be bytes.")
    +                "Can not derive keys larger than {} bits.".format(max_len))
    +        if sharedinfo is not None:
    +            utils._check_bytes("sharedinfo", sharedinfo)
    +
             self._algorithm = algorithm
             self._length = length
             self._sharedinfo = sharedinfo
    @@ -45,10 +46,7 @@ class X963KDF(object):
             if self._used:
                 raise AlreadyFinalized
             self._used = True
    -
    -        if not isinstance(key_material, bytes):
    -            raise TypeError("key_material must be bytes.")
    -
    +        utils._check_byteslike("key_material", key_material)
             output = [b""]
             outlen = 0
             counter = 1
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/mac.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/mac.py
    deleted file mode 100644
    index 4c95190..0000000
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/mac.py
    +++ /dev/null
    @@ -1,37 +0,0 @@
    -# This file is dual licensed under the terms of the Apache License, Version
    -# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    -# for complete details.
    -
    -from __future__ import absolute_import, division, print_function
    -
    -import abc
    -
    -import six
    -
    -
    -@six.add_metaclass(abc.ABCMeta)
    -class MACContext(object):
    -    @abc.abstractmethod
    -    def update(self, data):
    -        """
    -        Processes the provided bytes.
    -        """
    -
    -    @abc.abstractmethod
    -    def finalize(self):
    -        """
    -        Returns the message authentication code as bytes.
    -        """
    -
    -    @abc.abstractmethod
    -    def copy(self):
    -        """
    -        Return a MACContext that is a copy of the current context.
    -        """
    -
    -    @abc.abstractmethod
    -    def verify(self, signature):
    -        """
    -        Checks if the generated message authentication code matches the
    -        signature.
    -        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/padding.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/padding.py
    index a081976..170c802 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/padding.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/padding.py
    @@ -40,8 +40,7 @@ def _byte_padding_update(buffer_, data, block_size):
         if buffer_ is None:
             raise AlreadyFinalized("Context was already finalized.")
     
    -    if not isinstance(data, bytes):
    -        raise TypeError("data must be bytes.")
    +    utils._check_bytes("data", data)
     
         buffer_ += data
     
    @@ -65,8 +64,7 @@ def _byte_unpadding_update(buffer_, data, block_size):
         if buffer_ is None:
             raise AlreadyFinalized("Context was already finalized.")
     
    -    if not isinstance(data, bytes):
    -        raise TypeError("data must be bytes.")
    +    utils._check_bytes("data", data)
     
         buffer_ += data
     
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/poly1305.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/poly1305.py
    new file mode 100644
    index 0000000..d92f62a
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/poly1305.py
    @@ -0,0 +1,55 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +
    +from cryptography import utils
    +from cryptography.exceptions import (
    +    AlreadyFinalized, UnsupportedAlgorithm, _Reasons
    +)
    +
    +
    +class Poly1305(object):
    +    def __init__(self, key):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not backend.poly1305_supported():
    +            raise UnsupportedAlgorithm(
    +                "poly1305 is not supported by this version of OpenSSL.",
    +                _Reasons.UNSUPPORTED_MAC
    +            )
    +        self._ctx = backend.create_poly1305_ctx(key)
    +
    +    def update(self, data):
    +        if self._ctx is None:
    +            raise AlreadyFinalized("Context was already finalized.")
    +        utils._check_byteslike("data", data)
    +        self._ctx.update(data)
    +
    +    def finalize(self):
    +        if self._ctx is None:
    +            raise AlreadyFinalized("Context was already finalized.")
    +        mac = self._ctx.finalize()
    +        self._ctx = None
    +        return mac
    +
    +    def verify(self, tag):
    +        utils._check_bytes("tag", tag)
    +        if self._ctx is None:
    +            raise AlreadyFinalized("Context was already finalized.")
    +
    +        ctx, self._ctx = self._ctx, None
    +        ctx.verify(tag)
    +
    +    @classmethod
    +    def generate_tag(cls, key, data):
    +        p = Poly1305(key)
    +        p.update(data)
    +        return p.finalize()
    +
    +    @classmethod
    +    def verify_tag(cls, key, data, tag):
    +        p = Poly1305(key)
    +        p.update(data)
    +        p.verify(tag)
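For reference, a short usage sketch of the one-shot Poly1305 helpers added above; it assumes an OpenSSL 1.1.1+ backend where poly1305_supported() is true, and the key/message values are illustrative:

    import os

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives import poly1305

    key = os.urandom(32)   # Poly1305 requires a 32-byte one-time key
    tag = poly1305.Poly1305.generate_tag(key, b"message to authenticate")
    try:
        poly1305.Poly1305.verify_tag(key, b"message to authenticate", tag)
    except InvalidSignature:
        print("tag mismatch")
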
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/__init__.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/__init__.py
    new file mode 100644
    index 0000000..f6d4ce9
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/__init__.py
    @@ -0,0 +1,26 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +from cryptography.hazmat.primitives.serialization.base import (
    +    BestAvailableEncryption, Encoding, KeySerializationEncryption,
    +    NoEncryption, ParameterFormat, PrivateFormat, PublicFormat,
    +    load_der_parameters, load_der_private_key, load_der_public_key,
    +    load_pem_parameters, load_pem_private_key, load_pem_public_key,
    +)
    +from cryptography.hazmat.primitives.serialization.ssh import (
    +    load_ssh_public_key
    +)
    +
    +
    +_PEM_DER = (Encoding.PEM, Encoding.DER)
    +
    +__all__ = [
    +    "load_der_parameters", "load_der_private_key", "load_der_public_key",
    +    "load_pem_parameters", "load_pem_private_key", "load_pem_public_key",
    +    "load_ssh_public_key", "Encoding", "PrivateFormat", "PublicFormat",
    +    "ParameterFormat", "KeySerializationEncryption", "BestAvailableEncryption",
    +    "NoEncryption",
    +]
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/base.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/base.py
    new file mode 100644
    index 0000000..4218ea8
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/base.py
    @@ -0,0 +1,82 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import abc
    +from enum import Enum
    +
    +import six
    +
    +from cryptography import utils
    +
    +
    +def load_pem_private_key(data, password, backend):
    +    return backend.load_pem_private_key(data, password)
    +
    +
    +def load_pem_public_key(data, backend):
    +    return backend.load_pem_public_key(data)
    +
    +
    +def load_pem_parameters(data, backend):
    +    return backend.load_pem_parameters(data)
    +
    +
    +def load_der_private_key(data, password, backend):
    +    return backend.load_der_private_key(data, password)
    +
    +
    +def load_der_public_key(data, backend):
    +    return backend.load_der_public_key(data)
    +
    +
    +def load_der_parameters(data, backend):
    +    return backend.load_der_parameters(data)
    +
    +
    +class Encoding(Enum):
    +    PEM = "PEM"
    +    DER = "DER"
    +    OpenSSH = "OpenSSH"
    +    Raw = "Raw"
    +    X962 = "ANSI X9.62"
    +
    +
    +class PrivateFormat(Enum):
    +    PKCS8 = "PKCS8"
    +    TraditionalOpenSSL = "TraditionalOpenSSL"
    +    Raw = "Raw"
    +
    +
    +class PublicFormat(Enum):
    +    SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
    +    PKCS1 = "Raw PKCS#1"
    +    OpenSSH = "OpenSSH"
    +    Raw = "Raw"
    +    CompressedPoint = "X9.62 Compressed Point"
    +    UncompressedPoint = "X9.62 Uncompressed Point"
    +
    +
    +class ParameterFormat(Enum):
    +    PKCS3 = "PKCS3"
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class KeySerializationEncryption(object):
    +    pass
    +
    +
    +@utils.register_interface(KeySerializationEncryption)
    +class BestAvailableEncryption(object):
    +    def __init__(self, password):
    +        if not isinstance(password, bytes) or len(password) == 0:
    +            raise ValueError("Password must be 1 or more bytes.")
    +
    +        self.password = password
    +
    +
    +@utils.register_interface(KeySerializationEncryption)
    +class NoEncryption(object):
    +    pass
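The serialization module becomes a package here, but the public import path is unchanged. A sketch of the re-exported loaders and enums (the key.pem path is hypothetical):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization

    with open("key.pem", "rb") as f:   # hypothetical input file
        key = serialization.load_pem_private_key(f.read(), None, default_backend())

    pem = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    )
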
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/pkcs12.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/pkcs12.py
    new file mode 100644
    index 0000000..98161d5
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/pkcs12.py
    @@ -0,0 +1,9 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +
    +def load_key_and_certificates(data, password, backend):
    +    return backend.load_key_and_certificates_from_pkcs12(data, password)
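load_key_and_certificates returns a (private key, certificate, additional certificates) triple. A sketch, with a hypothetical bundle path and password:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import pkcs12

    with open("bundle.p12", "rb") as f:   # hypothetical PKCS#12 file
        key, cert, extra_certs = pkcs12.load_key_and_certificates(
            f.read(), b"password", default_backend()
        )
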
    diff --git a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization.py b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/ssh.py
    similarity index 70%
    rename from server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization.py
    rename to server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/ssh.py
    index bd09e6e..a1d6c8c 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/ssh.py
    @@ -4,40 +4,14 @@
     
     from __future__ import absolute_import, division, print_function
     
    -import abc
     import base64
     import struct
    -from enum import Enum
     
     import six
     
     from cryptography import utils
     from cryptography.exceptions import UnsupportedAlgorithm
    -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
    -
    -
    -def load_pem_private_key(data, password, backend):
    -    return backend.load_pem_private_key(data, password)
    -
    -
    -def load_pem_public_key(data, backend):
    -    return backend.load_pem_public_key(data)
    -
    -
    -def load_pem_parameters(data, backend):
    -    return backend.load_pem_parameters(data)
    -
    -
    -def load_der_private_key(data, password, backend):
    -    return backend.load_der_private_key(data, password)
    -
    -
    -def load_der_public_key(data, backend):
    -    return backend.load_der_public_key(data)
    -
    -
    -def load_der_parameters(data, backend):
    -    return backend.load_der_parameters(data)
    +from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa
     
     
     def load_ssh_public_key(data, backend):
    @@ -57,6 +31,8 @@ def load_ssh_public_key(data, backend):
             b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521',
         ]:
             loader = _load_ssh_ecdsa_public_key
    +    elif key_type == b'ssh-ed25519':
    +        loader = _load_ssh_ed25519_public_key
         else:
             raise UnsupportedAlgorithm('Key type is not supported.')
     
    @@ -125,8 +101,16 @@ def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend):
                 "Compressed elliptic curve points are not supported"
             )
     
    -    numbers = ec.EllipticCurvePublicNumbers.from_encoded_point(curve, data)
    -    return numbers.public_key(backend)
    +    return ec.EllipticCurvePublicKey.from_encoded_point(curve, data)
    +
    +
    +def _load_ssh_ed25519_public_key(expected_key_type, decoded_data, backend):
    +    data, rest = _ssh_read_next_string(decoded_data)
    +
    +    if rest:
    +        raise ValueError('Key body contains extra bytes.')
    +
    +    return ed25519.Ed25519PublicKey.from_public_bytes(data)
     
     
     def _ssh_read_next_string(data):
    @@ -167,43 +151,3 @@ def _ssh_write_mpint(value):
         if six.indexbytes(data, 0) & 0x80:
             data = b"\x00" + data
         return _ssh_write_string(data)
    -
    -
    -class Encoding(Enum):
    -    PEM = "PEM"
    -    DER = "DER"
    -    OpenSSH = "OpenSSH"
    -
    -
    -class PrivateFormat(Enum):
    -    PKCS8 = "PKCS8"
    -    TraditionalOpenSSL = "TraditionalOpenSSL"
    -
    -
    -class PublicFormat(Enum):
    -    SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
    -    PKCS1 = "Raw PKCS#1"
    -    OpenSSH = "OpenSSH"
    -
    -
    -class ParameterFormat(Enum):
    -    PKCS3 = "PKCS3"
    -
    -
    -@six.add_metaclass(abc.ABCMeta)
    -class KeySerializationEncryption(object):
    -    pass
    -
    -
    -@utils.register_interface(KeySerializationEncryption)
    -class BestAvailableEncryption(object):
    -    def __init__(self, password):
    -        if not isinstance(password, bytes) or len(password) == 0:
    -            raise ValueError("Password must be 1 or more bytes.")
    -
    -        self.password = password
    -
    -
    -@utils.register_interface(KeySerializationEncryption)
    -class NoEncryption(object):
    -    pass
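A round-trip sketch of the new ssh-ed25519 branch in load_ssh_public_key; it assumes a backend with Ed25519 support (OpenSSL 1.1.1b+):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ed25519
    from cryptography.hazmat.primitives.serialization import (
        Encoding, PublicFormat, load_ssh_public_key
    )

    # Serialize a fresh key to the OpenSSH wire form, then load it back.
    original = ed25519.Ed25519PrivateKey.generate().public_key()
    ssh_line = original.public_bytes(Encoding.OpenSSH, PublicFormat.OpenSSH)
    loaded = load_ssh_public_key(ssh_line, default_backend())
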
    diff --git a/server/www/packages/packages-windows/x86/cryptography/utils.py b/server/www/packages/packages-windows/x86/cryptography/utils.py
    index 3d45a77..e895aa0 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/utils.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/utils.py
    @@ -20,14 +20,22 @@ class CryptographyDeprecationWarning(UserWarning):
     # Several APIs were deprecated with no specific end-of-life date because of the
     # ubiquity of their use. They should not be removed until we agree on when that
     # cycle ends.
    -PersistentlyDeprecated = CryptographyDeprecationWarning
    -DeprecatedIn21 = CryptographyDeprecationWarning
    -DeprecatedIn23 = CryptographyDeprecationWarning
    +PersistentlyDeprecated2017 = CryptographyDeprecationWarning
    +PersistentlyDeprecated2018 = CryptographyDeprecationWarning
    +PersistentlyDeprecated2019 = CryptographyDeprecationWarning
    +DeprecatedIn27 = CryptographyDeprecationWarning
     
     
     def _check_bytes(name, value):
         if not isinstance(value, bytes):
    -        raise TypeError("{0} must be bytes".format(name))
    +        raise TypeError("{} must be bytes".format(name))
    +
    +
    +def _check_byteslike(name, value):
    +    try:
    +        memoryview(value)
    +    except TypeError:
    +        raise TypeError("{} must be bytes-like".format(name))
     
     
     def read_only_property(name):
    @@ -90,7 +98,7 @@ def verify_interface(iface, klass):
         for method in iface.__abstractmethods__:
             if not hasattr(klass, method):
                 raise InterfaceNotImplemented(
    -                "{0} is missing a {1!r} method".format(klass, method)
    +                "{} is missing a {!r} method".format(klass, method)
                 )
             if isinstance(getattr(iface, method), abc.abstractproperty):
                 # Can't properly verify these yet.
    @@ -99,8 +107,8 @@ def verify_interface(iface, klass):
             actual = signature(getattr(klass, method))
             if sig != actual:
                 raise InterfaceNotImplemented(
    -                "{0}.{1}'s signature differs from the expected. Expected: "
    -                "{2!r}. Received: {3!r}".format(
    +                "{}.{}'s signature differs from the expected. Expected: "
    +                "{!r}. Received: {!r}".format(
                         klass, method, sig, actual
                     )
                 )
    @@ -152,7 +160,7 @@ def deprecated(value, module_name, message, warning_class):
     
     
     def cached_property(func):
    -    cached_name = "_cached_{0}".format(func)
    +    cached_name = "_cached_{}".format(func)
         sentinel = object()
     
         def inner(instance):
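The distinction between the two checks added above, shown only for illustration (both are internal helpers): _check_bytes demands exactly bytes, while _check_byteslike accepts anything memoryview() can wrap:

    from cryptography import utils

    utils._check_bytes("data", b"\x00\x01")             # ok: exactly bytes
    utils._check_byteslike("data", bytearray(4))        # ok: bytes-like
    utils._check_byteslike("data", memoryview(b"abc"))  # ok: bytes-like

    try:
        utils._check_bytes("data", bytearray(4))        # bytearray is not bytes
    except TypeError as exc:
        print(exc)                                      # "data must be bytes"
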
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/__init__.py b/server/www/packages/packages-windows/x86/cryptography/x509/__init__.py
    index d2f9b04..b761e26 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/__init__.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/__init__.py
    @@ -21,8 +21,9 @@ from cryptography.x509.extensions import (
         DeltaCRLIndicator, DistributionPoint, DuplicateExtension, ExtendedKeyUsage,
         Extension, ExtensionNotFound, ExtensionType, Extensions, FreshestCRL,
         GeneralNames, InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName,
    -    KeyUsage, NameConstraints, NoticeReference, OCSPNoCheck, PolicyConstraints,
    -    PolicyInformation, PrecertificateSignedCertificateTimestamps, ReasonFlags,
    +    IssuingDistributionPoint, KeyUsage, NameConstraints, NoticeReference,
    +    OCSPNoCheck, OCSPNonce, PolicyConstraints, PolicyInformation,
    +    PrecertPoison, PrecertificateSignedCertificateTimestamps, ReasonFlags,
         SubjectAlternativeName, SubjectKeyIdentifier, TLSFeature, TLSFeatureType,
         UnrecognizedExtension, UserNotice
     )
    @@ -133,6 +134,7 @@ __all__ = [
         "Extension",
         "ExtendedKeyUsage",
         "FreshestCRL",
    +    "IssuingDistributionPoint",
         "TLSFeature",
         "TLSFeatureType",
         "OCSPNoCheck",
    @@ -182,4 +184,6 @@ __all__ = [
         "UnrecognizedExtension",
         "PolicyConstraints",
         "PrecertificateSignedCertificateTimestamps",
    +    "PrecertPoison",
    +    "OCSPNonce",
     ]
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/base.py b/server/www/packages/packages-windows/x86/cryptography/x509/base.py
    index b14499c..3983c9b 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/base.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/base.py
    @@ -12,12 +12,21 @@ from enum import Enum
     import six
     
     from cryptography import utils
    -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
    +from cryptography.hazmat.primitives.asymmetric import (
    +    dsa, ec, ed25519, ed448, rsa
    +)
     from cryptography.x509.extensions import Extension, ExtensionType
     from cryptography.x509.name import Name
     
     
    -_UNIX_EPOCH = datetime.datetime(1970, 1, 1)
    +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)
    +
    +
    +def _reject_duplicate_extension(extension, extensions):
    +    # This is quadratic in the number of extensions
    +    for e in extensions:
    +        if e.oid == extension.oid:
    +            raise ValueError('This extension has already been set.')
     
     
     def _convert_to_naive_utc_time(time):
    @@ -257,6 +266,24 @@ class CertificateRevocationList(object):
             Checks not equal.
             """
     
    +    @abc.abstractmethod
    +    def __len__(self):
    +        """
    +        Number of revoked certificates in the CRL.
    +        """
    +
    +    @abc.abstractmethod
    +    def __getitem__(self, idx):
    +        """
    +        Returns a revoked certificate (or slice of revoked certificates).
    +        """
    +
    +    @abc.abstractmethod
    +    def __iter__(self):
    +        """
    +        Iterator over the revoked certificates
    +        """
    +
         @abc.abstractmethod
         def is_signature_valid(self, public_key):
             """
    @@ -388,11 +415,8 @@ class CertificateSigningRequestBuilder(object):
                 raise TypeError("extension must be an ExtensionType")
     
             extension = Extension(extension.oid, critical, extension)
    +        _reject_duplicate_extension(extension, self._extensions)
     
    -        # TODO: This is quadratic in the number of extensions
    -        for e in self._extensions:
    -            if e.oid == extension.oid:
    -                raise ValueError('This extension has already been set.')
             return CertificateSigningRequestBuilder(
                 self._subject_name, self._extensions + [extension]
             )
    @@ -452,9 +476,12 @@ class CertificateBuilder(object):
             Sets the requestor's public key (as found in the signing request).
             """
             if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
    -                                ec.EllipticCurvePublicKey)):
    +                                ec.EllipticCurvePublicKey,
    +                                ed25519.Ed25519PublicKey,
    +                                ed448.Ed448PublicKey)):
                 raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
    -                            ' or EllipticCurvePublicKey.')
    +                            ' EllipticCurvePublicKey, Ed25519PublicKey or'
    +                            ' Ed448PublicKey.')
             if self._public_key is not None:
                 raise ValueError('The public key may only be set once.')
             return CertificateBuilder(
    @@ -494,9 +521,9 @@ class CertificateBuilder(object):
             if self._not_valid_before is not None:
                 raise ValueError('The not valid before may only be set once.')
             time = _convert_to_naive_utc_time(time)
    -        if time <= _UNIX_EPOCH:
    -            raise ValueError('The not valid before date must be after the unix'
    -                             ' epoch (1970 January 1).')
    +        if time < _EARLIEST_UTC_TIME:
    +            raise ValueError('The not valid before date must be on or after'
+                             ' 1950 January 1.')
             if self._not_valid_after is not None and time > self._not_valid_after:
                 raise ValueError(
                     'The not valid before date must be before the not valid after '
    @@ -517,9 +544,9 @@ class CertificateBuilder(object):
             if self._not_valid_after is not None:
                 raise ValueError('The not valid after may only be set once.')
             time = _convert_to_naive_utc_time(time)
    -        if time <= _UNIX_EPOCH:
    -            raise ValueError('The not valid after date must be after the unix'
    -                             ' epoch (1970 January 1).')
    +        if time < _EARLIEST_UTC_TIME:
    +            raise ValueError('The not valid after date must be on or after'
    +                             ' 1950 January 1.')
             if (self._not_valid_before is not None and
                     time < self._not_valid_before):
                 raise ValueError(
    @@ -540,11 +567,7 @@ class CertificateBuilder(object):
                 raise TypeError("extension must be an ExtensionType")
     
             extension = Extension(extension.oid, critical, extension)
    -
    -        # TODO: This is quadratic in the number of extensions
    -        for e in self._extensions:
    -            if e.oid == extension.oid:
    -                raise ValueError('This extension has already been set.')
    +        _reject_duplicate_extension(extension, self._extensions)
     
             return CertificateBuilder(
                 self._issuer_name, self._subject_name,
    @@ -602,9 +625,9 @@ class CertificateRevocationListBuilder(object):
             if self._last_update is not None:
                 raise ValueError('Last update may only be set once.')
             last_update = _convert_to_naive_utc_time(last_update)
    -        if last_update <= _UNIX_EPOCH:
    -            raise ValueError('The last update date must be after the unix'
    -                             ' epoch (1970 January 1).')
    +        if last_update < _EARLIEST_UTC_TIME:
    +            raise ValueError('The last update date must be on or after'
    +                             ' 1950 January 1.')
             if self._next_update is not None and last_update > self._next_update:
                 raise ValueError(
                     'The last update date must be before the next update date.'
    @@ -620,9 +643,9 @@ class CertificateRevocationListBuilder(object):
             if self._next_update is not None:
                 raise ValueError('Last update may only be set once.')
             next_update = _convert_to_naive_utc_time(next_update)
    -        if next_update <= _UNIX_EPOCH:
    -            raise ValueError('The last update date must be after the unix'
    -                             ' epoch (1970 January 1).')
    +        if next_update < _EARLIEST_UTC_TIME:
+            raise ValueError('The next update date must be on or after'
    +                             ' 1950 January 1.')
             if self._last_update is not None and next_update < self._last_update:
                 raise ValueError(
                     'The next update date must be after the last update date.'
    @@ -640,11 +663,7 @@ class CertificateRevocationListBuilder(object):
                 raise TypeError("extension must be an ExtensionType")
     
             extension = Extension(extension.oid, critical, extension)
    -
    -        # TODO: This is quadratic in the number of extensions
    -        for e in self._extensions:
    -            if e.oid == extension.oid:
    -                raise ValueError('This extension has already been set.')
    +        _reject_duplicate_extension(extension, self._extensions)
             return CertificateRevocationListBuilder(
                 self._issuer_name, self._last_update, self._next_update,
                 self._extensions + [extension], self._revoked_certificates
    @@ -706,9 +725,9 @@ class RevokedCertificateBuilder(object):
             if self._revocation_date is not None:
                 raise ValueError('The revocation date may only be set once.')
             time = _convert_to_naive_utc_time(time)
    -        if time <= _UNIX_EPOCH:
    -            raise ValueError('The revocation date must be after the unix'
    -                             ' epoch (1970 January 1).')
    +        if time < _EARLIEST_UTC_TIME:
    +            raise ValueError('The revocation date must be on or after'
    +                             ' 1950 January 1.')
             return RevokedCertificateBuilder(
                 self._serial_number, time, self._extensions
             )
    @@ -718,11 +737,7 @@ class RevokedCertificateBuilder(object):
                 raise TypeError("extension must be an ExtensionType")
     
             extension = Extension(extension.oid, critical, extension)
    -
    -        # TODO: This is quadratic in the number of extensions
    -        for e in self._extensions:
    -            if e.oid == extension.oid:
    -                raise ValueError('This extension has already been set.')
    +        _reject_duplicate_extension(extension, self._extensions)
             return RevokedCertificateBuilder(
                 self._serial_number, self._revocation_date,
                 self._extensions + [extension]
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/extensions.py b/server/www/packages/packages-windows/x86/cryptography/x509/extensions.py
    index eb4b927..ad90e9b 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/extensions.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/extensions.py
    @@ -8,13 +8,15 @@ import abc
     import datetime
     import hashlib
     import ipaddress
    +import warnings
     from enum import Enum
     
    -from asn1crypto.keys import PublicKeyInfo
    -
     import six
     
     from cryptography import utils
    +from cryptography.hazmat._der import (
    +    BIT_STRING, DERReader, OBJECT_IDENTIFIER, SEQUENCE
    +)
     from cryptography.hazmat.primitives import constant_time, serialization
     from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
     from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
    @@ -24,7 +26,7 @@ from cryptography.x509.certificate_transparency import (
     from cryptography.x509.general_name import GeneralName, IPAddress, OtherName
     from cryptography.x509.name import RelativeDistinguishedName
     from cryptography.x509.oid import (
    -    CRLEntryExtensionOID, ExtensionOID, ObjectIdentifier
    +    CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID, ObjectIdentifier,
     )
     
     
    @@ -35,7 +37,10 @@ def _key_identifier_from_public_key(public_key):
                 serialization.PublicFormat.PKCS1,
             )
         elif isinstance(public_key, EllipticCurvePublicKey):
    -        data = public_key.public_numbers().encode_point()
    +        data = public_key.public_bytes(
    +            serialization.Encoding.X962,
    +            serialization.PublicFormat.UncompressedPoint
    +        )
         else:
             # This is a very slow way to do this.
             serialized = public_key.public_bytes(
    @@ -43,11 +48,41 @@ def _key_identifier_from_public_key(public_key):
                 serialization.PublicFormat.SubjectPublicKeyInfo
             )
     
    -        data = six.binary_type(PublicKeyInfo.load(serialized)['public_key'])
    +        reader = DERReader(serialized)
    +        with reader.read_single_element(SEQUENCE) as public_key_info:
    +            algorithm = public_key_info.read_element(SEQUENCE)
    +            public_key = public_key_info.read_element(BIT_STRING)
    +
    +        # Double-check the algorithm structure.
    +        with algorithm:
    +            algorithm.read_element(OBJECT_IDENTIFIER)
    +            if not algorithm.is_empty():
    +                # Skip the optional parameters field.
    +                algorithm.read_any_element()
    +
+        # BIT STRING contents begin with the number of padding bits added. It
    +        # must be zero for SubjectPublicKeyInfo structures.
    +        if public_key.read_byte() != 0:
    +            raise ValueError('Invalid public key encoding')
    +
    +        data = public_key.data
     
         return hashlib.sha1(data).digest()
     
     
    +def _make_sequence_methods(field_name):
    +    def len_method(self):
    +        return len(getattr(self, field_name))
    +
    +    def iter_method(self):
    +        return iter(getattr(self, field_name))
    +
    +    def getitem_method(self, idx):
    +        return getattr(self, field_name)[idx]
    +
    +    return len_method, iter_method, getitem_method
    +
    +
     class DuplicateExtension(Exception):
         def __init__(self, msg, oid):
             super(DuplicateExtension, self).__init__(msg)
    @@ -78,7 +113,7 @@ class Extensions(object):
                 if ext.oid == oid:
                     return ext
     
    -        raise ExtensionNotFound("No {0} extension was found".format(oid), oid)
    +        raise ExtensionNotFound("No {} extension was found".format(oid), oid)
     
         def get_extension_for_class(self, extclass):
             if extclass is UnrecognizedExtension:
    @@ -93,21 +128,14 @@ class Extensions(object):
                     return ext
     
             raise ExtensionNotFound(
    -            "No {0} extension was found".format(extclass), extclass.oid
    +            "No {} extension was found".format(extclass), extclass.oid
             )
     
    -    def __iter__(self):
    -        return iter(self._extensions)
    -
    -    def __len__(self):
    -        return len(self._extensions)
    -
    -    def __getitem__(self, idx):
    -        return self._extensions[idx]
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
     
         def __repr__(self):
             return (
    -            "".format(self._extensions)
    +            "".format(self._extensions)
             )
     
     
    @@ -134,7 +162,7 @@ class CRLNumber(object):
             return hash(self.crl_number)
     
         def __repr__(self):
    -        return "".format(self.crl_number)
    +        return "".format(self.crl_number)
     
         crl_number = utils.read_only_property("_crl_number")
     
    @@ -185,8 +213,21 @@ class AuthorityKeyIdentifier(object):
     
         @classmethod
         def from_issuer_subject_key_identifier(cls, ski):
    +        if isinstance(ski, SubjectKeyIdentifier):
    +            digest = ski.digest
    +        else:
    +            digest = ski.value.digest
    +            warnings.warn(
    +                "Extension objects are deprecated as arguments to "
    +                "from_issuer_subject_key_identifier and support will be "
    +                "removed soon. Please migrate to passing a "
    +                "SubjectKeyIdentifier directly.",
    +                utils.DeprecatedIn27,
    +                stacklevel=2,
    +            )
    +
             return cls(
    -            key_identifier=ski.value.digest,
    +            key_identifier=digest,
                 authority_cert_issuer=None,
                 authority_cert_serial_number=None
             )
    @@ -272,14 +313,10 @@ class AuthorityInformationAccess(object):
     
             self._descriptions = descriptions
     
    -    def __iter__(self):
    -        return iter(self._descriptions)
    -
    -    def __len__(self):
    -        return len(self._descriptions)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
     
         def __repr__(self):
    -        return "".format(self._descriptions)
    +        return "".format(self._descriptions)
     
         def __eq__(self, other):
             if not isinstance(other, AuthorityInformationAccess):
    @@ -290,9 +327,6 @@ class AuthorityInformationAccess(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._descriptions[idx]
    -
         def __hash__(self):
             return hash(tuple(self._descriptions))
     
    @@ -419,14 +453,12 @@ class CRLDistributionPoints(object):
     
             self._distribution_points = distribution_points
     
    -    def __iter__(self):
    -        return iter(self._distribution_points)
    -
    -    def __len__(self):
    -        return len(self._distribution_points)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods(
    +        "_distribution_points"
    +    )
     
         def __repr__(self):
    -        return "".format(self._distribution_points)
    +        return "".format(self._distribution_points)
     
         def __eq__(self, other):
             if not isinstance(other, CRLDistributionPoints):
    @@ -437,9 +469,6 @@ class CRLDistributionPoints(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._distribution_points[idx]
    -
         def __hash__(self):
             return hash(tuple(self._distribution_points))
     
    @@ -460,14 +489,12 @@ class FreshestCRL(object):
     
             self._distribution_points = distribution_points
     
    -    def __iter__(self):
    -        return iter(self._distribution_points)
    -
    -    def __len__(self):
    -        return len(self._distribution_points)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods(
    +        "_distribution_points"
    +    )
     
         def __repr__(self):
    -        return "".format(self._distribution_points)
    +        return "".format(self._distribution_points)
     
         def __eq__(self, other):
             if not isinstance(other, FreshestCRL):
    @@ -478,9 +505,6 @@ class FreshestCRL(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._distribution_points[idx]
    -
         def __hash__(self):
             return hash(tuple(self._distribution_points))
     
    @@ -541,8 +565,8 @@ class DistributionPoint(object):
         def __repr__(self):
             return (
                 "".format(self)
    +            "tive_name}, reasons={0.reasons}, crl_issuer={0.crl_issuer})>"
    +            .format(self)
             )
     
         def __eq__(self, other):
    @@ -666,14 +690,10 @@ class CertificatePolicies(object):
     
             self._policies = policies
     
    -    def __iter__(self):
    -        return iter(self._policies)
    -
    -    def __len__(self):
    -        return len(self._policies)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
     
         def __repr__(self):
    -        return "".format(self._policies)
    +        return "".format(self._policies)
     
         def __eq__(self, other):
             if not isinstance(other, CertificatePolicies):
    @@ -684,9 +704,6 @@ class CertificatePolicies(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._policies[idx]
    -
         def __hash__(self):
             return hash(tuple(self._policies))
     
    @@ -827,14 +844,10 @@ class ExtendedKeyUsage(object):
     
             self._usages = usages
     
    -    def __iter__(self):
    -        return iter(self._usages)
    -
    -    def __len__(self):
    -        return len(self._usages)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
     
         def __repr__(self):
    -        return "".format(self._usages)
    +        return "".format(self._usages)
     
         def __eq__(self, other):
             if not isinstance(other, ExtendedKeyUsage):
    @@ -853,6 +866,41 @@ class ExtendedKeyUsage(object):
     class OCSPNoCheck(object):
         oid = ExtensionOID.OCSP_NO_CHECK
     
    +    def __eq__(self, other):
    +        if not isinstance(other, OCSPNoCheck):
    +            return NotImplemented
    +
    +        return True
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __hash__(self):
    +        return hash(OCSPNoCheck)
    +
    +    def __repr__(self):
    +        return ""
    +
    +
    +@utils.register_interface(ExtensionType)
    +class PrecertPoison(object):
    +    oid = ExtensionOID.PRECERT_POISON
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, PrecertPoison):
    +            return NotImplemented
    +
    +        return True
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __hash__(self):
    +        return hash(PrecertPoison)
    +
    +    def __repr__(self):
    +        return ""
    +
     
     @utils.register_interface(ExtensionType)
     class TLSFeature(object):
    @@ -871,11 +919,7 @@ class TLSFeature(object):
     
             self._features = features
     
    -    def __iter__(self):
    -        return iter(self._features)
    -
    -    def __len__(self):
    -        return len(self._features)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
     
         def __repr__(self):
             return "".format(self)
    @@ -886,9 +930,6 @@ class TLSFeature(object):
     
             return self._features == other._features
     
    -    def __getitem__(self, idx):
    -        return self._features[idx]
    -
         def __ne__(self, other):
             return not self == other
     
    @@ -907,7 +948,7 @@ class TLSFeatureType(Enum):
         status_request_v2 = 17
     
     
    -_TLS_FEATURE_TYPE_TO_ENUM = dict((x.value, x) for x in TLSFeatureType)
    +_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
     
     
     @utils.register_interface(ExtensionType)
    @@ -1167,12 +1208,7 @@ class GeneralNames(object):
                 )
     
             self._general_names = general_names
    -
    -    def __iter__(self):
    -        return iter(self._general_names)
    -
    -    def __len__(self):
    -        return len(self._general_names)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
     
         def get_values_for_type(self, type):
             # Return the value of each GeneralName, except for OtherName instances
    @@ -1184,7 +1220,7 @@ class GeneralNames(object):
             return list(objs)
     
         def __repr__(self):
    -        return "".format(self._general_names)
    +        return "".format(self._general_names)
     
         def __eq__(self, other):
             if not isinstance(other, GeneralNames):
    @@ -1195,9 +1231,6 @@ class GeneralNames(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._general_names[idx]
    -
         def __hash__(self):
             return hash(tuple(self._general_names))
     
    @@ -1209,17 +1242,13 @@ class SubjectAlternativeName(object):
         def __init__(self, general_names):
             self._general_names = GeneralNames(general_names)
     
    -    def __iter__(self):
    -        return iter(self._general_names)
    -
    -    def __len__(self):
    -        return len(self._general_names)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
     
         def get_values_for_type(self, type):
             return self._general_names.get_values_for_type(type)
     
         def __repr__(self):
    -        return "".format(self._general_names)
    +        return "".format(self._general_names)
     
         def __eq__(self, other):
             if not isinstance(other, SubjectAlternativeName):
    @@ -1227,9 +1256,6 @@ class SubjectAlternativeName(object):
     
             return self._general_names == other._general_names
     
    -    def __getitem__(self, idx):
    -        return self._general_names[idx]
    -
         def __ne__(self, other):
             return not self == other
     
    @@ -1244,17 +1270,13 @@ class IssuerAlternativeName(object):
         def __init__(self, general_names):
             self._general_names = GeneralNames(general_names)
     
    -    def __iter__(self):
    -        return iter(self._general_names)
    -
    -    def __len__(self):
    -        return len(self._general_names)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
     
         def get_values_for_type(self, type):
             return self._general_names.get_values_for_type(type)
     
         def __repr__(self):
    -        return "".format(self._general_names)
    +        return "".format(self._general_names)
     
         def __eq__(self, other):
             if not isinstance(other, IssuerAlternativeName):
    @@ -1265,9 +1287,6 @@ class IssuerAlternativeName(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._general_names[idx]
    -
         def __hash__(self):
             return hash(self._general_names)
     
    @@ -1279,17 +1298,13 @@ class CertificateIssuer(object):
         def __init__(self, general_names):
             self._general_names = GeneralNames(general_names)
     
    -    def __iter__(self):
    -        return iter(self._general_names)
    -
    -    def __len__(self):
    -        return len(self._general_names)
    +    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
     
         def get_values_for_type(self, type):
             return self._general_names.get_values_for_type(type)
     
         def __repr__(self):
    -        return "".format(self._general_names)
    +        return "".format(self._general_names)
     
         def __eq__(self, other):
             if not isinstance(other, CertificateIssuer):
    @@ -1300,9 +1315,6 @@ class CertificateIssuer(object):
         def __ne__(self, other):
             return not self == other
     
    -    def __getitem__(self, idx):
    -        return self._general_names[idx]
    -
         def __hash__(self):
             return hash(self._general_names)
     
    @@ -1318,7 +1330,7 @@ class CRLReason(object):
             self._reason = reason
     
         def __repr__(self):
    -        return "".format(self._reason)
    +        return "".format(self._reason)
     
         def __eq__(self, other):
             if not isinstance(other, CRLReason):
    @@ -1346,7 +1358,7 @@ class InvalidityDate(object):
             self._invalidity_date = invalidity_date
     
         def __repr__(self):
    -        return "".format(
    +        return "".format(
                 self._invalidity_date
             )
     
    @@ -1381,22 +1393,190 @@ class PrecertificateSignedCertificateTimestamps(object):
                 )
             self._signed_certificate_timestamps = signed_certificate_timestamps
     
    -    def __iter__(self):
    -        return iter(self._signed_certificate_timestamps)
    -
    -    def __len__(self):
    -        return len(self._signed_certificate_timestamps)
    -
    -    def __getitem__(self, idx):
    -        return self._signed_certificate_timestamps[idx]
    +    __len__, __iter__, __getitem__ = _make_sequence_methods(
    +        "_signed_certificate_timestamps"
    +    )
     
         def __repr__(self):
             return (
    -            "".format(
    +            "".format(
                     list(self)
                 )
             )
     
    +    def __hash__(self):
    +        return hash(tuple(self._signed_certificate_timestamps))
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, PrecertificateSignedCertificateTimestamps):
    +            return NotImplemented
    +
    +        return (
    +            self._signed_certificate_timestamps ==
    +            other._signed_certificate_timestamps
    +        )
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +
    +@utils.register_interface(ExtensionType)
    +class OCSPNonce(object):
    +    oid = OCSPExtensionOID.NONCE
    +
    +    def __init__(self, nonce):
    +        if not isinstance(nonce, bytes):
    +            raise TypeError("nonce must be bytes")
    +
    +        self._nonce = nonce
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, OCSPNonce):
    +            return NotImplemented
    +
    +        return self.nonce == other.nonce
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __hash__(self):
    +        return hash(self.nonce)
    +
    +    def __repr__(self):
    +        return "".format(self)
    +
    +    nonce = utils.read_only_property("_nonce")
    +
    +
    +@utils.register_interface(ExtensionType)
    +class IssuingDistributionPoint(object):
    +    oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
    +
    +    def __init__(self, full_name, relative_name, only_contains_user_certs,
    +                 only_contains_ca_certs, only_some_reasons, indirect_crl,
    +                 only_contains_attribute_certs):
    +        if (
    +            only_some_reasons and (
    +                not isinstance(only_some_reasons, frozenset) or not all(
    +                    isinstance(x, ReasonFlags) for x in only_some_reasons
    +                )
    +            )
    +        ):
    +            raise TypeError(
    +                "only_some_reasons must be None or frozenset of ReasonFlags"
    +            )
    +
    +        if only_some_reasons and (
    +            ReasonFlags.unspecified in only_some_reasons or
    +            ReasonFlags.remove_from_crl in only_some_reasons
    +        ):
    +            raise ValueError(
    +                "unspecified and remove_from_crl are not valid reasons in an "
    +                "IssuingDistributionPoint"
    +            )
    +
    +        if not (
    +            isinstance(only_contains_user_certs, bool) and
    +            isinstance(only_contains_ca_certs, bool) and
    +            isinstance(indirect_crl, bool) and
    +            isinstance(only_contains_attribute_certs, bool)
    +        ):
    +            raise TypeError(
    +                "only_contains_user_certs, only_contains_ca_certs, "
    +                "indirect_crl and only_contains_attribute_certs "
    +                "must all be boolean."
    +            )
    +
    +        crl_constraints = [
    +            only_contains_user_certs, only_contains_ca_certs,
    +            indirect_crl, only_contains_attribute_certs
    +        ]
    +
    +        if len([x for x in crl_constraints if x]) > 1:
    +            raise ValueError(
    +                "Only one of the following can be set to True: "
    +                "only_contains_user_certs, only_contains_ca_certs, "
    +                "indirect_crl, only_contains_attribute_certs"
    +            )
    +
    +        if (
    +            not any([
    +                only_contains_user_certs, only_contains_ca_certs,
    +                indirect_crl, only_contains_attribute_certs, full_name,
    +                relative_name, only_some_reasons
    +            ])
    +        ):
    +            raise ValueError(
    +                "Cannot create empty extension: "
    +                "if only_contains_user_certs, only_contains_ca_certs, "
    +                "indirect_crl, and only_contains_attribute_certs are all False"
    +                ", then either full_name, relative_name, or only_some_reasons "
    +                "must have a value."
    +            )
    +
    +        self._only_contains_user_certs = only_contains_user_certs
    +        self._only_contains_ca_certs = only_contains_ca_certs
    +        self._indirect_crl = indirect_crl
    +        self._only_contains_attribute_certs = only_contains_attribute_certs
    +        self._only_some_reasons = only_some_reasons
    +        self._full_name = full_name
    +        self._relative_name = relative_name
    +
    +    def __repr__(self):
    +        return (
    +            "".format(self)
    +        )
    +
    +    def __eq__(self, other):
    +        if not isinstance(other, IssuingDistributionPoint):
    +            return NotImplemented
    +
    +        return (
    +            self.full_name == other.full_name and
    +            self.relative_name == other.relative_name and
    +            self.only_contains_user_certs == other.only_contains_user_certs and
    +            self.only_contains_ca_certs == other.only_contains_ca_certs and
    +            self.only_some_reasons == other.only_some_reasons and
    +            self.indirect_crl == other.indirect_crl and
    +            self.only_contains_attribute_certs ==
    +            other.only_contains_attribute_certs
    +        )
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __hash__(self):
    +        return hash((
    +            self.full_name,
    +            self.relative_name,
    +            self.only_contains_user_certs,
    +            self.only_contains_ca_certs,
    +            self.only_some_reasons,
    +            self.indirect_crl,
    +            self.only_contains_attribute_certs,
    +        ))
    +
    +    full_name = utils.read_only_property("_full_name")
    +    relative_name = utils.read_only_property("_relative_name")
    +    only_contains_user_certs = utils.read_only_property(
    +        "_only_contains_user_certs"
    +    )
    +    only_contains_ca_certs = utils.read_only_property(
    +        "_only_contains_ca_certs"
    +    )
    +    only_some_reasons = utils.read_only_property("_only_some_reasons")
    +    indirect_crl = utils.read_only_property("_indirect_crl")
    +    only_contains_attribute_certs = utils.read_only_property(
    +        "_only_contains_attribute_certs"
    +    )
    +
     
     @utils.register_interface(ExtensionType)
     class UnrecognizedExtension(object):
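_make_sequence_methods is the private helper that replaces the hand-written __len__/__iter__/__getitem__ trios throughout this file. A standalone copy to show the pattern (the Usages class is hypothetical, for illustration only):

    def _make_sequence_methods(field_name):
        def len_method(self):
            return len(getattr(self, field_name))

        def iter_method(self):
            return iter(getattr(self, field_name))

        def getitem_method(self, idx):
            return getattr(self, field_name)[idx]

        return len_method, iter_method, getitem_method


    class Usages(object):
        def __init__(self, usages):
            self._usages = list(usages)

        __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")


    u = Usages(["a", "b"])
    assert len(u) == 2 and u[0] == "a" and list(u) == ["a", "b"]
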
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/general_name.py b/server/www/packages/packages-windows/x86/cryptography/x509/general_name.py
    index 26f389a..1233841 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/general_name.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/general_name.py
    @@ -9,8 +9,6 @@ import ipaddress
     import warnings
     from email.utils import parseaddr
     
    -import idna
    -
     import six
     from six.moves import urllib_parse
     
    @@ -32,6 +30,20 @@ _GENERAL_NAMES = {
     }
     
     
    +def _lazy_import_idna():
+    # Import idna lazily because it allocates a decent amount of memory, and
    +    # we're only using it in deprecated paths.
    +    try:
    +        import idna
    +        return idna
    +    except ImportError:
    +        raise ImportError(
    +            "idna is not installed, but a deprecated feature that requires it"
    +            " was used. See: https://cryptography.io/en/latest/faq/#importe"
    +            "rror-idna-is-not-installed"
    +        )
    +
    +
     class UnsupportedGeneralNameType(Exception):
         def __init__(self, msg, type):
             super(UnsupportedGeneralNameType, self).__init__(msg)
    @@ -60,7 +72,7 @@ class RFC822Name(object):
                         "This means unicode characters should be encoded via "
                         "idna. Support for passing unicode strings (aka U-label) "
                         "will be removed in a future version.",
    -                    utils.DeprecatedIn21,
    +                    utils.PersistentlyDeprecated2017,
                         stacklevel=2,
                     )
             else:
    @@ -83,6 +95,7 @@ class RFC822Name(object):
             return instance
     
         def _idna_encode(self, value):
    +        idna = _lazy_import_idna()
             _, address = parseaddr(value)
             parts = address.split(u"@")
             return parts[0] + "@" + idna.encode(parts[1]).decode("ascii")
    @@ -104,6 +117,7 @@ class RFC822Name(object):
     
     
     def _idna_encode(value):
    +    idna = _lazy_import_idna()
         # Retain prefixes '*.' for common/alt names and '.' for name constraints
         for prefix in ['*.', '.']:
             if value.startswith(prefix):
    @@ -125,7 +139,7 @@ class DNSName(object):
                         "This means unicode characters should be encoded via "
                         "idna. Support for passing unicode strings (aka U-label) "
                         "will be removed in a future version.",
    -                    utils.DeprecatedIn21,
    +                    utils.PersistentlyDeprecated2017,
                         stacklevel=2,
                     )
             else:
    @@ -170,7 +184,7 @@ class UniformResourceIdentifier(object):
                         "This means unicode characters should be encoded via "
                         "idna. Support for passing unicode strings (aka U-label) "
                         " will be removed in a future version.",
    -                    utils.DeprecatedIn21,
    +                    utils.PersistentlyDeprecated2017,
                         stacklevel=2,
                     )
             else:
    @@ -187,11 +201,12 @@ class UniformResourceIdentifier(object):
             return instance
     
         def _idna_encode(self, value):
    +        idna = _lazy_import_idna()
             parsed = urllib_parse.urlparse(value)
             if parsed.port:
                 netloc = (
                     idna.encode(parsed.hostname) +
    -                ":{0}".format(parsed.port).encode("ascii")
    +                ":{}".format(parsed.port).encode("ascii")
                 ).decode("ascii")
             else:
                 netloc = idna.encode(parsed.hostname).decode("ascii")
    @@ -235,7 +250,7 @@ class DirectoryName(object):
         value = utils.read_only_property("_value")
     
         def __repr__(self):
    -        return "".format(self.value)
    +        return "".format(self.value)
     
         def __eq__(self, other):
             if not isinstance(other, DirectoryName):
    @@ -261,7 +276,7 @@ class RegisteredID(object):
         value = utils.read_only_property("_value")
     
         def __repr__(self):
    -        return "".format(self.value)
    +        return "".format(self.value)
     
         def __eq__(self, other):
             if not isinstance(other, RegisteredID):
    @@ -299,7 +314,7 @@ class IPAddress(object):
         value = utils.read_only_property("_value")
     
         def __repr__(self):
    -        return "".format(self.value)
    +        return "".format(self.value)
     
         def __eq__(self, other):
             if not isinstance(other, IPAddress):
    @@ -329,7 +344,7 @@ class OtherName(object):
         value = utils.read_only_property("_value")
     
         def __repr__(self):
    -        return "".format(
    +        return "".format(
                 self.type_id, self.value)
     
         def __eq__(self, other):
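With the change above, idna is only imported when a deprecated U-label path is hit; plain ASCII names never touch it. A sketch (the second call emits a CryptographyDeprecationWarning and requires idna to be installed):

    from cryptography import x509

    x509.DNSName(u"example.com")       # ASCII: idna is never imported

    # Deprecated U-label path: triggers _lazy_import_idna() and warns;
    # raises ImportError with a pointer to the FAQ if idna is absent.
    x509.DNSName(u"\u00e9xample.com")
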
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/name.py b/server/www/packages/packages-windows/x86/cryptography/x509/name.py
    index 5548eda..6816e06 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/name.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/name.py
    @@ -25,7 +25,7 @@ class _ASN1Type(Enum):
         BMPString = 30
     
     
    -_ASN1_TYPE_TO_ENUM = dict((i.value, i) for i in _ASN1Type)
    +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
     _SENTINEL = object()
     _NAMEOID_DEFAULT_TYPE = {
         NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
    @@ -36,6 +36,44 @@ _NAMEOID_DEFAULT_TYPE = {
         NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
     }
     
    +#: Short attribute names from RFC 4514:
    +#: https://tools.ietf.org/html/rfc4514#page-7
    +_NAMEOID_TO_NAME = {
    +    NameOID.COMMON_NAME: 'CN',
    +    NameOID.LOCALITY_NAME: 'L',
    +    NameOID.STATE_OR_PROVINCE_NAME: 'ST',
    +    NameOID.ORGANIZATION_NAME: 'O',
    +    NameOID.ORGANIZATIONAL_UNIT_NAME: 'OU',
    +    NameOID.COUNTRY_NAME: 'C',
    +    NameOID.STREET_ADDRESS: 'STREET',
    +    NameOID.DOMAIN_COMPONENT: 'DC',
    +    NameOID.USER_ID: 'UID',
    +}
    +
    +
    +def _escape_dn_value(val):
    +    """Escape special characters in RFC4514 Distinguished Name value."""
    +
    +    if not val:
    +        return ''
    +
    +    # See https://tools.ietf.org/html/rfc4514#section-2.4
    +    val = val.replace('\\', '\\\\')
    +    val = val.replace('"', '\\"')
    +    val = val.replace('+', '\\+')
    +    val = val.replace(',', '\\,')
    +    val = val.replace(';', '\\;')
    +    val = val.replace('<', '\\<')
    +    val = val.replace('>', '\\>')
    +    val = val.replace('\0', '\\00')
    +
    +    if val[0] in ('#', ' '):
    +        val = '\\' + val
    +    if val[-1] == ' ':
    +        val = val[:-1] + '\\ '
    +
    +    return val
    +
     
     class NameAttribute(object):
         def __init__(self, oid, value, _type=_SENTINEL):
    @@ -58,9 +96,6 @@ class NameAttribute(object):
                         "Country name must be a 2 character country code"
                     )
     
    -        if len(value) == 0:
    -            raise ValueError("Value cannot be an empty string")
    -
             # The appropriate ASN1 string type varies by OID and is defined across
             # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
             # is preferred (2459), but 3280 and 5280 specify several OIDs with
    @@ -80,6 +115,16 @@ class NameAttribute(object):
         oid = utils.read_only_property("_oid")
         value = utils.read_only_property("_value")
     
    +    def rfc4514_string(self):
    +        """
    +        Format as RFC4514 Distinguished Name string.
    +
    +        Use short attribute name if available, otherwise fall back to OID
    +        dotted string.
    +        """
    +        key = _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
    +        return '%s=%s' % (key, _escape_dn_value(self.value))
    +
         def __eq__(self, other):
             if not isinstance(other, NameAttribute):
                 return NotImplemented
    @@ -117,6 +162,15 @@ class RelativeDistinguishedName(object):
         def get_attributes_for_oid(self, oid):
             return [i for i in self if i.oid == oid]
     
    +    def rfc4514_string(self):
    +        """
    +        Format as RFC4514 Distinguished Name string.
    +
    +        Within each RDN, attributes are joined by '+', although that is rarely
    +        used in certificates.
    +        """
    +        return '+'.join(attr.rfc4514_string() for attr in self._attributes)
    +
         def __eq__(self, other):
             if not isinstance(other, RelativeDistinguishedName):
                 return NotImplemented
    @@ -136,7 +190,7 @@ class RelativeDistinguishedName(object):
             return len(self._attributes)
     
         def __repr__(self):
    -        return "".format(list(self))
    +        return "".format(self.rfc4514_string())
     
     
     class Name(object):
    @@ -154,6 +208,20 @@ class Name(object):
                     " or a list RelativeDistinguishedName"
                 )
     
    +    def rfc4514_string(self):
    +        """
    +        Format as RFC4514 Distinguished Name string.
    +        For example 'CN=foobar.com,O=Foo Corp,C=US'
    +
    +        An X.509 name is a two-level structure: a list of sets of attributes.
    +        Each list element is separated by ',' and within each list element, set
    +        elements are separated by '+'. The latter is almost never used in
    +        real world certificates. According to RFC4514 section 2.1 the
    +        RDNSequence must be reversed when converting to string representation.
    +        """
    +        return ','.join(
    +            attr.rfc4514_string() for attr in reversed(self._attributes))
    +
         def get_attributes_for_oid(self, oid):
             return [i for i in self if i.oid == oid]
     
    @@ -187,4 +255,9 @@ class Name(object):
             return sum(len(rdn) for rdn in self._attributes)
     
         def __repr__(self):
    -        return "".format(list(self))
    +        rdns = ','.join(attr.rfc4514_string() for attr in self._attributes)
    +
    +        if six.PY2:
    +            return "".format(rdns.encode('utf8'))
    +        else:
    +            return "".format(rdns)
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/ocsp.py b/server/www/packages/packages-windows/x86/cryptography/x509/ocsp.py
    new file mode 100644
    index 0000000..4e0c985
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/ocsp.py
    @@ -0,0 +1,435 @@
    +# This file is dual licensed under the terms of the Apache License, Version
    +# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    +# for complete details.
    +
    +from __future__ import absolute_import, division, print_function
    +
    +import abc
    +import datetime
    +from enum import Enum
    +
    +import six
    +
    +from cryptography import x509
    +from cryptography.hazmat.primitives import hashes
    +from cryptography.hazmat.primitives.asymmetric import ed25519, ed448
    +from cryptography.x509.base import (
    +    _EARLIEST_UTC_TIME, _convert_to_naive_utc_time, _reject_duplicate_extension
    +)
    +
    +
    +_OIDS_TO_HASH = {
    +    "1.3.14.3.2.26": hashes.SHA1(),
    +    "2.16.840.1.101.3.4.2.4": hashes.SHA224(),
    +    "2.16.840.1.101.3.4.2.1": hashes.SHA256(),
    +    "2.16.840.1.101.3.4.2.2": hashes.SHA384(),
    +    "2.16.840.1.101.3.4.2.3": hashes.SHA512(),
    +}
    +
    +
    +class OCSPResponderEncoding(Enum):
    +    HASH = "By Hash"
    +    NAME = "By Name"
    +
    +
    +class OCSPResponseStatus(Enum):
    +    SUCCESSFUL = 0
    +    MALFORMED_REQUEST = 1
    +    INTERNAL_ERROR = 2
    +    TRY_LATER = 3
    +    SIG_REQUIRED = 5
    +    UNAUTHORIZED = 6
    +
    +
    +_RESPONSE_STATUS_TO_ENUM = {x.value: x for x in OCSPResponseStatus}
    +_ALLOWED_HASHES = (
    +    hashes.SHA1, hashes.SHA224, hashes.SHA256,
    +    hashes.SHA384, hashes.SHA512
    +)
    +
    +
    +def _verify_algorithm(algorithm):
    +    if not isinstance(algorithm, _ALLOWED_HASHES):
    +        raise ValueError(
    +            "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512"
    +        )
    +
    +
    +class OCSPCertStatus(Enum):
    +    GOOD = 0
    +    REVOKED = 1
    +    UNKNOWN = 2
    +
    +
    +_CERT_STATUS_TO_ENUM = {x.value: x for x in OCSPCertStatus}
    +
    +
    +def load_der_ocsp_request(data):
    +    from cryptography.hazmat.backends.openssl.backend import backend
    +    return backend.load_der_ocsp_request(data)
    +
    +
    +def load_der_ocsp_response(data):
    +    from cryptography.hazmat.backends.openssl.backend import backend
    +    return backend.load_der_ocsp_response(data)
    +
    +
    +class OCSPRequestBuilder(object):
    +    def __init__(self, request=None, extensions=[]):
    +        self._request = request
    +        self._extensions = extensions
    +
    +    def add_certificate(self, cert, issuer, algorithm):
    +        if self._request is not None:
    +            raise ValueError("Only one certificate can be added to a request")
    +
    +        _verify_algorithm(algorithm)
    +        if (
    +            not isinstance(cert, x509.Certificate) or
    +            not isinstance(issuer, x509.Certificate)
    +        ):
    +            raise TypeError("cert and issuer must be a Certificate")
    +
    +        return OCSPRequestBuilder((cert, issuer, algorithm), self._extensions)
    +
    +    def add_extension(self, extension, critical):
    +        if not isinstance(extension, x509.ExtensionType):
    +            raise TypeError("extension must be an ExtensionType")
    +
    +        extension = x509.Extension(extension.oid, critical, extension)
    +        _reject_duplicate_extension(extension, self._extensions)
    +
    +        return OCSPRequestBuilder(
    +            self._request, self._extensions + [extension]
    +        )
    +
    +    def build(self):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if self._request is None:
    +            raise ValueError("You must add a certificate before building")
    +
    +        return backend.create_ocsp_request(self)
    +
    +
    +class _SingleResponse(object):
    +    def __init__(self, cert, issuer, algorithm, cert_status, this_update,
    +                 next_update, revocation_time, revocation_reason):
    +        if (
    +            not isinstance(cert, x509.Certificate) or
    +            not isinstance(issuer, x509.Certificate)
    +        ):
    +            raise TypeError("cert and issuer must be a Certificate")
    +
    +        _verify_algorithm(algorithm)
    +        if not isinstance(this_update, datetime.datetime):
    +            raise TypeError("this_update must be a datetime object")
    +        if (
    +            next_update is not None and
    +            not isinstance(next_update, datetime.datetime)
    +        ):
    +            raise TypeError("next_update must be a datetime object or None")
    +
    +        self._cert = cert
    +        self._issuer = issuer
    +        self._algorithm = algorithm
    +        self._this_update = this_update
    +        self._next_update = next_update
    +
    +        if not isinstance(cert_status, OCSPCertStatus):
    +            raise TypeError(
    +                "cert_status must be an item from the OCSPCertStatus enum"
    +            )
    +        if cert_status is not OCSPCertStatus.REVOKED:
    +            if revocation_time is not None:
    +                raise ValueError(
    +                    "revocation_time can only be provided if the certificate "
    +                    "is revoked"
    +                )
    +            if revocation_reason is not None:
    +                raise ValueError(
    +                    "revocation_reason can only be provided if the certificate"
    +                    " is revoked"
    +                )
    +        else:
    +            if not isinstance(revocation_time, datetime.datetime):
    +                raise TypeError("revocation_time must be a datetime object")
    +
    +            revocation_time = _convert_to_naive_utc_time(revocation_time)
    +            if revocation_time < _EARLIEST_UTC_TIME:
    +                raise ValueError('The revocation_time must be on or after'
    +                                 ' 1950 January 1.')
    +
    +            if (
    +                revocation_reason is not None and
    +                not isinstance(revocation_reason, x509.ReasonFlags)
    +            ):
    +                raise TypeError(
    +                    "revocation_reason must be an item from the ReasonFlags "
    +                    "enum or None"
    +                )
    +
    +        self._cert_status = cert_status
    +        self._revocation_time = revocation_time
    +        self._revocation_reason = revocation_reason
    +
    +
    +class OCSPResponseBuilder(object):
    +    def __init__(self, response=None, responder_id=None, certs=None,
    +                 extensions=[]):
    +        self._response = response
    +        self._responder_id = responder_id
    +        self._certs = certs
    +        self._extensions = extensions
    +
    +    def add_response(self, cert, issuer, algorithm, cert_status, this_update,
    +                     next_update, revocation_time, revocation_reason):
    +        if self._response is not None:
    +            raise ValueError("Only one response per OCSPResponse.")
    +
    +        singleresp = _SingleResponse(
    +            cert, issuer, algorithm, cert_status, this_update, next_update,
    +            revocation_time, revocation_reason
    +        )
    +        return OCSPResponseBuilder(
    +            singleresp, self._responder_id,
    +            self._certs, self._extensions,
    +        )
    +
    +    def responder_id(self, encoding, responder_cert):
    +        if self._responder_id is not None:
    +            raise ValueError("responder_id can only be set once")
    +        if not isinstance(responder_cert, x509.Certificate):
    +            raise TypeError("responder_cert must be a Certificate")
    +        if not isinstance(encoding, OCSPResponderEncoding):
    +            raise TypeError(
    +                "encoding must be an element from OCSPResponderEncoding"
    +            )
    +
    +        return OCSPResponseBuilder(
    +            self._response, (responder_cert, encoding),
    +            self._certs, self._extensions,
    +        )
    +
    +    def certificates(self, certs):
    +        if self._certs is not None:
    +            raise ValueError("certificates may only be set once")
    +        certs = list(certs)
    +        if len(certs) == 0:
    +            raise ValueError("certs must not be an empty list")
    +        if not all(isinstance(x, x509.Certificate) for x in certs):
    +            raise TypeError("certs must be a list of Certificates")
    +        return OCSPResponseBuilder(
    +            self._response, self._responder_id,
    +            certs, self._extensions,
    +        )
    +
    +    def add_extension(self, extension, critical):
    +        if not isinstance(extension, x509.ExtensionType):
    +            raise TypeError("extension must be an ExtensionType")
    +
    +        extension = x509.Extension(extension.oid, critical, extension)
    +        _reject_duplicate_extension(extension, self._extensions)
    +
    +        return OCSPResponseBuilder(
    +            self._response, self._responder_id,
    +            self._certs, self._extensions + [extension],
    +        )
    +
    +    def sign(self, private_key, algorithm):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if self._response is None:
    +            raise ValueError("You must add a response before signing")
    +        if self._responder_id is None:
    +            raise ValueError("You must add a responder_id before signing")
    +
    +        if isinstance(private_key,
    +                      (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)):
    +            if algorithm is not None:
    +                raise ValueError(
    +                    "algorithm must be None when signing via ed25519 or ed448"
    +                )
    +        elif not isinstance(algorithm, hashes.HashAlgorithm):
    +            raise TypeError("Algorithm must be a registered hash algorithm.")
    +
    +        return backend.create_ocsp_response(
    +            OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm
    +        )
    +
    +    @classmethod
    +    def build_unsuccessful(cls, response_status):
    +        from cryptography.hazmat.backends.openssl.backend import backend
    +        if not isinstance(response_status, OCSPResponseStatus):
    +            raise TypeError(
    +                "response_status must be an item from OCSPResponseStatus"
    +            )
    +        if response_status is OCSPResponseStatus.SUCCESSFUL:
    +            raise ValueError("response_status cannot be SUCCESSFUL")
    +
    +        return backend.create_ocsp_response(response_status, None, None, None)
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class OCSPRequest(object):
    +    @abc.abstractproperty
    +    def issuer_key_hash(self):
    +        """
    +        The hash of the issuer public key
    +        """
    +
    +    @abc.abstractproperty
    +    def issuer_name_hash(self):
    +        """
    +        The hash of the issuer name
    +        """
    +
    +    @abc.abstractproperty
    +    def hash_algorithm(self):
    +        """
    +        The hash algorithm used in the issuer name and key hashes
    +        """
    +
    +    @abc.abstractproperty
    +    def serial_number(self):
    +        """
    +        The serial number of the cert whose status is being checked
    +        """
    +    @abc.abstractmethod
    +    def public_bytes(self, encoding):
    +        """
    +        Serializes the request to DER
    +        """
    +
    +    @abc.abstractproperty
    +    def extensions(self):
    +        """
    +        The list of request extensions. Not single request extensions.
    +        """
    +
    +
    +@six.add_metaclass(abc.ABCMeta)
    +class OCSPResponse(object):
    +    @abc.abstractproperty
    +    def response_status(self):
    +        """
    +        The status of the response. This is a value from the OCSPResponseStatus
    +        enumeration
    +        """
    +
    +    @abc.abstractproperty
    +    def signature_algorithm_oid(self):
    +        """
    +        The ObjectIdentifier of the signature algorithm
    +        """
    +
    +    @abc.abstractproperty
    +    def signature_hash_algorithm(self):
    +        """
    +        Returns a HashAlgorithm corresponding to the type of the digest signed
    +        """
    +
    +    @abc.abstractproperty
    +    def signature(self):
    +        """
    +        The signature bytes
    +        """
    +
    +    @abc.abstractproperty
    +    def tbs_response_bytes(self):
    +        """
    +        The tbsResponseData bytes
    +        """
    +
    +    @abc.abstractproperty
    +    def certificates(self):
    +        """
    +        A list of certificates used to help build a chain to verify the OCSP
    +        response. This situation occurs when the OCSP responder uses a delegate
    +        certificate.
    +        """
    +
    +    @abc.abstractproperty
    +    def responder_key_hash(self):
    +        """
    +        The responder's key hash or None
    +        """
    +
    +    @abc.abstractproperty
    +    def responder_name(self):
    +        """
    +        The responder's Name or None
    +        """
    +
    +    @abc.abstractproperty
    +    def produced_at(self):
    +        """
    +        The time the response was produced
    +        """
    +
    +    @abc.abstractproperty
    +    def certificate_status(self):
    +        """
    +        The status of the certificate (an element from the OCSPCertStatus enum)
    +        """
    +
    +    @abc.abstractproperty
    +    def revocation_time(self):
    +        """
    +        The date of when the certificate was revoked or None if not
    +        revoked.
    +        """
    +
    +    @abc.abstractproperty
    +    def revocation_reason(self):
    +        """
    +        The reason the certificate was revoked or None if not specified or
    +        not revoked.
    +        """
    +
    +    @abc.abstractproperty
    +    def this_update(self):
    +        """
    +        The most recent time at which the status being indicated is known by
    +        the responder to have been correct
    +        """
    +
    +    @abc.abstractproperty
    +    def next_update(self):
    +        """
    +        The time when newer information will be available
    +        """
    +
    +    @abc.abstractproperty
    +    def issuer_key_hash(self):
    +        """
    +        The hash of the issuer public key
    +        """
    +
    +    @abc.abstractproperty
    +    def issuer_name_hash(self):
    +        """
    +        The hash of the issuer name
    +        """
    +
    +    @abc.abstractproperty
    +    def hash_algorithm(self):
    +        """
    +        The hash algorithm used in the issuer name and key hashes
    +        """
    +
    +    @abc.abstractproperty
    +    def serial_number(self):
    +        """
    +        The serial number of the cert whose status is being checked
    +        """
    +
    +    @abc.abstractproperty
    +    def extensions(self):
    +        """
    +        The list of response extensions. Not single response extensions.
    +        """
    +
    +    @abc.abstractproperty
    +    def single_extensions(self):
    +        """
    +        The list of single response extensions. Not response extensions.
    +        """
    diff --git a/server/www/packages/packages-windows/x86/cryptography/x509/oid.py b/server/www/packages/packages-windows/x86/cryptography/x509/oid.py
    index 90003d7..c1e5dc5 100644
    --- a/server/www/packages/packages-windows/x86/cryptography/x509/oid.py
    +++ b/server/www/packages/packages-windows/x86/cryptography/x509/oid.py
    @@ -4,68 +4,10 @@
     
     from __future__ import absolute_import, division, print_function
     
    -from cryptography import utils
    +from cryptography.hazmat._oid import ObjectIdentifier
     from cryptography.hazmat.primitives import hashes
     
     
    -class ObjectIdentifier(object):
    -    def __init__(self, dotted_string):
    -        self._dotted_string = dotted_string
    -
    -        nodes = self._dotted_string.split(".")
    -        intnodes = []
    -
    -        # There must be at least 2 nodes, the first node must be 0..2, and
    -        # if less than 2, the second node cannot have a value outside the
    -        # range 0..39.  All nodes must be integers.
    -        for node in nodes:
    -            try:
    -                intnodes.append(int(node, 0))
    -            except ValueError:
    -                raise ValueError(
    -                    "Malformed OID: %s (non-integer nodes)" % (
    -                        self._dotted_string))
    -
    -        if len(nodes) < 2:
    -            raise ValueError(
    -                "Malformed OID: %s (insufficient number of nodes)" % (
    -                    self._dotted_string))
    -
    -        if intnodes[0] > 2:
    -            raise ValueError(
    -                "Malformed OID: %s (first node outside valid range)" % (
    -                    self._dotted_string))
    -
    -        if intnodes[0] < 2 and intnodes[1] >= 40:
    -            raise ValueError(
    -                "Malformed OID: %s (second node outside valid range)" % (
    -                    self._dotted_string))
    -
    -    def __eq__(self, other):
    -        if not isinstance(other, ObjectIdentifier):
    -            return NotImplemented
    -
    -        return self.dotted_string == other.dotted_string
    -
    -    def __ne__(self, other):
    -        return not self == other
    -
    -    def __repr__(self):
    -        return "".format(
    -            self.dotted_string,
    -            self._name
    -        )
    -
    -    def __hash__(self):
    -        return hash(self.dotted_string)
    -
    -    @property
    -    def _name(self):
    -        return _OID_NAMES.get(self, "Unknown OID")
    -
    -    dotted_string = utils.read_only_property("_dotted_string")
    -
    -
     class ExtensionOID(object):
         SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
         SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
    @@ -82,6 +24,7 @@ class ExtensionOID(object):
         EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
         FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
         INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
    +    ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
         AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
         SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
         OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
    @@ -91,6 +34,13 @@ class ExtensionOID(object):
         PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = (
             ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2")
         )
    +    PRECERT_POISON = (
    +        ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
    +    )
    +
    +
    +class OCSPExtensionOID(object):
    +    NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
     
     
     class CRLEntryExtensionOID(object):
    @@ -146,6 +96,8 @@ class SignatureAlgorithmOID(object):
         DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
         DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
         DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
    +    ED25519 = ObjectIdentifier("1.3.101.112")
    +    ED448 = ObjectIdentifier("1.3.101.113")
     
     
     _SIG_OIDS_TO_HASH = {
    @@ -163,7 +115,9 @@ _SIG_OIDS_TO_HASH = {
         SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
         SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
         SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
    -    SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256()
    +    SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
    +    SignatureAlgorithmOID.ED25519: None,
    +    SignatureAlgorithmOID.ED448: None,
     }
     
     
    @@ -231,6 +185,8 @@ _OID_NAMES = {
         SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
         SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
         SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
    +    SignatureAlgorithmOID.ED25519: "ed25519",
    +    SignatureAlgorithmOID.ED448: "ed448",
         ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
         ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
         ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
    @@ -246,6 +202,7 @@ _OID_NAMES = {
         ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
             "signedCertificateTimestampList"
         ),
    +    ExtensionOID.PRECERT_POISON: "ctPoison",
         CRLEntryExtensionOID.CRL_REASON: "cRLReason",
         CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
         CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
    @@ -258,6 +215,9 @@ _OID_NAMES = {
         ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
         ExtensionOID.FRESHEST_CRL: "freshestCRL",
         ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
    +    ExtensionOID.ISSUING_DISTRIBUTION_POINT: (
    +        "issuingDistributionPoint"
    +    ),
         ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
         ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
         ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
    @@ -268,4 +228,5 @@ _OID_NAMES = {
         AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
         CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
         CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
    +    OCSPExtensionOID.NONCE: "OCSPNonce",
     }
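
One consequence of the new Ed25519/Ed448 entries is worth spelling out: they map to None in _SIG_OIDS_TO_HASH because EdDSA signs the message directly, with no standalone digest step. A small sketch, assuming the vendored package is importable:

# Sketch: the new EdDSA signature OIDs and what mapping them to None
# implies. Assumes the vendored cryptography package is importable.
from cryptography.x509.oid import ObjectIdentifier, SignatureAlgorithmOID

assert SignatureAlgorithmOID.ED25519 == ObjectIdentifier("1.3.101.112")
assert SignatureAlgorithmOID.ED448 == ObjectIdentifier("1.3.101.113")
print(SignatureAlgorithmOID.ED25519.dotted_string)  # 1.3.101.112

# For a certificate signed with Ed25519, signature_hash_algorithm is
# therefore None rather than a hashes.HashAlgorithm instance.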
    diff --git a/server/www/packages/packages-windows/x86/idna/__init__.py b/server/www/packages/packages-windows/x86/idna/__init__.py
    new file mode 100644
    index 0000000..847bf93
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/__init__.py
    @@ -0,0 +1,2 @@
    +from .package_data import __version__
    +from .core import *
    diff --git a/server/www/packages/packages-windows/x86/idna/codec.py b/server/www/packages/packages-windows/x86/idna/codec.py
    new file mode 100644
    index 0000000..98c65ea
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/codec.py
    @@ -0,0 +1,118 @@
    +from .core import encode, decode, alabel, ulabel, IDNAError
    +import codecs
    +import re
    +
    +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
    +
    +class Codec(codecs.Codec):
    +
    +    def encode(self, data, errors='strict'):
    +
    +        if errors != 'strict':
    +            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
    +
    +        if not data:
    +            return "", 0
    +
    +        return encode(data), len(data)
    +
    +    def decode(self, data, errors='strict'):
    +
    +        if errors != 'strict':
    +            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
    +
    +        if not data:
    +            return u"", 0
    +
    +        return decode(data), len(data)
    +
    +class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    +    def _buffer_encode(self, data, errors, final):
    +        if errors != 'strict':
    +            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
    +
    +        if not data:
    +            return ("", 0)
    +
    +        labels = _unicode_dots_re.split(data)
    +        trailing_dot = u''
    +        if labels:
    +            if not labels[-1]:
    +                trailing_dot = '.'
    +                del labels[-1]
    +            elif not final:
    +                # Keep potentially unfinished label until the next call
    +                del labels[-1]
    +                if labels:
    +                    trailing_dot = '.'
    +
    +        result = []
    +        size = 0
    +        for label in labels:
    +            result.append(alabel(label))
    +            if size:
    +                size += 1
    +            size += len(label)
    +
    +        # Join with U+002E
    +        result = ".".join(result) + trailing_dot
    +        size += len(trailing_dot)
    +        return (result, size)
    +
    +class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    +    def _buffer_decode(self, data, errors, final):
    +        if errors != 'strict':
    +            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
    +
    +        if not data:
    +            return (u"", 0)
    +
    +        # IDNA allows decoding to operate on Unicode strings, too.
    +        if isinstance(data, unicode):
    +            labels = _unicode_dots_re.split(data)
    +        else:
    +            # Must be ASCII string
    +            data = str(data)
    +            unicode(data, "ascii")
    +            labels = data.split(".")
    +
    +        trailing_dot = u''
    +        if labels:
    +            if not labels[-1]:
    +                trailing_dot = u'.'
    +                del labels[-1]
    +            elif not final:
    +                # Keep potentially unfinished label until the next call
    +                del labels[-1]
    +                if labels:
    +                    trailing_dot = u'.'
    +
    +        result = []
    +        size = 0
    +        for label in labels:
    +            result.append(ulabel(label))
    +            if size:
    +                size += 1
    +            size += len(label)
    +
    +        result = u".".join(result) + trailing_dot
    +        size += len(trailing_dot)
    +        return (result, size)
    +
    +
    +class StreamWriter(Codec, codecs.StreamWriter):
    +    pass
    +
    +class StreamReader(Codec, codecs.StreamReader):
    +    pass
    +
    +def getregentry():
    +    return codecs.CodecInfo(
    +        name='idna',
    +        encode=Codec().encode,
    +        decode=Codec().decode,
    +        incrementalencoder=IncrementalEncoder,
    +        incrementaldecoder=IncrementalDecoder,
    +        streamwriter=StreamWriter,
    +        streamreader=StreamReader,
    +    )
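
Note that codec.py only defines getregentry(); nothing registers the codec. A sketch of wiring it up by hand, using 'idna2008' as an arbitrary lookup name so as not to shadow the stdlib's built-in 'idna' codec:

# Sketch: registering the codec above by hand. The lookup name
# 'idna2008' is an arbitrary choice here; the stdlib already owns the
# name 'idna'. Assumes the vendored idna package is importable.
import codecs
from idna import codec

def _search(name):
    if name == 'idna2008':
        return codec.getregentry()
    return None

codecs.register(_search)
print('ドメイン.テスト'.encode('idna2008'))  # b'xn--eckwd4c7c.xn--zckzah'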
    diff --git a/server/www/packages/packages-windows/x86/idna/compat.py b/server/www/packages/packages-windows/x86/idna/compat.py
    new file mode 100644
    index 0000000..4d47f33
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/compat.py
    @@ -0,0 +1,12 @@
    +from .core import *
    +from .codec import *
    +
    +def ToASCII(label):
    +    return encode(label)
    +
    +def ToUnicode(label):
    +    return decode(label)
    +
    +def nameprep(s):
    +    raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
    +
    diff --git a/server/www/packages/packages-windows/x86/idna/core.py b/server/www/packages/packages-windows/x86/idna/core.py
    new file mode 100644
    index 0000000..9c3bba2
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/core.py
    @@ -0,0 +1,398 @@
    +from . import idnadata
    +import bisect
    +import unicodedata
    +import re
    +import sys
    +from .intranges import intranges_contain
    +
    +_virama_combining_class = 9
    +_alabel_prefix = b'xn--'
    +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
    +
    +if sys.version_info[0] >= 3:
    +    unicode = str
    +    unichr = chr
    +
    +class IDNAError(UnicodeError):
    +    """ Base exception for all IDNA-encoding related problems """
    +    pass
    +
    +
    +class IDNABidiError(IDNAError):
    +    """ Exception when bidirectional requirements are not satisfied """
    +    pass
    +
    +
    +class InvalidCodepoint(IDNAError):
    +    """ Exception when a disallowed or unallocated codepoint is used """
    +    pass
    +
    +
    +class InvalidCodepointContext(IDNAError):
    +    """ Exception when the codepoint is not valid in the context it is used """
    +    pass
    +
    +
    +def _combining_class(cp):
    +    v = unicodedata.combining(unichr(cp))
    +    if v == 0:
    +        if not unicodedata.name(unichr(cp)):
    +            raise ValueError("Unknown character in unicodedata")
    +    return v
    +
    +def _is_script(cp, script):
    +    return intranges_contain(ord(cp), idnadata.scripts[script])
    +
    +def _punycode(s):
    +    return s.encode('punycode')
    +
    +def _unot(s):
    +    return 'U+{0:04X}'.format(s)
    +
    +
    +def valid_label_length(label):
    +
    +    if len(label) > 63:
    +        return False
    +    return True
    +
    +
    +def valid_string_length(label, trailing_dot):
    +
    +    if len(label) > (254 if trailing_dot else 253):
    +        return False
    +    return True
    +
    +
    +def check_bidi(label, check_ltr=False):
    +
    +    # Bidi rules should only be applied if string contains RTL characters
    +    bidi_label = False
    +    for (idx, cp) in enumerate(label, 1):
    +        direction = unicodedata.bidirectional(cp)
    +        if direction == '':
    +            # String likely comes from a newer version of Unicode
    +            raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
    +        if direction in ['R', 'AL', 'AN']:
    +            bidi_label = True
    +    if not bidi_label and not check_ltr:
    +        return True
    +
    +    # Bidi rule 1
    +    direction = unicodedata.bidirectional(label[0])
    +    if direction in ['R', 'AL']:
    +        rtl = True
    +    elif direction == 'L':
    +        rtl = False
    +    else:
    +        raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))
    +
    +    valid_ending = False
    +    number_type = False
    +    for (idx, cp) in enumerate(label, 1):
    +        direction = unicodedata.bidirectional(cp)
    +
    +        if rtl:
    +            # Bidi rule 2
    +            if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
    +                raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
    +            # Bidi rule 3
    +            if direction in ['R', 'AL', 'EN', 'AN']:
    +                valid_ending = True
    +            elif direction != 'NSM':
    +                valid_ending = False
    +            # Bidi rule 4
    +            if direction in ['AN', 'EN']:
    +                if not number_type:
    +                    number_type = direction
    +                else:
    +                    if number_type != direction:
    +                        raise IDNABidiError('Can not mix numeral types in a right-to-left label')
    +        else:
    +            # Bidi rule 5
    +            if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
    +                raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
    +            # Bidi rule 6
    +            if direction in ['L', 'EN']:
    +                valid_ending = True
    +            elif direction != 'NSM':
    +                valid_ending = False
    +
    +    if not valid_ending:
    +        raise IDNABidiError('Label ends with illegal codepoint directionality')
    +
    +    return True
    +
    +
    +def check_initial_combiner(label):
    +
    +    if unicodedata.category(label[0])[0] == 'M':
    +        raise IDNAError('Label begins with an illegal combining character')
    +    return True
    +
    +
    +def check_hyphen_ok(label):
    +
    +    if label[2:4] == '--':
    +        raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
    +    if label[0] == '-' or label[-1] == '-':
    +        raise IDNAError('Label must not start or end with a hyphen')
    +    return True
    +
    +
    +def check_nfc(label):
    +
    +    if unicodedata.normalize('NFC', label) != label:
    +        raise IDNAError('Label must be in Normalization Form C')
    +
    +
    +def valid_contextj(label, pos):
    +
    +    cp_value = ord(label[pos])
    +
    +    if cp_value == 0x200c:
    +
    +        if pos > 0:
    +            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
    +                return True
    +
    +        ok = False
    +        for i in range(pos-1, -1, -1):
    +            joining_type = idnadata.joining_types.get(ord(label[i]))
    +            if joining_type == ord('T'):
    +                continue
    +            if joining_type in [ord('L'), ord('D')]:
    +                ok = True
    +                break
    +
    +        if not ok:
    +            return False
    +
    +        ok = False
    +        for i in range(pos+1, len(label)):
    +            joining_type = idnadata.joining_types.get(ord(label[i]))
    +            if joining_type == ord('T'):
    +                continue
    +            if joining_type in [ord('R'), ord('D')]:
    +                ok = True
    +                break
    +        return ok
    +
    +    if cp_value == 0x200d:
    +
    +        if pos > 0:
    +            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
    +                return True
    +        return False
    +
    +    else:
    +
    +        return False
    +
    +
    +def valid_contexto(label, pos, exception=False):
    +
    +    cp_value = ord(label[pos])
    +
    +    if cp_value == 0x00b7:
    +        if 0 < pos < len(label)-1:
    +            if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
    +                return True
    +        return False
    +
    +    elif cp_value == 0x0375:
    +        if pos < len(label)-1 and len(label) > 1:
    +            return _is_script(label[pos + 1], 'Greek')
    +        return False
    +
    +    elif cp_value == 0x05f3 or cp_value == 0x05f4:
    +        if pos > 0:
    +            return _is_script(label[pos - 1], 'Hebrew')
    +        return False
    +
    +    elif cp_value == 0x30fb:
    +        for cp in label:
    +            if cp == u'\u30fb':
    +                continue
    +            if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
    +                return True
    +        return False
    +
    +    elif 0x660 <= cp_value <= 0x669:
    +        for cp in label:
    +            if 0x6f0 <= ord(cp) <= 0x06f9:
    +                return False
    +        return True
    +
    +    elif 0x6f0 <= cp_value <= 0x6f9:
    +        for cp in label:
    +            if 0x660 <= ord(cp) <= 0x0669:
    +                return False
    +        return True
    +
    +
    +def check_label(label):
    +
    +    if isinstance(label, (bytes, bytearray)):
    +        label = label.decode('utf-8')
    +    if len(label) == 0:
    +        raise IDNAError('Empty Label')
    +
    +    check_nfc(label)
    +    check_hyphen_ok(label)
    +    check_initial_combiner(label)
    +
    +    for (pos, cp) in enumerate(label):
    +        cp_value = ord(cp)
    +        if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
    +            continue
    +        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
    +            try:
    +                if not valid_contextj(label, pos):
    +                    raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
    +                        _unot(cp_value), pos+1, repr(label)))
    +            except ValueError:
    +                raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
    +                    _unot(cp_value), pos+1, repr(label)))
    +        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
    +            if not valid_contexto(label, pos):
    +                raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
    +        else:
    +            raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
    +
    +    check_bidi(label)
    +
    +
    +def alabel(label):
    +
    +    try:
    +        label = label.encode('ascii')
    +        ulabel(label)
    +        if not valid_label_length(label):
    +            raise IDNAError('Label too long')
    +        return label
    +    except UnicodeEncodeError:
    +        pass
    +
    +    if not label:
    +        raise IDNAError('No Input')
    +
    +    label = unicode(label)
    +    check_label(label)
    +    label = _punycode(label)
    +    label = _alabel_prefix + label
    +
    +    if not valid_label_length(label):
    +        raise IDNAError('Label too long')
    +
    +    return label
    +
    +
    +def ulabel(label):
    +
    +    if not isinstance(label, (bytes, bytearray)):
    +        try:
    +            label = label.encode('ascii')
    +        except UnicodeEncodeError:
    +            check_label(label)
    +            return label
    +
    +    label = label.lower()
    +    if label.startswith(_alabel_prefix):
    +        label = label[len(_alabel_prefix):]
    +        if label.decode('ascii')[-1] == '-':
    +            raise IDNAError('A-label must not end with a hyphen')
    +    else:
    +        check_label(label)
    +        return label.decode('ascii')
    +
    +    label = label.decode('punycode')
    +    check_label(label)
    +    return label
    +
    +
    +def uts46_remap(domain, std3_rules=True, transitional=False):
    +    """Re-map the characters in the string according to UTS46 processing."""
    +    from .uts46data import uts46data
    +    output = u""
    +    try:
    +        for pos, char in enumerate(domain):
    +            code_point = ord(char)
    +            uts46row = uts46data[code_point if code_point < 256 else
    +                bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
    +            status = uts46row[1]
    +            replacement = uts46row[2] if len(uts46row) == 3 else None
    +            if (status == "V" or
    +                    (status == "D" and not transitional) or
    +                    (status == "3" and not std3_rules and replacement is None)):
    +                output += char
    +            elif replacement is not None and (status == "M" or
    +                    (status == "3" and not std3_rules) or
    +                    (status == "D" and transitional)):
    +                output += replacement
    +            elif status != "I":
    +                raise IndexError()
    +        return unicodedata.normalize("NFC", output)
    +    except IndexError:
    +        raise InvalidCodepoint(
    +            "Codepoint {0} not allowed at position {1} in {2}".format(
    +            _unot(code_point), pos + 1, repr(domain)))
    +
    +
    +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
    +
    +    if isinstance(s, (bytes, bytearray)):
    +        s = s.decode("ascii")
    +    if uts46:
    +        s = uts46_remap(s, std3_rules, transitional)
    +    trailing_dot = False
    +    result = []
    +    if strict:
    +        labels = s.split('.')
    +    else:
    +        labels = _unicode_dots_re.split(s)
    +    if not labels or labels == ['']:
    +        raise IDNAError('Empty domain')
    +    if labels[-1] == '':
    +        del labels[-1]
    +        trailing_dot = True
    +    for label in labels:
    +        s = alabel(label)
    +        if s:
    +            result.append(s)
    +        else:
    +            raise IDNAError('Empty label')
    +    if trailing_dot:
    +        result.append(b'')
    +    s = b'.'.join(result)
    +    if not valid_string_length(s, trailing_dot):
    +        raise IDNAError('Domain too long')
    +    return s
    +
    +
    +def decode(s, strict=False, uts46=False, std3_rules=False):
    +
    +    if isinstance(s, (bytes, bytearray)):
    +        s = s.decode("ascii")
    +    if uts46:
    +        s = uts46_remap(s, std3_rules, False)
    +    trailing_dot = False
    +    result = []
    +    if not strict:
    +        labels = _unicode_dots_re.split(s)
    +    else:
    +        labels = s.split(u'.')
    +    if not labels or labels == ['']:
    +        raise IDNAError('Empty domain')
    +    if not labels[-1]:
    +        del labels[-1]
    +        trailing_dot = True
    +    for label in labels:
    +        s = ulabel(label)
    +        if s:
    +            result.append(s)
    +        else:
    +            raise IDNAError('Empty label')
    +    if trailing_dot:
    +        result.append(u'')
    +    return u'.'.join(result)
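
A short usage sketch of the public entry points defined above; the expected outputs come from the upstream idna documentation, and the snippet assumes the vendored package is on sys.path.

# Sketch of the core encode/decode API defined above.
import idna

print(idna.encode('ドメイン.テスト'))           # b'xn--eckwd4c7c.xn--zckzah'
print(idna.decode('xn--eckwd4c7c.xn--zckzah'))  # ドメイン.テスト

# uts46=True applies the UTS #46 remapping first (e.g. case folding),
# so mixed-case input is accepted instead of raising IDNAError:
print(idna.encode('Königsgäßchen', uts46=True))  # b'xn--knigsgchen-b4a3dun'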
    diff --git a/server/www/packages/packages-windows/x86/idna/idnadata.py b/server/www/packages/packages-windows/x86/idna/idnadata.py
    new file mode 100644
    index 0000000..2b81c52
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/idnadata.py
    @@ -0,0 +1,1991 @@
    +# This file is automatically generated by tools/idna-data
    +
    +__version__ = "12.1.0"
    +scripts = {
    +    'Greek': (
    +        0x37000000374,
    +        0x37500000378,
    +        0x37a0000037e,
    +        0x37f00000380,
    +        0x38400000385,
    +        0x38600000387,
    +        0x3880000038b,
    +        0x38c0000038d,
    +        0x38e000003a2,
    +        0x3a3000003e2,
    +        0x3f000000400,
    +        0x1d2600001d2b,
    +        0x1d5d00001d62,
    +        0x1d6600001d6b,
    +        0x1dbf00001dc0,
    +        0x1f0000001f16,
    +        0x1f1800001f1e,
    +        0x1f2000001f46,
    +        0x1f4800001f4e,
    +        0x1f5000001f58,
    +        0x1f5900001f5a,
    +        0x1f5b00001f5c,
    +        0x1f5d00001f5e,
    +        0x1f5f00001f7e,
    +        0x1f8000001fb5,
    +        0x1fb600001fc5,
    +        0x1fc600001fd4,
    +        0x1fd600001fdc,
    +        0x1fdd00001ff0,
    +        0x1ff200001ff5,
    +        0x1ff600001fff,
    +        0x212600002127,
    +        0xab650000ab66,
    +        0x101400001018f,
    +        0x101a0000101a1,
    +        0x1d2000001d246,
    +    ),
    +    'Han': (
    +        0x2e8000002e9a,
    +        0x2e9b00002ef4,
    +        0x2f0000002fd6,
    +        0x300500003006,
    +        0x300700003008,
    +        0x30210000302a,
    +        0x30380000303c,
    +        0x340000004db6,
    +        0x4e0000009ff0,
    +        0xf9000000fa6e,
    +        0xfa700000fada,
    +        0x200000002a6d7,
    +        0x2a7000002b735,
    +        0x2b7400002b81e,
    +        0x2b8200002cea2,
    +        0x2ceb00002ebe1,
    +        0x2f8000002fa1e,
    +    ),
    +    'Hebrew': (
    +        0x591000005c8,
    +        0x5d0000005eb,
    +        0x5ef000005f5,
    +        0xfb1d0000fb37,
    +        0xfb380000fb3d,
    +        0xfb3e0000fb3f,
    +        0xfb400000fb42,
    +        0xfb430000fb45,
    +        0xfb460000fb50,
    +    ),
    +    'Hiragana': (
    +        0x304100003097,
    +        0x309d000030a0,
    +        0x1b0010001b11f,
    +        0x1b1500001b153,
    +        0x1f2000001f201,
    +    ),
    +    'Katakana': (
    +        0x30a1000030fb,
    +        0x30fd00003100,
    +        0x31f000003200,
    +        0x32d0000032ff,
    +        0x330000003358,
    +        0xff660000ff70,
    +        0xff710000ff9e,
    +        0x1b0000001b001,
    +        0x1b1640001b168,
    +    ),
    +}
    +joining_types = {
    +    0x600: 85,
    +    0x601: 85,
    +    0x602: 85,
    +    0x603: 85,
    +    0x604: 85,
    +    0x605: 85,
    +    0x608: 85,
    +    0x60b: 85,
    +    0x620: 68,
    +    0x621: 85,
    +    0x622: 82,
    +    0x623: 82,
    +    0x624: 82,
    +    0x625: 82,
    +    0x626: 68,
    +    0x627: 82,
    +    0x628: 68,
    +    0x629: 82,
    +    0x62a: 68,
    +    0x62b: 68,
    +    0x62c: 68,
    +    0x62d: 68,
    +    0x62e: 68,
    +    0x62f: 82,
    +    0x630: 82,
    +    0x631: 82,
    +    0x632: 82,
    +    0x633: 68,
    +    0x634: 68,
    +    0x635: 68,
    +    0x636: 68,
    +    0x637: 68,
    +    0x638: 68,
    +    0x639: 68,
    +    0x63a: 68,
    +    0x63b: 68,
    +    0x63c: 68,
    +    0x63d: 68,
    +    0x63e: 68,
    +    0x63f: 68,
    +    0x640: 67,
    +    0x641: 68,
    +    0x642: 68,
    +    0x643: 68,
    +    0x644: 68,
    +    0x645: 68,
    +    0x646: 68,
    +    0x647: 68,
    +    0x648: 82,
    +    0x649: 68,
    +    0x64a: 68,
    +    0x66e: 68,
    +    0x66f: 68,
    +    0x671: 82,
    +    0x672: 82,
    +    0x673: 82,
    +    0x674: 85,
    +    0x675: 82,
    +    0x676: 82,
    +    0x677: 82,
    +    0x678: 68,
    +    0x679: 68,
    +    0x67a: 68,
    +    0x67b: 68,
    +    0x67c: 68,
    +    0x67d: 68,
    +    0x67e: 68,
    +    0x67f: 68,
    +    0x680: 68,
    +    0x681: 68,
    +    0x682: 68,
    +    0x683: 68,
    +    0x684: 68,
    +    0x685: 68,
    +    0x686: 68,
    +    0x687: 68,
    +    0x688: 82,
    +    0x689: 82,
    +    0x68a: 82,
    +    0x68b: 82,
    +    0x68c: 82,
    +    0x68d: 82,
    +    0x68e: 82,
    +    0x68f: 82,
    +    0x690: 82,
    +    0x691: 82,
    +    0x692: 82,
    +    0x693: 82,
    +    0x694: 82,
    +    0x695: 82,
    +    0x696: 82,
    +    0x697: 82,
    +    0x698: 82,
    +    0x699: 82,
    +    0x69a: 68,
    +    0x69b: 68,
    +    0x69c: 68,
    +    0x69d: 68,
    +    0x69e: 68,
    +    0x69f: 68,
    +    0x6a0: 68,
    +    0x6a1: 68,
    +    0x6a2: 68,
    +    0x6a3: 68,
    +    0x6a4: 68,
    +    0x6a5: 68,
    +    0x6a6: 68,
    +    0x6a7: 68,
    +    0x6a8: 68,
    +    0x6a9: 68,
    +    0x6aa: 68,
    +    0x6ab: 68,
    +    0x6ac: 68,
    +    0x6ad: 68,
    +    0x6ae: 68,
    +    0x6af: 68,
    +    0x6b0: 68,
    +    0x6b1: 68,
    +    0x6b2: 68,
    +    0x6b3: 68,
    +    0x6b4: 68,
    +    0x6b5: 68,
    +    0x6b6: 68,
    +    0x6b7: 68,
    +    0x6b8: 68,
    +    0x6b9: 68,
    +    0x6ba: 68,
    +    0x6bb: 68,
    +    0x6bc: 68,
    +    0x6bd: 68,
    +    0x6be: 68,
    +    0x6bf: 68,
    +    0x6c0: 82,
    +    0x6c1: 68,
    +    0x6c2: 68,
    +    0x6c3: 82,
    +    0x6c4: 82,
    +    0x6c5: 82,
    +    0x6c6: 82,
    +    0x6c7: 82,
    +    0x6c8: 82,
    +    0x6c9: 82,
    +    0x6ca: 82,
    +    0x6cb: 82,
    +    0x6cc: 68,
    +    0x6cd: 82,
    +    0x6ce: 68,
    +    0x6cf: 82,
    +    0x6d0: 68,
    +    0x6d1: 68,
    +    0x6d2: 82,
    +    0x6d3: 82,
    +    0x6d5: 82,
    +    0x6dd: 85,
    +    0x6ee: 82,
    +    0x6ef: 82,
    +    0x6fa: 68,
    +    0x6fb: 68,
    +    0x6fc: 68,
    +    0x6ff: 68,
    +    0x70f: 84,
    +    0x710: 82,
    +    0x712: 68,
    +    0x713: 68,
    +    0x714: 68,
    +    0x715: 82,
    +    0x716: 82,
    +    0x717: 82,
    +    0x718: 82,
    +    0x719: 82,
    +    0x71a: 68,
    +    0x71b: 68,
    +    0x71c: 68,
    +    0x71d: 68,
    +    0x71e: 82,
    +    0x71f: 68,
    +    0x720: 68,
    +    0x721: 68,
    +    0x722: 68,
    +    0x723: 68,
    +    0x724: 68,
    +    0x725: 68,
    +    0x726: 68,
    +    0x727: 68,
    +    0x728: 82,
    +    0x729: 68,
    +    0x72a: 82,
    +    0x72b: 68,
    +    0x72c: 82,
    +    0x72d: 68,
    +    0x72e: 68,
    +    0x72f: 82,
    +    0x74d: 82,
    +    0x74e: 68,
    +    0x74f: 68,
    +    0x750: 68,
    +    0x751: 68,
    +    0x752: 68,
    +    0x753: 68,
    +    0x754: 68,
    +    0x755: 68,
    +    0x756: 68,
    +    0x757: 68,
    +    0x758: 68,
    +    0x759: 82,
    +    0x75a: 82,
    +    0x75b: 82,
    +    0x75c: 68,
    +    0x75d: 68,
    +    0x75e: 68,
    +    0x75f: 68,
    +    0x760: 68,
    +    0x761: 68,
    +    0x762: 68,
    +    0x763: 68,
    +    0x764: 68,
    +    0x765: 68,
    +    0x766: 68,
    +    0x767: 68,
    +    0x768: 68,
    +    0x769: 68,
    +    0x76a: 68,
    +    0x76b: 82,
    +    0x76c: 82,
    +    0x76d: 68,
    +    0x76e: 68,
    +    0x76f: 68,
    +    0x770: 68,
    +    0x771: 82,
    +    0x772: 68,
    +    0x773: 82,
    +    0x774: 82,
    +    0x775: 68,
    +    0x776: 68,
    +    0x777: 68,
    +    0x778: 82,
    +    0x779: 82,
    +    0x77a: 68,
    +    0x77b: 68,
    +    0x77c: 68,
    +    0x77d: 68,
    +    0x77e: 68,
    +    0x77f: 68,
    +    0x7ca: 68,
    +    0x7cb: 68,
    +    0x7cc: 68,
    +    0x7cd: 68,
    +    0x7ce: 68,
    +    0x7cf: 68,
    +    0x7d0: 68,
    +    0x7d1: 68,
    +    0x7d2: 68,
    +    0x7d3: 68,
    +    0x7d4: 68,
    +    0x7d5: 68,
    +    0x7d6: 68,
    +    0x7d7: 68,
    +    0x7d8: 68,
    +    0x7d9: 68,
    +    0x7da: 68,
    +    0x7db: 68,
    +    0x7dc: 68,
    +    0x7dd: 68,
    +    0x7de: 68,
    +    0x7df: 68,
    +    0x7e0: 68,
    +    0x7e1: 68,
    +    0x7e2: 68,
    +    0x7e3: 68,
    +    0x7e4: 68,
    +    0x7e5: 68,
    +    0x7e6: 68,
    +    0x7e7: 68,
    +    0x7e8: 68,
    +    0x7e9: 68,
    +    0x7ea: 68,
    +    0x7fa: 67,
    +    0x840: 82,
    +    0x841: 68,
    +    0x842: 68,
    +    0x843: 68,
    +    0x844: 68,
    +    0x845: 68,
    +    0x846: 82,
    +    0x847: 82,
    +    0x848: 68,
    +    0x849: 82,
    +    0x84a: 68,
    +    0x84b: 68,
    +    0x84c: 68,
    +    0x84d: 68,
    +    0x84e: 68,
    +    0x84f: 68,
    +    0x850: 68,
    +    0x851: 68,
    +    0x852: 68,
    +    0x853: 68,
    +    0x854: 82,
    +    0x855: 68,
    +    0x856: 85,
    +    0x857: 85,
    +    0x858: 85,
    +    0x860: 68,
    +    0x861: 85,
    +    0x862: 68,
    +    0x863: 68,
    +    0x864: 68,
    +    0x865: 68,
    +    0x866: 85,
    +    0x867: 82,
    +    0x868: 68,
    +    0x869: 82,
    +    0x86a: 82,
    +    0x8a0: 68,
    +    0x8a1: 68,
    +    0x8a2: 68,
    +    0x8a3: 68,
    +    0x8a4: 68,
    +    0x8a5: 68,
    +    0x8a6: 68,
    +    0x8a7: 68,
    +    0x8a8: 68,
    +    0x8a9: 68,
    +    0x8aa: 82,
    +    0x8ab: 82,
    +    0x8ac: 82,
    +    0x8ad: 85,
    +    0x8ae: 82,
    +    0x8af: 68,
    +    0x8b0: 68,
    +    0x8b1: 82,
    +    0x8b2: 82,
    +    0x8b3: 68,
    +    0x8b4: 68,
    +    0x8b6: 68,
    +    0x8b7: 68,
    +    0x8b8: 68,
    +    0x8b9: 82,
    +    0x8ba: 68,
    +    0x8bb: 68,
    +    0x8bc: 68,
    +    0x8bd: 68,
    +    0x8e2: 85,
    +    0x1806: 85,
    +    0x1807: 68,
    +    0x180a: 67,
    +    0x180e: 85,
    +    0x1820: 68,
    +    0x1821: 68,
    +    0x1822: 68,
    +    0x1823: 68,
    +    0x1824: 68,
    +    0x1825: 68,
    +    0x1826: 68,
    +    0x1827: 68,
    +    0x1828: 68,
    +    0x1829: 68,
    +    0x182a: 68,
    +    0x182b: 68,
    +    0x182c: 68,
    +    0x182d: 68,
    +    0x182e: 68,
    +    0x182f: 68,
    +    0x1830: 68,
    +    0x1831: 68,
    +    0x1832: 68,
    +    0x1833: 68,
    +    0x1834: 68,
    +    0x1835: 68,
    +    0x1836: 68,
    +    0x1837: 68,
    +    0x1838: 68,
    +    0x1839: 68,
    +    0x183a: 68,
    +    0x183b: 68,
    +    0x183c: 68,
    +    0x183d: 68,
    +    0x183e: 68,
    +    0x183f: 68,
    +    0x1840: 68,
    +    0x1841: 68,
    +    0x1842: 68,
    +    0x1843: 68,
    +    0x1844: 68,
    +    0x1845: 68,
    +    0x1846: 68,
    +    0x1847: 68,
    +    0x1848: 68,
    +    0x1849: 68,
    +    0x184a: 68,
    +    0x184b: 68,
    +    0x184c: 68,
    +    0x184d: 68,
    +    0x184e: 68,
    +    0x184f: 68,
    +    0x1850: 68,
    +    0x1851: 68,
    +    0x1852: 68,
    +    0x1853: 68,
    +    0x1854: 68,
    +    0x1855: 68,
    +    0x1856: 68,
    +    0x1857: 68,
    +    0x1858: 68,
    +    0x1859: 68,
    +    0x185a: 68,
    +    0x185b: 68,
    +    0x185c: 68,
    +    0x185d: 68,
    +    0x185e: 68,
    +    0x185f: 68,
    +    0x1860: 68,
    +    0x1861: 68,
    +    0x1862: 68,
    +    0x1863: 68,
    +    0x1864: 68,
    +    0x1865: 68,
    +    0x1866: 68,
    +    0x1867: 68,
    +    0x1868: 68,
    +    0x1869: 68,
    +    0x186a: 68,
    +    0x186b: 68,
    +    0x186c: 68,
    +    0x186d: 68,
    +    0x186e: 68,
    +    0x186f: 68,
    +    0x1870: 68,
    +    0x1871: 68,
    +    0x1872: 68,
    +    0x1873: 68,
    +    0x1874: 68,
    +    0x1875: 68,
    +    0x1876: 68,
    +    0x1877: 68,
    +    0x1878: 68,
    +    0x1880: 85,
    +    0x1881: 85,
    +    0x1882: 85,
    +    0x1883: 85,
    +    0x1884: 85,
    +    0x1885: 84,
    +    0x1886: 84,
    +    0x1887: 68,
    +    0x1888: 68,
    +    0x1889: 68,
    +    0x188a: 68,
    +    0x188b: 68,
    +    0x188c: 68,
    +    0x188d: 68,
    +    0x188e: 68,
    +    0x188f: 68,
    +    0x1890: 68,
    +    0x1891: 68,
    +    0x1892: 68,
    +    0x1893: 68,
    +    0x1894: 68,
    +    0x1895: 68,
    +    0x1896: 68,
    +    0x1897: 68,
    +    0x1898: 68,
    +    0x1899: 68,
    +    0x189a: 68,
    +    0x189b: 68,
    +    0x189c: 68,
    +    0x189d: 68,
    +    0x189e: 68,
    +    0x189f: 68,
    +    0x18a0: 68,
    +    0x18a1: 68,
    +    0x18a2: 68,
    +    0x18a3: 68,
    +    0x18a4: 68,
    +    0x18a5: 68,
    +    0x18a6: 68,
    +    0x18a7: 68,
    +    0x18a8: 68,
    +    0x18aa: 68,
    +    0x200c: 85,
    +    0x200d: 67,
    +    0x202f: 85,
    +    0x2066: 85,
    +    0x2067: 85,
    +    0x2068: 85,
    +    0x2069: 85,
    +    0xa840: 68,
    +    0xa841: 68,
    +    0xa842: 68,
    +    0xa843: 68,
    +    0xa844: 68,
    +    0xa845: 68,
    +    0xa846: 68,
    +    0xa847: 68,
    +    0xa848: 68,
    +    0xa849: 68,
    +    0xa84a: 68,
    +    0xa84b: 68,
    +    0xa84c: 68,
    +    0xa84d: 68,
    +    0xa84e: 68,
    +    0xa84f: 68,
    +    0xa850: 68,
    +    0xa851: 68,
    +    0xa852: 68,
    +    0xa853: 68,
    +    0xa854: 68,
    +    0xa855: 68,
    +    0xa856: 68,
    +    0xa857: 68,
    +    0xa858: 68,
    +    0xa859: 68,
    +    0xa85a: 68,
    +    0xa85b: 68,
    +    0xa85c: 68,
    +    0xa85d: 68,
    +    0xa85e: 68,
    +    0xa85f: 68,
    +    0xa860: 68,
    +    0xa861: 68,
    +    0xa862: 68,
    +    0xa863: 68,
    +    0xa864: 68,
    +    0xa865: 68,
    +    0xa866: 68,
    +    0xa867: 68,
    +    0xa868: 68,
    +    0xa869: 68,
    +    0xa86a: 68,
    +    0xa86b: 68,
    +    0xa86c: 68,
    +    0xa86d: 68,
    +    0xa86e: 68,
    +    0xa86f: 68,
    +    0xa870: 68,
    +    0xa871: 68,
    +    0xa872: 76,
    +    0xa873: 85,
    +    0x10ac0: 68,
    +    0x10ac1: 68,
    +    0x10ac2: 68,
    +    0x10ac3: 68,
    +    0x10ac4: 68,
    +    0x10ac5: 82,
    +    0x10ac6: 85,
    +    0x10ac7: 82,
    +    0x10ac8: 85,
    +    0x10ac9: 82,
    +    0x10aca: 82,
    +    0x10acb: 85,
    +    0x10acc: 85,
    +    0x10acd: 76,
    +    0x10ace: 82,
    +    0x10acf: 82,
    +    0x10ad0: 82,
    +    0x10ad1: 82,
    +    0x10ad2: 82,
    +    0x10ad3: 68,
    +    0x10ad4: 68,
    +    0x10ad5: 68,
    +    0x10ad6: 68,
    +    0x10ad7: 76,
    +    0x10ad8: 68,
    +    0x10ad9: 68,
    +    0x10ada: 68,
    +    0x10adb: 68,
    +    0x10adc: 68,
    +    0x10add: 82,
    +    0x10ade: 68,
    +    0x10adf: 68,
    +    0x10ae0: 68,
    +    0x10ae1: 82,
    +    0x10ae2: 85,
    +    0x10ae3: 85,
    +    0x10ae4: 82,
    +    0x10aeb: 68,
    +    0x10aec: 68,
    +    0x10aed: 68,
    +    0x10aee: 68,
    +    0x10aef: 82,
    +    0x10b80: 68,
    +    0x10b81: 82,
    +    0x10b82: 68,
    +    0x10b83: 82,
    +    0x10b84: 82,
    +    0x10b85: 82,
    +    0x10b86: 68,
    +    0x10b87: 68,
    +    0x10b88: 68,
    +    0x10b89: 82,
    +    0x10b8a: 68,
    +    0x10b8b: 68,
    +    0x10b8c: 82,
    +    0x10b8d: 68,
    +    0x10b8e: 82,
    +    0x10b8f: 82,
    +    0x10b90: 68,
    +    0x10b91: 82,
    +    0x10ba9: 82,
    +    0x10baa: 82,
    +    0x10bab: 82,
    +    0x10bac: 82,
    +    0x10bad: 68,
    +    0x10bae: 68,
    +    0x10baf: 85,
    +    0x10d00: 76,
    +    0x10d01: 68,
    +    0x10d02: 68,
    +    0x10d03: 68,
    +    0x10d04: 68,
    +    0x10d05: 68,
    +    0x10d06: 68,
    +    0x10d07: 68,
    +    0x10d08: 68,
    +    0x10d09: 68,
    +    0x10d0a: 68,
    +    0x10d0b: 68,
    +    0x10d0c: 68,
    +    0x10d0d: 68,
    +    0x10d0e: 68,
    +    0x10d0f: 68,
    +    0x10d10: 68,
    +    0x10d11: 68,
    +    0x10d12: 68,
    +    0x10d13: 68,
    +    0x10d14: 68,
    +    0x10d15: 68,
    +    0x10d16: 68,
    +    0x10d17: 68,
    +    0x10d18: 68,
    +    0x10d19: 68,
    +    0x10d1a: 68,
    +    0x10d1b: 68,
    +    0x10d1c: 68,
    +    0x10d1d: 68,
    +    0x10d1e: 68,
    +    0x10d1f: 68,
    +    0x10d20: 68,
    +    0x10d21: 68,
    +    0x10d22: 82,
    +    0x10d23: 68,
    +    0x10f30: 68,
    +    0x10f31: 68,
    +    0x10f32: 68,
    +    0x10f33: 82,
    +    0x10f34: 68,
    +    0x10f35: 68,
    +    0x10f36: 68,
    +    0x10f37: 68,
    +    0x10f38: 68,
    +    0x10f39: 68,
    +    0x10f3a: 68,
    +    0x10f3b: 68,
    +    0x10f3c: 68,
    +    0x10f3d: 68,
    +    0x10f3e: 68,
    +    0x10f3f: 68,
    +    0x10f40: 68,
    +    0x10f41: 68,
    +    0x10f42: 68,
    +    0x10f43: 68,
    +    0x10f44: 68,
    +    0x10f45: 85,
    +    0x10f51: 68,
    +    0x10f52: 68,
    +    0x10f53: 68,
    +    0x10f54: 82,
    +    0x110bd: 85,
    +    0x110cd: 85,
    +    0x1e900: 68,
    +    0x1e901: 68,
    +    0x1e902: 68,
    +    0x1e903: 68,
    +    0x1e904: 68,
    +    0x1e905: 68,
    +    0x1e906: 68,
    +    0x1e907: 68,
    +    0x1e908: 68,
    +    0x1e909: 68,
    +    0x1e90a: 68,
    +    0x1e90b: 68,
    +    0x1e90c: 68,
    +    0x1e90d: 68,
    +    0x1e90e: 68,
    +    0x1e90f: 68,
    +    0x1e910: 68,
    +    0x1e911: 68,
    +    0x1e912: 68,
    +    0x1e913: 68,
    +    0x1e914: 68,
    +    0x1e915: 68,
    +    0x1e916: 68,
    +    0x1e917: 68,
    +    0x1e918: 68,
    +    0x1e919: 68,
    +    0x1e91a: 68,
    +    0x1e91b: 68,
    +    0x1e91c: 68,
    +    0x1e91d: 68,
    +    0x1e91e: 68,
    +    0x1e91f: 68,
    +    0x1e920: 68,
    +    0x1e921: 68,
    +    0x1e922: 68,
    +    0x1e923: 68,
    +    0x1e924: 68,
    +    0x1e925: 68,
    +    0x1e926: 68,
    +    0x1e927: 68,
    +    0x1e928: 68,
    +    0x1e929: 68,
    +    0x1e92a: 68,
    +    0x1e92b: 68,
    +    0x1e92c: 68,
    +    0x1e92d: 68,
    +    0x1e92e: 68,
    +    0x1e92f: 68,
    +    0x1e930: 68,
    +    0x1e931: 68,
    +    0x1e932: 68,
    +    0x1e933: 68,
    +    0x1e934: 68,
    +    0x1e935: 68,
    +    0x1e936: 68,
    +    0x1e937: 68,
    +    0x1e938: 68,
    +    0x1e939: 68,
    +    0x1e93a: 68,
    +    0x1e93b: 68,
    +    0x1e93c: 68,
    +    0x1e93d: 68,
    +    0x1e93e: 68,
    +    0x1e93f: 68,
    +    0x1e940: 68,
    +    0x1e941: 68,
    +    0x1e942: 68,
    +    0x1e943: 68,
    +    0x1e94b: 84,
    +}
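    +# The integer values above are the ord() codes of Unicode Joining_Type
    +# classes: 68='D' (dual-joining), 85='U' (non-joining), 82='R' (right),
    +# 76='L' (left), 84='T' (transparent), 67='C' (join-causing).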
    +codepoint_classes = {
    +    'PVALID': (
    +        0x2d0000002e,
    +        0x300000003a,
    +        0x610000007b,
    +        0xdf000000f7,
    +        0xf800000100,
    +        0x10100000102,
    +        0x10300000104,
    +        0x10500000106,
    +        0x10700000108,
    +        0x1090000010a,
    +        0x10b0000010c,
    +        0x10d0000010e,
    +        0x10f00000110,
    +        0x11100000112,
    +        0x11300000114,
    +        0x11500000116,
    +        0x11700000118,
    +        0x1190000011a,
    +        0x11b0000011c,
    +        0x11d0000011e,
    +        0x11f00000120,
    +        0x12100000122,
    +        0x12300000124,
    +        0x12500000126,
    +        0x12700000128,
    +        0x1290000012a,
    +        0x12b0000012c,
    +        0x12d0000012e,
    +        0x12f00000130,
    +        0x13100000132,
    +        0x13500000136,
    +        0x13700000139,
    +        0x13a0000013b,
    +        0x13c0000013d,
    +        0x13e0000013f,
    +        0x14200000143,
    +        0x14400000145,
    +        0x14600000147,
    +        0x14800000149,
    +        0x14b0000014c,
    +        0x14d0000014e,
    +        0x14f00000150,
    +        0x15100000152,
    +        0x15300000154,
    +        0x15500000156,
    +        0x15700000158,
    +        0x1590000015a,
    +        0x15b0000015c,
    +        0x15d0000015e,
    +        0x15f00000160,
    +        0x16100000162,
    +        0x16300000164,
    +        0x16500000166,
    +        0x16700000168,
    +        0x1690000016a,
    +        0x16b0000016c,
    +        0x16d0000016e,
    +        0x16f00000170,
    +        0x17100000172,
    +        0x17300000174,
    +        0x17500000176,
    +        0x17700000178,
    +        0x17a0000017b,
    +        0x17c0000017d,
    +        0x17e0000017f,
    +        0x18000000181,
    +        0x18300000184,
    +        0x18500000186,
    +        0x18800000189,
    +        0x18c0000018e,
    +        0x19200000193,
    +        0x19500000196,
    +        0x1990000019c,
    +        0x19e0000019f,
    +        0x1a1000001a2,
    +        0x1a3000001a4,
    +        0x1a5000001a6,
    +        0x1a8000001a9,
    +        0x1aa000001ac,
    +        0x1ad000001ae,
    +        0x1b0000001b1,
    +        0x1b4000001b5,
    +        0x1b6000001b7,
    +        0x1b9000001bc,
    +        0x1bd000001c4,
    +        0x1ce000001cf,
    +        0x1d0000001d1,
    +        0x1d2000001d3,
    +        0x1d4000001d5,
    +        0x1d6000001d7,
    +        0x1d8000001d9,
    +        0x1da000001db,
    +        0x1dc000001de,
    +        0x1df000001e0,
    +        0x1e1000001e2,
    +        0x1e3000001e4,
    +        0x1e5000001e6,
    +        0x1e7000001e8,
    +        0x1e9000001ea,
    +        0x1eb000001ec,
    +        0x1ed000001ee,
    +        0x1ef000001f1,
    +        0x1f5000001f6,
    +        0x1f9000001fa,
    +        0x1fb000001fc,
    +        0x1fd000001fe,
    +        0x1ff00000200,
    +        0x20100000202,
    +        0x20300000204,
    +        0x20500000206,
    +        0x20700000208,
    +        0x2090000020a,
    +        0x20b0000020c,
    +        0x20d0000020e,
    +        0x20f00000210,
    +        0x21100000212,
    +        0x21300000214,
    +        0x21500000216,
    +        0x21700000218,
    +        0x2190000021a,
    +        0x21b0000021c,
    +        0x21d0000021e,
    +        0x21f00000220,
    +        0x22100000222,
    +        0x22300000224,
    +        0x22500000226,
    +        0x22700000228,
    +        0x2290000022a,
    +        0x22b0000022c,
    +        0x22d0000022e,
    +        0x22f00000230,
    +        0x23100000232,
    +        0x2330000023a,
    +        0x23c0000023d,
    +        0x23f00000241,
    +        0x24200000243,
    +        0x24700000248,
    +        0x2490000024a,
    +        0x24b0000024c,
    +        0x24d0000024e,
    +        0x24f000002b0,
    +        0x2b9000002c2,
    +        0x2c6000002d2,
    +        0x2ec000002ed,
    +        0x2ee000002ef,
    +        0x30000000340,
    +        0x34200000343,
    +        0x3460000034f,
    +        0x35000000370,
    +        0x37100000372,
    +        0x37300000374,
    +        0x37700000378,
    +        0x37b0000037e,
    +        0x39000000391,
    +        0x3ac000003cf,
    +        0x3d7000003d8,
    +        0x3d9000003da,
    +        0x3db000003dc,
    +        0x3dd000003de,
    +        0x3df000003e0,
    +        0x3e1000003e2,
    +        0x3e3000003e4,
    +        0x3e5000003e6,
    +        0x3e7000003e8,
    +        0x3e9000003ea,
    +        0x3eb000003ec,
    +        0x3ed000003ee,
    +        0x3ef000003f0,
    +        0x3f3000003f4,
    +        0x3f8000003f9,
    +        0x3fb000003fd,
    +        0x43000000460,
    +        0x46100000462,
    +        0x46300000464,
    +        0x46500000466,
    +        0x46700000468,
    +        0x4690000046a,
    +        0x46b0000046c,
    +        0x46d0000046e,
    +        0x46f00000470,
    +        0x47100000472,
    +        0x47300000474,
    +        0x47500000476,
    +        0x47700000478,
    +        0x4790000047a,
    +        0x47b0000047c,
    +        0x47d0000047e,
    +        0x47f00000480,
    +        0x48100000482,
    +        0x48300000488,
    +        0x48b0000048c,
    +        0x48d0000048e,
    +        0x48f00000490,
    +        0x49100000492,
    +        0x49300000494,
    +        0x49500000496,
    +        0x49700000498,
    +        0x4990000049a,
    +        0x49b0000049c,
    +        0x49d0000049e,
    +        0x49f000004a0,
    +        0x4a1000004a2,
    +        0x4a3000004a4,
    +        0x4a5000004a6,
    +        0x4a7000004a8,
    +        0x4a9000004aa,
    +        0x4ab000004ac,
    +        0x4ad000004ae,
    +        0x4af000004b0,
    +        0x4b1000004b2,
    +        0x4b3000004b4,
    +        0x4b5000004b6,
    +        0x4b7000004b8,
    +        0x4b9000004ba,
    +        0x4bb000004bc,
    +        0x4bd000004be,
    +        0x4bf000004c0,
    +        0x4c2000004c3,
    +        0x4c4000004c5,
    +        0x4c6000004c7,
    +        0x4c8000004c9,
    +        0x4ca000004cb,
    +        0x4cc000004cd,
    +        0x4ce000004d0,
    +        0x4d1000004d2,
    +        0x4d3000004d4,
    +        0x4d5000004d6,
    +        0x4d7000004d8,
    +        0x4d9000004da,
    +        0x4db000004dc,
    +        0x4dd000004de,
    +        0x4df000004e0,
    +        0x4e1000004e2,
    +        0x4e3000004e4,
    +        0x4e5000004e6,
    +        0x4e7000004e8,
    +        0x4e9000004ea,
    +        0x4eb000004ec,
    +        0x4ed000004ee,
    +        0x4ef000004f0,
    +        0x4f1000004f2,
    +        0x4f3000004f4,
    +        0x4f5000004f6,
    +        0x4f7000004f8,
    +        0x4f9000004fa,
    +        0x4fb000004fc,
    +        0x4fd000004fe,
    +        0x4ff00000500,
    +        0x50100000502,
    +        0x50300000504,
    +        0x50500000506,
    +        0x50700000508,
    +        0x5090000050a,
    +        0x50b0000050c,
    +        0x50d0000050e,
    +        0x50f00000510,
    +        0x51100000512,
    +        0x51300000514,
    +        0x51500000516,
    +        0x51700000518,
    +        0x5190000051a,
    +        0x51b0000051c,
    +        0x51d0000051e,
    +        0x51f00000520,
    +        0x52100000522,
    +        0x52300000524,
    +        0x52500000526,
    +        0x52700000528,
    +        0x5290000052a,
    +        0x52b0000052c,
    +        0x52d0000052e,
    +        0x52f00000530,
    +        0x5590000055a,
    +        0x56000000587,
    +        0x58800000589,
    +        0x591000005be,
    +        0x5bf000005c0,
    +        0x5c1000005c3,
    +        0x5c4000005c6,
    +        0x5c7000005c8,
    +        0x5d0000005eb,
    +        0x5ef000005f3,
    +        0x6100000061b,
    +        0x62000000640,
    +        0x64100000660,
    +        0x66e00000675,
    +        0x679000006d4,
    +        0x6d5000006dd,
    +        0x6df000006e9,
    +        0x6ea000006f0,
    +        0x6fa00000700,
    +        0x7100000074b,
    +        0x74d000007b2,
    +        0x7c0000007f6,
    +        0x7fd000007fe,
    +        0x8000000082e,
    +        0x8400000085c,
    +        0x8600000086b,
    +        0x8a0000008b5,
    +        0x8b6000008be,
    +        0x8d3000008e2,
    +        0x8e300000958,
    +        0x96000000964,
    +        0x96600000970,
    +        0x97100000984,
    +        0x9850000098d,
    +        0x98f00000991,
    +        0x993000009a9,
    +        0x9aa000009b1,
    +        0x9b2000009b3,
    +        0x9b6000009ba,
    +        0x9bc000009c5,
    +        0x9c7000009c9,
    +        0x9cb000009cf,
    +        0x9d7000009d8,
    +        0x9e0000009e4,
    +        0x9e6000009f2,
    +        0x9fc000009fd,
    +        0x9fe000009ff,
    +        0xa0100000a04,
    +        0xa0500000a0b,
    +        0xa0f00000a11,
    +        0xa1300000a29,
    +        0xa2a00000a31,
    +        0xa3200000a33,
    +        0xa3500000a36,
    +        0xa3800000a3a,
    +        0xa3c00000a3d,
    +        0xa3e00000a43,
    +        0xa4700000a49,
    +        0xa4b00000a4e,
    +        0xa5100000a52,
    +        0xa5c00000a5d,
    +        0xa6600000a76,
    +        0xa8100000a84,
    +        0xa8500000a8e,
    +        0xa8f00000a92,
    +        0xa9300000aa9,
    +        0xaaa00000ab1,
    +        0xab200000ab4,
    +        0xab500000aba,
    +        0xabc00000ac6,
    +        0xac700000aca,
    +        0xacb00000ace,
    +        0xad000000ad1,
    +        0xae000000ae4,
    +        0xae600000af0,
    +        0xaf900000b00,
    +        0xb0100000b04,
    +        0xb0500000b0d,
    +        0xb0f00000b11,
    +        0xb1300000b29,
    +        0xb2a00000b31,
    +        0xb3200000b34,
    +        0xb3500000b3a,
    +        0xb3c00000b45,
    +        0xb4700000b49,
    +        0xb4b00000b4e,
    +        0xb5600000b58,
    +        0xb5f00000b64,
    +        0xb6600000b70,
    +        0xb7100000b72,
    +        0xb8200000b84,
    +        0xb8500000b8b,
    +        0xb8e00000b91,
    +        0xb9200000b96,
    +        0xb9900000b9b,
    +        0xb9c00000b9d,
    +        0xb9e00000ba0,
    +        0xba300000ba5,
    +        0xba800000bab,
    +        0xbae00000bba,
    +        0xbbe00000bc3,
    +        0xbc600000bc9,
    +        0xbca00000bce,
    +        0xbd000000bd1,
    +        0xbd700000bd8,
    +        0xbe600000bf0,
    +        0xc0000000c0d,
    +        0xc0e00000c11,
    +        0xc1200000c29,
    +        0xc2a00000c3a,
    +        0xc3d00000c45,
    +        0xc4600000c49,
    +        0xc4a00000c4e,
    +        0xc5500000c57,
    +        0xc5800000c5b,
    +        0xc6000000c64,
    +        0xc6600000c70,
    +        0xc8000000c84,
    +        0xc8500000c8d,
    +        0xc8e00000c91,
    +        0xc9200000ca9,
    +        0xcaa00000cb4,
    +        0xcb500000cba,
    +        0xcbc00000cc5,
    +        0xcc600000cc9,
    +        0xcca00000cce,
    +        0xcd500000cd7,
    +        0xcde00000cdf,
    +        0xce000000ce4,
    +        0xce600000cf0,
    +        0xcf100000cf3,
    +        0xd0000000d04,
    +        0xd0500000d0d,
    +        0xd0e00000d11,
    +        0xd1200000d45,
    +        0xd4600000d49,
    +        0xd4a00000d4f,
    +        0xd5400000d58,
    +        0xd5f00000d64,
    +        0xd6600000d70,
    +        0xd7a00000d80,
    +        0xd8200000d84,
    +        0xd8500000d97,
    +        0xd9a00000db2,
    +        0xdb300000dbc,
    +        0xdbd00000dbe,
    +        0xdc000000dc7,
    +        0xdca00000dcb,
    +        0xdcf00000dd5,
    +        0xdd600000dd7,
    +        0xdd800000de0,
    +        0xde600000df0,
    +        0xdf200000df4,
    +        0xe0100000e33,
    +        0xe3400000e3b,
    +        0xe4000000e4f,
    +        0xe5000000e5a,
    +        0xe8100000e83,
    +        0xe8400000e85,
    +        0xe8600000e8b,
    +        0xe8c00000ea4,
    +        0xea500000ea6,
    +        0xea700000eb3,
    +        0xeb400000ebe,
    +        0xec000000ec5,
    +        0xec600000ec7,
    +        0xec800000ece,
    +        0xed000000eda,
    +        0xede00000ee0,
    +        0xf0000000f01,
    +        0xf0b00000f0c,
    +        0xf1800000f1a,
    +        0xf2000000f2a,
    +        0xf3500000f36,
    +        0xf3700000f38,
    +        0xf3900000f3a,
    +        0xf3e00000f43,
    +        0xf4400000f48,
    +        0xf4900000f4d,
    +        0xf4e00000f52,
    +        0xf5300000f57,
    +        0xf5800000f5c,
    +        0xf5d00000f69,
    +        0xf6a00000f6d,
    +        0xf7100000f73,
    +        0xf7400000f75,
    +        0xf7a00000f81,
    +        0xf8200000f85,
    +        0xf8600000f93,
    +        0xf9400000f98,
    +        0xf9900000f9d,
    +        0xf9e00000fa2,
    +        0xfa300000fa7,
    +        0xfa800000fac,
    +        0xfad00000fb9,
    +        0xfba00000fbd,
    +        0xfc600000fc7,
    +        0x10000000104a,
    +        0x10500000109e,
    +        0x10d0000010fb,
    +        0x10fd00001100,
    +        0x120000001249,
    +        0x124a0000124e,
    +        0x125000001257,
    +        0x125800001259,
    +        0x125a0000125e,
    +        0x126000001289,
    +        0x128a0000128e,
    +        0x1290000012b1,
    +        0x12b2000012b6,
    +        0x12b8000012bf,
    +        0x12c0000012c1,
    +        0x12c2000012c6,
    +        0x12c8000012d7,
    +        0x12d800001311,
    +        0x131200001316,
    +        0x13180000135b,
    +        0x135d00001360,
    +        0x138000001390,
    +        0x13a0000013f6,
    +        0x14010000166d,
    +        0x166f00001680,
    +        0x16810000169b,
    +        0x16a0000016eb,
    +        0x16f1000016f9,
    +        0x17000000170d,
    +        0x170e00001715,
    +        0x172000001735,
    +        0x174000001754,
    +        0x17600000176d,
    +        0x176e00001771,
    +        0x177200001774,
    +        0x1780000017b4,
    +        0x17b6000017d4,
    +        0x17d7000017d8,
    +        0x17dc000017de,
    +        0x17e0000017ea,
    +        0x18100000181a,
    +        0x182000001879,
    +        0x1880000018ab,
    +        0x18b0000018f6,
    +        0x19000000191f,
    +        0x19200000192c,
    +        0x19300000193c,
    +        0x19460000196e,
    +        0x197000001975,
    +        0x1980000019ac,
    +        0x19b0000019ca,
    +        0x19d0000019da,
    +        0x1a0000001a1c,
    +        0x1a2000001a5f,
    +        0x1a6000001a7d,
    +        0x1a7f00001a8a,
    +        0x1a9000001a9a,
    +        0x1aa700001aa8,
    +        0x1ab000001abe,
    +        0x1b0000001b4c,
    +        0x1b5000001b5a,
    +        0x1b6b00001b74,
    +        0x1b8000001bf4,
    +        0x1c0000001c38,
    +        0x1c4000001c4a,
    +        0x1c4d00001c7e,
    +        0x1cd000001cd3,
    +        0x1cd400001cfb,
    +        0x1d0000001d2c,
    +        0x1d2f00001d30,
    +        0x1d3b00001d3c,
    +        0x1d4e00001d4f,
    +        0x1d6b00001d78,
    +        0x1d7900001d9b,
    +        0x1dc000001dfa,
    +        0x1dfb00001e00,
    +        0x1e0100001e02,
    +        0x1e0300001e04,
    +        0x1e0500001e06,
    +        0x1e0700001e08,
    +        0x1e0900001e0a,
    +        0x1e0b00001e0c,
    +        0x1e0d00001e0e,
    +        0x1e0f00001e10,
    +        0x1e1100001e12,
    +        0x1e1300001e14,
    +        0x1e1500001e16,
    +        0x1e1700001e18,
    +        0x1e1900001e1a,
    +        0x1e1b00001e1c,
    +        0x1e1d00001e1e,
    +        0x1e1f00001e20,
    +        0x1e2100001e22,
    +        0x1e2300001e24,
    +        0x1e2500001e26,
    +        0x1e2700001e28,
    +        0x1e2900001e2a,
    +        0x1e2b00001e2c,
    +        0x1e2d00001e2e,
    +        0x1e2f00001e30,
    +        0x1e3100001e32,
    +        0x1e3300001e34,
    +        0x1e3500001e36,
    +        0x1e3700001e38,
    +        0x1e3900001e3a,
    +        0x1e3b00001e3c,
    +        0x1e3d00001e3e,
    +        0x1e3f00001e40,
    +        0x1e4100001e42,
    +        0x1e4300001e44,
    +        0x1e4500001e46,
    +        0x1e4700001e48,
    +        0x1e4900001e4a,
    +        0x1e4b00001e4c,
    +        0x1e4d00001e4e,
    +        0x1e4f00001e50,
    +        0x1e5100001e52,
    +        0x1e5300001e54,
    +        0x1e5500001e56,
    +        0x1e5700001e58,
    +        0x1e5900001e5a,
    +        0x1e5b00001e5c,
    +        0x1e5d00001e5e,
    +        0x1e5f00001e60,
    +        0x1e6100001e62,
    +        0x1e6300001e64,
    +        0x1e6500001e66,
    +        0x1e6700001e68,
    +        0x1e6900001e6a,
    +        0x1e6b00001e6c,
    +        0x1e6d00001e6e,
    +        0x1e6f00001e70,
    +        0x1e7100001e72,
    +        0x1e7300001e74,
    +        0x1e7500001e76,
    +        0x1e7700001e78,
    +        0x1e7900001e7a,
    +        0x1e7b00001e7c,
    +        0x1e7d00001e7e,
    +        0x1e7f00001e80,
    +        0x1e8100001e82,
    +        0x1e8300001e84,
    +        0x1e8500001e86,
    +        0x1e8700001e88,
    +        0x1e8900001e8a,
    +        0x1e8b00001e8c,
    +        0x1e8d00001e8e,
    +        0x1e8f00001e90,
    +        0x1e9100001e92,
    +        0x1e9300001e94,
    +        0x1e9500001e9a,
    +        0x1e9c00001e9e,
    +        0x1e9f00001ea0,
    +        0x1ea100001ea2,
    +        0x1ea300001ea4,
    +        0x1ea500001ea6,
    +        0x1ea700001ea8,
    +        0x1ea900001eaa,
    +        0x1eab00001eac,
    +        0x1ead00001eae,
    +        0x1eaf00001eb0,
    +        0x1eb100001eb2,
    +        0x1eb300001eb4,
    +        0x1eb500001eb6,
    +        0x1eb700001eb8,
    +        0x1eb900001eba,
    +        0x1ebb00001ebc,
    +        0x1ebd00001ebe,
    +        0x1ebf00001ec0,
    +        0x1ec100001ec2,
    +        0x1ec300001ec4,
    +        0x1ec500001ec6,
    +        0x1ec700001ec8,
    +        0x1ec900001eca,
    +        0x1ecb00001ecc,
    +        0x1ecd00001ece,
    +        0x1ecf00001ed0,
    +        0x1ed100001ed2,
    +        0x1ed300001ed4,
    +        0x1ed500001ed6,
    +        0x1ed700001ed8,
    +        0x1ed900001eda,
    +        0x1edb00001edc,
    +        0x1edd00001ede,
    +        0x1edf00001ee0,
    +        0x1ee100001ee2,
    +        0x1ee300001ee4,
    +        0x1ee500001ee6,
    +        0x1ee700001ee8,
    +        0x1ee900001eea,
    +        0x1eeb00001eec,
    +        0x1eed00001eee,
    +        0x1eef00001ef0,
    +        0x1ef100001ef2,
    +        0x1ef300001ef4,
    +        0x1ef500001ef6,
    +        0x1ef700001ef8,
    +        0x1ef900001efa,
    +        0x1efb00001efc,
    +        0x1efd00001efe,
    +        0x1eff00001f08,
    +        0x1f1000001f16,
    +        0x1f2000001f28,
    +        0x1f3000001f38,
    +        0x1f4000001f46,
    +        0x1f5000001f58,
    +        0x1f6000001f68,
    +        0x1f7000001f71,
    +        0x1f7200001f73,
    +        0x1f7400001f75,
    +        0x1f7600001f77,
    +        0x1f7800001f79,
    +        0x1f7a00001f7b,
    +        0x1f7c00001f7d,
    +        0x1fb000001fb2,
    +        0x1fb600001fb7,
    +        0x1fc600001fc7,
    +        0x1fd000001fd3,
    +        0x1fd600001fd8,
    +        0x1fe000001fe3,
    +        0x1fe400001fe8,
    +        0x1ff600001ff7,
    +        0x214e0000214f,
    +        0x218400002185,
    +        0x2c3000002c5f,
    +        0x2c6100002c62,
    +        0x2c6500002c67,
    +        0x2c6800002c69,
    +        0x2c6a00002c6b,
    +        0x2c6c00002c6d,
    +        0x2c7100002c72,
    +        0x2c7300002c75,
    +        0x2c7600002c7c,
    +        0x2c8100002c82,
    +        0x2c8300002c84,
    +        0x2c8500002c86,
    +        0x2c8700002c88,
    +        0x2c8900002c8a,
    +        0x2c8b00002c8c,
    +        0x2c8d00002c8e,
    +        0x2c8f00002c90,
    +        0x2c9100002c92,
    +        0x2c9300002c94,
    +        0x2c9500002c96,
    +        0x2c9700002c98,
    +        0x2c9900002c9a,
    +        0x2c9b00002c9c,
    +        0x2c9d00002c9e,
    +        0x2c9f00002ca0,
    +        0x2ca100002ca2,
    +        0x2ca300002ca4,
    +        0x2ca500002ca6,
    +        0x2ca700002ca8,
    +        0x2ca900002caa,
    +        0x2cab00002cac,
    +        0x2cad00002cae,
    +        0x2caf00002cb0,
    +        0x2cb100002cb2,
    +        0x2cb300002cb4,
    +        0x2cb500002cb6,
    +        0x2cb700002cb8,
    +        0x2cb900002cba,
    +        0x2cbb00002cbc,
    +        0x2cbd00002cbe,
    +        0x2cbf00002cc0,
    +        0x2cc100002cc2,
    +        0x2cc300002cc4,
    +        0x2cc500002cc6,
    +        0x2cc700002cc8,
    +        0x2cc900002cca,
    +        0x2ccb00002ccc,
    +        0x2ccd00002cce,
    +        0x2ccf00002cd0,
    +        0x2cd100002cd2,
    +        0x2cd300002cd4,
    +        0x2cd500002cd6,
    +        0x2cd700002cd8,
    +        0x2cd900002cda,
    +        0x2cdb00002cdc,
    +        0x2cdd00002cde,
    +        0x2cdf00002ce0,
    +        0x2ce100002ce2,
    +        0x2ce300002ce5,
    +        0x2cec00002ced,
    +        0x2cee00002cf2,
    +        0x2cf300002cf4,
    +        0x2d0000002d26,
    +        0x2d2700002d28,
    +        0x2d2d00002d2e,
    +        0x2d3000002d68,
    +        0x2d7f00002d97,
    +        0x2da000002da7,
    +        0x2da800002daf,
    +        0x2db000002db7,
    +        0x2db800002dbf,
    +        0x2dc000002dc7,
    +        0x2dc800002dcf,
    +        0x2dd000002dd7,
    +        0x2dd800002ddf,
    +        0x2de000002e00,
    +        0x2e2f00002e30,
    +        0x300500003008,
    +        0x302a0000302e,
    +        0x303c0000303d,
    +        0x304100003097,
    +        0x30990000309b,
    +        0x309d0000309f,
    +        0x30a1000030fb,
    +        0x30fc000030ff,
    +        0x310500003130,
    +        0x31a0000031bb,
    +        0x31f000003200,
    +        0x340000004db6,
    +        0x4e0000009ff0,
    +        0xa0000000a48d,
    +        0xa4d00000a4fe,
    +        0xa5000000a60d,
    +        0xa6100000a62c,
    +        0xa6410000a642,
    +        0xa6430000a644,
    +        0xa6450000a646,
    +        0xa6470000a648,
    +        0xa6490000a64a,
    +        0xa64b0000a64c,
    +        0xa64d0000a64e,
    +        0xa64f0000a650,
    +        0xa6510000a652,
    +        0xa6530000a654,
    +        0xa6550000a656,
    +        0xa6570000a658,
    +        0xa6590000a65a,
    +        0xa65b0000a65c,
    +        0xa65d0000a65e,
    +        0xa65f0000a660,
    +        0xa6610000a662,
    +        0xa6630000a664,
    +        0xa6650000a666,
    +        0xa6670000a668,
    +        0xa6690000a66a,
    +        0xa66b0000a66c,
    +        0xa66d0000a670,
    +        0xa6740000a67e,
    +        0xa67f0000a680,
    +        0xa6810000a682,
    +        0xa6830000a684,
    +        0xa6850000a686,
    +        0xa6870000a688,
    +        0xa6890000a68a,
    +        0xa68b0000a68c,
    +        0xa68d0000a68e,
    +        0xa68f0000a690,
    +        0xa6910000a692,
    +        0xa6930000a694,
    +        0xa6950000a696,
    +        0xa6970000a698,
    +        0xa6990000a69a,
    +        0xa69b0000a69c,
    +        0xa69e0000a6e6,
    +        0xa6f00000a6f2,
    +        0xa7170000a720,
    +        0xa7230000a724,
    +        0xa7250000a726,
    +        0xa7270000a728,
    +        0xa7290000a72a,
    +        0xa72b0000a72c,
    +        0xa72d0000a72e,
    +        0xa72f0000a732,
    +        0xa7330000a734,
    +        0xa7350000a736,
    +        0xa7370000a738,
    +        0xa7390000a73a,
    +        0xa73b0000a73c,
    +        0xa73d0000a73e,
    +        0xa73f0000a740,
    +        0xa7410000a742,
    +        0xa7430000a744,
    +        0xa7450000a746,
    +        0xa7470000a748,
    +        0xa7490000a74a,
    +        0xa74b0000a74c,
    +        0xa74d0000a74e,
    +        0xa74f0000a750,
    +        0xa7510000a752,
    +        0xa7530000a754,
    +        0xa7550000a756,
    +        0xa7570000a758,
    +        0xa7590000a75a,
    +        0xa75b0000a75c,
    +        0xa75d0000a75e,
    +        0xa75f0000a760,
    +        0xa7610000a762,
    +        0xa7630000a764,
    +        0xa7650000a766,
    +        0xa7670000a768,
    +        0xa7690000a76a,
    +        0xa76b0000a76c,
    +        0xa76d0000a76e,
    +        0xa76f0000a770,
    +        0xa7710000a779,
    +        0xa77a0000a77b,
    +        0xa77c0000a77d,
    +        0xa77f0000a780,
    +        0xa7810000a782,
    +        0xa7830000a784,
    +        0xa7850000a786,
    +        0xa7870000a789,
    +        0xa78c0000a78d,
    +        0xa78e0000a790,
    +        0xa7910000a792,
    +        0xa7930000a796,
    +        0xa7970000a798,
    +        0xa7990000a79a,
    +        0xa79b0000a79c,
    +        0xa79d0000a79e,
    +        0xa79f0000a7a0,
    +        0xa7a10000a7a2,
    +        0xa7a30000a7a4,
    +        0xa7a50000a7a6,
    +        0xa7a70000a7a8,
    +        0xa7a90000a7aa,
    +        0xa7af0000a7b0,
    +        0xa7b50000a7b6,
    +        0xa7b70000a7b8,
    +        0xa7b90000a7ba,
    +        0xa7bb0000a7bc,
    +        0xa7bd0000a7be,
    +        0xa7bf0000a7c0,
    +        0xa7c30000a7c4,
    +        0xa7f70000a7f8,
    +        0xa7fa0000a828,
    +        0xa8400000a874,
    +        0xa8800000a8c6,
    +        0xa8d00000a8da,
    +        0xa8e00000a8f8,
    +        0xa8fb0000a8fc,
    +        0xa8fd0000a92e,
    +        0xa9300000a954,
    +        0xa9800000a9c1,
    +        0xa9cf0000a9da,
    +        0xa9e00000a9ff,
    +        0xaa000000aa37,
    +        0xaa400000aa4e,
    +        0xaa500000aa5a,
    +        0xaa600000aa77,
    +        0xaa7a0000aac3,
    +        0xaadb0000aade,
    +        0xaae00000aaf0,
    +        0xaaf20000aaf7,
    +        0xab010000ab07,
    +        0xab090000ab0f,
    +        0xab110000ab17,
    +        0xab200000ab27,
    +        0xab280000ab2f,
    +        0xab300000ab5b,
    +        0xab600000ab68,
    +        0xabc00000abeb,
    +        0xabec0000abee,
    +        0xabf00000abfa,
    +        0xac000000d7a4,
    +        0xfa0e0000fa10,
    +        0xfa110000fa12,
    +        0xfa130000fa15,
    +        0xfa1f0000fa20,
    +        0xfa210000fa22,
    +        0xfa230000fa25,
    +        0xfa270000fa2a,
    +        0xfb1e0000fb1f,
    +        0xfe200000fe30,
    +        0xfe730000fe74,
    +        0x100000001000c,
    +        0x1000d00010027,
    +        0x100280001003b,
    +        0x1003c0001003e,
    +        0x1003f0001004e,
    +        0x100500001005e,
    +        0x10080000100fb,
    +        0x101fd000101fe,
    +        0x102800001029d,
    +        0x102a0000102d1,
    +        0x102e0000102e1,
    +        0x1030000010320,
    +        0x1032d00010341,
    +        0x103420001034a,
    +        0x103500001037b,
    +        0x103800001039e,
    +        0x103a0000103c4,
    +        0x103c8000103d0,
    +        0x104280001049e,
    +        0x104a0000104aa,
    +        0x104d8000104fc,
    +        0x1050000010528,
    +        0x1053000010564,
    +        0x1060000010737,
    +        0x1074000010756,
    +        0x1076000010768,
    +        0x1080000010806,
    +        0x1080800010809,
    +        0x1080a00010836,
    +        0x1083700010839,
    +        0x1083c0001083d,
    +        0x1083f00010856,
    +        0x1086000010877,
    +        0x108800001089f,
    +        0x108e0000108f3,
    +        0x108f4000108f6,
    +        0x1090000010916,
    +        0x109200001093a,
    +        0x10980000109b8,
    +        0x109be000109c0,
    +        0x10a0000010a04,
    +        0x10a0500010a07,
    +        0x10a0c00010a14,
    +        0x10a1500010a18,
    +        0x10a1900010a36,
    +        0x10a3800010a3b,
    +        0x10a3f00010a40,
    +        0x10a6000010a7d,
    +        0x10a8000010a9d,
    +        0x10ac000010ac8,
    +        0x10ac900010ae7,
    +        0x10b0000010b36,
    +        0x10b4000010b56,
    +        0x10b6000010b73,
    +        0x10b8000010b92,
    +        0x10c0000010c49,
    +        0x10cc000010cf3,
    +        0x10d0000010d28,
    +        0x10d3000010d3a,
    +        0x10f0000010f1d,
    +        0x10f2700010f28,
    +        0x10f3000010f51,
    +        0x10fe000010ff7,
    +        0x1100000011047,
    +        0x1106600011070,
    +        0x1107f000110bb,
    +        0x110d0000110e9,
    +        0x110f0000110fa,
    +        0x1110000011135,
    +        0x1113600011140,
    +        0x1114400011147,
    +        0x1115000011174,
    +        0x1117600011177,
    +        0x11180000111c5,
    +        0x111c9000111cd,
    +        0x111d0000111db,
    +        0x111dc000111dd,
    +        0x1120000011212,
    +        0x1121300011238,
    +        0x1123e0001123f,
    +        0x1128000011287,
    +        0x1128800011289,
    +        0x1128a0001128e,
    +        0x1128f0001129e,
    +        0x1129f000112a9,
    +        0x112b0000112eb,
    +        0x112f0000112fa,
    +        0x1130000011304,
    +        0x113050001130d,
    +        0x1130f00011311,
    +        0x1131300011329,
    +        0x1132a00011331,
    +        0x1133200011334,
    +        0x113350001133a,
    +        0x1133b00011345,
    +        0x1134700011349,
    +        0x1134b0001134e,
    +        0x1135000011351,
    +        0x1135700011358,
    +        0x1135d00011364,
    +        0x113660001136d,
    +        0x1137000011375,
    +        0x114000001144b,
    +        0x114500001145a,
    +        0x1145e00011460,
    +        0x11480000114c6,
    +        0x114c7000114c8,
    +        0x114d0000114da,
    +        0x11580000115b6,
    +        0x115b8000115c1,
    +        0x115d8000115de,
    +        0x1160000011641,
    +        0x1164400011645,
    +        0x116500001165a,
    +        0x11680000116b9,
    +        0x116c0000116ca,
    +        0x117000001171b,
    +        0x1171d0001172c,
    +        0x117300001173a,
    +        0x118000001183b,
    +        0x118c0000118ea,
    +        0x118ff00011900,
    +        0x119a0000119a8,
    +        0x119aa000119d8,
    +        0x119da000119e2,
    +        0x119e3000119e5,
    +        0x11a0000011a3f,
    +        0x11a4700011a48,
    +        0x11a5000011a9a,
    +        0x11a9d00011a9e,
    +        0x11ac000011af9,
    +        0x11c0000011c09,
    +        0x11c0a00011c37,
    +        0x11c3800011c41,
    +        0x11c5000011c5a,
    +        0x11c7200011c90,
    +        0x11c9200011ca8,
    +        0x11ca900011cb7,
    +        0x11d0000011d07,
    +        0x11d0800011d0a,
    +        0x11d0b00011d37,
    +        0x11d3a00011d3b,
    +        0x11d3c00011d3e,
    +        0x11d3f00011d48,
    +        0x11d5000011d5a,
    +        0x11d6000011d66,
    +        0x11d6700011d69,
    +        0x11d6a00011d8f,
    +        0x11d9000011d92,
    +        0x11d9300011d99,
    +        0x11da000011daa,
    +        0x11ee000011ef7,
    +        0x120000001239a,
    +        0x1248000012544,
    +        0x130000001342f,
    +        0x1440000014647,
    +        0x1680000016a39,
    +        0x16a4000016a5f,
    +        0x16a6000016a6a,
    +        0x16ad000016aee,
    +        0x16af000016af5,
    +        0x16b0000016b37,
    +        0x16b4000016b44,
    +        0x16b5000016b5a,
    +        0x16b6300016b78,
    +        0x16b7d00016b90,
    +        0x16e6000016e80,
    +        0x16f0000016f4b,
    +        0x16f4f00016f88,
    +        0x16f8f00016fa0,
    +        0x16fe000016fe2,
    +        0x16fe300016fe4,
    +        0x17000000187f8,
    +        0x1880000018af3,
    +        0x1b0000001b11f,
    +        0x1b1500001b153,
    +        0x1b1640001b168,
    +        0x1b1700001b2fc,
    +        0x1bc000001bc6b,
    +        0x1bc700001bc7d,
    +        0x1bc800001bc89,
    +        0x1bc900001bc9a,
    +        0x1bc9d0001bc9f,
    +        0x1da000001da37,
    +        0x1da3b0001da6d,
    +        0x1da750001da76,
    +        0x1da840001da85,
    +        0x1da9b0001daa0,
    +        0x1daa10001dab0,
    +        0x1e0000001e007,
    +        0x1e0080001e019,
    +        0x1e01b0001e022,
    +        0x1e0230001e025,
    +        0x1e0260001e02b,
    +        0x1e1000001e12d,
    +        0x1e1300001e13e,
    +        0x1e1400001e14a,
    +        0x1e14e0001e14f,
    +        0x1e2c00001e2fa,
    +        0x1e8000001e8c5,
    +        0x1e8d00001e8d7,
    +        0x1e9220001e94c,
    +        0x1e9500001e95a,
    +        0x200000002a6d7,
    +        0x2a7000002b735,
    +        0x2b7400002b81e,
    +        0x2b8200002cea2,
    +        0x2ceb00002ebe1,
    +    ),
    +    'CONTEXTJ': (
    +        0x200c0000200e,
    +    ),
    +    'CONTEXTO': (
    +        0xb7000000b8,
    +        0x37500000376,
    +        0x5f3000005f5,
    +        0x6600000066a,
    +        0x6f0000006fa,
    +        0x30fb000030fc,
    +    ),
    +}
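Each integer in the `codepoint_classes` tuples above packs a half-open codepoint range as `(start << 32) | end`; the `intranges` helpers introduced in the next file of this diff decode and binary-search these values. A minimal decoding sketch (plain Python, using a value taken verbatim from the `PVALID` tuple above):

    # Unpack one entry of codepoint_classes['PVALID'].
    packed = 0x300000003a                      # second PVALID entry above
    start, end = packed >> 32, packed & 0xFFFFFFFF
    assert (start, end) == (0x30, 0x3A)        # digits '0'..'9', end exclusive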
    diff --git a/server/www/packages/packages-windows/x86/idna/intranges.py b/server/www/packages/packages-windows/x86/idna/intranges.py
    new file mode 100644
    index 0000000..fa8a735
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/intranges.py
    @@ -0,0 +1,53 @@
    +"""
    +Given a list of integers, made up of (hopefully) a small number of long runs
    +of consecutive integers, compute a representation of the form
    +((start1, end1), (start2, end2) ...). Then answer the question "was x present
    +in the original list?" in time O(log(# runs)).
    +"""
    +
    +import bisect
    +
    +def intranges_from_list(list_):
    +    """Represent a list of integers as a sequence of ranges:
    +    ((start_0, end_0), (start_1, end_1), ...), such that the original
    +    integers are exactly those x such that start_i <= x < end_i for some i.
    +
    +    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    +    """
    +
    +    sorted_list = sorted(list_)
    +    ranges = []
    +    last_write = -1
    +    for i in range(len(sorted_list)):
    +        if i+1 < len(sorted_list):
    +            if sorted_list[i] == sorted_list[i+1]-1:
    +                continue
    +        current_range = sorted_list[last_write+1:i+1]
    +        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
    +        last_write = i
    +
    +    return tuple(ranges)
    +
    +def _encode_range(start, end):
    +    return (start << 32) | end
    +
    +def _decode_range(r):
    +    return (r >> 32), (r & ((1 << 32) - 1))
    +
    +
    +def intranges_contain(int_, ranges):
    +    """Determine if `int_` falls into one of the ranges in `ranges`."""
    +    tuple_ = _encode_range(int_, 0)
    +    pos = bisect.bisect_left(ranges, tuple_)
    +    # we could be immediately ahead of a range (start, end)
    +    # with start <= int_ < end (end is exclusive)
    +    if pos > 0:
    +        left, right = _decode_range(ranges[pos-1])
    +        if left <= int_ < right:
    +            return True
    +    # or we could be immediately behind a tuple (int_, end)
    +    if pos < len(ranges):
    +        left, _ = _decode_range(ranges[pos])
    +        if left == int_:
    +            return True
    +    return False
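For reference, a quick usage sketch of the two public helpers above. It assumes only that the vendored `idna` package is on the import path, and exercises just the behavior defined in this file:

    from idna.intranges import intranges_from_list, intranges_contain

    ranges = intranges_from_list([1, 2, 3, 9, 10])
    # Two runs of consecutive integers -> two packed (start << 32) | end values.
    assert ranges == ((1 << 32) | 4, (9 << 32) | 11)
    assert intranges_contain(3, ranges)        # 3 falls in [1, 4)
    assert not intranges_contain(4, ranges)    # range ends are exclusive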
    diff --git a/server/www/packages/packages-windows/x86/idna/package_data.py b/server/www/packages/packages-windows/x86/idna/package_data.py
    new file mode 100644
    index 0000000..b5d8216
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/package_data.py
    @@ -0,0 +1,2 @@
    +__version__ = '2.9'
    +
    diff --git a/server/www/packages/packages-windows/x86/idna/uts46data.py b/server/www/packages/packages-windows/x86/idna/uts46data.py
    new file mode 100644
    index 0000000..2711136
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/idna/uts46data.py
    @@ -0,0 +1,8317 @@
    +# This file is automatically generated by tools/idna-data
    +# vim: set fileencoding=utf-8 :
    +
    +"""IDNA Mapping Table from UTS46."""
    +
    +
    +__version__ = "12.1.0"
    +def _seg_0():
    +    return [
    +    (0x0, '3'),
    +    (0x1, '3'),
    +    (0x2, '3'),
    +    (0x3, '3'),
    +    (0x4, '3'),
    +    (0x5, '3'),
    +    (0x6, '3'),
    +    (0x7, '3'),
    +    (0x8, '3'),
    +    (0x9, '3'),
    +    (0xA, '3'),
    +    (0xB, '3'),
    +    (0xC, '3'),
    +    (0xD, '3'),
    +    (0xE, '3'),
    +    (0xF, '3'),
    +    (0x10, '3'),
    +    (0x11, '3'),
    +    (0x12, '3'),
    +    (0x13, '3'),
    +    (0x14, '3'),
    +    (0x15, '3'),
    +    (0x16, '3'),
    +    (0x17, '3'),
    +    (0x18, '3'),
    +    (0x19, '3'),
    +    (0x1A, '3'),
    +    (0x1B, '3'),
    +    (0x1C, '3'),
    +    (0x1D, '3'),
    +    (0x1E, '3'),
    +    (0x1F, '3'),
    +    (0x20, '3'),
    +    (0x21, '3'),
    +    (0x22, '3'),
    +    (0x23, '3'),
    +    (0x24, '3'),
    +    (0x25, '3'),
    +    (0x26, '3'),
    +    (0x27, '3'),
    +    (0x28, '3'),
    +    (0x29, '3'),
    +    (0x2A, '3'),
    +    (0x2B, '3'),
    +    (0x2C, '3'),
    +    (0x2D, 'V'),
    +    (0x2E, 'V'),
    +    (0x2F, '3'),
    +    (0x30, 'V'),
    +    (0x31, 'V'),
    +    (0x32, 'V'),
    +    (0x33, 'V'),
    +    (0x34, 'V'),
    +    (0x35, 'V'),
    +    (0x36, 'V'),
    +    (0x37, 'V'),
    +    (0x38, 'V'),
    +    (0x39, 'V'),
    +    (0x3A, '3'),
    +    (0x3B, '3'),
    +    (0x3C, '3'),
    +    (0x3D, '3'),
    +    (0x3E, '3'),
    +    (0x3F, '3'),
    +    (0x40, '3'),
    +    (0x41, 'M', u'a'),
    +    (0x42, 'M', u'b'),
    +    (0x43, 'M', u'c'),
    +    (0x44, 'M', u'd'),
    +    (0x45, 'M', u'e'),
    +    (0x46, 'M', u'f'),
    +    (0x47, 'M', u'g'),
    +    (0x48, 'M', u'h'),
    +    (0x49, 'M', u'i'),
    +    (0x4A, 'M', u'j'),
    +    (0x4B, 'M', u'k'),
    +    (0x4C, 'M', u'l'),
    +    (0x4D, 'M', u'm'),
    +    (0x4E, 'M', u'n'),
    +    (0x4F, 'M', u'o'),
    +    (0x50, 'M', u'p'),
    +    (0x51, 'M', u'q'),
    +    (0x52, 'M', u'r'),
    +    (0x53, 'M', u's'),
    +    (0x54, 'M', u't'),
    +    (0x55, 'M', u'u'),
    +    (0x56, 'M', u'v'),
    +    (0x57, 'M', u'w'),
    +    (0x58, 'M', u'x'),
    +    (0x59, 'M', u'y'),
    +    (0x5A, 'M', u'z'),
    +    (0x5B, '3'),
    +    (0x5C, '3'),
    +    (0x5D, '3'),
    +    (0x5E, '3'),
    +    (0x5F, '3'),
    +    (0x60, '3'),
    +    (0x61, 'V'),
    +    (0x62, 'V'),
    +    (0x63, 'V'),
    +    ]
    +
    +def _seg_1():
    +    return [
    +    (0x64, 'V'),
    +    (0x65, 'V'),
    +    (0x66, 'V'),
    +    (0x67, 'V'),
    +    (0x68, 'V'),
    +    (0x69, 'V'),
    +    (0x6A, 'V'),
    +    (0x6B, 'V'),
    +    (0x6C, 'V'),
    +    (0x6D, 'V'),
    +    (0x6E, 'V'),
    +    (0x6F, 'V'),
    +    (0x70, 'V'),
    +    (0x71, 'V'),
    +    (0x72, 'V'),
    +    (0x73, 'V'),
    +    (0x74, 'V'),
    +    (0x75, 'V'),
    +    (0x76, 'V'),
    +    (0x77, 'V'),
    +    (0x78, 'V'),
    +    (0x79, 'V'),
    +    (0x7A, 'V'),
    +    (0x7B, '3'),
    +    (0x7C, '3'),
    +    (0x7D, '3'),
    +    (0x7E, '3'),
    +    (0x7F, '3'),
    +    (0x80, 'X'),
    +    (0x81, 'X'),
    +    (0x82, 'X'),
    +    (0x83, 'X'),
    +    (0x84, 'X'),
    +    (0x85, 'X'),
    +    (0x86, 'X'),
    +    (0x87, 'X'),
    +    (0x88, 'X'),
    +    (0x89, 'X'),
    +    (0x8A, 'X'),
    +    (0x8B, 'X'),
    +    (0x8C, 'X'),
    +    (0x8D, 'X'),
    +    (0x8E, 'X'),
    +    (0x8F, 'X'),
    +    (0x90, 'X'),
    +    (0x91, 'X'),
    +    (0x92, 'X'),
    +    (0x93, 'X'),
    +    (0x94, 'X'),
    +    (0x95, 'X'),
    +    (0x96, 'X'),
    +    (0x97, 'X'),
    +    (0x98, 'X'),
    +    (0x99, 'X'),
    +    (0x9A, 'X'),
    +    (0x9B, 'X'),
    +    (0x9C, 'X'),
    +    (0x9D, 'X'),
    +    (0x9E, 'X'),
    +    (0x9F, 'X'),
    +    (0xA0, '3', u' '),
    +    (0xA1, 'V'),
    +    (0xA2, 'V'),
    +    (0xA3, 'V'),
    +    (0xA4, 'V'),
    +    (0xA5, 'V'),
    +    (0xA6, 'V'),
    +    (0xA7, 'V'),
    +    (0xA8, '3', u' ̈'),
    +    (0xA9, 'V'),
    +    (0xAA, 'M', u'a'),
    +    (0xAB, 'V'),
    +    (0xAC, 'V'),
    +    (0xAD, 'I'),
    +    (0xAE, 'V'),
    +    (0xAF, '3', u' ̄'),
    +    (0xB0, 'V'),
    +    (0xB1, 'V'),
    +    (0xB2, 'M', u'2'),
    +    (0xB3, 'M', u'3'),
    +    (0xB4, '3', u' ́'),
    +    (0xB5, 'M', u'μ'),
    +    (0xB6, 'V'),
    +    (0xB7, 'V'),
    +    (0xB8, '3', u' ̧'),
    +    (0xB9, 'M', u'1'),
    +    (0xBA, 'M', u'o'),
    +    (0xBB, 'V'),
    +    (0xBC, 'M', u'1⁄4'),
    +    (0xBD, 'M', u'1⁄2'),
    +    (0xBE, 'M', u'3⁄4'),
    +    (0xBF, 'V'),
    +    (0xC0, 'M', u'à'),
    +    (0xC1, 'M', u'á'),
    +    (0xC2, 'M', u'â'),
    +    (0xC3, 'M', u'ã'),
    +    (0xC4, 'M', u'ä'),
    +    (0xC5, 'M', u'å'),
    +    (0xC6, 'M', u'æ'),
    +    (0xC7, 'M', u'ç'),
    +    ]
    +
    +def _seg_2():
    +    return [
    +    (0xC8, 'M', u'è'),
    +    (0xC9, 'M', u'é'),
    +    (0xCA, 'M', u'ê'),
    +    (0xCB, 'M', u'ë'),
    +    (0xCC, 'M', u'ì'),
    +    (0xCD, 'M', u'í'),
    +    (0xCE, 'M', u'î'),
    +    (0xCF, 'M', u'ï'),
    +    (0xD0, 'M', u'ð'),
    +    (0xD1, 'M', u'ñ'),
    +    (0xD2, 'M', u'ò'),
    +    (0xD3, 'M', u'ó'),
    +    (0xD4, 'M', u'ô'),
    +    (0xD5, 'M', u'õ'),
    +    (0xD6, 'M', u'ö'),
    +    (0xD7, 'V'),
    +    (0xD8, 'M', u'ø'),
    +    (0xD9, 'M', u'ù'),
    +    (0xDA, 'M', u'ú'),
    +    (0xDB, 'M', u'û'),
    +    (0xDC, 'M', u'ü'),
    +    (0xDD, 'M', u'ý'),
    +    (0xDE, 'M', u'þ'),
    +    (0xDF, 'D', u'ss'),
    +    (0xE0, 'V'),
    +    (0xE1, 'V'),
    +    (0xE2, 'V'),
    +    (0xE3, 'V'),
    +    (0xE4, 'V'),
    +    (0xE5, 'V'),
    +    (0xE6, 'V'),
    +    (0xE7, 'V'),
    +    (0xE8, 'V'),
    +    (0xE9, 'V'),
    +    (0xEA, 'V'),
    +    (0xEB, 'V'),
    +    (0xEC, 'V'),
    +    (0xED, 'V'),
    +    (0xEE, 'V'),
    +    (0xEF, 'V'),
    +    (0xF0, 'V'),
    +    (0xF1, 'V'),
    +    (0xF2, 'V'),
    +    (0xF3, 'V'),
    +    (0xF4, 'V'),
    +    (0xF5, 'V'),
    +    (0xF6, 'V'),
    +    (0xF7, 'V'),
    +    (0xF8, 'V'),
    +    (0xF9, 'V'),
    +    (0xFA, 'V'),
    +    (0xFB, 'V'),
    +    (0xFC, 'V'),
    +    (0xFD, 'V'),
    +    (0xFE, 'V'),
    +    (0xFF, 'V'),
    +    (0x100, 'M', u'ā'),
    +    (0x101, 'V'),
    +    (0x102, 'M', u'ă'),
    +    (0x103, 'V'),
    +    (0x104, 'M', u'ą'),
    +    (0x105, 'V'),
    +    (0x106, 'M', u'ć'),
    +    (0x107, 'V'),
    +    (0x108, 'M', u'ĉ'),
    +    (0x109, 'V'),
    +    (0x10A, 'M', u'ċ'),
    +    (0x10B, 'V'),
    +    (0x10C, 'M', u'č'),
    +    (0x10D, 'V'),
    +    (0x10E, 'M', u'ď'),
    +    (0x10F, 'V'),
    +    (0x110, 'M', u'đ'),
    +    (0x111, 'V'),
    +    (0x112, 'M', u'ē'),
    +    (0x113, 'V'),
    +    (0x114, 'M', u'ĕ'),
    +    (0x115, 'V'),
    +    (0x116, 'M', u'ė'),
    +    (0x117, 'V'),
    +    (0x118, 'M', u'ę'),
    +    (0x119, 'V'),
    +    (0x11A, 'M', u'ě'),
    +    (0x11B, 'V'),
    +    (0x11C, 'M', u'ĝ'),
    +    (0x11D, 'V'),
    +    (0x11E, 'M', u'ğ'),
    +    (0x11F, 'V'),
    +    (0x120, 'M', u'ġ'),
    +    (0x121, 'V'),
    +    (0x122, 'M', u'ģ'),
    +    (0x123, 'V'),
    +    (0x124, 'M', u'ĥ'),
    +    (0x125, 'V'),
    +    (0x126, 'M', u'ħ'),
    +    (0x127, 'V'),
    +    (0x128, 'M', u'ĩ'),
    +    (0x129, 'V'),
    +    (0x12A, 'M', u'ī'),
    +    (0x12B, 'V'),
    +    ]
    +
    +def _seg_3():
    +    return [
    +    (0x12C, 'M', u'ĭ'),
    +    (0x12D, 'V'),
    +    (0x12E, 'M', u'į'),
    +    (0x12F, 'V'),
    +    (0x130, 'M', u'i̇'),
    +    (0x131, 'V'),
    +    (0x132, 'M', u'ij'),
    +    (0x134, 'M', u'ĵ'),
    +    (0x135, 'V'),
    +    (0x136, 'M', u'ķ'),
    +    (0x137, 'V'),
    +    (0x139, 'M', u'ĺ'),
    +    (0x13A, 'V'),
    +    (0x13B, 'M', u'ļ'),
    +    (0x13C, 'V'),
    +    (0x13D, 'M', u'ľ'),
    +    (0x13E, 'V'),
    +    (0x13F, 'M', u'l·'),
    +    (0x141, 'M', u'ł'),
    +    (0x142, 'V'),
    +    (0x143, 'M', u'ń'),
    +    (0x144, 'V'),
    +    (0x145, 'M', u'ņ'),
    +    (0x146, 'V'),
    +    (0x147, 'M', u'ň'),
    +    (0x148, 'V'),
    +    (0x149, 'M', u'ʼn'),
    +    (0x14A, 'M', u'ŋ'),
    +    (0x14B, 'V'),
    +    (0x14C, 'M', u'ō'),
    +    (0x14D, 'V'),
    +    (0x14E, 'M', u'ŏ'),
    +    (0x14F, 'V'),
    +    (0x150, 'M', u'ő'),
    +    (0x151, 'V'),
    +    (0x152, 'M', u'œ'),
    +    (0x153, 'V'),
    +    (0x154, 'M', u'ŕ'),
    +    (0x155, 'V'),
    +    (0x156, 'M', u'ŗ'),
    +    (0x157, 'V'),
    +    (0x158, 'M', u'ř'),
    +    (0x159, 'V'),
    +    (0x15A, 'M', u'ś'),
    +    (0x15B, 'V'),
    +    (0x15C, 'M', u'ŝ'),
    +    (0x15D, 'V'),
    +    (0x15E, 'M', u'ş'),
    +    (0x15F, 'V'),
    +    (0x160, 'M', u'š'),
    +    (0x161, 'V'),
    +    (0x162, 'M', u'ţ'),
    +    (0x163, 'V'),
    +    (0x164, 'M', u'ť'),
    +    (0x165, 'V'),
    +    (0x166, 'M', u'ŧ'),
    +    (0x167, 'V'),
    +    (0x168, 'M', u'ũ'),
    +    (0x169, 'V'),
    +    (0x16A, 'M', u'ū'),
    +    (0x16B, 'V'),
    +    (0x16C, 'M', u'ŭ'),
    +    (0x16D, 'V'),
    +    (0x16E, 'M', u'ů'),
    +    (0x16F, 'V'),
    +    (0x170, 'M', u'ű'),
    +    (0x171, 'V'),
    +    (0x172, 'M', u'ų'),
    +    (0x173, 'V'),
    +    (0x174, 'M', u'ŵ'),
    +    (0x175, 'V'),
    +    (0x176, 'M', u'ŷ'),
    +    (0x177, 'V'),
    +    (0x178, 'M', u'ÿ'),
    +    (0x179, 'M', u'ź'),
    +    (0x17A, 'V'),
    +    (0x17B, 'M', u'ż'),
    +    (0x17C, 'V'),
    +    (0x17D, 'M', u'ž'),
    +    (0x17E, 'V'),
    +    (0x17F, 'M', u's'),
    +    (0x180, 'V'),
    +    (0x181, 'M', u'ɓ'),
    +    (0x182, 'M', u'ƃ'),
    +    (0x183, 'V'),
    +    (0x184, 'M', u'ƅ'),
    +    (0x185, 'V'),
    +    (0x186, 'M', u'ɔ'),
    +    (0x187, 'M', u'ƈ'),
    +    (0x188, 'V'),
    +    (0x189, 'M', u'ɖ'),
    +    (0x18A, 'M', u'ɗ'),
    +    (0x18B, 'M', u'ƌ'),
    +    (0x18C, 'V'),
    +    (0x18E, 'M', u'ǝ'),
    +    (0x18F, 'M', u'ə'),
    +    (0x190, 'M', u'ɛ'),
    +    (0x191, 'M', u'ƒ'),
    +    (0x192, 'V'),
    +    (0x193, 'M', u'ɠ'),
    +    ]
    +
    +def _seg_4():
    +    return [
    +    (0x194, 'M', u'ɣ'),
    +    (0x195, 'V'),
    +    (0x196, 'M', u'ɩ'),
    +    (0x197, 'M', u'ɨ'),
    +    (0x198, 'M', u'ƙ'),
    +    (0x199, 'V'),
    +    (0x19C, 'M', u'ɯ'),
    +    (0x19D, 'M', u'ɲ'),
    +    (0x19E, 'V'),
    +    (0x19F, 'M', u'ɵ'),
    +    (0x1A0, 'M', u'ơ'),
    +    (0x1A1, 'V'),
    +    (0x1A2, 'M', u'ƣ'),
    +    (0x1A3, 'V'),
    +    (0x1A4, 'M', u'ƥ'),
    +    (0x1A5, 'V'),
    +    (0x1A6, 'M', u'ʀ'),
    +    (0x1A7, 'M', u'ƨ'),
    +    (0x1A8, 'V'),
    +    (0x1A9, 'M', u'ʃ'),
    +    (0x1AA, 'V'),
    +    (0x1AC, 'M', u'ƭ'),
    +    (0x1AD, 'V'),
    +    (0x1AE, 'M', u'ʈ'),
    +    (0x1AF, 'M', u'ư'),
    +    (0x1B0, 'V'),
    +    (0x1B1, 'M', u'ʊ'),
    +    (0x1B2, 'M', u'ʋ'),
    +    (0x1B3, 'M', u'ƴ'),
    +    (0x1B4, 'V'),
    +    (0x1B5, 'M', u'ƶ'),
    +    (0x1B6, 'V'),
    +    (0x1B7, 'M', u'ʒ'),
    +    (0x1B8, 'M', u'ƹ'),
    +    (0x1B9, 'V'),
    +    (0x1BC, 'M', u'ƽ'),
    +    (0x1BD, 'V'),
    +    (0x1C4, 'M', u'dž'),
    +    (0x1C7, 'M', u'lj'),
    +    (0x1CA, 'M', u'nj'),
    +    (0x1CD, 'M', u'ǎ'),
    +    (0x1CE, 'V'),
    +    (0x1CF, 'M', u'ǐ'),
    +    (0x1D0, 'V'),
    +    (0x1D1, 'M', u'ǒ'),
    +    (0x1D2, 'V'),
    +    (0x1D3, 'M', u'ǔ'),
    +    (0x1D4, 'V'),
    +    (0x1D5, 'M', u'ǖ'),
    +    (0x1D6, 'V'),
    +    (0x1D7, 'M', u'ǘ'),
    +    (0x1D8, 'V'),
    +    (0x1D9, 'M', u'ǚ'),
    +    (0x1DA, 'V'),
    +    (0x1DB, 'M', u'ǜ'),
    +    (0x1DC, 'V'),
    +    (0x1DE, 'M', u'ǟ'),
    +    (0x1DF, 'V'),
    +    (0x1E0, 'M', u'ǡ'),
    +    (0x1E1, 'V'),
    +    (0x1E2, 'M', u'ǣ'),
    +    (0x1E3, 'V'),
    +    (0x1E4, 'M', u'ǥ'),
    +    (0x1E5, 'V'),
    +    (0x1E6, 'M', u'ǧ'),
    +    (0x1E7, 'V'),
    +    (0x1E8, 'M', u'ǩ'),
    +    (0x1E9, 'V'),
    +    (0x1EA, 'M', u'ǫ'),
    +    (0x1EB, 'V'),
    +    (0x1EC, 'M', u'ǭ'),
    +    (0x1ED, 'V'),
    +    (0x1EE, 'M', u'ǯ'),
    +    (0x1EF, 'V'),
    +    (0x1F1, 'M', u'dz'),
    +    (0x1F4, 'M', u'ǵ'),
    +    (0x1F5, 'V'),
    +    (0x1F6, 'M', u'ƕ'),
    +    (0x1F7, 'M', u'ƿ'),
    +    (0x1F8, 'M', u'ǹ'),
    +    (0x1F9, 'V'),
    +    (0x1FA, 'M', u'ǻ'),
    +    (0x1FB, 'V'),
    +    (0x1FC, 'M', u'ǽ'),
    +    (0x1FD, 'V'),
    +    (0x1FE, 'M', u'ǿ'),
    +    (0x1FF, 'V'),
    +    (0x200, 'M', u'ȁ'),
    +    (0x201, 'V'),
    +    (0x202, 'M', u'ȃ'),
    +    (0x203, 'V'),
    +    (0x204, 'M', u'ȅ'),
    +    (0x205, 'V'),
    +    (0x206, 'M', u'ȇ'),
    +    (0x207, 'V'),
    +    (0x208, 'M', u'ȉ'),
    +    (0x209, 'V'),
    +    (0x20A, 'M', u'ȋ'),
    +    (0x20B, 'V'),
    +    (0x20C, 'M', u'ȍ'),
    +    ]
    +
    +def _seg_5():
    +    return [
    +    (0x20D, 'V'),
    +    (0x20E, 'M', u'ȏ'),
    +    (0x20F, 'V'),
    +    (0x210, 'M', u'ȑ'),
    +    (0x211, 'V'),
    +    (0x212, 'M', u'ȓ'),
    +    (0x213, 'V'),
    +    (0x214, 'M', u'ȕ'),
    +    (0x215, 'V'),
    +    (0x216, 'M', u'ȗ'),
    +    (0x217, 'V'),
    +    (0x218, 'M', u'ș'),
    +    (0x219, 'V'),
    +    (0x21A, 'M', u'ț'),
    +    (0x21B, 'V'),
    +    (0x21C, 'M', u'ȝ'),
    +    (0x21D, 'V'),
    +    (0x21E, 'M', u'ȟ'),
    +    (0x21F, 'V'),
    +    (0x220, 'M', u'ƞ'),
    +    (0x221, 'V'),
    +    (0x222, 'M', u'ȣ'),
    +    (0x223, 'V'),
    +    (0x224, 'M', u'ȥ'),
    +    (0x225, 'V'),
    +    (0x226, 'M', u'ȧ'),
    +    (0x227, 'V'),
    +    (0x228, 'M', u'ȩ'),
    +    (0x229, 'V'),
    +    (0x22A, 'M', u'ȫ'),
    +    (0x22B, 'V'),
    +    (0x22C, 'M', u'ȭ'),
    +    (0x22D, 'V'),
    +    (0x22E, 'M', u'ȯ'),
    +    (0x22F, 'V'),
    +    (0x230, 'M', u'ȱ'),
    +    (0x231, 'V'),
    +    (0x232, 'M', u'ȳ'),
    +    (0x233, 'V'),
    +    (0x23A, 'M', u'ⱥ'),
    +    (0x23B, 'M', u'ȼ'),
    +    (0x23C, 'V'),
    +    (0x23D, 'M', u'ƚ'),
    +    (0x23E, 'M', u'ⱦ'),
    +    (0x23F, 'V'),
    +    (0x241, 'M', u'ɂ'),
    +    (0x242, 'V'),
    +    (0x243, 'M', u'ƀ'),
    +    (0x244, 'M', u'ʉ'),
    +    (0x245, 'M', u'ʌ'),
    +    (0x246, 'M', u'ɇ'),
    +    (0x247, 'V'),
    +    (0x248, 'M', u'ɉ'),
    +    (0x249, 'V'),
    +    (0x24A, 'M', u'ɋ'),
    +    (0x24B, 'V'),
    +    (0x24C, 'M', u'ɍ'),
    +    (0x24D, 'V'),
    +    (0x24E, 'M', u'ɏ'),
    +    (0x24F, 'V'),
    +    (0x2B0, 'M', u'h'),
    +    (0x2B1, 'M', u'ɦ'),
    +    (0x2B2, 'M', u'j'),
    +    (0x2B3, 'M', u'r'),
    +    (0x2B4, 'M', u'ɹ'),
    +    (0x2B5, 'M', u'ɻ'),
    +    (0x2B6, 'M', u'ʁ'),
    +    (0x2B7, 'M', u'w'),
    +    (0x2B8, 'M', u'y'),
    +    (0x2B9, 'V'),
    +    (0x2D8, '3', u' ̆'),
    +    (0x2D9, '3', u' ̇'),
    +    (0x2DA, '3', u' ̊'),
    +    (0x2DB, '3', u' ̨'),
    +    (0x2DC, '3', u' ̃'),
    +    (0x2DD, '3', u' ̋'),
    +    (0x2DE, 'V'),
    +    (0x2E0, 'M', u'ɣ'),
    +    (0x2E1, 'M', u'l'),
    +    (0x2E2, 'M', u's'),
    +    (0x2E3, 'M', u'x'),
    +    (0x2E4, 'M', u'ʕ'),
    +    (0x2E5, 'V'),
    +    (0x340, 'M', u'̀'),
    +    (0x341, 'M', u'́'),
    +    (0x342, 'V'),
    +    (0x343, 'M', u'̓'),
    +    (0x344, 'M', u'̈́'),
    +    (0x345, 'M', u'ι'),
    +    (0x346, 'V'),
    +    (0x34F, 'I'),
    +    (0x350, 'V'),
    +    (0x370, 'M', u'ͱ'),
    +    (0x371, 'V'),
    +    (0x372, 'M', u'ͳ'),
    +    (0x373, 'V'),
    +    (0x374, 'M', u'ʹ'),
    +    (0x375, 'V'),
    +    (0x376, 'M', u'ͷ'),
    +    (0x377, 'V'),
    +    ]
    +
    +def _seg_6():
    +    return [
    +    (0x378, 'X'),
    +    (0x37A, '3', u' ι'),
    +    (0x37B, 'V'),
    +    (0x37E, '3', u';'),
    +    (0x37F, 'M', u'ϳ'),
    +    (0x380, 'X'),
    +    (0x384, '3', u' ́'),
    +    (0x385, '3', u' ̈́'),
    +    (0x386, 'M', u'ά'),
    +    (0x387, 'M', u'·'),
    +    (0x388, 'M', u'έ'),
    +    (0x389, 'M', u'ή'),
    +    (0x38A, 'M', u'ί'),
    +    (0x38B, 'X'),
    +    (0x38C, 'M', u'ό'),
    +    (0x38D, 'X'),
    +    (0x38E, 'M', u'ύ'),
    +    (0x38F, 'M', u'ώ'),
    +    (0x390, 'V'),
    +    (0x391, 'M', u'α'),
    +    (0x392, 'M', u'β'),
    +    (0x393, 'M', u'γ'),
    +    (0x394, 'M', u'δ'),
    +    (0x395, 'M', u'ε'),
    +    (0x396, 'M', u'ζ'),
    +    (0x397, 'M', u'η'),
    +    (0x398, 'M', u'θ'),
    +    (0x399, 'M', u'ι'),
    +    (0x39A, 'M', u'κ'),
    +    (0x39B, 'M', u'λ'),
    +    (0x39C, 'M', u'μ'),
    +    (0x39D, 'M', u'ν'),
    +    (0x39E, 'M', u'ξ'),
    +    (0x39F, 'M', u'ο'),
    +    (0x3A0, 'M', u'π'),
    +    (0x3A1, 'M', u'ρ'),
    +    (0x3A2, 'X'),
    +    (0x3A3, 'M', u'σ'),
    +    (0x3A4, 'M', u'τ'),
    +    (0x3A5, 'M', u'υ'),
    +    (0x3A6, 'M', u'φ'),
    +    (0x3A7, 'M', u'χ'),
    +    (0x3A8, 'M', u'ψ'),
    +    (0x3A9, 'M', u'ω'),
    +    (0x3AA, 'M', u'ϊ'),
    +    (0x3AB, 'M', u'ϋ'),
    +    (0x3AC, 'V'),
    +    (0x3C2, 'D', u'σ'),
    +    (0x3C3, 'V'),
    +    (0x3CF, 'M', u'ϗ'),
    +    (0x3D0, 'M', u'β'),
    +    (0x3D1, 'M', u'θ'),
    +    (0x3D2, 'M', u'υ'),
    +    (0x3D3, 'M', u'ύ'),
    +    (0x3D4, 'M', u'ϋ'),
    +    (0x3D5, 'M', u'φ'),
    +    (0x3D6, 'M', u'π'),
    +    (0x3D7, 'V'),
    +    (0x3D8, 'M', u'ϙ'),
    +    (0x3D9, 'V'),
    +    (0x3DA, 'M', u'ϛ'),
    +    (0x3DB, 'V'),
    +    (0x3DC, 'M', u'ϝ'),
    +    (0x3DD, 'V'),
    +    (0x3DE, 'M', u'ϟ'),
    +    (0x3DF, 'V'),
    +    (0x3E0, 'M', u'ϡ'),
    +    (0x3E1, 'V'),
    +    (0x3E2, 'M', u'ϣ'),
    +    (0x3E3, 'V'),
    +    (0x3E4, 'M', u'ϥ'),
    +    (0x3E5, 'V'),
    +    (0x3E6, 'M', u'ϧ'),
    +    (0x3E7, 'V'),
    +    (0x3E8, 'M', u'ϩ'),
    +    (0x3E9, 'V'),
    +    (0x3EA, 'M', u'ϫ'),
    +    (0x3EB, 'V'),
    +    (0x3EC, 'M', u'ϭ'),
    +    (0x3ED, 'V'),
    +    (0x3EE, 'M', u'ϯ'),
    +    (0x3EF, 'V'),
    +    (0x3F0, 'M', u'κ'),
    +    (0x3F1, 'M', u'ρ'),
    +    (0x3F2, 'M', u'σ'),
    +    (0x3F3, 'V'),
    +    (0x3F4, 'M', u'θ'),
    +    (0x3F5, 'M', u'ε'),
    +    (0x3F6, 'V'),
    +    (0x3F7, 'M', u'ϸ'),
    +    (0x3F8, 'V'),
    +    (0x3F9, 'M', u'σ'),
    +    (0x3FA, 'M', u'ϻ'),
    +    (0x3FB, 'V'),
    +    (0x3FD, 'M', u'ͻ'),
    +    (0x3FE, 'M', u'ͼ'),
    +    (0x3FF, 'M', u'ͽ'),
    +    (0x400, 'M', u'ѐ'),
    +    (0x401, 'M', u'ё'),
    +    (0x402, 'M', u'ђ'),
    +    ]
    +
    +def _seg_7():
    +    return [
    +    (0x403, 'M', u'ѓ'),
    +    (0x404, 'M', u'є'),
    +    (0x405, 'M', u'ѕ'),
    +    (0x406, 'M', u'і'),
    +    (0x407, 'M', u'ї'),
    +    (0x408, 'M', u'ј'),
    +    (0x409, 'M', u'љ'),
    +    (0x40A, 'M', u'њ'),
    +    (0x40B, 'M', u'ћ'),
    +    (0x40C, 'M', u'ќ'),
    +    (0x40D, 'M', u'ѝ'),
    +    (0x40E, 'M', u'ў'),
    +    (0x40F, 'M', u'џ'),
    +    (0x410, 'M', u'а'),
    +    (0x411, 'M', u'б'),
    +    (0x412, 'M', u'в'),
    +    (0x413, 'M', u'г'),
    +    (0x414, 'M', u'д'),
    +    (0x415, 'M', u'е'),
    +    (0x416, 'M', u'ж'),
    +    (0x417, 'M', u'з'),
    +    (0x418, 'M', u'и'),
    +    (0x419, 'M', u'й'),
    +    (0x41A, 'M', u'к'),
    +    (0x41B, 'M', u'л'),
    +    (0x41C, 'M', u'м'),
    +    (0x41D, 'M', u'н'),
    +    (0x41E, 'M', u'о'),
    +    (0x41F, 'M', u'п'),
    +    (0x420, 'M', u'р'),
    +    (0x421, 'M', u'с'),
    +    (0x422, 'M', u'т'),
    +    (0x423, 'M', u'у'),
    +    (0x424, 'M', u'ф'),
    +    (0x425, 'M', u'х'),
    +    (0x426, 'M', u'ц'),
    +    (0x427, 'M', u'ч'),
    +    (0x428, 'M', u'ш'),
    +    (0x429, 'M', u'щ'),
    +    (0x42A, 'M', u'ъ'),
    +    (0x42B, 'M', u'ы'),
    +    (0x42C, 'M', u'ь'),
    +    (0x42D, 'M', u'э'),
    +    (0x42E, 'M', u'ю'),
    +    (0x42F, 'M', u'я'),
    +    (0x430, 'V'),
    +    (0x460, 'M', u'ѡ'),
    +    (0x461, 'V'),
    +    (0x462, 'M', u'ѣ'),
    +    (0x463, 'V'),
    +    (0x464, 'M', u'ѥ'),
    +    (0x465, 'V'),
    +    (0x466, 'M', u'ѧ'),
    +    (0x467, 'V'),
    +    (0x468, 'M', u'ѩ'),
    +    (0x469, 'V'),
    +    (0x46A, 'M', u'ѫ'),
    +    (0x46B, 'V'),
    +    (0x46C, 'M', u'ѭ'),
    +    (0x46D, 'V'),
    +    (0x46E, 'M', u'ѯ'),
    +    (0x46F, 'V'),
    +    (0x470, 'M', u'ѱ'),
    +    (0x471, 'V'),
    +    (0x472, 'M', u'ѳ'),
    +    (0x473, 'V'),
    +    (0x474, 'M', u'ѵ'),
    +    (0x475, 'V'),
    +    (0x476, 'M', u'ѷ'),
    +    (0x477, 'V'),
    +    (0x478, 'M', u'ѹ'),
    +    (0x479, 'V'),
    +    (0x47A, 'M', u'ѻ'),
    +    (0x47B, 'V'),
    +    (0x47C, 'M', u'ѽ'),
    +    (0x47D, 'V'),
    +    (0x47E, 'M', u'ѿ'),
    +    (0x47F, 'V'),
    +    (0x480, 'M', u'ҁ'),
    +    (0x481, 'V'),
    +    (0x48A, 'M', u'ҋ'),
    +    (0x48B, 'V'),
    +    (0x48C, 'M', u'ҍ'),
    +    (0x48D, 'V'),
    +    (0x48E, 'M', u'ҏ'),
    +    (0x48F, 'V'),
    +    (0x490, 'M', u'ґ'),
    +    (0x491, 'V'),
    +    (0x492, 'M', u'ғ'),
    +    (0x493, 'V'),
    +    (0x494, 'M', u'ҕ'),
    +    (0x495, 'V'),
    +    (0x496, 'M', u'җ'),
    +    (0x497, 'V'),
    +    (0x498, 'M', u'ҙ'),
    +    (0x499, 'V'),
    +    (0x49A, 'M', u'қ'),
    +    (0x49B, 'V'),
    +    (0x49C, 'M', u'ҝ'),
    +    (0x49D, 'V'),
    +    ]
    +
    +def _seg_8():
    +    return [
    +    (0x49E, 'M', u'ҟ'),
    +    (0x49F, 'V'),
    +    (0x4A0, 'M', u'ҡ'),
    +    (0x4A1, 'V'),
    +    (0x4A2, 'M', u'ң'),
    +    (0x4A3, 'V'),
    +    (0x4A4, 'M', u'ҥ'),
    +    (0x4A5, 'V'),
    +    (0x4A6, 'M', u'ҧ'),
    +    (0x4A7, 'V'),
    +    (0x4A8, 'M', u'ҩ'),
    +    (0x4A9, 'V'),
    +    (0x4AA, 'M', u'ҫ'),
    +    (0x4AB, 'V'),
    +    (0x4AC, 'M', u'ҭ'),
    +    (0x4AD, 'V'),
    +    (0x4AE, 'M', u'ү'),
    +    (0x4AF, 'V'),
    +    (0x4B0, 'M', u'ұ'),
    +    (0x4B1, 'V'),
    +    (0x4B2, 'M', u'ҳ'),
    +    (0x4B3, 'V'),
    +    (0x4B4, 'M', u'ҵ'),
    +    (0x4B5, 'V'),
    +    (0x4B6, 'M', u'ҷ'),
    +    (0x4B7, 'V'),
    +    (0x4B8, 'M', u'ҹ'),
    +    (0x4B9, 'V'),
    +    (0x4BA, 'M', u'һ'),
    +    (0x4BB, 'V'),
    +    (0x4BC, 'M', u'ҽ'),
    +    (0x4BD, 'V'),
    +    (0x4BE, 'M', u'ҿ'),
    +    (0x4BF, 'V'),
    +    (0x4C0, 'X'),
    +    (0x4C1, 'M', u'ӂ'),
    +    (0x4C2, 'V'),
    +    (0x4C3, 'M', u'ӄ'),
    +    (0x4C4, 'V'),
    +    (0x4C5, 'M', u'ӆ'),
    +    (0x4C6, 'V'),
    +    (0x4C7, 'M', u'ӈ'),
    +    (0x4C8, 'V'),
    +    (0x4C9, 'M', u'ӊ'),
    +    (0x4CA, 'V'),
    +    (0x4CB, 'M', u'ӌ'),
    +    (0x4CC, 'V'),
    +    (0x4CD, 'M', u'ӎ'),
    +    (0x4CE, 'V'),
    +    (0x4D0, 'M', u'ӑ'),
    +    (0x4D1, 'V'),
    +    (0x4D2, 'M', u'ӓ'),
    +    (0x4D3, 'V'),
    +    (0x4D4, 'M', u'ӕ'),
    +    (0x4D5, 'V'),
    +    (0x4D6, 'M', u'ӗ'),
    +    (0x4D7, 'V'),
    +    (0x4D8, 'M', u'ә'),
    +    (0x4D9, 'V'),
    +    (0x4DA, 'M', u'ӛ'),
    +    (0x4DB, 'V'),
    +    (0x4DC, 'M', u'ӝ'),
    +    (0x4DD, 'V'),
    +    (0x4DE, 'M', u'ӟ'),
    +    (0x4DF, 'V'),
    +    (0x4E0, 'M', u'ӡ'),
    +    (0x4E1, 'V'),
    +    (0x4E2, 'M', u'ӣ'),
    +    (0x4E3, 'V'),
    +    (0x4E4, 'M', u'ӥ'),
    +    (0x4E5, 'V'),
    +    (0x4E6, 'M', u'ӧ'),
    +    (0x4E7, 'V'),
    +    (0x4E8, 'M', u'ө'),
    +    (0x4E9, 'V'),
    +    (0x4EA, 'M', u'ӫ'),
    +    (0x4EB, 'V'),
    +    (0x4EC, 'M', u'ӭ'),
    +    (0x4ED, 'V'),
    +    (0x4EE, 'M', u'ӯ'),
    +    (0x4EF, 'V'),
    +    (0x4F0, 'M', u'ӱ'),
    +    (0x4F1, 'V'),
    +    (0x4F2, 'M', u'ӳ'),
    +    (0x4F3, 'V'),
    +    (0x4F4, 'M', u'ӵ'),
    +    (0x4F5, 'V'),
    +    (0x4F6, 'M', u'ӷ'),
    +    (0x4F7, 'V'),
    +    (0x4F8, 'M', u'ӹ'),
    +    (0x4F9, 'V'),
    +    (0x4FA, 'M', u'ӻ'),
    +    (0x4FB, 'V'),
    +    (0x4FC, 'M', u'ӽ'),
    +    (0x4FD, 'V'),
    +    (0x4FE, 'M', u'ӿ'),
    +    (0x4FF, 'V'),
    +    (0x500, 'M', u'ԁ'),
    +    (0x501, 'V'),
    +    (0x502, 'M', u'ԃ'),
    +    ]
    +
    +def _seg_9():
    +    return [
    +    (0x503, 'V'),
    +    (0x504, 'M', u'ԅ'),
    +    (0x505, 'V'),
    +    (0x506, 'M', u'ԇ'),
    +    (0x507, 'V'),
    +    (0x508, 'M', u'ԉ'),
    +    (0x509, 'V'),
    +    (0x50A, 'M', u'ԋ'),
    +    (0x50B, 'V'),
    +    (0x50C, 'M', u'ԍ'),
    +    (0x50D, 'V'),
    +    (0x50E, 'M', u'ԏ'),
    +    (0x50F, 'V'),
    +    (0x510, 'M', u'ԑ'),
    +    (0x511, 'V'),
    +    (0x512, 'M', u'ԓ'),
    +    (0x513, 'V'),
    +    (0x514, 'M', u'ԕ'),
    +    (0x515, 'V'),
    +    (0x516, 'M', u'ԗ'),
    +    (0x517, 'V'),
    +    (0x518, 'M', u'ԙ'),
    +    (0x519, 'V'),
    +    (0x51A, 'M', u'ԛ'),
    +    (0x51B, 'V'),
    +    (0x51C, 'M', u'ԝ'),
    +    (0x51D, 'V'),
    +    (0x51E, 'M', u'ԟ'),
    +    (0x51F, 'V'),
    +    (0x520, 'M', u'ԡ'),
    +    (0x521, 'V'),
    +    (0x522, 'M', u'ԣ'),
    +    (0x523, 'V'),
    +    (0x524, 'M', u'ԥ'),
    +    (0x525, 'V'),
    +    (0x526, 'M', u'ԧ'),
    +    (0x527, 'V'),
    +    (0x528, 'M', u'ԩ'),
    +    (0x529, 'V'),
    +    (0x52A, 'M', u'ԫ'),
    +    (0x52B, 'V'),
    +    (0x52C, 'M', u'ԭ'),
    +    (0x52D, 'V'),
    +    (0x52E, 'M', u'ԯ'),
    +    (0x52F, 'V'),
    +    (0x530, 'X'),
    +    (0x531, 'M', u'ա'),
    +    (0x532, 'M', u'բ'),
    +    (0x533, 'M', u'գ'),
    +    (0x534, 'M', u'դ'),
    +    (0x535, 'M', u'ե'),
    +    (0x536, 'M', u'զ'),
    +    (0x537, 'M', u'է'),
    +    (0x538, 'M', u'ը'),
    +    (0x539, 'M', u'թ'),
    +    (0x53A, 'M', u'ժ'),
    +    (0x53B, 'M', u'ի'),
    +    (0x53C, 'M', u'լ'),
    +    (0x53D, 'M', u'խ'),
    +    (0x53E, 'M', u'ծ'),
    +    (0x53F, 'M', u'կ'),
    +    (0x540, 'M', u'հ'),
    +    (0x541, 'M', u'ձ'),
    +    (0x542, 'M', u'ղ'),
    +    (0x543, 'M', u'ճ'),
    +    (0x544, 'M', u'մ'),
    +    (0x545, 'M', u'յ'),
    +    (0x546, 'M', u'ն'),
    +    (0x547, 'M', u'շ'),
    +    (0x548, 'M', u'ո'),
    +    (0x549, 'M', u'չ'),
    +    (0x54A, 'M', u'պ'),
    +    (0x54B, 'M', u'ջ'),
    +    (0x54C, 'M', u'ռ'),
    +    (0x54D, 'M', u'ս'),
    +    (0x54E, 'M', u'վ'),
    +    (0x54F, 'M', u'տ'),
    +    (0x550, 'M', u'ր'),
    +    (0x551, 'M', u'ց'),
    +    (0x552, 'M', u'ւ'),
    +    (0x553, 'M', u'փ'),
    +    (0x554, 'M', u'ք'),
    +    (0x555, 'M', u'օ'),
    +    (0x556, 'M', u'ֆ'),
    +    (0x557, 'X'),
    +    (0x559, 'V'),
    +    (0x587, 'M', u'եւ'),
    +    (0x588, 'V'),
    +    (0x58B, 'X'),
    +    (0x58D, 'V'),
    +    (0x590, 'X'),
    +    (0x591, 'V'),
    +    (0x5C8, 'X'),
    +    (0x5D0, 'V'),
    +    (0x5EB, 'X'),
    +    (0x5EF, 'V'),
    +    (0x5F5, 'X'),
    +    (0x606, 'V'),
    +    (0x61C, 'X'),
    +    (0x61E, 'V'),
    +    ]
    +
    +def _seg_10():
    +    return [
    +    (0x675, 'M', u'اٴ'),
    +    (0x676, 'M', u'وٴ'),
    +    (0x677, 'M', u'ۇٴ'),
    +    (0x678, 'M', u'يٴ'),
    +    (0x679, 'V'),
    +    (0x6DD, 'X'),
    +    (0x6DE, 'V'),
    +    (0x70E, 'X'),
    +    (0x710, 'V'),
    +    (0x74B, 'X'),
    +    (0x74D, 'V'),
    +    (0x7B2, 'X'),
    +    (0x7C0, 'V'),
    +    (0x7FB, 'X'),
    +    (0x7FD, 'V'),
    +    (0x82E, 'X'),
    +    (0x830, 'V'),
    +    (0x83F, 'X'),
    +    (0x840, 'V'),
    +    (0x85C, 'X'),
    +    (0x85E, 'V'),
    +    (0x85F, 'X'),
    +    (0x860, 'V'),
    +    (0x86B, 'X'),
    +    (0x8A0, 'V'),
    +    (0x8B5, 'X'),
    +    (0x8B6, 'V'),
    +    (0x8BE, 'X'),
    +    (0x8D3, 'V'),
    +    (0x8E2, 'X'),
    +    (0x8E3, 'V'),
    +    (0x958, 'M', u'क़'),
    +    (0x959, 'M', u'ख़'),
    +    (0x95A, 'M', u'ग़'),
    +    (0x95B, 'M', u'ज़'),
    +    (0x95C, 'M', u'ड़'),
    +    (0x95D, 'M', u'ढ़'),
    +    (0x95E, 'M', u'फ़'),
    +    (0x95F, 'M', u'य़'),
    +    (0x960, 'V'),
    +    (0x984, 'X'),
    +    (0x985, 'V'),
    +    (0x98D, 'X'),
    +    (0x98F, 'V'),
    +    (0x991, 'X'),
    +    (0x993, 'V'),
    +    (0x9A9, 'X'),
    +    (0x9AA, 'V'),
    +    (0x9B1, 'X'),
    +    (0x9B2, 'V'),
    +    (0x9B3, 'X'),
    +    (0x9B6, 'V'),
    +    (0x9BA, 'X'),
    +    (0x9BC, 'V'),
    +    (0x9C5, 'X'),
    +    (0x9C7, 'V'),
    +    (0x9C9, 'X'),
    +    (0x9CB, 'V'),
    +    (0x9CF, 'X'),
    +    (0x9D7, 'V'),
    +    (0x9D8, 'X'),
    +    (0x9DC, 'M', u'ড়'),
    +    (0x9DD, 'M', u'ঢ়'),
    +    (0x9DE, 'X'),
    +    (0x9DF, 'M', u'য়'),
    +    (0x9E0, 'V'),
    +    (0x9E4, 'X'),
    +    (0x9E6, 'V'),
    +    (0x9FF, 'X'),
    +    (0xA01, 'V'),
    +    (0xA04, 'X'),
    +    (0xA05, 'V'),
    +    (0xA0B, 'X'),
    +    (0xA0F, 'V'),
    +    (0xA11, 'X'),
    +    (0xA13, 'V'),
    +    (0xA29, 'X'),
    +    (0xA2A, 'V'),
    +    (0xA31, 'X'),
    +    (0xA32, 'V'),
    +    (0xA33, 'M', u'ਲ਼'),
    +    (0xA34, 'X'),
    +    (0xA35, 'V'),
    +    (0xA36, 'M', u'ਸ਼'),
    +    (0xA37, 'X'),
    +    (0xA38, 'V'),
    +    (0xA3A, 'X'),
    +    (0xA3C, 'V'),
    +    (0xA3D, 'X'),
    +    (0xA3E, 'V'),
    +    (0xA43, 'X'),
    +    (0xA47, 'V'),
    +    (0xA49, 'X'),
    +    (0xA4B, 'V'),
    +    (0xA4E, 'X'),
    +    (0xA51, 'V'),
    +    (0xA52, 'X'),
    +    (0xA59, 'M', u'ਖ਼'),
    +    (0xA5A, 'M', u'ਗ਼'),
    +    (0xA5B, 'M', u'ਜ਼'),
    +    ]
    +
    +def _seg_11():
    +    return [
    +    (0xA5C, 'V'),
    +    (0xA5D, 'X'),
    +    (0xA5E, 'M', u'ਫ਼'),
    +    (0xA5F, 'X'),
    +    (0xA66, 'V'),
    +    (0xA77, 'X'),
    +    (0xA81, 'V'),
    +    (0xA84, 'X'),
    +    (0xA85, 'V'),
    +    (0xA8E, 'X'),
    +    (0xA8F, 'V'),
    +    (0xA92, 'X'),
    +    (0xA93, 'V'),
    +    (0xAA9, 'X'),
    +    (0xAAA, 'V'),
    +    (0xAB1, 'X'),
    +    (0xAB2, 'V'),
    +    (0xAB4, 'X'),
    +    (0xAB5, 'V'),
    +    (0xABA, 'X'),
    +    (0xABC, 'V'),
    +    (0xAC6, 'X'),
    +    (0xAC7, 'V'),
    +    (0xACA, 'X'),
    +    (0xACB, 'V'),
    +    (0xACE, 'X'),
    +    (0xAD0, 'V'),
    +    (0xAD1, 'X'),
    +    (0xAE0, 'V'),
    +    (0xAE4, 'X'),
    +    (0xAE6, 'V'),
    +    (0xAF2, 'X'),
    +    (0xAF9, 'V'),
    +    (0xB00, 'X'),
    +    (0xB01, 'V'),
    +    (0xB04, 'X'),
    +    (0xB05, 'V'),
    +    (0xB0D, 'X'),
    +    (0xB0F, 'V'),
    +    (0xB11, 'X'),
    +    (0xB13, 'V'),
    +    (0xB29, 'X'),
    +    (0xB2A, 'V'),
    +    (0xB31, 'X'),
    +    (0xB32, 'V'),
    +    (0xB34, 'X'),
    +    (0xB35, 'V'),
    +    (0xB3A, 'X'),
    +    (0xB3C, 'V'),
    +    (0xB45, 'X'),
    +    (0xB47, 'V'),
    +    (0xB49, 'X'),
    +    (0xB4B, 'V'),
    +    (0xB4E, 'X'),
    +    (0xB56, 'V'),
    +    (0xB58, 'X'),
    +    (0xB5C, 'M', u'ଡ଼'),
    +    (0xB5D, 'M', u'ଢ଼'),
    +    (0xB5E, 'X'),
    +    (0xB5F, 'V'),
    +    (0xB64, 'X'),
    +    (0xB66, 'V'),
    +    (0xB78, 'X'),
    +    (0xB82, 'V'),
    +    (0xB84, 'X'),
    +    (0xB85, 'V'),
    +    (0xB8B, 'X'),
    +    (0xB8E, 'V'),
    +    (0xB91, 'X'),
    +    (0xB92, 'V'),
    +    (0xB96, 'X'),
    +    (0xB99, 'V'),
    +    (0xB9B, 'X'),
    +    (0xB9C, 'V'),
    +    (0xB9D, 'X'),
    +    (0xB9E, 'V'),
    +    (0xBA0, 'X'),
    +    (0xBA3, 'V'),
    +    (0xBA5, 'X'),
    +    (0xBA8, 'V'),
    +    (0xBAB, 'X'),
    +    (0xBAE, 'V'),
    +    (0xBBA, 'X'),
    +    (0xBBE, 'V'),
    +    (0xBC3, 'X'),
    +    (0xBC6, 'V'),
    +    (0xBC9, 'X'),
    +    (0xBCA, 'V'),
    +    (0xBCE, 'X'),
    +    (0xBD0, 'V'),
    +    (0xBD1, 'X'),
    +    (0xBD7, 'V'),
    +    (0xBD8, 'X'),
    +    (0xBE6, 'V'),
    +    (0xBFB, 'X'),
    +    (0xC00, 'V'),
    +    (0xC0D, 'X'),
    +    (0xC0E, 'V'),
    +    (0xC11, 'X'),
    +    (0xC12, 'V'),
    +    ]
    +
    +def _seg_12():
    +    return [
    +    (0xC29, 'X'),
    +    (0xC2A, 'V'),
    +    (0xC3A, 'X'),
    +    (0xC3D, 'V'),
    +    (0xC45, 'X'),
    +    (0xC46, 'V'),
    +    (0xC49, 'X'),
    +    (0xC4A, 'V'),
    +    (0xC4E, 'X'),
    +    (0xC55, 'V'),
    +    (0xC57, 'X'),
    +    (0xC58, 'V'),
    +    (0xC5B, 'X'),
    +    (0xC60, 'V'),
    +    (0xC64, 'X'),
    +    (0xC66, 'V'),
    +    (0xC70, 'X'),
    +    (0xC77, 'V'),
    +    (0xC8D, 'X'),
    +    (0xC8E, 'V'),
    +    (0xC91, 'X'),
    +    (0xC92, 'V'),
    +    (0xCA9, 'X'),
    +    (0xCAA, 'V'),
    +    (0xCB4, 'X'),
    +    (0xCB5, 'V'),
    +    (0xCBA, 'X'),
    +    (0xCBC, 'V'),
    +    (0xCC5, 'X'),
    +    (0xCC6, 'V'),
    +    (0xCC9, 'X'),
    +    (0xCCA, 'V'),
    +    (0xCCE, 'X'),
    +    (0xCD5, 'V'),
    +    (0xCD7, 'X'),
    +    (0xCDE, 'V'),
    +    (0xCDF, 'X'),
    +    (0xCE0, 'V'),
    +    (0xCE4, 'X'),
    +    (0xCE6, 'V'),
    +    (0xCF0, 'X'),
    +    (0xCF1, 'V'),
    +    (0xCF3, 'X'),
    +    (0xD00, 'V'),
    +    (0xD04, 'X'),
    +    (0xD05, 'V'),
    +    (0xD0D, 'X'),
    +    (0xD0E, 'V'),
    +    (0xD11, 'X'),
    +    (0xD12, 'V'),
    +    (0xD45, 'X'),
    +    (0xD46, 'V'),
    +    (0xD49, 'X'),
    +    (0xD4A, 'V'),
    +    (0xD50, 'X'),
    +    (0xD54, 'V'),
    +    (0xD64, 'X'),
    +    (0xD66, 'V'),
    +    (0xD80, 'X'),
    +    (0xD82, 'V'),
    +    (0xD84, 'X'),
    +    (0xD85, 'V'),
    +    (0xD97, 'X'),
    +    (0xD9A, 'V'),
    +    (0xDB2, 'X'),
    +    (0xDB3, 'V'),
    +    (0xDBC, 'X'),
    +    (0xDBD, 'V'),
    +    (0xDBE, 'X'),
    +    (0xDC0, 'V'),
    +    (0xDC7, 'X'),
    +    (0xDCA, 'V'),
    +    (0xDCB, 'X'),
    +    (0xDCF, 'V'),
    +    (0xDD5, 'X'),
    +    (0xDD6, 'V'),
    +    (0xDD7, 'X'),
    +    (0xDD8, 'V'),
    +    (0xDE0, 'X'),
    +    (0xDE6, 'V'),
    +    (0xDF0, 'X'),
    +    (0xDF2, 'V'),
    +    (0xDF5, 'X'),
    +    (0xE01, 'V'),
    +    (0xE33, 'M', u'ํา'),
    +    (0xE34, 'V'),
    +    (0xE3B, 'X'),
    +    (0xE3F, 'V'),
    +    (0xE5C, 'X'),
    +    (0xE81, 'V'),
    +    (0xE83, 'X'),
    +    (0xE84, 'V'),
    +    (0xE85, 'X'),
    +    (0xE86, 'V'),
    +    (0xE8B, 'X'),
    +    (0xE8C, 'V'),
    +    (0xEA4, 'X'),
    +    (0xEA5, 'V'),
    +    (0xEA6, 'X'),
    +    (0xEA7, 'V'),
    +    ]
    +
    +def _seg_13():
    +    return [
    +    (0xEB3, 'M', u'ໍາ'),
    +    (0xEB4, 'V'),
    +    (0xEBE, 'X'),
    +    (0xEC0, 'V'),
    +    (0xEC5, 'X'),
    +    (0xEC6, 'V'),
    +    (0xEC7, 'X'),
    +    (0xEC8, 'V'),
    +    (0xECE, 'X'),
    +    (0xED0, 'V'),
    +    (0xEDA, 'X'),
    +    (0xEDC, 'M', u'ຫນ'),
    +    (0xEDD, 'M', u'ຫມ'),
    +    (0xEDE, 'V'),
    +    (0xEE0, 'X'),
    +    (0xF00, 'V'),
    +    (0xF0C, 'M', u'་'),
    +    (0xF0D, 'V'),
    +    (0xF43, 'M', u'གྷ'),
    +    (0xF44, 'V'),
    +    (0xF48, 'X'),
    +    (0xF49, 'V'),
    +    (0xF4D, 'M', u'ཌྷ'),
    +    (0xF4E, 'V'),
    +    (0xF52, 'M', u'དྷ'),
    +    (0xF53, 'V'),
    +    (0xF57, 'M', u'བྷ'),
    +    (0xF58, 'V'),
    +    (0xF5C, 'M', u'ཛྷ'),
    +    (0xF5D, 'V'),
    +    (0xF69, 'M', u'ཀྵ'),
    +    (0xF6A, 'V'),
    +    (0xF6D, 'X'),
    +    (0xF71, 'V'),
    +    (0xF73, 'M', u'ཱི'),
    +    (0xF74, 'V'),
    +    (0xF75, 'M', u'ཱུ'),
    +    (0xF76, 'M', u'ྲྀ'),
    +    (0xF77, 'M', u'ྲཱྀ'),
    +    (0xF78, 'M', u'ླྀ'),
    +    (0xF79, 'M', u'ླཱྀ'),
    +    (0xF7A, 'V'),
    +    (0xF81, 'M', u'ཱྀ'),
    +    (0xF82, 'V'),
    +    (0xF93, 'M', u'ྒྷ'),
    +    (0xF94, 'V'),
    +    (0xF98, 'X'),
    +    (0xF99, 'V'),
    +    (0xF9D, 'M', u'ྜྷ'),
    +    (0xF9E, 'V'),
    +    (0xFA2, 'M', u'ྡྷ'),
    +    (0xFA3, 'V'),
    +    (0xFA7, 'M', u'ྦྷ'),
    +    (0xFA8, 'V'),
    +    (0xFAC, 'M', u'ྫྷ'),
    +    (0xFAD, 'V'),
    +    (0xFB9, 'M', u'ྐྵ'),
    +    (0xFBA, 'V'),
    +    (0xFBD, 'X'),
    +    (0xFBE, 'V'),
    +    (0xFCD, 'X'),
    +    (0xFCE, 'V'),
    +    (0xFDB, 'X'),
    +    (0x1000, 'V'),
    +    (0x10A0, 'X'),
    +    (0x10C7, 'M', u'ⴧ'),
    +    (0x10C8, 'X'),
    +    (0x10CD, 'M', u'ⴭ'),
    +    (0x10CE, 'X'),
    +    (0x10D0, 'V'),
    +    (0x10FC, 'M', u'ნ'),
    +    (0x10FD, 'V'),
    +    (0x115F, 'X'),
    +    (0x1161, 'V'),
    +    (0x1249, 'X'),
    +    (0x124A, 'V'),
    +    (0x124E, 'X'),
    +    (0x1250, 'V'),
    +    (0x1257, 'X'),
    +    (0x1258, 'V'),
    +    (0x1259, 'X'),
    +    (0x125A, 'V'),
    +    (0x125E, 'X'),
    +    (0x1260, 'V'),
    +    (0x1289, 'X'),
    +    (0x128A, 'V'),
    +    (0x128E, 'X'),
    +    (0x1290, 'V'),
    +    (0x12B1, 'X'),
    +    (0x12B2, 'V'),
    +    (0x12B6, 'X'),
    +    (0x12B8, 'V'),
    +    (0x12BF, 'X'),
    +    (0x12C0, 'V'),
    +    (0x12C1, 'X'),
    +    (0x12C2, 'V'),
    +    (0x12C6, 'X'),
    +    (0x12C8, 'V'),
    +    (0x12D7, 'X'),
    +    (0x12D8, 'V'),
    +    ]
    +
    +def _seg_14():
    +    return [
    +    (0x1311, 'X'),
    +    (0x1312, 'V'),
    +    (0x1316, 'X'),
    +    (0x1318, 'V'),
    +    (0x135B, 'X'),
    +    (0x135D, 'V'),
    +    (0x137D, 'X'),
    +    (0x1380, 'V'),
    +    (0x139A, 'X'),
    +    (0x13A0, 'V'),
    +    (0x13F6, 'X'),
    +    (0x13F8, 'M', u'Ᏸ'),
    +    (0x13F9, 'M', u'Ᏹ'),
    +    (0x13FA, 'M', u'Ᏺ'),
    +    (0x13FB, 'M', u'Ᏻ'),
    +    (0x13FC, 'M', u'Ᏼ'),
    +    (0x13FD, 'M', u'Ᏽ'),
    +    (0x13FE, 'X'),
    +    (0x1400, 'V'),
    +    (0x1680, 'X'),
    +    (0x1681, 'V'),
    +    (0x169D, 'X'),
    +    (0x16A0, 'V'),
    +    (0x16F9, 'X'),
    +    (0x1700, 'V'),
    +    (0x170D, 'X'),
    +    (0x170E, 'V'),
    +    (0x1715, 'X'),
    +    (0x1720, 'V'),
    +    (0x1737, 'X'),
    +    (0x1740, 'V'),
    +    (0x1754, 'X'),
    +    (0x1760, 'V'),
    +    (0x176D, 'X'),
    +    (0x176E, 'V'),
    +    (0x1771, 'X'),
    +    (0x1772, 'V'),
    +    (0x1774, 'X'),
    +    (0x1780, 'V'),
    +    (0x17B4, 'X'),
    +    (0x17B6, 'V'),
    +    (0x17DE, 'X'),
    +    (0x17E0, 'V'),
    +    (0x17EA, 'X'),
    +    (0x17F0, 'V'),
    +    (0x17FA, 'X'),
    +    (0x1800, 'V'),
    +    (0x1806, 'X'),
    +    (0x1807, 'V'),
    +    (0x180B, 'I'),
    +    (0x180E, 'X'),
    +    (0x1810, 'V'),
    +    (0x181A, 'X'),
    +    (0x1820, 'V'),
    +    (0x1879, 'X'),
    +    (0x1880, 'V'),
    +    (0x18AB, 'X'),
    +    (0x18B0, 'V'),
    +    (0x18F6, 'X'),
    +    (0x1900, 'V'),
    +    (0x191F, 'X'),
    +    (0x1920, 'V'),
    +    (0x192C, 'X'),
    +    (0x1930, 'V'),
    +    (0x193C, 'X'),
    +    (0x1940, 'V'),
    +    (0x1941, 'X'),
    +    (0x1944, 'V'),
    +    (0x196E, 'X'),
    +    (0x1970, 'V'),
    +    (0x1975, 'X'),
    +    (0x1980, 'V'),
    +    (0x19AC, 'X'),
    +    (0x19B0, 'V'),
    +    (0x19CA, 'X'),
    +    (0x19D0, 'V'),
    +    (0x19DB, 'X'),
    +    (0x19DE, 'V'),
    +    (0x1A1C, 'X'),
    +    (0x1A1E, 'V'),
    +    (0x1A5F, 'X'),
    +    (0x1A60, 'V'),
    +    (0x1A7D, 'X'),
    +    (0x1A7F, 'V'),
    +    (0x1A8A, 'X'),
    +    (0x1A90, 'V'),
    +    (0x1A9A, 'X'),
    +    (0x1AA0, 'V'),
    +    (0x1AAE, 'X'),
    +    (0x1AB0, 'V'),
    +    (0x1ABF, 'X'),
    +    (0x1B00, 'V'),
    +    (0x1B4C, 'X'),
    +    (0x1B50, 'V'),
    +    (0x1B7D, 'X'),
    +    (0x1B80, 'V'),
    +    (0x1BF4, 'X'),
    +    (0x1BFC, 'V'),
    +    (0x1C38, 'X'),
    +    (0x1C3B, 'V'),
    +    ]
    +
    +def _seg_15():
    +    return [
    +    (0x1C4A, 'X'),
    +    (0x1C4D, 'V'),
    +    (0x1C80, 'M', u'в'),
    +    (0x1C81, 'M', u'д'),
    +    (0x1C82, 'M', u'о'),
    +    (0x1C83, 'M', u'с'),
    +    (0x1C84, 'M', u'т'),
    +    (0x1C86, 'M', u'ъ'),
    +    (0x1C87, 'M', u'ѣ'),
    +    (0x1C88, 'M', u'ꙋ'),
    +    (0x1C89, 'X'),
    +    (0x1C90, 'M', u'ა'),
    +    (0x1C91, 'M', u'ბ'),
    +    (0x1C92, 'M', u'გ'),
    +    (0x1C93, 'M', u'დ'),
    +    (0x1C94, 'M', u'ე'),
    +    (0x1C95, 'M', u'ვ'),
    +    (0x1C96, 'M', u'ზ'),
    +    (0x1C97, 'M', u'თ'),
    +    (0x1C98, 'M', u'ი'),
    +    (0x1C99, 'M', u'კ'),
    +    (0x1C9A, 'M', u'ლ'),
    +    (0x1C9B, 'M', u'მ'),
    +    (0x1C9C, 'M', u'ნ'),
    +    (0x1C9D, 'M', u'ო'),
    +    (0x1C9E, 'M', u'პ'),
    +    (0x1C9F, 'M', u'ჟ'),
    +    (0x1CA0, 'M', u'რ'),
    +    (0x1CA1, 'M', u'ს'),
    +    (0x1CA2, 'M', u'ტ'),
    +    (0x1CA3, 'M', u'უ'),
    +    (0x1CA4, 'M', u'ფ'),
    +    (0x1CA5, 'M', u'ქ'),
    +    (0x1CA6, 'M', u'ღ'),
    +    (0x1CA7, 'M', u'ყ'),
    +    (0x1CA8, 'M', u'შ'),
    +    (0x1CA9, 'M', u'ჩ'),
    +    (0x1CAA, 'M', u'ც'),
    +    (0x1CAB, 'M', u'ძ'),
    +    (0x1CAC, 'M', u'წ'),
    +    (0x1CAD, 'M', u'ჭ'),
    +    (0x1CAE, 'M', u'ხ'),
    +    (0x1CAF, 'M', u'ჯ'),
    +    (0x1CB0, 'M', u'ჰ'),
    +    (0x1CB1, 'M', u'ჱ'),
    +    (0x1CB2, 'M', u'ჲ'),
    +    (0x1CB3, 'M', u'ჳ'),
    +    (0x1CB4, 'M', u'ჴ'),
    +    (0x1CB5, 'M', u'ჵ'),
    +    (0x1CB6, 'M', u'ჶ'),
    +    (0x1CB7, 'M', u'ჷ'),
    +    (0x1CB8, 'M', u'ჸ'),
    +    (0x1CB9, 'M', u'ჹ'),
    +    (0x1CBA, 'M', u'ჺ'),
    +    (0x1CBB, 'X'),
    +    (0x1CBD, 'M', u'ჽ'),
    +    (0x1CBE, 'M', u'ჾ'),
    +    (0x1CBF, 'M', u'ჿ'),
    +    (0x1CC0, 'V'),
    +    (0x1CC8, 'X'),
    +    (0x1CD0, 'V'),
    +    (0x1CFB, 'X'),
    +    (0x1D00, 'V'),
    +    (0x1D2C, 'M', u'a'),
    +    (0x1D2D, 'M', u'æ'),
    +    (0x1D2E, 'M', u'b'),
    +    (0x1D2F, 'V'),
    +    (0x1D30, 'M', u'd'),
    +    (0x1D31, 'M', u'e'),
    +    (0x1D32, 'M', u'ǝ'),
    +    (0x1D33, 'M', u'g'),
    +    (0x1D34, 'M', u'h'),
    +    (0x1D35, 'M', u'i'),
    +    (0x1D36, 'M', u'j'),
    +    (0x1D37, 'M', u'k'),
    +    (0x1D38, 'M', u'l'),
    +    (0x1D39, 'M', u'm'),
    +    (0x1D3A, 'M', u'n'),
    +    (0x1D3B, 'V'),
    +    (0x1D3C, 'M', u'o'),
    +    (0x1D3D, 'M', u'ȣ'),
    +    (0x1D3E, 'M', u'p'),
    +    (0x1D3F, 'M', u'r'),
    +    (0x1D40, 'M', u't'),
    +    (0x1D41, 'M', u'u'),
    +    (0x1D42, 'M', u'w'),
    +    (0x1D43, 'M', u'a'),
    +    (0x1D44, 'M', u'ɐ'),
    +    (0x1D45, 'M', u'ɑ'),
    +    (0x1D46, 'M', u'ᴂ'),
    +    (0x1D47, 'M', u'b'),
    +    (0x1D48, 'M', u'd'),
    +    (0x1D49, 'M', u'e'),
    +    (0x1D4A, 'M', u'ə'),
    +    (0x1D4B, 'M', u'ɛ'),
    +    (0x1D4C, 'M', u'ɜ'),
    +    (0x1D4D, 'M', u'g'),
    +    (0x1D4E, 'V'),
    +    (0x1D4F, 'M', u'k'),
    +    (0x1D50, 'M', u'm'),
    +    ]
    +
    +def _seg_16():
    +    return [
    +    (0x1D51, 'M', u'ŋ'),
    +    (0x1D52, 'M', u'o'),
    +    (0x1D53, 'M', u'ɔ'),
    +    (0x1D54, 'M', u'ᴖ'),
    +    (0x1D55, 'M', u'ᴗ'),
    +    (0x1D56, 'M', u'p'),
    +    (0x1D57, 'M', u't'),
    +    (0x1D58, 'M', u'u'),
    +    (0x1D59, 'M', u'ᴝ'),
    +    (0x1D5A, 'M', u'ɯ'),
    +    (0x1D5B, 'M', u'v'),
    +    (0x1D5C, 'M', u'ᴥ'),
    +    (0x1D5D, 'M', u'β'),
    +    (0x1D5E, 'M', u'γ'),
    +    (0x1D5F, 'M', u'δ'),
    +    (0x1D60, 'M', u'φ'),
    +    (0x1D61, 'M', u'χ'),
    +    (0x1D62, 'M', u'i'),
    +    (0x1D63, 'M', u'r'),
    +    (0x1D64, 'M', u'u'),
    +    (0x1D65, 'M', u'v'),
    +    (0x1D66, 'M', u'β'),
    +    (0x1D67, 'M', u'γ'),
    +    (0x1D68, 'M', u'ρ'),
    +    (0x1D69, 'M', u'φ'),
    +    (0x1D6A, 'M', u'χ'),
    +    (0x1D6B, 'V'),
    +    (0x1D78, 'M', u'н'),
    +    (0x1D79, 'V'),
    +    (0x1D9B, 'M', u'ɒ'),
    +    (0x1D9C, 'M', u'c'),
    +    (0x1D9D, 'M', u'ɕ'),
    +    (0x1D9E, 'M', u'ð'),
    +    (0x1D9F, 'M', u'ɜ'),
    +    (0x1DA0, 'M', u'f'),
    +    (0x1DA1, 'M', u'ɟ'),
    +    (0x1DA2, 'M', u'ɡ'),
    +    (0x1DA3, 'M', u'ɥ'),
    +    (0x1DA4, 'M', u'ɨ'),
    +    (0x1DA5, 'M', u'ɩ'),
    +    (0x1DA6, 'M', u'ɪ'),
    +    (0x1DA7, 'M', u'ᵻ'),
    +    (0x1DA8, 'M', u'ʝ'),
    +    (0x1DA9, 'M', u'ɭ'),
    +    (0x1DAA, 'M', u'ᶅ'),
    +    (0x1DAB, 'M', u'ʟ'),
    +    (0x1DAC, 'M', u'ɱ'),
    +    (0x1DAD, 'M', u'ɰ'),
    +    (0x1DAE, 'M', u'ɲ'),
    +    (0x1DAF, 'M', u'ɳ'),
    +    (0x1DB0, 'M', u'ɴ'),
    +    (0x1DB1, 'M', u'ɵ'),
    +    (0x1DB2, 'M', u'ɸ'),
    +    (0x1DB3, 'M', u'ʂ'),
    +    (0x1DB4, 'M', u'ʃ'),
    +    (0x1DB5, 'M', u'ƫ'),
    +    (0x1DB6, 'M', u'ʉ'),
    +    (0x1DB7, 'M', u'ʊ'),
    +    (0x1DB8, 'M', u'ᴜ'),
    +    (0x1DB9, 'M', u'ʋ'),
    +    (0x1DBA, 'M', u'ʌ'),
    +    (0x1DBB, 'M', u'z'),
    +    (0x1DBC, 'M', u'ʐ'),
    +    (0x1DBD, 'M', u'ʑ'),
    +    (0x1DBE, 'M', u'ʒ'),
    +    (0x1DBF, 'M', u'θ'),
    +    (0x1DC0, 'V'),
    +    (0x1DFA, 'X'),
    +    (0x1DFB, 'V'),
    +    (0x1E00, 'M', u'ḁ'),
    +    (0x1E01, 'V'),
    +    (0x1E02, 'M', u'ḃ'),
    +    (0x1E03, 'V'),
    +    (0x1E04, 'M', u'ḅ'),
    +    (0x1E05, 'V'),
    +    (0x1E06, 'M', u'ḇ'),
    +    (0x1E07, 'V'),
    +    (0x1E08, 'M', u'ḉ'),
    +    (0x1E09, 'V'),
    +    (0x1E0A, 'M', u'ḋ'),
    +    (0x1E0B, 'V'),
    +    (0x1E0C, 'M', u'ḍ'),
    +    (0x1E0D, 'V'),
    +    (0x1E0E, 'M', u'ḏ'),
    +    (0x1E0F, 'V'),
    +    (0x1E10, 'M', u'ḑ'),
    +    (0x1E11, 'V'),
    +    (0x1E12, 'M', u'ḓ'),
    +    (0x1E13, 'V'),
    +    (0x1E14, 'M', u'ḕ'),
    +    (0x1E15, 'V'),
    +    (0x1E16, 'M', u'ḗ'),
    +    (0x1E17, 'V'),
    +    (0x1E18, 'M', u'ḙ'),
    +    (0x1E19, 'V'),
    +    (0x1E1A, 'M', u'ḛ'),
    +    (0x1E1B, 'V'),
    +    (0x1E1C, 'M', u'ḝ'),
    +    (0x1E1D, 'V'),
    +    (0x1E1E, 'M', u'ḟ'),
    +    ]
    +
    +def _seg_17():
    +    return [
    +    (0x1E1F, 'V'),
    +    (0x1E20, 'M', u'ḡ'),
    +    (0x1E21, 'V'),
    +    (0x1E22, 'M', u'ḣ'),
    +    (0x1E23, 'V'),
    +    (0x1E24, 'M', u'ḥ'),
    +    (0x1E25, 'V'),
    +    (0x1E26, 'M', u'ḧ'),
    +    (0x1E27, 'V'),
    +    (0x1E28, 'M', u'ḩ'),
    +    (0x1E29, 'V'),
    +    (0x1E2A, 'M', u'ḫ'),
    +    (0x1E2B, 'V'),
    +    (0x1E2C, 'M', u'ḭ'),
    +    (0x1E2D, 'V'),
    +    (0x1E2E, 'M', u'ḯ'),
    +    (0x1E2F, 'V'),
    +    (0x1E30, 'M', u'ḱ'),
    +    (0x1E31, 'V'),
    +    (0x1E32, 'M', u'ḳ'),
    +    (0x1E33, 'V'),
    +    (0x1E34, 'M', u'ḵ'),
    +    (0x1E35, 'V'),
    +    (0x1E36, 'M', u'ḷ'),
    +    (0x1E37, 'V'),
    +    (0x1E38, 'M', u'ḹ'),
    +    (0x1E39, 'V'),
    +    (0x1E3A, 'M', u'ḻ'),
    +    (0x1E3B, 'V'),
    +    (0x1E3C, 'M', u'ḽ'),
    +    (0x1E3D, 'V'),
    +    (0x1E3E, 'M', u'ḿ'),
    +    (0x1E3F, 'V'),
    +    (0x1E40, 'M', u'ṁ'),
    +    (0x1E41, 'V'),
    +    (0x1E42, 'M', u'ṃ'),
    +    (0x1E43, 'V'),
    +    (0x1E44, 'M', u'ṅ'),
    +    (0x1E45, 'V'),
    +    (0x1E46, 'M', u'ṇ'),
    +    (0x1E47, 'V'),
    +    (0x1E48, 'M', u'ṉ'),
    +    (0x1E49, 'V'),
    +    (0x1E4A, 'M', u'ṋ'),
    +    (0x1E4B, 'V'),
    +    (0x1E4C, 'M', u'ṍ'),
    +    (0x1E4D, 'V'),
    +    (0x1E4E, 'M', u'ṏ'),
    +    (0x1E4F, 'V'),
    +    (0x1E50, 'M', u'ṑ'),
    +    (0x1E51, 'V'),
    +    (0x1E52, 'M', u'ṓ'),
    +    (0x1E53, 'V'),
    +    (0x1E54, 'M', u'ṕ'),
    +    (0x1E55, 'V'),
    +    (0x1E56, 'M', u'ṗ'),
    +    (0x1E57, 'V'),
    +    (0x1E58, 'M', u'ṙ'),
    +    (0x1E59, 'V'),
    +    (0x1E5A, 'M', u'ṛ'),
    +    (0x1E5B, 'V'),
    +    (0x1E5C, 'M', u'ṝ'),
    +    (0x1E5D, 'V'),
    +    (0x1E5E, 'M', u'ṟ'),
    +    (0x1E5F, 'V'),
    +    (0x1E60, 'M', u'ṡ'),
    +    (0x1E61, 'V'),
    +    (0x1E62, 'M', u'ṣ'),
    +    (0x1E63, 'V'),
    +    (0x1E64, 'M', u'ṥ'),
    +    (0x1E65, 'V'),
    +    (0x1E66, 'M', u'ṧ'),
    +    (0x1E67, 'V'),
    +    (0x1E68, 'M', u'ṩ'),
    +    (0x1E69, 'V'),
    +    (0x1E6A, 'M', u'ṫ'),
    +    (0x1E6B, 'V'),
    +    (0x1E6C, 'M', u'ṭ'),
    +    (0x1E6D, 'V'),
    +    (0x1E6E, 'M', u'ṯ'),
    +    (0x1E6F, 'V'),
    +    (0x1E70, 'M', u'ṱ'),
    +    (0x1E71, 'V'),
    +    (0x1E72, 'M', u'ṳ'),
    +    (0x1E73, 'V'),
    +    (0x1E74, 'M', u'ṵ'),
    +    (0x1E75, 'V'),
    +    (0x1E76, 'M', u'ṷ'),
    +    (0x1E77, 'V'),
    +    (0x1E78, 'M', u'ṹ'),
    +    (0x1E79, 'V'),
    +    (0x1E7A, 'M', u'ṻ'),
    +    (0x1E7B, 'V'),
    +    (0x1E7C, 'M', u'ṽ'),
    +    (0x1E7D, 'V'),
    +    (0x1E7E, 'M', u'ṿ'),
    +    (0x1E7F, 'V'),
    +    (0x1E80, 'M', u'ẁ'),
    +    (0x1E81, 'V'),
    +    (0x1E82, 'M', u'ẃ'),
    +    ]
    +
    +def _seg_18():
    +    return [
    +    (0x1E83, 'V'),
    +    (0x1E84, 'M', u'ẅ'),
    +    (0x1E85, 'V'),
    +    (0x1E86, 'M', u'ẇ'),
    +    (0x1E87, 'V'),
    +    (0x1E88, 'M', u'ẉ'),
    +    (0x1E89, 'V'),
    +    (0x1E8A, 'M', u'ẋ'),
    +    (0x1E8B, 'V'),
    +    (0x1E8C, 'M', u'ẍ'),
    +    (0x1E8D, 'V'),
    +    (0x1E8E, 'M', u'ẏ'),
    +    (0x1E8F, 'V'),
    +    (0x1E90, 'M', u'ẑ'),
    +    (0x1E91, 'V'),
    +    (0x1E92, 'M', u'ẓ'),
    +    (0x1E93, 'V'),
    +    (0x1E94, 'M', u'ẕ'),
    +    (0x1E95, 'V'),
    +    (0x1E9A, 'M', u'aʾ'),
    +    (0x1E9B, 'M', u'ṡ'),
    +    (0x1E9C, 'V'),
    +    (0x1E9E, 'M', u'ss'),
    +    (0x1E9F, 'V'),
    +    (0x1EA0, 'M', u'ạ'),
    +    (0x1EA1, 'V'),
    +    (0x1EA2, 'M', u'ả'),
    +    (0x1EA3, 'V'),
    +    (0x1EA4, 'M', u'ấ'),
    +    (0x1EA5, 'V'),
    +    (0x1EA6, 'M', u'ầ'),
    +    (0x1EA7, 'V'),
    +    (0x1EA8, 'M', u'ẩ'),
    +    (0x1EA9, 'V'),
    +    (0x1EAA, 'M', u'ẫ'),
    +    (0x1EAB, 'V'),
    +    (0x1EAC, 'M', u'ậ'),
    +    (0x1EAD, 'V'),
    +    (0x1EAE, 'M', u'ắ'),
    +    (0x1EAF, 'V'),
    +    (0x1EB0, 'M', u'ằ'),
    +    (0x1EB1, 'V'),
    +    (0x1EB2, 'M', u'ẳ'),
    +    (0x1EB3, 'V'),
    +    (0x1EB4, 'M', u'ẵ'),
    +    (0x1EB5, 'V'),
    +    (0x1EB6, 'M', u'ặ'),
    +    (0x1EB7, 'V'),
    +    (0x1EB8, 'M', u'ẹ'),
    +    (0x1EB9, 'V'),
    +    (0x1EBA, 'M', u'ẻ'),
    +    (0x1EBB, 'V'),
    +    (0x1EBC, 'M', u'ẽ'),
    +    (0x1EBD, 'V'),
    +    (0x1EBE, 'M', u'ế'),
    +    (0x1EBF, 'V'),
    +    (0x1EC0, 'M', u'ề'),
    +    (0x1EC1, 'V'),
    +    (0x1EC2, 'M', u'ể'),
    +    (0x1EC3, 'V'),
    +    (0x1EC4, 'M', u'ễ'),
    +    (0x1EC5, 'V'),
    +    (0x1EC6, 'M', u'ệ'),
    +    (0x1EC7, 'V'),
    +    (0x1EC8, 'M', u'ỉ'),
    +    (0x1EC9, 'V'),
    +    (0x1ECA, 'M', u'ị'),
    +    (0x1ECB, 'V'),
    +    (0x1ECC, 'M', u'ọ'),
    +    (0x1ECD, 'V'),
    +    (0x1ECE, 'M', u'ỏ'),
    +    (0x1ECF, 'V'),
    +    (0x1ED0, 'M', u'ố'),
    +    (0x1ED1, 'V'),
    +    (0x1ED2, 'M', u'ồ'),
    +    (0x1ED3, 'V'),
    +    (0x1ED4, 'M', u'ổ'),
    +    (0x1ED5, 'V'),
    +    (0x1ED6, 'M', u'ỗ'),
    +    (0x1ED7, 'V'),
    +    (0x1ED8, 'M', u'ộ'),
    +    (0x1ED9, 'V'),
    +    (0x1EDA, 'M', u'ớ'),
    +    (0x1EDB, 'V'),
    +    (0x1EDC, 'M', u'ờ'),
    +    (0x1EDD, 'V'),
    +    (0x1EDE, 'M', u'ở'),
    +    (0x1EDF, 'V'),
    +    (0x1EE0, 'M', u'ỡ'),
    +    (0x1EE1, 'V'),
    +    (0x1EE2, 'M', u'ợ'),
    +    (0x1EE3, 'V'),
    +    (0x1EE4, 'M', u'ụ'),
    +    (0x1EE5, 'V'),
    +    (0x1EE6, 'M', u'ủ'),
    +    (0x1EE7, 'V'),
    +    (0x1EE8, 'M', u'ứ'),
    +    (0x1EE9, 'V'),
    +    (0x1EEA, 'M', u'ừ'),
    +    (0x1EEB, 'V'),
    +    ]
    +
    +def _seg_19():
    +    return [
    +    (0x1EEC, 'M', u'ử'),
    +    (0x1EED, 'V'),
    +    (0x1EEE, 'M', u'ữ'),
    +    (0x1EEF, 'V'),
    +    (0x1EF0, 'M', u'ự'),
    +    (0x1EF1, 'V'),
    +    (0x1EF2, 'M', u'ỳ'),
    +    (0x1EF3, 'V'),
    +    (0x1EF4, 'M', u'ỵ'),
    +    (0x1EF5, 'V'),
    +    (0x1EF6, 'M', u'ỷ'),
    +    (0x1EF7, 'V'),
    +    (0x1EF8, 'M', u'ỹ'),
    +    (0x1EF9, 'V'),
    +    (0x1EFA, 'M', u'ỻ'),
    +    (0x1EFB, 'V'),
    +    (0x1EFC, 'M', u'ỽ'),
    +    (0x1EFD, 'V'),
    +    (0x1EFE, 'M', u'ỿ'),
    +    (0x1EFF, 'V'),
    +    (0x1F08, 'M', u'ἀ'),
    +    (0x1F09, 'M', u'ἁ'),
    +    (0x1F0A, 'M', u'ἂ'),
    +    (0x1F0B, 'M', u'ἃ'),
    +    (0x1F0C, 'M', u'ἄ'),
    +    (0x1F0D, 'M', u'ἅ'),
    +    (0x1F0E, 'M', u'ἆ'),
    +    (0x1F0F, 'M', u'ἇ'),
    +    (0x1F10, 'V'),
    +    (0x1F16, 'X'),
    +    (0x1F18, 'M', u'ἐ'),
    +    (0x1F19, 'M', u'ἑ'),
    +    (0x1F1A, 'M', u'ἒ'),
    +    (0x1F1B, 'M', u'ἓ'),
    +    (0x1F1C, 'M', u'ἔ'),
    +    (0x1F1D, 'M', u'ἕ'),
    +    (0x1F1E, 'X'),
    +    (0x1F20, 'V'),
    +    (0x1F28, 'M', u'ἠ'),
    +    (0x1F29, 'M', u'ἡ'),
    +    (0x1F2A, 'M', u'ἢ'),
    +    (0x1F2B, 'M', u'ἣ'),
    +    (0x1F2C, 'M', u'ἤ'),
    +    (0x1F2D, 'M', u'ἥ'),
    +    (0x1F2E, 'M', u'ἦ'),
    +    (0x1F2F, 'M', u'ἧ'),
    +    (0x1F30, 'V'),
    +    (0x1F38, 'M', u'ἰ'),
    +    (0x1F39, 'M', u'ἱ'),
    +    (0x1F3A, 'M', u'ἲ'),
    +    (0x1F3B, 'M', u'ἳ'),
    +    (0x1F3C, 'M', u'ἴ'),
    +    (0x1F3D, 'M', u'ἵ'),
    +    (0x1F3E, 'M', u'ἶ'),
    +    (0x1F3F, 'M', u'ἷ'),
    +    (0x1F40, 'V'),
    +    (0x1F46, 'X'),
    +    (0x1F48, 'M', u'ὀ'),
    +    (0x1F49, 'M', u'ὁ'),
    +    (0x1F4A, 'M', u'ὂ'),
    +    (0x1F4B, 'M', u'ὃ'),
    +    (0x1F4C, 'M', u'ὄ'),
    +    (0x1F4D, 'M', u'ὅ'),
    +    (0x1F4E, 'X'),
    +    (0x1F50, 'V'),
    +    (0x1F58, 'X'),
    +    (0x1F59, 'M', u'ὑ'),
    +    (0x1F5A, 'X'),
    +    (0x1F5B, 'M', u'ὓ'),
    +    (0x1F5C, 'X'),
    +    (0x1F5D, 'M', u'ὕ'),
    +    (0x1F5E, 'X'),
    +    (0x1F5F, 'M', u'ὗ'),
    +    (0x1F60, 'V'),
    +    (0x1F68, 'M', u'ὠ'),
    +    (0x1F69, 'M', u'ὡ'),
    +    (0x1F6A, 'M', u'ὢ'),
    +    (0x1F6B, 'M', u'ὣ'),
    +    (0x1F6C, 'M', u'ὤ'),
    +    (0x1F6D, 'M', u'ὥ'),
    +    (0x1F6E, 'M', u'ὦ'),
    +    (0x1F6F, 'M', u'ὧ'),
    +    (0x1F70, 'V'),
    +    (0x1F71, 'M', u'ά'),
    +    (0x1F72, 'V'),
    +    (0x1F73, 'M', u'έ'),
    +    (0x1F74, 'V'),
    +    (0x1F75, 'M', u'ή'),
    +    (0x1F76, 'V'),
    +    (0x1F77, 'M', u'ί'),
    +    (0x1F78, 'V'),
    +    (0x1F79, 'M', u'ό'),
    +    (0x1F7A, 'V'),
    +    (0x1F7B, 'M', u'ύ'),
    +    (0x1F7C, 'V'),
    +    (0x1F7D, 'M', u'ώ'),
    +    (0x1F7E, 'X'),
    +    (0x1F80, 'M', u'ἀι'),
    +    (0x1F81, 'M', u'ἁι'),
    +    (0x1F82, 'M', u'ἂι'),
    +    ]
    +
    +def _seg_20():
    +    return [
    +    (0x1F83, 'M', u'ἃι'),
    +    (0x1F84, 'M', u'ἄι'),
    +    (0x1F85, 'M', u'ἅι'),
    +    (0x1F86, 'M', u'ἆι'),
    +    (0x1F87, 'M', u'ἇι'),
    +    (0x1F88, 'M', u'ἀι'),
    +    (0x1F89, 'M', u'ἁι'),
    +    (0x1F8A, 'M', u'ἂι'),
    +    (0x1F8B, 'M', u'ἃι'),
    +    (0x1F8C, 'M', u'ἄι'),
    +    (0x1F8D, 'M', u'ἅι'),
    +    (0x1F8E, 'M', u'ἆι'),
    +    (0x1F8F, 'M', u'ἇι'),
    +    (0x1F90, 'M', u'ἠι'),
    +    (0x1F91, 'M', u'ἡι'),
    +    (0x1F92, 'M', u'ἢι'),
    +    (0x1F93, 'M', u'ἣι'),
    +    (0x1F94, 'M', u'ἤι'),
    +    (0x1F95, 'M', u'ἥι'),
    +    (0x1F96, 'M', u'ἦι'),
    +    (0x1F97, 'M', u'ἧι'),
    +    (0x1F98, 'M', u'ἠι'),
    +    (0x1F99, 'M', u'ἡι'),
    +    (0x1F9A, 'M', u'ἢι'),
    +    (0x1F9B, 'M', u'ἣι'),
    +    (0x1F9C, 'M', u'ἤι'),
    +    (0x1F9D, 'M', u'ἥι'),
    +    (0x1F9E, 'M', u'ἦι'),
    +    (0x1F9F, 'M', u'ἧι'),
    +    (0x1FA0, 'M', u'ὠι'),
    +    (0x1FA1, 'M', u'ὡι'),
    +    (0x1FA2, 'M', u'ὢι'),
    +    (0x1FA3, 'M', u'ὣι'),
    +    (0x1FA4, 'M', u'ὤι'),
    +    (0x1FA5, 'M', u'ὥι'),
    +    (0x1FA6, 'M', u'ὦι'),
    +    (0x1FA7, 'M', u'ὧι'),
    +    (0x1FA8, 'M', u'ὠι'),
    +    (0x1FA9, 'M', u'ὡι'),
    +    (0x1FAA, 'M', u'ὢι'),
    +    (0x1FAB, 'M', u'ὣι'),
    +    (0x1FAC, 'M', u'ὤι'),
    +    (0x1FAD, 'M', u'ὥι'),
    +    (0x1FAE, 'M', u'ὦι'),
    +    (0x1FAF, 'M', u'ὧι'),
    +    (0x1FB0, 'V'),
    +    (0x1FB2, 'M', u'ὰι'),
    +    (0x1FB3, 'M', u'αι'),
    +    (0x1FB4, 'M', u'άι'),
    +    (0x1FB5, 'X'),
    +    (0x1FB6, 'V'),
    +    (0x1FB7, 'M', u'ᾶι'),
    +    (0x1FB8, 'M', u'ᾰ'),
    +    (0x1FB9, 'M', u'ᾱ'),
    +    (0x1FBA, 'M', u'ὰ'),
    +    (0x1FBB, 'M', u'ά'),
    +    (0x1FBC, 'M', u'αι'),
    +    (0x1FBD, '3', u' ̓'),
    +    (0x1FBE, 'M', u'ι'),
    +    (0x1FBF, '3', u' ̓'),
    +    (0x1FC0, '3', u' ͂'),
    +    (0x1FC1, '3', u' ̈͂'),
    +    (0x1FC2, 'M', u'ὴι'),
    +    (0x1FC3, 'M', u'ηι'),
    +    (0x1FC4, 'M', u'ήι'),
    +    (0x1FC5, 'X'),
    +    (0x1FC6, 'V'),
    +    (0x1FC7, 'M', u'ῆι'),
    +    (0x1FC8, 'M', u'ὲ'),
    +    (0x1FC9, 'M', u'έ'),
    +    (0x1FCA, 'M', u'ὴ'),
    +    (0x1FCB, 'M', u'ή'),
    +    (0x1FCC, 'M', u'ηι'),
    +    (0x1FCD, '3', u' ̓̀'),
    +    (0x1FCE, '3', u' ̓́'),
    +    (0x1FCF, '3', u' ̓͂'),
    +    (0x1FD0, 'V'),
    +    (0x1FD3, 'M', u'ΐ'),
    +    (0x1FD4, 'X'),
    +    (0x1FD6, 'V'),
    +    (0x1FD8, 'M', u'ῐ'),
    +    (0x1FD9, 'M', u'ῑ'),
    +    (0x1FDA, 'M', u'ὶ'),
    +    (0x1FDB, 'M', u'ί'),
    +    (0x1FDC, 'X'),
    +    (0x1FDD, '3', u' ̔̀'),
    +    (0x1FDE, '3', u' ̔́'),
    +    (0x1FDF, '3', u' ̔͂'),
    +    (0x1FE0, 'V'),
    +    (0x1FE3, 'M', u'ΰ'),
    +    (0x1FE4, 'V'),
    +    (0x1FE8, 'M', u'ῠ'),
    +    (0x1FE9, 'M', u'ῡ'),
    +    (0x1FEA, 'M', u'ὺ'),
    +    (0x1FEB, 'M', u'ύ'),
    +    (0x1FEC, 'M', u'ῥ'),
    +    (0x1FED, '3', u' ̈̀'),
    +    (0x1FEE, '3', u' ̈́'),
    +    (0x1FEF, '3', u'`'),
    +    (0x1FF0, 'X'),
    +    ]
    +
    +def _seg_21():
    +    return [
    +    (0x1FF2, 'M', u'ὼι'),
    +    (0x1FF3, 'M', u'ωι'),
    +    (0x1FF4, 'M', u'ώι'),
    +    (0x1FF5, 'X'),
    +    (0x1FF6, 'V'),
    +    (0x1FF7, 'M', u'ῶι'),
    +    (0x1FF8, 'M', u'ὸ'),
    +    (0x1FF9, 'M', u'ό'),
    +    (0x1FFA, 'M', u'ὼ'),
    +    (0x1FFB, 'M', u'ώ'),
    +    (0x1FFC, 'M', u'ωι'),
    +    (0x1FFD, '3', u' ́'),
    +    (0x1FFE, '3', u' ̔'),
    +    (0x1FFF, 'X'),
    +    (0x2000, '3', u' '),
    +    (0x200B, 'I'),
    +    (0x200C, 'D', u''),
    +    (0x200E, 'X'),
    +    (0x2010, 'V'),
    +    (0x2011, 'M', u'‐'),
    +    (0x2012, 'V'),
    +    (0x2017, '3', u' ̳'),
    +    (0x2018, 'V'),
    +    (0x2024, 'X'),
    +    (0x2027, 'V'),
    +    (0x2028, 'X'),
    +    (0x202F, '3', u' '),
    +    (0x2030, 'V'),
    +    (0x2033, 'M', u'′′'),
    +    (0x2034, 'M', u'′′′'),
    +    (0x2035, 'V'),
    +    (0x2036, 'M', u'‵‵'),
    +    (0x2037, 'M', u'‵‵‵'),
    +    (0x2038, 'V'),
    +    (0x203C, '3', u'!!'),
    +    (0x203D, 'V'),
    +    (0x203E, '3', u' ̅'),
    +    (0x203F, 'V'),
    +    (0x2047, '3', u'??'),
    +    (0x2048, '3', u'?!'),
    +    (0x2049, '3', u'!?'),
    +    (0x204A, 'V'),
    +    (0x2057, 'M', u'′′′′'),
    +    (0x2058, 'V'),
    +    (0x205F, '3', u' '),
    +    (0x2060, 'I'),
    +    (0x2061, 'X'),
    +    (0x2064, 'I'),
    +    (0x2065, 'X'),
    +    (0x2070, 'M', u'0'),
    +    (0x2071, 'M', u'i'),
    +    (0x2072, 'X'),
    +    (0x2074, 'M', u'4'),
    +    (0x2075, 'M', u'5'),
    +    (0x2076, 'M', u'6'),
    +    (0x2077, 'M', u'7'),
    +    (0x2078, 'M', u'8'),
    +    (0x2079, 'M', u'9'),
    +    (0x207A, '3', u'+'),
    +    (0x207B, 'M', u'−'),
    +    (0x207C, '3', u'='),
    +    (0x207D, '3', u'('),
    +    (0x207E, '3', u')'),
    +    (0x207F, 'M', u'n'),
    +    (0x2080, 'M', u'0'),
    +    (0x2081, 'M', u'1'),
    +    (0x2082, 'M', u'2'),
    +    (0x2083, 'M', u'3'),
    +    (0x2084, 'M', u'4'),
    +    (0x2085, 'M', u'5'),
    +    (0x2086, 'M', u'6'),
    +    (0x2087, 'M', u'7'),
    +    (0x2088, 'M', u'8'),
    +    (0x2089, 'M', u'9'),
    +    (0x208A, '3', u'+'),
    +    (0x208B, 'M', u'−'),
    +    (0x208C, '3', u'='),
    +    (0x208D, '3', u'('),
    +    (0x208E, '3', u')'),
    +    (0x208F, 'X'),
    +    (0x2090, 'M', u'a'),
    +    (0x2091, 'M', u'e'),
    +    (0x2092, 'M', u'o'),
    +    (0x2093, 'M', u'x'),
    +    (0x2094, 'M', u'ə'),
    +    (0x2095, 'M', u'h'),
    +    (0x2096, 'M', u'k'),
    +    (0x2097, 'M', u'l'),
    +    (0x2098, 'M', u'm'),
    +    (0x2099, 'M', u'n'),
    +    (0x209A, 'M', u'p'),
    +    (0x209B, 'M', u's'),
    +    (0x209C, 'M', u't'),
    +    (0x209D, 'X'),
    +    (0x20A0, 'V'),
    +    (0x20A8, 'M', u'rs'),
    +    (0x20A9, 'V'),
    +    (0x20C0, 'X'),
    +    (0x20D0, 'V'),
    +    (0x20F1, 'X'),
    +    ]
    +
    +def _seg_22():
    +    return [
    +    (0x2100, '3', u'a/c'),
    +    (0x2101, '3', u'a/s'),
    +    (0x2102, 'M', u'c'),
    +    (0x2103, 'M', u'°c'),
    +    (0x2104, 'V'),
    +    (0x2105, '3', u'c/o'),
    +    (0x2106, '3', u'c/u'),
    +    (0x2107, 'M', u'ɛ'),
    +    (0x2108, 'V'),
    +    (0x2109, 'M', u'°f'),
    +    (0x210A, 'M', u'g'),
    +    (0x210B, 'M', u'h'),
    +    (0x210F, 'M', u'ħ'),
    +    (0x2110, 'M', u'i'),
    +    (0x2112, 'M', u'l'),
    +    (0x2114, 'V'),
    +    (0x2115, 'M', u'n'),
    +    (0x2116, 'M', u'no'),
    +    (0x2117, 'V'),
    +    (0x2119, 'M', u'p'),
    +    (0x211A, 'M', u'q'),
    +    (0x211B, 'M', u'r'),
    +    (0x211E, 'V'),
    +    (0x2120, 'M', u'sm'),
    +    (0x2121, 'M', u'tel'),
    +    (0x2122, 'M', u'tm'),
    +    (0x2123, 'V'),
    +    (0x2124, 'M', u'z'),
    +    (0x2125, 'V'),
    +    (0x2126, 'M', u'ω'),
    +    (0x2127, 'V'),
    +    (0x2128, 'M', u'z'),
    +    (0x2129, 'V'),
    +    (0x212A, 'M', u'k'),
    +    (0x212B, 'M', u'å'),
    +    (0x212C, 'M', u'b'),
    +    (0x212D, 'M', u'c'),
    +    (0x212E, 'V'),
    +    (0x212F, 'M', u'e'),
    +    (0x2131, 'M', u'f'),
    +    (0x2132, 'X'),
    +    (0x2133, 'M', u'm'),
    +    (0x2134, 'M', u'o'),
    +    (0x2135, 'M', u'א'),
    +    (0x2136, 'M', u'ב'),
    +    (0x2137, 'M', u'ג'),
    +    (0x2138, 'M', u'ד'),
    +    (0x2139, 'M', u'i'),
    +    (0x213A, 'V'),
    +    (0x213B, 'M', u'fax'),
    +    (0x213C, 'M', u'π'),
    +    (0x213D, 'M', u'γ'),
    +    (0x213F, 'M', u'π'),
    +    (0x2140, 'M', u'∑'),
    +    (0x2141, 'V'),
    +    (0x2145, 'M', u'd'),
    +    (0x2147, 'M', u'e'),
    +    (0x2148, 'M', u'i'),
    +    (0x2149, 'M', u'j'),
    +    (0x214A, 'V'),
    +    (0x2150, 'M', u'1⁄7'),
    +    (0x2151, 'M', u'1⁄9'),
    +    (0x2152, 'M', u'1⁄10'),
    +    (0x2153, 'M', u'1⁄3'),
    +    (0x2154, 'M', u'2⁄3'),
    +    (0x2155, 'M', u'1⁄5'),
    +    (0x2156, 'M', u'2⁄5'),
    +    (0x2157, 'M', u'3⁄5'),
    +    (0x2158, 'M', u'4⁄5'),
    +    (0x2159, 'M', u'1⁄6'),
    +    (0x215A, 'M', u'5⁄6'),
    +    (0x215B, 'M', u'1⁄8'),
    +    (0x215C, 'M', u'3⁄8'),
    +    (0x215D, 'M', u'5⁄8'),
    +    (0x215E, 'M', u'7⁄8'),
    +    (0x215F, 'M', u'1⁄'),
    +    (0x2160, 'M', u'i'),
    +    (0x2161, 'M', u'ii'),
    +    (0x2162, 'M', u'iii'),
    +    (0x2163, 'M', u'iv'),
    +    (0x2164, 'M', u'v'),
    +    (0x2165, 'M', u'vi'),
    +    (0x2166, 'M', u'vii'),
    +    (0x2167, 'M', u'viii'),
    +    (0x2168, 'M', u'ix'),
    +    (0x2169, 'M', u'x'),
    +    (0x216A, 'M', u'xi'),
    +    (0x216B, 'M', u'xii'),
    +    (0x216C, 'M', u'l'),
    +    (0x216D, 'M', u'c'),
    +    (0x216E, 'M', u'd'),
    +    (0x216F, 'M', u'm'),
    +    (0x2170, 'M', u'i'),
    +    (0x2171, 'M', u'ii'),
    +    (0x2172, 'M', u'iii'),
    +    (0x2173, 'M', u'iv'),
    +    (0x2174, 'M', u'v'),
    +    (0x2175, 'M', u'vi'),
    +    (0x2176, 'M', u'vii'),
    +    (0x2177, 'M', u'viii'),
    +    ]
    +
    +def _seg_23():
    +    return [
    +    (0x2178, 'M', u'ix'),
    +    (0x2179, 'M', u'x'),
    +    (0x217A, 'M', u'xi'),
    +    (0x217B, 'M', u'xii'),
    +    (0x217C, 'M', u'l'),
    +    (0x217D, 'M', u'c'),
    +    (0x217E, 'M', u'd'),
    +    (0x217F, 'M', u'm'),
    +    (0x2180, 'V'),
    +    (0x2183, 'X'),
    +    (0x2184, 'V'),
    +    (0x2189, 'M', u'0⁄3'),
    +    (0x218A, 'V'),
    +    (0x218C, 'X'),
    +    (0x2190, 'V'),
    +    (0x222C, 'M', u'∫∫'),
    +    (0x222D, 'M', u'∫∫∫'),
    +    (0x222E, 'V'),
    +    (0x222F, 'M', u'∮∮'),
    +    (0x2230, 'M', u'∮∮∮'),
    +    (0x2231, 'V'),
    +    (0x2260, '3'),
    +    (0x2261, 'V'),
    +    (0x226E, '3'),
    +    (0x2270, 'V'),
    +    (0x2329, 'M', u'〈'),
    +    (0x232A, 'M', u'〉'),
    +    (0x232B, 'V'),
    +    (0x2427, 'X'),
    +    (0x2440, 'V'),
    +    (0x244B, 'X'),
    +    (0x2460, 'M', u'1'),
    +    (0x2461, 'M', u'2'),
    +    (0x2462, 'M', u'3'),
    +    (0x2463, 'M', u'4'),
    +    (0x2464, 'M', u'5'),
    +    (0x2465, 'M', u'6'),
    +    (0x2466, 'M', u'7'),
    +    (0x2467, 'M', u'8'),
    +    (0x2468, 'M', u'9'),
    +    (0x2469, 'M', u'10'),
    +    (0x246A, 'M', u'11'),
    +    (0x246B, 'M', u'12'),
    +    (0x246C, 'M', u'13'),
    +    (0x246D, 'M', u'14'),
    +    (0x246E, 'M', u'15'),
    +    (0x246F, 'M', u'16'),
    +    (0x2470, 'M', u'17'),
    +    (0x2471, 'M', u'18'),
    +    (0x2472, 'M', u'19'),
    +    (0x2473, 'M', u'20'),
    +    (0x2474, '3', u'(1)'),
    +    (0x2475, '3', u'(2)'),
    +    (0x2476, '3', u'(3)'),
    +    (0x2477, '3', u'(4)'),
    +    (0x2478, '3', u'(5)'),
    +    (0x2479, '3', u'(6)'),
    +    (0x247A, '3', u'(7)'),
    +    (0x247B, '3', u'(8)'),
    +    (0x247C, '3', u'(9)'),
    +    (0x247D, '3', u'(10)'),
    +    (0x247E, '3', u'(11)'),
    +    (0x247F, '3', u'(12)'),
    +    (0x2480, '3', u'(13)'),
    +    (0x2481, '3', u'(14)'),
    +    (0x2482, '3', u'(15)'),
    +    (0x2483, '3', u'(16)'),
    +    (0x2484, '3', u'(17)'),
    +    (0x2485, '3', u'(18)'),
    +    (0x2486, '3', u'(19)'),
    +    (0x2487, '3', u'(20)'),
    +    (0x2488, 'X'),
    +    (0x249C, '3', u'(a)'),
    +    (0x249D, '3', u'(b)'),
    +    (0x249E, '3', u'(c)'),
    +    (0x249F, '3', u'(d)'),
    +    (0x24A0, '3', u'(e)'),
    +    (0x24A1, '3', u'(f)'),
    +    (0x24A2, '3', u'(g)'),
    +    (0x24A3, '3', u'(h)'),
    +    (0x24A4, '3', u'(i)'),
    +    (0x24A5, '3', u'(j)'),
    +    (0x24A6, '3', u'(k)'),
    +    (0x24A7, '3', u'(l)'),
    +    (0x24A8, '3', u'(m)'),
    +    (0x24A9, '3', u'(n)'),
    +    (0x24AA, '3', u'(o)'),
    +    (0x24AB, '3', u'(p)'),
    +    (0x24AC, '3', u'(q)'),
    +    (0x24AD, '3', u'(r)'),
    +    (0x24AE, '3', u'(s)'),
    +    (0x24AF, '3', u'(t)'),
    +    (0x24B0, '3', u'(u)'),
    +    (0x24B1, '3', u'(v)'),
    +    (0x24B2, '3', u'(w)'),
    +    (0x24B3, '3', u'(x)'),
    +    (0x24B4, '3', u'(y)'),
    +    (0x24B5, '3', u'(z)'),
    +    (0x24B6, 'M', u'a'),
    +    (0x24B7, 'M', u'b'),
    +    ]
    +
    +def _seg_24():
    +    return [
    +    (0x24B8, 'M', u'c'),
    +    (0x24B9, 'M', u'd'),
    +    (0x24BA, 'M', u'e'),
    +    (0x24BB, 'M', u'f'),
    +    (0x24BC, 'M', u'g'),
    +    (0x24BD, 'M', u'h'),
    +    (0x24BE, 'M', u'i'),
    +    (0x24BF, 'M', u'j'),
    +    (0x24C0, 'M', u'k'),
    +    (0x24C1, 'M', u'l'),
    +    (0x24C2, 'M', u'm'),
    +    (0x24C3, 'M', u'n'),
    +    (0x24C4, 'M', u'o'),
    +    (0x24C5, 'M', u'p'),
    +    (0x24C6, 'M', u'q'),
    +    (0x24C7, 'M', u'r'),
    +    (0x24C8, 'M', u's'),
    +    (0x24C9, 'M', u't'),
    +    (0x24CA, 'M', u'u'),
    +    (0x24CB, 'M', u'v'),
    +    (0x24CC, 'M', u'w'),
    +    (0x24CD, 'M', u'x'),
    +    (0x24CE, 'M', u'y'),
    +    (0x24CF, 'M', u'z'),
    +    (0x24D0, 'M', u'a'),
    +    (0x24D1, 'M', u'b'),
    +    (0x24D2, 'M', u'c'),
    +    (0x24D3, 'M', u'd'),
    +    (0x24D4, 'M', u'e'),
    +    (0x24D5, 'M', u'f'),
    +    (0x24D6, 'M', u'g'),
    +    (0x24D7, 'M', u'h'),
    +    (0x24D8, 'M', u'i'),
    +    (0x24D9, 'M', u'j'),
    +    (0x24DA, 'M', u'k'),
    +    (0x24DB, 'M', u'l'),
    +    (0x24DC, 'M', u'm'),
    +    (0x24DD, 'M', u'n'),
    +    (0x24DE, 'M', u'o'),
    +    (0x24DF, 'M', u'p'),
    +    (0x24E0, 'M', u'q'),
    +    (0x24E1, 'M', u'r'),
    +    (0x24E2, 'M', u's'),
    +    (0x24E3, 'M', u't'),
    +    (0x24E4, 'M', u'u'),
    +    (0x24E5, 'M', u'v'),
    +    (0x24E6, 'M', u'w'),
    +    (0x24E7, 'M', u'x'),
    +    (0x24E8, 'M', u'y'),
    +    (0x24E9, 'M', u'z'),
    +    (0x24EA, 'M', u'0'),
    +    (0x24EB, 'V'),
    +    (0x2A0C, 'M', u'∫∫∫∫'),
    +    (0x2A0D, 'V'),
    +    (0x2A74, '3', u'::='),
    +    (0x2A75, '3', u'=='),
    +    (0x2A76, '3', u'==='),
    +    (0x2A77, 'V'),
    +    (0x2ADC, 'M', u'⫝̸'),
    +    (0x2ADD, 'V'),
    +    (0x2B74, 'X'),
    +    (0x2B76, 'V'),
    +    (0x2B96, 'X'),
    +    (0x2B98, 'V'),
    +    (0x2C00, 'M', u'ⰰ'),
    +    (0x2C01, 'M', u'ⰱ'),
    +    (0x2C02, 'M', u'ⰲ'),
    +    (0x2C03, 'M', u'ⰳ'),
    +    (0x2C04, 'M', u'ⰴ'),
    +    (0x2C05, 'M', u'ⰵ'),
    +    (0x2C06, 'M', u'ⰶ'),
    +    (0x2C07, 'M', u'ⰷ'),
    +    (0x2C08, 'M', u'ⰸ'),
    +    (0x2C09, 'M', u'ⰹ'),
    +    (0x2C0A, 'M', u'ⰺ'),
    +    (0x2C0B, 'M', u'ⰻ'),
    +    (0x2C0C, 'M', u'ⰼ'),
    +    (0x2C0D, 'M', u'ⰽ'),
    +    (0x2C0E, 'M', u'ⰾ'),
    +    (0x2C0F, 'M', u'ⰿ'),
    +    (0x2C10, 'M', u'ⱀ'),
    +    (0x2C11, 'M', u'ⱁ'),
    +    (0x2C12, 'M', u'ⱂ'),
    +    (0x2C13, 'M', u'ⱃ'),
    +    (0x2C14, 'M', u'ⱄ'),
    +    (0x2C15, 'M', u'ⱅ'),
    +    (0x2C16, 'M', u'ⱆ'),
    +    (0x2C17, 'M', u'ⱇ'),
    +    (0x2C18, 'M', u'ⱈ'),
    +    (0x2C19, 'M', u'ⱉ'),
    +    (0x2C1A, 'M', u'ⱊ'),
    +    (0x2C1B, 'M', u'ⱋ'),
    +    (0x2C1C, 'M', u'ⱌ'),
    +    (0x2C1D, 'M', u'ⱍ'),
    +    (0x2C1E, 'M', u'ⱎ'),
    +    (0x2C1F, 'M', u'ⱏ'),
    +    (0x2C20, 'M', u'ⱐ'),
    +    (0x2C21, 'M', u'ⱑ'),
    +    (0x2C22, 'M', u'ⱒ'),
    +    (0x2C23, 'M', u'ⱓ'),
    +    ]
    +
    +def _seg_25():
    +    return [
    +    (0x2C24, 'M', u'ⱔ'),
    +    (0x2C25, 'M', u'ⱕ'),
    +    (0x2C26, 'M', u'ⱖ'),
    +    (0x2C27, 'M', u'ⱗ'),
    +    (0x2C28, 'M', u'ⱘ'),
    +    (0x2C29, 'M', u'ⱙ'),
    +    (0x2C2A, 'M', u'ⱚ'),
    +    (0x2C2B, 'M', u'ⱛ'),
    +    (0x2C2C, 'M', u'ⱜ'),
    +    (0x2C2D, 'M', u'ⱝ'),
    +    (0x2C2E, 'M', u'ⱞ'),
    +    (0x2C2F, 'X'),
    +    (0x2C30, 'V'),
    +    (0x2C5F, 'X'),
    +    (0x2C60, 'M', u'ⱡ'),
    +    (0x2C61, 'V'),
    +    (0x2C62, 'M', u'ɫ'),
    +    (0x2C63, 'M', u'ᵽ'),
    +    (0x2C64, 'M', u'ɽ'),
    +    (0x2C65, 'V'),
    +    (0x2C67, 'M', u'ⱨ'),
    +    (0x2C68, 'V'),
    +    (0x2C69, 'M', u'ⱪ'),
    +    (0x2C6A, 'V'),
    +    (0x2C6B, 'M', u'ⱬ'),
    +    (0x2C6C, 'V'),
    +    (0x2C6D, 'M', u'ɑ'),
    +    (0x2C6E, 'M', u'ɱ'),
    +    (0x2C6F, 'M', u'ɐ'),
    +    (0x2C70, 'M', u'ɒ'),
    +    (0x2C71, 'V'),
    +    (0x2C72, 'M', u'ⱳ'),
    +    (0x2C73, 'V'),
    +    (0x2C75, 'M', u'ⱶ'),
    +    (0x2C76, 'V'),
    +    (0x2C7C, 'M', u'j'),
    +    (0x2C7D, 'M', u'v'),
    +    (0x2C7E, 'M', u'ȿ'),
    +    (0x2C7F, 'M', u'ɀ'),
    +    (0x2C80, 'M', u'ⲁ'),
    +    (0x2C81, 'V'),
    +    (0x2C82, 'M', u'ⲃ'),
    +    (0x2C83, 'V'),
    +    (0x2C84, 'M', u'ⲅ'),
    +    (0x2C85, 'V'),
    +    (0x2C86, 'M', u'ⲇ'),
    +    (0x2C87, 'V'),
    +    (0x2C88, 'M', u'ⲉ'),
    +    (0x2C89, 'V'),
    +    (0x2C8A, 'M', u'ⲋ'),
    +    (0x2C8B, 'V'),
    +    (0x2C8C, 'M', u'ⲍ'),
    +    (0x2C8D, 'V'),
    +    (0x2C8E, 'M', u'ⲏ'),
    +    (0x2C8F, 'V'),
    +    (0x2C90, 'M', u'ⲑ'),
    +    (0x2C91, 'V'),
    +    (0x2C92, 'M', u'ⲓ'),
    +    (0x2C93, 'V'),
    +    (0x2C94, 'M', u'ⲕ'),
    +    (0x2C95, 'V'),
    +    (0x2C96, 'M', u'ⲗ'),
    +    (0x2C97, 'V'),
    +    (0x2C98, 'M', u'ⲙ'),
    +    (0x2C99, 'V'),
    +    (0x2C9A, 'M', u'ⲛ'),
    +    (0x2C9B, 'V'),
    +    (0x2C9C, 'M', u'ⲝ'),
    +    (0x2C9D, 'V'),
    +    (0x2C9E, 'M', u'ⲟ'),
    +    (0x2C9F, 'V'),
    +    (0x2CA0, 'M', u'ⲡ'),
    +    (0x2CA1, 'V'),
    +    (0x2CA2, 'M', u'ⲣ'),
    +    (0x2CA3, 'V'),
    +    (0x2CA4, 'M', u'ⲥ'),
    +    (0x2CA5, 'V'),
    +    (0x2CA6, 'M', u'ⲧ'),
    +    (0x2CA7, 'V'),
    +    (0x2CA8, 'M', u'ⲩ'),
    +    (0x2CA9, 'V'),
    +    (0x2CAA, 'M', u'ⲫ'),
    +    (0x2CAB, 'V'),
    +    (0x2CAC, 'M', u'ⲭ'),
    +    (0x2CAD, 'V'),
    +    (0x2CAE, 'M', u'ⲯ'),
    +    (0x2CAF, 'V'),
    +    (0x2CB0, 'M', u'ⲱ'),
    +    (0x2CB1, 'V'),
    +    (0x2CB2, 'M', u'ⲳ'),
    +    (0x2CB3, 'V'),
    +    (0x2CB4, 'M', u'ⲵ'),
    +    (0x2CB5, 'V'),
    +    (0x2CB6, 'M', u'ⲷ'),
    +    (0x2CB7, 'V'),
    +    (0x2CB8, 'M', u'ⲹ'),
    +    (0x2CB9, 'V'),
    +    (0x2CBA, 'M', u'ⲻ'),
    +    (0x2CBB, 'V'),
    +    (0x2CBC, 'M', u'ⲽ'),
    +    ]
    +
    +def _seg_26():
    +    return [
    +    (0x2CBD, 'V'),
    +    (0x2CBE, 'M', u'ⲿ'),
    +    (0x2CBF, 'V'),
    +    (0x2CC0, 'M', u'ⳁ'),
    +    (0x2CC1, 'V'),
    +    (0x2CC2, 'M', u'ⳃ'),
    +    (0x2CC3, 'V'),
    +    (0x2CC4, 'M', u'ⳅ'),
    +    (0x2CC5, 'V'),
    +    (0x2CC6, 'M', u'ⳇ'),
    +    (0x2CC7, 'V'),
    +    (0x2CC8, 'M', u'ⳉ'),
    +    (0x2CC9, 'V'),
    +    (0x2CCA, 'M', u'ⳋ'),
    +    (0x2CCB, 'V'),
    +    (0x2CCC, 'M', u'ⳍ'),
    +    (0x2CCD, 'V'),
    +    (0x2CCE, 'M', u'ⳏ'),
    +    (0x2CCF, 'V'),
    +    (0x2CD0, 'M', u'ⳑ'),
    +    (0x2CD1, 'V'),
    +    (0x2CD2, 'M', u'ⳓ'),
    +    (0x2CD3, 'V'),
    +    (0x2CD4, 'M', u'ⳕ'),
    +    (0x2CD5, 'V'),
    +    (0x2CD6, 'M', u'ⳗ'),
    +    (0x2CD7, 'V'),
    +    (0x2CD8, 'M', u'ⳙ'),
    +    (0x2CD9, 'V'),
    +    (0x2CDA, 'M', u'ⳛ'),
    +    (0x2CDB, 'V'),
    +    (0x2CDC, 'M', u'ⳝ'),
    +    (0x2CDD, 'V'),
    +    (0x2CDE, 'M', u'ⳟ'),
    +    (0x2CDF, 'V'),
    +    (0x2CE0, 'M', u'ⳡ'),
    +    (0x2CE1, 'V'),
    +    (0x2CE2, 'M', u'ⳣ'),
    +    (0x2CE3, 'V'),
    +    (0x2CEB, 'M', u'ⳬ'),
    +    (0x2CEC, 'V'),
    +    (0x2CED, 'M', u'ⳮ'),
    +    (0x2CEE, 'V'),
    +    (0x2CF2, 'M', u'ⳳ'),
    +    (0x2CF3, 'V'),
    +    (0x2CF4, 'X'),
    +    (0x2CF9, 'V'),
    +    (0x2D26, 'X'),
    +    (0x2D27, 'V'),
    +    (0x2D28, 'X'),
    +    (0x2D2D, 'V'),
    +    (0x2D2E, 'X'),
    +    (0x2D30, 'V'),
    +    (0x2D68, 'X'),
    +    (0x2D6F, 'M', u'ⵡ'),
    +    (0x2D70, 'V'),
    +    (0x2D71, 'X'),
    +    (0x2D7F, 'V'),
    +    (0x2D97, 'X'),
    +    (0x2DA0, 'V'),
    +    (0x2DA7, 'X'),
    +    (0x2DA8, 'V'),
    +    (0x2DAF, 'X'),
    +    (0x2DB0, 'V'),
    +    (0x2DB7, 'X'),
    +    (0x2DB8, 'V'),
    +    (0x2DBF, 'X'),
    +    (0x2DC0, 'V'),
    +    (0x2DC7, 'X'),
    +    (0x2DC8, 'V'),
    +    (0x2DCF, 'X'),
    +    (0x2DD0, 'V'),
    +    (0x2DD7, 'X'),
    +    (0x2DD8, 'V'),
    +    (0x2DDF, 'X'),
    +    (0x2DE0, 'V'),
    +    (0x2E50, 'X'),
    +    (0x2E80, 'V'),
    +    (0x2E9A, 'X'),
    +    (0x2E9B, 'V'),
    +    (0x2E9F, 'M', u'母'),
    +    (0x2EA0, 'V'),
    +    (0x2EF3, 'M', u'龟'),
    +    (0x2EF4, 'X'),
    +    (0x2F00, 'M', u'一'),
    +    (0x2F01, 'M', u'丨'),
    +    (0x2F02, 'M', u'丶'),
    +    (0x2F03, 'M', u'丿'),
    +    (0x2F04, 'M', u'乙'),
    +    (0x2F05, 'M', u'亅'),
    +    (0x2F06, 'M', u'二'),
    +    (0x2F07, 'M', u'亠'),
    +    (0x2F08, 'M', u'人'),
    +    (0x2F09, 'M', u'儿'),
    +    (0x2F0A, 'M', u'入'),
    +    (0x2F0B, 'M', u'八'),
    +    (0x2F0C, 'M', u'冂'),
    +    (0x2F0D, 'M', u'冖'),
    +    (0x2F0E, 'M', u'冫'),
    +    (0x2F0F, 'M', u'几'),
    +    ]
    +
    +def _seg_27():
    +    return [
    +    (0x2F10, 'M', u'凵'),
    +    (0x2F11, 'M', u'刀'),
    +    (0x2F12, 'M', u'力'),
    +    (0x2F13, 'M', u'勹'),
    +    (0x2F14, 'M', u'匕'),
    +    (0x2F15, 'M', u'匚'),
    +    (0x2F16, 'M', u'匸'),
    +    (0x2F17, 'M', u'十'),
    +    (0x2F18, 'M', u'卜'),
    +    (0x2F19, 'M', u'卩'),
    +    (0x2F1A, 'M', u'厂'),
    +    (0x2F1B, 'M', u'厶'),
    +    (0x2F1C, 'M', u'又'),
    +    (0x2F1D, 'M', u'口'),
    +    (0x2F1E, 'M', u'囗'),
    +    (0x2F1F, 'M', u'土'),
    +    (0x2F20, 'M', u'士'),
    +    (0x2F21, 'M', u'夂'),
    +    (0x2F22, 'M', u'夊'),
    +    (0x2F23, 'M', u'夕'),
    +    (0x2F24, 'M', u'大'),
    +    (0x2F25, 'M', u'女'),
    +    (0x2F26, 'M', u'子'),
    +    (0x2F27, 'M', u'宀'),
    +    (0x2F28, 'M', u'寸'),
    +    (0x2F29, 'M', u'小'),
    +    (0x2F2A, 'M', u'尢'),
    +    (0x2F2B, 'M', u'尸'),
    +    (0x2F2C, 'M', u'屮'),
    +    (0x2F2D, 'M', u'山'),
    +    (0x2F2E, 'M', u'巛'),
    +    (0x2F2F, 'M', u'工'),
    +    (0x2F30, 'M', u'己'),
    +    (0x2F31, 'M', u'巾'),
    +    (0x2F32, 'M', u'干'),
    +    (0x2F33, 'M', u'幺'),
    +    (0x2F34, 'M', u'广'),
    +    (0x2F35, 'M', u'廴'),
    +    (0x2F36, 'M', u'廾'),
    +    (0x2F37, 'M', u'弋'),
    +    (0x2F38, 'M', u'弓'),
    +    (0x2F39, 'M', u'彐'),
    +    (0x2F3A, 'M', u'彡'),
    +    (0x2F3B, 'M', u'彳'),
    +    (0x2F3C, 'M', u'心'),
    +    (0x2F3D, 'M', u'戈'),
    +    (0x2F3E, 'M', u'戶'),
    +    (0x2F3F, 'M', u'手'),
    +    (0x2F40, 'M', u'支'),
    +    (0x2F41, 'M', u'攴'),
    +    (0x2F42, 'M', u'文'),
    +    (0x2F43, 'M', u'斗'),
    +    (0x2F44, 'M', u'斤'),
    +    (0x2F45, 'M', u'方'),
    +    (0x2F46, 'M', u'无'),
    +    (0x2F47, 'M', u'日'),
    +    (0x2F48, 'M', u'曰'),
    +    (0x2F49, 'M', u'月'),
    +    (0x2F4A, 'M', u'木'),
    +    (0x2F4B, 'M', u'欠'),
    +    (0x2F4C, 'M', u'止'),
    +    (0x2F4D, 'M', u'歹'),
    +    (0x2F4E, 'M', u'殳'),
    +    (0x2F4F, 'M', u'毋'),
    +    (0x2F50, 'M', u'比'),
    +    (0x2F51, 'M', u'毛'),
    +    (0x2F52, 'M', u'氏'),
    +    (0x2F53, 'M', u'气'),
    +    (0x2F54, 'M', u'水'),
    +    (0x2F55, 'M', u'火'),
    +    (0x2F56, 'M', u'爪'),
    +    (0x2F57, 'M', u'父'),
    +    (0x2F58, 'M', u'爻'),
    +    (0x2F59, 'M', u'爿'),
    +    (0x2F5A, 'M', u'片'),
    +    (0x2F5B, 'M', u'牙'),
    +    (0x2F5C, 'M', u'牛'),
    +    (0x2F5D, 'M', u'犬'),
    +    (0x2F5E, 'M', u'玄'),
    +    (0x2F5F, 'M', u'玉'),
    +    (0x2F60, 'M', u'瓜'),
    +    (0x2F61, 'M', u'瓦'),
    +    (0x2F62, 'M', u'甘'),
    +    (0x2F63, 'M', u'生'),
    +    (0x2F64, 'M', u'用'),
    +    (0x2F65, 'M', u'田'),
    +    (0x2F66, 'M', u'疋'),
    +    (0x2F67, 'M', u'疒'),
    +    (0x2F68, 'M', u'癶'),
    +    (0x2F69, 'M', u'白'),
    +    (0x2F6A, 'M', u'皮'),
    +    (0x2F6B, 'M', u'皿'),
    +    (0x2F6C, 'M', u'目'),
    +    (0x2F6D, 'M', u'矛'),
    +    (0x2F6E, 'M', u'矢'),
    +    (0x2F6F, 'M', u'石'),
    +    (0x2F70, 'M', u'示'),
    +    (0x2F71, 'M', u'禸'),
    +    (0x2F72, 'M', u'禾'),
    +    (0x2F73, 'M', u'穴'),
    +    ]
    +
    +def _seg_28():
    +    return [
    +    (0x2F74, 'M', u'立'),
    +    (0x2F75, 'M', u'竹'),
    +    (0x2F76, 'M', u'米'),
    +    (0x2F77, 'M', u'糸'),
    +    (0x2F78, 'M', u'缶'),
    +    (0x2F79, 'M', u'网'),
    +    (0x2F7A, 'M', u'羊'),
    +    (0x2F7B, 'M', u'羽'),
    +    (0x2F7C, 'M', u'老'),
    +    (0x2F7D, 'M', u'而'),
    +    (0x2F7E, 'M', u'耒'),
    +    (0x2F7F, 'M', u'耳'),
    +    (0x2F80, 'M', u'聿'),
    +    (0x2F81, 'M', u'肉'),
    +    (0x2F82, 'M', u'臣'),
    +    (0x2F83, 'M', u'自'),
    +    (0x2F84, 'M', u'至'),
    +    (0x2F85, 'M', u'臼'),
    +    (0x2F86, 'M', u'舌'),
    +    (0x2F87, 'M', u'舛'),
    +    (0x2F88, 'M', u'舟'),
    +    (0x2F89, 'M', u'艮'),
    +    (0x2F8A, 'M', u'色'),
    +    (0x2F8B, 'M', u'艸'),
    +    (0x2F8C, 'M', u'虍'),
    +    (0x2F8D, 'M', u'虫'),
    +    (0x2F8E, 'M', u'血'),
    +    (0x2F8F, 'M', u'行'),
    +    (0x2F90, 'M', u'衣'),
    +    (0x2F91, 'M', u'襾'),
    +    (0x2F92, 'M', u'見'),
    +    (0x2F93, 'M', u'角'),
    +    (0x2F94, 'M', u'言'),
    +    (0x2F95, 'M', u'谷'),
    +    (0x2F96, 'M', u'豆'),
    +    (0x2F97, 'M', u'豕'),
    +    (0x2F98, 'M', u'豸'),
    +    (0x2F99, 'M', u'貝'),
    +    (0x2F9A, 'M', u'赤'),
    +    (0x2F9B, 'M', u'走'),
    +    (0x2F9C, 'M', u'足'),
    +    (0x2F9D, 'M', u'身'),
    +    (0x2F9E, 'M', u'車'),
    +    (0x2F9F, 'M', u'辛'),
    +    (0x2FA0, 'M', u'辰'),
    +    (0x2FA1, 'M', u'辵'),
    +    (0x2FA2, 'M', u'邑'),
    +    (0x2FA3, 'M', u'酉'),
    +    (0x2FA4, 'M', u'釆'),
    +    (0x2FA5, 'M', u'里'),
    +    (0x2FA6, 'M', u'金'),
    +    (0x2FA7, 'M', u'長'),
    +    (0x2FA8, 'M', u'門'),
    +    (0x2FA9, 'M', u'阜'),
    +    (0x2FAA, 'M', u'隶'),
    +    (0x2FAB, 'M', u'隹'),
    +    (0x2FAC, 'M', u'雨'),
    +    (0x2FAD, 'M', u'靑'),
    +    (0x2FAE, 'M', u'非'),
    +    (0x2FAF, 'M', u'面'),
    +    (0x2FB0, 'M', u'革'),
    +    (0x2FB1, 'M', u'韋'),
    +    (0x2FB2, 'M', u'韭'),
    +    (0x2FB3, 'M', u'音'),
    +    (0x2FB4, 'M', u'頁'),
    +    (0x2FB5, 'M', u'風'),
    +    (0x2FB6, 'M', u'飛'),
    +    (0x2FB7, 'M', u'食'),
    +    (0x2FB8, 'M', u'首'),
    +    (0x2FB9, 'M', u'香'),
    +    (0x2FBA, 'M', u'馬'),
    +    (0x2FBB, 'M', u'骨'),
    +    (0x2FBC, 'M', u'高'),
    +    (0x2FBD, 'M', u'髟'),
    +    (0x2FBE, 'M', u'鬥'),
    +    (0x2FBF, 'M', u'鬯'),
    +    (0x2FC0, 'M', u'鬲'),
    +    (0x2FC1, 'M', u'鬼'),
    +    (0x2FC2, 'M', u'魚'),
    +    (0x2FC3, 'M', u'鳥'),
    +    (0x2FC4, 'M', u'鹵'),
    +    (0x2FC5, 'M', u'鹿'),
    +    (0x2FC6, 'M', u'麥'),
    +    (0x2FC7, 'M', u'麻'),
    +    (0x2FC8, 'M', u'黃'),
    +    (0x2FC9, 'M', u'黍'),
    +    (0x2FCA, 'M', u'黑'),
    +    (0x2FCB, 'M', u'黹'),
    +    (0x2FCC, 'M', u'黽'),
    +    (0x2FCD, 'M', u'鼎'),
    +    (0x2FCE, 'M', u'鼓'),
    +    (0x2FCF, 'M', u'鼠'),
    +    (0x2FD0, 'M', u'鼻'),
    +    (0x2FD1, 'M', u'齊'),
    +    (0x2FD2, 'M', u'齒'),
    +    (0x2FD3, 'M', u'龍'),
    +    (0x2FD4, 'M', u'龜'),
    +    (0x2FD5, 'M', u'龠'),
    +    (0x2FD6, 'X'),
    +    (0x3000, '3', u' '),
    +    ]
    +
    +def _seg_29():
    +    return [
    +    (0x3001, 'V'),
    +    (0x3002, 'M', u'.'),
    +    (0x3003, 'V'),
    +    (0x3036, 'M', u'〒'),
    +    (0x3037, 'V'),
    +    (0x3038, 'M', u'十'),
    +    (0x3039, 'M', u'卄'),
    +    (0x303A, 'M', u'卅'),
    +    (0x303B, 'V'),
    +    (0x3040, 'X'),
    +    (0x3041, 'V'),
    +    (0x3097, 'X'),
    +    (0x3099, 'V'),
    +    (0x309B, '3', u' ゙'),
    +    (0x309C, '3', u' ゚'),
    +    (0x309D, 'V'),
    +    (0x309F, 'M', u'より'),
    +    (0x30A0, 'V'),
    +    (0x30FF, 'M', u'コト'),
    +    (0x3100, 'X'),
    +    (0x3105, 'V'),
    +    (0x3130, 'X'),
    +    (0x3131, 'M', u'ᄀ'),
    +    (0x3132, 'M', u'ᄁ'),
    +    (0x3133, 'M', u'ᆪ'),
    +    (0x3134, 'M', u'ᄂ'),
    +    (0x3135, 'M', u'ᆬ'),
    +    (0x3136, 'M', u'ᆭ'),
    +    (0x3137, 'M', u'ᄃ'),
    +    (0x3138, 'M', u'ᄄ'),
    +    (0x3139, 'M', u'ᄅ'),
    +    (0x313A, 'M', u'ᆰ'),
    +    (0x313B, 'M', u'ᆱ'),
    +    (0x313C, 'M', u'ᆲ'),
    +    (0x313D, 'M', u'ᆳ'),
    +    (0x313E, 'M', u'ᆴ'),
    +    (0x313F, 'M', u'ᆵ'),
    +    (0x3140, 'M', u'ᄚ'),
    +    (0x3141, 'M', u'ᄆ'),
    +    (0x3142, 'M', u'ᄇ'),
    +    (0x3143, 'M', u'ᄈ'),
    +    (0x3144, 'M', u'ᄡ'),
    +    (0x3145, 'M', u'ᄉ'),
    +    (0x3146, 'M', u'ᄊ'),
    +    (0x3147, 'M', u'ᄋ'),
    +    (0x3148, 'M', u'ᄌ'),
    +    (0x3149, 'M', u'ᄍ'),
    +    (0x314A, 'M', u'ᄎ'),
    +    (0x314B, 'M', u'ᄏ'),
    +    (0x314C, 'M', u'ᄐ'),
    +    (0x314D, 'M', u'ᄑ'),
    +    (0x314E, 'M', u'ᄒ'),
    +    (0x314F, 'M', u'ᅡ'),
    +    (0x3150, 'M', u'ᅢ'),
    +    (0x3151, 'M', u'ᅣ'),
    +    (0x3152, 'M', u'ᅤ'),
    +    (0x3153, 'M', u'ᅥ'),
    +    (0x3154, 'M', u'ᅦ'),
    +    (0x3155, 'M', u'ᅧ'),
    +    (0x3156, 'M', u'ᅨ'),
    +    (0x3157, 'M', u'ᅩ'),
    +    (0x3158, 'M', u'ᅪ'),
    +    (0x3159, 'M', u'ᅫ'),
    +    (0x315A, 'M', u'ᅬ'),
    +    (0x315B, 'M', u'ᅭ'),
    +    (0x315C, 'M', u'ᅮ'),
    +    (0x315D, 'M', u'ᅯ'),
    +    (0x315E, 'M', u'ᅰ'),
    +    (0x315F, 'M', u'ᅱ'),
    +    (0x3160, 'M', u'ᅲ'),
    +    (0x3161, 'M', u'ᅳ'),
    +    (0x3162, 'M', u'ᅴ'),
    +    (0x3163, 'M', u'ᅵ'),
    +    (0x3164, 'X'),
    +    (0x3165, 'M', u'ᄔ'),
    +    (0x3166, 'M', u'ᄕ'),
    +    (0x3167, 'M', u'ᇇ'),
    +    (0x3168, 'M', u'ᇈ'),
    +    (0x3169, 'M', u'ᇌ'),
    +    (0x316A, 'M', u'ᇎ'),
    +    (0x316B, 'M', u'ᇓ'),
    +    (0x316C, 'M', u'ᇗ'),
    +    (0x316D, 'M', u'ᇙ'),
    +    (0x316E, 'M', u'ᄜ'),
    +    (0x316F, 'M', u'ᇝ'),
    +    (0x3170, 'M', u'ᇟ'),
    +    (0x3171, 'M', u'ᄝ'),
    +    (0x3172, 'M', u'ᄞ'),
    +    (0x3173, 'M', u'ᄠ'),
    +    (0x3174, 'M', u'ᄢ'),
    +    (0x3175, 'M', u'ᄣ'),
    +    (0x3176, 'M', u'ᄧ'),
    +    (0x3177, 'M', u'ᄩ'),
    +    (0x3178, 'M', u'ᄫ'),
    +    (0x3179, 'M', u'ᄬ'),
    +    (0x317A, 'M', u'ᄭ'),
    +    (0x317B, 'M', u'ᄮ'),
    +    (0x317C, 'M', u'ᄯ'),
    +    (0x317D, 'M', u'ᄲ'),
    +    (0x317E, 'M', u'ᄶ'),
    +    ]
    +
    +def _seg_30():
    +    return [
    +    (0x317F, 'M', u'ᅀ'),
    +    (0x3180, 'M', u'ᅇ'),
    +    (0x3181, 'M', u'ᅌ'),
    +    (0x3182, 'M', u'ᇱ'),
    +    (0x3183, 'M', u'ᇲ'),
    +    (0x3184, 'M', u'ᅗ'),
    +    (0x3185, 'M', u'ᅘ'),
    +    (0x3186, 'M', u'ᅙ'),
    +    (0x3187, 'M', u'ᆄ'),
    +    (0x3188, 'M', u'ᆅ'),
    +    (0x3189, 'M', u'ᆈ'),
    +    (0x318A, 'M', u'ᆑ'),
    +    (0x318B, 'M', u'ᆒ'),
    +    (0x318C, 'M', u'ᆔ'),
    +    (0x318D, 'M', u'ᆞ'),
    +    (0x318E, 'M', u'ᆡ'),
    +    (0x318F, 'X'),
    +    (0x3190, 'V'),
    +    (0x3192, 'M', u'一'),
    +    (0x3193, 'M', u'二'),
    +    (0x3194, 'M', u'三'),
    +    (0x3195, 'M', u'四'),
    +    (0x3196, 'M', u'上'),
    +    (0x3197, 'M', u'中'),
    +    (0x3198, 'M', u'下'),
    +    (0x3199, 'M', u'甲'),
    +    (0x319A, 'M', u'乙'),
    +    (0x319B, 'M', u'丙'),
    +    (0x319C, 'M', u'丁'),
    +    (0x319D, 'M', u'天'),
    +    (0x319E, 'M', u'地'),
    +    (0x319F, 'M', u'人'),
    +    (0x31A0, 'V'),
    +    (0x31BB, 'X'),
    +    (0x31C0, 'V'),
    +    (0x31E4, 'X'),
    +    (0x31F0, 'V'),
    +    (0x3200, '3', u'(ᄀ)'),
    +    (0x3201, '3', u'(ᄂ)'),
    +    (0x3202, '3', u'(ᄃ)'),
    +    (0x3203, '3', u'(ᄅ)'),
    +    (0x3204, '3', u'(ᄆ)'),
    +    (0x3205, '3', u'(ᄇ)'),
    +    (0x3206, '3', u'(ᄉ)'),
    +    (0x3207, '3', u'(ᄋ)'),
    +    (0x3208, '3', u'(ᄌ)'),
    +    (0x3209, '3', u'(ᄎ)'),
    +    (0x320A, '3', u'(ᄏ)'),
    +    (0x320B, '3', u'(ᄐ)'),
    +    (0x320C, '3', u'(ᄑ)'),
    +    (0x320D, '3', u'(ᄒ)'),
    +    (0x320E, '3', u'(가)'),
    +    (0x320F, '3', u'(나)'),
    +    (0x3210, '3', u'(다)'),
    +    (0x3211, '3', u'(라)'),
    +    (0x3212, '3', u'(마)'),
    +    (0x3213, '3', u'(바)'),
    +    (0x3214, '3', u'(사)'),
    +    (0x3215, '3', u'(아)'),
    +    (0x3216, '3', u'(자)'),
    +    (0x3217, '3', u'(차)'),
    +    (0x3218, '3', u'(카)'),
    +    (0x3219, '3', u'(타)'),
    +    (0x321A, '3', u'(파)'),
    +    (0x321B, '3', u'(하)'),
    +    (0x321C, '3', u'(주)'),
    +    (0x321D, '3', u'(오전)'),
    +    (0x321E, '3', u'(오후)'),
    +    (0x321F, 'X'),
    +    (0x3220, '3', u'(一)'),
    +    (0x3221, '3', u'(二)'),
    +    (0x3222, '3', u'(三)'),
    +    (0x3223, '3', u'(四)'),
    +    (0x3224, '3', u'(五)'),
    +    (0x3225, '3', u'(六)'),
    +    (0x3226, '3', u'(七)'),
    +    (0x3227, '3', u'(八)'),
    +    (0x3228, '3', u'(九)'),
    +    (0x3229, '3', u'(十)'),
    +    (0x322A, '3', u'(月)'),
    +    (0x322B, '3', u'(火)'),
    +    (0x322C, '3', u'(水)'),
    +    (0x322D, '3', u'(木)'),
    +    (0x322E, '3', u'(金)'),
    +    (0x322F, '3', u'(土)'),
    +    (0x3230, '3', u'(日)'),
    +    (0x3231, '3', u'(株)'),
    +    (0x3232, '3', u'(有)'),
    +    (0x3233, '3', u'(社)'),
    +    (0x3234, '3', u'(名)'),
    +    (0x3235, '3', u'(特)'),
    +    (0x3236, '3', u'(財)'),
    +    (0x3237, '3', u'(祝)'),
    +    (0x3238, '3', u'(労)'),
    +    (0x3239, '3', u'(代)'),
    +    (0x323A, '3', u'(呼)'),
    +    (0x323B, '3', u'(学)'),
    +    (0x323C, '3', u'(監)'),
    +    (0x323D, '3', u'(企)'),
    +    (0x323E, '3', u'(資)'),
    +    ]
    +
    +def _seg_31():
    +    return [
    +    (0x323F, '3', u'(協)'),
    +    (0x3240, '3', u'(祭)'),
    +    (0x3241, '3', u'(休)'),
    +    (0x3242, '3', u'(自)'),
    +    (0x3243, '3', u'(至)'),
    +    (0x3244, 'M', u'問'),
    +    (0x3245, 'M', u'幼'),
    +    (0x3246, 'M', u'文'),
    +    (0x3247, 'M', u'箏'),
    +    (0x3248, 'V'),
    +    (0x3250, 'M', u'pte'),
    +    (0x3251, 'M', u'21'),
    +    (0x3252, 'M', u'22'),
    +    (0x3253, 'M', u'23'),
    +    (0x3254, 'M', u'24'),
    +    (0x3255, 'M', u'25'),
    +    (0x3256, 'M', u'26'),
    +    (0x3257, 'M', u'27'),
    +    (0x3258, 'M', u'28'),
    +    (0x3259, 'M', u'29'),
    +    (0x325A, 'M', u'30'),
    +    (0x325B, 'M', u'31'),
    +    (0x325C, 'M', u'32'),
    +    (0x325D, 'M', u'33'),
    +    (0x325E, 'M', u'34'),
    +    (0x325F, 'M', u'35'),
    +    (0x3260, 'M', u'ᄀ'),
    +    (0x3261, 'M', u'ᄂ'),
    +    (0x3262, 'M', u'ᄃ'),
    +    (0x3263, 'M', u'ᄅ'),
    +    (0x3264, 'M', u'ᄆ'),
    +    (0x3265, 'M', u'ᄇ'),
    +    (0x3266, 'M', u'ᄉ'),
    +    (0x3267, 'M', u'ᄋ'),
    +    (0x3268, 'M', u'ᄌ'),
    +    (0x3269, 'M', u'ᄎ'),
    +    (0x326A, 'M', u'ᄏ'),
    +    (0x326B, 'M', u'ᄐ'),
    +    (0x326C, 'M', u'ᄑ'),
    +    (0x326D, 'M', u'ᄒ'),
    +    (0x326E, 'M', u'가'),
    +    (0x326F, 'M', u'나'),
    +    (0x3270, 'M', u'다'),
    +    (0x3271, 'M', u'라'),
    +    (0x3272, 'M', u'마'),
    +    (0x3273, 'M', u'바'),
    +    (0x3274, 'M', u'사'),
    +    (0x3275, 'M', u'아'),
    +    (0x3276, 'M', u'자'),
    +    (0x3277, 'M', u'차'),
    +    (0x3278, 'M', u'카'),
    +    (0x3279, 'M', u'타'),
    +    (0x327A, 'M', u'파'),
    +    (0x327B, 'M', u'하'),
    +    (0x327C, 'M', u'참고'),
    +    (0x327D, 'M', u'주의'),
    +    (0x327E, 'M', u'우'),
    +    (0x327F, 'V'),
    +    (0x3280, 'M', u'一'),
    +    (0x3281, 'M', u'二'),
    +    (0x3282, 'M', u'三'),
    +    (0x3283, 'M', u'四'),
    +    (0x3284, 'M', u'五'),
    +    (0x3285, 'M', u'六'),
    +    (0x3286, 'M', u'七'),
    +    (0x3287, 'M', u'八'),
    +    (0x3288, 'M', u'九'),
    +    (0x3289, 'M', u'十'),
    +    (0x328A, 'M', u'月'),
    +    (0x328B, 'M', u'火'),
    +    (0x328C, 'M', u'水'),
    +    (0x328D, 'M', u'木'),
    +    (0x328E, 'M', u'金'),
    +    (0x328F, 'M', u'土'),
    +    (0x3290, 'M', u'日'),
    +    (0x3291, 'M', u'株'),
    +    (0x3292, 'M', u'有'),
    +    (0x3293, 'M', u'社'),
    +    (0x3294, 'M', u'名'),
    +    (0x3295, 'M', u'特'),
    +    (0x3296, 'M', u'財'),
    +    (0x3297, 'M', u'祝'),
    +    (0x3298, 'M', u'労'),
    +    (0x3299, 'M', u'秘'),
    +    (0x329A, 'M', u'男'),
    +    (0x329B, 'M', u'女'),
    +    (0x329C, 'M', u'適'),
    +    (0x329D, 'M', u'優'),
    +    (0x329E, 'M', u'印'),
    +    (0x329F, 'M', u'注'),
    +    (0x32A0, 'M', u'項'),
    +    (0x32A1, 'M', u'休'),
    +    (0x32A2, 'M', u'写'),
    +    (0x32A3, 'M', u'正'),
    +    (0x32A4, 'M', u'上'),
    +    (0x32A5, 'M', u'中'),
    +    (0x32A6, 'M', u'下'),
    +    (0x32A7, 'M', u'左'),
    +    (0x32A8, 'M', u'右'),
    +    (0x32A9, 'M', u'医'),
    +    ]
    +
    +def _seg_32():
    +    return [
    +    (0x32AA, 'M', u'宗'),
    +    (0x32AB, 'M', u'学'),
    +    (0x32AC, 'M', u'監'),
    +    (0x32AD, 'M', u'企'),
    +    (0x32AE, 'M', u'資'),
    +    (0x32AF, 'M', u'協'),
    +    (0x32B0, 'M', u'夜'),
    +    (0x32B1, 'M', u'36'),
    +    (0x32B2, 'M', u'37'),
    +    (0x32B3, 'M', u'38'),
    +    (0x32B4, 'M', u'39'),
    +    (0x32B5, 'M', u'40'),
    +    (0x32B6, 'M', u'41'),
    +    (0x32B7, 'M', u'42'),
    +    (0x32B8, 'M', u'43'),
    +    (0x32B9, 'M', u'44'),
    +    (0x32BA, 'M', u'45'),
    +    (0x32BB, 'M', u'46'),
    +    (0x32BC, 'M', u'47'),
    +    (0x32BD, 'M', u'48'),
    +    (0x32BE, 'M', u'49'),
    +    (0x32BF, 'M', u'50'),
    +    (0x32C0, 'M', u'1月'),
    +    (0x32C1, 'M', u'2月'),
    +    (0x32C2, 'M', u'3月'),
    +    (0x32C3, 'M', u'4月'),
    +    (0x32C4, 'M', u'5月'),
    +    (0x32C5, 'M', u'6月'),
    +    (0x32C6, 'M', u'7月'),
    +    (0x32C7, 'M', u'8月'),
    +    (0x32C8, 'M', u'9月'),
    +    (0x32C9, 'M', u'10月'),
    +    (0x32CA, 'M', u'11月'),
    +    (0x32CB, 'M', u'12月'),
    +    (0x32CC, 'M', u'hg'),
    +    (0x32CD, 'M', u'erg'),
    +    (0x32CE, 'M', u'ev'),
    +    (0x32CF, 'M', u'ltd'),
    +    (0x32D0, 'M', u'ア'),
    +    (0x32D1, 'M', u'イ'),
    +    (0x32D2, 'M', u'ウ'),
    +    (0x32D3, 'M', u'エ'),
    +    (0x32D4, 'M', u'オ'),
    +    (0x32D5, 'M', u'カ'),
    +    (0x32D6, 'M', u'キ'),
    +    (0x32D7, 'M', u'ク'),
    +    (0x32D8, 'M', u'ケ'),
    +    (0x32D9, 'M', u'コ'),
    +    (0x32DA, 'M', u'サ'),
    +    (0x32DB, 'M', u'シ'),
    +    (0x32DC, 'M', u'ス'),
    +    (0x32DD, 'M', u'セ'),
    +    (0x32DE, 'M', u'ソ'),
    +    (0x32DF, 'M', u'タ'),
    +    (0x32E0, 'M', u'チ'),
    +    (0x32E1, 'M', u'ツ'),
    +    (0x32E2, 'M', u'テ'),
    +    (0x32E3, 'M', u'ト'),
    +    (0x32E4, 'M', u'ナ'),
    +    (0x32E5, 'M', u'ニ'),
    +    (0x32E6, 'M', u'ヌ'),
    +    (0x32E7, 'M', u'ネ'),
    +    (0x32E8, 'M', u'ノ'),
    +    (0x32E9, 'M', u'ハ'),
    +    (0x32EA, 'M', u'ヒ'),
    +    (0x32EB, 'M', u'フ'),
    +    (0x32EC, 'M', u'ヘ'),
    +    (0x32ED, 'M', u'ホ'),
    +    (0x32EE, 'M', u'マ'),
    +    (0x32EF, 'M', u'ミ'),
    +    (0x32F0, 'M', u'ム'),
    +    (0x32F1, 'M', u'メ'),
    +    (0x32F2, 'M', u'モ'),
    +    (0x32F3, 'M', u'ヤ'),
    +    (0x32F4, 'M', u'ユ'),
    +    (0x32F5, 'M', u'ヨ'),
    +    (0x32F6, 'M', u'ラ'),
    +    (0x32F7, 'M', u'リ'),
    +    (0x32F8, 'M', u'ル'),
    +    (0x32F9, 'M', u'レ'),
    +    (0x32FA, 'M', u'ロ'),
    +    (0x32FB, 'M', u'ワ'),
    +    (0x32FC, 'M', u'ヰ'),
    +    (0x32FD, 'M', u'ヱ'),
    +    (0x32FE, 'M', u'ヲ'),
    +    (0x32FF, 'M', u'令和'),
    +    (0x3300, 'M', u'アパート'),
    +    (0x3301, 'M', u'アルファ'),
    +    (0x3302, 'M', u'アンペア'),
    +    (0x3303, 'M', u'アール'),
    +    (0x3304, 'M', u'イニング'),
    +    (0x3305, 'M', u'インチ'),
    +    (0x3306, 'M', u'ウォン'),
    +    (0x3307, 'M', u'エスクード'),
    +    (0x3308, 'M', u'エーカー'),
    +    (0x3309, 'M', u'オンス'),
    +    (0x330A, 'M', u'オーム'),
    +    (0x330B, 'M', u'カイリ'),
    +    (0x330C, 'M', u'カラット'),
    +    (0x330D, 'M', u'カロリー'),
    +    ]
    +
    +def _seg_33():
    +    return [
    +    (0x330E, 'M', u'ガロン'),
    +    (0x330F, 'M', u'ガンマ'),
    +    (0x3310, 'M', u'ギガ'),
    +    (0x3311, 'M', u'ギニー'),
    +    (0x3312, 'M', u'キュリー'),
    +    (0x3313, 'M', u'ギルダー'),
    +    (0x3314, 'M', u'キロ'),
    +    (0x3315, 'M', u'キログラム'),
    +    (0x3316, 'M', u'キロメートル'),
    +    (0x3317, 'M', u'キロワット'),
    +    (0x3318, 'M', u'グラム'),
    +    (0x3319, 'M', u'グラムトン'),
    +    (0x331A, 'M', u'クルゼイロ'),
    +    (0x331B, 'M', u'クローネ'),
    +    (0x331C, 'M', u'ケース'),
    +    (0x331D, 'M', u'コルナ'),
    +    (0x331E, 'M', u'コーポ'),
    +    (0x331F, 'M', u'サイクル'),
    +    (0x3320, 'M', u'サンチーム'),
    +    (0x3321, 'M', u'シリング'),
    +    (0x3322, 'M', u'センチ'),
    +    (0x3323, 'M', u'セント'),
    +    (0x3324, 'M', u'ダース'),
    +    (0x3325, 'M', u'デシ'),
    +    (0x3326, 'M', u'ドル'),
    +    (0x3327, 'M', u'トン'),
    +    (0x3328, 'M', u'ナノ'),
    +    (0x3329, 'M', u'ノット'),
    +    (0x332A, 'M', u'ハイツ'),
    +    (0x332B, 'M', u'パーセント'),
    +    (0x332C, 'M', u'パーツ'),
    +    (0x332D, 'M', u'バーレル'),
    +    (0x332E, 'M', u'ピアストル'),
    +    (0x332F, 'M', u'ピクル'),
    +    (0x3330, 'M', u'ピコ'),
    +    (0x3331, 'M', u'ビル'),
    +    (0x3332, 'M', u'ファラッド'),
    +    (0x3333, 'M', u'フィート'),
    +    (0x3334, 'M', u'ブッシェル'),
    +    (0x3335, 'M', u'フラン'),
    +    (0x3336, 'M', u'ヘクタール'),
    +    (0x3337, 'M', u'ペソ'),
    +    (0x3338, 'M', u'ペニヒ'),
    +    (0x3339, 'M', u'ヘルツ'),
    +    (0x333A, 'M', u'ペンス'),
    +    (0x333B, 'M', u'ページ'),
    +    (0x333C, 'M', u'ベータ'),
    +    (0x333D, 'M', u'ポイント'),
    +    (0x333E, 'M', u'ボルト'),
    +    (0x333F, 'M', u'ホン'),
    +    (0x3340, 'M', u'ポンド'),
    +    (0x3341, 'M', u'ホール'),
    +    (0x3342, 'M', u'ホーン'),
    +    (0x3343, 'M', u'マイクロ'),
    +    (0x3344, 'M', u'マイル'),
    +    (0x3345, 'M', u'マッハ'),
    +    (0x3346, 'M', u'マルク'),
    +    (0x3347, 'M', u'マンション'),
    +    (0x3348, 'M', u'ミクロン'),
    +    (0x3349, 'M', u'ミリ'),
    +    (0x334A, 'M', u'ミリバール'),
    +    (0x334B, 'M', u'メガ'),
    +    (0x334C, 'M', u'メガトン'),
    +    (0x334D, 'M', u'メートル'),
    +    (0x334E, 'M', u'ヤード'),
    +    (0x334F, 'M', u'ヤール'),
    +    (0x3350, 'M', u'ユアン'),
    +    (0x3351, 'M', u'リットル'),
    +    (0x3352, 'M', u'リラ'),
    +    (0x3353, 'M', u'ルピー'),
    +    (0x3354, 'M', u'ルーブル'),
    +    (0x3355, 'M', u'レム'),
    +    (0x3356, 'M', u'レントゲン'),
    +    (0x3357, 'M', u'ワット'),
    +    (0x3358, 'M', u'0点'),
    +    (0x3359, 'M', u'1点'),
    +    (0x335A, 'M', u'2点'),
    +    (0x335B, 'M', u'3点'),
    +    (0x335C, 'M', u'4点'),
    +    (0x335D, 'M', u'5点'),
    +    (0x335E, 'M', u'6点'),
    +    (0x335F, 'M', u'7点'),
    +    (0x3360, 'M', u'8点'),
    +    (0x3361, 'M', u'9点'),
    +    (0x3362, 'M', u'10点'),
    +    (0x3363, 'M', u'11点'),
    +    (0x3364, 'M', u'12点'),
    +    (0x3365, 'M', u'13点'),
    +    (0x3366, 'M', u'14点'),
    +    (0x3367, 'M', u'15点'),
    +    (0x3368, 'M', u'16点'),
    +    (0x3369, 'M', u'17点'),
    +    (0x336A, 'M', u'18点'),
    +    (0x336B, 'M', u'19点'),
    +    (0x336C, 'M', u'20点'),
    +    (0x336D, 'M', u'21点'),
    +    (0x336E, 'M', u'22点'),
    +    (0x336F, 'M', u'23点'),
    +    (0x3370, 'M', u'24点'),
    +    (0x3371, 'M', u'hpa'),
    +    ]
    +
    +def _seg_34():
    +    return [
    +    (0x3372, 'M', u'da'),
    +    (0x3373, 'M', u'au'),
    +    (0x3374, 'M', u'bar'),
    +    (0x3375, 'M', u'ov'),
    +    (0x3376, 'M', u'pc'),
    +    (0x3377, 'M', u'dm'),
    +    (0x3378, 'M', u'dm2'),
    +    (0x3379, 'M', u'dm3'),
    +    (0x337A, 'M', u'iu'),
    +    (0x337B, 'M', u'平成'),
    +    (0x337C, 'M', u'昭和'),
    +    (0x337D, 'M', u'大正'),
    +    (0x337E, 'M', u'明治'),
    +    (0x337F, 'M', u'株式会社'),
    +    (0x3380, 'M', u'pa'),
    +    (0x3381, 'M', u'na'),
    +    (0x3382, 'M', u'μa'),
    +    (0x3383, 'M', u'ma'),
    +    (0x3384, 'M', u'ka'),
    +    (0x3385, 'M', u'kb'),
    +    (0x3386, 'M', u'mb'),
    +    (0x3387, 'M', u'gb'),
    +    (0x3388, 'M', u'cal'),
    +    (0x3389, 'M', u'kcal'),
    +    (0x338A, 'M', u'pf'),
    +    (0x338B, 'M', u'nf'),
    +    (0x338C, 'M', u'μf'),
    +    (0x338D, 'M', u'μg'),
    +    (0x338E, 'M', u'mg'),
    +    (0x338F, 'M', u'kg'),
    +    (0x3390, 'M', u'hz'),
    +    (0x3391, 'M', u'khz'),
    +    (0x3392, 'M', u'mhz'),
    +    (0x3393, 'M', u'ghz'),
    +    (0x3394, 'M', u'thz'),
    +    (0x3395, 'M', u'μl'),
    +    (0x3396, 'M', u'ml'),
    +    (0x3397, 'M', u'dl'),
    +    (0x3398, 'M', u'kl'),
    +    (0x3399, 'M', u'fm'),
    +    (0x339A, 'M', u'nm'),
    +    (0x339B, 'M', u'μm'),
    +    (0x339C, 'M', u'mm'),
    +    (0x339D, 'M', u'cm'),
    +    (0x339E, 'M', u'km'),
    +    (0x339F, 'M', u'mm2'),
    +    (0x33A0, 'M', u'cm2'),
    +    (0x33A1, 'M', u'm2'),
    +    (0x33A2, 'M', u'km2'),
    +    (0x33A3, 'M', u'mm3'),
    +    (0x33A4, 'M', u'cm3'),
    +    (0x33A5, 'M', u'm3'),
    +    (0x33A6, 'M', u'km3'),
    +    (0x33A7, 'M', u'm∕s'),
    +    (0x33A8, 'M', u'm∕s2'),
    +    (0x33A9, 'M', u'pa'),
    +    (0x33AA, 'M', u'kpa'),
    +    (0x33AB, 'M', u'mpa'),
    +    (0x33AC, 'M', u'gpa'),
    +    (0x33AD, 'M', u'rad'),
    +    (0x33AE, 'M', u'rad∕s'),
    +    (0x33AF, 'M', u'rad∕s2'),
    +    (0x33B0, 'M', u'ps'),
    +    (0x33B1, 'M', u'ns'),
    +    (0x33B2, 'M', u'μs'),
    +    (0x33B3, 'M', u'ms'),
    +    (0x33B4, 'M', u'pv'),
    +    (0x33B5, 'M', u'nv'),
    +    (0x33B6, 'M', u'μv'),
    +    (0x33B7, 'M', u'mv'),
    +    (0x33B8, 'M', u'kv'),
    +    (0x33B9, 'M', u'mv'),
    +    (0x33BA, 'M', u'pw'),
    +    (0x33BB, 'M', u'nw'),
    +    (0x33BC, 'M', u'μw'),
    +    (0x33BD, 'M', u'mw'),
    +    (0x33BE, 'M', u'kw'),
    +    (0x33BF, 'M', u'mw'),
    +    (0x33C0, 'M', u'kω'),
    +    (0x33C1, 'M', u'mω'),
    +    (0x33C2, 'X'),
    +    (0x33C3, 'M', u'bq'),
    +    (0x33C4, 'M', u'cc'),
    +    (0x33C5, 'M', u'cd'),
    +    (0x33C6, 'M', u'c∕kg'),
    +    (0x33C7, 'X'),
    +    (0x33C8, 'M', u'db'),
    +    (0x33C9, 'M', u'gy'),
    +    (0x33CA, 'M', u'ha'),
    +    (0x33CB, 'M', u'hp'),
    +    (0x33CC, 'M', u'in'),
    +    (0x33CD, 'M', u'kk'),
    +    (0x33CE, 'M', u'km'),
    +    (0x33CF, 'M', u'kt'),
    +    (0x33D0, 'M', u'lm'),
    +    (0x33D1, 'M', u'ln'),
    +    (0x33D2, 'M', u'log'),
    +    (0x33D3, 'M', u'lx'),
    +    (0x33D4, 'M', u'mb'),
    +    (0x33D5, 'M', u'mil'),
    +    ]
    +
    +def _seg_35():
    +    return [
    +    (0x33D6, 'M', u'mol'),
    +    (0x33D7, 'M', u'ph'),
    +    (0x33D8, 'X'),
    +    (0x33D9, 'M', u'ppm'),
    +    (0x33DA, 'M', u'pr'),
    +    (0x33DB, 'M', u'sr'),
    +    (0x33DC, 'M', u'sv'),
    +    (0x33DD, 'M', u'wb'),
    +    (0x33DE, 'M', u'v∕m'),
    +    (0x33DF, 'M', u'a∕m'),
    +    (0x33E0, 'M', u'1日'),
    +    (0x33E1, 'M', u'2日'),
    +    (0x33E2, 'M', u'3日'),
    +    (0x33E3, 'M', u'4日'),
    +    (0x33E4, 'M', u'5日'),
    +    (0x33E5, 'M', u'6日'),
    +    (0x33E6, 'M', u'7日'),
    +    (0x33E7, 'M', u'8日'),
    +    (0x33E8, 'M', u'9日'),
    +    (0x33E9, 'M', u'10日'),
    +    (0x33EA, 'M', u'11日'),
    +    (0x33EB, 'M', u'12日'),
    +    (0x33EC, 'M', u'13日'),
    +    (0x33ED, 'M', u'14日'),
    +    (0x33EE, 'M', u'15日'),
    +    (0x33EF, 'M', u'16日'),
    +    (0x33F0, 'M', u'17日'),
    +    (0x33F1, 'M', u'18日'),
    +    (0x33F2, 'M', u'19日'),
    +    (0x33F3, 'M', u'20日'),
    +    (0x33F4, 'M', u'21日'),
    +    (0x33F5, 'M', u'22日'),
    +    (0x33F6, 'M', u'23日'),
    +    (0x33F7, 'M', u'24日'),
    +    (0x33F8, 'M', u'25日'),
    +    (0x33F9, 'M', u'26日'),
    +    (0x33FA, 'M', u'27日'),
    +    (0x33FB, 'M', u'28日'),
    +    (0x33FC, 'M', u'29日'),
    +    (0x33FD, 'M', u'30日'),
    +    (0x33FE, 'M', u'31日'),
    +    (0x33FF, 'M', u'gal'),
    +    (0x3400, 'V'),
    +    (0x4DB6, 'X'),
    +    (0x4DC0, 'V'),
    +    (0x9FF0, 'X'),
    +    (0xA000, 'V'),
    +    (0xA48D, 'X'),
    +    (0xA490, 'V'),
    +    (0xA4C7, 'X'),
    +    (0xA4D0, 'V'),
    +    (0xA62C, 'X'),
    +    (0xA640, 'M', u'ꙁ'),
    +    (0xA641, 'V'),
    +    (0xA642, 'M', u'ꙃ'),
    +    (0xA643, 'V'),
    +    (0xA644, 'M', u'ꙅ'),
    +    (0xA645, 'V'),
    +    (0xA646, 'M', u'ꙇ'),
    +    (0xA647, 'V'),
    +    (0xA648, 'M', u'ꙉ'),
    +    (0xA649, 'V'),
    +    (0xA64A, 'M', u'ꙋ'),
    +    (0xA64B, 'V'),
    +    (0xA64C, 'M', u'ꙍ'),
    +    (0xA64D, 'V'),
    +    (0xA64E, 'M', u'ꙏ'),
    +    (0xA64F, 'V'),
    +    (0xA650, 'M', u'ꙑ'),
    +    (0xA651, 'V'),
    +    (0xA652, 'M', u'ꙓ'),
    +    (0xA653, 'V'),
    +    (0xA654, 'M', u'ꙕ'),
    +    (0xA655, 'V'),
    +    (0xA656, 'M', u'ꙗ'),
    +    (0xA657, 'V'),
    +    (0xA658, 'M', u'ꙙ'),
    +    (0xA659, 'V'),
    +    (0xA65A, 'M', u'ꙛ'),
    +    (0xA65B, 'V'),
    +    (0xA65C, 'M', u'ꙝ'),
    +    (0xA65D, 'V'),
    +    (0xA65E, 'M', u'ꙟ'),
    +    (0xA65F, 'V'),
    +    (0xA660, 'M', u'ꙡ'),
    +    (0xA661, 'V'),
    +    (0xA662, 'M', u'ꙣ'),
    +    (0xA663, 'V'),
    +    (0xA664, 'M', u'ꙥ'),
    +    (0xA665, 'V'),
    +    (0xA666, 'M', u'ꙧ'),
    +    (0xA667, 'V'),
    +    (0xA668, 'M', u'ꙩ'),
    +    (0xA669, 'V'),
    +    (0xA66A, 'M', u'ꙫ'),
    +    (0xA66B, 'V'),
    +    (0xA66C, 'M', u'ꙭ'),
    +    (0xA66D, 'V'),
    +    (0xA680, 'M', u'ꚁ'),
    +    (0xA681, 'V'),
    +    ]
    +
    +def _seg_36():
    +    return [
    +    (0xA682, 'M', u'ꚃ'),
    +    (0xA683, 'V'),
    +    (0xA684, 'M', u'ꚅ'),
    +    (0xA685, 'V'),
    +    (0xA686, 'M', u'ꚇ'),
    +    (0xA687, 'V'),
    +    (0xA688, 'M', u'ꚉ'),
    +    (0xA689, 'V'),
    +    (0xA68A, 'M', u'ꚋ'),
    +    (0xA68B, 'V'),
    +    (0xA68C, 'M', u'ꚍ'),
    +    (0xA68D, 'V'),
    +    (0xA68E, 'M', u'ꚏ'),
    +    (0xA68F, 'V'),
    +    (0xA690, 'M', u'ꚑ'),
    +    (0xA691, 'V'),
    +    (0xA692, 'M', u'ꚓ'),
    +    (0xA693, 'V'),
    +    (0xA694, 'M', u'ꚕ'),
    +    (0xA695, 'V'),
    +    (0xA696, 'M', u'ꚗ'),
    +    (0xA697, 'V'),
    +    (0xA698, 'M', u'ꚙ'),
    +    (0xA699, 'V'),
    +    (0xA69A, 'M', u'ꚛ'),
    +    (0xA69B, 'V'),
    +    (0xA69C, 'M', u'ъ'),
    +    (0xA69D, 'M', u'ь'),
    +    (0xA69E, 'V'),
    +    (0xA6F8, 'X'),
    +    (0xA700, 'V'),
    +    (0xA722, 'M', u'ꜣ'),
    +    (0xA723, 'V'),
    +    (0xA724, 'M', u'ꜥ'),
    +    (0xA725, 'V'),
    +    (0xA726, 'M', u'ꜧ'),
    +    (0xA727, 'V'),
    +    (0xA728, 'M', u'ꜩ'),
    +    (0xA729, 'V'),
    +    (0xA72A, 'M', u'ꜫ'),
    +    (0xA72B, 'V'),
    +    (0xA72C, 'M', u'ꜭ'),
    +    (0xA72D, 'V'),
    +    (0xA72E, 'M', u'ꜯ'),
    +    (0xA72F, 'V'),
    +    (0xA732, 'M', u'ꜳ'),
    +    (0xA733, 'V'),
    +    (0xA734, 'M', u'ꜵ'),
    +    (0xA735, 'V'),
    +    (0xA736, 'M', u'ꜷ'),
    +    (0xA737, 'V'),
    +    (0xA738, 'M', u'ꜹ'),
    +    (0xA739, 'V'),
    +    (0xA73A, 'M', u'ꜻ'),
    +    (0xA73B, 'V'),
    +    (0xA73C, 'M', u'ꜽ'),
    +    (0xA73D, 'V'),
    +    (0xA73E, 'M', u'ꜿ'),
    +    (0xA73F, 'V'),
    +    (0xA740, 'M', u'ꝁ'),
    +    (0xA741, 'V'),
    +    (0xA742, 'M', u'ꝃ'),
    +    (0xA743, 'V'),
    +    (0xA744, 'M', u'ꝅ'),
    +    (0xA745, 'V'),
    +    (0xA746, 'M', u'ꝇ'),
    +    (0xA747, 'V'),
    +    (0xA748, 'M', u'ꝉ'),
    +    (0xA749, 'V'),
    +    (0xA74A, 'M', u'ꝋ'),
    +    (0xA74B, 'V'),
    +    (0xA74C, 'M', u'ꝍ'),
    +    (0xA74D, 'V'),
    +    (0xA74E, 'M', u'ꝏ'),
    +    (0xA74F, 'V'),
    +    (0xA750, 'M', u'ꝑ'),
    +    (0xA751, 'V'),
    +    (0xA752, 'M', u'ꝓ'),
    +    (0xA753, 'V'),
    +    (0xA754, 'M', u'ꝕ'),
    +    (0xA755, 'V'),
    +    (0xA756, 'M', u'ꝗ'),
    +    (0xA757, 'V'),
    +    (0xA758, 'M', u'ꝙ'),
    +    (0xA759, 'V'),
    +    (0xA75A, 'M', u'ꝛ'),
    +    (0xA75B, 'V'),
    +    (0xA75C, 'M', u'ꝝ'),
    +    (0xA75D, 'V'),
    +    (0xA75E, 'M', u'ꝟ'),
    +    (0xA75F, 'V'),
    +    (0xA760, 'M', u'ꝡ'),
    +    (0xA761, 'V'),
    +    (0xA762, 'M', u'ꝣ'),
    +    (0xA763, 'V'),
    +    (0xA764, 'M', u'ꝥ'),
    +    (0xA765, 'V'),
    +    (0xA766, 'M', u'ꝧ'),
    +    (0xA767, 'V'),
    +    (0xA768, 'M', u'ꝩ'),
    +    ]
    +
    +def _seg_37():
    +    return [
    +    (0xA769, 'V'),
    +    (0xA76A, 'M', u'ꝫ'),
    +    (0xA76B, 'V'),
    +    (0xA76C, 'M', u'ꝭ'),
    +    (0xA76D, 'V'),
    +    (0xA76E, 'M', u'ꝯ'),
    +    (0xA76F, 'V'),
    +    (0xA770, 'M', u'ꝯ'),
    +    (0xA771, 'V'),
    +    (0xA779, 'M', u'ꝺ'),
    +    (0xA77A, 'V'),
    +    (0xA77B, 'M', u'ꝼ'),
    +    (0xA77C, 'V'),
    +    (0xA77D, 'M', u'ᵹ'),
    +    (0xA77E, 'M', u'ꝿ'),
    +    (0xA77F, 'V'),
    +    (0xA780, 'M', u'ꞁ'),
    +    (0xA781, 'V'),
    +    (0xA782, 'M', u'ꞃ'),
    +    (0xA783, 'V'),
    +    (0xA784, 'M', u'ꞅ'),
    +    (0xA785, 'V'),
    +    (0xA786, 'M', u'ꞇ'),
    +    (0xA787, 'V'),
    +    (0xA78B, 'M', u'ꞌ'),
    +    (0xA78C, 'V'),
    +    (0xA78D, 'M', u'ɥ'),
    +    (0xA78E, 'V'),
    +    (0xA790, 'M', u'ꞑ'),
    +    (0xA791, 'V'),
    +    (0xA792, 'M', u'ꞓ'),
    +    (0xA793, 'V'),
    +    (0xA796, 'M', u'ꞗ'),
    +    (0xA797, 'V'),
    +    (0xA798, 'M', u'ꞙ'),
    +    (0xA799, 'V'),
    +    (0xA79A, 'M', u'ꞛ'),
    +    (0xA79B, 'V'),
    +    (0xA79C, 'M', u'ꞝ'),
    +    (0xA79D, 'V'),
    +    (0xA79E, 'M', u'ꞟ'),
    +    (0xA79F, 'V'),
    +    (0xA7A0, 'M', u'ꞡ'),
    +    (0xA7A1, 'V'),
    +    (0xA7A2, 'M', u'ꞣ'),
    +    (0xA7A3, 'V'),
    +    (0xA7A4, 'M', u'ꞥ'),
    +    (0xA7A5, 'V'),
    +    (0xA7A6, 'M', u'ꞧ'),
    +    (0xA7A7, 'V'),
    +    (0xA7A8, 'M', u'ꞩ'),
    +    (0xA7A9, 'V'),
    +    (0xA7AA, 'M', u'ɦ'),
    +    (0xA7AB, 'M', u'ɜ'),
    +    (0xA7AC, 'M', u'ɡ'),
    +    (0xA7AD, 'M', u'ɬ'),
    +    (0xA7AE, 'M', u'ɪ'),
    +    (0xA7AF, 'V'),
    +    (0xA7B0, 'M', u'ʞ'),
    +    (0xA7B1, 'M', u'ʇ'),
    +    (0xA7B2, 'M', u'ʝ'),
    +    (0xA7B3, 'M', u'ꭓ'),
    +    (0xA7B4, 'M', u'ꞵ'),
    +    (0xA7B5, 'V'),
    +    (0xA7B6, 'M', u'ꞷ'),
    +    (0xA7B7, 'V'),
    +    (0xA7B8, 'M', u'ꞹ'),
    +    (0xA7B9, 'V'),
    +    (0xA7BA, 'M', u'ꞻ'),
    +    (0xA7BB, 'V'),
    +    (0xA7BC, 'M', u'ꞽ'),
    +    (0xA7BD, 'V'),
    +    (0xA7BE, 'M', u'ꞿ'),
    +    (0xA7BF, 'V'),
    +    (0xA7C0, 'X'),
    +    (0xA7C2, 'M', u'ꟃ'),
    +    (0xA7C3, 'V'),
    +    (0xA7C4, 'M', u'ꞔ'),
    +    (0xA7C5, 'M', u'ʂ'),
    +    (0xA7C6, 'M', u'ᶎ'),
    +    (0xA7C7, 'X'),
    +    (0xA7F7, 'V'),
    +    (0xA7F8, 'M', u'ħ'),
    +    (0xA7F9, 'M', u'œ'),
    +    (0xA7FA, 'V'),
    +    (0xA82C, 'X'),
    +    (0xA830, 'V'),
    +    (0xA83A, 'X'),
    +    (0xA840, 'V'),
    +    (0xA878, 'X'),
    +    (0xA880, 'V'),
    +    (0xA8C6, 'X'),
    +    (0xA8CE, 'V'),
    +    (0xA8DA, 'X'),
    +    (0xA8E0, 'V'),
    +    (0xA954, 'X'),
    +    (0xA95F, 'V'),
    +    (0xA97D, 'X'),
    +    (0xA980, 'V'),
    +    (0xA9CE, 'X'),
    +    ]
    +
    +def _seg_38():
    +    return [
    +    (0xA9CF, 'V'),
    +    (0xA9DA, 'X'),
    +    (0xA9DE, 'V'),
    +    (0xA9FF, 'X'),
    +    (0xAA00, 'V'),
    +    (0xAA37, 'X'),
    +    (0xAA40, 'V'),
    +    (0xAA4E, 'X'),
    +    (0xAA50, 'V'),
    +    (0xAA5A, 'X'),
    +    (0xAA5C, 'V'),
    +    (0xAAC3, 'X'),
    +    (0xAADB, 'V'),
    +    (0xAAF7, 'X'),
    +    (0xAB01, 'V'),
    +    (0xAB07, 'X'),
    +    (0xAB09, 'V'),
    +    (0xAB0F, 'X'),
    +    (0xAB11, 'V'),
    +    (0xAB17, 'X'),
    +    (0xAB20, 'V'),
    +    (0xAB27, 'X'),
    +    (0xAB28, 'V'),
    +    (0xAB2F, 'X'),
    +    (0xAB30, 'V'),
    +    (0xAB5C, 'M', u'ꜧ'),
    +    (0xAB5D, 'M', u'ꬷ'),
    +    (0xAB5E, 'M', u'ɫ'),
    +    (0xAB5F, 'M', u'ꭒ'),
    +    (0xAB60, 'V'),
    +    (0xAB68, 'X'),
    +    (0xAB70, 'M', u'Ꭰ'),
    +    (0xAB71, 'M', u'Ꭱ'),
    +    (0xAB72, 'M', u'Ꭲ'),
    +    (0xAB73, 'M', u'Ꭳ'),
    +    (0xAB74, 'M', u'Ꭴ'),
    +    (0xAB75, 'M', u'Ꭵ'),
    +    (0xAB76, 'M', u'Ꭶ'),
    +    (0xAB77, 'M', u'Ꭷ'),
    +    (0xAB78, 'M', u'Ꭸ'),
    +    (0xAB79, 'M', u'Ꭹ'),
    +    (0xAB7A, 'M', u'Ꭺ'),
    +    (0xAB7B, 'M', u'Ꭻ'),
    +    (0xAB7C, 'M', u'Ꭼ'),
    +    (0xAB7D, 'M', u'Ꭽ'),
    +    (0xAB7E, 'M', u'Ꭾ'),
    +    (0xAB7F, 'M', u'Ꭿ'),
    +    (0xAB80, 'M', u'Ꮀ'),
    +    (0xAB81, 'M', u'Ꮁ'),
    +    (0xAB82, 'M', u'Ꮂ'),
    +    (0xAB83, 'M', u'Ꮃ'),
    +    (0xAB84, 'M', u'Ꮄ'),
    +    (0xAB85, 'M', u'Ꮅ'),
    +    (0xAB86, 'M', u'Ꮆ'),
    +    (0xAB87, 'M', u'Ꮇ'),
    +    (0xAB88, 'M', u'Ꮈ'),
    +    (0xAB89, 'M', u'Ꮉ'),
    +    (0xAB8A, 'M', u'Ꮊ'),
    +    (0xAB8B, 'M', u'Ꮋ'),
    +    (0xAB8C, 'M', u'Ꮌ'),
    +    (0xAB8D, 'M', u'Ꮍ'),
    +    (0xAB8E, 'M', u'Ꮎ'),
    +    (0xAB8F, 'M', u'Ꮏ'),
    +    (0xAB90, 'M', u'Ꮐ'),
    +    (0xAB91, 'M', u'Ꮑ'),
    +    (0xAB92, 'M', u'Ꮒ'),
    +    (0xAB93, 'M', u'Ꮓ'),
    +    (0xAB94, 'M', u'Ꮔ'),
    +    (0xAB95, 'M', u'Ꮕ'),
    +    (0xAB96, 'M', u'Ꮖ'),
    +    (0xAB97, 'M', u'Ꮗ'),
    +    (0xAB98, 'M', u'Ꮘ'),
    +    (0xAB99, 'M', u'Ꮙ'),
    +    (0xAB9A, 'M', u'Ꮚ'),
    +    (0xAB9B, 'M', u'Ꮛ'),
    +    (0xAB9C, 'M', u'Ꮜ'),
    +    (0xAB9D, 'M', u'Ꮝ'),
    +    (0xAB9E, 'M', u'Ꮞ'),
    +    (0xAB9F, 'M', u'Ꮟ'),
    +    (0xABA0, 'M', u'Ꮠ'),
    +    (0xABA1, 'M', u'Ꮡ'),
    +    (0xABA2, 'M', u'Ꮢ'),
    +    (0xABA3, 'M', u'Ꮣ'),
    +    (0xABA4, 'M', u'Ꮤ'),
    +    (0xABA5, 'M', u'Ꮥ'),
    +    (0xABA6, 'M', u'Ꮦ'),
    +    (0xABA7, 'M', u'Ꮧ'),
    +    (0xABA8, 'M', u'Ꮨ'),
    +    (0xABA9, 'M', u'Ꮩ'),
    +    (0xABAA, 'M', u'Ꮪ'),
    +    (0xABAB, 'M', u'Ꮫ'),
    +    (0xABAC, 'M', u'Ꮬ'),
    +    (0xABAD, 'M', u'Ꮭ'),
    +    (0xABAE, 'M', u'Ꮮ'),
    +    (0xABAF, 'M', u'Ꮯ'),
    +    (0xABB0, 'M', u'Ꮰ'),
    +    (0xABB1, 'M', u'Ꮱ'),
    +    (0xABB2, 'M', u'Ꮲ'),
    +    (0xABB3, 'M', u'Ꮳ'),
    +    (0xABB4, 'M', u'Ꮴ'),
    +    ]
    +
    +def _seg_39():
    +    return [
    +    (0xABB5, 'M', u'Ꮵ'),
    +    (0xABB6, 'M', u'Ꮶ'),
    +    (0xABB7, 'M', u'Ꮷ'),
    +    (0xABB8, 'M', u'Ꮸ'),
    +    (0xABB9, 'M', u'Ꮹ'),
    +    (0xABBA, 'M', u'Ꮺ'),
    +    (0xABBB, 'M', u'Ꮻ'),
    +    (0xABBC, 'M', u'Ꮼ'),
    +    (0xABBD, 'M', u'Ꮽ'),
    +    (0xABBE, 'M', u'Ꮾ'),
    +    (0xABBF, 'M', u'Ꮿ'),
    +    (0xABC0, 'V'),
    +    (0xABEE, 'X'),
    +    (0xABF0, 'V'),
    +    (0xABFA, 'X'),
    +    (0xAC00, 'V'),
    +    (0xD7A4, 'X'),
    +    (0xD7B0, 'V'),
    +    (0xD7C7, 'X'),
    +    (0xD7CB, 'V'),
    +    (0xD7FC, 'X'),
    +    (0xF900, 'M', u'豈'),
    +    (0xF901, 'M', u'更'),
    +    (0xF902, 'M', u'車'),
    +    (0xF903, 'M', u'賈'),
    +    (0xF904, 'M', u'滑'),
    +    (0xF905, 'M', u'串'),
    +    (0xF906, 'M', u'句'),
    +    (0xF907, 'M', u'龜'),
    +    (0xF909, 'M', u'契'),
    +    (0xF90A, 'M', u'金'),
    +    (0xF90B, 'M', u'喇'),
    +    (0xF90C, 'M', u'奈'),
    +    (0xF90D, 'M', u'懶'),
    +    (0xF90E, 'M', u'癩'),
    +    (0xF90F, 'M', u'羅'),
    +    (0xF910, 'M', u'蘿'),
    +    (0xF911, 'M', u'螺'),
    +    (0xF912, 'M', u'裸'),
    +    (0xF913, 'M', u'邏'),
    +    (0xF914, 'M', u'樂'),
    +    (0xF915, 'M', u'洛'),
    +    (0xF916, 'M', u'烙'),
    +    (0xF917, 'M', u'珞'),
    +    (0xF918, 'M', u'落'),
    +    (0xF919, 'M', u'酪'),
    +    (0xF91A, 'M', u'駱'),
    +    (0xF91B, 'M', u'亂'),
    +    (0xF91C, 'M', u'卵'),
    +    (0xF91D, 'M', u'欄'),
    +    (0xF91E, 'M', u'爛'),
    +    (0xF91F, 'M', u'蘭'),
    +    (0xF920, 'M', u'鸞'),
    +    (0xF921, 'M', u'嵐'),
    +    (0xF922, 'M', u'濫'),
    +    (0xF923, 'M', u'藍'),
    +    (0xF924, 'M', u'襤'),
    +    (0xF925, 'M', u'拉'),
    +    (0xF926, 'M', u'臘'),
    +    (0xF927, 'M', u'蠟'),
    +    (0xF928, 'M', u'廊'),
    +    (0xF929, 'M', u'朗'),
    +    (0xF92A, 'M', u'浪'),
    +    (0xF92B, 'M', u'狼'),
    +    (0xF92C, 'M', u'郎'),
    +    (0xF92D, 'M', u'來'),
    +    (0xF92E, 'M', u'冷'),
    +    (0xF92F, 'M', u'勞'),
    +    (0xF930, 'M', u'擄'),
    +    (0xF931, 'M', u'櫓'),
    +    (0xF932, 'M', u'爐'),
    +    (0xF933, 'M', u'盧'),
    +    (0xF934, 'M', u'老'),
    +    (0xF935, 'M', u'蘆'),
    +    (0xF936, 'M', u'虜'),
    +    (0xF937, 'M', u'路'),
    +    (0xF938, 'M', u'露'),
    +    (0xF939, 'M', u'魯'),
    +    (0xF93A, 'M', u'鷺'),
    +    (0xF93B, 'M', u'碌'),
    +    (0xF93C, 'M', u'祿'),
    +    (0xF93D, 'M', u'綠'),
    +    (0xF93E, 'M', u'菉'),
    +    (0xF93F, 'M', u'錄'),
    +    (0xF940, 'M', u'鹿'),
    +    (0xF941, 'M', u'論'),
    +    (0xF942, 'M', u'壟'),
    +    (0xF943, 'M', u'弄'),
    +    (0xF944, 'M', u'籠'),
    +    (0xF945, 'M', u'聾'),
    +    (0xF946, 'M', u'牢'),
    +    (0xF947, 'M', u'磊'),
    +    (0xF948, 'M', u'賂'),
    +    (0xF949, 'M', u'雷'),
    +    (0xF94A, 'M', u'壘'),
    +    (0xF94B, 'M', u'屢'),
    +    (0xF94C, 'M', u'樓'),
    +    (0xF94D, 'M', u'淚'),
    +    (0xF94E, 'M', u'漏'),
    +    (0xF94F, 'M', u'累'),
    +    ]
    +
    +def _seg_40():
    +    return [
    +    (0xF950, 'M', u'縷'),
    +    (0xF951, 'M', u'陋'),
    +    (0xF952, 'M', u'勒'),
    +    (0xF953, 'M', u'肋'),
    +    (0xF954, 'M', u'凜'),
    +    (0xF955, 'M', u'凌'),
    +    (0xF956, 'M', u'稜'),
    +    (0xF957, 'M', u'綾'),
    +    (0xF958, 'M', u'菱'),
    +    (0xF959, 'M', u'陵'),
    +    (0xF95A, 'M', u'讀'),
    +    (0xF95B, 'M', u'拏'),
    +    (0xF95C, 'M', u'樂'),
    +    (0xF95D, 'M', u'諾'),
    +    (0xF95E, 'M', u'丹'),
    +    (0xF95F, 'M', u'寧'),
    +    (0xF960, 'M', u'怒'),
    +    (0xF961, 'M', u'率'),
    +    (0xF962, 'M', u'異'),
    +    (0xF963, 'M', u'北'),
    +    (0xF964, 'M', u'磻'),
    +    (0xF965, 'M', u'便'),
    +    (0xF966, 'M', u'復'),
    +    (0xF967, 'M', u'不'),
    +    (0xF968, 'M', u'泌'),
    +    (0xF969, 'M', u'數'),
    +    (0xF96A, 'M', u'索'),
    +    (0xF96B, 'M', u'參'),
    +    (0xF96C, 'M', u'塞'),
    +    (0xF96D, 'M', u'省'),
    +    (0xF96E, 'M', u'葉'),
    +    (0xF96F, 'M', u'說'),
    +    (0xF970, 'M', u'殺'),
    +    (0xF971, 'M', u'辰'),
    +    (0xF972, 'M', u'沈'),
    +    (0xF973, 'M', u'拾'),
    +    (0xF974, 'M', u'若'),
    +    (0xF975, 'M', u'掠'),
    +    (0xF976, 'M', u'略'),
    +    (0xF977, 'M', u'亮'),
    +    (0xF978, 'M', u'兩'),
    +    (0xF979, 'M', u'凉'),
    +    (0xF97A, 'M', u'梁'),
    +    (0xF97B, 'M', u'糧'),
    +    (0xF97C, 'M', u'良'),
    +    (0xF97D, 'M', u'諒'),
    +    (0xF97E, 'M', u'量'),
    +    (0xF97F, 'M', u'勵'),
    +    (0xF980, 'M', u'呂'),
    +    (0xF981, 'M', u'女'),
    +    (0xF982, 'M', u'廬'),
    +    (0xF983, 'M', u'旅'),
    +    (0xF984, 'M', u'濾'),
    +    (0xF985, 'M', u'礪'),
    +    (0xF986, 'M', u'閭'),
    +    (0xF987, 'M', u'驪'),
    +    (0xF988, 'M', u'麗'),
    +    (0xF989, 'M', u'黎'),
    +    (0xF98A, 'M', u'力'),
    +    (0xF98B, 'M', u'曆'),
    +    (0xF98C, 'M', u'歷'),
    +    (0xF98D, 'M', u'轢'),
    +    (0xF98E, 'M', u'年'),
    +    (0xF98F, 'M', u'憐'),
    +    (0xF990, 'M', u'戀'),
    +    (0xF991, 'M', u'撚'),
    +    (0xF992, 'M', u'漣'),
    +    (0xF993, 'M', u'煉'),
    +    (0xF994, 'M', u'璉'),
    +    (0xF995, 'M', u'秊'),
    +    (0xF996, 'M', u'練'),
    +    (0xF997, 'M', u'聯'),
    +    (0xF998, 'M', u'輦'),
    +    (0xF999, 'M', u'蓮'),
    +    (0xF99A, 'M', u'連'),
    +    (0xF99B, 'M', u'鍊'),
    +    (0xF99C, 'M', u'列'),
    +    (0xF99D, 'M', u'劣'),
    +    (0xF99E, 'M', u'咽'),
    +    (0xF99F, 'M', u'烈'),
    +    (0xF9A0, 'M', u'裂'),
    +    (0xF9A1, 'M', u'說'),
    +    (0xF9A2, 'M', u'廉'),
    +    (0xF9A3, 'M', u'念'),
    +    (0xF9A4, 'M', u'捻'),
    +    (0xF9A5, 'M', u'殮'),
    +    (0xF9A6, 'M', u'簾'),
    +    (0xF9A7, 'M', u'獵'),
    +    (0xF9A8, 'M', u'令'),
    +    (0xF9A9, 'M', u'囹'),
    +    (0xF9AA, 'M', u'寧'),
    +    (0xF9AB, 'M', u'嶺'),
    +    (0xF9AC, 'M', u'怜'),
    +    (0xF9AD, 'M', u'玲'),
    +    (0xF9AE, 'M', u'瑩'),
    +    (0xF9AF, 'M', u'羚'),
    +    (0xF9B0, 'M', u'聆'),
    +    (0xF9B1, 'M', u'鈴'),
    +    (0xF9B2, 'M', u'零'),
    +    (0xF9B3, 'M', u'靈'),
    +    ]
    +
    +def _seg_41():
    +    return [
    +    (0xF9B4, 'M', u'領'),
    +    (0xF9B5, 'M', u'例'),
    +    (0xF9B6, 'M', u'禮'),
    +    (0xF9B7, 'M', u'醴'),
    +    (0xF9B8, 'M', u'隸'),
    +    (0xF9B9, 'M', u'惡'),
    +    (0xF9BA, 'M', u'了'),
    +    (0xF9BB, 'M', u'僚'),
    +    (0xF9BC, 'M', u'寮'),
    +    (0xF9BD, 'M', u'尿'),
    +    (0xF9BE, 'M', u'料'),
    +    (0xF9BF, 'M', u'樂'),
    +    (0xF9C0, 'M', u'燎'),
    +    (0xF9C1, 'M', u'療'),
    +    (0xF9C2, 'M', u'蓼'),
    +    (0xF9C3, 'M', u'遼'),
    +    (0xF9C4, 'M', u'龍'),
    +    (0xF9C5, 'M', u'暈'),
    +    (0xF9C6, 'M', u'阮'),
    +    (0xF9C7, 'M', u'劉'),
    +    (0xF9C8, 'M', u'杻'),
    +    (0xF9C9, 'M', u'柳'),
    +    (0xF9CA, 'M', u'流'),
    +    (0xF9CB, 'M', u'溜'),
    +    (0xF9CC, 'M', u'琉'),
    +    (0xF9CD, 'M', u'留'),
    +    (0xF9CE, 'M', u'硫'),
    +    (0xF9CF, 'M', u'紐'),
    +    (0xF9D0, 'M', u'類'),
    +    (0xF9D1, 'M', u'六'),
    +    (0xF9D2, 'M', u'戮'),
    +    (0xF9D3, 'M', u'陸'),
    +    (0xF9D4, 'M', u'倫'),
    +    (0xF9D5, 'M', u'崙'),
    +    (0xF9D6, 'M', u'淪'),
    +    (0xF9D7, 'M', u'輪'),
    +    (0xF9D8, 'M', u'律'),
    +    (0xF9D9, 'M', u'慄'),
    +    (0xF9DA, 'M', u'栗'),
    +    (0xF9DB, 'M', u'率'),
    +    (0xF9DC, 'M', u'隆'),
    +    (0xF9DD, 'M', u'利'),
    +    (0xF9DE, 'M', u'吏'),
    +    (0xF9DF, 'M', u'履'),
    +    (0xF9E0, 'M', u'易'),
    +    (0xF9E1, 'M', u'李'),
    +    (0xF9E2, 'M', u'梨'),
    +    (0xF9E3, 'M', u'泥'),
    +    (0xF9E4, 'M', u'理'),
    +    (0xF9E5, 'M', u'痢'),
    +    (0xF9E6, 'M', u'罹'),
    +    (0xF9E7, 'M', u'裏'),
    +    (0xF9E8, 'M', u'裡'),
    +    (0xF9E9, 'M', u'里'),
    +    (0xF9EA, 'M', u'離'),
    +    (0xF9EB, 'M', u'匿'),
    +    (0xF9EC, 'M', u'溺'),
    +    (0xF9ED, 'M', u'吝'),
    +    (0xF9EE, 'M', u'燐'),
    +    (0xF9EF, 'M', u'璘'),
    +    (0xF9F0, 'M', u'藺'),
    +    (0xF9F1, 'M', u'隣'),
    +    (0xF9F2, 'M', u'鱗'),
    +    (0xF9F3, 'M', u'麟'),
    +    (0xF9F4, 'M', u'林'),
    +    (0xF9F5, 'M', u'淋'),
    +    (0xF9F6, 'M', u'臨'),
    +    (0xF9F7, 'M', u'立'),
    +    (0xF9F8, 'M', u'笠'),
    +    (0xF9F9, 'M', u'粒'),
    +    (0xF9FA, 'M', u'狀'),
    +    (0xF9FB, 'M', u'炙'),
    +    (0xF9FC, 'M', u'識'),
    +    (0xF9FD, 'M', u'什'),
    +    (0xF9FE, 'M', u'茶'),
    +    (0xF9FF, 'M', u'刺'),
    +    (0xFA00, 'M', u'切'),
    +    (0xFA01, 'M', u'度'),
    +    (0xFA02, 'M', u'拓'),
    +    (0xFA03, 'M', u'糖'),
    +    (0xFA04, 'M', u'宅'),
    +    (0xFA05, 'M', u'洞'),
    +    (0xFA06, 'M', u'暴'),
    +    (0xFA07, 'M', u'輻'),
    +    (0xFA08, 'M', u'行'),
    +    (0xFA09, 'M', u'降'),
    +    (0xFA0A, 'M', u'見'),
    +    (0xFA0B, 'M', u'廓'),
    +    (0xFA0C, 'M', u'兀'),
    +    (0xFA0D, 'M', u'嗀'),
    +    (0xFA0E, 'V'),
    +    (0xFA10, 'M', u'塚'),
    +    (0xFA11, 'V'),
    +    (0xFA12, 'M', u'晴'),
    +    (0xFA13, 'V'),
    +    (0xFA15, 'M', u'凞'),
    +    (0xFA16, 'M', u'猪'),
    +    (0xFA17, 'M', u'益'),
    +    (0xFA18, 'M', u'礼'),
    +    (0xFA19, 'M', u'神'),
    +    ]
    +
    +def _seg_42():
    +    return [
    +    (0xFA1A, 'M', u'祥'),
    +    (0xFA1B, 'M', u'福'),
    +    (0xFA1C, 'M', u'靖'),
    +    (0xFA1D, 'M', u'精'),
    +    (0xFA1E, 'M', u'羽'),
    +    (0xFA1F, 'V'),
    +    (0xFA20, 'M', u'蘒'),
    +    (0xFA21, 'V'),
    +    (0xFA22, 'M', u'諸'),
    +    (0xFA23, 'V'),
    +    (0xFA25, 'M', u'逸'),
    +    (0xFA26, 'M', u'都'),
    +    (0xFA27, 'V'),
    +    (0xFA2A, 'M', u'飯'),
    +    (0xFA2B, 'M', u'飼'),
    +    (0xFA2C, 'M', u'館'),
    +    (0xFA2D, 'M', u'鶴'),
    +    (0xFA2E, 'M', u'郞'),
    +    (0xFA2F, 'M', u'隷'),
    +    (0xFA30, 'M', u'侮'),
    +    (0xFA31, 'M', u'僧'),
    +    (0xFA32, 'M', u'免'),
    +    (0xFA33, 'M', u'勉'),
    +    (0xFA34, 'M', u'勤'),
    +    (0xFA35, 'M', u'卑'),
    +    (0xFA36, 'M', u'喝'),
    +    (0xFA37, 'M', u'嘆'),
    +    (0xFA38, 'M', u'器'),
    +    (0xFA39, 'M', u'塀'),
    +    (0xFA3A, 'M', u'墨'),
    +    (0xFA3B, 'M', u'層'),
    +    (0xFA3C, 'M', u'屮'),
    +    (0xFA3D, 'M', u'悔'),
    +    (0xFA3E, 'M', u'慨'),
    +    (0xFA3F, 'M', u'憎'),
    +    (0xFA40, 'M', u'懲'),
    +    (0xFA41, 'M', u'敏'),
    +    (0xFA42, 'M', u'既'),
    +    (0xFA43, 'M', u'暑'),
    +    (0xFA44, 'M', u'梅'),
    +    (0xFA45, 'M', u'海'),
    +    (0xFA46, 'M', u'渚'),
    +    (0xFA47, 'M', u'漢'),
    +    (0xFA48, 'M', u'煮'),
    +    (0xFA49, 'M', u'爫'),
    +    (0xFA4A, 'M', u'琢'),
    +    (0xFA4B, 'M', u'碑'),
    +    (0xFA4C, 'M', u'社'),
    +    (0xFA4D, 'M', u'祉'),
    +    (0xFA4E, 'M', u'祈'),
    +    (0xFA4F, 'M', u'祐'),
    +    (0xFA50, 'M', u'祖'),
    +    (0xFA51, 'M', u'祝'),
    +    (0xFA52, 'M', u'禍'),
    +    (0xFA53, 'M', u'禎'),
    +    (0xFA54, 'M', u'穀'),
    +    (0xFA55, 'M', u'突'),
    +    (0xFA56, 'M', u'節'),
    +    (0xFA57, 'M', u'練'),
    +    (0xFA58, 'M', u'縉'),
    +    (0xFA59, 'M', u'繁'),
    +    (0xFA5A, 'M', u'署'),
    +    (0xFA5B, 'M', u'者'),
    +    (0xFA5C, 'M', u'臭'),
    +    (0xFA5D, 'M', u'艹'),
    +    (0xFA5F, 'M', u'著'),
    +    (0xFA60, 'M', u'褐'),
    +    (0xFA61, 'M', u'視'),
    +    (0xFA62, 'M', u'謁'),
    +    (0xFA63, 'M', u'謹'),
    +    (0xFA64, 'M', u'賓'),
    +    (0xFA65, 'M', u'贈'),
    +    (0xFA66, 'M', u'辶'),
    +    (0xFA67, 'M', u'逸'),
    +    (0xFA68, 'M', u'難'),
    +    (0xFA69, 'M', u'響'),
    +    (0xFA6A, 'M', u'頻'),
    +    (0xFA6B, 'M', u'恵'),
    +    (0xFA6C, 'M', u'𤋮'),
    +    (0xFA6D, 'M', u'舘'),
    +    (0xFA6E, 'X'),
    +    (0xFA70, 'M', u'並'),
    +    (0xFA71, 'M', u'况'),
    +    (0xFA72, 'M', u'全'),
    +    (0xFA73, 'M', u'侀'),
    +    (0xFA74, 'M', u'充'),
    +    (0xFA75, 'M', u'冀'),
    +    (0xFA76, 'M', u'勇'),
    +    (0xFA77, 'M', u'勺'),
    +    (0xFA78, 'M', u'喝'),
    +    (0xFA79, 'M', u'啕'),
    +    (0xFA7A, 'M', u'喙'),
    +    (0xFA7B, 'M', u'嗢'),
    +    (0xFA7C, 'M', u'塚'),
    +    (0xFA7D, 'M', u'墳'),
    +    (0xFA7E, 'M', u'奄'),
    +    (0xFA7F, 'M', u'奔'),
    +    (0xFA80, 'M', u'婢'),
    +    (0xFA81, 'M', u'嬨'),
    +    (0xFA82, 'M', u'廒'),
    +    ]
    +
    +def _seg_43():
    +    return [
    +    (0xFA83, 'M', u'廙'),
    +    (0xFA84, 'M', u'彩'),
    +    (0xFA85, 'M', u'徭'),
    +    (0xFA86, 'M', u'惘'),
    +    (0xFA87, 'M', u'慎'),
    +    (0xFA88, 'M', u'愈'),
    +    (0xFA89, 'M', u'憎'),
    +    (0xFA8A, 'M', u'慠'),
    +    (0xFA8B, 'M', u'懲'),
    +    (0xFA8C, 'M', u'戴'),
    +    (0xFA8D, 'M', u'揄'),
    +    (0xFA8E, 'M', u'搜'),
    +    (0xFA8F, 'M', u'摒'),
    +    (0xFA90, 'M', u'敖'),
    +    (0xFA91, 'M', u'晴'),
    +    (0xFA92, 'M', u'朗'),
    +    (0xFA93, 'M', u'望'),
    +    (0xFA94, 'M', u'杖'),
    +    (0xFA95, 'M', u'歹'),
    +    (0xFA96, 'M', u'殺'),
    +    (0xFA97, 'M', u'流'),
    +    (0xFA98, 'M', u'滛'),
    +    (0xFA99, 'M', u'滋'),
    +    (0xFA9A, 'M', u'漢'),
    +    (0xFA9B, 'M', u'瀞'),
    +    (0xFA9C, 'M', u'煮'),
    +    (0xFA9D, 'M', u'瞧'),
    +    (0xFA9E, 'M', u'爵'),
    +    (0xFA9F, 'M', u'犯'),
    +    (0xFAA0, 'M', u'猪'),
    +    (0xFAA1, 'M', u'瑱'),
    +    (0xFAA2, 'M', u'甆'),
    +    (0xFAA3, 'M', u'画'),
    +    (0xFAA4, 'M', u'瘝'),
    +    (0xFAA5, 'M', u'瘟'),
    +    (0xFAA6, 'M', u'益'),
    +    (0xFAA7, 'M', u'盛'),
    +    (0xFAA8, 'M', u'直'),
    +    (0xFAA9, 'M', u'睊'),
    +    (0xFAAA, 'M', u'着'),
    +    (0xFAAB, 'M', u'磌'),
    +    (0xFAAC, 'M', u'窱'),
    +    (0xFAAD, 'M', u'節'),
    +    (0xFAAE, 'M', u'类'),
    +    (0xFAAF, 'M', u'絛'),
    +    (0xFAB0, 'M', u'練'),
    +    (0xFAB1, 'M', u'缾'),
    +    (0xFAB2, 'M', u'者'),
    +    (0xFAB3, 'M', u'荒'),
    +    (0xFAB4, 'M', u'華'),
    +    (0xFAB5, 'M', u'蝹'),
    +    (0xFAB6, 'M', u'襁'),
    +    (0xFAB7, 'M', u'覆'),
    +    (0xFAB8, 'M', u'視'),
    +    (0xFAB9, 'M', u'調'),
    +    (0xFABA, 'M', u'諸'),
    +    (0xFABB, 'M', u'請'),
    +    (0xFABC, 'M', u'謁'),
    +    (0xFABD, 'M', u'諾'),
    +    (0xFABE, 'M', u'諭'),
    +    (0xFABF, 'M', u'謹'),
    +    (0xFAC0, 'M', u'變'),
    +    (0xFAC1, 'M', u'贈'),
    +    (0xFAC2, 'M', u'輸'),
    +    (0xFAC3, 'M', u'遲'),
    +    (0xFAC4, 'M', u'醙'),
    +    (0xFAC5, 'M', u'鉶'),
    +    (0xFAC6, 'M', u'陼'),
    +    (0xFAC7, 'M', u'難'),
    +    (0xFAC8, 'M', u'靖'),
    +    (0xFAC9, 'M', u'韛'),
    +    (0xFACA, 'M', u'響'),
    +    (0xFACB, 'M', u'頋'),
    +    (0xFACC, 'M', u'頻'),
    +    (0xFACD, 'M', u'鬒'),
    +    (0xFACE, 'M', u'龜'),
    +    (0xFACF, 'M', u'𢡊'),
    +    (0xFAD0, 'M', u'𢡄'),
    +    (0xFAD1, 'M', u'𣏕'),
    +    (0xFAD2, 'M', u'㮝'),
    +    (0xFAD3, 'M', u'䀘'),
    +    (0xFAD4, 'M', u'䀹'),
    +    (0xFAD5, 'M', u'𥉉'),
    +    (0xFAD6, 'M', u'𥳐'),
    +    (0xFAD7, 'M', u'𧻓'),
    +    (0xFAD8, 'M', u'齃'),
    +    (0xFAD9, 'M', u'龎'),
    +    (0xFADA, 'X'),
    +    (0xFB00, 'M', u'ff'),
    +    (0xFB01, 'M', u'fi'),
    +    (0xFB02, 'M', u'fl'),
    +    (0xFB03, 'M', u'ffi'),
    +    (0xFB04, 'M', u'ffl'),
    +    (0xFB05, 'M', u'st'),
    +    (0xFB07, 'X'),
    +    (0xFB13, 'M', u'մն'),
    +    (0xFB14, 'M', u'մե'),
    +    (0xFB15, 'M', u'մի'),
    +    (0xFB16, 'M', u'վն'),
    +    (0xFB17, 'M', u'մխ'),
    +    ]
    +
    +def _seg_44():
    +    return [
    +    (0xFB18, 'X'),
    +    (0xFB1D, 'M', u'יִ'),
    +    (0xFB1E, 'V'),
    +    (0xFB1F, 'M', u'ײַ'),
    +    (0xFB20, 'M', u'ע'),
    +    (0xFB21, 'M', u'א'),
    +    (0xFB22, 'M', u'ד'),
    +    (0xFB23, 'M', u'ה'),
    +    (0xFB24, 'M', u'כ'),
    +    (0xFB25, 'M', u'ל'),
    +    (0xFB26, 'M', u'ם'),
    +    (0xFB27, 'M', u'ר'),
    +    (0xFB28, 'M', u'ת'),
    +    (0xFB29, '3', u'+'),
    +    (0xFB2A, 'M', u'שׁ'),
    +    (0xFB2B, 'M', u'שׂ'),
    +    (0xFB2C, 'M', u'שּׁ'),
    +    (0xFB2D, 'M', u'שּׂ'),
    +    (0xFB2E, 'M', u'אַ'),
    +    (0xFB2F, 'M', u'אָ'),
    +    (0xFB30, 'M', u'אּ'),
    +    (0xFB31, 'M', u'בּ'),
    +    (0xFB32, 'M', u'גּ'),
    +    (0xFB33, 'M', u'דּ'),
    +    (0xFB34, 'M', u'הּ'),
    +    (0xFB35, 'M', u'וּ'),
    +    (0xFB36, 'M', u'זּ'),
    +    (0xFB37, 'X'),
    +    (0xFB38, 'M', u'טּ'),
    +    (0xFB39, 'M', u'יּ'),
    +    (0xFB3A, 'M', u'ךּ'),
    +    (0xFB3B, 'M', u'כּ'),
    +    (0xFB3C, 'M', u'לּ'),
    +    (0xFB3D, 'X'),
    +    (0xFB3E, 'M', u'מּ'),
    +    (0xFB3F, 'X'),
    +    (0xFB40, 'M', u'נּ'),
    +    (0xFB41, 'M', u'סּ'),
    +    (0xFB42, 'X'),
    +    (0xFB43, 'M', u'ףּ'),
    +    (0xFB44, 'M', u'פּ'),
    +    (0xFB45, 'X'),
    +    (0xFB46, 'M', u'צּ'),
    +    (0xFB47, 'M', u'קּ'),
    +    (0xFB48, 'M', u'רּ'),
    +    (0xFB49, 'M', u'שּ'),
    +    (0xFB4A, 'M', u'תּ'),
    +    (0xFB4B, 'M', u'וֹ'),
    +    (0xFB4C, 'M', u'בֿ'),
    +    (0xFB4D, 'M', u'כֿ'),
    +    (0xFB4E, 'M', u'פֿ'),
    +    (0xFB4F, 'M', u'אל'),
    +    (0xFB50, 'M', u'ٱ'),
    +    (0xFB52, 'M', u'ٻ'),
    +    (0xFB56, 'M', u'پ'),
    +    (0xFB5A, 'M', u'ڀ'),
    +    (0xFB5E, 'M', u'ٺ'),
    +    (0xFB62, 'M', u'ٿ'),
    +    (0xFB66, 'M', u'ٹ'),
    +    (0xFB6A, 'M', u'ڤ'),
    +    (0xFB6E, 'M', u'ڦ'),
    +    (0xFB72, 'M', u'ڄ'),
    +    (0xFB76, 'M', u'ڃ'),
    +    (0xFB7A, 'M', u'چ'),
    +    (0xFB7E, 'M', u'ڇ'),
    +    (0xFB82, 'M', u'ڍ'),
    +    (0xFB84, 'M', u'ڌ'),
    +    (0xFB86, 'M', u'ڎ'),
    +    (0xFB88, 'M', u'ڈ'),
    +    (0xFB8A, 'M', u'ژ'),
    +    (0xFB8C, 'M', u'ڑ'),
    +    (0xFB8E, 'M', u'ک'),
    +    (0xFB92, 'M', u'گ'),
    +    (0xFB96, 'M', u'ڳ'),
    +    (0xFB9A, 'M', u'ڱ'),
    +    (0xFB9E, 'M', u'ں'),
    +    (0xFBA0, 'M', u'ڻ'),
    +    (0xFBA4, 'M', u'ۀ'),
    +    (0xFBA6, 'M', u'ہ'),
    +    (0xFBAA, 'M', u'ھ'),
    +    (0xFBAE, 'M', u'ے'),
    +    (0xFBB0, 'M', u'ۓ'),
    +    (0xFBB2, 'V'),
    +    (0xFBC2, 'X'),
    +    (0xFBD3, 'M', u'ڭ'),
    +    (0xFBD7, 'M', u'ۇ'),
    +    (0xFBD9, 'M', u'ۆ'),
    +    (0xFBDB, 'M', u'ۈ'),
    +    (0xFBDD, 'M', u'ۇٴ'),
    +    (0xFBDE, 'M', u'ۋ'),
    +    (0xFBE0, 'M', u'ۅ'),
    +    (0xFBE2, 'M', u'ۉ'),
    +    (0xFBE4, 'M', u'ې'),
    +    (0xFBE8, 'M', u'ى'),
    +    (0xFBEA, 'M', u'ئا'),
    +    (0xFBEC, 'M', u'ئە'),
    +    (0xFBEE, 'M', u'ئو'),
    +    (0xFBF0, 'M', u'ئۇ'),
    +    (0xFBF2, 'M', u'ئۆ'),
    +    (0xFBF4, 'M', u'ئۈ'),
    +    ]
    +
    +def _seg_45():
    +    return [
    +    (0xFBF6, 'M', u'ئې'),
    +    (0xFBF9, 'M', u'ئى'),
    +    (0xFBFC, 'M', u'ی'),
    +    (0xFC00, 'M', u'ئج'),
    +    (0xFC01, 'M', u'ئح'),
    +    (0xFC02, 'M', u'ئم'),
    +    (0xFC03, 'M', u'ئى'),
    +    (0xFC04, 'M', u'ئي'),
    +    (0xFC05, 'M', u'بج'),
    +    (0xFC06, 'M', u'بح'),
    +    (0xFC07, 'M', u'بخ'),
    +    (0xFC08, 'M', u'بم'),
    +    (0xFC09, 'M', u'بى'),
    +    (0xFC0A, 'M', u'بي'),
    +    (0xFC0B, 'M', u'تج'),
    +    (0xFC0C, 'M', u'تح'),
    +    (0xFC0D, 'M', u'تخ'),
    +    (0xFC0E, 'M', u'تم'),
    +    (0xFC0F, 'M', u'تى'),
    +    (0xFC10, 'M', u'تي'),
    +    (0xFC11, 'M', u'ثج'),
    +    (0xFC12, 'M', u'ثم'),
    +    (0xFC13, 'M', u'ثى'),
    +    (0xFC14, 'M', u'ثي'),
    +    (0xFC15, 'M', u'جح'),
    +    (0xFC16, 'M', u'جم'),
    +    (0xFC17, 'M', u'حج'),
    +    (0xFC18, 'M', u'حم'),
    +    (0xFC19, 'M', u'خج'),
    +    (0xFC1A, 'M', u'خح'),
    +    (0xFC1B, 'M', u'خم'),
    +    (0xFC1C, 'M', u'سج'),
    +    (0xFC1D, 'M', u'سح'),
    +    (0xFC1E, 'M', u'سخ'),
    +    (0xFC1F, 'M', u'سم'),
    +    (0xFC20, 'M', u'صح'),
    +    (0xFC21, 'M', u'صم'),
    +    (0xFC22, 'M', u'ضج'),
    +    (0xFC23, 'M', u'ضح'),
    +    (0xFC24, 'M', u'ضخ'),
    +    (0xFC25, 'M', u'ضم'),
    +    (0xFC26, 'M', u'طح'),
    +    (0xFC27, 'M', u'طم'),
    +    (0xFC28, 'M', u'ظم'),
    +    (0xFC29, 'M', u'عج'),
    +    (0xFC2A, 'M', u'عم'),
    +    (0xFC2B, 'M', u'غج'),
    +    (0xFC2C, 'M', u'غم'),
    +    (0xFC2D, 'M', u'فج'),
    +    (0xFC2E, 'M', u'فح'),
    +    (0xFC2F, 'M', u'فخ'),
    +    (0xFC30, 'M', u'فم'),
    +    (0xFC31, 'M', u'فى'),
    +    (0xFC32, 'M', u'في'),
    +    (0xFC33, 'M', u'قح'),
    +    (0xFC34, 'M', u'قم'),
    +    (0xFC35, 'M', u'قى'),
    +    (0xFC36, 'M', u'قي'),
    +    (0xFC37, 'M', u'كا'),
    +    (0xFC38, 'M', u'كج'),
    +    (0xFC39, 'M', u'كح'),
    +    (0xFC3A, 'M', u'كخ'),
    +    (0xFC3B, 'M', u'كل'),
    +    (0xFC3C, 'M', u'كم'),
    +    (0xFC3D, 'M', u'كى'),
    +    (0xFC3E, 'M', u'كي'),
    +    (0xFC3F, 'M', u'لج'),
    +    (0xFC40, 'M', u'لح'),
    +    (0xFC41, 'M', u'لخ'),
    +    (0xFC42, 'M', u'لم'),
    +    (0xFC43, 'M', u'لى'),
    +    (0xFC44, 'M', u'لي'),
    +    (0xFC45, 'M', u'مج'),
    +    (0xFC46, 'M', u'مح'),
    +    (0xFC47, 'M', u'مخ'),
    +    (0xFC48, 'M', u'مم'),
    +    (0xFC49, 'M', u'مى'),
    +    (0xFC4A, 'M', u'مي'),
    +    (0xFC4B, 'M', u'نج'),
    +    (0xFC4C, 'M', u'نح'),
    +    (0xFC4D, 'M', u'نخ'),
    +    (0xFC4E, 'M', u'نم'),
    +    (0xFC4F, 'M', u'نى'),
    +    (0xFC50, 'M', u'ني'),
    +    (0xFC51, 'M', u'هج'),
    +    (0xFC52, 'M', u'هم'),
    +    (0xFC53, 'M', u'هى'),
    +    (0xFC54, 'M', u'هي'),
    +    (0xFC55, 'M', u'يج'),
    +    (0xFC56, 'M', u'يح'),
    +    (0xFC57, 'M', u'يخ'),
    +    (0xFC58, 'M', u'يم'),
    +    (0xFC59, 'M', u'يى'),
    +    (0xFC5A, 'M', u'يي'),
    +    (0xFC5B, 'M', u'ذٰ'),
    +    (0xFC5C, 'M', u'رٰ'),
    +    (0xFC5D, 'M', u'ىٰ'),
    +    (0xFC5E, '3', u' ٌّ'),
    +    (0xFC5F, '3', u' ٍّ'),
    +    (0xFC60, '3', u' َّ'),
    +    ]
    +
    +def _seg_46():
    +    return [
    +    (0xFC61, '3', u' ُّ'),
    +    (0xFC62, '3', u' ِّ'),
    +    (0xFC63, '3', u' ّٰ'),
    +    (0xFC64, 'M', u'ئر'),
    +    (0xFC65, 'M', u'ئز'),
    +    (0xFC66, 'M', u'ئم'),
    +    (0xFC67, 'M', u'ئن'),
    +    (0xFC68, 'M', u'ئى'),
    +    (0xFC69, 'M', u'ئي'),
    +    (0xFC6A, 'M', u'بر'),
    +    (0xFC6B, 'M', u'بز'),
    +    (0xFC6C, 'M', u'بم'),
    +    (0xFC6D, 'M', u'بن'),
    +    (0xFC6E, 'M', u'بى'),
    +    (0xFC6F, 'M', u'بي'),
    +    (0xFC70, 'M', u'تر'),
    +    (0xFC71, 'M', u'تز'),
    +    (0xFC72, 'M', u'تم'),
    +    (0xFC73, 'M', u'تن'),
    +    (0xFC74, 'M', u'تى'),
    +    (0xFC75, 'M', u'تي'),
    +    (0xFC76, 'M', u'ثر'),
    +    (0xFC77, 'M', u'ثز'),
    +    (0xFC78, 'M', u'ثم'),
    +    (0xFC79, 'M', u'ثن'),
    +    (0xFC7A, 'M', u'ثى'),
    +    (0xFC7B, 'M', u'ثي'),
    +    (0xFC7C, 'M', u'فى'),
    +    (0xFC7D, 'M', u'في'),
    +    (0xFC7E, 'M', u'قى'),
    +    (0xFC7F, 'M', u'قي'),
    +    (0xFC80, 'M', u'كا'),
    +    (0xFC81, 'M', u'كل'),
    +    (0xFC82, 'M', u'كم'),
    +    (0xFC83, 'M', u'كى'),
    +    (0xFC84, 'M', u'كي'),
    +    (0xFC85, 'M', u'لم'),
    +    (0xFC86, 'M', u'لى'),
    +    (0xFC87, 'M', u'لي'),
    +    (0xFC88, 'M', u'ما'),
    +    (0xFC89, 'M', u'مم'),
    +    (0xFC8A, 'M', u'نر'),
    +    (0xFC8B, 'M', u'نز'),
    +    (0xFC8C, 'M', u'نم'),
    +    (0xFC8D, 'M', u'نن'),
    +    (0xFC8E, 'M', u'نى'),
    +    (0xFC8F, 'M', u'ني'),
    +    (0xFC90, 'M', u'ىٰ'),
    +    (0xFC91, 'M', u'ير'),
    +    (0xFC92, 'M', u'يز'),
    +    (0xFC93, 'M', u'يم'),
    +    (0xFC94, 'M', u'ين'),
    +    (0xFC95, 'M', u'يى'),
    +    (0xFC96, 'M', u'يي'),
    +    (0xFC97, 'M', u'ئج'),
    +    (0xFC98, 'M', u'ئح'),
    +    (0xFC99, 'M', u'ئخ'),
    +    (0xFC9A, 'M', u'ئم'),
    +    (0xFC9B, 'M', u'ئه'),
    +    (0xFC9C, 'M', u'بج'),
    +    (0xFC9D, 'M', u'بح'),
    +    (0xFC9E, 'M', u'بخ'),
    +    (0xFC9F, 'M', u'بم'),
    +    (0xFCA0, 'M', u'به'),
    +    (0xFCA1, 'M', u'تج'),
    +    (0xFCA2, 'M', u'تح'),
    +    (0xFCA3, 'M', u'تخ'),
    +    (0xFCA4, 'M', u'تم'),
    +    (0xFCA5, 'M', u'ته'),
    +    (0xFCA6, 'M', u'ثم'),
    +    (0xFCA7, 'M', u'جح'),
    +    (0xFCA8, 'M', u'جم'),
    +    (0xFCA9, 'M', u'حج'),
    +    (0xFCAA, 'M', u'حم'),
    +    (0xFCAB, 'M', u'خج'),
    +    (0xFCAC, 'M', u'خم'),
    +    (0xFCAD, 'M', u'سج'),
    +    (0xFCAE, 'M', u'سح'),
    +    (0xFCAF, 'M', u'سخ'),
    +    (0xFCB0, 'M', u'سم'),
    +    (0xFCB1, 'M', u'صح'),
    +    (0xFCB2, 'M', u'صخ'),
    +    (0xFCB3, 'M', u'صم'),
    +    (0xFCB4, 'M', u'ضج'),
    +    (0xFCB5, 'M', u'ضح'),
    +    (0xFCB6, 'M', u'ضخ'),
    +    (0xFCB7, 'M', u'ضم'),
    +    (0xFCB8, 'M', u'طح'),
    +    (0xFCB9, 'M', u'ظم'),
    +    (0xFCBA, 'M', u'عج'),
    +    (0xFCBB, 'M', u'عم'),
    +    (0xFCBC, 'M', u'غج'),
    +    (0xFCBD, 'M', u'غم'),
    +    (0xFCBE, 'M', u'فج'),
    +    (0xFCBF, 'M', u'فح'),
    +    (0xFCC0, 'M', u'فخ'),
    +    (0xFCC1, 'M', u'فم'),
    +    (0xFCC2, 'M', u'قح'),
    +    (0xFCC3, 'M', u'قم'),
    +    (0xFCC4, 'M', u'كج'),
    +    ]
    +
    +def _seg_47():
    +    return [
    +    (0xFCC5, 'M', u'كح'),
    +    (0xFCC6, 'M', u'كخ'),
    +    (0xFCC7, 'M', u'كل'),
    +    (0xFCC8, 'M', u'كم'),
    +    (0xFCC9, 'M', u'لج'),
    +    (0xFCCA, 'M', u'لح'),
    +    (0xFCCB, 'M', u'لخ'),
    +    (0xFCCC, 'M', u'لم'),
    +    (0xFCCD, 'M', u'له'),
    +    (0xFCCE, 'M', u'مج'),
    +    (0xFCCF, 'M', u'مح'),
    +    (0xFCD0, 'M', u'مخ'),
    +    (0xFCD1, 'M', u'مم'),
    +    (0xFCD2, 'M', u'نج'),
    +    (0xFCD3, 'M', u'نح'),
    +    (0xFCD4, 'M', u'نخ'),
    +    (0xFCD5, 'M', u'نم'),
    +    (0xFCD6, 'M', u'نه'),
    +    (0xFCD7, 'M', u'هج'),
    +    (0xFCD8, 'M', u'هم'),
    +    (0xFCD9, 'M', u'هٰ'),
    +    (0xFCDA, 'M', u'يج'),
    +    (0xFCDB, 'M', u'يح'),
    +    (0xFCDC, 'M', u'يخ'),
    +    (0xFCDD, 'M', u'يم'),
    +    (0xFCDE, 'M', u'يه'),
    +    (0xFCDF, 'M', u'ئم'),
    +    (0xFCE0, 'M', u'ئه'),
    +    (0xFCE1, 'M', u'بم'),
    +    (0xFCE2, 'M', u'به'),
    +    (0xFCE3, 'M', u'تم'),
    +    (0xFCE4, 'M', u'ته'),
    +    (0xFCE5, 'M', u'ثم'),
    +    (0xFCE6, 'M', u'ثه'),
    +    (0xFCE7, 'M', u'سم'),
    +    (0xFCE8, 'M', u'سه'),
    +    (0xFCE9, 'M', u'شم'),
    +    (0xFCEA, 'M', u'شه'),
    +    (0xFCEB, 'M', u'كل'),
    +    (0xFCEC, 'M', u'كم'),
    +    (0xFCED, 'M', u'لم'),
    +    (0xFCEE, 'M', u'نم'),
    +    (0xFCEF, 'M', u'نه'),
    +    (0xFCF0, 'M', u'يم'),
    +    (0xFCF1, 'M', u'يه'),
    +    (0xFCF2, 'M', u'ـَّ'),
    +    (0xFCF3, 'M', u'ـُّ'),
    +    (0xFCF4, 'M', u'ـِّ'),
    +    (0xFCF5, 'M', u'طى'),
    +    (0xFCF6, 'M', u'طي'),
    +    (0xFCF7, 'M', u'عى'),
    +    (0xFCF8, 'M', u'عي'),
    +    (0xFCF9, 'M', u'غى'),
    +    (0xFCFA, 'M', u'غي'),
    +    (0xFCFB, 'M', u'سى'),
    +    (0xFCFC, 'M', u'سي'),
    +    (0xFCFD, 'M', u'شى'),
    +    (0xFCFE, 'M', u'شي'),
    +    (0xFCFF, 'M', u'حى'),
    +    (0xFD00, 'M', u'حي'),
    +    (0xFD01, 'M', u'جى'),
    +    (0xFD02, 'M', u'جي'),
    +    (0xFD03, 'M', u'خى'),
    +    (0xFD04, 'M', u'خي'),
    +    (0xFD05, 'M', u'صى'),
    +    (0xFD06, 'M', u'صي'),
    +    (0xFD07, 'M', u'ضى'),
    +    (0xFD08, 'M', u'ضي'),
    +    (0xFD09, 'M', u'شج'),
    +    (0xFD0A, 'M', u'شح'),
    +    (0xFD0B, 'M', u'شخ'),
    +    (0xFD0C, 'M', u'شم'),
    +    (0xFD0D, 'M', u'شر'),
    +    (0xFD0E, 'M', u'سر'),
    +    (0xFD0F, 'M', u'صر'),
    +    (0xFD10, 'M', u'ضر'),
    +    (0xFD11, 'M', u'طى'),
    +    (0xFD12, 'M', u'طي'),
    +    (0xFD13, 'M', u'عى'),
    +    (0xFD14, 'M', u'عي'),
    +    (0xFD15, 'M', u'غى'),
    +    (0xFD16, 'M', u'غي'),
    +    (0xFD17, 'M', u'سى'),
    +    (0xFD18, 'M', u'سي'),
    +    (0xFD19, 'M', u'شى'),
    +    (0xFD1A, 'M', u'شي'),
    +    (0xFD1B, 'M', u'حى'),
    +    (0xFD1C, 'M', u'حي'),
    +    (0xFD1D, 'M', u'جى'),
    +    (0xFD1E, 'M', u'جي'),
    +    (0xFD1F, 'M', u'خى'),
    +    (0xFD20, 'M', u'خي'),
    +    (0xFD21, 'M', u'صى'),
    +    (0xFD22, 'M', u'صي'),
    +    (0xFD23, 'M', u'ضى'),
    +    (0xFD24, 'M', u'ضي'),
    +    (0xFD25, 'M', u'شج'),
    +    (0xFD26, 'M', u'شح'),
    +    (0xFD27, 'M', u'شخ'),
    +    (0xFD28, 'M', u'شم'),
    +    ]
    +
    +def _seg_48():
    +    return [
    +    (0xFD29, 'M', u'شر'),
    +    (0xFD2A, 'M', u'سر'),
    +    (0xFD2B, 'M', u'صر'),
    +    (0xFD2C, 'M', u'ضر'),
    +    (0xFD2D, 'M', u'شج'),
    +    (0xFD2E, 'M', u'شح'),
    +    (0xFD2F, 'M', u'شخ'),
    +    (0xFD30, 'M', u'شم'),
    +    (0xFD31, 'M', u'سه'),
    +    (0xFD32, 'M', u'شه'),
    +    (0xFD33, 'M', u'طم'),
    +    (0xFD34, 'M', u'سج'),
    +    (0xFD35, 'M', u'سح'),
    +    (0xFD36, 'M', u'سخ'),
    +    (0xFD37, 'M', u'شج'),
    +    (0xFD38, 'M', u'شح'),
    +    (0xFD39, 'M', u'شخ'),
    +    (0xFD3A, 'M', u'طم'),
    +    (0xFD3B, 'M', u'ظم'),
    +    (0xFD3C, 'M', u'اً'),
    +    (0xFD3E, 'V'),
    +    (0xFD40, 'X'),
    +    (0xFD50, 'M', u'تجم'),
    +    (0xFD51, 'M', u'تحج'),
    +    (0xFD53, 'M', u'تحم'),
    +    (0xFD54, 'M', u'تخم'),
    +    (0xFD55, 'M', u'تمج'),
    +    (0xFD56, 'M', u'تمح'),
    +    (0xFD57, 'M', u'تمخ'),
    +    (0xFD58, 'M', u'جمح'),
    +    (0xFD5A, 'M', u'حمي'),
    +    (0xFD5B, 'M', u'حمى'),
    +    (0xFD5C, 'M', u'سحج'),
    +    (0xFD5D, 'M', u'سجح'),
    +    (0xFD5E, 'M', u'سجى'),
    +    (0xFD5F, 'M', u'سمح'),
    +    (0xFD61, 'M', u'سمج'),
    +    (0xFD62, 'M', u'سمم'),
    +    (0xFD64, 'M', u'صحح'),
    +    (0xFD66, 'M', u'صمم'),
    +    (0xFD67, 'M', u'شحم'),
    +    (0xFD69, 'M', u'شجي'),
    +    (0xFD6A, 'M', u'شمخ'),
    +    (0xFD6C, 'M', u'شمم'),
    +    (0xFD6E, 'M', u'ضحى'),
    +    (0xFD6F, 'M', u'ضخم'),
    +    (0xFD71, 'M', u'طمح'),
    +    (0xFD73, 'M', u'طمم'),
    +    (0xFD74, 'M', u'طمي'),
    +    (0xFD75, 'M', u'عجم'),
    +    (0xFD76, 'M', u'عمم'),
    +    (0xFD78, 'M', u'عمى'),
    +    (0xFD79, 'M', u'غمم'),
    +    (0xFD7A, 'M', u'غمي'),
    +    (0xFD7B, 'M', u'غمى'),
    +    (0xFD7C, 'M', u'فخم'),
    +    (0xFD7E, 'M', u'قمح'),
    +    (0xFD7F, 'M', u'قمم'),
    +    (0xFD80, 'M', u'لحم'),
    +    (0xFD81, 'M', u'لحي'),
    +    (0xFD82, 'M', u'لحى'),
    +    (0xFD83, 'M', u'لجج'),
    +    (0xFD85, 'M', u'لخم'),
    +    (0xFD87, 'M', u'لمح'),
    +    (0xFD89, 'M', u'محج'),
    +    (0xFD8A, 'M', u'محم'),
    +    (0xFD8B, 'M', u'محي'),
    +    (0xFD8C, 'M', u'مجح'),
    +    (0xFD8D, 'M', u'مجم'),
    +    (0xFD8E, 'M', u'مخج'),
    +    (0xFD8F, 'M', u'مخم'),
    +    (0xFD90, 'X'),
    +    (0xFD92, 'M', u'مجخ'),
    +    (0xFD93, 'M', u'همج'),
    +    (0xFD94, 'M', u'همم'),
    +    (0xFD95, 'M', u'نحم'),
    +    (0xFD96, 'M', u'نحى'),
    +    (0xFD97, 'M', u'نجم'),
    +    (0xFD99, 'M', u'نجى'),
    +    (0xFD9A, 'M', u'نمي'),
    +    (0xFD9B, 'M', u'نمى'),
    +    (0xFD9C, 'M', u'يمم'),
    +    (0xFD9E, 'M', u'بخي'),
    +    (0xFD9F, 'M', u'تجي'),
    +    (0xFDA0, 'M', u'تجى'),
    +    (0xFDA1, 'M', u'تخي'),
    +    (0xFDA2, 'M', u'تخى'),
    +    (0xFDA3, 'M', u'تمي'),
    +    (0xFDA4, 'M', u'تمى'),
    +    (0xFDA5, 'M', u'جمي'),
    +    (0xFDA6, 'M', u'جحى'),
    +    (0xFDA7, 'M', u'جمى'),
    +    (0xFDA8, 'M', u'سخى'),
    +    (0xFDA9, 'M', u'صحي'),
    +    (0xFDAA, 'M', u'شحي'),
    +    (0xFDAB, 'M', u'ضحي'),
    +    (0xFDAC, 'M', u'لجي'),
    +    (0xFDAD, 'M', u'لمي'),
    +    (0xFDAE, 'M', u'يحي'),
    +    (0xFDAF, 'M', u'يجي'),
    +    ]
    +
    +def _seg_49():
    +    return [
    +    (0xFDB0, 'M', u'يمي'),
    +    (0xFDB1, 'M', u'ممي'),
    +    (0xFDB2, 'M', u'قمي'),
    +    (0xFDB3, 'M', u'نحي'),
    +    (0xFDB4, 'M', u'قمح'),
    +    (0xFDB5, 'M', u'لحم'),
    +    (0xFDB6, 'M', u'عمي'),
    +    (0xFDB7, 'M', u'كمي'),
    +    (0xFDB8, 'M', u'نجح'),
    +    (0xFDB9, 'M', u'مخي'),
    +    (0xFDBA, 'M', u'لجم'),
    +    (0xFDBB, 'M', u'كمم'),
    +    (0xFDBC, 'M', u'لجم'),
    +    (0xFDBD, 'M', u'نجح'),
    +    (0xFDBE, 'M', u'جحي'),
    +    (0xFDBF, 'M', u'حجي'),
    +    (0xFDC0, 'M', u'مجي'),
    +    (0xFDC1, 'M', u'فمي'),
    +    (0xFDC2, 'M', u'بحي'),
    +    (0xFDC3, 'M', u'كمم'),
    +    (0xFDC4, 'M', u'عجم'),
    +    (0xFDC5, 'M', u'صمم'),
    +    (0xFDC6, 'M', u'سخي'),
    +    (0xFDC7, 'M', u'نجي'),
    +    (0xFDC8, 'X'),
    +    (0xFDF0, 'M', u'صلے'),
    +    (0xFDF1, 'M', u'قلے'),
    +    (0xFDF2, 'M', u'الله'),
    +    (0xFDF3, 'M', u'اكبر'),
    +    (0xFDF4, 'M', u'محمد'),
    +    (0xFDF5, 'M', u'صلعم'),
    +    (0xFDF6, 'M', u'رسول'),
    +    (0xFDF7, 'M', u'عليه'),
    +    (0xFDF8, 'M', u'وسلم'),
    +    (0xFDF9, 'M', u'صلى'),
    +    (0xFDFA, '3', u'صلى الله عليه وسلم'),
    +    (0xFDFB, '3', u'جل جلاله'),
    +    (0xFDFC, 'M', u'ریال'),
    +    (0xFDFD, 'V'),
    +    (0xFDFE, 'X'),
    +    (0xFE00, 'I'),
    +    (0xFE10, '3', u','),
    +    (0xFE11, 'M', u'、'),
    +    (0xFE12, 'X'),
    +    (0xFE13, '3', u':'),
    +    (0xFE14, '3', u';'),
    +    (0xFE15, '3', u'!'),
    +    (0xFE16, '3', u'?'),
    +    (0xFE17, 'M', u'〖'),
    +    (0xFE18, 'M', u'〗'),
    +    (0xFE19, 'X'),
    +    (0xFE20, 'V'),
    +    (0xFE30, 'X'),
    +    (0xFE31, 'M', u'—'),
    +    (0xFE32, 'M', u'–'),
    +    (0xFE33, '3', u'_'),
    +    (0xFE35, '3', u'('),
    +    (0xFE36, '3', u')'),
    +    (0xFE37, '3', u'{'),
    +    (0xFE38, '3', u'}'),
    +    (0xFE39, 'M', u'〔'),
    +    (0xFE3A, 'M', u'〕'),
    +    (0xFE3B, 'M', u'【'),
    +    (0xFE3C, 'M', u'】'),
    +    (0xFE3D, 'M', u'《'),
    +    (0xFE3E, 'M', u'》'),
    +    (0xFE3F, 'M', u'〈'),
    +    (0xFE40, 'M', u'〉'),
    +    (0xFE41, 'M', u'「'),
    +    (0xFE42, 'M', u'」'),
    +    (0xFE43, 'M', u'『'),
    +    (0xFE44, 'M', u'』'),
    +    (0xFE45, 'V'),
    +    (0xFE47, '3', u'['),
    +    (0xFE48, '3', u']'),
    +    (0xFE49, '3', u' ̅'),
    +    (0xFE4D, '3', u'_'),
    +    (0xFE50, '3', u','),
    +    (0xFE51, 'M', u'、'),
    +    (0xFE52, 'X'),
    +    (0xFE54, '3', u';'),
    +    (0xFE55, '3', u':'),
    +    (0xFE56, '3', u'?'),
    +    (0xFE57, '3', u'!'),
    +    (0xFE58, 'M', u'—'),
    +    (0xFE59, '3', u'('),
    +    (0xFE5A, '3', u')'),
    +    (0xFE5B, '3', u'{'),
    +    (0xFE5C, '3', u'}'),
    +    (0xFE5D, 'M', u'〔'),
    +    (0xFE5E, 'M', u'〕'),
    +    (0xFE5F, '3', u'#'),
    +    (0xFE60, '3', u'&'),
    +    (0xFE61, '3', u'*'),
    +    (0xFE62, '3', u'+'),
    +    (0xFE63, 'M', u'-'),
    +    (0xFE64, '3', u'<'),
    +    (0xFE65, '3', u'>'),
    +    (0xFE66, '3', u'='),
    +    (0xFE67, 'X'),
    +    ]
    +
    +def _seg_50():
    +    return [
    +    (0xFE68, '3', u'\\'),
    +    (0xFE69, '3', u'$'),
    +    (0xFE6A, '3', u'%'),
    +    (0xFE6B, '3', u'@'),
    +    (0xFE6C, 'X'),
    +    (0xFE70, '3', u' ً'),
    +    (0xFE71, 'M', u'ـً'),
    +    (0xFE72, '3', u' ٌ'),
    +    (0xFE73, 'V'),
    +    (0xFE74, '3', u' ٍ'),
    +    (0xFE75, 'X'),
    +    (0xFE76, '3', u' َ'),
    +    (0xFE77, 'M', u'ـَ'),
    +    (0xFE78, '3', u' ُ'),
    +    (0xFE79, 'M', u'ـُ'),
    +    (0xFE7A, '3', u' ِ'),
    +    (0xFE7B, 'M', u'ـِ'),
    +    (0xFE7C, '3', u' ّ'),
    +    (0xFE7D, 'M', u'ـّ'),
    +    (0xFE7E, '3', u' ْ'),
    +    (0xFE7F, 'M', u'ـْ'),
    +    (0xFE80, 'M', u'ء'),
    +    (0xFE81, 'M', u'آ'),
    +    (0xFE83, 'M', u'أ'),
    +    (0xFE85, 'M', u'ؤ'),
    +    (0xFE87, 'M', u'إ'),
    +    (0xFE89, 'M', u'ئ'),
    +    (0xFE8D, 'M', u'ا'),
    +    (0xFE8F, 'M', u'ب'),
    +    (0xFE93, 'M', u'ة'),
    +    (0xFE95, 'M', u'ت'),
    +    (0xFE99, 'M', u'ث'),
    +    (0xFE9D, 'M', u'ج'),
    +    (0xFEA1, 'M', u'ح'),
    +    (0xFEA5, 'M', u'خ'),
    +    (0xFEA9, 'M', u'د'),
    +    (0xFEAB, 'M', u'ذ'),
    +    (0xFEAD, 'M', u'ر'),
    +    (0xFEAF, 'M', u'ز'),
    +    (0xFEB1, 'M', u'س'),
    +    (0xFEB5, 'M', u'ش'),
    +    (0xFEB9, 'M', u'ص'),
    +    (0xFEBD, 'M', u'ض'),
    +    (0xFEC1, 'M', u'ط'),
    +    (0xFEC5, 'M', u'ظ'),
    +    (0xFEC9, 'M', u'ع'),
    +    (0xFECD, 'M', u'غ'),
    +    (0xFED1, 'M', u'ف'),
    +    (0xFED5, 'M', u'ق'),
    +    (0xFED9, 'M', u'ك'),
    +    (0xFEDD, 'M', u'ل'),
    +    (0xFEE1, 'M', u'م'),
    +    (0xFEE5, 'M', u'ن'),
    +    (0xFEE9, 'M', u'ه'),
    +    (0xFEED, 'M', u'و'),
    +    (0xFEEF, 'M', u'ى'),
    +    (0xFEF1, 'M', u'ي'),
    +    (0xFEF5, 'M', u'لآ'),
    +    (0xFEF7, 'M', u'لأ'),
    +    (0xFEF9, 'M', u'لإ'),
    +    (0xFEFB, 'M', u'لا'),
    +    (0xFEFD, 'X'),
    +    (0xFEFF, 'I'),
    +    (0xFF00, 'X'),
    +    (0xFF01, '3', u'!'),
    +    (0xFF02, '3', u'"'),
    +    (0xFF03, '3', u'#'),
    +    (0xFF04, '3', u'$'),
    +    (0xFF05, '3', u'%'),
    +    (0xFF06, '3', u'&'),
    +    (0xFF07, '3', u'\''),
    +    (0xFF08, '3', u'('),
    +    (0xFF09, '3', u')'),
    +    (0xFF0A, '3', u'*'),
    +    (0xFF0B, '3', u'+'),
    +    (0xFF0C, '3', u','),
    +    (0xFF0D, 'M', u'-'),
    +    (0xFF0E, 'M', u'.'),
    +    (0xFF0F, '3', u'/'),
    +    (0xFF10, 'M', u'0'),
    +    (0xFF11, 'M', u'1'),
    +    (0xFF12, 'M', u'2'),
    +    (0xFF13, 'M', u'3'),
    +    (0xFF14, 'M', u'4'),
    +    (0xFF15, 'M', u'5'),
    +    (0xFF16, 'M', u'6'),
    +    (0xFF17, 'M', u'7'),
    +    (0xFF18, 'M', u'8'),
    +    (0xFF19, 'M', u'9'),
    +    (0xFF1A, '3', u':'),
    +    (0xFF1B, '3', u';'),
    +    (0xFF1C, '3', u'<'),
    +    (0xFF1D, '3', u'='),
    +    (0xFF1E, '3', u'>'),
    +    (0xFF1F, '3', u'?'),
    +    (0xFF20, '3', u'@'),
    +    (0xFF21, 'M', u'a'),
    +    (0xFF22, 'M', u'b'),
    +    (0xFF23, 'M', u'c'),
    +    (0xFF24, 'M', u'd'),
    +    ]
    +
    +def _seg_51():
    +    return [
    +    (0xFF25, 'M', u'e'),
    +    (0xFF26, 'M', u'f'),
    +    (0xFF27, 'M', u'g'),
    +    (0xFF28, 'M', u'h'),
    +    (0xFF29, 'M', u'i'),
    +    (0xFF2A, 'M', u'j'),
    +    (0xFF2B, 'M', u'k'),
    +    (0xFF2C, 'M', u'l'),
    +    (0xFF2D, 'M', u'm'),
    +    (0xFF2E, 'M', u'n'),
    +    (0xFF2F, 'M', u'o'),
    +    (0xFF30, 'M', u'p'),
    +    (0xFF31, 'M', u'q'),
    +    (0xFF32, 'M', u'r'),
    +    (0xFF33, 'M', u's'),
    +    (0xFF34, 'M', u't'),
    +    (0xFF35, 'M', u'u'),
    +    (0xFF36, 'M', u'v'),
    +    (0xFF37, 'M', u'w'),
    +    (0xFF38, 'M', u'x'),
    +    (0xFF39, 'M', u'y'),
    +    (0xFF3A, 'M', u'z'),
    +    (0xFF3B, '3', u'['),
    +    (0xFF3C, '3', u'\\'),
    +    (0xFF3D, '3', u']'),
    +    (0xFF3E, '3', u'^'),
    +    (0xFF3F, '3', u'_'),
    +    (0xFF40, '3', u'`'),
    +    (0xFF41, 'M', u'a'),
    +    (0xFF42, 'M', u'b'),
    +    (0xFF43, 'M', u'c'),
    +    (0xFF44, 'M', u'd'),
    +    (0xFF45, 'M', u'e'),
    +    (0xFF46, 'M', u'f'),
    +    (0xFF47, 'M', u'g'),
    +    (0xFF48, 'M', u'h'),
    +    (0xFF49, 'M', u'i'),
    +    (0xFF4A, 'M', u'j'),
    +    (0xFF4B, 'M', u'k'),
    +    (0xFF4C, 'M', u'l'),
    +    (0xFF4D, 'M', u'm'),
    +    (0xFF4E, 'M', u'n'),
    +    (0xFF4F, 'M', u'o'),
    +    (0xFF50, 'M', u'p'),
    +    (0xFF51, 'M', u'q'),
    +    (0xFF52, 'M', u'r'),
    +    (0xFF53, 'M', u's'),
    +    (0xFF54, 'M', u't'),
    +    (0xFF55, 'M', u'u'),
    +    (0xFF56, 'M', u'v'),
    +    (0xFF57, 'M', u'w'),
    +    (0xFF58, 'M', u'x'),
    +    (0xFF59, 'M', u'y'),
    +    (0xFF5A, 'M', u'z'),
    +    (0xFF5B, '3', u'{'),
    +    (0xFF5C, '3', u'|'),
    +    (0xFF5D, '3', u'}'),
    +    (0xFF5E, '3', u'~'),
    +    (0xFF5F, 'M', u'⦅'),
    +    (0xFF60, 'M', u'⦆'),
    +    (0xFF61, 'M', u'.'),
    +    (0xFF62, 'M', u'「'),
    +    (0xFF63, 'M', u'」'),
    +    (0xFF64, 'M', u'、'),
    +    (0xFF65, 'M', u'・'),
    +    (0xFF66, 'M', u'ヲ'),
    +    (0xFF67, 'M', u'ァ'),
    +    (0xFF68, 'M', u'ィ'),
    +    (0xFF69, 'M', u'ゥ'),
    +    (0xFF6A, 'M', u'ェ'),
    +    (0xFF6B, 'M', u'ォ'),
    +    (0xFF6C, 'M', u'ャ'),
    +    (0xFF6D, 'M', u'ュ'),
    +    (0xFF6E, 'M', u'ョ'),
    +    (0xFF6F, 'M', u'ッ'),
    +    (0xFF70, 'M', u'ー'),
    +    (0xFF71, 'M', u'ア'),
    +    (0xFF72, 'M', u'イ'),
    +    (0xFF73, 'M', u'ウ'),
    +    (0xFF74, 'M', u'エ'),
    +    (0xFF75, 'M', u'オ'),
    +    (0xFF76, 'M', u'カ'),
    +    (0xFF77, 'M', u'キ'),
    +    (0xFF78, 'M', u'ク'),
    +    (0xFF79, 'M', u'ケ'),
    +    (0xFF7A, 'M', u'コ'),
    +    (0xFF7B, 'M', u'サ'),
    +    (0xFF7C, 'M', u'シ'),
    +    (0xFF7D, 'M', u'ス'),
    +    (0xFF7E, 'M', u'セ'),
    +    (0xFF7F, 'M', u'ソ'),
    +    (0xFF80, 'M', u'タ'),
    +    (0xFF81, 'M', u'チ'),
    +    (0xFF82, 'M', u'ツ'),
    +    (0xFF83, 'M', u'テ'),
    +    (0xFF84, 'M', u'ト'),
    +    (0xFF85, 'M', u'ナ'),
    +    (0xFF86, 'M', u'ニ'),
    +    (0xFF87, 'M', u'ヌ'),
    +    (0xFF88, 'M', u'ネ'),
    +    ]
    +
    +def _seg_52():
    +    return [
    +    (0xFF89, 'M', u'ノ'),
    +    (0xFF8A, 'M', u'ハ'),
    +    (0xFF8B, 'M', u'ヒ'),
    +    (0xFF8C, 'M', u'フ'),
    +    (0xFF8D, 'M', u'ヘ'),
    +    (0xFF8E, 'M', u'ホ'),
    +    (0xFF8F, 'M', u'マ'),
    +    (0xFF90, 'M', u'ミ'),
    +    (0xFF91, 'M', u'ム'),
    +    (0xFF92, 'M', u'メ'),
    +    (0xFF93, 'M', u'モ'),
    +    (0xFF94, 'M', u'ヤ'),
    +    (0xFF95, 'M', u'ユ'),
    +    (0xFF96, 'M', u'ヨ'),
    +    (0xFF97, 'M', u'ラ'),
    +    (0xFF98, 'M', u'リ'),
    +    (0xFF99, 'M', u'ル'),
    +    (0xFF9A, 'M', u'レ'),
    +    (0xFF9B, 'M', u'ロ'),
    +    (0xFF9C, 'M', u'ワ'),
    +    (0xFF9D, 'M', u'ン'),
    +    (0xFF9E, 'M', u'゙'),
    +    (0xFF9F, 'M', u'゚'),
    +    (0xFFA0, 'X'),
    +    (0xFFA1, 'M', u'ᄀ'),
    +    (0xFFA2, 'M', u'ᄁ'),
    +    (0xFFA3, 'M', u'ᆪ'),
    +    (0xFFA4, 'M', u'ᄂ'),
    +    (0xFFA5, 'M', u'ᆬ'),
    +    (0xFFA6, 'M', u'ᆭ'),
    +    (0xFFA7, 'M', u'ᄃ'),
    +    (0xFFA8, 'M', u'ᄄ'),
    +    (0xFFA9, 'M', u'ᄅ'),
    +    (0xFFAA, 'M', u'ᆰ'),
    +    (0xFFAB, 'M', u'ᆱ'),
    +    (0xFFAC, 'M', u'ᆲ'),
    +    (0xFFAD, 'M', u'ᆳ'),
    +    (0xFFAE, 'M', u'ᆴ'),
    +    (0xFFAF, 'M', u'ᆵ'),
    +    (0xFFB0, 'M', u'ᄚ'),
    +    (0xFFB1, 'M', u'ᄆ'),
    +    (0xFFB2, 'M', u'ᄇ'),
    +    (0xFFB3, 'M', u'ᄈ'),
    +    (0xFFB4, 'M', u'ᄡ'),
    +    (0xFFB5, 'M', u'ᄉ'),
    +    (0xFFB6, 'M', u'ᄊ'),
    +    (0xFFB7, 'M', u'ᄋ'),
    +    (0xFFB8, 'M', u'ᄌ'),
    +    (0xFFB9, 'M', u'ᄍ'),
    +    (0xFFBA, 'M', u'ᄎ'),
    +    (0xFFBB, 'M', u'ᄏ'),
    +    (0xFFBC, 'M', u'ᄐ'),
    +    (0xFFBD, 'M', u'ᄑ'),
    +    (0xFFBE, 'M', u'ᄒ'),
    +    (0xFFBF, 'X'),
    +    (0xFFC2, 'M', u'ᅡ'),
    +    (0xFFC3, 'M', u'ᅢ'),
    +    (0xFFC4, 'M', u'ᅣ'),
    +    (0xFFC5, 'M', u'ᅤ'),
    +    (0xFFC6, 'M', u'ᅥ'),
    +    (0xFFC7, 'M', u'ᅦ'),
    +    (0xFFC8, 'X'),
    +    (0xFFCA, 'M', u'ᅧ'),
    +    (0xFFCB, 'M', u'ᅨ'),
    +    (0xFFCC, 'M', u'ᅩ'),
    +    (0xFFCD, 'M', u'ᅪ'),
    +    (0xFFCE, 'M', u'ᅫ'),
    +    (0xFFCF, 'M', u'ᅬ'),
    +    (0xFFD0, 'X'),
    +    (0xFFD2, 'M', u'ᅭ'),
    +    (0xFFD3, 'M', u'ᅮ'),
    +    (0xFFD4, 'M', u'ᅯ'),
    +    (0xFFD5, 'M', u'ᅰ'),
    +    (0xFFD6, 'M', u'ᅱ'),
    +    (0xFFD7, 'M', u'ᅲ'),
    +    (0xFFD8, 'X'),
    +    (0xFFDA, 'M', u'ᅳ'),
    +    (0xFFDB, 'M', u'ᅴ'),
    +    (0xFFDC, 'M', u'ᅵ'),
    +    (0xFFDD, 'X'),
    +    (0xFFE0, 'M', u'¢'),
    +    (0xFFE1, 'M', u'£'),
    +    (0xFFE2, 'M', u'¬'),
    +    (0xFFE3, '3', u' ̄'),
    +    (0xFFE4, 'M', u'¦'),
    +    (0xFFE5, 'M', u'¥'),
    +    (0xFFE6, 'M', u'₩'),
    +    (0xFFE7, 'X'),
    +    (0xFFE8, 'M', u'│'),
    +    (0xFFE9, 'M', u'←'),
    +    (0xFFEA, 'M', u'↑'),
    +    (0xFFEB, 'M', u'→'),
    +    (0xFFEC, 'M', u'↓'),
    +    (0xFFED, 'M', u'■'),
    +    (0xFFEE, 'M', u'○'),
    +    (0xFFEF, 'X'),
    +    (0x10000, 'V'),
    +    (0x1000C, 'X'),
    +    (0x1000D, 'V'),
    +    (0x10027, 'X'),
    +    ]
    +
    +def _seg_53():
    +    return [
    +    (0x10028, 'V'),
    +    (0x1003B, 'X'),
    +    (0x1003C, 'V'),
    +    (0x1003E, 'X'),
    +    (0x1003F, 'V'),
    +    (0x1004E, 'X'),
    +    (0x10050, 'V'),
    +    (0x1005E, 'X'),
    +    (0x10080, 'V'),
    +    (0x100FB, 'X'),
    +    (0x10100, 'V'),
    +    (0x10103, 'X'),
    +    (0x10107, 'V'),
    +    (0x10134, 'X'),
    +    (0x10137, 'V'),
    +    (0x1018F, 'X'),
    +    (0x10190, 'V'),
    +    (0x1019C, 'X'),
    +    (0x101A0, 'V'),
    +    (0x101A1, 'X'),
    +    (0x101D0, 'V'),
    +    (0x101FE, 'X'),
    +    (0x10280, 'V'),
    +    (0x1029D, 'X'),
    +    (0x102A0, 'V'),
    +    (0x102D1, 'X'),
    +    (0x102E0, 'V'),
    +    (0x102FC, 'X'),
    +    (0x10300, 'V'),
    +    (0x10324, 'X'),
    +    (0x1032D, 'V'),
    +    (0x1034B, 'X'),
    +    (0x10350, 'V'),
    +    (0x1037B, 'X'),
    +    (0x10380, 'V'),
    +    (0x1039E, 'X'),
    +    (0x1039F, 'V'),
    +    (0x103C4, 'X'),
    +    (0x103C8, 'V'),
    +    (0x103D6, 'X'),
    +    (0x10400, 'M', u'𐐨'),
    +    (0x10401, 'M', u'𐐩'),
    +    (0x10402, 'M', u'𐐪'),
    +    (0x10403, 'M', u'𐐫'),
    +    (0x10404, 'M', u'𐐬'),
    +    (0x10405, 'M', u'𐐭'),
    +    (0x10406, 'M', u'𐐮'),
    +    (0x10407, 'M', u'𐐯'),
    +    (0x10408, 'M', u'𐐰'),
    +    (0x10409, 'M', u'𐐱'),
    +    (0x1040A, 'M', u'𐐲'),
    +    (0x1040B, 'M', u'𐐳'),
    +    (0x1040C, 'M', u'𐐴'),
    +    (0x1040D, 'M', u'𐐵'),
    +    (0x1040E, 'M', u'𐐶'),
    +    (0x1040F, 'M', u'𐐷'),
    +    (0x10410, 'M', u'𐐸'),
    +    (0x10411, 'M', u'𐐹'),
    +    (0x10412, 'M', u'𐐺'),
    +    (0x10413, 'M', u'𐐻'),
    +    (0x10414, 'M', u'𐐼'),
    +    (0x10415, 'M', u'𐐽'),
    +    (0x10416, 'M', u'𐐾'),
    +    (0x10417, 'M', u'𐐿'),
    +    (0x10418, 'M', u'𐑀'),
    +    (0x10419, 'M', u'𐑁'),
    +    (0x1041A, 'M', u'𐑂'),
    +    (0x1041B, 'M', u'𐑃'),
    +    (0x1041C, 'M', u'𐑄'),
    +    (0x1041D, 'M', u'𐑅'),
    +    (0x1041E, 'M', u'𐑆'),
    +    (0x1041F, 'M', u'𐑇'),
    +    (0x10420, 'M', u'𐑈'),
    +    (0x10421, 'M', u'𐑉'),
    +    (0x10422, 'M', u'𐑊'),
    +    (0x10423, 'M', u'𐑋'),
    +    (0x10424, 'M', u'𐑌'),
    +    (0x10425, 'M', u'𐑍'),
    +    (0x10426, 'M', u'𐑎'),
    +    (0x10427, 'M', u'𐑏'),
    +    (0x10428, 'V'),
    +    (0x1049E, 'X'),
    +    (0x104A0, 'V'),
    +    (0x104AA, 'X'),
    +    (0x104B0, 'M', u'𐓘'),
    +    (0x104B1, 'M', u'𐓙'),
    +    (0x104B2, 'M', u'𐓚'),
    +    (0x104B3, 'M', u'𐓛'),
    +    (0x104B4, 'M', u'𐓜'),
    +    (0x104B5, 'M', u'𐓝'),
    +    (0x104B6, 'M', u'𐓞'),
    +    (0x104B7, 'M', u'𐓟'),
    +    (0x104B8, 'M', u'𐓠'),
    +    (0x104B9, 'M', u'𐓡'),
    +    (0x104BA, 'M', u'𐓢'),
    +    (0x104BB, 'M', u'𐓣'),
    +    (0x104BC, 'M', u'𐓤'),
    +    (0x104BD, 'M', u'𐓥'),
    +    (0x104BE, 'M', u'𐓦'),
    +    (0x104BF, 'M', u'𐓧'),
    +    ]
    +
    +def _seg_54():
    +    return [
    +    (0x104C0, 'M', u'𐓨'),
    +    (0x104C1, 'M', u'𐓩'),
    +    (0x104C2, 'M', u'𐓪'),
    +    (0x104C3, 'M', u'𐓫'),
    +    (0x104C4, 'M', u'𐓬'),
    +    (0x104C5, 'M', u'𐓭'),
    +    (0x104C6, 'M', u'𐓮'),
    +    (0x104C7, 'M', u'𐓯'),
    +    (0x104C8, 'M', u'𐓰'),
    +    (0x104C9, 'M', u'𐓱'),
    +    (0x104CA, 'M', u'𐓲'),
    +    (0x104CB, 'M', u'𐓳'),
    +    (0x104CC, 'M', u'𐓴'),
    +    (0x104CD, 'M', u'𐓵'),
    +    (0x104CE, 'M', u'𐓶'),
    +    (0x104CF, 'M', u'𐓷'),
    +    (0x104D0, 'M', u'𐓸'),
    +    (0x104D1, 'M', u'𐓹'),
    +    (0x104D2, 'M', u'𐓺'),
    +    (0x104D3, 'M', u'𐓻'),
    +    (0x104D4, 'X'),
    +    (0x104D8, 'V'),
    +    (0x104FC, 'X'),
    +    (0x10500, 'V'),
    +    (0x10528, 'X'),
    +    (0x10530, 'V'),
    +    (0x10564, 'X'),
    +    (0x1056F, 'V'),
    +    (0x10570, 'X'),
    +    (0x10600, 'V'),
    +    (0x10737, 'X'),
    +    (0x10740, 'V'),
    +    (0x10756, 'X'),
    +    (0x10760, 'V'),
    +    (0x10768, 'X'),
    +    (0x10800, 'V'),
    +    (0x10806, 'X'),
    +    (0x10808, 'V'),
    +    (0x10809, 'X'),
    +    (0x1080A, 'V'),
    +    (0x10836, 'X'),
    +    (0x10837, 'V'),
    +    (0x10839, 'X'),
    +    (0x1083C, 'V'),
    +    (0x1083D, 'X'),
    +    (0x1083F, 'V'),
    +    (0x10856, 'X'),
    +    (0x10857, 'V'),
    +    (0x1089F, 'X'),
    +    (0x108A7, 'V'),
    +    (0x108B0, 'X'),
    +    (0x108E0, 'V'),
    +    (0x108F3, 'X'),
    +    (0x108F4, 'V'),
    +    (0x108F6, 'X'),
    +    (0x108FB, 'V'),
    +    (0x1091C, 'X'),
    +    (0x1091F, 'V'),
    +    (0x1093A, 'X'),
    +    (0x1093F, 'V'),
    +    (0x10940, 'X'),
    +    (0x10980, 'V'),
    +    (0x109B8, 'X'),
    +    (0x109BC, 'V'),
    +    (0x109D0, 'X'),
    +    (0x109D2, 'V'),
    +    (0x10A04, 'X'),
    +    (0x10A05, 'V'),
    +    (0x10A07, 'X'),
    +    (0x10A0C, 'V'),
    +    (0x10A14, 'X'),
    +    (0x10A15, 'V'),
    +    (0x10A18, 'X'),
    +    (0x10A19, 'V'),
    +    (0x10A36, 'X'),
    +    (0x10A38, 'V'),
    +    (0x10A3B, 'X'),
    +    (0x10A3F, 'V'),
    +    (0x10A49, 'X'),
    +    (0x10A50, 'V'),
    +    (0x10A59, 'X'),
    +    (0x10A60, 'V'),
    +    (0x10AA0, 'X'),
    +    (0x10AC0, 'V'),
    +    (0x10AE7, 'X'),
    +    (0x10AEB, 'V'),
    +    (0x10AF7, 'X'),
    +    (0x10B00, 'V'),
    +    (0x10B36, 'X'),
    +    (0x10B39, 'V'),
    +    (0x10B56, 'X'),
    +    (0x10B58, 'V'),
    +    (0x10B73, 'X'),
    +    (0x10B78, 'V'),
    +    (0x10B92, 'X'),
    +    (0x10B99, 'V'),
    +    (0x10B9D, 'X'),
    +    (0x10BA9, 'V'),
    +    (0x10BB0, 'X'),
    +    (0x10C00, 'V'),
    +    ]
    +
    +def _seg_55():
    +    return [
    +    (0x10C49, 'X'),
    +    (0x10C80, 'M', u'𐳀'),
    +    (0x10C81, 'M', u'𐳁'),
    +    (0x10C82, 'M', u'𐳂'),
    +    (0x10C83, 'M', u'𐳃'),
    +    (0x10C84, 'M', u'𐳄'),
    +    (0x10C85, 'M', u'𐳅'),
    +    (0x10C86, 'M', u'𐳆'),
    +    (0x10C87, 'M', u'𐳇'),
    +    (0x10C88, 'M', u'𐳈'),
    +    (0x10C89, 'M', u'𐳉'),
    +    (0x10C8A, 'M', u'𐳊'),
    +    (0x10C8B, 'M', u'𐳋'),
    +    (0x10C8C, 'M', u'𐳌'),
    +    (0x10C8D, 'M', u'𐳍'),
    +    (0x10C8E, 'M', u'𐳎'),
    +    (0x10C8F, 'M', u'𐳏'),
    +    (0x10C90, 'M', u'𐳐'),
    +    (0x10C91, 'M', u'𐳑'),
    +    (0x10C92, 'M', u'𐳒'),
    +    (0x10C93, 'M', u'𐳓'),
    +    (0x10C94, 'M', u'𐳔'),
    +    (0x10C95, 'M', u'𐳕'),
    +    (0x10C96, 'M', u'𐳖'),
    +    (0x10C97, 'M', u'𐳗'),
    +    (0x10C98, 'M', u'𐳘'),
    +    (0x10C99, 'M', u'𐳙'),
    +    (0x10C9A, 'M', u'𐳚'),
    +    (0x10C9B, 'M', u'𐳛'),
    +    (0x10C9C, 'M', u'𐳜'),
    +    (0x10C9D, 'M', u'𐳝'),
    +    (0x10C9E, 'M', u'𐳞'),
    +    (0x10C9F, 'M', u'𐳟'),
    +    (0x10CA0, 'M', u'𐳠'),
    +    (0x10CA1, 'M', u'𐳡'),
    +    (0x10CA2, 'M', u'𐳢'),
    +    (0x10CA3, 'M', u'𐳣'),
    +    (0x10CA4, 'M', u'𐳤'),
    +    (0x10CA5, 'M', u'𐳥'),
    +    (0x10CA6, 'M', u'𐳦'),
    +    (0x10CA7, 'M', u'𐳧'),
    +    (0x10CA8, 'M', u'𐳨'),
    +    (0x10CA9, 'M', u'𐳩'),
    +    (0x10CAA, 'M', u'𐳪'),
    +    (0x10CAB, 'M', u'𐳫'),
    +    (0x10CAC, 'M', u'𐳬'),
    +    (0x10CAD, 'M', u'𐳭'),
    +    (0x10CAE, 'M', u'𐳮'),
    +    (0x10CAF, 'M', u'𐳯'),
    +    (0x10CB0, 'M', u'𐳰'),
    +    (0x10CB1, 'M', u'𐳱'),
    +    (0x10CB2, 'M', u'𐳲'),
    +    (0x10CB3, 'X'),
    +    (0x10CC0, 'V'),
    +    (0x10CF3, 'X'),
    +    (0x10CFA, 'V'),
    +    (0x10D28, 'X'),
    +    (0x10D30, 'V'),
    +    (0x10D3A, 'X'),
    +    (0x10E60, 'V'),
    +    (0x10E7F, 'X'),
    +    (0x10F00, 'V'),
    +    (0x10F28, 'X'),
    +    (0x10F30, 'V'),
    +    (0x10F5A, 'X'),
    +    (0x10FE0, 'V'),
    +    (0x10FF7, 'X'),
    +    (0x11000, 'V'),
    +    (0x1104E, 'X'),
    +    (0x11052, 'V'),
    +    (0x11070, 'X'),
    +    (0x1107F, 'V'),
    +    (0x110BD, 'X'),
    +    (0x110BE, 'V'),
    +    (0x110C2, 'X'),
    +    (0x110D0, 'V'),
    +    (0x110E9, 'X'),
    +    (0x110F0, 'V'),
    +    (0x110FA, 'X'),
    +    (0x11100, 'V'),
    +    (0x11135, 'X'),
    +    (0x11136, 'V'),
    +    (0x11147, 'X'),
    +    (0x11150, 'V'),
    +    (0x11177, 'X'),
    +    (0x11180, 'V'),
    +    (0x111CE, 'X'),
    +    (0x111D0, 'V'),
    +    (0x111E0, 'X'),
    +    (0x111E1, 'V'),
    +    (0x111F5, 'X'),
    +    (0x11200, 'V'),
    +    (0x11212, 'X'),
    +    (0x11213, 'V'),
    +    (0x1123F, 'X'),
    +    (0x11280, 'V'),
    +    (0x11287, 'X'),
    +    (0x11288, 'V'),
    +    (0x11289, 'X'),
    +    (0x1128A, 'V'),
    +    ]
    +
    +def _seg_56():
    +    return [
    +    (0x1128E, 'X'),
    +    (0x1128F, 'V'),
    +    (0x1129E, 'X'),
    +    (0x1129F, 'V'),
    +    (0x112AA, 'X'),
    +    (0x112B0, 'V'),
    +    (0x112EB, 'X'),
    +    (0x112F0, 'V'),
    +    (0x112FA, 'X'),
    +    (0x11300, 'V'),
    +    (0x11304, 'X'),
    +    (0x11305, 'V'),
    +    (0x1130D, 'X'),
    +    (0x1130F, 'V'),
    +    (0x11311, 'X'),
    +    (0x11313, 'V'),
    +    (0x11329, 'X'),
    +    (0x1132A, 'V'),
    +    (0x11331, 'X'),
    +    (0x11332, 'V'),
    +    (0x11334, 'X'),
    +    (0x11335, 'V'),
    +    (0x1133A, 'X'),
    +    (0x1133B, 'V'),
    +    (0x11345, 'X'),
    +    (0x11347, 'V'),
    +    (0x11349, 'X'),
    +    (0x1134B, 'V'),
    +    (0x1134E, 'X'),
    +    (0x11350, 'V'),
    +    (0x11351, 'X'),
    +    (0x11357, 'V'),
    +    (0x11358, 'X'),
    +    (0x1135D, 'V'),
    +    (0x11364, 'X'),
    +    (0x11366, 'V'),
    +    (0x1136D, 'X'),
    +    (0x11370, 'V'),
    +    (0x11375, 'X'),
    +    (0x11400, 'V'),
    +    (0x1145A, 'X'),
    +    (0x1145B, 'V'),
    +    (0x1145C, 'X'),
    +    (0x1145D, 'V'),
    +    (0x11460, 'X'),
    +    (0x11480, 'V'),
    +    (0x114C8, 'X'),
    +    (0x114D0, 'V'),
    +    (0x114DA, 'X'),
    +    (0x11580, 'V'),
    +    (0x115B6, 'X'),
    +    (0x115B8, 'V'),
    +    (0x115DE, 'X'),
    +    (0x11600, 'V'),
    +    (0x11645, 'X'),
    +    (0x11650, 'V'),
    +    (0x1165A, 'X'),
    +    (0x11660, 'V'),
    +    (0x1166D, 'X'),
    +    (0x11680, 'V'),
    +    (0x116B9, 'X'),
    +    (0x116C0, 'V'),
    +    (0x116CA, 'X'),
    +    (0x11700, 'V'),
    +    (0x1171B, 'X'),
    +    (0x1171D, 'V'),
    +    (0x1172C, 'X'),
    +    (0x11730, 'V'),
    +    (0x11740, 'X'),
    +    (0x11800, 'V'),
    +    (0x1183C, 'X'),
    +    (0x118A0, 'M', u'𑣀'),
    +    (0x118A1, 'M', u'𑣁'),
    +    (0x118A2, 'M', u'𑣂'),
    +    (0x118A3, 'M', u'𑣃'),
    +    (0x118A4, 'M', u'𑣄'),
    +    (0x118A5, 'M', u'𑣅'),
    +    (0x118A6, 'M', u'𑣆'),
    +    (0x118A7, 'M', u'𑣇'),
    +    (0x118A8, 'M', u'𑣈'),
    +    (0x118A9, 'M', u'𑣉'),
    +    (0x118AA, 'M', u'𑣊'),
    +    (0x118AB, 'M', u'𑣋'),
    +    (0x118AC, 'M', u'𑣌'),
    +    (0x118AD, 'M', u'𑣍'),
    +    (0x118AE, 'M', u'𑣎'),
    +    (0x118AF, 'M', u'𑣏'),
    +    (0x118B0, 'M', u'𑣐'),
    +    (0x118B1, 'M', u'𑣑'),
    +    (0x118B2, 'M', u'𑣒'),
    +    (0x118B3, 'M', u'𑣓'),
    +    (0x118B4, 'M', u'𑣔'),
    +    (0x118B5, 'M', u'𑣕'),
    +    (0x118B6, 'M', u'𑣖'),
    +    (0x118B7, 'M', u'𑣗'),
    +    (0x118B8, 'M', u'𑣘'),
    +    (0x118B9, 'M', u'𑣙'),
    +    (0x118BA, 'M', u'𑣚'),
    +    (0x118BB, 'M', u'𑣛'),
    +    (0x118BC, 'M', u'𑣜'),
    +    ]
    +
    +def _seg_57():
    +    return [
    +    (0x118BD, 'M', u'𑣝'),
    +    (0x118BE, 'M', u'𑣞'),
    +    (0x118BF, 'M', u'𑣟'),
    +    (0x118C0, 'V'),
    +    (0x118F3, 'X'),
    +    (0x118FF, 'V'),
    +    (0x11900, 'X'),
    +    (0x119A0, 'V'),
    +    (0x119A8, 'X'),
    +    (0x119AA, 'V'),
    +    (0x119D8, 'X'),
    +    (0x119DA, 'V'),
    +    (0x119E5, 'X'),
    +    (0x11A00, 'V'),
    +    (0x11A48, 'X'),
    +    (0x11A50, 'V'),
    +    (0x11AA3, 'X'),
    +    (0x11AC0, 'V'),
    +    (0x11AF9, 'X'),
    +    (0x11C00, 'V'),
    +    (0x11C09, 'X'),
    +    (0x11C0A, 'V'),
    +    (0x11C37, 'X'),
    +    (0x11C38, 'V'),
    +    (0x11C46, 'X'),
    +    (0x11C50, 'V'),
    +    (0x11C6D, 'X'),
    +    (0x11C70, 'V'),
    +    (0x11C90, 'X'),
    +    (0x11C92, 'V'),
    +    (0x11CA8, 'X'),
    +    (0x11CA9, 'V'),
    +    (0x11CB7, 'X'),
    +    (0x11D00, 'V'),
    +    (0x11D07, 'X'),
    +    (0x11D08, 'V'),
    +    (0x11D0A, 'X'),
    +    (0x11D0B, 'V'),
    +    (0x11D37, 'X'),
    +    (0x11D3A, 'V'),
    +    (0x11D3B, 'X'),
    +    (0x11D3C, 'V'),
    +    (0x11D3E, 'X'),
    +    (0x11D3F, 'V'),
    +    (0x11D48, 'X'),
    +    (0x11D50, 'V'),
    +    (0x11D5A, 'X'),
    +    (0x11D60, 'V'),
    +    (0x11D66, 'X'),
    +    (0x11D67, 'V'),
    +    (0x11D69, 'X'),
    +    (0x11D6A, 'V'),
    +    (0x11D8F, 'X'),
    +    (0x11D90, 'V'),
    +    (0x11D92, 'X'),
    +    (0x11D93, 'V'),
    +    (0x11D99, 'X'),
    +    (0x11DA0, 'V'),
    +    (0x11DAA, 'X'),
    +    (0x11EE0, 'V'),
    +    (0x11EF9, 'X'),
    +    (0x11FC0, 'V'),
    +    (0x11FF2, 'X'),
    +    (0x11FFF, 'V'),
    +    (0x1239A, 'X'),
    +    (0x12400, 'V'),
    +    (0x1246F, 'X'),
    +    (0x12470, 'V'),
    +    (0x12475, 'X'),
    +    (0x12480, 'V'),
    +    (0x12544, 'X'),
    +    (0x13000, 'V'),
    +    (0x1342F, 'X'),
    +    (0x14400, 'V'),
    +    (0x14647, 'X'),
    +    (0x16800, 'V'),
    +    (0x16A39, 'X'),
    +    (0x16A40, 'V'),
    +    (0x16A5F, 'X'),
    +    (0x16A60, 'V'),
    +    (0x16A6A, 'X'),
    +    (0x16A6E, 'V'),
    +    (0x16A70, 'X'),
    +    (0x16AD0, 'V'),
    +    (0x16AEE, 'X'),
    +    (0x16AF0, 'V'),
    +    (0x16AF6, 'X'),
    +    (0x16B00, 'V'),
    +    (0x16B46, 'X'),
    +    (0x16B50, 'V'),
    +    (0x16B5A, 'X'),
    +    (0x16B5B, 'V'),
    +    (0x16B62, 'X'),
    +    (0x16B63, 'V'),
    +    (0x16B78, 'X'),
    +    (0x16B7D, 'V'),
    +    (0x16B90, 'X'),
    +    (0x16E40, 'M', u'𖹠'),
    +    (0x16E41, 'M', u'𖹡'),
    +    (0x16E42, 'M', u'𖹢'),
    +    ]
    +
    +def _seg_58():
    +    return [
    +    (0x16E43, 'M', u'𖹣'),
    +    (0x16E44, 'M', u'𖹤'),
    +    (0x16E45, 'M', u'𖹥'),
    +    (0x16E46, 'M', u'𖹦'),
    +    (0x16E47, 'M', u'𖹧'),
    +    (0x16E48, 'M', u'𖹨'),
    +    (0x16E49, 'M', u'𖹩'),
    +    (0x16E4A, 'M', u'𖹪'),
    +    (0x16E4B, 'M', u'𖹫'),
    +    (0x16E4C, 'M', u'𖹬'),
    +    (0x16E4D, 'M', u'𖹭'),
    +    (0x16E4E, 'M', u'𖹮'),
    +    (0x16E4F, 'M', u'𖹯'),
    +    (0x16E50, 'M', u'𖹰'),
    +    (0x16E51, 'M', u'𖹱'),
    +    (0x16E52, 'M', u'𖹲'),
    +    (0x16E53, 'M', u'𖹳'),
    +    (0x16E54, 'M', u'𖹴'),
    +    (0x16E55, 'M', u'𖹵'),
    +    (0x16E56, 'M', u'𖹶'),
    +    (0x16E57, 'M', u'𖹷'),
    +    (0x16E58, 'M', u'𖹸'),
    +    (0x16E59, 'M', u'𖹹'),
    +    (0x16E5A, 'M', u'𖹺'),
    +    (0x16E5B, 'M', u'𖹻'),
    +    (0x16E5C, 'M', u'𖹼'),
    +    (0x16E5D, 'M', u'𖹽'),
    +    (0x16E5E, 'M', u'𖹾'),
    +    (0x16E5F, 'M', u'𖹿'),
    +    (0x16E60, 'V'),
    +    (0x16E9B, 'X'),
    +    (0x16F00, 'V'),
    +    (0x16F4B, 'X'),
    +    (0x16F4F, 'V'),
    +    (0x16F88, 'X'),
    +    (0x16F8F, 'V'),
    +    (0x16FA0, 'X'),
    +    (0x16FE0, 'V'),
    +    (0x16FE4, 'X'),
    +    (0x17000, 'V'),
    +    (0x187F8, 'X'),
    +    (0x18800, 'V'),
    +    (0x18AF3, 'X'),
    +    (0x1B000, 'V'),
    +    (0x1B11F, 'X'),
    +    (0x1B150, 'V'),
    +    (0x1B153, 'X'),
    +    (0x1B164, 'V'),
    +    (0x1B168, 'X'),
    +    (0x1B170, 'V'),
    +    (0x1B2FC, 'X'),
    +    (0x1BC00, 'V'),
    +    (0x1BC6B, 'X'),
    +    (0x1BC70, 'V'),
    +    (0x1BC7D, 'X'),
    +    (0x1BC80, 'V'),
    +    (0x1BC89, 'X'),
    +    (0x1BC90, 'V'),
    +    (0x1BC9A, 'X'),
    +    (0x1BC9C, 'V'),
    +    (0x1BCA0, 'I'),
    +    (0x1BCA4, 'X'),
    +    (0x1D000, 'V'),
    +    (0x1D0F6, 'X'),
    +    (0x1D100, 'V'),
    +    (0x1D127, 'X'),
    +    (0x1D129, 'V'),
    +    (0x1D15E, 'M', u'𝅗𝅥'),
    +    (0x1D15F, 'M', u'𝅘𝅥'),
    +    (0x1D160, 'M', u'𝅘𝅥𝅮'),
    +    (0x1D161, 'M', u'𝅘𝅥𝅯'),
    +    (0x1D162, 'M', u'𝅘𝅥𝅰'),
    +    (0x1D163, 'M', u'𝅘𝅥𝅱'),
    +    (0x1D164, 'M', u'𝅘𝅥𝅲'),
    +    (0x1D165, 'V'),
    +    (0x1D173, 'X'),
    +    (0x1D17B, 'V'),
    +    (0x1D1BB, 'M', u'𝆹𝅥'),
    +    (0x1D1BC, 'M', u'𝆺𝅥'),
    +    (0x1D1BD, 'M', u'𝆹𝅥𝅮'),
    +    (0x1D1BE, 'M', u'𝆺𝅥𝅮'),
    +    (0x1D1BF, 'M', u'𝆹𝅥𝅯'),
    +    (0x1D1C0, 'M', u'𝆺𝅥𝅯'),
    +    (0x1D1C1, 'V'),
    +    (0x1D1E9, 'X'),
    +    (0x1D200, 'V'),
    +    (0x1D246, 'X'),
    +    (0x1D2E0, 'V'),
    +    (0x1D2F4, 'X'),
    +    (0x1D300, 'V'),
    +    (0x1D357, 'X'),
    +    (0x1D360, 'V'),
    +    (0x1D379, 'X'),
    +    (0x1D400, 'M', u'a'),
    +    (0x1D401, 'M', u'b'),
    +    (0x1D402, 'M', u'c'),
    +    (0x1D403, 'M', u'd'),
    +    (0x1D404, 'M', u'e'),
    +    (0x1D405, 'M', u'f'),
    +    (0x1D406, 'M', u'g'),
    +    ]
    +
    +def _seg_59():
    +    return [
    +    (0x1D407, 'M', u'h'),
    +    (0x1D408, 'M', u'i'),
    +    (0x1D409, 'M', u'j'),
    +    (0x1D40A, 'M', u'k'),
    +    (0x1D40B, 'M', u'l'),
    +    (0x1D40C, 'M', u'm'),
    +    (0x1D40D, 'M', u'n'),
    +    (0x1D40E, 'M', u'o'),
    +    (0x1D40F, 'M', u'p'),
    +    (0x1D410, 'M', u'q'),
    +    (0x1D411, 'M', u'r'),
    +    (0x1D412, 'M', u's'),
    +    (0x1D413, 'M', u't'),
    +    (0x1D414, 'M', u'u'),
    +    (0x1D415, 'M', u'v'),
    +    (0x1D416, 'M', u'w'),
    +    (0x1D417, 'M', u'x'),
    +    (0x1D418, 'M', u'y'),
    +    (0x1D419, 'M', u'z'),
    +    (0x1D41A, 'M', u'a'),
    +    (0x1D41B, 'M', u'b'),
    +    (0x1D41C, 'M', u'c'),
    +    (0x1D41D, 'M', u'd'),
    +    (0x1D41E, 'M', u'e'),
    +    (0x1D41F, 'M', u'f'),
    +    (0x1D420, 'M', u'g'),
    +    (0x1D421, 'M', u'h'),
    +    (0x1D422, 'M', u'i'),
    +    (0x1D423, 'M', u'j'),
    +    (0x1D424, 'M', u'k'),
    +    (0x1D425, 'M', u'l'),
    +    (0x1D426, 'M', u'm'),
    +    (0x1D427, 'M', u'n'),
    +    (0x1D428, 'M', u'o'),
    +    (0x1D429, 'M', u'p'),
    +    (0x1D42A, 'M', u'q'),
    +    (0x1D42B, 'M', u'r'),
    +    (0x1D42C, 'M', u's'),
    +    (0x1D42D, 'M', u't'),
    +    (0x1D42E, 'M', u'u'),
    +    (0x1D42F, 'M', u'v'),
    +    (0x1D430, 'M', u'w'),
    +    (0x1D431, 'M', u'x'),
    +    (0x1D432, 'M', u'y'),
    +    (0x1D433, 'M', u'z'),
    +    (0x1D434, 'M', u'a'),
    +    (0x1D435, 'M', u'b'),
    +    (0x1D436, 'M', u'c'),
    +    (0x1D437, 'M', u'd'),
    +    (0x1D438, 'M', u'e'),
    +    (0x1D439, 'M', u'f'),
    +    (0x1D43A, 'M', u'g'),
    +    (0x1D43B, 'M', u'h'),
    +    (0x1D43C, 'M', u'i'),
    +    (0x1D43D, 'M', u'j'),
    +    (0x1D43E, 'M', u'k'),
    +    (0x1D43F, 'M', u'l'),
    +    (0x1D440, 'M', u'm'),
    +    (0x1D441, 'M', u'n'),
    +    (0x1D442, 'M', u'o'),
    +    (0x1D443, 'M', u'p'),
    +    (0x1D444, 'M', u'q'),
    +    (0x1D445, 'M', u'r'),
    +    (0x1D446, 'M', u's'),
    +    (0x1D447, 'M', u't'),
    +    (0x1D448, 'M', u'u'),
    +    (0x1D449, 'M', u'v'),
    +    (0x1D44A, 'M', u'w'),
    +    (0x1D44B, 'M', u'x'),
    +    (0x1D44C, 'M', u'y'),
    +    (0x1D44D, 'M', u'z'),
    +    (0x1D44E, 'M', u'a'),
    +    (0x1D44F, 'M', u'b'),
    +    (0x1D450, 'M', u'c'),
    +    (0x1D451, 'M', u'd'),
    +    (0x1D452, 'M', u'e'),
    +    (0x1D453, 'M', u'f'),
    +    (0x1D454, 'M', u'g'),
    +    (0x1D455, 'X'),
    +    (0x1D456, 'M', u'i'),
    +    (0x1D457, 'M', u'j'),
    +    (0x1D458, 'M', u'k'),
    +    (0x1D459, 'M', u'l'),
    +    (0x1D45A, 'M', u'm'),
    +    (0x1D45B, 'M', u'n'),
    +    (0x1D45C, 'M', u'o'),
    +    (0x1D45D, 'M', u'p'),
    +    (0x1D45E, 'M', u'q'),
    +    (0x1D45F, 'M', u'r'),
    +    (0x1D460, 'M', u's'),
    +    (0x1D461, 'M', u't'),
    +    (0x1D462, 'M', u'u'),
    +    (0x1D463, 'M', u'v'),
    +    (0x1D464, 'M', u'w'),
    +    (0x1D465, 'M', u'x'),
    +    (0x1D466, 'M', u'y'),
    +    (0x1D467, 'M', u'z'),
    +    (0x1D468, 'M', u'a'),
    +    (0x1D469, 'M', u'b'),
    +    (0x1D46A, 'M', u'c'),
    +    ]
    +
    +def _seg_60():
    +    return [
    +    (0x1D46B, 'M', u'd'),
    +    (0x1D46C, 'M', u'e'),
    +    (0x1D46D, 'M', u'f'),
    +    (0x1D46E, 'M', u'g'),
    +    (0x1D46F, 'M', u'h'),
    +    (0x1D470, 'M', u'i'),
    +    (0x1D471, 'M', u'j'),
    +    (0x1D472, 'M', u'k'),
    +    (0x1D473, 'M', u'l'),
    +    (0x1D474, 'M', u'm'),
    +    (0x1D475, 'M', u'n'),
    +    (0x1D476, 'M', u'o'),
    +    (0x1D477, 'M', u'p'),
    +    (0x1D478, 'M', u'q'),
    +    (0x1D479, 'M', u'r'),
    +    (0x1D47A, 'M', u's'),
    +    (0x1D47B, 'M', u't'),
    +    (0x1D47C, 'M', u'u'),
    +    (0x1D47D, 'M', u'v'),
    +    (0x1D47E, 'M', u'w'),
    +    (0x1D47F, 'M', u'x'),
    +    (0x1D480, 'M', u'y'),
    +    (0x1D481, 'M', u'z'),
    +    (0x1D482, 'M', u'a'),
    +    (0x1D483, 'M', u'b'),
    +    (0x1D484, 'M', u'c'),
    +    (0x1D485, 'M', u'd'),
    +    (0x1D486, 'M', u'e'),
    +    (0x1D487, 'M', u'f'),
    +    (0x1D488, 'M', u'g'),
    +    (0x1D489, 'M', u'h'),
    +    (0x1D48A, 'M', u'i'),
    +    (0x1D48B, 'M', u'j'),
    +    (0x1D48C, 'M', u'k'),
    +    (0x1D48D, 'M', u'l'),
    +    (0x1D48E, 'M', u'm'),
    +    (0x1D48F, 'M', u'n'),
    +    (0x1D490, 'M', u'o'),
    +    (0x1D491, 'M', u'p'),
    +    (0x1D492, 'M', u'q'),
    +    (0x1D493, 'M', u'r'),
    +    (0x1D494, 'M', u's'),
    +    (0x1D495, 'M', u't'),
    +    (0x1D496, 'M', u'u'),
    +    (0x1D497, 'M', u'v'),
    +    (0x1D498, 'M', u'w'),
    +    (0x1D499, 'M', u'x'),
    +    (0x1D49A, 'M', u'y'),
    +    (0x1D49B, 'M', u'z'),
    +    (0x1D49C, 'M', u'a'),
    +    (0x1D49D, 'X'),
    +    (0x1D49E, 'M', u'c'),
    +    (0x1D49F, 'M', u'd'),
    +    (0x1D4A0, 'X'),
    +    (0x1D4A2, 'M', u'g'),
    +    (0x1D4A3, 'X'),
    +    (0x1D4A5, 'M', u'j'),
    +    (0x1D4A6, 'M', u'k'),
    +    (0x1D4A7, 'X'),
    +    (0x1D4A9, 'M', u'n'),
    +    (0x1D4AA, 'M', u'o'),
    +    (0x1D4AB, 'M', u'p'),
    +    (0x1D4AC, 'M', u'q'),
    +    (0x1D4AD, 'X'),
    +    (0x1D4AE, 'M', u's'),
    +    (0x1D4AF, 'M', u't'),
    +    (0x1D4B0, 'M', u'u'),
    +    (0x1D4B1, 'M', u'v'),
    +    (0x1D4B2, 'M', u'w'),
    +    (0x1D4B3, 'M', u'x'),
    +    (0x1D4B4, 'M', u'y'),
    +    (0x1D4B5, 'M', u'z'),
    +    (0x1D4B6, 'M', u'a'),
    +    (0x1D4B7, 'M', u'b'),
    +    (0x1D4B8, 'M', u'c'),
    +    (0x1D4B9, 'M', u'd'),
    +    (0x1D4BA, 'X'),
    +    (0x1D4BB, 'M', u'f'),
    +    (0x1D4BC, 'X'),
    +    (0x1D4BD, 'M', u'h'),
    +    (0x1D4BE, 'M', u'i'),
    +    (0x1D4BF, 'M', u'j'),
    +    (0x1D4C0, 'M', u'k'),
    +    (0x1D4C1, 'M', u'l'),
    +    (0x1D4C2, 'M', u'm'),
    +    (0x1D4C3, 'M', u'n'),
    +    (0x1D4C4, 'X'),
    +    (0x1D4C5, 'M', u'p'),
    +    (0x1D4C6, 'M', u'q'),
    +    (0x1D4C7, 'M', u'r'),
    +    (0x1D4C8, 'M', u's'),
    +    (0x1D4C9, 'M', u't'),
    +    (0x1D4CA, 'M', u'u'),
    +    (0x1D4CB, 'M', u'v'),
    +    (0x1D4CC, 'M', u'w'),
    +    (0x1D4CD, 'M', u'x'),
    +    (0x1D4CE, 'M', u'y'),
    +    (0x1D4CF, 'M', u'z'),
    +    (0x1D4D0, 'M', u'a'),
    +    (0x1D4D1, 'M', u'b'),
    +    ]
    +
    +def _seg_61():
    +    return [
    +    (0x1D4D2, 'M', u'c'),
    +    (0x1D4D3, 'M', u'd'),
    +    (0x1D4D4, 'M', u'e'),
    +    (0x1D4D5, 'M', u'f'),
    +    (0x1D4D6, 'M', u'g'),
    +    (0x1D4D7, 'M', u'h'),
    +    (0x1D4D8, 'M', u'i'),
    +    (0x1D4D9, 'M', u'j'),
    +    (0x1D4DA, 'M', u'k'),
    +    (0x1D4DB, 'M', u'l'),
    +    (0x1D4DC, 'M', u'm'),
    +    (0x1D4DD, 'M', u'n'),
    +    (0x1D4DE, 'M', u'o'),
    +    (0x1D4DF, 'M', u'p'),
    +    (0x1D4E0, 'M', u'q'),
    +    (0x1D4E1, 'M', u'r'),
    +    (0x1D4E2, 'M', u's'),
    +    (0x1D4E3, 'M', u't'),
    +    (0x1D4E4, 'M', u'u'),
    +    (0x1D4E5, 'M', u'v'),
    +    (0x1D4E6, 'M', u'w'),
    +    (0x1D4E7, 'M', u'x'),
    +    (0x1D4E8, 'M', u'y'),
    +    (0x1D4E9, 'M', u'z'),
    +    (0x1D4EA, 'M', u'a'),
    +    (0x1D4EB, 'M', u'b'),
    +    (0x1D4EC, 'M', u'c'),
    +    (0x1D4ED, 'M', u'd'),
    +    (0x1D4EE, 'M', u'e'),
    +    (0x1D4EF, 'M', u'f'),
    +    (0x1D4F0, 'M', u'g'),
    +    (0x1D4F1, 'M', u'h'),
    +    (0x1D4F2, 'M', u'i'),
    +    (0x1D4F3, 'M', u'j'),
    +    (0x1D4F4, 'M', u'k'),
    +    (0x1D4F5, 'M', u'l'),
    +    (0x1D4F6, 'M', u'm'),
    +    (0x1D4F7, 'M', u'n'),
    +    (0x1D4F8, 'M', u'o'),
    +    (0x1D4F9, 'M', u'p'),
    +    (0x1D4FA, 'M', u'q'),
    +    (0x1D4FB, 'M', u'r'),
    +    (0x1D4FC, 'M', u's'),
    +    (0x1D4FD, 'M', u't'),
    +    (0x1D4FE, 'M', u'u'),
    +    (0x1D4FF, 'M', u'v'),
    +    (0x1D500, 'M', u'w'),
    +    (0x1D501, 'M', u'x'),
    +    (0x1D502, 'M', u'y'),
    +    (0x1D503, 'M', u'z'),
    +    (0x1D504, 'M', u'a'),
    +    (0x1D505, 'M', u'b'),
    +    (0x1D506, 'X'),
    +    (0x1D507, 'M', u'd'),
    +    (0x1D508, 'M', u'e'),
    +    (0x1D509, 'M', u'f'),
    +    (0x1D50A, 'M', u'g'),
    +    (0x1D50B, 'X'),
    +    (0x1D50D, 'M', u'j'),
    +    (0x1D50E, 'M', u'k'),
    +    (0x1D50F, 'M', u'l'),
    +    (0x1D510, 'M', u'm'),
    +    (0x1D511, 'M', u'n'),
    +    (0x1D512, 'M', u'o'),
    +    (0x1D513, 'M', u'p'),
    +    (0x1D514, 'M', u'q'),
    +    (0x1D515, 'X'),
    +    (0x1D516, 'M', u's'),
    +    (0x1D517, 'M', u't'),
    +    (0x1D518, 'M', u'u'),
    +    (0x1D519, 'M', u'v'),
    +    (0x1D51A, 'M', u'w'),
    +    (0x1D51B, 'M', u'x'),
    +    (0x1D51C, 'M', u'y'),
    +    (0x1D51D, 'X'),
    +    (0x1D51E, 'M', u'a'),
    +    (0x1D51F, 'M', u'b'),
    +    (0x1D520, 'M', u'c'),
    +    (0x1D521, 'M', u'd'),
    +    (0x1D522, 'M', u'e'),
    +    (0x1D523, 'M', u'f'),
    +    (0x1D524, 'M', u'g'),
    +    (0x1D525, 'M', u'h'),
    +    (0x1D526, 'M', u'i'),
    +    (0x1D527, 'M', u'j'),
    +    (0x1D528, 'M', u'k'),
    +    (0x1D529, 'M', u'l'),
    +    (0x1D52A, 'M', u'm'),
    +    (0x1D52B, 'M', u'n'),
    +    (0x1D52C, 'M', u'o'),
    +    (0x1D52D, 'M', u'p'),
    +    (0x1D52E, 'M', u'q'),
    +    (0x1D52F, 'M', u'r'),
    +    (0x1D530, 'M', u's'),
    +    (0x1D531, 'M', u't'),
    +    (0x1D532, 'M', u'u'),
    +    (0x1D533, 'M', u'v'),
    +    (0x1D534, 'M', u'w'),
    +    (0x1D535, 'M', u'x'),
    +    (0x1D536, 'M', u'y'),
    +    ]
    +
    +def _seg_62():
    +    return [
    +    (0x1D537, 'M', u'z'),
    +    (0x1D538, 'M', u'a'),
    +    (0x1D539, 'M', u'b'),
    +    (0x1D53A, 'X'),
    +    (0x1D53B, 'M', u'd'),
    +    (0x1D53C, 'M', u'e'),
    +    (0x1D53D, 'M', u'f'),
    +    (0x1D53E, 'M', u'g'),
    +    (0x1D53F, 'X'),
    +    (0x1D540, 'M', u'i'),
    +    (0x1D541, 'M', u'j'),
    +    (0x1D542, 'M', u'k'),
    +    (0x1D543, 'M', u'l'),
    +    (0x1D544, 'M', u'm'),
    +    (0x1D545, 'X'),
    +    (0x1D546, 'M', u'o'),
    +    (0x1D547, 'X'),
    +    (0x1D54A, 'M', u's'),
    +    (0x1D54B, 'M', u't'),
    +    (0x1D54C, 'M', u'u'),
    +    (0x1D54D, 'M', u'v'),
    +    (0x1D54E, 'M', u'w'),
    +    (0x1D54F, 'M', u'x'),
    +    (0x1D550, 'M', u'y'),
    +    (0x1D551, 'X'),
    +    (0x1D552, 'M', u'a'),
    +    (0x1D553, 'M', u'b'),
    +    (0x1D554, 'M', u'c'),
    +    (0x1D555, 'M', u'd'),
    +    (0x1D556, 'M', u'e'),
    +    (0x1D557, 'M', u'f'),
    +    (0x1D558, 'M', u'g'),
    +    (0x1D559, 'M', u'h'),
    +    (0x1D55A, 'M', u'i'),
    +    (0x1D55B, 'M', u'j'),
    +    (0x1D55C, 'M', u'k'),
    +    (0x1D55D, 'M', u'l'),
    +    (0x1D55E, 'M', u'm'),
    +    (0x1D55F, 'M', u'n'),
    +    (0x1D560, 'M', u'o'),
    +    (0x1D561, 'M', u'p'),
    +    (0x1D562, 'M', u'q'),
    +    (0x1D563, 'M', u'r'),
    +    (0x1D564, 'M', u's'),
    +    (0x1D565, 'M', u't'),
    +    (0x1D566, 'M', u'u'),
    +    (0x1D567, 'M', u'v'),
    +    (0x1D568, 'M', u'w'),
    +    (0x1D569, 'M', u'x'),
    +    (0x1D56A, 'M', u'y'),
    +    (0x1D56B, 'M', u'z'),
    +    (0x1D56C, 'M', u'a'),
    +    (0x1D56D, 'M', u'b'),
    +    (0x1D56E, 'M', u'c'),
    +    (0x1D56F, 'M', u'd'),
    +    (0x1D570, 'M', u'e'),
    +    (0x1D571, 'M', u'f'),
    +    (0x1D572, 'M', u'g'),
    +    (0x1D573, 'M', u'h'),
    +    (0x1D574, 'M', u'i'),
    +    (0x1D575, 'M', u'j'),
    +    (0x1D576, 'M', u'k'),
    +    (0x1D577, 'M', u'l'),
    +    (0x1D578, 'M', u'm'),
    +    (0x1D579, 'M', u'n'),
    +    (0x1D57A, 'M', u'o'),
    +    (0x1D57B, 'M', u'p'),
    +    (0x1D57C, 'M', u'q'),
    +    (0x1D57D, 'M', u'r'),
    +    (0x1D57E, 'M', u's'),
    +    (0x1D57F, 'M', u't'),
    +    (0x1D580, 'M', u'u'),
    +    (0x1D581, 'M', u'v'),
    +    (0x1D582, 'M', u'w'),
    +    (0x1D583, 'M', u'x'),
    +    (0x1D584, 'M', u'y'),
    +    (0x1D585, 'M', u'z'),
    +    (0x1D586, 'M', u'a'),
    +    (0x1D587, 'M', u'b'),
    +    (0x1D588, 'M', u'c'),
    +    (0x1D589, 'M', u'd'),
    +    (0x1D58A, 'M', u'e'),
    +    (0x1D58B, 'M', u'f'),
    +    (0x1D58C, 'M', u'g'),
    +    (0x1D58D, 'M', u'h'),
    +    (0x1D58E, 'M', u'i'),
    +    (0x1D58F, 'M', u'j'),
    +    (0x1D590, 'M', u'k'),
    +    (0x1D591, 'M', u'l'),
    +    (0x1D592, 'M', u'm'),
    +    (0x1D593, 'M', u'n'),
    +    (0x1D594, 'M', u'o'),
    +    (0x1D595, 'M', u'p'),
    +    (0x1D596, 'M', u'q'),
    +    (0x1D597, 'M', u'r'),
    +    (0x1D598, 'M', u's'),
    +    (0x1D599, 'M', u't'),
    +    (0x1D59A, 'M', u'u'),
    +    (0x1D59B, 'M', u'v'),
    +    (0x1D59C, 'M', u'w'),
    +    ]
    +
    +def _seg_63():
    +    return [
    +    (0x1D59D, 'M', u'x'),
    +    (0x1D59E, 'M', u'y'),
    +    (0x1D59F, 'M', u'z'),
    +    (0x1D5A0, 'M', u'a'),
    +    (0x1D5A1, 'M', u'b'),
    +    (0x1D5A2, 'M', u'c'),
    +    (0x1D5A3, 'M', u'd'),
    +    (0x1D5A4, 'M', u'e'),
    +    (0x1D5A5, 'M', u'f'),
    +    (0x1D5A6, 'M', u'g'),
    +    (0x1D5A7, 'M', u'h'),
    +    (0x1D5A8, 'M', u'i'),
    +    (0x1D5A9, 'M', u'j'),
    +    (0x1D5AA, 'M', u'k'),
    +    (0x1D5AB, 'M', u'l'),
    +    (0x1D5AC, 'M', u'm'),
    +    (0x1D5AD, 'M', u'n'),
    +    (0x1D5AE, 'M', u'o'),
    +    (0x1D5AF, 'M', u'p'),
    +    (0x1D5B0, 'M', u'q'),
    +    (0x1D5B1, 'M', u'r'),
    +    (0x1D5B2, 'M', u's'),
    +    (0x1D5B3, 'M', u't'),
    +    (0x1D5B4, 'M', u'u'),
    +    (0x1D5B5, 'M', u'v'),
    +    (0x1D5B6, 'M', u'w'),
    +    (0x1D5B7, 'M', u'x'),
    +    (0x1D5B8, 'M', u'y'),
    +    (0x1D5B9, 'M', u'z'),
    +    (0x1D5BA, 'M', u'a'),
    +    (0x1D5BB, 'M', u'b'),
    +    (0x1D5BC, 'M', u'c'),
    +    (0x1D5BD, 'M', u'd'),
    +    (0x1D5BE, 'M', u'e'),
    +    (0x1D5BF, 'M', u'f'),
    +    (0x1D5C0, 'M', u'g'),
    +    (0x1D5C1, 'M', u'h'),
    +    (0x1D5C2, 'M', u'i'),
    +    (0x1D5C3, 'M', u'j'),
    +    (0x1D5C4, 'M', u'k'),
    +    (0x1D5C5, 'M', u'l'),
    +    (0x1D5C6, 'M', u'm'),
    +    (0x1D5C7, 'M', u'n'),
    +    (0x1D5C8, 'M', u'o'),
    +    (0x1D5C9, 'M', u'p'),
    +    (0x1D5CA, 'M', u'q'),
    +    (0x1D5CB, 'M', u'r'),
    +    (0x1D5CC, 'M', u's'),
    +    (0x1D5CD, 'M', u't'),
    +    (0x1D5CE, 'M', u'u'),
    +    (0x1D5CF, 'M', u'v'),
    +    (0x1D5D0, 'M', u'w'),
    +    (0x1D5D1, 'M', u'x'),
    +    (0x1D5D2, 'M', u'y'),
    +    (0x1D5D3, 'M', u'z'),
    +    (0x1D5D4, 'M', u'a'),
    +    (0x1D5D5, 'M', u'b'),
    +    (0x1D5D6, 'M', u'c'),
    +    (0x1D5D7, 'M', u'd'),
    +    (0x1D5D8, 'M', u'e'),
    +    (0x1D5D9, 'M', u'f'),
    +    (0x1D5DA, 'M', u'g'),
    +    (0x1D5DB, 'M', u'h'),
    +    (0x1D5DC, 'M', u'i'),
    +    (0x1D5DD, 'M', u'j'),
    +    (0x1D5DE, 'M', u'k'),
    +    (0x1D5DF, 'M', u'l'),
    +    (0x1D5E0, 'M', u'm'),
    +    (0x1D5E1, 'M', u'n'),
    +    (0x1D5E2, 'M', u'o'),
    +    (0x1D5E3, 'M', u'p'),
    +    (0x1D5E4, 'M', u'q'),
    +    (0x1D5E5, 'M', u'r'),
    +    (0x1D5E6, 'M', u's'),
    +    (0x1D5E7, 'M', u't'),
    +    (0x1D5E8, 'M', u'u'),
    +    (0x1D5E9, 'M', u'v'),
    +    (0x1D5EA, 'M', u'w'),
    +    (0x1D5EB, 'M', u'x'),
    +    (0x1D5EC, 'M', u'y'),
    +    (0x1D5ED, 'M', u'z'),
    +    (0x1D5EE, 'M', u'a'),
    +    (0x1D5EF, 'M', u'b'),
    +    (0x1D5F0, 'M', u'c'),
    +    (0x1D5F1, 'M', u'd'),
    +    (0x1D5F2, 'M', u'e'),
    +    (0x1D5F3, 'M', u'f'),
    +    (0x1D5F4, 'M', u'g'),
    +    (0x1D5F5, 'M', u'h'),
    +    (0x1D5F6, 'M', u'i'),
    +    (0x1D5F7, 'M', u'j'),
    +    (0x1D5F8, 'M', u'k'),
    +    (0x1D5F9, 'M', u'l'),
    +    (0x1D5FA, 'M', u'm'),
    +    (0x1D5FB, 'M', u'n'),
    +    (0x1D5FC, 'M', u'o'),
    +    (0x1D5FD, 'M', u'p'),
    +    (0x1D5FE, 'M', u'q'),
    +    (0x1D5FF, 'M', u'r'),
    +    (0x1D600, 'M', u's'),
    +    ]
    +
    +def _seg_64():
    +    return [
    +    (0x1D601, 'M', u't'),
    +    (0x1D602, 'M', u'u'),
    +    (0x1D603, 'M', u'v'),
    +    (0x1D604, 'M', u'w'),
    +    (0x1D605, 'M', u'x'),
    +    (0x1D606, 'M', u'y'),
    +    (0x1D607, 'M', u'z'),
    +    (0x1D608, 'M', u'a'),
    +    (0x1D609, 'M', u'b'),
    +    (0x1D60A, 'M', u'c'),
    +    (0x1D60B, 'M', u'd'),
    +    (0x1D60C, 'M', u'e'),
    +    (0x1D60D, 'M', u'f'),
    +    (0x1D60E, 'M', u'g'),
    +    (0x1D60F, 'M', u'h'),
    +    (0x1D610, 'M', u'i'),
    +    (0x1D611, 'M', u'j'),
    +    (0x1D612, 'M', u'k'),
    +    (0x1D613, 'M', u'l'),
    +    (0x1D614, 'M', u'm'),
    +    (0x1D615, 'M', u'n'),
    +    (0x1D616, 'M', u'o'),
    +    (0x1D617, 'M', u'p'),
    +    (0x1D618, 'M', u'q'),
    +    (0x1D619, 'M', u'r'),
    +    (0x1D61A, 'M', u's'),
    +    (0x1D61B, 'M', u't'),
    +    (0x1D61C, 'M', u'u'),
    +    (0x1D61D, 'M', u'v'),
    +    (0x1D61E, 'M', u'w'),
    +    (0x1D61F, 'M', u'x'),
    +    (0x1D620, 'M', u'y'),
    +    (0x1D621, 'M', u'z'),
    +    (0x1D622, 'M', u'a'),
    +    (0x1D623, 'M', u'b'),
    +    (0x1D624, 'M', u'c'),
    +    (0x1D625, 'M', u'd'),
    +    (0x1D626, 'M', u'e'),
    +    (0x1D627, 'M', u'f'),
    +    (0x1D628, 'M', u'g'),
    +    (0x1D629, 'M', u'h'),
    +    (0x1D62A, 'M', u'i'),
    +    (0x1D62B, 'M', u'j'),
    +    (0x1D62C, 'M', u'k'),
    +    (0x1D62D, 'M', u'l'),
    +    (0x1D62E, 'M', u'm'),
    +    (0x1D62F, 'M', u'n'),
    +    (0x1D630, 'M', u'o'),
    +    (0x1D631, 'M', u'p'),
    +    (0x1D632, 'M', u'q'),
    +    (0x1D633, 'M', u'r'),
    +    (0x1D634, 'M', u's'),
    +    (0x1D635, 'M', u't'),
    +    (0x1D636, 'M', u'u'),
    +    (0x1D637, 'M', u'v'),
    +    (0x1D638, 'M', u'w'),
    +    (0x1D639, 'M', u'x'),
    +    (0x1D63A, 'M', u'y'),
    +    (0x1D63B, 'M', u'z'),
    +    (0x1D63C, 'M', u'a'),
    +    (0x1D63D, 'M', u'b'),
    +    (0x1D63E, 'M', u'c'),
    +    (0x1D63F, 'M', u'd'),
    +    (0x1D640, 'M', u'e'),
    +    (0x1D641, 'M', u'f'),
    +    (0x1D642, 'M', u'g'),
    +    (0x1D643, 'M', u'h'),
    +    (0x1D644, 'M', u'i'),
    +    (0x1D645, 'M', u'j'),
    +    (0x1D646, 'M', u'k'),
    +    (0x1D647, 'M', u'l'),
    +    (0x1D648, 'M', u'm'),
    +    (0x1D649, 'M', u'n'),
    +    (0x1D64A, 'M', u'o'),
    +    (0x1D64B, 'M', u'p'),
    +    (0x1D64C, 'M', u'q'),
    +    (0x1D64D, 'M', u'r'),
    +    (0x1D64E, 'M', u's'),
    +    (0x1D64F, 'M', u't'),
    +    (0x1D650, 'M', u'u'),
    +    (0x1D651, 'M', u'v'),
    +    (0x1D652, 'M', u'w'),
    +    (0x1D653, 'M', u'x'),
    +    (0x1D654, 'M', u'y'),
    +    (0x1D655, 'M', u'z'),
    +    (0x1D656, 'M', u'a'),
    +    (0x1D657, 'M', u'b'),
    +    (0x1D658, 'M', u'c'),
    +    (0x1D659, 'M', u'd'),
    +    (0x1D65A, 'M', u'e'),
    +    (0x1D65B, 'M', u'f'),
    +    (0x1D65C, 'M', u'g'),
    +    (0x1D65D, 'M', u'h'),
    +    (0x1D65E, 'M', u'i'),
    +    (0x1D65F, 'M', u'j'),
    +    (0x1D660, 'M', u'k'),
    +    (0x1D661, 'M', u'l'),
    +    (0x1D662, 'M', u'm'),
    +    (0x1D663, 'M', u'n'),
    +    (0x1D664, 'M', u'o'),
    +    ]
    +
    +def _seg_65():
    +    return [
    +    (0x1D665, 'M', u'p'),
    +    (0x1D666, 'M', u'q'),
    +    (0x1D667, 'M', u'r'),
    +    (0x1D668, 'M', u's'),
    +    (0x1D669, 'M', u't'),
    +    (0x1D66A, 'M', u'u'),
    +    (0x1D66B, 'M', u'v'),
    +    (0x1D66C, 'M', u'w'),
    +    (0x1D66D, 'M', u'x'),
    +    (0x1D66E, 'M', u'y'),
    +    (0x1D66F, 'M', u'z'),
    +    (0x1D670, 'M', u'a'),
    +    (0x1D671, 'M', u'b'),
    +    (0x1D672, 'M', u'c'),
    +    (0x1D673, 'M', u'd'),
    +    (0x1D674, 'M', u'e'),
    +    (0x1D675, 'M', u'f'),
    +    (0x1D676, 'M', u'g'),
    +    (0x1D677, 'M', u'h'),
    +    (0x1D678, 'M', u'i'),
    +    (0x1D679, 'M', u'j'),
    +    (0x1D67A, 'M', u'k'),
    +    (0x1D67B, 'M', u'l'),
    +    (0x1D67C, 'M', u'm'),
    +    (0x1D67D, 'M', u'n'),
    +    (0x1D67E, 'M', u'o'),
    +    (0x1D67F, 'M', u'p'),
    +    (0x1D680, 'M', u'q'),
    +    (0x1D681, 'M', u'r'),
    +    (0x1D682, 'M', u's'),
    +    (0x1D683, 'M', u't'),
    +    (0x1D684, 'M', u'u'),
    +    (0x1D685, 'M', u'v'),
    +    (0x1D686, 'M', u'w'),
    +    (0x1D687, 'M', u'x'),
    +    (0x1D688, 'M', u'y'),
    +    (0x1D689, 'M', u'z'),
    +    (0x1D68A, 'M', u'a'),
    +    (0x1D68B, 'M', u'b'),
    +    (0x1D68C, 'M', u'c'),
    +    (0x1D68D, 'M', u'd'),
    +    (0x1D68E, 'M', u'e'),
    +    (0x1D68F, 'M', u'f'),
    +    (0x1D690, 'M', u'g'),
    +    (0x1D691, 'M', u'h'),
    +    (0x1D692, 'M', u'i'),
    +    (0x1D693, 'M', u'j'),
    +    (0x1D694, 'M', u'k'),
    +    (0x1D695, 'M', u'l'),
    +    (0x1D696, 'M', u'm'),
    +    (0x1D697, 'M', u'n'),
    +    (0x1D698, 'M', u'o'),
    +    (0x1D699, 'M', u'p'),
    +    (0x1D69A, 'M', u'q'),
    +    (0x1D69B, 'M', u'r'),
    +    (0x1D69C, 'M', u's'),
    +    (0x1D69D, 'M', u't'),
    +    (0x1D69E, 'M', u'u'),
    +    (0x1D69F, 'M', u'v'),
    +    (0x1D6A0, 'M', u'w'),
    +    (0x1D6A1, 'M', u'x'),
    +    (0x1D6A2, 'M', u'y'),
    +    (0x1D6A3, 'M', u'z'),
    +    (0x1D6A4, 'M', u'ı'),
    +    (0x1D6A5, 'M', u'ȷ'),
    +    (0x1D6A6, 'X'),
    +    (0x1D6A8, 'M', u'α'),
    +    (0x1D6A9, 'M', u'β'),
    +    (0x1D6AA, 'M', u'γ'),
    +    (0x1D6AB, 'M', u'δ'),
    +    (0x1D6AC, 'M', u'ε'),
    +    (0x1D6AD, 'M', u'ζ'),
    +    (0x1D6AE, 'M', u'η'),
    +    (0x1D6AF, 'M', u'θ'),
    +    (0x1D6B0, 'M', u'ι'),
    +    (0x1D6B1, 'M', u'κ'),
    +    (0x1D6B2, 'M', u'λ'),
    +    (0x1D6B3, 'M', u'μ'),
    +    (0x1D6B4, 'M', u'ν'),
    +    (0x1D6B5, 'M', u'ξ'),
    +    (0x1D6B6, 'M', u'ο'),
    +    (0x1D6B7, 'M', u'π'),
    +    (0x1D6B8, 'M', u'ρ'),
    +    (0x1D6B9, 'M', u'θ'),
    +    (0x1D6BA, 'M', u'σ'),
    +    (0x1D6BB, 'M', u'τ'),
    +    (0x1D6BC, 'M', u'υ'),
    +    (0x1D6BD, 'M', u'φ'),
    +    (0x1D6BE, 'M', u'χ'),
    +    (0x1D6BF, 'M', u'ψ'),
    +    (0x1D6C0, 'M', u'ω'),
    +    (0x1D6C1, 'M', u'∇'),
    +    (0x1D6C2, 'M', u'α'),
    +    (0x1D6C3, 'M', u'β'),
    +    (0x1D6C4, 'M', u'γ'),
    +    (0x1D6C5, 'M', u'δ'),
    +    (0x1D6C6, 'M', u'ε'),
    +    (0x1D6C7, 'M', u'ζ'),
    +    (0x1D6C8, 'M', u'η'),
    +    (0x1D6C9, 'M', u'θ'),
    +    ]
    +
    +def _seg_66():
    +    return [
    +    (0x1D6CA, 'M', u'ι'),
    +    (0x1D6CB, 'M', u'κ'),
    +    (0x1D6CC, 'M', u'λ'),
    +    (0x1D6CD, 'M', u'μ'),
    +    (0x1D6CE, 'M', u'ν'),
    +    (0x1D6CF, 'M', u'ξ'),
    +    (0x1D6D0, 'M', u'ο'),
    +    (0x1D6D1, 'M', u'π'),
    +    (0x1D6D2, 'M', u'ρ'),
    +    (0x1D6D3, 'M', u'σ'),
    +    (0x1D6D5, 'M', u'τ'),
    +    (0x1D6D6, 'M', u'υ'),
    +    (0x1D6D7, 'M', u'φ'),
    +    (0x1D6D8, 'M', u'χ'),
    +    (0x1D6D9, 'M', u'ψ'),
    +    (0x1D6DA, 'M', u'ω'),
    +    (0x1D6DB, 'M', u'∂'),
    +    (0x1D6DC, 'M', u'ε'),
    +    (0x1D6DD, 'M', u'θ'),
    +    (0x1D6DE, 'M', u'κ'),
    +    (0x1D6DF, 'M', u'φ'),
    +    (0x1D6E0, 'M', u'ρ'),
    +    (0x1D6E1, 'M', u'π'),
    +    (0x1D6E2, 'M', u'α'),
    +    (0x1D6E3, 'M', u'β'),
    +    (0x1D6E4, 'M', u'γ'),
    +    (0x1D6E5, 'M', u'δ'),
    +    (0x1D6E6, 'M', u'ε'),
    +    (0x1D6E7, 'M', u'ζ'),
    +    (0x1D6E8, 'M', u'η'),
    +    (0x1D6E9, 'M', u'θ'),
    +    (0x1D6EA, 'M', u'ι'),
    +    (0x1D6EB, 'M', u'κ'),
    +    (0x1D6EC, 'M', u'λ'),
    +    (0x1D6ED, 'M', u'μ'),
    +    (0x1D6EE, 'M', u'ν'),
    +    (0x1D6EF, 'M', u'ξ'),
    +    (0x1D6F0, 'M', u'ο'),
    +    (0x1D6F1, 'M', u'π'),
    +    (0x1D6F2, 'M', u'ρ'),
    +    (0x1D6F3, 'M', u'θ'),
    +    (0x1D6F4, 'M', u'σ'),
    +    (0x1D6F5, 'M', u'τ'),
    +    (0x1D6F6, 'M', u'υ'),
    +    (0x1D6F7, 'M', u'φ'),
    +    (0x1D6F8, 'M', u'χ'),
    +    (0x1D6F9, 'M', u'ψ'),
    +    (0x1D6FA, 'M', u'ω'),
    +    (0x1D6FB, 'M', u'∇'),
    +    (0x1D6FC, 'M', u'α'),
    +    (0x1D6FD, 'M', u'β'),
    +    (0x1D6FE, 'M', u'γ'),
    +    (0x1D6FF, 'M', u'δ'),
    +    (0x1D700, 'M', u'ε'),
    +    (0x1D701, 'M', u'ζ'),
    +    (0x1D702, 'M', u'η'),
    +    (0x1D703, 'M', u'θ'),
    +    (0x1D704, 'M', u'ι'),
    +    (0x1D705, 'M', u'κ'),
    +    (0x1D706, 'M', u'λ'),
    +    (0x1D707, 'M', u'μ'),
    +    (0x1D708, 'M', u'ν'),
    +    (0x1D709, 'M', u'ξ'),
    +    (0x1D70A, 'M', u'ο'),
    +    (0x1D70B, 'M', u'π'),
    +    (0x1D70C, 'M', u'ρ'),
    +    (0x1D70D, 'M', u'σ'),
    +    (0x1D70F, 'M', u'τ'),
    +    (0x1D710, 'M', u'υ'),
    +    (0x1D711, 'M', u'φ'),
    +    (0x1D712, 'M', u'χ'),
    +    (0x1D713, 'M', u'ψ'),
    +    (0x1D714, 'M', u'ω'),
    +    (0x1D715, 'M', u'∂'),
    +    (0x1D716, 'M', u'ε'),
    +    (0x1D717, 'M', u'θ'),
    +    (0x1D718, 'M', u'κ'),
    +    (0x1D719, 'M', u'φ'),
    +    (0x1D71A, 'M', u'ρ'),
    +    (0x1D71B, 'M', u'π'),
    +    (0x1D71C, 'M', u'α'),
    +    (0x1D71D, 'M', u'β'),
    +    (0x1D71E, 'M', u'γ'),
    +    (0x1D71F, 'M', u'δ'),
    +    (0x1D720, 'M', u'ε'),
    +    (0x1D721, 'M', u'ζ'),
    +    (0x1D722, 'M', u'η'),
    +    (0x1D723, 'M', u'θ'),
    +    (0x1D724, 'M', u'ι'),
    +    (0x1D725, 'M', u'κ'),
    +    (0x1D726, 'M', u'λ'),
    +    (0x1D727, 'M', u'μ'),
    +    (0x1D728, 'M', u'ν'),
    +    (0x1D729, 'M', u'ξ'),
    +    (0x1D72A, 'M', u'ο'),
    +    (0x1D72B, 'M', u'π'),
    +    (0x1D72C, 'M', u'ρ'),
    +    (0x1D72D, 'M', u'θ'),
    +    (0x1D72E, 'M', u'σ'),
    +    (0x1D72F, 'M', u'τ'),
    +    ]
    +
    +def _seg_67():
    +    return [
    +    (0x1D730, 'M', u'υ'),
    +    (0x1D731, 'M', u'φ'),
    +    (0x1D732, 'M', u'χ'),
    +    (0x1D733, 'M', u'ψ'),
    +    (0x1D734, 'M', u'ω'),
    +    (0x1D735, 'M', u'∇'),
    +    (0x1D736, 'M', u'α'),
    +    (0x1D737, 'M', u'β'),
    +    (0x1D738, 'M', u'γ'),
    +    (0x1D739, 'M', u'δ'),
    +    (0x1D73A, 'M', u'ε'),
    +    (0x1D73B, 'M', u'ζ'),
    +    (0x1D73C, 'M', u'η'),
    +    (0x1D73D, 'M', u'θ'),
    +    (0x1D73E, 'M', u'ι'),
    +    (0x1D73F, 'M', u'κ'),
    +    (0x1D740, 'M', u'λ'),
    +    (0x1D741, 'M', u'μ'),
    +    (0x1D742, 'M', u'ν'),
    +    (0x1D743, 'M', u'ξ'),
    +    (0x1D744, 'M', u'ο'),
    +    (0x1D745, 'M', u'π'),
    +    (0x1D746, 'M', u'ρ'),
    +    (0x1D747, 'M', u'σ'),
    +    (0x1D749, 'M', u'τ'),
    +    (0x1D74A, 'M', u'υ'),
    +    (0x1D74B, 'M', u'φ'),
    +    (0x1D74C, 'M', u'χ'),
    +    (0x1D74D, 'M', u'ψ'),
    +    (0x1D74E, 'M', u'ω'),
    +    (0x1D74F, 'M', u'∂'),
    +    (0x1D750, 'M', u'ε'),
    +    (0x1D751, 'M', u'θ'),
    +    (0x1D752, 'M', u'κ'),
    +    (0x1D753, 'M', u'φ'),
    +    (0x1D754, 'M', u'ρ'),
    +    (0x1D755, 'M', u'π'),
    +    (0x1D756, 'M', u'α'),
    +    (0x1D757, 'M', u'β'),
    +    (0x1D758, 'M', u'γ'),
    +    (0x1D759, 'M', u'δ'),
    +    (0x1D75A, 'M', u'ε'),
    +    (0x1D75B, 'M', u'ζ'),
    +    (0x1D75C, 'M', u'η'),
    +    (0x1D75D, 'M', u'θ'),
    +    (0x1D75E, 'M', u'ι'),
    +    (0x1D75F, 'M', u'κ'),
    +    (0x1D760, 'M', u'λ'),
    +    (0x1D761, 'M', u'μ'),
    +    (0x1D762, 'M', u'ν'),
    +    (0x1D763, 'M', u'ξ'),
    +    (0x1D764, 'M', u'ο'),
    +    (0x1D765, 'M', u'π'),
    +    (0x1D766, 'M', u'ρ'),
    +    (0x1D767, 'M', u'θ'),
    +    (0x1D768, 'M', u'σ'),
    +    (0x1D769, 'M', u'τ'),
    +    (0x1D76A, 'M', u'υ'),
    +    (0x1D76B, 'M', u'φ'),
    +    (0x1D76C, 'M', u'χ'),
    +    (0x1D76D, 'M', u'ψ'),
    +    (0x1D76E, 'M', u'ω'),
    +    (0x1D76F, 'M', u'∇'),
    +    (0x1D770, 'M', u'α'),
    +    (0x1D771, 'M', u'β'),
    +    (0x1D772, 'M', u'γ'),
    +    (0x1D773, 'M', u'δ'),
    +    (0x1D774, 'M', u'ε'),
    +    (0x1D775, 'M', u'ζ'),
    +    (0x1D776, 'M', u'η'),
    +    (0x1D777, 'M', u'θ'),
    +    (0x1D778, 'M', u'ι'),
    +    (0x1D779, 'M', u'κ'),
    +    (0x1D77A, 'M', u'λ'),
    +    (0x1D77B, 'M', u'μ'),
    +    (0x1D77C, 'M', u'ν'),
    +    (0x1D77D, 'M', u'ξ'),
    +    (0x1D77E, 'M', u'ο'),
    +    (0x1D77F, 'M', u'π'),
    +    (0x1D780, 'M', u'ρ'),
    +    (0x1D781, 'M', u'σ'),
    +    (0x1D783, 'M', u'τ'),
    +    (0x1D784, 'M', u'υ'),
    +    (0x1D785, 'M', u'φ'),
    +    (0x1D786, 'M', u'χ'),
    +    (0x1D787, 'M', u'ψ'),
    +    (0x1D788, 'M', u'ω'),
    +    (0x1D789, 'M', u'∂'),
    +    (0x1D78A, 'M', u'ε'),
    +    (0x1D78B, 'M', u'θ'),
    +    (0x1D78C, 'M', u'κ'),
    +    (0x1D78D, 'M', u'φ'),
    +    (0x1D78E, 'M', u'ρ'),
    +    (0x1D78F, 'M', u'π'),
    +    (0x1D790, 'M', u'α'),
    +    (0x1D791, 'M', u'β'),
    +    (0x1D792, 'M', u'γ'),
    +    (0x1D793, 'M', u'δ'),
    +    (0x1D794, 'M', u'ε'),
    +    (0x1D795, 'M', u'ζ'),
    +    ]
    +
    +def _seg_68():
    +    return [
    +    (0x1D796, 'M', u'η'),
    +    (0x1D797, 'M', u'θ'),
    +    (0x1D798, 'M', u'ι'),
    +    (0x1D799, 'M', u'κ'),
    +    (0x1D79A, 'M', u'λ'),
    +    (0x1D79B, 'M', u'μ'),
    +    (0x1D79C, 'M', u'ν'),
    +    (0x1D79D, 'M', u'ξ'),
    +    (0x1D79E, 'M', u'ο'),
    +    (0x1D79F, 'M', u'π'),
    +    (0x1D7A0, 'M', u'ρ'),
    +    (0x1D7A1, 'M', u'θ'),
    +    (0x1D7A2, 'M', u'σ'),
    +    (0x1D7A3, 'M', u'τ'),
    +    (0x1D7A4, 'M', u'υ'),
    +    (0x1D7A5, 'M', u'φ'),
    +    (0x1D7A6, 'M', u'χ'),
    +    (0x1D7A7, 'M', u'ψ'),
    +    (0x1D7A8, 'M', u'ω'),
    +    (0x1D7A9, 'M', u'∇'),
    +    (0x1D7AA, 'M', u'α'),
    +    (0x1D7AB, 'M', u'β'),
    +    (0x1D7AC, 'M', u'γ'),
    +    (0x1D7AD, 'M', u'δ'),
    +    (0x1D7AE, 'M', u'ε'),
    +    (0x1D7AF, 'M', u'ζ'),
    +    (0x1D7B0, 'M', u'η'),
    +    (0x1D7B1, 'M', u'θ'),
    +    (0x1D7B2, 'M', u'ι'),
    +    (0x1D7B3, 'M', u'κ'),
    +    (0x1D7B4, 'M', u'λ'),
    +    (0x1D7B5, 'M', u'μ'),
    +    (0x1D7B6, 'M', u'ν'),
    +    (0x1D7B7, 'M', u'ξ'),
    +    (0x1D7B8, 'M', u'ο'),
    +    (0x1D7B9, 'M', u'π'),
    +    (0x1D7BA, 'M', u'ρ'),
    +    (0x1D7BB, 'M', u'σ'),
    +    (0x1D7BD, 'M', u'τ'),
    +    (0x1D7BE, 'M', u'υ'),
    +    (0x1D7BF, 'M', u'φ'),
    +    (0x1D7C0, 'M', u'χ'),
    +    (0x1D7C1, 'M', u'ψ'),
    +    (0x1D7C2, 'M', u'ω'),
    +    (0x1D7C3, 'M', u'∂'),
    +    (0x1D7C4, 'M', u'ε'),
    +    (0x1D7C5, 'M', u'θ'),
    +    (0x1D7C6, 'M', u'κ'),
    +    (0x1D7C7, 'M', u'φ'),
    +    (0x1D7C8, 'M', u'ρ'),
    +    (0x1D7C9, 'M', u'π'),
    +    (0x1D7CA, 'M', u'ϝ'),
    +    (0x1D7CC, 'X'),
    +    (0x1D7CE, 'M', u'0'),
    +    (0x1D7CF, 'M', u'1'),
    +    (0x1D7D0, 'M', u'2'),
    +    (0x1D7D1, 'M', u'3'),
    +    (0x1D7D2, 'M', u'4'),
    +    (0x1D7D3, 'M', u'5'),
    +    (0x1D7D4, 'M', u'6'),
    +    (0x1D7D5, 'M', u'7'),
    +    (0x1D7D6, 'M', u'8'),
    +    (0x1D7D7, 'M', u'9'),
    +    (0x1D7D8, 'M', u'0'),
    +    (0x1D7D9, 'M', u'1'),
    +    (0x1D7DA, 'M', u'2'),
    +    (0x1D7DB, 'M', u'3'),
    +    (0x1D7DC, 'M', u'4'),
    +    (0x1D7DD, 'M', u'5'),
    +    (0x1D7DE, 'M', u'6'),
    +    (0x1D7DF, 'M', u'7'),
    +    (0x1D7E0, 'M', u'8'),
    +    (0x1D7E1, 'M', u'9'),
    +    (0x1D7E2, 'M', u'0'),
    +    (0x1D7E3, 'M', u'1'),
    +    (0x1D7E4, 'M', u'2'),
    +    (0x1D7E5, 'M', u'3'),
    +    (0x1D7E6, 'M', u'4'),
    +    (0x1D7E7, 'M', u'5'),
    +    (0x1D7E8, 'M', u'6'),
    +    (0x1D7E9, 'M', u'7'),
    +    (0x1D7EA, 'M', u'8'),
    +    (0x1D7EB, 'M', u'9'),
    +    (0x1D7EC, 'M', u'0'),
    +    (0x1D7ED, 'M', u'1'),
    +    (0x1D7EE, 'M', u'2'),
    +    (0x1D7EF, 'M', u'3'),
    +    (0x1D7F0, 'M', u'4'),
    +    (0x1D7F1, 'M', u'5'),
    +    (0x1D7F2, 'M', u'6'),
    +    (0x1D7F3, 'M', u'7'),
    +    (0x1D7F4, 'M', u'8'),
    +    (0x1D7F5, 'M', u'9'),
    +    (0x1D7F6, 'M', u'0'),
    +    (0x1D7F7, 'M', u'1'),
    +    (0x1D7F8, 'M', u'2'),
    +    (0x1D7F9, 'M', u'3'),
    +    (0x1D7FA, 'M', u'4'),
    +    (0x1D7FB, 'M', u'5'),
    +    (0x1D7FC, 'M', u'6'),
    +    ]
    +
    +def _seg_69():
    +    return [
    +    (0x1D7FD, 'M', u'7'),
    +    (0x1D7FE, 'M', u'8'),
    +    (0x1D7FF, 'M', u'9'),
    +    (0x1D800, 'V'),
    +    (0x1DA8C, 'X'),
    +    (0x1DA9B, 'V'),
    +    (0x1DAA0, 'X'),
    +    (0x1DAA1, 'V'),
    +    (0x1DAB0, 'X'),
    +    (0x1E000, 'V'),
    +    (0x1E007, 'X'),
    +    (0x1E008, 'V'),
    +    (0x1E019, 'X'),
    +    (0x1E01B, 'V'),
    +    (0x1E022, 'X'),
    +    (0x1E023, 'V'),
    +    (0x1E025, 'X'),
    +    (0x1E026, 'V'),
    +    (0x1E02B, 'X'),
    +    (0x1E100, 'V'),
    +    (0x1E12D, 'X'),
    +    (0x1E130, 'V'),
    +    (0x1E13E, 'X'),
    +    (0x1E140, 'V'),
    +    (0x1E14A, 'X'),
    +    (0x1E14E, 'V'),
    +    (0x1E150, 'X'),
    +    (0x1E2C0, 'V'),
    +    (0x1E2FA, 'X'),
    +    (0x1E2FF, 'V'),
    +    (0x1E300, 'X'),
    +    (0x1E800, 'V'),
    +    (0x1E8C5, 'X'),
    +    (0x1E8C7, 'V'),
    +    (0x1E8D7, 'X'),
    +    (0x1E900, 'M', u'𞤢'),
    +    (0x1E901, 'M', u'𞤣'),
    +    (0x1E902, 'M', u'𞤤'),
    +    (0x1E903, 'M', u'𞤥'),
    +    (0x1E904, 'M', u'𞤦'),
    +    (0x1E905, 'M', u'𞤧'),
    +    (0x1E906, 'M', u'𞤨'),
    +    (0x1E907, 'M', u'𞤩'),
    +    (0x1E908, 'M', u'𞤪'),
    +    (0x1E909, 'M', u'𞤫'),
    +    (0x1E90A, 'M', u'𞤬'),
    +    (0x1E90B, 'M', u'𞤭'),
    +    (0x1E90C, 'M', u'𞤮'),
    +    (0x1E90D, 'M', u'𞤯'),
    +    (0x1E90E, 'M', u'𞤰'),
    +    (0x1E90F, 'M', u'𞤱'),
    +    (0x1E910, 'M', u'𞤲'),
    +    (0x1E911, 'M', u'𞤳'),
    +    (0x1E912, 'M', u'𞤴'),
    +    (0x1E913, 'M', u'𞤵'),
    +    (0x1E914, 'M', u'𞤶'),
    +    (0x1E915, 'M', u'𞤷'),
    +    (0x1E916, 'M', u'𞤸'),
    +    (0x1E917, 'M', u'𞤹'),
    +    (0x1E918, 'M', u'𞤺'),
    +    (0x1E919, 'M', u'𞤻'),
    +    (0x1E91A, 'M', u'𞤼'),
    +    (0x1E91B, 'M', u'𞤽'),
    +    (0x1E91C, 'M', u'𞤾'),
    +    (0x1E91D, 'M', u'𞤿'),
    +    (0x1E91E, 'M', u'𞥀'),
    +    (0x1E91F, 'M', u'𞥁'),
    +    (0x1E920, 'M', u'𞥂'),
    +    (0x1E921, 'M', u'𞥃'),
    +    (0x1E922, 'V'),
    +    (0x1E94C, 'X'),
    +    (0x1E950, 'V'),
    +    (0x1E95A, 'X'),
    +    (0x1E95E, 'V'),
    +    (0x1E960, 'X'),
    +    (0x1EC71, 'V'),
    +    (0x1ECB5, 'X'),
    +    (0x1ED01, 'V'),
    +    (0x1ED3E, 'X'),
    +    (0x1EE00, 'M', u'ا'),
    +    (0x1EE01, 'M', u'ب'),
    +    (0x1EE02, 'M', u'ج'),
    +    (0x1EE03, 'M', u'د'),
    +    (0x1EE04, 'X'),
    +    (0x1EE05, 'M', u'و'),
    +    (0x1EE06, 'M', u'ز'),
    +    (0x1EE07, 'M', u'ح'),
    +    (0x1EE08, 'M', u'ط'),
    +    (0x1EE09, 'M', u'ي'),
    +    (0x1EE0A, 'M', u'ك'),
    +    (0x1EE0B, 'M', u'ل'),
    +    (0x1EE0C, 'M', u'م'),
    +    (0x1EE0D, 'M', u'ن'),
    +    (0x1EE0E, 'M', u'س'),
    +    (0x1EE0F, 'M', u'ع'),
    +    (0x1EE10, 'M', u'ف'),
    +    (0x1EE11, 'M', u'ص'),
    +    (0x1EE12, 'M', u'ق'),
    +    (0x1EE13, 'M', u'ر'),
    +    (0x1EE14, 'M', u'ش'),
    +    ]
    +
    +def _seg_70():
    +    return [
    +    (0x1EE15, 'M', u'ت'),
    +    (0x1EE16, 'M', u'ث'),
    +    (0x1EE17, 'M', u'خ'),
    +    (0x1EE18, 'M', u'ذ'),
    +    (0x1EE19, 'M', u'ض'),
    +    (0x1EE1A, 'M', u'ظ'),
    +    (0x1EE1B, 'M', u'غ'),
    +    (0x1EE1C, 'M', u'ٮ'),
    +    (0x1EE1D, 'M', u'ں'),
    +    (0x1EE1E, 'M', u'ڡ'),
    +    (0x1EE1F, 'M', u'ٯ'),
    +    (0x1EE20, 'X'),
    +    (0x1EE21, 'M', u'ب'),
    +    (0x1EE22, 'M', u'ج'),
    +    (0x1EE23, 'X'),
    +    (0x1EE24, 'M', u'ه'),
    +    (0x1EE25, 'X'),
    +    (0x1EE27, 'M', u'ح'),
    +    (0x1EE28, 'X'),
    +    (0x1EE29, 'M', u'ي'),
    +    (0x1EE2A, 'M', u'ك'),
    +    (0x1EE2B, 'M', u'ل'),
    +    (0x1EE2C, 'M', u'م'),
    +    (0x1EE2D, 'M', u'ن'),
    +    (0x1EE2E, 'M', u'س'),
    +    (0x1EE2F, 'M', u'ع'),
    +    (0x1EE30, 'M', u'ف'),
    +    (0x1EE31, 'M', u'ص'),
    +    (0x1EE32, 'M', u'ق'),
    +    (0x1EE33, 'X'),
    +    (0x1EE34, 'M', u'ش'),
    +    (0x1EE35, 'M', u'ت'),
    +    (0x1EE36, 'M', u'ث'),
    +    (0x1EE37, 'M', u'خ'),
    +    (0x1EE38, 'X'),
    +    (0x1EE39, 'M', u'ض'),
    +    (0x1EE3A, 'X'),
    +    (0x1EE3B, 'M', u'غ'),
    +    (0x1EE3C, 'X'),
    +    (0x1EE42, 'M', u'ج'),
    +    (0x1EE43, 'X'),
    +    (0x1EE47, 'M', u'ح'),
    +    (0x1EE48, 'X'),
    +    (0x1EE49, 'M', u'ي'),
    +    (0x1EE4A, 'X'),
    +    (0x1EE4B, 'M', u'ل'),
    +    (0x1EE4C, 'X'),
    +    (0x1EE4D, 'M', u'ن'),
    +    (0x1EE4E, 'M', u'س'),
    +    (0x1EE4F, 'M', u'ع'),
    +    (0x1EE50, 'X'),
    +    (0x1EE51, 'M', u'ص'),
    +    (0x1EE52, 'M', u'ق'),
    +    (0x1EE53, 'X'),
    +    (0x1EE54, 'M', u'ش'),
    +    (0x1EE55, 'X'),
    +    (0x1EE57, 'M', u'خ'),
    +    (0x1EE58, 'X'),
    +    (0x1EE59, 'M', u'ض'),
    +    (0x1EE5A, 'X'),
    +    (0x1EE5B, 'M', u'غ'),
    +    (0x1EE5C, 'X'),
    +    (0x1EE5D, 'M', u'ں'),
    +    (0x1EE5E, 'X'),
    +    (0x1EE5F, 'M', u'ٯ'),
    +    (0x1EE60, 'X'),
    +    (0x1EE61, 'M', u'ب'),
    +    (0x1EE62, 'M', u'ج'),
    +    (0x1EE63, 'X'),
    +    (0x1EE64, 'M', u'ه'),
    +    (0x1EE65, 'X'),
    +    (0x1EE67, 'M', u'ح'),
    +    (0x1EE68, 'M', u'ط'),
    +    (0x1EE69, 'M', u'ي'),
    +    (0x1EE6A, 'M', u'ك'),
    +    (0x1EE6B, 'X'),
    +    (0x1EE6C, 'M', u'م'),
    +    (0x1EE6D, 'M', u'ن'),
    +    (0x1EE6E, 'M', u'س'),
    +    (0x1EE6F, 'M', u'ع'),
    +    (0x1EE70, 'M', u'ف'),
    +    (0x1EE71, 'M', u'ص'),
    +    (0x1EE72, 'M', u'ق'),
    +    (0x1EE73, 'X'),
    +    (0x1EE74, 'M', u'ش'),
    +    (0x1EE75, 'M', u'ت'),
    +    (0x1EE76, 'M', u'ث'),
    +    (0x1EE77, 'M', u'خ'),
    +    (0x1EE78, 'X'),
    +    (0x1EE79, 'M', u'ض'),
    +    (0x1EE7A, 'M', u'ظ'),
    +    (0x1EE7B, 'M', u'غ'),
    +    (0x1EE7C, 'M', u'ٮ'),
    +    (0x1EE7D, 'X'),
    +    (0x1EE7E, 'M', u'ڡ'),
    +    (0x1EE7F, 'X'),
    +    (0x1EE80, 'M', u'ا'),
    +    (0x1EE81, 'M', u'ب'),
    +    (0x1EE82, 'M', u'ج'),
    +    (0x1EE83, 'M', u'د'),
    +    ]
    +
    +def _seg_71():
    +    return [
    +    (0x1EE84, 'M', u'ه'),
    +    (0x1EE85, 'M', u'و'),
    +    (0x1EE86, 'M', u'ز'),
    +    (0x1EE87, 'M', u'ح'),
    +    (0x1EE88, 'M', u'ط'),
    +    (0x1EE89, 'M', u'ي'),
    +    (0x1EE8A, 'X'),
    +    (0x1EE8B, 'M', u'ل'),
    +    (0x1EE8C, 'M', u'م'),
    +    (0x1EE8D, 'M', u'ن'),
    +    (0x1EE8E, 'M', u'س'),
    +    (0x1EE8F, 'M', u'ع'),
    +    (0x1EE90, 'M', u'ف'),
    +    (0x1EE91, 'M', u'ص'),
    +    (0x1EE92, 'M', u'ق'),
    +    (0x1EE93, 'M', u'ر'),
    +    (0x1EE94, 'M', u'ش'),
    +    (0x1EE95, 'M', u'ت'),
    +    (0x1EE96, 'M', u'ث'),
    +    (0x1EE97, 'M', u'خ'),
    +    (0x1EE98, 'M', u'ذ'),
    +    (0x1EE99, 'M', u'ض'),
    +    (0x1EE9A, 'M', u'ظ'),
    +    (0x1EE9B, 'M', u'غ'),
    +    (0x1EE9C, 'X'),
    +    (0x1EEA1, 'M', u'ب'),
    +    (0x1EEA2, 'M', u'ج'),
    +    (0x1EEA3, 'M', u'د'),
    +    (0x1EEA4, 'X'),
    +    (0x1EEA5, 'M', u'و'),
    +    (0x1EEA6, 'M', u'ز'),
    +    (0x1EEA7, 'M', u'ح'),
    +    (0x1EEA8, 'M', u'ط'),
    +    (0x1EEA9, 'M', u'ي'),
    +    (0x1EEAA, 'X'),
    +    (0x1EEAB, 'M', u'ل'),
    +    (0x1EEAC, 'M', u'م'),
    +    (0x1EEAD, 'M', u'ن'),
    +    (0x1EEAE, 'M', u'س'),
    +    (0x1EEAF, 'M', u'ع'),
    +    (0x1EEB0, 'M', u'ف'),
    +    (0x1EEB1, 'M', u'ص'),
    +    (0x1EEB2, 'M', u'ق'),
    +    (0x1EEB3, 'M', u'ر'),
    +    (0x1EEB4, 'M', u'ش'),
    +    (0x1EEB5, 'M', u'ت'),
    +    (0x1EEB6, 'M', u'ث'),
    +    (0x1EEB7, 'M', u'خ'),
    +    (0x1EEB8, 'M', u'ذ'),
    +    (0x1EEB9, 'M', u'ض'),
    +    (0x1EEBA, 'M', u'ظ'),
    +    (0x1EEBB, 'M', u'غ'),
    +    (0x1EEBC, 'X'),
    +    (0x1EEF0, 'V'),
    +    (0x1EEF2, 'X'),
    +    (0x1F000, 'V'),
    +    (0x1F02C, 'X'),
    +    (0x1F030, 'V'),
    +    (0x1F094, 'X'),
    +    (0x1F0A0, 'V'),
    +    (0x1F0AF, 'X'),
    +    (0x1F0B1, 'V'),
    +    (0x1F0C0, 'X'),
    +    (0x1F0C1, 'V'),
    +    (0x1F0D0, 'X'),
    +    (0x1F0D1, 'V'),
    +    (0x1F0F6, 'X'),
    +    (0x1F101, '3', u'0,'),
    +    (0x1F102, '3', u'1,'),
    +    (0x1F103, '3', u'2,'),
    +    (0x1F104, '3', u'3,'),
    +    (0x1F105, '3', u'4,'),
    +    (0x1F106, '3', u'5,'),
    +    (0x1F107, '3', u'6,'),
    +    (0x1F108, '3', u'7,'),
    +    (0x1F109, '3', u'8,'),
    +    (0x1F10A, '3', u'9,'),
    +    (0x1F10B, 'V'),
    +    (0x1F10D, 'X'),
    +    (0x1F110, '3', u'(a)'),
    +    (0x1F111, '3', u'(b)'),
    +    (0x1F112, '3', u'(c)'),
    +    (0x1F113, '3', u'(d)'),
    +    (0x1F114, '3', u'(e)'),
    +    (0x1F115, '3', u'(f)'),
    +    (0x1F116, '3', u'(g)'),
    +    (0x1F117, '3', u'(h)'),
    +    (0x1F118, '3', u'(i)'),
    +    (0x1F119, '3', u'(j)'),
    +    (0x1F11A, '3', u'(k)'),
    +    (0x1F11B, '3', u'(l)'),
    +    (0x1F11C, '3', u'(m)'),
    +    (0x1F11D, '3', u'(n)'),
    +    (0x1F11E, '3', u'(o)'),
    +    (0x1F11F, '3', u'(p)'),
    +    (0x1F120, '3', u'(q)'),
    +    (0x1F121, '3', u'(r)'),
    +    (0x1F122, '3', u'(s)'),
    +    (0x1F123, '3', u'(t)'),
    +    (0x1F124, '3', u'(u)'),
    +    ]
    +
    +def _seg_72():
    +    return [
    +    (0x1F125, '3', u'(v)'),
    +    (0x1F126, '3', u'(w)'),
    +    (0x1F127, '3', u'(x)'),
    +    (0x1F128, '3', u'(y)'),
    +    (0x1F129, '3', u'(z)'),
    +    (0x1F12A, 'M', u'〔s〕'),
    +    (0x1F12B, 'M', u'c'),
    +    (0x1F12C, 'M', u'r'),
    +    (0x1F12D, 'M', u'cd'),
    +    (0x1F12E, 'M', u'wz'),
    +    (0x1F12F, 'V'),
    +    (0x1F130, 'M', u'a'),
    +    (0x1F131, 'M', u'b'),
    +    (0x1F132, 'M', u'c'),
    +    (0x1F133, 'M', u'd'),
    +    (0x1F134, 'M', u'e'),
    +    (0x1F135, 'M', u'f'),
    +    (0x1F136, 'M', u'g'),
    +    (0x1F137, 'M', u'h'),
    +    (0x1F138, 'M', u'i'),
    +    (0x1F139, 'M', u'j'),
    +    (0x1F13A, 'M', u'k'),
    +    (0x1F13B, 'M', u'l'),
    +    (0x1F13C, 'M', u'm'),
    +    (0x1F13D, 'M', u'n'),
    +    (0x1F13E, 'M', u'o'),
    +    (0x1F13F, 'M', u'p'),
    +    (0x1F140, 'M', u'q'),
    +    (0x1F141, 'M', u'r'),
    +    (0x1F142, 'M', u's'),
    +    (0x1F143, 'M', u't'),
    +    (0x1F144, 'M', u'u'),
    +    (0x1F145, 'M', u'v'),
    +    (0x1F146, 'M', u'w'),
    +    (0x1F147, 'M', u'x'),
    +    (0x1F148, 'M', u'y'),
    +    (0x1F149, 'M', u'z'),
    +    (0x1F14A, 'M', u'hv'),
    +    (0x1F14B, 'M', u'mv'),
    +    (0x1F14C, 'M', u'sd'),
    +    (0x1F14D, 'M', u'ss'),
    +    (0x1F14E, 'M', u'ppv'),
    +    (0x1F14F, 'M', u'wc'),
    +    (0x1F150, 'V'),
    +    (0x1F16A, 'M', u'mc'),
    +    (0x1F16B, 'M', u'md'),
    +    (0x1F16C, 'M', u'mr'),
    +    (0x1F16D, 'X'),
    +    (0x1F170, 'V'),
    +    (0x1F190, 'M', u'dj'),
    +    (0x1F191, 'V'),
    +    (0x1F1AD, 'X'),
    +    (0x1F1E6, 'V'),
    +    (0x1F200, 'M', u'ほか'),
    +    (0x1F201, 'M', u'ココ'),
    +    (0x1F202, 'M', u'サ'),
    +    (0x1F203, 'X'),
    +    (0x1F210, 'M', u'手'),
    +    (0x1F211, 'M', u'字'),
    +    (0x1F212, 'M', u'双'),
    +    (0x1F213, 'M', u'デ'),
    +    (0x1F214, 'M', u'二'),
    +    (0x1F215, 'M', u'多'),
    +    (0x1F216, 'M', u'解'),
    +    (0x1F217, 'M', u'天'),
    +    (0x1F218, 'M', u'交'),
    +    (0x1F219, 'M', u'映'),
    +    (0x1F21A, 'M', u'無'),
    +    (0x1F21B, 'M', u'料'),
    +    (0x1F21C, 'M', u'前'),
    +    (0x1F21D, 'M', u'後'),
    +    (0x1F21E, 'M', u'再'),
    +    (0x1F21F, 'M', u'新'),
    +    (0x1F220, 'M', u'初'),
    +    (0x1F221, 'M', u'終'),
    +    (0x1F222, 'M', u'生'),
    +    (0x1F223, 'M', u'販'),
    +    (0x1F224, 'M', u'声'),
    +    (0x1F225, 'M', u'吹'),
    +    (0x1F226, 'M', u'演'),
    +    (0x1F227, 'M', u'投'),
    +    (0x1F228, 'M', u'捕'),
    +    (0x1F229, 'M', u'一'),
    +    (0x1F22A, 'M', u'三'),
    +    (0x1F22B, 'M', u'遊'),
    +    (0x1F22C, 'M', u'左'),
    +    (0x1F22D, 'M', u'中'),
    +    (0x1F22E, 'M', u'右'),
    +    (0x1F22F, 'M', u'指'),
    +    (0x1F230, 'M', u'走'),
    +    (0x1F231, 'M', u'打'),
    +    (0x1F232, 'M', u'禁'),
    +    (0x1F233, 'M', u'空'),
    +    (0x1F234, 'M', u'合'),
    +    (0x1F235, 'M', u'満'),
    +    (0x1F236, 'M', u'有'),
    +    (0x1F237, 'M', u'月'),
    +    (0x1F238, 'M', u'申'),
    +    (0x1F239, 'M', u'割'),
    +    (0x1F23A, 'M', u'営'),
    +    ]
    +
    +def _seg_73():
    +    return [
    +    (0x1F23B, 'M', u'配'),
    +    (0x1F23C, 'X'),
    +    (0x1F240, 'M', u'〔本〕'),
    +    (0x1F241, 'M', u'〔三〕'),
    +    (0x1F242, 'M', u'〔二〕'),
    +    (0x1F243, 'M', u'〔安〕'),
    +    (0x1F244, 'M', u'〔点〕'),
    +    (0x1F245, 'M', u'〔打〕'),
    +    (0x1F246, 'M', u'〔盗〕'),
    +    (0x1F247, 'M', u'〔勝〕'),
    +    (0x1F248, 'M', u'〔敗〕'),
    +    (0x1F249, 'X'),
    +    (0x1F250, 'M', u'得'),
    +    (0x1F251, 'M', u'可'),
    +    (0x1F252, 'X'),
    +    (0x1F260, 'V'),
    +    (0x1F266, 'X'),
    +    (0x1F300, 'V'),
    +    (0x1F6D6, 'X'),
    +    (0x1F6E0, 'V'),
    +    (0x1F6ED, 'X'),
    +    (0x1F6F0, 'V'),
    +    (0x1F6FB, 'X'),
    +    (0x1F700, 'V'),
    +    (0x1F774, 'X'),
    +    (0x1F780, 'V'),
    +    (0x1F7D9, 'X'),
    +    (0x1F7E0, 'V'),
    +    (0x1F7EC, 'X'),
    +    (0x1F800, 'V'),
    +    (0x1F80C, 'X'),
    +    (0x1F810, 'V'),
    +    (0x1F848, 'X'),
    +    (0x1F850, 'V'),
    +    (0x1F85A, 'X'),
    +    (0x1F860, 'V'),
    +    (0x1F888, 'X'),
    +    (0x1F890, 'V'),
    +    (0x1F8AE, 'X'),
    +    (0x1F900, 'V'),
    +    (0x1F90C, 'X'),
    +    (0x1F90D, 'V'),
    +    (0x1F972, 'X'),
    +    (0x1F973, 'V'),
    +    (0x1F977, 'X'),
    +    (0x1F97A, 'V'),
    +    (0x1F9A3, 'X'),
    +    (0x1F9A5, 'V'),
    +    (0x1F9AB, 'X'),
    +    (0x1F9AE, 'V'),
    +    (0x1F9CB, 'X'),
    +    (0x1F9CD, 'V'),
    +    (0x1FA54, 'X'),
    +    (0x1FA60, 'V'),
    +    (0x1FA6E, 'X'),
    +    (0x1FA70, 'V'),
    +    (0x1FA74, 'X'),
    +    (0x1FA78, 'V'),
    +    (0x1FA7B, 'X'),
    +    (0x1FA80, 'V'),
    +    (0x1FA83, 'X'),
    +    (0x1FA90, 'V'),
    +    (0x1FA96, 'X'),
    +    (0x20000, 'V'),
    +    (0x2A6D7, 'X'),
    +    (0x2A700, 'V'),
    +    (0x2B735, 'X'),
    +    (0x2B740, 'V'),
    +    (0x2B81E, 'X'),
    +    (0x2B820, 'V'),
    +    (0x2CEA2, 'X'),
    +    (0x2CEB0, 'V'),
    +    (0x2EBE1, 'X'),
    +    (0x2F800, 'M', u'丽'),
    +    (0x2F801, 'M', u'丸'),
    +    (0x2F802, 'M', u'乁'),
    +    (0x2F803, 'M', u'𠄢'),
    +    (0x2F804, 'M', u'你'),
    +    (0x2F805, 'M', u'侮'),
    +    (0x2F806, 'M', u'侻'),
    +    (0x2F807, 'M', u'倂'),
    +    (0x2F808, 'M', u'偺'),
    +    (0x2F809, 'M', u'備'),
    +    (0x2F80A, 'M', u'僧'),
    +    (0x2F80B, 'M', u'像'),
    +    (0x2F80C, 'M', u'㒞'),
    +    (0x2F80D, 'M', u'𠘺'),
    +    (0x2F80E, 'M', u'免'),
    +    (0x2F80F, 'M', u'兔'),
    +    (0x2F810, 'M', u'兤'),
    +    (0x2F811, 'M', u'具'),
    +    (0x2F812, 'M', u'𠔜'),
    +    (0x2F813, 'M', u'㒹'),
    +    (0x2F814, 'M', u'內'),
    +    (0x2F815, 'M', u'再'),
    +    (0x2F816, 'M', u'𠕋'),
    +    (0x2F817, 'M', u'冗'),
    +    (0x2F818, 'M', u'冤'),
    +    (0x2F819, 'M', u'仌'),
    +    (0x2F81A, 'M', u'冬'),
    +    ]
    +
    +def _seg_74():
    +    return [
    +    (0x2F81B, 'M', u'况'),
    +    (0x2F81C, 'M', u'𩇟'),
    +    (0x2F81D, 'M', u'凵'),
    +    (0x2F81E, 'M', u'刃'),
    +    (0x2F81F, 'M', u'㓟'),
    +    (0x2F820, 'M', u'刻'),
    +    (0x2F821, 'M', u'剆'),
    +    (0x2F822, 'M', u'割'),
    +    (0x2F823, 'M', u'剷'),
    +    (0x2F824, 'M', u'㔕'),
    +    (0x2F825, 'M', u'勇'),
    +    (0x2F826, 'M', u'勉'),
    +    (0x2F827, 'M', u'勤'),
    +    (0x2F828, 'M', u'勺'),
    +    (0x2F829, 'M', u'包'),
    +    (0x2F82A, 'M', u'匆'),
    +    (0x2F82B, 'M', u'北'),
    +    (0x2F82C, 'M', u'卉'),
    +    (0x2F82D, 'M', u'卑'),
    +    (0x2F82E, 'M', u'博'),
    +    (0x2F82F, 'M', u'即'),
    +    (0x2F830, 'M', u'卽'),
    +    (0x2F831, 'M', u'卿'),
    +    (0x2F834, 'M', u'𠨬'),
    +    (0x2F835, 'M', u'灰'),
    +    (0x2F836, 'M', u'及'),
    +    (0x2F837, 'M', u'叟'),
    +    (0x2F838, 'M', u'𠭣'),
    +    (0x2F839, 'M', u'叫'),
    +    (0x2F83A, 'M', u'叱'),
    +    (0x2F83B, 'M', u'吆'),
    +    (0x2F83C, 'M', u'咞'),
    +    (0x2F83D, 'M', u'吸'),
    +    (0x2F83E, 'M', u'呈'),
    +    (0x2F83F, 'M', u'周'),
    +    (0x2F840, 'M', u'咢'),
    +    (0x2F841, 'M', u'哶'),
    +    (0x2F842, 'M', u'唐'),
    +    (0x2F843, 'M', u'啓'),
    +    (0x2F844, 'M', u'啣'),
    +    (0x2F845, 'M', u'善'),
    +    (0x2F847, 'M', u'喙'),
    +    (0x2F848, 'M', u'喫'),
    +    (0x2F849, 'M', u'喳'),
    +    (0x2F84A, 'M', u'嗂'),
    +    (0x2F84B, 'M', u'圖'),
    +    (0x2F84C, 'M', u'嘆'),
    +    (0x2F84D, 'M', u'圗'),
    +    (0x2F84E, 'M', u'噑'),
    +    (0x2F84F, 'M', u'噴'),
    +    (0x2F850, 'M', u'切'),
    +    (0x2F851, 'M', u'壮'),
    +    (0x2F852, 'M', u'城'),
    +    (0x2F853, 'M', u'埴'),
    +    (0x2F854, 'M', u'堍'),
    +    (0x2F855, 'M', u'型'),
    +    (0x2F856, 'M', u'堲'),
    +    (0x2F857, 'M', u'報'),
    +    (0x2F858, 'M', u'墬'),
    +    (0x2F859, 'M', u'𡓤'),
    +    (0x2F85A, 'M', u'売'),
    +    (0x2F85B, 'M', u'壷'),
    +    (0x2F85C, 'M', u'夆'),
    +    (0x2F85D, 'M', u'多'),
    +    (0x2F85E, 'M', u'夢'),
    +    (0x2F85F, 'M', u'奢'),
    +    (0x2F860, 'M', u'𡚨'),
    +    (0x2F861, 'M', u'𡛪'),
    +    (0x2F862, 'M', u'姬'),
    +    (0x2F863, 'M', u'娛'),
    +    (0x2F864, 'M', u'娧'),
    +    (0x2F865, 'M', u'姘'),
    +    (0x2F866, 'M', u'婦'),
    +    (0x2F867, 'M', u'㛮'),
    +    (0x2F868, 'X'),
    +    (0x2F869, 'M', u'嬈'),
    +    (0x2F86A, 'M', u'嬾'),
    +    (0x2F86C, 'M', u'𡧈'),
    +    (0x2F86D, 'M', u'寃'),
    +    (0x2F86E, 'M', u'寘'),
    +    (0x2F86F, 'M', u'寧'),
    +    (0x2F870, 'M', u'寳'),
    +    (0x2F871, 'M', u'𡬘'),
    +    (0x2F872, 'M', u'寿'),
    +    (0x2F873, 'M', u'将'),
    +    (0x2F874, 'X'),
    +    (0x2F875, 'M', u'尢'),
    +    (0x2F876, 'M', u'㞁'),
    +    (0x2F877, 'M', u'屠'),
    +    (0x2F878, 'M', u'屮'),
    +    (0x2F879, 'M', u'峀'),
    +    (0x2F87A, 'M', u'岍'),
    +    (0x2F87B, 'M', u'𡷤'),
    +    (0x2F87C, 'M', u'嵃'),
    +    (0x2F87D, 'M', u'𡷦'),
    +    (0x2F87E, 'M', u'嵮'),
    +    (0x2F87F, 'M', u'嵫'),
    +    (0x2F880, 'M', u'嵼'),
    +    (0x2F881, 'M', u'巡'),
    +    (0x2F882, 'M', u'巢'),
    +    ]
    +
    +def _seg_75():
    +    return [
    +    (0x2F883, 'M', u'㠯'),
    +    (0x2F884, 'M', u'巽'),
    +    (0x2F885, 'M', u'帨'),
    +    (0x2F886, 'M', u'帽'),
    +    (0x2F887, 'M', u'幩'),
    +    (0x2F888, 'M', u'㡢'),
    +    (0x2F889, 'M', u'𢆃'),
    +    (0x2F88A, 'M', u'㡼'),
    +    (0x2F88B, 'M', u'庰'),
    +    (0x2F88C, 'M', u'庳'),
    +    (0x2F88D, 'M', u'庶'),
    +    (0x2F88E, 'M', u'廊'),
    +    (0x2F88F, 'M', u'𪎒'),
    +    (0x2F890, 'M', u'廾'),
    +    (0x2F891, 'M', u'𢌱'),
    +    (0x2F893, 'M', u'舁'),
    +    (0x2F894, 'M', u'弢'),
    +    (0x2F896, 'M', u'㣇'),
    +    (0x2F897, 'M', u'𣊸'),
    +    (0x2F898, 'M', u'𦇚'),
    +    (0x2F899, 'M', u'形'),
    +    (0x2F89A, 'M', u'彫'),
    +    (0x2F89B, 'M', u'㣣'),
    +    (0x2F89C, 'M', u'徚'),
    +    (0x2F89D, 'M', u'忍'),
    +    (0x2F89E, 'M', u'志'),
    +    (0x2F89F, 'M', u'忹'),
    +    (0x2F8A0, 'M', u'悁'),
    +    (0x2F8A1, 'M', u'㤺'),
    +    (0x2F8A2, 'M', u'㤜'),
    +    (0x2F8A3, 'M', u'悔'),
    +    (0x2F8A4, 'M', u'𢛔'),
    +    (0x2F8A5, 'M', u'惇'),
    +    (0x2F8A6, 'M', u'慈'),
    +    (0x2F8A7, 'M', u'慌'),
    +    (0x2F8A8, 'M', u'慎'),
    +    (0x2F8A9, 'M', u'慌'),
    +    (0x2F8AA, 'M', u'慺'),
    +    (0x2F8AB, 'M', u'憎'),
    +    (0x2F8AC, 'M', u'憲'),
    +    (0x2F8AD, 'M', u'憤'),
    +    (0x2F8AE, 'M', u'憯'),
    +    (0x2F8AF, 'M', u'懞'),
    +    (0x2F8B0, 'M', u'懲'),
    +    (0x2F8B1, 'M', u'懶'),
    +    (0x2F8B2, 'M', u'成'),
    +    (0x2F8B3, 'M', u'戛'),
    +    (0x2F8B4, 'M', u'扝'),
    +    (0x2F8B5, 'M', u'抱'),
    +    (0x2F8B6, 'M', u'拔'),
    +    (0x2F8B7, 'M', u'捐'),
    +    (0x2F8B8, 'M', u'𢬌'),
    +    (0x2F8B9, 'M', u'挽'),
    +    (0x2F8BA, 'M', u'拼'),
    +    (0x2F8BB, 'M', u'捨'),
    +    (0x2F8BC, 'M', u'掃'),
    +    (0x2F8BD, 'M', u'揤'),
    +    (0x2F8BE, 'M', u'𢯱'),
    +    (0x2F8BF, 'M', u'搢'),
    +    (0x2F8C0, 'M', u'揅'),
    +    (0x2F8C1, 'M', u'掩'),
    +    (0x2F8C2, 'M', u'㨮'),
    +    (0x2F8C3, 'M', u'摩'),
    +    (0x2F8C4, 'M', u'摾'),
    +    (0x2F8C5, 'M', u'撝'),
    +    (0x2F8C6, 'M', u'摷'),
    +    (0x2F8C7, 'M', u'㩬'),
    +    (0x2F8C8, 'M', u'敏'),
    +    (0x2F8C9, 'M', u'敬'),
    +    (0x2F8CA, 'M', u'𣀊'),
    +    (0x2F8CB, 'M', u'旣'),
    +    (0x2F8CC, 'M', u'書'),
    +    (0x2F8CD, 'M', u'晉'),
    +    (0x2F8CE, 'M', u'㬙'),
    +    (0x2F8CF, 'M', u'暑'),
    +    (0x2F8D0, 'M', u'㬈'),
    +    (0x2F8D1, 'M', u'㫤'),
    +    (0x2F8D2, 'M', u'冒'),
    +    (0x2F8D3, 'M', u'冕'),
    +    (0x2F8D4, 'M', u'最'),
    +    (0x2F8D5, 'M', u'暜'),
    +    (0x2F8D6, 'M', u'肭'),
    +    (0x2F8D7, 'M', u'䏙'),
    +    (0x2F8D8, 'M', u'朗'),
    +    (0x2F8D9, 'M', u'望'),
    +    (0x2F8DA, 'M', u'朡'),
    +    (0x2F8DB, 'M', u'杞'),
    +    (0x2F8DC, 'M', u'杓'),
    +    (0x2F8DD, 'M', u'𣏃'),
    +    (0x2F8DE, 'M', u'㭉'),
    +    (0x2F8DF, 'M', u'柺'),
    +    (0x2F8E0, 'M', u'枅'),
    +    (0x2F8E1, 'M', u'桒'),
    +    (0x2F8E2, 'M', u'梅'),
    +    (0x2F8E3, 'M', u'𣑭'),
    +    (0x2F8E4, 'M', u'梎'),
    +    (0x2F8E5, 'M', u'栟'),
    +    (0x2F8E6, 'M', u'椔'),
    +    (0x2F8E7, 'M', u'㮝'),
    +    (0x2F8E8, 'M', u'楂'),
    +    ]
    +
    +def _seg_76():
    +    return [
    +    (0x2F8E9, 'M', u'榣'),
    +    (0x2F8EA, 'M', u'槪'),
    +    (0x2F8EB, 'M', u'檨'),
    +    (0x2F8EC, 'M', u'𣚣'),
    +    (0x2F8ED, 'M', u'櫛'),
    +    (0x2F8EE, 'M', u'㰘'),
    +    (0x2F8EF, 'M', u'次'),
    +    (0x2F8F0, 'M', u'𣢧'),
    +    (0x2F8F1, 'M', u'歔'),
    +    (0x2F8F2, 'M', u'㱎'),
    +    (0x2F8F3, 'M', u'歲'),
    +    (0x2F8F4, 'M', u'殟'),
    +    (0x2F8F5, 'M', u'殺'),
    +    (0x2F8F6, 'M', u'殻'),
    +    (0x2F8F7, 'M', u'𣪍'),
    +    (0x2F8F8, 'M', u'𡴋'),
    +    (0x2F8F9, 'M', u'𣫺'),
    +    (0x2F8FA, 'M', u'汎'),
    +    (0x2F8FB, 'M', u'𣲼'),
    +    (0x2F8FC, 'M', u'沿'),
    +    (0x2F8FD, 'M', u'泍'),
    +    (0x2F8FE, 'M', u'汧'),
    +    (0x2F8FF, 'M', u'洖'),
    +    (0x2F900, 'M', u'派'),
    +    (0x2F901, 'M', u'海'),
    +    (0x2F902, 'M', u'流'),
    +    (0x2F903, 'M', u'浩'),
    +    (0x2F904, 'M', u'浸'),
    +    (0x2F905, 'M', u'涅'),
    +    (0x2F906, 'M', u'𣴞'),
    +    (0x2F907, 'M', u'洴'),
    +    (0x2F908, 'M', u'港'),
    +    (0x2F909, 'M', u'湮'),
    +    (0x2F90A, 'M', u'㴳'),
    +    (0x2F90B, 'M', u'滋'),
    +    (0x2F90C, 'M', u'滇'),
    +    (0x2F90D, 'M', u'𣻑'),
    +    (0x2F90E, 'M', u'淹'),
    +    (0x2F90F, 'M', u'潮'),
    +    (0x2F910, 'M', u'𣽞'),
    +    (0x2F911, 'M', u'𣾎'),
    +    (0x2F912, 'M', u'濆'),
    +    (0x2F913, 'M', u'瀹'),
    +    (0x2F914, 'M', u'瀞'),
    +    (0x2F915, 'M', u'瀛'),
    +    (0x2F916, 'M', u'㶖'),
    +    (0x2F917, 'M', u'灊'),
    +    (0x2F918, 'M', u'災'),
    +    (0x2F919, 'M', u'灷'),
    +    (0x2F91A, 'M', u'炭'),
    +    (0x2F91B, 'M', u'𠔥'),
    +    (0x2F91C, 'M', u'煅'),
    +    (0x2F91D, 'M', u'𤉣'),
    +    (0x2F91E, 'M', u'熜'),
    +    (0x2F91F, 'X'),
    +    (0x2F920, 'M', u'爨'),
    +    (0x2F921, 'M', u'爵'),
    +    (0x2F922, 'M', u'牐'),
    +    (0x2F923, 'M', u'𤘈'),
    +    (0x2F924, 'M', u'犀'),
    +    (0x2F925, 'M', u'犕'),
    +    (0x2F926, 'M', u'𤜵'),
    +    (0x2F927, 'M', u'𤠔'),
    +    (0x2F928, 'M', u'獺'),
    +    (0x2F929, 'M', u'王'),
    +    (0x2F92A, 'M', u'㺬'),
    +    (0x2F92B, 'M', u'玥'),
    +    (0x2F92C, 'M', u'㺸'),
    +    (0x2F92E, 'M', u'瑇'),
    +    (0x2F92F, 'M', u'瑜'),
    +    (0x2F930, 'M', u'瑱'),
    +    (0x2F931, 'M', u'璅'),
    +    (0x2F932, 'M', u'瓊'),
    +    (0x2F933, 'M', u'㼛'),
    +    (0x2F934, 'M', u'甤'),
    +    (0x2F935, 'M', u'𤰶'),
    +    (0x2F936, 'M', u'甾'),
    +    (0x2F937, 'M', u'𤲒'),
    +    (0x2F938, 'M', u'異'),
    +    (0x2F939, 'M', u'𢆟'),
    +    (0x2F93A, 'M', u'瘐'),
    +    (0x2F93B, 'M', u'𤾡'),
    +    (0x2F93C, 'M', u'𤾸'),
    +    (0x2F93D, 'M', u'𥁄'),
    +    (0x2F93E, 'M', u'㿼'),
    +    (0x2F93F, 'M', u'䀈'),
    +    (0x2F940, 'M', u'直'),
    +    (0x2F941, 'M', u'𥃳'),
    +    (0x2F942, 'M', u'𥃲'),
    +    (0x2F943, 'M', u'𥄙'),
    +    (0x2F944, 'M', u'𥄳'),
    +    (0x2F945, 'M', u'眞'),
    +    (0x2F946, 'M', u'真'),
    +    (0x2F948, 'M', u'睊'),
    +    (0x2F949, 'M', u'䀹'),
    +    (0x2F94A, 'M', u'瞋'),
    +    (0x2F94B, 'M', u'䁆'),
    +    (0x2F94C, 'M', u'䂖'),
    +    (0x2F94D, 'M', u'𥐝'),
    +    (0x2F94E, 'M', u'硎'),
    +    ]
    +
    +def _seg_77():
    +    return [
    +    (0x2F94F, 'M', u'碌'),
    +    (0x2F950, 'M', u'磌'),
    +    (0x2F951, 'M', u'䃣'),
    +    (0x2F952, 'M', u'𥘦'),
    +    (0x2F953, 'M', u'祖'),
    +    (0x2F954, 'M', u'𥚚'),
    +    (0x2F955, 'M', u'𥛅'),
    +    (0x2F956, 'M', u'福'),
    +    (0x2F957, 'M', u'秫'),
    +    (0x2F958, 'M', u'䄯'),
    +    (0x2F959, 'M', u'穀'),
    +    (0x2F95A, 'M', u'穊'),
    +    (0x2F95B, 'M', u'穏'),
    +    (0x2F95C, 'M', u'𥥼'),
    +    (0x2F95D, 'M', u'𥪧'),
    +    (0x2F95F, 'X'),
    +    (0x2F960, 'M', u'䈂'),
    +    (0x2F961, 'M', u'𥮫'),
    +    (0x2F962, 'M', u'篆'),
    +    (0x2F963, 'M', u'築'),
    +    (0x2F964, 'M', u'䈧'),
    +    (0x2F965, 'M', u'𥲀'),
    +    (0x2F966, 'M', u'糒'),
    +    (0x2F967, 'M', u'䊠'),
    +    (0x2F968, 'M', u'糨'),
    +    (0x2F969, 'M', u'糣'),
    +    (0x2F96A, 'M', u'紀'),
    +    (0x2F96B, 'M', u'𥾆'),
    +    (0x2F96C, 'M', u'絣'),
    +    (0x2F96D, 'M', u'䌁'),
    +    (0x2F96E, 'M', u'緇'),
    +    (0x2F96F, 'M', u'縂'),
    +    (0x2F970, 'M', u'繅'),
    +    (0x2F971, 'M', u'䌴'),
    +    (0x2F972, 'M', u'𦈨'),
    +    (0x2F973, 'M', u'𦉇'),
    +    (0x2F974, 'M', u'䍙'),
    +    (0x2F975, 'M', u'𦋙'),
    +    (0x2F976, 'M', u'罺'),
    +    (0x2F977, 'M', u'𦌾'),
    +    (0x2F978, 'M', u'羕'),
    +    (0x2F979, 'M', u'翺'),
    +    (0x2F97A, 'M', u'者'),
    +    (0x2F97B, 'M', u'𦓚'),
    +    (0x2F97C, 'M', u'𦔣'),
    +    (0x2F97D, 'M', u'聠'),
    +    (0x2F97E, 'M', u'𦖨'),
    +    (0x2F97F, 'M', u'聰'),
    +    (0x2F980, 'M', u'𣍟'),
    +    (0x2F981, 'M', u'䏕'),
    +    (0x2F982, 'M', u'育'),
    +    (0x2F983, 'M', u'脃'),
    +    (0x2F984, 'M', u'䐋'),
    +    (0x2F985, 'M', u'脾'),
    +    (0x2F986, 'M', u'媵'),
    +    (0x2F987, 'M', u'𦞧'),
    +    (0x2F988, 'M', u'𦞵'),
    +    (0x2F989, 'M', u'𣎓'),
    +    (0x2F98A, 'M', u'𣎜'),
    +    (0x2F98B, 'M', u'舁'),
    +    (0x2F98C, 'M', u'舄'),
    +    (0x2F98D, 'M', u'辞'),
    +    (0x2F98E, 'M', u'䑫'),
    +    (0x2F98F, 'M', u'芑'),
    +    (0x2F990, 'M', u'芋'),
    +    (0x2F991, 'M', u'芝'),
    +    (0x2F992, 'M', u'劳'),
    +    (0x2F993, 'M', u'花'),
    +    (0x2F994, 'M', u'芳'),
    +    (0x2F995, 'M', u'芽'),
    +    (0x2F996, 'M', u'苦'),
    +    (0x2F997, 'M', u'𦬼'),
    +    (0x2F998, 'M', u'若'),
    +    (0x2F999, 'M', u'茝'),
    +    (0x2F99A, 'M', u'荣'),
    +    (0x2F99B, 'M', u'莭'),
    +    (0x2F99C, 'M', u'茣'),
    +    (0x2F99D, 'M', u'莽'),
    +    (0x2F99E, 'M', u'菧'),
    +    (0x2F99F, 'M', u'著'),
    +    (0x2F9A0, 'M', u'荓'),
    +    (0x2F9A1, 'M', u'菊'),
    +    (0x2F9A2, 'M', u'菌'),
    +    (0x2F9A3, 'M', u'菜'),
    +    (0x2F9A4, 'M', u'𦰶'),
    +    (0x2F9A5, 'M', u'𦵫'),
    +    (0x2F9A6, 'M', u'𦳕'),
    +    (0x2F9A7, 'M', u'䔫'),
    +    (0x2F9A8, 'M', u'蓱'),
    +    (0x2F9A9, 'M', u'蓳'),
    +    (0x2F9AA, 'M', u'蔖'),
    +    (0x2F9AB, 'M', u'𧏊'),
    +    (0x2F9AC, 'M', u'蕤'),
    +    (0x2F9AD, 'M', u'𦼬'),
    +    (0x2F9AE, 'M', u'䕝'),
    +    (0x2F9AF, 'M', u'䕡'),
    +    (0x2F9B0, 'M', u'𦾱'),
    +    (0x2F9B1, 'M', u'𧃒'),
    +    (0x2F9B2, 'M', u'䕫'),
    +    (0x2F9B3, 'M', u'虐'),
    +    ]
    +
    +def _seg_78():
    +    return [
    +    (0x2F9B4, 'M', u'虜'),
    +    (0x2F9B5, 'M', u'虧'),
    +    (0x2F9B6, 'M', u'虩'),
    +    (0x2F9B7, 'M', u'蚩'),
    +    (0x2F9B8, 'M', u'蚈'),
    +    (0x2F9B9, 'M', u'蜎'),
    +    (0x2F9BA, 'M', u'蛢'),
    +    (0x2F9BB, 'M', u'蝹'),
    +    (0x2F9BC, 'M', u'蜨'),
    +    (0x2F9BD, 'M', u'蝫'),
    +    (0x2F9BE, 'M', u'螆'),
    +    (0x2F9BF, 'X'),
    +    (0x2F9C0, 'M', u'蟡'),
    +    (0x2F9C1, 'M', u'蠁'),
    +    (0x2F9C2, 'M', u'䗹'),
    +    (0x2F9C3, 'M', u'衠'),
    +    (0x2F9C4, 'M', u'衣'),
    +    (0x2F9C5, 'M', u'𧙧'),
    +    (0x2F9C6, 'M', u'裗'),
    +    (0x2F9C7, 'M', u'裞'),
    +    (0x2F9C8, 'M', u'䘵'),
    +    (0x2F9C9, 'M', u'裺'),
    +    (0x2F9CA, 'M', u'㒻'),
    +    (0x2F9CB, 'M', u'𧢮'),
    +    (0x2F9CC, 'M', u'𧥦'),
    +    (0x2F9CD, 'M', u'䚾'),
    +    (0x2F9CE, 'M', u'䛇'),
    +    (0x2F9CF, 'M', u'誠'),
    +    (0x2F9D0, 'M', u'諭'),
    +    (0x2F9D1, 'M', u'變'),
    +    (0x2F9D2, 'M', u'豕'),
    +    (0x2F9D3, 'M', u'𧲨'),
    +    (0x2F9D4, 'M', u'貫'),
    +    (0x2F9D5, 'M', u'賁'),
    +    (0x2F9D6, 'M', u'贛'),
    +    (0x2F9D7, 'M', u'起'),
    +    (0x2F9D8, 'M', u'𧼯'),
    +    (0x2F9D9, 'M', u'𠠄'),
    +    (0x2F9DA, 'M', u'跋'),
    +    (0x2F9DB, 'M', u'趼'),
    +    (0x2F9DC, 'M', u'跰'),
    +    (0x2F9DD, 'M', u'𠣞'),
    +    (0x2F9DE, 'M', u'軔'),
    +    (0x2F9DF, 'M', u'輸'),
    +    (0x2F9E0, 'M', u'𨗒'),
    +    (0x2F9E1, 'M', u'𨗭'),
    +    (0x2F9E2, 'M', u'邔'),
    +    (0x2F9E3, 'M', u'郱'),
    +    (0x2F9E4, 'M', u'鄑'),
    +    (0x2F9E5, 'M', u'𨜮'),
    +    (0x2F9E6, 'M', u'鄛'),
    +    (0x2F9E7, 'M', u'鈸'),
    +    (0x2F9E8, 'M', u'鋗'),
    +    (0x2F9E9, 'M', u'鋘'),
    +    (0x2F9EA, 'M', u'鉼'),
    +    (0x2F9EB, 'M', u'鏹'),
    +    (0x2F9EC, 'M', u'鐕'),
    +    (0x2F9ED, 'M', u'𨯺'),
    +    (0x2F9EE, 'M', u'開'),
    +    (0x2F9EF, 'M', u'䦕'),
    +    (0x2F9F0, 'M', u'閷'),
    +    (0x2F9F1, 'M', u'𨵷'),
    +    (0x2F9F2, 'M', u'䧦'),
    +    (0x2F9F3, 'M', u'雃'),
    +    (0x2F9F4, 'M', u'嶲'),
    +    (0x2F9F5, 'M', u'霣'),
    +    (0x2F9F6, 'M', u'𩅅'),
    +    (0x2F9F7, 'M', u'𩈚'),
    +    (0x2F9F8, 'M', u'䩮'),
    +    (0x2F9F9, 'M', u'䩶'),
    +    (0x2F9FA, 'M', u'韠'),
    +    (0x2F9FB, 'M', u'𩐊'),
    +    (0x2F9FC, 'M', u'䪲'),
    +    (0x2F9FD, 'M', u'𩒖'),
    +    (0x2F9FE, 'M', u'頋'),
    +    (0x2FA00, 'M', u'頩'),
    +    (0x2FA01, 'M', u'𩖶'),
    +    (0x2FA02, 'M', u'飢'),
    +    (0x2FA03, 'M', u'䬳'),
    +    (0x2FA04, 'M', u'餩'),
    +    (0x2FA05, 'M', u'馧'),
    +    (0x2FA06, 'M', u'駂'),
    +    (0x2FA07, 'M', u'駾'),
    +    (0x2FA08, 'M', u'䯎'),
    +    (0x2FA09, 'M', u'𩬰'),
    +    (0x2FA0A, 'M', u'鬒'),
    +    (0x2FA0B, 'M', u'鱀'),
    +    (0x2FA0C, 'M', u'鳽'),
    +    (0x2FA0D, 'M', u'䳎'),
    +    (0x2FA0E, 'M', u'䳭'),
    +    (0x2FA0F, 'M', u'鵧'),
    +    (0x2FA10, 'M', u'𪃎'),
    +    (0x2FA11, 'M', u'䳸'),
    +    (0x2FA12, 'M', u'𪄅'),
    +    (0x2FA13, 'M', u'𪈎'),
    +    (0x2FA14, 'M', u'𪊑'),
    +    (0x2FA15, 'M', u'麻'),
    +    (0x2FA16, 'M', u'䵖'),
    +    (0x2FA17, 'M', u'黹'),
    +    (0x2FA18, 'M', u'黾'),
    +    ]
    +
    +def _seg_79():
    +    return [
    +    (0x2FA19, 'M', u'鼅'),
    +    (0x2FA1A, 'M', u'鼏'),
    +    (0x2FA1B, 'M', u'鼖'),
    +    (0x2FA1C, 'M', u'鼻'),
    +    (0x2FA1D, 'M', u'𪘀'),
    +    (0x2FA1E, 'X'),
    +    (0xE0100, 'I'),
    +    (0xE01F0, 'X'),
    +    ]
    +
    +uts46data = tuple(
    +    _seg_0()
    +    + _seg_1()
    +    + _seg_2()
    +    + _seg_3()
    +    + _seg_4()
    +    + _seg_5()
    +    + _seg_6()
    +    + _seg_7()
    +    + _seg_8()
    +    + _seg_9()
    +    + _seg_10()
    +    + _seg_11()
    +    + _seg_12()
    +    + _seg_13()
    +    + _seg_14()
    +    + _seg_15()
    +    + _seg_16()
    +    + _seg_17()
    +    + _seg_18()
    +    + _seg_19()
    +    + _seg_20()
    +    + _seg_21()
    +    + _seg_22()
    +    + _seg_23()
    +    + _seg_24()
    +    + _seg_25()
    +    + _seg_26()
    +    + _seg_27()
    +    + _seg_28()
    +    + _seg_29()
    +    + _seg_30()
    +    + _seg_31()
    +    + _seg_32()
    +    + _seg_33()
    +    + _seg_34()
    +    + _seg_35()
    +    + _seg_36()
    +    + _seg_37()
    +    + _seg_38()
    +    + _seg_39()
    +    + _seg_40()
    +    + _seg_41()
    +    + _seg_42()
    +    + _seg_43()
    +    + _seg_44()
    +    + _seg_45()
    +    + _seg_46()
    +    + _seg_47()
    +    + _seg_48()
    +    + _seg_49()
    +    + _seg_50()
    +    + _seg_51()
    +    + _seg_52()
    +    + _seg_53()
    +    + _seg_54()
    +    + _seg_55()
    +    + _seg_56()
    +    + _seg_57()
    +    + _seg_58()
    +    + _seg_59()
    +    + _seg_60()
    +    + _seg_61()
    +    + _seg_62()
    +    + _seg_63()
    +    + _seg_64()
    +    + _seg_65()
    +    + _seg_66()
    +    + _seg_67()
    +    + _seg_68()
    +    + _seg_69()
    +    + _seg_70()
    +    + _seg_71()
    +    + _seg_72()
    +    + _seg_73()
    +    + _seg_74()
    +    + _seg_75()
    +    + _seg_76()
    +    + _seg_77()
    +    + _seg_78()
    +    + _seg_79()
    +)
    diff --git a/server/www/packages/packages-windows/x86/ldap3/__init__.py b/server/www/packages/packages-windows/x86/ldap3/__init__.py
    index f6f1666..9bb5435 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/__init__.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/__init__.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -89,6 +89,7 @@ SCHEMA = 'SCHEMA'
     ALL = 'ALL'
     
     OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8'
    +OFFLINE_EDIR_9_1_4 = 'EDIR_9_1_4'
     OFFLINE_AD_2012_R2 = 'AD_2012_R2'
     OFFLINE_SLAPD_2_4 = 'SLAPD_2_4'
     OFFLINE_DS389_1_3_3 = 'DS389_1_3_3'
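
The new OFFLINE_EDIR_9_1_4 constant joins the existing OFFLINE_* family, which lets a client use a schema definition bundled with ldap3 instead of reading it from the directory. A hedged usage sketch, assuming the constant behaves like its siblings (e.g. OFFLINE_EDIR_8_8_8); host and credentials are placeholders:

from ldap3 import Server, Connection, OFFLINE_EDIR_9_1_4

# get_info=OFFLINE_* skips the schema round-trip and uses the canned
# eDirectory 9.1.4 definitions shipped inside the package.
server = Server('edir.example.com', get_info=OFFLINE_EDIR_9_1_4)
conn = Connection(server, user='cn=admin,o=org', password='secret')
if conn.bind():
    print(server.schema)  # populated from the offline definition
    conn.unbind()
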
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/__init__.py b/server/www/packages/packages-windows/x86/ldap3/abstract/__init__.py
    index c40f838..29aabb3 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/__init__.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/__init__.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/attrDef.py b/server/www/packages/packages-windows/x86/ldap3/abstract/attrDef.py
    index d954e25..caffb53 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/attrDef.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/attrDef.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     
     #
     # This file is part of ldap3.
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/attribute.py b/server/www/packages/packages-windows/x86/ldap3/abstract/attribute.py
    index 24f682c..5d33cc7 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/attribute.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/attribute.py
    @@ -1,285 +1,290 @@
    -"""
    -"""
    -
    -# Created on 2014.01.06
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from os import linesep
    -
    -from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES
    -from ..core.exceptions import LDAPCursorError
    -from ..utils.repr import to_stdout_encoding
    -from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    -
    -
    -# noinspection PyUnresolvedReferences
    -class Attribute(object):
    -    """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry
    -
    -    Attribute object is read only
    -
    -    - values: contain the processed attribute values
    -    - raw_values': contain the unprocessed attribute values
    -
    -
    -    """
    -
    -    def __init__(self, attr_def, entry, cursor):
    -        self.key = attr_def.key
    -        self.definition = attr_def
    -        self.values = []
    -        self.raw_values = []
    -        self.response = None
    -        self.entry = entry
    -        self.cursor = cursor
    -        other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None
    -        self.other_names = set(other_names) if other_names else None  # self.other_names is None if there are no short names, else is a set of secondary names
    -
    -    def __repr__(self):
    -        if len(self.values) == 1:
    -            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    -        elif len(self.values) > 1:
    -            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    -            filler = ' ' * (len(self.key) + 6)
    -            for value in self.values[1:]:
    -                r += linesep + filler + to_stdout_encoding(value)
    -        else:
    -            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('')
    -
    -        return r
    -
    -    def __str__(self):
    -        if len(self.values) == 1:
    -            return to_stdout_encoding(self.values[0])
    -        else:
    -            return to_stdout_encoding(self.values)
    -
    -    def __len__(self):
    -        return len(self.values)
    -
    -    def __iter__(self):
    -        return self.values.__iter__()
    -
    -    def __getitem__(self, item):
    -        return self.values[item]
    -
    -    def __eq__(self, other):
    -        try:
    -            if self.value == other:
    -                return True
    -        except Exception:
    -            return False
    -
    -    def __ne__(self, other):
    -        return not self == other
    -
    -    @property
    -    def value(self):
    -        """
    -        :return: The single value or a list of values of the attribute.
    -        """
    -        if not self.values:
    -            return None
    -
    -        return self.values[0] if len(self.values) == 1 else self.values
    -
    -
    -class OperationalAttribute(Attribute):
    -    """Operational attribute/values object. Include the search result of an
    -    operational attribute in an entry
    -
    -    OperationalAttribute object is read only
    -
    -    - values: contains the processed attribute values
    -    - raw_values: contains the unprocessed attribute values
    -
    -    It may not have an AttrDef
    -
    -    """
    -
    -    def __repr__(self):
    -        if len(self.values) == 1:
    -            r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
    -        elif len(self.values) > 1:
    -            r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
    -            filler = ' ' * (len(self.key) + 6)
    -            for value in sorted(self.values[1:]):
    -                r += linesep + filler + to_stdout_encoding(value)
    -        else:
    -            r = ''
    -
    -        return r
    -
    -
    -class WritableAttribute(Attribute):
    -    def __repr__(self):
    -        filler = ' ' * (len(self.key) + 6)
    -        if len(self.values) == 1:
    -            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    -        elif len(self.values) > 1:
    -            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    -            for value in self.values[1:]:
    -                r += linesep + filler + to_stdout_encoding(value)
    -        else:
    -            r = to_stdout_encoding(self.key) + to_stdout_encoding(': ')
    -        if self.definition.name in self.entry._changes:
    -            r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name])
    -        return r
    -
    -    def __iadd__(self, other):
    -        self.add(other)
    -        return Ellipsis  # hack to avoid calling set() in entry __setattr__
    -
    -    def __isub__(self, other):
    -        self.delete(other)
    -        return Ellipsis  # hack to avoid calling set_value in entry __setattr__
    -
    -    def _update_changes(self, changes, remove_old=False):
    -        # checks for friendly key in AttrDef and uses the real attribute name
    -        if self.definition and self.definition.name:
    -            key = self.definition.name
    -        else:
    -            key = self.key
    -
    -        if key not in self.entry._changes or remove_old:  # remove old changes (for removing attribute)
    -            self.entry._changes[key] = []
    -
    -        self.entry._changes[key].append(changes)
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn)
    -        self.entry._state.set_status(STATUS_PENDING_CHANGES)
    -
    -    def add(self, values):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    -        # new value for attribute to commit with a MODIFY_ADD
    -        if self.entry._state._initial_status == STATUS_VIRTUAL:
    -            error_message = 'cannot add an attribute value in a new entry'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    -            error_message = self.entry.entry_status + ' - cannot add attributes'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if values is None:
    -            error_message = 'value to add cannot be None'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if values is not None:
    -            validated = self.definition.validate(values)  # returns True, False or a value to substitute to the actual values
    -            if validated is False:
    -                error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -            elif validated is not True:  # a valid LDAP value equivalent to the actual values
    -                values = validated
    -        self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values]))
    -
    -    def set(self, values):
    -        # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    -        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    -            error_message = self.entry.entry_status + ' - cannot set attributes'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if values is None:
    -            error_message = 'new value cannot be None'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        validated = self.definition.validate(values)  # returns True, False or a value to substitute to the actual values
    -        if validated is False:
    -            error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        elif validated is not True:  # a valid LDAP value equivalent to the actual values
    -            values = validated
    -        self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True)
    -
    -    def delete(self, values):
    -        # value for attribute to delete in commit with a MODIFY_DELETE
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    -        if self.entry._state._initial_status == STATUS_VIRTUAL:
    -            error_message = 'cannot delete an attribute value in a new entry'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    -            error_message = self.entry.entry_status + ' - cannot delete attributes'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if values is None:
    -            error_message = 'value to delete cannot be None'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if not isinstance(values, SEQUENCE_TYPES):
    -            values = [values]
    -        for single_value in values:
    -            if single_value not in self.values:
    -                error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values))
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -        self._update_changes((MODIFY_DELETE, values))
    -
    -    def remove(self):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
    -        if self.entry._state._initial_status == STATUS_VIRTUAL:
    -            error_message = 'cannot remove an attribute in a new entry'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    -            error_message = self.entry.entry_status + ' - cannot remove attributes'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        self._update_changes((MODIFY_REPLACE, []), True)
    -
    -    def discard(self):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
    -        del self.entry._changes[self.key]
    -        if not self.entry._changes:
    -            self.entry._state.set_status(self.entry._state._initial_status)
    -
    -    @property
    -    def virtual(self):
    -        return False if len(self.values) else True
    -
    -    @property
    -    def changes(self):
    -        if self.key in self.entry._changes:
    -            return self.entry._changes[self.key]
    -        return None
    +"""
    +"""
    +
    +# Created on 2014.01.06
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from os import linesep
    +
    +from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES
    +from ..core.exceptions import LDAPCursorError
    +from ..utils.repr import to_stdout_encoding
    +from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    +
    +
    +# noinspection PyUnresolvedReferences
    +class Attribute(object):
    +    """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry
    +
    +    Attribute object is read only
    +
    +    - values: contain the processed attribute values
    +    - raw_values': contain the unprocessed attribute values
    +
    +
    +    """
    +
    +    def __init__(self, attr_def, entry, cursor):
    +        self.key = attr_def.key
    +        self.definition = attr_def
    +        self.values = []
    +        self.raw_values = []
    +        self.response = None
    +        self.entry = entry
    +        self.cursor = cursor
    +        other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None
    +        self.other_names = set(other_names) if other_names else None  # self.other_names is None if there are no short names, else is a set of secondary names
    +
    +    def __repr__(self):
    +        if len(self.values) == 1:
    +            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    +        elif len(self.values) > 1:
    +            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    +            filler = ' ' * (len(self.key) + 6)
    +            for value in self.values[1:]:
    +                r += linesep + filler + to_stdout_encoding(value)
    +        else:
    +            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('')
    +
    +        return r
    +
    +    def __str__(self):
    +        if len(self.values) == 1:
    +            return to_stdout_encoding(self.values[0])
    +        else:
    +            return to_stdout_encoding(self.values)
    +
    +    def __len__(self):
    +        return len(self.values)
    +
    +    def __iter__(self):
    +        return self.values.__iter__()
    +
    +    def __getitem__(self, item):
    +        return self.values[item]
    +
    +    def __getstate__(self):
    +        cpy = dict(self.__dict__)
    +        cpy['cursor'] = None
    +        return cpy
    +
+    def __eq__(self, other):
+        try:
+            return self.value == other  # return the comparison result instead of falling through to None
+        except Exception:
+            return False
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    @property
    +    def value(self):
    +        """
    +        :return: The single value or a list of values of the attribute.
    +        """
    +        if not self.values:
    +            return None
    +
    +        return self.values[0] if len(self.values) == 1 else self.values
    +
    +
    +class OperationalAttribute(Attribute):
    +    """Operational attribute/values object. Include the search result of an
    +    operational attribute in an entry
    +
    +    OperationalAttribute object is read only
    +
    +    - values: contains the processed attribute values
    +    - raw_values: contains the unprocessed attribute values
    +
    +    It may not have an AttrDef
    +
    +    """
    +
    +    def __repr__(self):
    +        if len(self.values) == 1:
    +            r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
    +        elif len(self.values) > 1:
    +            r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
    +            filler = ' ' * (len(self.key) + 6)
    +            for value in sorted(self.values[1:]):
    +                r += linesep + filler + to_stdout_encoding(value)
    +        else:
    +            r = ''
    +
    +        return r
    +
    +
    +class WritableAttribute(Attribute):
    +    def __repr__(self):
    +        filler = ' ' * (len(self.key) + 6)
    +        if len(self.values) == 1:
    +            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    +        elif len(self.values) > 1:
    +            r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
    +            for value in self.values[1:]:
    +                r += linesep + filler + to_stdout_encoding(value)
    +        else:
    +            r = to_stdout_encoding(self.key) + to_stdout_encoding(': ')
    +        if self.definition.name in self.entry._changes:
    +            r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name])
    +        return r
    +
    +    def __iadd__(self, other):
    +        self.add(other)
    +        return Ellipsis  # hack to avoid calling set() in entry __setattr__
    +
    +    def __isub__(self, other):
    +        self.delete(other)
    +        return Ellipsis  # hack to avoid calling set_value in entry __setattr__
    +
    +    def _update_changes(self, changes, remove_old=False):
    +        # checks for friendly key in AttrDef and uses the real attribute name
    +        if self.definition and self.definition.name:
    +            key = self.definition.name
    +        else:
    +            key = self.key
    +
    +        if key not in self.entry._changes or remove_old:  # remove old changes (for removing attribute)
    +            self.entry._changes[key] = []
    +
    +        self.entry._changes[key].append(changes)
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn)
    +        self.entry._state.set_status(STATUS_PENDING_CHANGES)
    +
    +    def add(self, values):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    +        # new value for attribute to commit with a MODIFY_ADD
    +        if self.entry._state._initial_status == STATUS_VIRTUAL:
    +            error_message = 'cannot perform a modify operation in a new entry'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    +            error_message = self.entry.entry_status + ' - cannot add attributes'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
+        if values is None:
+            error_message = 'value to add cannot be None'
+            if log_enabled(ERROR):
+                log(ERROR, '%s for <%s>', error_message, self)
+            raise LDAPCursorError(error_message)
+        validated = self.definition.validate(values)  # returns True, False or a value to substitute for the actual values
+        if validated is False:
+            error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
+            if log_enabled(ERROR):
+                log(ERROR, '%s for <%s>', error_message, self)
+            raise LDAPCursorError(error_message)
+        elif validated is not True:  # a valid LDAP value equivalent to the actual values
+            values = validated
+        self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values]))
    +
    +    def set(self, values):
    +        # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    +        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    +            error_message = self.entry.entry_status + ' - cannot set attributes'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if values is None:
    +            error_message = 'new value cannot be None'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
+        validated = self.definition.validate(values)  # returns True, False or a value to substitute for the actual values
    +        if validated is False:
    +            error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        elif validated is not True:  # a valid LDAP value equivalent to the actual values
    +            values = validated
    +        self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True)
    +
    +    def delete(self, values):
    +        # value for attribute to delete in commit with a MODIFY_DELETE
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
    +        if self.entry._state._initial_status == STATUS_VIRTUAL:
    +            error_message = 'cannot delete an attribute value in a new entry'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    +            error_message = self.entry.entry_status + ' - cannot delete attributes'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if values is None:
    +            error_message = 'value to delete cannot be None'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if not isinstance(values, SEQUENCE_TYPES):
    +            values = [values]
    +        for single_value in values:
    +            if single_value not in self.values:
    +                error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values))
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
    +        self._update_changes((MODIFY_DELETE, values))
    +
    +    def remove(self):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
    +        if self.entry._state._initial_status == STATUS_VIRTUAL:
    +            error_message = 'cannot remove an attribute in a new entry'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
    +            error_message = self.entry.entry_status + ' - cannot remove attributes'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        self._update_changes((MODIFY_REPLACE, []), True)
    +
    +    def discard(self):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
    +        del self.entry._changes[self.key]
    +        if not self.entry._changes:
    +            self.entry._state.set_status(self.entry._state._initial_status)
    +
    +    @property
    +    def virtual(self):
    +        return False if len(self.values) else True
    +
    +    @property
    +    def changes(self):
    +        if self.key in self.entry._changes:
    +            return self.entry._changes[self.key]
    +        return None
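A minimal usage sketch of the change-tracking API added above, assuming a reachable test directory; the server URL, base DN and attribute value below are placeholders. add()/set()/delete() queue (MODIFY_ADD/MODIFY_REPLACE/MODIFY_DELETE, values) tuples in entry._changes, and __iadd__/__isub__ route += and -= to add()/delete():

    from ldap3 import Server, Connection, ObjectDef, Reader, Writer

    conn = Connection(Server('ldap://localhost'), auto_bind=True)  # placeholder server
    person = ObjectDef('person', conn)
    reader = Reader(conn, person, 'ou=people,dc=example,dc=com')   # placeholder base DN
    reader.search()
    writer = Writer.from_cursor(reader)    # writable copies of the entries found
    entry = writer[0]
    entry.telephoneNumber += '555-0100'    # __iadd__ -> add() -> (MODIFY_ADD, ['555-0100'])
    entry.entry_commit_changes()           # flushes entry._changes to the server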
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/cursor.py b/server/www/packages/packages-windows/x86/ldap3/abstract/cursor.py
    index 275a384..9259a2c 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/cursor.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/cursor.py
    @@ -1,904 +1,906 @@
    -"""
    -"""
    -
    -# Created on 2014.01.06
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -from collections import namedtuple
    -from copy import deepcopy
    -from datetime import datetime
    -from os import linesep
    -from time import sleep
    -
    -from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE
    -from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
    -from ..abstract import STATUS_PENDING_CHANGES
    -from .attribute import Attribute, OperationalAttribute, WritableAttribute
    -from .attrDef import AttrDef
    -from .objectDef import ObjectDef
    -from .entry import Entry, WritableEntry
    -from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError
    -from ..core.results import RESULT_SUCCESS
    -from ..utils.ciDict import CaseInsensitiveWithAliasDict
    -from ..utils.dn import safe_dn, safe_rdn
    -from ..utils.conv import to_raw
    -from ..utils.config import get_config_parameter
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    -from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY
    -
    -Operation = namedtuple('Operation', ('request', 'result', 'response'))
    -
    -
    -def _ret_search_value(value):
    -    return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value
    -
    -
    -def _create_query_dict(query_text):
    -    """
    -    Create a dictionary with query key:value definitions
    -    query_text is a comma delimited key:value sequence
    -    """
    -    query_dict = dict()
    -    if query_text:
    -        for arg_value_str in query_text.split(','):
    -            if ':' in arg_value_str:
    -                arg_value_list = arg_value_str.split(':')
    -                query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip()
    -
    -    return query_dict
    -
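As an aside, the simplified query grammar parsed above takes comma-separated 'key: value' components, with multiple values for one key separated by ';' (a small illustrative example, not taken from the patch):

    _create_query_dict('cn: john; beth, sn: smith')
    # returns {'cn': 'john; beth', 'sn': 'smith'}; _validate_query() later splits values on ';'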
    -
    -class Cursor(object):
    -    # entry_class and attribute_class define the type of entry and attribute used by the cursor
    -    # entry_initial_status defines the initial status of a entry
    -    # entry_class = Entry, must be defined in subclasses
    -    # attribute_class = Attribute, must be defined in subclasses
    -    # entry_initial_status = STATUS, must be defined in subclasses
    -
    -    def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    -        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    -        self.connection = connection
    -        self.get_operational_attributes = get_operational_attributes
    -        if connection._deferred_bind or connection._deferred_open:  # probably a lazy connection, tries to bind
    -            connection._fire_deferred()
    -
    -        if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)):
    -            object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class)
    -        self.definition = object_def
    -        if attributes:  # checks if requested attributes are defined in ObjectDef
    -            not_defined_attributes = []
    -            if isinstance(attributes, STRING_TYPES):
    -                attributes = [attributes]
    -
    -            for attribute in attributes:
    -                if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def:
    -                    not_defined_attributes.append(attribute)
    -
    -            if not_defined_attributes:
    -                error_message = 'Attributes \'%s\' non in definition' % ', '.join(not_defined_attributes)
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -
    -        self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition])
    -        self.controls = controls
    -        self.execution_time = None
    -        self.entries = []
    -        self.schema = self.connection.server.schema
    -        self._do_not_reset = False  # used for refreshing entry in entry_refresh() without removing all entries from the Cursor
    -        self._operation_history = list()  # a list storing all the requests, results and responses for the last cursor operation
    -
    -    def __repr__(self):
    -        r = 'CURSOR : ' + self.__class__.__name__ + linesep
    -        r += 'CONN   : ' + str(self.connection) + linesep
    -        r += 'DEFS   : ' + ', '.join(self.definition._object_class)
    -        if self.definition._auxiliary_class:
    -            r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']'
    -        r += linesep
    -        # for attr_def in sorted(self.definition):
    -        #     r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', '
    -        # if r[-2] == ',':
    -        #     r = r[:-2]
    -        # r += ']' + linesep
    -        if hasattr(self, 'attributes'):
    -            r += 'ATTRS  : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep
    -        if isinstance(self, Reader):
    -            if hasattr(self, 'base'):
    -                r += 'BASE   : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep
    -            if hasattr(self, '_query') and self._query:
    -                r += 'QUERY  : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
    -            if hasattr(self, 'validated_query') and self.validated_query:
    -                r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
    -            if hasattr(self, 'query_filter') and self.query_filter:
    -                r += 'FILTER : ' + repr(self.query_filter) + linesep
    -
    -        if hasattr(self, 'execution_time') and self.execution_time:
    -            r += 'ENTRIES: ' + str(len(self.entries))
    -            r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep
    -
    -        if self.failed:
    -            r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']'
    -
    -        return r
    -
    -    def __str__(self):
    -        return self.__repr__()
    -
    -    def __iter__(self):
    -        return self.entries.__iter__()
    -
    -    def __getitem__(self, item):
    -        """Return indexed item, if index is not found then try to sequentially search in DN of entries.
    -        If only one entry is found return it else raise a KeyError exception. The exception message
    -        includes the number of entries that matches, if less than 10 entries match then show the DNs
    -        in the exception message.
    -        """
    -        try:
    -            return self.entries[item]
    -        except TypeError:
    -            pass
    -
    -        if isinstance(item, STRING_TYPES):
    -            found = self.match_dn(item)
    -
    -            if len(found) == 1:
    -                return found[0]
    -            elif len(found) > 1:
    -                error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']'))
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise KeyError(error_message)
    -
    -        error_message = 'no entry found'
    -        if log_enabled(ERROR):
    -            log(ERROR, '%s for <%s>', error_message, self)
    -        raise KeyError(error_message)
    -
    -    def __len__(self):
    -        return len(self.entries)
    -
    -    if str is not bytes:  # Python 3
    -        def __bool__(self):  # needed to make the cursor appears as existing in "if cursor:" even if there are no entries
    -            return True
    -    else:  # Python 2
    -        def __nonzero__(self):
    -            return True
    -
    -    def _get_attributes(self, response, attr_defs, entry):
    -        """Assign the result of the LDAP query to the Entry object dictionary.
    -
    -        If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute.
    -
    -        Returns the default value for missing attributes.
    -        If the 'dereference_dn' in AttrDef is a ObjectDef then the attribute values are treated as distinguished name and the relevant entry is retrieved and stored in the attribute value.
    -
    -        """
    -        conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
    -        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    -        attributes = CaseInsensitiveWithAliasDict()
    -        used_attribute_names = set()
    -        for attr in attr_defs:
    -            attr_def = attr_defs[attr]
    -            attribute_name = None
    -            for attr_name in response['attributes']:
    -                if attr_def.name.lower() == attr_name.lower():
    -                    attribute_name = attr_name
    -                    break
    -
    -            if attribute_name or attr_def.default is not NotImplemented:  # attribute value found in result or default value present - NotImplemented allows use of None as default
    -                attribute = self.attribute_class(attr_def, entry, self)
    -                attribute.response = response
    -                attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None
    -                if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list():
    -                    attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name])
    -                else:
    -                    if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()):
    -                        attribute.values = response['attributes'][attribute_name]
    -                    else:
    -                        attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default]
    -                if not isinstance(attribute.values, list):  # force attribute values to list (if attribute is single-valued)
    -                    attribute.values = [attribute.values]
    -                if attr_def.dereference_dn:  # try to get object referenced in value
    -                    if attribute.values:
    -                        temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls)
    -                        temp_values = []
    -                        for element in attribute.values:
    -                            if entry.entry_dn != element:
    -                                temp_values.append(temp_reader.search_object(element))
    -                            else:
    -                                error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name)
    -                                if log_enabled(ERROR):
    -                                    log(ERROR, '%s for <%s>', error_message, self)
    -                                raise LDAPObjectDereferenceError(error_message)
    -                        del temp_reader  # remove the temporary Reader
    -                        attribute.values = temp_values
    -                attributes[attribute.key] = attribute
    -                if attribute.other_names:
    -                    attributes.set_alias(attribute.key, attribute.other_names)
    -                if attr_def.other_names:
    -                    attributes.set_alias(attribute.key, attr_def.other_names)
    -                used_attribute_names.add(attribute_name)
    -
    -        if self.attributes:
    -            used_attribute_names.update(self.attributes)
    -
    -        for attribute_name in response['attributes']:
    -            if attribute_name not in used_attribute_names:
    -                operational_attribute = False
    -                # check if the type is an operational attribute
    -                if attribute_name in self.schema.attribute_types:
    -                    if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]:
    -                        operational_attribute = True
    -                else:
    -                    operational_attribute = True
    -                if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def:
    -                    error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn)
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '%s for <%s>', error_message, self)
    -                    raise LDAPCursorError(error_message)
    -                attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self)
    -                attribute.raw_values = response['raw_attributes'][attribute_name]
    -                attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]]
    -                if (conf_operational_attribute_prefix + attribute_name) not in attributes:
    -                    attributes[conf_operational_attribute_prefix + attribute_name] = attribute
    -
    -        return attributes
    -
    -    def match_dn(self, dn):
    -        """Return entries with text in DN"""
    -        matched = []
    -        for entry in self.entries:
    -            if dn.lower() in entry.entry_dn.lower():
    -                matched.append(entry)
    -        return matched
    -
    -    def match(self, attributes, value):
    -        """Return entries with text in one of the specified attributes"""
    -        matched = []
    -        if not isinstance(attributes, SEQUENCE_TYPES):
    -            attributes = [attributes]
    -
    -        for entry in self.entries:
    -            found = False
    -            for attribute in attributes:
    -                if attribute in entry:
    -                    for attr_value in entry[attribute].values:
    -                        if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower():
    -                            found = True
    -                        elif value == attr_value:
    -                            found = True
    -                        if found:
    -                            matched.append(entry)
    -                            break
    -                    if found:
    -                        break
    -                    # checks raw values, tries to convert value to byte
    -                    raw_value = to_raw(value)
    -                    if isinstance(raw_value, (bytes, bytearray)):
    -                        for attr_value in entry[attribute].raw_values:
    -                            if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower():
    -                                found = True
    -                            elif raw_value == attr_value:
    -                                found = True
    -                            if found:
    -                                matched.append(entry)
    -                                break
    -                        if found:
    -                            break
    -        return matched
    -
    -    def _create_entry(self, response):
    -        if not response['type'] == 'searchResEntry':
    -            return None
    -
    -        entry = self.entry_class(response['dn'], self)  # define an Entry (writable or readonly), as specified in the cursor definition
    -        entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry)
    -        entry._state.entry_raw_attributes = deepcopy(response['raw_attributes'])
    -
    -        entry._state.response = response
    -        entry._state.read_time = datetime.now()
    -        entry._state.set_status(self.entry_initial_status)
    -        for attr in entry:  # returns the whole attribute object
    -            entry.__dict__[attr.key] = attr
    -
    -        return entry
    -
    -    def _execute_query(self, query_scope, attributes):
    -        if not self.connection:
    -            error_message = 'no connection established'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        old_query_filter = None
    -        if query_scope == BASE:  # requesting a single object so an always-valid filter is set
    -            if hasattr(self, 'query_filter'):  # only Reader has a query filter
    -                old_query_filter = self.query_filter
    -            self.query_filter = '(objectclass=*)'
    -        else:
    -            self._create_query_filter()
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self)
    -        with self.connection:
    -            result = self.connection.search(search_base=self.base,
    -                                            search_filter=self.query_filter,
    -                                            search_scope=query_scope,
    -                                            dereference_aliases=self.dereference_aliases,
    -                                            attributes=attributes if attributes else list(self.attributes),
    -                                            get_operational_attributes=self.get_operational_attributes,
    -                                            controls=self.controls)
    -            if not self.connection.strategy.sync:
    -                response, result, request = self.connection.get_response(result, get_request=True)
    -            else:
    -                response = self.connection.response
    -                result = self.connection.result
    -                request = self.connection.request
    -
    -        self._store_operation_in_history(request, result, response)
    -
    -        if self._do_not_reset:  # trick to not remove entries when using _refresh()
    -            return self._create_entry(response[0])
    -
    -        self.entries = []
    -        for r in response:
    -            entry = self._create_entry(r)
    -            if entry is not None:
    -                self.entries.append(entry)
    -                if 'objectClass' in entry:
    -                    for object_class in entry.objectClass:
    -                        if self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class:
    -                            # add auxiliary class to object definition
    -                            self.definition._auxiliary_class.append(object_class)
    -                            self.definition._populate_attr_defs(object_class)
    -        self.execution_time = datetime.now()
    -
    -        if old_query_filter:  # requesting a single object so an always-valid filter is set
    -            self.query_filter = old_query_filter
    -
    -    def remove(self, entry):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self)
    -        self.entries.remove(entry)
    -
    -    def _reset_history(self):
    -        self._operation_history = list()
    -
    -    def _store_operation_in_history(self, request, result, response):
    -        self._operation_history.append(Operation(request, result, response))
    -
    -    @property
    -    def operations(self):
    -        return self._operation_history
    -
    -    @property
    -    def errors(self):
    -        return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS]
    -
    -    @property
    -    def failed(self):
    -        if hasattr(self, '_operation_history'):
    -            return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history])
    -
    -
    -class Reader(Cursor):
    -    """Reader object to perform searches:
    -
    -    :param connection: the LDAP connection object to use
    -    :type connection: LDAPConnection
    -    :param object_def: the ObjectDef of the LDAP object returned
    -    :type object_def: ObjectDef
    -    :param query: the simplified query (will be transformed in an LDAP filter)
    -    :type query: str
    -    :param base: starting base of the search
    -    :type base: str
    -    :param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR)
    -    :type components_in_and: bool
    -    :param sub_tree: specify if the search must be performed ad Single Level (False) or Whole SubTree (True)
    -    :type sub_tree: bool
    -    :param get_operational_attributes: specify if operational attributes are returned or not
    -    :type get_operational_attributes: bool
    -    :param controls: controls to be used in search
    -    :type controls: tuple
    -
    -    """
    -    entry_class = Entry  # entries are read_only
    -    attribute_class = Attribute  # attributes are read_only
    -    entry_initial_status = STATUS_READ
    -
    -    def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    -        Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
    -        self._components_in_and = components_in_and
    -        self.sub_tree = sub_tree
    -        self._query = query
    -        self.base = base
    -        self.dereference_aliases = DEREF_ALWAYS
    -        self.validated_query = None
    -        self._query_dict = dict()
    -        self._validated_query_dict = dict()
    -        self.query_filter = None
    -        self.reset()
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Reader Cursor: <%r>', self)
    -
    -    @property
    -    def query(self):
    -        return self._query
    -
    -    @query.setter
    -    def query(self, value):
    -        self._query = value
    -        self.reset()
    -
    -    @property
    -    def components_in_and(self):
    -        return self._components_in_and
    -
    -    @components_in_and.setter
    -    def components_in_and(self, value):
    -        self._components_in_and = value
    -        self.reset()
    -
    -    def clear(self):
    -        """Clear the Reader search parameters
    -
    -        """
    -        self.dereference_aliases = DEREF_ALWAYS
    -        self._reset_history()
    -
    -    def reset(self):
    -        """Clear all the Reader parameters
    -
    -        """
    -        self.clear()
    -        self.validated_query = None
    -        self._query_dict = dict()
    -        self._validated_query_dict = dict()
    -        self.execution_time = None
    -        self.query_filter = None
    -        self.entries = []
    -        self._create_query_filter()
    -
    -    def _validate_query(self):
    -        """Processes the text query and verifies that the requested friendly names are in the Reader dictionary
    -        If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised
    -
    -        """
    -        if not self._query_dict:
    -            self._query_dict = _create_query_dict(self._query)
    -
    -        query = ''
    -        for d in sorted(self._query_dict):
    -            attr = d[1:] if d[0] in '&|' else d
    -            for attr_def in self.definition:
    -                if ''.join(attr.split()).lower() == attr_def.key.lower():
    -                    attr = attr_def.key
    -                    break
    -            if attr in self.definition:
    -                vals = sorted(self._query_dict[d].split(';'))
    -
    -                query += (d[0] + attr if d[0] in '&|' else attr) + ': '
    -                for val in vals:
    -                    val = val.strip()
    -                    val_not = True if val[0] == '!' else False
    -                    val_search_operator = '='  # default
    -                    if val_not:
    -                        if val[1:].lstrip()[0] not in '=<>~':
    -                            value = val[1:].lstrip()
    -                        else:
    -                            val_search_operator = val[1:].lstrip()[0]
    -                            value = val[1:].lstrip()[1:]
    -                    else:
    -                        if val[0] not in '=<>~':
    -                            value = val.lstrip()
    -                        else:
    -                            val_search_operator = val[0]
    -                            value = val[1:].lstrip()
    -
    -                    if self.definition[attr].validate:
    -                        validated = self.definition[attr].validate(value)  # returns True, False or a value to substitute to the actual values
    -                        if validated is False:
    -                            error_message = 'validation failed for attribute %s and value %s' % (d, val)
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '%s for <%s>', error_message, self)
    -                            raise LDAPCursorError(error_message)
    -                        elif validated is not True:  # a valid LDAP value equivalent to the actual values
    -                                value = validated
    -                    if val_not:
    -                        query += '!' + val_search_operator + str(value)
    -                    else:
    -                        query += val_search_operator + str(value)
    -
    -                    query += ';'
    -                query = query[:-1] + ', '
    -            else:
    -                error_message = 'attribute \'%s\' not in definition' % attr
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -        self.validated_query = query[:-2]
    -        self._validated_query_dict = _create_query_dict(self.validated_query)
    -
    -    def _create_query_filter(self):
    -        """Converts the query dictionary to the filter text"""
    -        self.query_filter = ''
    -
    -        if self.definition._object_class:
    -            self.query_filter += '(&'
    -            if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1:
    -                self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')'
    -            elif isinstance(self.definition._object_class, SEQUENCE_TYPES):
    -                self.query_filter += '(&'
    -                for object_class in self.definition._object_class:
    -                    self.query_filter += '(objectClass=' + object_class + ')'
    -                self.query_filter += ')'
    -            else:
    -                error_message = 'object class must be a string or a list'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -
    -        if self._query and self._query.startswith('(') and self._query.endswith(')'):  # query is already an LDAP filter
    -            if 'objectclass' not in self._query.lower():
    -                self.query_filter += self._query + ')'  # if objectclass not in filter adds from definition
    -            else:
    -                self.query_filter = self._query
    -            return
    -        elif self._query:  # if a simplified filter is present
    -            if not self.components_in_and:
    -                self.query_filter += '(|'
    -            elif not self.definition._object_class:
    -                self.query_filter += '(&'
    -
    -            self._validate_query()
    -
    -            attr_counter = 0
    -            for attr in sorted(self._validated_query_dict):
    -                attr_counter += 1
    -                multi = True if ';' in self._validated_query_dict[attr] else False
    -                vals = sorted(self._validated_query_dict[attr].split(';'))
    -                attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr]
    -                if attr_def.pre_query:
    -                    modvals = []
    -                    for val in vals:
    -                        modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:]))
    -                    vals = modvals
    -                if multi:
    -                    if attr[0] in '&|':
    -                        self.query_filter += '(' + attr[0]
    -                    else:
    -                        self.query_filter += '(|'
    -
    -                for val in vals:
    -                    if val[0] == '!':
    -                        self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))'
    -                    else:
    -                        self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')'
    -                if multi:
    -                    self.query_filter += ')'
    -
    -            if not self.components_in_and:
    -                self.query_filter += '))'
    -            else:
    -                self.query_filter += ')'
    -
    -            if not self.definition._object_class and attr_counter == 1:  # removes unneeded starting filter
    -                self.query_filter = self.query_filter[2: -1]
    -
    -            if self.query_filter == '(|)' or self.query_filter == '(&)':  # removes empty filter
    -                self.query_filter = ''
    -        else:  # no query, remove unneeded leading (&
    -            self.query_filter = self.query_filter[2:]
    -
    -    def search(self, attributes=None):
    -        """Perform the LDAP search
    -
    -        :return: Entries found in search
    -
    -        """
    -        self.clear()
    -        query_scope = SUBTREE if self.sub_tree else LEVEL
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'performing search in <%s>', self)
    -        self._execute_query(query_scope, attributes)
    -
    -        return self.entries
    -
    -    def search_object(self, entry_dn=None, attributes=None):  # base must be a single dn
    -        """Perform the LDAP search operation SINGLE_OBJECT scope
    -
    -        :return: Entry found in search
    -
    -        """
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'performing object search in <%s>', self)
    -        self.clear()
    -        if entry_dn:
    -            old_base = self.base
    -            self.base = entry_dn
    -            self._execute_query(BASE, attributes)
    -            self.base = old_base
    -        else:
    -            self._execute_query(BASE, attributes)
    -
    -        return self.entries[0] if len(self.entries) > 0 else None
    -
    -    def search_level(self, attributes=None):
    -        """Perform the LDAP search operation with SINGLE_LEVEL scope
    -
    -        :return: Entries found in search
    -
    -        """
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'performing single level search in <%s>', self)
    -        self.clear()
    -        self._execute_query(LEVEL, attributes)
    -
    -        return self.entries
    -
    -    def search_subtree(self, attributes=None):
    -        """Perform the LDAP search operation WHOLE_SUBTREE scope
    -
    -        :return: Entries found in search
    -
    -        """
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'performing whole subtree search in <%s>', self)
    -        self.clear()
    -        self._execute_query(SUBTREE, attributes)
    -
    -        return self.entries
    -
    -    def _entries_generator(self, responses):
    -        for response in responses:
    -            yield self._create_entry(response)
    -
    -    def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None):
    -        """Perform a paged search, can be called as an Iterator
    -
    -        :param attributes: optional attributes to search
    -        :param paged_size: number of entries returned in each search
    -        :type paged_size: int
    -        :param paged_criticality: specify if server must not execute the search if it is not capable of paging searches
    -        :type paged_criticality: bool
    -        :param generator: if True the paged searches are executed while generating the entries,
    -                          if False all the paged searches are execute before returning the generator
    -        :type generator: bool
    -        :return: Entries found in search
    -
    -        """
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size))
    -        if not self.connection:
    -            error_message = 'no connection established'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -
    -        self.clear()
    -        self._create_query_filter()
    -        self.entries = []
    -        self.execution_time = datetime.now()
    -        response = self.connection.extend.standard.paged_search(search_base=self.base,
    -                                                                search_filter=self.query_filter,
    -                                                                search_scope=SUBTREE if self.sub_tree else LEVEL,
    -                                                                dereference_aliases=self.dereference_aliases,
    -                                                                attributes=attributes if attributes else self.attributes,
    -                                                                get_operational_attributes=self.get_operational_attributes,
    -                                                                controls=self.controls,
    -                                                                paged_size=paged_size,
    -                                                                paged_criticality=paged_criticality,
    -                                                                generator=generator)
    -        if generator:
    -            return self._entries_generator(response)
    -        else:
    -            return list(self._entries_generator(response))
    -
    -
    -class Writer(Cursor):
    -    entry_class = WritableEntry
    -    attribute_class = WritableAttribute
    -    entry_initial_status = STATUS_WRITABLE
    -
    -    @staticmethod
    -    def from_cursor(cursor, connection=None, object_def=None, custom_validator=None):
    -        if connection is None:
    -            connection = cursor.connection
    -        if object_def is None:
    -            object_def = cursor.definition
    -        writer = Writer(connection, object_def, attributes=cursor.attributes)
    -        for entry in cursor.entries:
    -            if isinstance(cursor, Reader):
    -                entry.entry_writable(object_def, writer, custom_validator=custom_validator)
    -            elif isinstance(cursor, Writer):
    -                pass
    -            else:
    -                error_message = 'unknown cursor type %s' % str(type(cursor))
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s', error_message)
    -                raise LDAPCursorError(error_message)
    -        writer.execution_time = cursor.execution_time
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor)
    -        return writer
    -
    -    @staticmethod
    -    def from_response(connection, object_def, response=None):
    -        if response is None:
    -            if not connection.strategy.sync:
    -                error_message = 'with asynchronous strategies response must be specified'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s', error_message)
    -                raise LDAPCursorError(error_message)
    -            elif connection.response:
    -                response = connection.response
    -            else:
    -                error_message = 'response not present'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s', error_message)
    -                raise LDAPCursorError(error_message)
    -        writer = Writer(connection, object_def)
    -
    -        for resp in response:
    -            if resp['type'] == 'searchResEntry':
    -                entry = writer._create_entry(resp)
    -                writer.entries.append(entry)
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Writer Cursor <%r> from response', writer)
    -        return writer
    -
    -    def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    -        Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
    -        self.dereference_aliases = DEREF_NEVER
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Writer Cursor: <%r>', self)
    -
    -    def commit(self, refresh=True):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'committed changes for <%s>', self)
    -        self._reset_history()
    -        successful = True
    -        for entry in self.entries:
    -            if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False):
    -                successful = False
    -
    -        self.execution_time = datetime.now()
    -
    -        return successful
    -
    -    def discard(self):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'discarded changes for <%s>', self)
    -        for entry in self.entries:
    -            entry.entry_discard_changes()
    -
    -    def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None):  # base must be a single dn
    -        """Performs the LDAP search operation SINGLE_OBJECT scope
    -
    -        :return: Entry found in search
    -
    -        """
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self)
    -        if not self.connection:
    -            error_message = 'no connection established'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -
    -        response = []
    -        with self.connection:
    -            counter = 0
    -            while counter < tries:
    -                result = self.connection.search(search_base=entry_dn,
    -                                                search_filter='(objectclass=*)',
    -                                                search_scope=BASE,
    -                                                dereference_aliases=DEREF_NEVER,
    -                                                attributes=attributes if attributes else self.attributes,
    -                                                get_operational_attributes=self.get_operational_attributes,
    -                                                controls=controls)
    -                if not self.connection.strategy.sync:
    -                    response, result, request = self.connection.get_response(result, get_request=True)
    -                else:
    -                    response = self.connection.response
    -                    result = self.connection.result
    -                    request = self.connection.request
    -
    -                if result['result'] in [RESULT_SUCCESS]:
    -                    break
    -                sleep(seconds)
    -                counter += 1
    -                self._store_operation_in_history(request, result, response)
    -
    -        if len(response) == 1:
    -            return self._create_entry(response[0])
    -        elif len(response) == 0:
    -            return None
    -
    -        error_message = 'more than 1 entry returned for a single object search'
    -        if log_enabled(ERROR):
    -            log(ERROR, '%s for <%s>', error_message, self)
    -        raise LDAPCursorError(error_message)
    -
    -    def new(self, dn):
    -        if log_enabled(BASIC):
    -            log(BASIC, 'creating new entry <%s> for <%s>', dn, self)
    -        dn = safe_dn(dn)
    -        for entry in self.entries:  # checks if dn is already used in an cursor entry
    -            if entry.entry_dn == dn:
    -                error_message = 'dn already present in cursor'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -        rdns = safe_rdn(dn, decompose=True)
    -        entry = self.entry_class(dn, self)  # defines a new empty Entry
    -        for attr in entry.entry_mandatory_attributes:  # defines all mandatory attributes as virtual
    -                entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self)
    -                entry.__dict__[attr] = entry._state.attributes[attr]
    -        entry.objectclass.set(self.definition._object_class)
    -        for rdn in rdns:  # adds virtual attributes from rdns in entry name (should be more than one with + syntax)
    -            if rdn[0] in entry._state.definition._attributes:
    -                rdn_name = entry._state.definition._attributes[rdn[0]].name  # normalize case folding
    -                if rdn_name not in entry._state.attributes:
    -                    entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self)
    -                    entry.__dict__[rdn_name] = entry._state.attributes[rdn_name]
    -                entry.__dict__[rdn_name].set(rdn[1])
    -            else:
    -                error_message = 'rdn type \'%s\' not in object class definition' % rdn[0]
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -        entry._state.set_status(STATUS_VIRTUAL)  # set intial status
    -        entry._state.set_status(STATUS_PENDING_CHANGES)  # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING
    -        self.entries.append(entry)
    -        return entry
    -
    -    def refresh_entry(self, entry, tries=4, seconds=2):
    -        conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
    -
    -        self._do_not_reset = True
    -        attr_list = []
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self)
    -        for attr in entry._state.attributes:  # check friendly attribute name in AttrDef, do not check operational attributes
    -            if attr.lower().startswith(conf_operational_attribute_prefix.lower()):
    -                continue
    -            if entry._state.definition[attr].name:
    -                attr_list.append(entry._state.definition[attr].name)
    -            else:
    -                attr_list.append(entry._state.definition[attr].key)
    -
    -        temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds)  # if any attributes is added adds only to the entry not to the definition
    -        self._do_not_reset = False
    -        if temp_entry:
    -            temp_entry._state.origin = entry._state.origin
    -            entry.__dict__.clear()
    -            entry.__dict__['_state'] = temp_entry._state
    -            for attr in entry._state.attributes:  # returns the attribute key
    -                entry.__dict__[attr] = entry._state.attributes[attr]
    -
    -            for attr in entry.entry_attributes:  # if any attribute of the class was deleted makes it virtual
    -                if attr not in entry._state.attributes and attr in entry.entry_definition._attributes:
    -                    entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self)
    -                    entry.__dict__[attr] = entry._state.attributes[attr]
    -            entry._state.set_status(entry._state._initial_status)
    -            return True
    -        return False
    +"""
    +"""
    +
    +# Created on 2014.01.06
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +from collections import namedtuple
    +from copy import deepcopy
    +from datetime import datetime
    +from os import linesep
    +from time import sleep
    +
    +from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE
    +from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
    +from ..abstract import STATUS_PENDING_CHANGES
    +from .attribute import Attribute, OperationalAttribute, WritableAttribute
    +from .attrDef import AttrDef
    +from .objectDef import ObjectDef
    +from .entry import Entry, WritableEntry
    +from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError
    +from ..core.results import RESULT_SUCCESS
    +from ..utils.ciDict import CaseInsensitiveWithAliasDict
    +from ..utils.dn import safe_dn, safe_rdn
    +from ..utils.conv import to_raw
    +from ..utils.config import get_config_parameter
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    +from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY
    +
    +Operation = namedtuple('Operation', ('request', 'result', 'response'))
    +
    +
    +def _ret_search_value(value):
    +    return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value
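+
+# Worked examples (illustrative): _ret_search_value inserts the '=' that the
+# simplified query syntax lets you omit after '<', '>' or '~':
+#   _ret_search_value('>20')    # -> '>=20'
+#   _ret_search_value('>=20')   # -> '>=20' (already well formed)
+#   _ret_search_value('smith')  # -> 'smith' (no operator, unchanged)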
    +
    +
    +def _create_query_dict(query_text):
    +    """
    +    Create a dictionary with query key:value definitions
    +    query_text is a comma delimited key:value sequence
    +    """
    +    query_dict = dict()
    +    if query_text:
    +        for arg_value_str in query_text.split(','):
    +            if ':' in arg_value_str:
    +                arg_value_list = arg_value_str.split(':')
    +                query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip()
    +
    +    return query_dict
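+
+# For example (illustrative values): _create_query_dict('cn: john, sn: smith')
+# returns {'cn': 'john', 'sn': 'smith'}; tokens without a colon are ignored.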
    +
    +
    +class Cursor(object):
    +    # entry_class and attribute_class define the type of entry and attribute used by the cursor
+    # entry_initial_status defines the initial status of an entry
    +    # entry_class = Entry, must be defined in subclasses
    +    # attribute_class = Attribute, must be defined in subclasses
    +    # entry_initial_status = STATUS, must be defined in subclasses
    +
    +    def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    +        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    +        self.connection = connection
    +        self.get_operational_attributes = get_operational_attributes
    +        if connection._deferred_bind or connection._deferred_open:  # probably a lazy connection, tries to bind
    +            connection._fire_deferred()
    +
    +        if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)):
    +            if connection.closed:  # try to open connection if closed to read schema
    +                connection.bind()
    +            object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class)
    +        self.definition = object_def
    +        if attributes:  # checks if requested attributes are defined in ObjectDef
    +            not_defined_attributes = []
    +            if isinstance(attributes, STRING_TYPES):
    +                attributes = [attributes]
    +
    +            for attribute in attributes:
    +                if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def:
    +                    not_defined_attributes.append(attribute)
    +
    +            if not_defined_attributes:
+                error_message = 'Attributes \'%s\' not in definition' % ', '.join(not_defined_attributes)
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
    +
    +        self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition])
    +        self.controls = controls
    +        self.execution_time = None
    +        self.entries = []
    +        self.schema = self.connection.server.schema
    +        self._do_not_reset = False  # used for refreshing entry in entry_refresh() without removing all entries from the Cursor
    +        self._operation_history = list()  # a list storing all the requests, results and responses for the last cursor operation
    +
    +    def __repr__(self):
    +        r = 'CURSOR : ' + self.__class__.__name__ + linesep
    +        r += 'CONN   : ' + str(self.connection) + linesep
    +        r += 'DEFS   : ' + ', '.join(self.definition._object_class)
    +        if self.definition._auxiliary_class:
    +            r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']'
    +        r += linesep
    +        # for attr_def in sorted(self.definition):
    +        #     r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', '
    +        # if r[-2] == ',':
    +        #     r = r[:-2]
    +        # r += ']' + linesep
    +        if hasattr(self, 'attributes'):
    +            r += 'ATTRS  : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep
    +        if isinstance(self, Reader):
    +            if hasattr(self, 'base'):
    +                r += 'BASE   : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep
    +            if hasattr(self, '_query') and self._query:
    +                r += 'QUERY  : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
    +            if hasattr(self, 'validated_query') and self.validated_query:
    +                r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
    +            if hasattr(self, 'query_filter') and self.query_filter:
    +                r += 'FILTER : ' + repr(self.query_filter) + linesep
    +
    +        if hasattr(self, 'execution_time') and self.execution_time:
    +            r += 'ENTRIES: ' + str(len(self.entries))
    +            r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep
    +
    +        if self.failed:
    +            r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']'
    +
    +        return r
    +
    +    def __str__(self):
    +        return self.__repr__()
    +
    +    def __iter__(self):
    +        return self.entries.__iter__()
    +
    +    def __getitem__(self, item):
    +        """Return indexed item, if index is not found then try to sequentially search in DN of entries.
    +        If only one entry is found return it else raise a KeyError exception. The exception message
    +        includes the number of entries that matches, if less than 10 entries match then show the DNs
    +        in the exception message.
    +        """
    +        try:
    +            return self.entries[item]
    +        except TypeError:
    +            pass
    +
    +        if isinstance(item, STRING_TYPES):
    +            found = self.match_dn(item)
    +
    +            if len(found) == 1:
    +                return found[0]
    +            elif len(found) > 1:
    +                error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']'))
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise KeyError(error_message)
    +
    +        error_message = 'no entry found'
    +        if log_enabled(ERROR):
    +            log(ERROR, '%s for <%s>', error_message, self)
    +        raise KeyError(error_message)
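+
+    # Illustrative indexing (hypothetical DN): cursor[0] returns the first entry,
+    # while cursor['cn=john'] falls back to a case-insensitive substring match on
+    # the DNs of the entries and returns the single matching entry.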
    +
    +    def __len__(self):
    +        return len(self.entries)
    +
    +    if str is not bytes:  # Python 3
+        def __bool__(self):  # makes the cursor evaluate as True in "if cursor:" even when it holds no entries
    +            return True
    +    else:  # Python 2
    +        def __nonzero__(self):
    +            return True
    +
    +    def _get_attributes(self, response, attr_defs, entry):
    +        """Assign the result of the LDAP query to the Entry object dictionary.
    +
    +        If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute.
    +
    +        Returns the default value for missing attributes.
    +        If the 'dereference_dn' in AttrDef is a ObjectDef then the attribute values are treated as distinguished name and the relevant entry is retrieved and stored in the attribute value.
    +
    +        """
    +        conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
    +        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    +        attributes = CaseInsensitiveWithAliasDict()
    +        used_attribute_names = set()
    +        for attr in attr_defs:
    +            attr_def = attr_defs[attr]
    +            attribute_name = None
    +            for attr_name in response['attributes']:
    +                if attr_def.name.lower() == attr_name.lower():
    +                    attribute_name = attr_name
    +                    break
    +
    +            if attribute_name or attr_def.default is not NotImplemented:  # attribute value found in result or default value present - NotImplemented allows use of None as default
    +                attribute = self.attribute_class(attr_def, entry, self)
    +                attribute.response = response
    +                attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None
    +                if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list():
    +                    attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name])
    +                else:
    +                    if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()):
    +                        attribute.values = response['attributes'][attribute_name]
    +                    else:
    +                        attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default]
    +                if not isinstance(attribute.values, list):  # force attribute values to list (if attribute is single-valued)
    +                    attribute.values = [attribute.values]
    +                if attr_def.dereference_dn:  # try to get object referenced in value
    +                    if attribute.values:
    +                        temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls)
    +                        temp_values = []
    +                        for element in attribute.values:
    +                            if entry.entry_dn != element:
    +                                temp_values.append(temp_reader.search_object(element))
    +                            else:
    +                                error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name)
    +                                if log_enabled(ERROR):
    +                                    log(ERROR, '%s for <%s>', error_message, self)
    +                                raise LDAPObjectDereferenceError(error_message)
    +                        del temp_reader  # remove the temporary Reader
    +                        attribute.values = temp_values
    +                attributes[attribute.key] = attribute
    +                if attribute.other_names:
    +                    attributes.set_alias(attribute.key, attribute.other_names)
    +                if attr_def.other_names:
    +                    attributes.set_alias(attribute.key, attr_def.other_names)
    +                used_attribute_names.add(attribute_name)
    +
    +        if self.attributes:
    +            used_attribute_names.update(self.attributes)
    +
    +        for attribute_name in response['attributes']:
    +            if attribute_name not in used_attribute_names:
    +                operational_attribute = False
    +                # check if the type is an operational attribute
    +                if attribute_name in self.schema.attribute_types:
    +                    if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]:
    +                        operational_attribute = True
    +                else:
    +                    operational_attribute = True
    +                if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def:
    +                    error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn)
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '%s for <%s>', error_message, self)
    +                    raise LDAPCursorError(error_message)
    +                attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self)
    +                attribute.raw_values = response['raw_attributes'][attribute_name]
    +                attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]]
    +                if (conf_operational_attribute_prefix + attribute_name) not in attributes:
    +                    attributes[conf_operational_attribute_prefix + attribute_name] = attribute
    +
    +        return attributes
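+
+    # Illustrative AttrDef usage (hypothetical definitions): with
+    #   AttrDef('sn', post_query=lambda key, values: [v.upper() for v in values])
+    # the attribute stores the transformed values instead of the raw ones, while
+    #   AttrDef('manager', dereference_dn=person_object_def)
+    # replaces each DN value with the Entry read from that DN.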
    +
    +    def match_dn(self, dn):
    +        """Return entries with text in DN"""
    +        matched = []
    +        for entry in self.entries:
    +            if dn.lower() in entry.entry_dn.lower():
    +                matched.append(entry)
    +        return matched
    +
    +    def match(self, attributes, value):
    +        """Return entries with text in one of the specified attributes"""
    +        matched = []
    +        if not isinstance(attributes, SEQUENCE_TYPES):
    +            attributes = [attributes]
    +
    +        for entry in self.entries:
    +            found = False
    +            for attribute in attributes:
    +                if attribute in entry:
    +                    for attr_value in entry[attribute].values:
    +                        if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower():
    +                            found = True
    +                        elif value == attr_value:
    +                            found = True
    +                        if found:
    +                            matched.append(entry)
    +                            break
    +                    if found:
    +                        break
+                    # checks raw values, tries to convert value to bytes
    +                    raw_value = to_raw(value)
    +                    if isinstance(raw_value, (bytes, bytearray)):
    +                        for attr_value in entry[attribute].raw_values:
    +                            if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower():
    +                                found = True
    +                            elif raw_value == attr_value:
    +                                found = True
    +                            if found:
    +                                matched.append(entry)
    +                                break
    +                        if found:
    +                            break
    +        return matched
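+
+    # e.g. (illustrative): cursor.match('mail', '@example.com') returns the entries
+    # whose 'mail' attribute contains that text; passing a list of attribute names
+    # widens the match to any of them.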
    +
    +    def _create_entry(self, response):
    +        if not response['type'] == 'searchResEntry':
    +            return None
    +
    +        entry = self.entry_class(response['dn'], self)  # define an Entry (writable or readonly), as specified in the cursor definition
    +        entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry)
    +        entry._state.raw_attributes = deepcopy(response['raw_attributes'])
    +
    +        entry._state.response = response
    +        entry._state.read_time = datetime.now()
    +        entry._state.set_status(self.entry_initial_status)
    +        for attr in entry:  # returns the whole attribute object
    +            entry.__dict__[attr.key] = attr
    +
    +        return entry
    +
    +    def _execute_query(self, query_scope, attributes):
    +        if not self.connection:
    +            error_message = 'no connection established'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        old_query_filter = None
    +        if query_scope == BASE:  # requesting a single object so an always-valid filter is set
    +            if hasattr(self, 'query_filter'):  # only Reader has a query filter
    +                old_query_filter = self.query_filter
    +            self.query_filter = '(objectclass=*)'
    +        else:
    +            self._create_query_filter()
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self)
    +        with self.connection:
    +            result = self.connection.search(search_base=self.base,
    +                                            search_filter=self.query_filter,
    +                                            search_scope=query_scope,
    +                                            dereference_aliases=self.dereference_aliases,
    +                                            attributes=attributes if attributes else list(self.attributes),
    +                                            get_operational_attributes=self.get_operational_attributes,
    +                                            controls=self.controls)
    +            if not self.connection.strategy.sync:
    +                response, result, request = self.connection.get_response(result, get_request=True)
    +            else:
    +                response = self.connection.response
    +                result = self.connection.result
    +                request = self.connection.request
    +
    +        self._store_operation_in_history(request, result, response)
    +
+        if self._do_not_reset:  # avoids clearing the entries when refreshing a single one (see refresh_entry)
    +            return self._create_entry(response[0])
    +
    +        self.entries = []
    +        for r in response:
    +            entry = self._create_entry(r)
    +            if entry is not None:
    +                self.entries.append(entry)
    +                if 'objectClass' in entry:
    +                    for object_class in entry.objectClass:
    +                        if self.schema and self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class:
    +                            # add auxiliary class to object definition
    +                            self.definition._auxiliary_class.append(object_class)
    +                            self.definition._populate_attr_defs(object_class)
    +        self.execution_time = datetime.now()
    +
+        if old_query_filter:  # restore the original query filter after a single-object search
    +            self.query_filter = old_query_filter
    +
    +    def remove(self, entry):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self)
    +        self.entries.remove(entry)
    +
    +    def _reset_history(self):
    +        self._operation_history = list()
    +
    +    def _store_operation_in_history(self, request, result, response):
    +        self._operation_history.append(Operation(request, result, response))
    +
    +    @property
    +    def operations(self):
    +        return self._operation_history
    +
    +    @property
    +    def errors(self):
    +        return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS]
    +
    +    @property
    +    def failed(self):
    +        if hasattr(self, '_operation_history'):
    +            return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history])
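+
+    # Typical check after an operation (sketch): if cursor.failed, inspect
+    # cursor.errors for the failing Operation tuples and cursor.operations
+    # for the full request/result/response history.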
    +
    +
    +class Reader(Cursor):
    +    """Reader object to perform searches:
    +
    +    :param connection: the LDAP connection object to use
    +    :type connection: LDAPConnection
    +    :param object_def: the ObjectDef of the LDAP object returned
    +    :type object_def: ObjectDef
    +    :param query: the simplified query (will be transformed in an LDAP filter)
    +    :type query: str
    +    :param base: starting base of the search
    +    :type base: str
    +    :param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR)
    +    :type components_in_and: bool
+    :param sub_tree: specify if the search must be performed at Single Level (False) or Whole SubTree (True)
    +    :type sub_tree: bool
    +    :param get_operational_attributes: specify if operational attributes are returned or not
    +    :type get_operational_attributes: bool
    +    :param controls: controls to be used in search
    +    :type controls: tuple
    +
    +    """
    +    entry_class = Entry  # entries are read_only
    +    attribute_class = Attribute  # attributes are read_only
    +    entry_initial_status = STATUS_READ
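+
+    # Minimal usage sketch (server, credentials and DIT names are placeholders):
+    #   from ldap3 import Server, Connection
+    #   conn = Connection(Server('ldap://ldap.example.com'), user='...', password='...', auto_bind=True)
+    #   r = Reader(conn, 'inetOrgPerson', 'ou=people,dc=example,dc=com', 'sn: smith')
+    #   entries = r.search()  # the simplified query is translated to an LDAP filter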
    +
    +    def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    +        Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
    +        self._components_in_and = components_in_and
    +        self.sub_tree = sub_tree
    +        self._query = query
    +        self.base = base
    +        self.dereference_aliases = DEREF_ALWAYS
    +        self.validated_query = None
    +        self._query_dict = dict()
    +        self._validated_query_dict = dict()
    +        self.query_filter = None
    +        self.reset()
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Reader Cursor: <%r>', self)
    +
    +    @property
    +    def query(self):
    +        return self._query
    +
    +    @query.setter
    +    def query(self, value):
    +        self._query = value
    +        self.reset()
    +
    +    @property
    +    def components_in_and(self):
    +        return self._components_in_and
    +
    +    @components_in_and.setter
    +    def components_in_and(self, value):
    +        self._components_in_and = value
    +        self.reset()
    +
    +    def clear(self):
    +        """Clear the Reader search parameters
    +
    +        """
    +        self.dereference_aliases = DEREF_ALWAYS
    +        self._reset_history()
    +
    +    def reset(self):
    +        """Clear all the Reader parameters
    +
    +        """
    +        self.clear()
    +        self.validated_query = None
    +        self._query_dict = dict()
    +        self._validated_query_dict = dict()
    +        self.execution_time = None
    +        self.query_filter = None
    +        self.entries = []
    +        self._create_query_filter()
    +
    +    def _validate_query(self):
    +        """Processes the text query and verifies that the requested friendly names are in the Reader dictionary
    +        If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised
    +
    +        """
    +        if not self._query_dict:
    +            self._query_dict = _create_query_dict(self._query)
    +
    +        query = ''
    +        for d in sorted(self._query_dict):
    +            attr = d[1:] if d[0] in '&|' else d
    +            for attr_def in self.definition:
    +                if ''.join(attr.split()).lower() == attr_def.key.lower():
    +                    attr = attr_def.key
    +                    break
    +            if attr in self.definition:
    +                vals = sorted(self._query_dict[d].split(';'))
    +
    +                query += (d[0] + attr if d[0] in '&|' else attr) + ': '
    +                for val in vals:
    +                    val = val.strip()
    +                    val_not = True if val[0] == '!' else False
    +                    val_search_operator = '='  # default
    +                    if val_not:
    +                        if val[1:].lstrip()[0] not in '=<>~':
    +                            value = val[1:].lstrip()
    +                        else:
    +                            val_search_operator = val[1:].lstrip()[0]
    +                            value = val[1:].lstrip()[1:]
    +                    else:
    +                        if val[0] not in '=<>~':
    +                            value = val.lstrip()
    +                        else:
    +                            val_search_operator = val[0]
    +                            value = val[1:].lstrip()
    +
    +                    if self.definition[attr].validate:
    +                        validated = self.definition[attr].validate(value)  # returns True, False or a value to substitute to the actual values
    +                        if validated is False:
    +                            error_message = 'validation failed for attribute %s and value %s' % (d, val)
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '%s for <%s>', error_message, self)
    +                            raise LDAPCursorError(error_message)
+                        elif validated is not True:  # a valid LDAP value equivalent to the actual values
+                            value = validated
    +                    if val_not:
    +                        query += '!' + val_search_operator + str(value)
    +                    else:
    +                        query += val_search_operator + str(value)
    +
    +                    query += ';'
    +                query = query[:-1] + ', '
    +            else:
    +                error_message = 'attribute \'%s\' not in definition' % attr
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
    +        self.validated_query = query[:-2]
    +        self._validated_query_dict = _create_query_dict(self.validated_query)
    +
    +    def _create_query_filter(self):
    +        """Converts the query dictionary to the filter text"""
    +        self.query_filter = ''
    +
    +        if self.definition._object_class:
    +            self.query_filter += '(&'
    +            if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1:
    +                self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')'
    +            elif isinstance(self.definition._object_class, SEQUENCE_TYPES):
    +                self.query_filter += '(&'
    +                for object_class in self.definition._object_class:
    +                    self.query_filter += '(objectClass=' + object_class + ')'
    +                self.query_filter += ')'
    +            else:
    +                error_message = 'object class must be a string or a list'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
    +
    +        if self._query and self._query.startswith('(') and self._query.endswith(')'):  # query is already an LDAP filter
    +            if 'objectclass' not in self._query.lower():
+                self.query_filter += self._query + ')'  # the filter has no objectclass assertion, keep the one added from the definition
    +            else:
    +                self.query_filter = self._query
    +            return
    +        elif self._query:  # if a simplified filter is present
    +            if not self.components_in_and:
    +                self.query_filter += '(|'
    +            elif not self.definition._object_class:
    +                self.query_filter += '(&'
    +
    +            self._validate_query()
    +
    +            attr_counter = 0
    +            for attr in sorted(self._validated_query_dict):
    +                attr_counter += 1
    +                multi = True if ';' in self._validated_query_dict[attr] else False
    +                vals = sorted(self._validated_query_dict[attr].split(';'))
    +                attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr]
    +                if attr_def.pre_query:
    +                    modvals = []
    +                    for val in vals:
    +                        modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:]))
    +                    vals = modvals
    +                if multi:
    +                    if attr[0] in '&|':
    +                        self.query_filter += '(' + attr[0]
    +                    else:
    +                        self.query_filter += '(|'
    +
    +                for val in vals:
    +                    if val[0] == '!':
    +                        self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))'
    +                    else:
    +                        self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')'
    +                if multi:
    +                    self.query_filter += ')'
    +
    +            if not self.components_in_and:
    +                self.query_filter += '))'
    +            else:
    +                self.query_filter += ')'
    +
    +            if not self.definition._object_class and attr_counter == 1:  # removes unneeded starting filter
    +                self.query_filter = self.query_filter[2: -1]
    +
    +            if self.query_filter == '(|)' or self.query_filter == '(&)':  # removes empty filter
    +                self.query_filter = ''
    +        else:  # no query, remove unneeded leading (&
    +            self.query_filter = self.query_filter[2:]
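+
+    # For example (illustrative): with object class 'inetOrgPerson' and the simplified
+    # query 'cn: john; beth, sn: smith' in AND, the generated filter is
+    #   (&(objectClass=inetOrgPerson)(|(cn=beth)(cn=john))(sn=smith))
+    # (values are sorted; multiple values of the same attribute are ORed together).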
    +
    +    def search(self, attributes=None):
    +        """Perform the LDAP search
    +
    +        :return: Entries found in search
    +
    +        """
    +        self.clear()
    +        query_scope = SUBTREE if self.sub_tree else LEVEL
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'performing search in <%s>', self)
    +        self._execute_query(query_scope, attributes)
    +
    +        return self.entries
    +
    +    def search_object(self, entry_dn=None, attributes=None):  # base must be a single dn
    +        """Perform the LDAP search operation SINGLE_OBJECT scope
    +
    +        :return: Entry found in search
    +
    +        """
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'performing object search in <%s>', self)
    +        self.clear()
    +        if entry_dn:
    +            old_base = self.base
    +            self.base = entry_dn
    +            self._execute_query(BASE, attributes)
    +            self.base = old_base
    +        else:
    +            self._execute_query(BASE, attributes)
    +
    +        return self.entries[0] if len(self.entries) > 0 else None
    +
    +    def search_level(self, attributes=None):
    +        """Perform the LDAP search operation with SINGLE_LEVEL scope
    +
    +        :return: Entries found in search
    +
    +        """
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'performing single level search in <%s>', self)
    +        self.clear()
    +        self._execute_query(LEVEL, attributes)
    +
    +        return self.entries
    +
    +    def search_subtree(self, attributes=None):
    +        """Perform the LDAP search operation WHOLE_SUBTREE scope
    +
    +        :return: Entries found in search
    +
    +        """
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'performing whole subtree search in <%s>', self)
    +        self.clear()
    +        self._execute_query(SUBTREE, attributes)
    +
    +        return self.entries
    +
    +    def _entries_generator(self, responses):
    +        for response in responses:
    +            yield self._create_entry(response)
    +
    +    def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None):
    +        """Perform a paged search, can be called as an Iterator
    +
    +        :param attributes: optional attributes to search
    +        :param paged_size: number of entries returned in each search
    +        :type paged_size: int
+        :param paged_criticality: specify if the server must refuse the search if it is not capable of paged searches
+        :type paged_criticality: bool
+        :param generator: if True the paged searches are executed while generating the entries,
+                          if False all the paged searches are executed before returning the generator
    +        :type generator: bool
    +        :return: Entries found in search
    +
    +        """
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size))
    +        if not self.connection:
    +            error_message = 'no connection established'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +
    +        self.clear()
    +        self._create_query_filter()
    +        self.entries = []
    +        self.execution_time = datetime.now()
    +        response = self.connection.extend.standard.paged_search(search_base=self.base,
    +                                                                search_filter=self.query_filter,
    +                                                                search_scope=SUBTREE if self.sub_tree else LEVEL,
    +                                                                dereference_aliases=self.dereference_aliases,
    +                                                                attributes=attributes if attributes else self.attributes,
    +                                                                get_operational_attributes=self.get_operational_attributes,
    +                                                                controls=self.controls,
    +                                                                paged_size=paged_size,
    +                                                                paged_criticality=paged_criticality,
    +                                                                generator=generator)
    +        if generator:
    +            return self._entries_generator(response)
    +        else:
    +            return list(self._entries_generator(response))
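+
+    # Illustrative paged usage: process a large result set 100 entries at a time
+    # without keeping it all in memory:
+    #   for entry in reader.search_paged(paged_size=100):
+    #       handle(entry)  # 'handle' is a placeholder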
    +
    +
    +class Writer(Cursor):
    +    entry_class = WritableEntry
    +    attribute_class = WritableAttribute
    +    entry_initial_status = STATUS_WRITABLE
    +
    +    @staticmethod
    +    def from_cursor(cursor, connection=None, object_def=None, custom_validator=None):
    +        if connection is None:
    +            connection = cursor.connection
    +        if object_def is None:
    +            object_def = cursor.definition
    +        writer = Writer(connection, object_def, attributes=cursor.attributes)
    +        for entry in cursor.entries:
    +            if isinstance(cursor, Reader):
    +                entry.entry_writable(object_def, writer, custom_validator=custom_validator)
    +            elif isinstance(cursor, Writer):
    +                pass
    +            else:
    +                error_message = 'unknown cursor type %s' % str(type(cursor))
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s', error_message)
    +                raise LDAPCursorError(error_message)
    +        writer.execution_time = cursor.execution_time
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor)
    +        return writer
    +
    +    @staticmethod
    +    def from_response(connection, object_def, response=None):
    +        if response is None:
    +            if not connection.strategy.sync:
    +                error_message = 'with asynchronous strategies response must be specified'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s', error_message)
    +                raise LDAPCursorError(error_message)
    +            elif connection.response:
    +                response = connection.response
    +            else:
    +                error_message = 'response not present'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s', error_message)
    +                raise LDAPCursorError(error_message)
    +        writer = Writer(connection, object_def)
    +
    +        for resp in response:
    +            if resp['type'] == 'searchResEntry':
    +                entry = writer._create_entry(resp)
    +                writer.entries.append(entry)
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Writer Cursor <%r> from response', writer)
    +        return writer
    +
    +    def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
    +        Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
    +        self.dereference_aliases = DEREF_NEVER
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Writer Cursor: <%r>', self)
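+
+    # Typical flow (sketch): read entries with a Reader, make them writable, then
+    # stage and commit changes:
+    #   w = Writer.from_cursor(reader)
+    #   w[0].sn = 'Smith'   # stage a change on the first entry
+    #   w.commit()          # apply all pending changes, refreshing the entries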
    +
    +    def commit(self, refresh=True):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'committed changes for <%s>', self)
    +        self._reset_history()
    +        successful = True
    +        for entry in self.entries:
    +            if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False):
    +                successful = False
    +
    +        self.execution_time = datetime.now()
    +
    +        return successful
    +
    +    def discard(self):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'discarded changes for <%s>', self)
    +        for entry in self.entries:
    +            entry.entry_discard_changes()
    +
    +    def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None):  # base must be a single dn
    +        """Performs the LDAP search operation SINGLE_OBJECT scope
    +
    +        :return: Entry found in search
    +
    +        """
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self)
    +        if not self.connection:
    +            error_message = 'no connection established'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +
    +        response = []
    +        with self.connection:
    +            counter = 0
    +            while counter < tries:
    +                result = self.connection.search(search_base=entry_dn,
    +                                                search_filter='(objectclass=*)',
    +                                                search_scope=BASE,
    +                                                dereference_aliases=DEREF_NEVER,
    +                                                attributes=attributes if attributes else self.attributes,
    +                                                get_operational_attributes=self.get_operational_attributes,
    +                                                controls=controls)
    +                if not self.connection.strategy.sync:
    +                    response, result, request = self.connection.get_response(result, get_request=True)
    +                else:
    +                    response = self.connection.response
    +                    result = self.connection.result
    +                    request = self.connection.request
    +
    +                if result['result'] in [RESULT_SUCCESS]:
    +                    break
    +                sleep(seconds)
    +                counter += 1
    +                self._store_operation_in_history(request, result, response)
    +
    +        if len(response) == 1:
    +            return self._create_entry(response[0])
    +        elif len(response) == 0:
    +            return None
    +
    +        error_message = 'more than 1 entry returned for a single object search'
    +        if log_enabled(ERROR):
    +            log(ERROR, '%s for <%s>', error_message, self)
    +        raise LDAPCursorError(error_message)
    +
    +    def new(self, dn):
    +        if log_enabled(BASIC):
    +            log(BASIC, 'creating new entry <%s> for <%s>', dn, self)
    +        dn = safe_dn(dn)
+        for entry in self.entries:  # checks if dn is already used in a cursor entry
    +            if entry.entry_dn == dn:
    +                error_message = 'dn already present in cursor'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
    +        rdns = safe_rdn(dn, decompose=True)
    +        entry = self.entry_class(dn, self)  # defines a new empty Entry
+        for attr in entry.entry_mandatory_attributes:  # defines all mandatory attributes as virtual
+            entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self)
+            entry.__dict__[attr] = entry._state.attributes[attr]
    +        entry.objectclass.set(self.definition._object_class)
+        for rdn in rdns:  # adds virtual attributes from rdns in entry name (there can be more than one with the + syntax)
    +            if rdn[0] in entry._state.definition._attributes:
    +                rdn_name = entry._state.definition._attributes[rdn[0]].name  # normalize case folding
    +                if rdn_name not in entry._state.attributes:
    +                    entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self)
    +                    entry.__dict__[rdn_name] = entry._state.attributes[rdn_name]
    +                entry.__dict__[rdn_name].set(rdn[1])
    +            else:
    +                error_message = 'rdn type \'%s\' not in object class definition' % rdn[0]
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorError(error_message)
+        entry._state.set_status(STATUS_VIRTUAL)  # set initial status
    +        entry._state.set_status(STATUS_PENDING_CHANGES)  # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING
    +        self.entries.append(entry)
    +        return entry
    +
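`new()` stages a brand-new entry on a Writer cursor: the RDN attributes are filled from the DN, every mandatory attribute becomes a virtual attribute, and the status moves to PENDING_CHANGES only once nothing mandatory is missing. A usage sketch, reusing the placeholder connection from the earlier sketch:

    from ldap3 import ObjectDef, Writer

    person = ObjectDef('inetOrgPerson', conn)                      # conn as in the earlier sketch
    writer = Writer(conn, person)
    new_entry = writer.new('cn=jdoe,ou=people,dc=example,dc=com')  # cn taken from the RDN
    new_entry.sn = 'Doe'                                           # satisfy the remaining mandatory attribute
    new_entry.entry_commit_changes()                               # performs the LDAP add operation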
    +    def refresh_entry(self, entry, tries=4, seconds=2):
    +        conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
    +
    +        self._do_not_reset = True
    +        attr_list = []
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self)
    +        for attr in entry._state.attributes:  # check friendly attribute name in AttrDef, do not check operational attributes
    +            if attr.lower().startswith(conf_operational_attribute_prefix.lower()):
    +                continue
    +            if entry._state.definition[attr].name:
    +                attr_list.append(entry._state.definition[attr].name)
    +            else:
    +                attr_list.append(entry._state.definition[attr].key)
    +
+        temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds)  # if any attribute is added, it is added only to the entry, not to the definition
    +        self._do_not_reset = False
    +        if temp_entry:
    +            temp_entry._state.origin = entry._state.origin
    +            entry.__dict__.clear()
    +            entry.__dict__['_state'] = temp_entry._state
    +            for attr in entry._state.attributes:  # returns the attribute key
    +                entry.__dict__[attr] = entry._state.attributes[attr]
    +
+            for attr in entry.entry_attributes:  # if any attribute of the class was deleted, make it virtual
    +                if attr not in entry._state.attributes and attr in entry.entry_definition._attributes:
    +                    entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self)
    +                    entry.__dict__[attr] = entry._state.attributes[attr]
    +            entry._state.set_status(entry._state._initial_status)
    +            return True
    +        return False
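`refresh_entry` is normally driven by `WritableEntry.entry_refresh()`; swapping the refreshed `_state` into the existing object, rather than returning a new entry, is what lets the caller keep using the same Python reference. A short sketch, assuming `writable` is the WritableEntry from the sketches above:

    # re-read the entry from the server, retrying twice, one second apart
    if writable.entry_refresh(tries=2, seconds=1):
        print(writable.entry_read_time)    # read time is updated on a successful refresh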
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/entry.py b/server/www/packages/packages-windows/x86/ldap3/abstract/entry.py
    index 18c0420..b73c50f 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/entry.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/entry.py
    @@ -1,671 +1,699 @@
    -"""
    -"""
    -
    -# Created on 2016.08.19
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2016 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -
    -import json
    -try:
    -    from collections import OrderedDict
    -except ImportError:
    -    from ..utils.ordDict import OrderedDict  # for Python 2.6
    -
    -from os import linesep
    -
    -from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE
    -from .attribute import WritableAttribute
    -from .objectDef import ObjectDef
    -from .attrDef import AttrDef
    -from ..core.exceptions import LDAPKeyError, LDAPCursorError
    -from ..utils.conv import check_json_dict, format_json, prepare_for_stream
    -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    -from ..utils.dn import safe_dn, safe_rdn, to_dn
    -from ..utils.repr import to_stdout_encoding
    -from ..utils.ciDict import CaseInsensitiveWithAliasDict
    -from ..utils.config import get_config_parameter
    -from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\
    -    STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES
    -from ..core.results import RESULT_SUCCESS
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    -
    -
    -class EntryState(object):
    -    """Contains data on the status of the entry. Does not pollute the Entry __dict__.
    -
    -    """
    -
    -    def __init__(self, dn, cursor):
    -        self.dn = dn
    -        self._initial_status = None
    -        self._to = None  # used for move and rename
    -        self.status = STATUS_INIT
    -        self.attributes = CaseInsensitiveWithAliasDict()
    -        self.raw_attributes = CaseInsensitiveWithAliasDict()
    -        self.response = None
    -        self.cursor = cursor
    -        self.origin = None  # reference to the original read-only entry (set when made writable). Needed to update attributes in read-only when modified (only if both refer the same server)
    -        self.read_time = None
    -        self.changes = OrderedDict()  # includes changes to commit in a writable entry
    -        if cursor.definition:
    -            self.definition = cursor.definition
    -        else:
    -            self.definition = None
    -
    -    def __repr__(self):
    -        if self.__dict__ and self.dn is not None:
    -            r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '') + linesep
    -            r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep
    -            r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '') + linesep
    -            r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep
    -            r += 'response: ' + ('present' if self.response else '') + linesep
    -            r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '') + linesep
    -            return r
    -        else:
    -            return object.__repr__(self)
    -
    -    def __str__(self):
    -        return self.__repr__()
    -
    -    def set_status(self, status):
    -        conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')]
    -        if status not in STATUSES:
    -            error_message = 'invalid entry status ' + str(status)
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        if status in INITIAL_STATUSES:
    -            self._initial_status = status
    -        self.status = status
    -        if status == STATUS_DELETED:
    -            self._initial_status = STATUS_VIRTUAL
    -        if status == STATUS_COMMITTED:
    -            self._initial_status = STATUS_WRITABLE
    -        if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL):  # checks if all mandatory attributes are present in new entries
    -            for attr in self.definition._attributes:
    -                if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def:
    -                    if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes:
    -                        self.status = STATUS_MANDATORY_MISSING
    -                        break
    -
    -
    -class EntryBase(object):
    -    """The Entry object contains a single LDAP entry.
    -    Attributes can be accessed either by sequence, by assignment
    -    or as dictionary keys. Keys are not case sensitive.
    -
    -    The Entry object is read only
    -
    -    - The DN is retrieved by entry_dn
    -    - The cursor reference is in _cursor
    -    - Raw attributes values are retrieved with _raw_attributes and the _raw_attribute() methods
    -    """
    -
    -    def __init__(self, dn, cursor):
    -        self.__dict__['_state'] = EntryState(dn, cursor)
    -
    -    def __repr__(self):
    -        if self.__dict__ and self.entry_dn is not None:
    -            r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '') + linesep
    -            if self._state.attributes:
    -                for attr in sorted(self._state.attributes):
    -                    if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes):
    -                        r += '    ' + repr(self._state.attributes[attr]) + linesep
    -            return r
    -        else:
    -            return object.__repr__(self)
    -
    -    def __str__(self):
    -        return self.__repr__()
    -
    -    def __iter__(self):
    -        for attribute in self._state.attributes:
    -            yield self._state.attributes[attribute]
    -        # raise StopIteration  # deprecated in PEP 479
    -        return
    -
    -    def __contains__(self, item):
    -        try:
    -            self.__getitem__(item)
    -            return True
    -        except LDAPKeyError:
    -            return False
    -
    -    def __getattr__(self, item):
    -        if isinstance(item, STRING_TYPES):
    -            if item == '_state':
    -                return self.__dict__['_state']
    -            item = ''.join(item.split()).lower()
    -            attr_found = None
    -            for attr in self._state.attributes.keys():
    -                if item == attr.lower():
    -                    attr_found = attr
    -                    break
    -            if not attr_found:
    -                for attr in self._state.attributes.aliases():
    -                    if item == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.keys():
    -                    if item + ';binary' == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.aliases():
    -                    if item + ';binary' == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.keys():
    -                    if item + ';range' in attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.aliases():
    -                    if item + ';range' in attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                error_message = 'attribute \'%s\' not found' % item
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -            return self._state.attributes[attr]
    -        error_message = 'attribute name must be a string'
    -        if log_enabled(ERROR):
    -            log(ERROR, '%s for <%s>', error_message, self)
    -        raise LDAPCursorError(error_message)
    -
    -    def __setattr__(self, item, value):
    -        if item in self._state.attributes:
    -            error_message = 'attribute \'%s\' is read only' % item
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        else:
    -            error_message = 'entry is read only, cannot add \'%s\'' % item
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -
    -    def __getitem__(self, item):
    -        if isinstance(item, STRING_TYPES):
    -            item = ''.join(item.split()).lower()
    -            attr_found = None
    -            for attr in self._state.attributes.keys():
    -                if item == attr.lower():
    -                    attr_found = attr
    -                    break
    -            if not attr_found:
    -                for attr in self._state.attributes.aliases():
    -                    if item == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.keys():
    -                    if item + ';binary' == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                for attr in self._state.attributes.aliases():
    -                    if item + ';binary' == attr.lower():
    -                        attr_found = attr
    -                        break
    -            if not attr_found:
    -                error_message = 'key \'%s\' not found' % item
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPKeyError(error_message)
    -            return self._state.attributes[attr]
    -
    -        error_message = 'key must be a string'
    -        if log_enabled(ERROR):
    -            log(ERROR, '%s for <%s>', error_message, self)
    -        raise LDAPKeyError(error_message)
    -
    -    def __eq__(self, other):
    -        if isinstance(other, EntryBase):
    -            return self.entry_dn == other.entry_dn
    -
    -        return False
    -
    -    def __lt__(self, other):
    -        if isinstance(other, EntryBase):
    -            return self.entry_dn <= other.entry_dn
    -
    -        return False
    -
    -    @property
    -    def entry_dn(self):
    -        return self._state.dn
    -
    -    @property
    -    def entry_cursor(self):
    -        return self._state.cursor
    -
    -    @property
    -    def entry_status(self):
    -        return self._state.status
    -
    -    @property
    -    def entry_definition(self):
    -        return self._state.definition
    -
    -    @property
    -    def entry_raw_attributes(self):
    -        return self._state.entry_raw_attributes
    -
    -    def entry_raw_attribute(self, name):
    -        """
    -
    -        :param name: name of the attribute
    -        :return: raw (unencoded) value of the attribute, None if attribute is not found
    -        """
    -        return self._state.entry_raw_attributes[name] if name in self._state.entry_raw_attributes else None
    -
    -    @property
    -    def entry_mandatory_attributes(self):
    -        return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory]
    -
    -    @property
    -    def entry_attributes(self):
    -        return list(self._state.attributes.keys())
    -
    -    @property
    -    def entry_attributes_as_dict(self):
    -        return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items())
    -
    -    @property
    -    def entry_read_time(self):
    -        return self._state.read_time
    -
    -    @property
    -    def _changes(self):
    -        return self._state.changes
    -
    -    def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True):
    -        json_entry = dict()
    -        json_entry['dn'] = self.entry_dn
    -        if checked_attributes:
    -            if not include_empty:
    -                # needed for python 2.6 compatibility
    -                json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key])
    -            else:
    -                json_entry['attributes'] = self.entry_attributes_as_dict
    -        if raw:
    -            if not include_empty:
    -                # needed for python 2.6 compatibility
    -                json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key])
    -            else:
    -                json_entry['raw'] = dict(self.entry_raw_attributes)
    -
    -        if str is bytes:  # Python 2
    -            check_json_dict(json_entry)
    -
    -        json_output = json.dumps(json_entry,
    -                                 ensure_ascii=True,
    -                                 sort_keys=sort,
    -                                 indent=indent,
    -                                 check_circular=True,
    -                                 default=format_json,
    -                                 separators=(',', ': '))
    -
    -        if stream:
    -            stream.write(json_output)
    -
    -        return json_output
    -
    -    def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None):
    -        ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order)
    -        ldif_lines = add_ldif_header(ldif_lines)
    -        line_separator = line_separator or linesep
    -        ldif_output = line_separator.join(ldif_lines)
    -        if stream:
    -            if stream.tell() == 0:
    -                header = add_ldif_header(['-'])[0]
    -                stream.write(prepare_for_stream(header + line_separator + line_separator))
    -            stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
    -        return ldif_output
    -
    -
    -class Entry(EntryBase):
    -    """The Entry object contains a single LDAP entry.
    -    Attributes can be accessed either by sequence, by assignment
    -    or as dictionary keys. Keys are not case sensitive.
    -
    -    The Entry object is read only
    -
    -    - The DN is retrieved by entry_dn
    -    - The Reader reference is in _cursor()
    -    - Raw attributes values are retrieved by the _ra_attributes and
    -      _raw_attribute() methods
    -
    -    """
    -    def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None):
    -        if not self.entry_cursor.schema:
    -            error_message = 'schema must be available to make an entry writable'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        # returns a new WritableEntry and its Writer cursor
    -        if object_def is None:
    -            if self.entry_cursor.definition._object_class:
    -                object_def = self.entry_definition._object_class
    -                auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else [])
    -            elif 'objectclass' in self:
    -                object_def = self.objectclass.values
    -
    -        if not object_def:
    -            error_message = 'object class must be specified to make an entry writable'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -
    -        if not isinstance(object_def, ObjectDef):
    -                object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class)
    -
    -        if attributes:
    -            if isinstance(attributes, STRING_TYPES):
    -                attributes = [attributes]
    -
    -            if isinstance(attributes, SEQUENCE_TYPES):
    -                for attribute in attributes:
    -                    if attribute not in object_def._attributes:
    -                        error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def)
    -                        if log_enabled(ERROR):
    -                            log(ERROR, '%s for <%s>', error_message, self)
    -                        raise LDAPCursorError(error_message)
    -        else:
    -            attributes = []
    -
    -        if not writer_cursor:
    -            from .cursor import Writer  # local import to avoid circular reference in import at startup
    -            writable_cursor = Writer(self.entry_cursor.connection, object_def)
    -        else:
    -            writable_cursor = writer_cursor
    -
    -        if attributes:  # force reading of attributes
    -            writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes)
    -        else:
    -            writable_entry = writable_cursor._create_entry(self._state.response)
    -            writable_cursor.entries.append(writable_entry)
    -            writable_entry._state.read_time = self.entry_read_time
    -        writable_entry._state.origin = self  # reference to the original read-only entry
    -        # checks original entry for custom definitions in AttrDefs
    -        for attr in writable_entry._state.origin.entry_definition._attributes:
    -            original_attr = writable_entry._state.origin.entry_definition._attributes[attr]
    -            if attr != original_attr.name and attr not in writable_entry._state.attributes:
    -                old_attr_def = writable_entry.entry_definition._attributes[original_attr.name]
    -                new_attr_def = AttrDef(original_attr.name,
    -                                       key=attr,
    -                                       validate=original_attr.validate,
    -                                       pre_query=original_attr.pre_query,
    -                                       post_query=original_attr.post_query,
    -                                       default=original_attr.default,
    -                                       dereference_dn=original_attr.dereference_dn,
    -                                       description=original_attr.description,
    -                                       mandatory=old_attr_def.mandatory,  # keeps value read from schema
    -                                       single_value=old_attr_def.single_value,  # keeps value read from schema
    -                                       alias=original_attr.other_names)
    -                object_def = writable_entry.entry_definition
    -                object_def -= old_attr_def
    -                object_def += new_attr_def
    -                # updates attribute name in entry attributes
    -                new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor)
    -                if original_attr.name in writable_entry._state.attributes:
    -                    new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names
    -                    new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values
    -                    new_attr.values = writable_entry._state.attributes[original_attr.name].values
    -                    new_attr.response = writable_entry._state.attributes[original_attr.name].response
    -                writable_entry._state.attributes[attr] = new_attr
    -                # writable_entry._state.attributes.set_alias(attr, new_attr.other_names)
    -                del writable_entry._state.attributes[original_attr.name]
    -
    -        writable_entry._state.set_status(STATUS_WRITABLE)
    -        return writable_entry
    -
    -
    -class WritableEntry(EntryBase):
    -    def __setitem__(self, key, value):
    -        if value is not Ellipsis:  # hack for using implicit operators in writable attributes
    -            self.__setattr__(key, value)
    -
    -    def __setattr__(self, item, value):
    -        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    -        if item == '_state' and isinstance(value, EntryState):
    -            self.__dict__['_state'] = value
    -            return
    -
    -        if value is not Ellipsis:  # hack for using implicit operators in writable attributes
    -            # checks if using an alias
    -            if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def:
    -                if item not in self._state.attributes:  # setting value to an attribute still without values
    -                    new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor)
    -                    self._state.attributes[str(item)] = new_attribute  # force item to a string for key in attributes dict
    -                self._state.attributes[item].set(value)  # try to add to new_values
    -            else:
    -                error_message = 'attribute \'%s\' not defined' % item
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', error_message, self)
    -                raise LDAPCursorError(error_message)
    -
    -    def __getattr__(self, item):
    -        if isinstance(item, STRING_TYPES):
    -            if item == '_state':
    -                return self.__dict__['_state']
    -            item = ''.join(item.split()).lower()
    -            for attr in self._state.attributes.keys():
    -                if item == attr.lower():
    -                    return self._state.attributes[attr]
    -            for attr in self._state.attributes.aliases():
    -                if item == attr.lower():
    -                    return self._state.attributes[attr]
    -            if item in self.entry_definition._attributes:  # item is a new attribute to commit, creates the AttrDef and add to the attributes to retrive
    -                self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor)
    -                self.entry_cursor.attributes.add(item)
    -                return self._state.attributes[item]
    -            error_message = 'attribute \'%s\' not defined' % item
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        else:
    -            error_message = 'attribute name must be a string'
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -
    -    @property
    -    def entry_virtual_attributes(self):
    -        return [attr for attr in self.entry_attributes if self[attr].virtual]
    -
    -    def entry_commit_changes(self, refresh=True, controls=None, clear_history=True):
    -        if clear_history:
    -            self.entry_cursor._reset_history()
    -
    -        if self.entry_status == STATUS_READY_FOR_DELETION:
    -            result = self.entry_cursor.connection.delete(self.entry_dn, controls)
    -            if not self.entry_cursor.connection.strategy.sync:
    -                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    -            else:
    -                response = self.entry_cursor.connection.response
    -                result = self.entry_cursor.connection.result
    -                request = self.entry_cursor.connection.request
    -            self.entry_cursor._store_operation_in_history(request, result, response)
    -            if result['result'] == RESULT_SUCCESS:
    -                dn = self.entry_dn
    -                if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # deletes original read-only Entry
    -                    cursor = self._state.origin.entry_cursor
    -                    self._state.origin.__dict__.clear()
    -                    self._state.origin.__dict__['_state'] = EntryState(dn, cursor)
    -                    self._state.origin._state.set_status(STATUS_DELETED)
    -                cursor = self.entry_cursor
    -                self.__dict__.clear()
    -                self._state = EntryState(dn, cursor)
    -                self._state.set_status(STATUS_DELETED)
    -                return True
    -            return False
    -        elif self.entry_status == STATUS_READY_FOR_MOVING:
    -            result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to)
    -            if not self.entry_cursor.connection.strategy.sync:
    -                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    -            else:
    -                response = self.entry_cursor.connection.response
    -                result = self.entry_cursor.connection.result
    -                request = self.entry_cursor.connection.request
    -            self.entry_cursor._store_operation_in_history(request, result, response)
    -            if result['result'] == RESULT_SUCCESS:
    -                self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to)
    -                if refresh:
    -                    if self.entry_refresh():
    -                        if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # refresh dn of origin
    -                            self._state.origin._state.dn = self.entry_dn
    -                self._state.set_status(STATUS_COMMITTED)
    -                self._state._to = None
    -                return True
    -            return False
    -        elif self.entry_status == STATUS_READY_FOR_RENAMING:
    -            rdn = '+'.join(safe_rdn(self._state._to))
    -            result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn)
    -            if not self.entry_cursor.connection.strategy.sync:
    -                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    -            else:
    -                response = self.entry_cursor.connection.response
    -                result = self.entry_cursor.connection.result
    -                request = self.entry_cursor.connection.request
    -            self.entry_cursor._store_operation_in_history(request, result, response)
    -            if result['result'] == RESULT_SUCCESS:
    -                self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:])
    -                if refresh:
    -                    if self.entry_refresh():
    -                        if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # refresh dn of origin
    -                            self._state.origin._state.dn = self.entry_dn
    -                self._state.set_status(STATUS_COMMITTED)
    -                self._state._to = None
    -                return True
    -            return False
    -        elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]:
    -            missing_attributes = []
    -            for attr in self.entry_mandatory_attributes:
    -                if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes:
    -                    missing_attributes.append('\'' + attr + '\'')
    -            error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn)
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        elif self.entry_status == STATUS_PENDING_CHANGES:
    -            if self._changes:
    -                if self.entry_definition._auxiliary_class:  # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present
    -                    for attr in self._changes:
    -                        # checks schema to see if attribute is defined in one of the already present object classes
    -                        attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in
    -                        for object_class in self.objectclass:
    -                            if object_class in attr_classes:
    -                                break
    -                        else:  # executed only if the attribute class is not present in the objectClass attribute
    -                            # checks if attribute is defined in one of the possible auxiliary classes
    -                            for aux_class in self.entry_definition._auxiliary_class:
    -                                if aux_class in attr_classes:
    -                                    if self._state._initial_status == STATUS_VIRTUAL:  # entry is new, there must be a pending objectClass MODIFY_REPLACE
    -                                        self._changes['objectClass'][0][1].append(aux_class)
    -                                    else:
    -                                        self.objectclass += aux_class
    -                if self._state._initial_status == STATUS_VIRTUAL:
    -                    new_attributes = dict()
    -                    for attr in self._changes:
    -                        new_attributes[attr] = self._changes[attr][0][1]
    -                    result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls)
    -                else:
    -                    result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls)
    -
    -                if not self.entry_cursor.connection.strategy.sync:  # asynchronous request
    -                    response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    -                else:
    -                    response = self.entry_cursor.connection.response
    -                    result = self.entry_cursor.connection.result
    -                    request = self.entry_cursor.connection.request
    -                self.entry_cursor._store_operation_in_history(request, result, response)
    -
    -                if result['result'] == RESULT_SUCCESS:
    -                    if refresh:
    -                        if self.entry_refresh():
    -                            if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # updates original read-only entry if present
    -                                for attr in self:  # adds AttrDefs from writable entry to origin entry definition if some is missing
    -                                    if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes:
    -                                        self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key])  # adds AttrDef from writable entry to original entry if missing
    -                                temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response)
    -                                self._state.origin.__dict__.clear()
    -                                self._state.origin.__dict__['_state'] = temp_entry._state
    -                                for attr in self:  # returns the whole attribute object
    -                                    if not attr.virtual:
    -                                        self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key]
    -                                self._state.origin._state.read_time = self.entry_read_time
    -                    else:
    -                        self.entry_discard_changes()  # if not refreshed remove committed changes
    -                    self._state.set_status(STATUS_COMMITTED)
    -                    return True
    -        return False
    -
    -    def entry_discard_changes(self):
    -        self._changes.clear()
    -        self._state.set_status(self._state._initial_status)
    -
    -    def entry_delete(self):
    -        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]:
    -            error_message = 'cannot delete entry, invalid status: ' + self.entry_status
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        self._state.set_status(STATUS_READY_FOR_DELETION)
    -
    -    def entry_refresh(self, tries=4, seconds=2):
    -        """
    -
    -        Refreshes the entry from the LDAP Server
    -        """
    -        if self.entry_cursor.connection:
    -            if self.entry_cursor.refresh_entry(self, tries, seconds):
    -                return True
    -
    -        return False
    -
    -    def entry_move(self, destination_dn):
    -        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]:
    -            error_message = 'cannot move entry, invalid status: ' + self.entry_status
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        self._state._to = safe_dn(destination_dn)
    -        self._state.set_status(STATUS_READY_FOR_MOVING)
    -
    -    def entry_rename(self, new_name):
    -        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]:
    -            error_message = 'cannot rename entry, invalid status: ' + self.entry_status
    -            if log_enabled(ERROR):
    -                log(ERROR, '%s for <%s>', error_message, self)
    -            raise LDAPCursorError(error_message)
    -        self._state._to = new_name
    -        self._state.set_status(STATUS_READY_FOR_RENAMING)
    -
    -    @property
    -    def entry_changes(self):
    -        return self._changes
    +"""
    +"""
    +
    +# Created on 2016.08.19
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2016 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +
    +import json
    +try:
    +    from collections import OrderedDict
    +except ImportError:
    +    from ..utils.ordDict import OrderedDict  # for Python 2.6
    +
    +from os import linesep
    +
    +from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE
    +from .attribute import WritableAttribute
    +from .objectDef import ObjectDef
    +from .attrDef import AttrDef
    +from ..core.exceptions import LDAPKeyError, LDAPCursorError, LDAPCursorAttributeError
    +from ..utils.conv import check_json_dict, format_json, prepare_for_stream
    +from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    +from ..utils.dn import safe_dn, safe_rdn, to_dn
    +from ..utils.repr import to_stdout_encoding
    +from ..utils.ciDict import CaseInsensitiveWithAliasDict
    +from ..utils.config import get_config_parameter
    +from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\
    +    STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES
    +from ..core.results import RESULT_SUCCESS
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
    +
    +
    +class EntryState(object):
    +    """Contains data on the status of the entry. Does not pollute the Entry __dict__.
    +
    +    """
    +
    +    def __init__(self, dn, cursor):
    +        self.dn = dn
    +        self._initial_status = None
    +        self._to = None  # used for move and rename
    +        self.status = STATUS_INIT
    +        self.attributes = CaseInsensitiveWithAliasDict()
    +        self.raw_attributes = CaseInsensitiveWithAliasDict()
    +        self.response = None
    +        self.cursor = cursor
+        self.origin = None  # reference to the original read-only entry (set when made writable). Needed to update attributes in the read-only entry when modified (only if both refer to the same server)
    +        self.read_time = None
    +        self.changes = OrderedDict()  # includes changes to commit in a writable entry
    +        if cursor.definition:
    +            self.definition = cursor.definition
    +        else:
    +            self.definition = None
    +
    +    def __repr__(self):
    +        if self.__dict__ and self.dn is not None:
    +            r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '') + linesep
    +            r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep
    +            r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '') + linesep
    +            r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep
    +            r += 'response: ' + ('present' if self.response else '') + linesep
    +            r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '') + linesep
    +            return r
    +        else:
    +            return object.__repr__(self)
    +
    +    def __str__(self):
    +        return self.__repr__()
    +
    +    def __getstate__(self):
    +        cpy = dict(self.__dict__)
    +        cpy['cursor'] = None
    +        return cpy
    +
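`__getstate__` is new relative to the removed version of this file: it lets an EntryState take part in pickling by dropping the cursor reference, which holds a live connection and cannot be serialized. Its effect can be checked directly (here `entry` is any Entry obtained from a cursor):

    state_copy = entry._state.__getstate__()   # the hook that pickle invokes
    assert state_copy['cursor'] is None        # the connection-bearing cursor is excluded from the copy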
    +    def set_status(self, status):
    +        conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')]
    +        if status not in STATUSES:
    +            error_message = 'invalid entry status ' + str(status)
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        if status in INITIAL_STATUSES:
    +            self._initial_status = status
    +        self.status = status
    +        if status == STATUS_DELETED:
    +            self._initial_status = STATUS_VIRTUAL
    +        if status == STATUS_COMMITTED:
    +            self._initial_status = STATUS_WRITABLE
    +        if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL):  # checks if all mandatory attributes are present in new entries
    +            for attr in self.definition._attributes:
    +                if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def:
    +                    if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes:
    +                        self.status = STATUS_MANDATORY_MISSING
    +                        break
    +
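The mandatory-attribute check at the end of `set_status` is what makes `Writer.new()` safe: a request to move a virtual entry to PENDING_CHANGES silently reverts to MANDATORY_MISSING until every required attribute has a value or a staged change. With the hypothetical writer from the earlier sketch (exact status strings aside):

    e = writer.new('cn=jdoe,ou=people,dc=example,dc=com')   # starts VIRTUAL; 'sn' still missing
    print(e.entry_status)     # reported as mandatory-missing at this point
    e.sn = 'Doe'              # staging a value lifts the entry to pending-changes
    print(e.entry_status)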
    +    @property
    +    def entry_raw_attributes(self):
    +        return self.raw_attributes
    +
    +
    +class EntryBase(object):
    +    """The Entry object contains a single LDAP entry.
    +    Attributes can be accessed either by sequence, by assignment
    +    or as dictionary keys. Keys are not case sensitive.
    +
    +    The Entry object is read only
    +
    +    - The DN is retrieved by entry_dn
+    - The cursor reference is in entry_cursor
+    - Raw attribute values are retrieved with entry_raw_attributes and the entry_raw_attribute() method
    +    """
    +
    +    def __init__(self, dn, cursor):
    +        self._state = EntryState(dn, cursor)
    +
    +    def __repr__(self):
    +        if self.__dict__ and self.entry_dn is not None:
    +            r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '') + linesep
    +            if self._state.attributes:
    +                for attr in sorted(self._state.attributes):
    +                    if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes):
    +                        r += '    ' + repr(self._state.attributes[attr]) + linesep
    +            return r
    +        else:
    +            return object.__repr__(self)
    +
    +    def __str__(self):
    +        return self.__repr__()
    +
    +    def __iter__(self):
    +        for attribute in self._state.attributes:
    +            yield self._state.attributes[attribute]
    +        # raise StopIteration  # deprecated in PEP 479
    +        return
    +
    +    def __contains__(self, item):
    +        try:
    +            self.__getitem__(item)
    +            return True
    +        except LDAPKeyError:
    +            return False
    +
    +    def __getattr__(self, item):
    +        if isinstance(item, STRING_TYPES):
    +            if item == '_state':
    +                return object.__getattr__(self, item)
    +            item = ''.join(item.split()).lower()
    +            attr_found = None
    +            for attr in self._state.attributes.keys():
    +                if item == attr.lower():
    +                    attr_found = attr
    +                    break
    +            if not attr_found:
    +                for attr in self._state.attributes.aliases():
    +                    if item == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.keys():
    +                    if item + ';binary' == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.aliases():
    +                    if item + ';binary' == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.keys():
    +                    if item + ';range' in attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.aliases():
    +                    if item + ';range' in attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                error_message = 'attribute \'%s\' not found' % item
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorAttributeError(error_message)
    +            return self._state.attributes[attr]
    +        error_message = 'attribute name must be a string'
    +        if log_enabled(ERROR):
    +            log(ERROR, '%s for <%s>', error_message, self)
    +        raise LDAPCursorAttributeError(error_message)
    +
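The lookup chain above makes attribute access case- and whitespace-insensitive and adds two fallbacks, `;binary` and `;range`, so a value stored under a tagged name such as `userCertificate;binary` stays reachable by its plain name. For instance, assuming the entry was read with that attribute:

    cert = entry.usercertificate       # matched via the ';binary' fallback
    same = entry['userCertificate']    # __getitem__ applies the same ';binary' fallback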
    +    def __setattr__(self, item, value):
    +        if item == '_state':
    +            object.__setattr__(self, item, value)
    +        elif item in self._state.attributes:
    +            error_message = 'attribute \'%s\' is read only' % item
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorAttributeError(error_message)
    +        else:
    +            error_message = 'entry is read only, cannot add \'%s\'' % item
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorAttributeError(error_message)
    +
    +    def __getitem__(self, item):
    +        if isinstance(item, STRING_TYPES):
    +            item = ''.join(item.split()).lower()
    +            attr_found = None
    +            for attr in self._state.attributes.keys():
    +                if item == attr.lower():
    +                    attr_found = attr
    +                    break
    +            if not attr_found:
    +                for attr in self._state.attributes.aliases():
    +                    if item == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.keys():
    +                    if item + ';binary' == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                for attr in self._state.attributes.aliases():
    +                    if item + ';binary' == attr.lower():
    +                        attr_found = attr
    +                        break
    +            if not attr_found:
    +                error_message = 'key \'%s\' not found' % item
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPKeyError(error_message)
    +            return self._state.attributes[attr]
    +
    +        error_message = 'key must be a string'
    +        if log_enabled(ERROR):
    +            log(ERROR, '%s for <%s>', error_message, self)
    +        raise LDAPKeyError(error_message)
    +
    +    def __eq__(self, other):
    +        if isinstance(other, EntryBase):
    +            return self.entry_dn == other.entry_dn
    +
    +        return False
    +
    +    def __lt__(self, other):
    +        if isinstance(other, EntryBase):
    +            return self.entry_dn <= other.entry_dn
    +
    +        return False
    +
    +    @property
    +    def entry_dn(self):
    +        return self._state.dn
    +
    +    @property
    +    def entry_cursor(self):
    +        return self._state.cursor
    +
    +    @property
    +    def entry_status(self):
    +        return self._state.status
    +
    +    @property
    +    def entry_definition(self):
    +        return self._state.definition
    +
    +    @property
    +    def entry_raw_attributes(self):
    +        return self._state.raw_attributes
    +
    +    def entry_raw_attribute(self, name):
    +        """
    +
    +        :param name: name of the attribute
    +        :return: raw (unencoded) value of the attribute, None if attribute is not found
    +        """
    +        return self._state.raw_attributes[name] if name in self._state.raw_attributes else None
    +
    +    @property
    +    def entry_mandatory_attributes(self):
    +        return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory]
    +
    +    @property
    +    def entry_attributes(self):
    +        return list(self._state.attributes.keys())
    +
    +    @property
    +    def entry_attributes_as_dict(self):
    +        return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items())
    +
    +    @property
    +    def entry_read_time(self):
    +        return self._state.read_time
    +
    +    @property
    +    def _changes(self):
    +        return self._state.changes
    +
    +    def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True):
    +        json_entry = dict()
    +        json_entry['dn'] = self.entry_dn
    +        if checked_attributes:
    +            if not include_empty:
    +                # needed for python 2.6 compatibility
    +                json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key])
    +            else:
    +                json_entry['attributes'] = self.entry_attributes_as_dict
    +        if raw:
    +            if not include_empty:
    +                # needed for python 2.6 compatibility
    +                json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key])
    +            else:
    +                json_entry['raw'] = dict(self.entry_raw_attributes)
    +
    +        if str is bytes:  # Python 2
    +            check_json_dict(json_entry)
    +
    +        json_output = json.dumps(json_entry,
    +                                 ensure_ascii=True,
    +                                 sort_keys=sort,
    +                                 indent=indent,
    +                                 check_circular=True,
    +                                 default=format_json,
    +                                 separators=(',', ': '))
    +
    +        if stream:
    +            stream.write(json_output)
    +
    +        return json_output
    +
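`entry_to_json` serializes the DN plus the checked (and optionally raw) attribute values, delegating non-JSON types such as datetimes and bytes to `format_json`. Typical use, with `entry` any Entry from a cursor:

    print(entry.entry_to_json())                    # pretty-printed, keys sorted
    with open('entry.json', 'w') as f:
        entry.entry_to_json(raw=True, stream=f)     # include raw values and write to a file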
    +    def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None):
    +        ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order)
    +        ldif_lines = add_ldif_header(ldif_lines)
    +        line_separator = line_separator or linesep
    +        ldif_output = line_separator.join(ldif_lines)
    +        if stream:
    +            if stream.tell() == 0:
    +                header = add_ldif_header(['-'])[0]
    +                stream.write(prepare_for_stream(header + line_separator + line_separator))
    +            stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
    +        return ldif_output
    +
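`entry_to_ldif` renders the underlying search response as LDIF; when the supplied stream is at position 0 it first writes the LDIF version header (the `stream.tell()` check above). A minimal sketch, reusing `entry` from the previous example:

```python
import io

buf = io.StringIO()
entry.entry_to_ldif(stream=buf)  # writes the 'version: 1' header, then the entry
print(buf.getvalue())
```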
    +
    +class Entry(EntryBase):
    +    """The Entry object contains a single LDAP entry.
    +    Attributes can be accessed either by sequence, by assignment
    +    or as dictionary keys. Keys are not case sensitive.
    +
    +    The Entry object is read only
    +
    +    - The DN is retrieved by entry_dn
    +    - The cursor reference is in entry_cursor
    +    - Raw attribute values are retrieved by the entry_raw_attributes
    +      property and the entry_raw_attribute() method
    +
    +    """
    +    def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None):
    +        conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
    +        if not self.entry_cursor.schema:
    +            error_message = 'schema must be available to make an entry writable'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        # returns a new WritableEntry and its Writer cursor
    +        if object_def is None:
    +            if self.entry_cursor.definition._object_class:
    +                object_def = self.entry_definition._object_class
    +                auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else [])
    +            elif 'objectclass' in self:
    +                object_def = self.objectclass.values
    +
    +        if not object_def:
    +            error_message = 'object class must be specified to make an entry writable'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +
    +        if not isinstance(object_def, ObjectDef):
    +            object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class)
    +
    +        if attributes:
    +            if isinstance(attributes, STRING_TYPES):
    +                attributes = [attributes]
    +
    +            if isinstance(attributes, SEQUENCE_TYPES):
    +                for attribute in attributes:
    +                    if attribute not in object_def._attributes:
    +                        error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def)
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '%s for <%s>', error_message, self)
    +                        raise LDAPCursorError(error_message)
    +        else:
    +            attributes = []
    +
    +        if not writer_cursor:
    +            from .cursor import Writer  # local import to avoid circular reference in import at startup
    +            writable_cursor = Writer(self.entry_cursor.connection, object_def)
    +        else:
    +            writable_cursor = writer_cursor
    +
    +        if attributes:  # force reading of attributes
    +            writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes)
    +        else:
    +            writable_entry = writable_cursor._create_entry(self._state.response)
    +            writable_cursor.entries.append(writable_entry)
    +            writable_entry._state.read_time = self.entry_read_time
    +        writable_entry._state.origin = self  # reference to the original read-only entry
    +        # checks original entry for custom definitions in AttrDefs
    +        attr_to_add = []
    +        attr_to_remove = []
    +        object_def_to_add = []
    +        object_def_to_remove = []
    +        for attr in writable_entry._state.origin.entry_definition._attributes:
    +            original_attr = writable_entry._state.origin.entry_definition._attributes[attr]
    +            if attr != original_attr.name and (attr not in writable_entry._state.attributes or conf_operational_attribute_prefix + original_attr.name not in writable_entry._state.attributes):
    +                old_attr_def = writable_entry.entry_definition._attributes[original_attr.name]
    +                new_attr_def = AttrDef(original_attr.name,
    +                                       key=attr,
    +                                       validate=original_attr.validate,
    +                                       pre_query=original_attr.pre_query,
    +                                       post_query=original_attr.post_query,
    +                                       default=original_attr.default,
    +                                       dereference_dn=original_attr.dereference_dn,
    +                                       description=original_attr.description,
    +                                       mandatory=old_attr_def.mandatory,  # keeps value read from schema
    +                                       single_value=old_attr_def.single_value,  # keeps value read from schema
    +                                       alias=original_attr.other_names)
    +                od = writable_entry.entry_definition
    +                object_def_to_remove.append(old_attr_def)
    +                object_def_to_add.append(new_attr_def)
    +                # updates attribute name in entry attributes
    +                new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor)
    +                if original_attr.name in writable_entry._state.attributes:
    +                    new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names
    +                    new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values
    +                    new_attr.values = writable_entry._state.attributes[original_attr.name].values
    +                    new_attr.response = writable_entry._state.attributes[original_attr.name].response
    +                attr_to_add.append((attr, new_attr))
    +                attr_to_remove.append(original_attr.name)
    +                # writable_entry._state.attributes[attr] = new_attr
    +                ## writable_entry._state.attributes.set_alias(attr, new_attr.other_names)
    +                # del writable_entry._state.attributes[original_attr.name]
    +        for attr, new_attr in attr_to_add:
    +            writable_entry._state.attributes[attr] = new_attr
    +        for attr in attr_to_remove:
    +            del writable_entry._state.attributes[attr]
    +        for object_def in object_def_to_remove:
    +            o = writable_entry.entry_definition
    +            o -= object_def
    +        for object_def in object_def_to_add:
    +            o = writable_entry.entry_definition
    +            o += object_def
    +
    +        writable_entry._state.set_status(STATUS_WRITABLE)
    +        return writable_entry
    +
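`entry_writable` bridges the read-only abstraction to the writable one: it resolves an ObjectDef, builds (or reuses) a Writer cursor, copies the entry state, and carries any custom AttrDef keys over. A sketch of the typical call, assuming the Server was created with schema information (e.g. `get_info=ALL`) and that the entry's object class carries a `mail` attribute:

```python
writable = entry.entry_writable()          # raises LDAPCursorError without schema
writable.mail = 'new.address@example.com'  # staged only; nothing is sent yet
print(writable.entry_changes)              # the pending modifications
```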
    +
    +class WritableEntry(EntryBase):
    +    def __setitem__(self, key, value):
    +        if value is not Ellipsis:  # hack for using implicit operators in writable attributes
    +            self.__setattr__(key, value)
    +
    +    def __setattr__(self, item, value):
    +        conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
    +        if item == '_state' and isinstance(value, EntryState):
    +            self.__dict__['_state'] = value
    +            return
    +
    +        if value is not Ellipsis:  # hack for using implicit operators in writable attributes
    +            # checks if using an alias
    +            if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def:
    +                if item not in self._state.attributes:  # setting value to an attribute still without values
    +                    new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor)
    +                    self._state.attributes[str(item)] = new_attribute  # force item to a string for key in attributes dict
    +                self._state.attributes[item].set(value)  # try to add to new_values
    +            else:
    +                error_message = 'attribute \'%s\' not defined' % item
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', error_message, self)
    +                raise LDAPCursorAttributeError(error_message)
    +
    +    def __getattr__(self, item):
    +        if isinstance(item, STRING_TYPES):
    +            if item == '_state':
    +                return self.__dict__['_state']
    +            item = ''.join(item.split()).lower()
    +            for attr in self._state.attributes.keys():
    +                if item == attr.lower():
    +                    return self._state.attributes[attr]
    +            for attr in self._state.attributes.aliases():
    +                if item == attr.lower():
    +                    return self._state.attributes[attr]
    +            if item in self.entry_definition._attributes:  # item is a new attribute to commit; creates its AttrDef and adds it to the attributes to retrieve
    +                self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor)
    +                self.entry_cursor.attributes.add(item)
    +                return self._state.attributes[item]
    +            error_message = 'attribute \'%s\' not defined' % item
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorAttributeError(error_message)
    +        else:
    +            error_message = 'attribute name must be a string'
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorAttributeError(error_message)
    +
    +    @property
    +    def entry_virtual_attributes(self):
    +        return [attr for attr in self.entry_attributes if self[attr].virtual]
    +
    +    def entry_commit_changes(self, refresh=True, controls=None, clear_history=True):
    +        if clear_history:
    +            self.entry_cursor._reset_history()
    +
    +        if self.entry_status == STATUS_READY_FOR_DELETION:
    +            result = self.entry_cursor.connection.delete(self.entry_dn, controls)
    +            if not self.entry_cursor.connection.strategy.sync:
    +                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    +            else:
    +                response = self.entry_cursor.connection.response
    +                result = self.entry_cursor.connection.result
    +                request = self.entry_cursor.connection.request
    +            self.entry_cursor._store_operation_in_history(request, result, response)
    +            if result['result'] == RESULT_SUCCESS:
    +                dn = self.entry_dn
    +                if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # deletes original read-only Entry
    +                    cursor = self._state.origin.entry_cursor
    +                    self._state.origin.__dict__.clear()
    +                    self._state.origin.__dict__['_state'] = EntryState(dn, cursor)
    +                    self._state.origin._state.set_status(STATUS_DELETED)
    +                cursor = self.entry_cursor
    +                self.__dict__.clear()
    +                self._state = EntryState(dn, cursor)
    +                self._state.set_status(STATUS_DELETED)
    +                return True
    +            return False
    +        elif self.entry_status == STATUS_READY_FOR_MOVING:
    +            result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to)
    +            if not self.entry_cursor.connection.strategy.sync:
    +                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    +            else:
    +                response = self.entry_cursor.connection.response
    +                result = self.entry_cursor.connection.result
    +                request = self.entry_cursor.connection.request
    +            self.entry_cursor._store_operation_in_history(request, result, response)
    +            if result['result'] == RESULT_SUCCESS:
    +                self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to)
    +                if refresh:
    +                    if self.entry_refresh():
    +                        if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # refresh dn of origin
    +                            self._state.origin._state.dn = self.entry_dn
    +                self._state.set_status(STATUS_COMMITTED)
    +                self._state._to = None
    +                return True
    +            return False
    +        elif self.entry_status == STATUS_READY_FOR_RENAMING:
    +            rdn = '+'.join(safe_rdn(self._state._to))
    +            result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn)
    +            if not self.entry_cursor.connection.strategy.sync:
    +                response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    +            else:
    +                response = self.entry_cursor.connection.response
    +                result = self.entry_cursor.connection.result
    +                request = self.entry_cursor.connection.request
    +            self.entry_cursor._store_operation_in_history(request, result, response)
    +            if result['result'] == RESULT_SUCCESS:
    +                self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:])
    +                if refresh:
    +                    if self.entry_refresh():
    +                        if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # refresh dn of origin
    +                            self._state.origin._state.dn = self.entry_dn
    +                self._state.set_status(STATUS_COMMITTED)
    +                self._state._to = None
    +                return True
    +            return False
    +        elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]:
    +            missing_attributes = []
    +            for attr in self.entry_mandatory_attributes:
    +                if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes:
    +                    missing_attributes.append('\'' + attr + '\'')
    +            error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn)
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        elif self.entry_status == STATUS_PENDING_CHANGES:
    +            if self._changes:
    +                if self.entry_definition._auxiliary_class:  # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present
    +                    for attr in self._changes:
    +                        # checks schema to see if attribute is defined in one of the already present object classes
    +                        attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in
    +                        for object_class in self.objectclass:
    +                            if object_class in attr_classes:
    +                                break
    +                        else:  # executed only if the attribute class is not present in the objectClass attribute
    +                            # checks if attribute is defined in one of the possible auxiliary classes
    +                            for aux_class in self.entry_definition._auxiliary_class:
    +                                if aux_class in attr_classes:
    +                                    if self._state._initial_status == STATUS_VIRTUAL:  # entry is new, there must be a pending objectClass MODIFY_REPLACE
    +                                        self._changes['objectClass'][0][1].append(aux_class)
    +                                    else:
    +                                        self.objectclass += aux_class
    +                if self._state._initial_status == STATUS_VIRTUAL:
    +                    new_attributes = dict()
    +                    for attr in self._changes:
    +                        new_attributes[attr] = self._changes[attr][0][1]
    +                    result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls)
    +                else:
    +                    result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls)
    +
    +                if not self.entry_cursor.connection.strategy.sync:  # asynchronous request
    +                    response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
    +                else:
    +                    response = self.entry_cursor.connection.response
    +                    result = self.entry_cursor.connection.result
    +                    request = self.entry_cursor.connection.request
    +                self.entry_cursor._store_operation_in_history(request, result, response)
    +
    +                if result['result'] == RESULT_SUCCESS:
    +                    if refresh:
    +                        if self.entry_refresh():
    +                            if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server:  # updates original read-only entry if present
    +                                for attr in self:  # adds AttrDefs from writable entry to origin entry definition if any are missing
    +                                    if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes:
    +                                        self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key])  # adds AttrDef from writable entry to original entry if missing
    +                                temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response)
    +                                self._state.origin.__dict__.clear()
    +                                self._state.origin.__dict__['_state'] = temp_entry._state
    +                                for attr in self:  # returns the whole attribute object
    +                                    if not hasattr(attr, 'virtual'):
    +                                        self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key]
    +                                self._state.origin._state.read_time = self.entry_read_time
    +                    else:
    +                        self.entry_discard_changes()  # if not refreshed remove committed changes
    +                    self._state.set_status(STATUS_COMMITTED)
    +                    return True
    +        return False
    +
    +    def entry_discard_changes(self):
    +        self._changes.clear()
    +        self._state.set_status(self._state._initial_status)
    +
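`entry_commit_changes` dispatches on the entry status (deletion, moving, renaming, or pending changes), records each operation in the cursor history, and returns a boolean; `entry_discard_changes` restores the initial status. A sketch continuing with the writable entry above:

```python
writable.sn += 'Smith'  # implicit operator: returns Ellipsis, which __setattr__ ignores
if not writable.entry_commit_changes(refresh=True):
    writable.entry_discard_changes()  # drop the staged changes on failure
```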
    +    def entry_delete(self):
    +        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]:
    +            error_message = 'cannot delete entry, invalid status: ' + self.entry_status
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        self._state.set_status(STATUS_READY_FOR_DELETION)
    +
    +    def entry_refresh(self, tries=4, seconds=2):
    +        """
    +
    +        Refreshes the entry from the LDAP Server
    +        """
    +        if self.entry_cursor.connection:
    +            if self.entry_cursor.refresh_entry(self, tries, seconds):
    +                return True
    +
    +        return False
    +
    +    def entry_move(self, destination_dn):
    +        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]:
    +            error_message = 'cannot move entry, invalid status: ' + self.entry_status
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        self._state._to = safe_dn(destination_dn)
    +        self._state.set_status(STATUS_READY_FOR_MOVING)
    +
    +    def entry_rename(self, new_name):
    +        if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]:
    +            error_message = 'cannot rename entry, invalid status: ' + self.entry_status
    +            if log_enabled(ERROR):
    +                log(ERROR, '%s for <%s>', error_message, self)
    +            raise LDAPCursorError(error_message)
    +        self._state._to = new_name
    +        self._state.set_status(STATUS_READY_FOR_RENAMING)
    +
    +    @property
    +    def entry_changes(self):
    +        return self._changes
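`entry_move` and `entry_rename` only stage the operation (STATUS_READY_FOR_MOVING / STATUS_READY_FOR_RENAMING); the actual modify_dn is issued by `entry_commit_changes`. A sketch with hypothetical DNs:

```python
writable.entry_rename('cn=John Smith')               # stage a new RDN
writable.entry_commit_changes()                      # performs the rename

writable.entry_move('ou=archive,dc=example,dc=com')  # stage a new superior
writable.entry_commit_changes()                      # performs the move
```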
    diff --git a/server/www/packages/packages-windows/x86/ldap3/abstract/objectDef.py b/server/www/packages/packages-windows/x86/ldap3/abstract/objectDef.py
    index 5af64d5..1f8609c 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/abstract/objectDef.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/abstract/objectDef.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/connection.py b/server/www/packages/packages-windows/x86/ldap3/core/connection.py
    index b8ed002..0f148e8 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/connection.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/connection.py
    @@ -1,1504 +1,1549 @@
    -"""
    -"""
    -
    -# Created on 2014.05.31
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see <http://www.gnu.org/licenses/>.
    -from copy import deepcopy
    -from os import linesep
    -from threading import RLock, Lock
    -from functools import reduce
    -import json
    -
    -from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \
    -    SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \
    -    RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, \
    -    STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL, DIGEST_MD5, GSSAPI, PLAIN
    -
    -from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE
    -from ..extend import ExtendedOperationsRoot
    -from .pooling import ServerPool
    -from .server import Server
    -from ..operation.abandon import abandon_operation, abandon_request_to_dict
    -from ..operation.add import add_operation, add_request_to_dict
    -from ..operation.bind import bind_operation, bind_request_to_dict
    -from ..operation.compare import compare_operation, compare_request_to_dict
    -from ..operation.delete import delete_operation, delete_request_to_dict
    -from ..operation.extended import extended_operation, extended_request_to_dict
    -from ..operation.modify import modify_operation, modify_request_to_dict
    -from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict
    -from ..operation.search import search_operation, search_request_to_dict
    -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    -from ..protocol.sasl.digestMd5 import sasl_digest_md5
    -from ..protocol.sasl.external import sasl_external
    -from ..protocol.sasl.plain import sasl_plain
    -from ..strategy.sync import SyncStrategy
    -from ..strategy.mockAsync import MockAsyncStrategy
    -from ..strategy.asynchronous import AsyncStrategy
    -from ..strategy.reusable import ReusableStrategy
    -from ..strategy.restartable import RestartableStrategy
    -from ..strategy.ldifProducer import LdifProducerStrategy
    -from ..strategy.mockSync import MockSyncStrategy
    -from ..strategy.asyncStream import AsyncStreamStrategy
    -from ..operation.unbind import unbind_operation
    -from ..protocol.rfc2696 import paged_search_control
    -from .usage import ConnectionUsage
    -from .tls import Tls
    -from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \
    -    LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \
    -    LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError
    -
    -from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data
    -from ..utils.dn import safe_dn
    -
    -
    -SASL_AVAILABLE_MECHANISMS = [EXTERNAL,
    -                             DIGEST_MD5,
    -                             GSSAPI,
    -                             PLAIN]
    -
    -CLIENT_STRATEGIES = [SYNC,
    -                     ASYNC,
    -                     LDIF,
    -                     RESTARTABLE,
    -                     REUSABLE,
    -                     MOCK_SYNC,
    -                     MOCK_ASYNC,
    -                     ASYNC_STREAM]
    -
    -
    -def _format_socket_endpoint(endpoint):
    -    if endpoint and len(endpoint) == 2:  # IPv4
    -        return str(endpoint[0]) + ':' + str(endpoint[1])
    -    elif endpoint and len(endpoint) == 4:  # IPv6
    -        return '[' + str(endpoint[0]) + ']:' + str(endpoint[1])
    -
    -    try:
    -        return str(endpoint)
    -    except Exception:
    -        return '?'
    -
    -
    -def _format_socket_endpoints(sock):
    -    if sock:
    -        try:
    -            local = sock.getsockname()
    -        except Exception:
    -            local = (None, None, None, None)
    -        try:
    -            remote = sock.getpeername()
    -        except Exception:
    -            remote = (None, None, None, None)
    -
    -        return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>'
    -    return '<no socket>'
    -
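For reference, the two helpers above render IPv4 2-tuples as `host:port` and IPv6 4-tuples as `[host]:port`:

```python
assert _format_socket_endpoint(('10.0.0.1', 389)) == '10.0.0.1:389'
assert _format_socket_endpoint(('::1', 389, 0, 0)) == '[::1]:389'
```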
    -
    -# noinspection PyProtectedMember
    -class Connection(object):
    -    """Main ldap connection class.
    -
    -    Controls, if used, must be a list of tuples. Each tuple must have 3
    -    elements, the control OID, a boolean meaning if the control is
    -    critical, a value.
    -
    -    If the boolean is set to True the server must honor the control or
    -    refuse the operation
    -
    -    Mixing controls must be defined in controls specification (as per
    -    RFC 4511)
    -
    -    :param server: the Server object to connect to
    -    :type server: Server, str
    -    :param user: the user name for simple authentication
    -    :type user: str
    -    :param password: the password for simple authentication
    -    :type password: str
    -    :param auto_bind: specify if the bind will be performed automatically when defining the Connection object
    -    :type auto_bind: int, can be one of AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3
    -    :param version: LDAP version, default to 3
    -    :type version: int
    -    :param authentication: type of authentication
    -    :type authentication: int, can be one of ANONYMOUS, SIMPLE, SASL or NTLM, as specified in ldap3
    -    :param client_strategy: communication strategy used in the Connection
    -    :type client_strategy: can be one of SYNC, ASYNC, LDIF, RESTARTABLE, REUSABLE, MOCK_SYNC, MOCK_ASYNC, ASYNC_STREAM as specified in ldap3
    -    :param auto_referrals: specify if the connection object must automatically follow referrals
    -    :type auto_referrals: bool
    -    :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN'
    -    :type sasl_mechanism: str
    -    :param sasl_credentials: credentials for SASL mechanism
    -    :type sasl_credentials: tuple
    -    :param check_names: if True the library will check names of attributes and object classes against the schema. Also values found in entries will be formatted as indicated by the schema
    -    :type check_names: bool
    -    :param collect_usage: collect usage metrics in the usage attribute
    -    :type collect_usage: bool
    -    :param read_only: disable operations that modify data in the LDAP server
    -    :type read_only: bool
    -    :param lazy: open and bind the connection only when an actual operation is performed
    -    :type lazy: bool
    -    :param raise_exceptions: raise exceptions when operations are not successful, if False operations return False if not successful but not raise exceptions
    -    :type raise_exceptions: bool
    -    :param pool_name: pool name for pooled strategies
    -    :type pool_name: str
    -    :param pool_size: pool size for pooled strategies
    -    :type pool_size: int
    -    :param pool_lifetime: pool lifetime for pooled strategies
    -    :type pool_lifetime: int
    -    :param use_referral_cache: keep referral connections open and reuse them
    -    :type use_referral_cache: bool
    -    :param auto_escape: automatic escaping of filter values
    -    :type auto_escape: bool
    -    :param auto_encode: automatic encoding of attribute values
    -    :type auto_encode: bool
    -    """
    -
    -    def __init__(self,
    -                 server,
    -                 user=None,
    -                 password=None,
    -                 auto_bind=AUTO_BIND_DEFAULT,
    -                 version=3,
    -                 authentication=None,
    -                 client_strategy=SYNC,
    -                 auto_referrals=True,
    -                 auto_range=True,
    -                 sasl_mechanism=None,
    -                 sasl_credentials=None,
    -                 check_names=True,
    -                 collect_usage=False,
    -                 read_only=False,
    -                 lazy=False,
    -                 raise_exceptions=False,
    -                 pool_name=None,
    -                 pool_size=None,
    -                 pool_lifetime=None,
    -                 fast_decoder=True,
    -                 receive_timeout=None,
    -                 return_empty_attributes=True,
    -                 use_referral_cache=False,
    -                 auto_escape=True,
    -                 auto_encode=True,
    -                 pool_keepalive=None):
    -
    -        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    -        self.connection_lock = RLock()  # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread
    -        with self.connection_lock:
    -            if client_strategy not in CLIENT_STRATEGIES:
    -                self.last_error = 'unknown client connection strategy'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPUnknownStrategyError(self.last_error)
    -
    -            self.strategy_type = client_strategy
    -            self.user = user
    -            self.password = password
    -
    -            if not authentication and self.user:
    -                self.authentication = SIMPLE
    -            elif not authentication:
    -                self.authentication = ANONYMOUS
    -            elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
    -                self.authentication = authentication
    -            else:
    -                self.last_error = 'unknown authentication method'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPUnknownAuthenticationMethodError(self.last_error)
    -
    -            self.version = version
    -            self.auto_referrals = True if auto_referrals else False
    -            self.request = None
    -            self.response = None
    -            self.result = None
    -            self.bound = False
    -            self.listening = False
    -            self.closed = True
    -            self.last_error = None
    -            if auto_bind is False:  # compatibility with older version where auto_bind was a boolean
    -                self.auto_bind = AUTO_BIND_DEFAULT
    -            elif auto_bind is True:
    -                self.auto_bind = AUTO_BIND_NO_TLS
    -            else:
    -                self.auto_bind = auto_bind
    -            self.sasl_mechanism = sasl_mechanism
    -            self.sasl_credentials = sasl_credentials
    -            self._usage = ConnectionUsage() if collect_usage else None
    -            self.socket = None
    -            self.tls_started = False
    -            self.sasl_in_progress = False
    -            self.read_only = read_only
    -            self._context_state = []
    -            self._deferred_open = False
    -            self._deferred_bind = False
    -            self._deferred_start_tls = False
    -            self._bind_controls = None
    -            self._executing_deferred = False
    -            self.lazy = lazy
    -            self.pool_name = pool_name if pool_name else conf_default_pool_name
    -            self.pool_size = pool_size
    -            self.pool_lifetime = pool_lifetime
    -            self.pool_keepalive = pool_keepalive
    -            self.starting_tls = False
    -            self.check_names = check_names
    -            self.raise_exceptions = raise_exceptions
    -            self.auto_range = True if auto_range else False
    -            self.extend = ExtendedOperationsRoot(self)
    -            self._entries = []
    -            self.fast_decoder = fast_decoder
    -            self.receive_timeout = receive_timeout
    -            self.empty_attributes = return_empty_attributes
    -            self.use_referral_cache = use_referral_cache
    -            self.auto_escape = auto_escape
    -            self.auto_encode = auto_encode
    -
    -            if isinstance(server, STRING_TYPES):
    -                server = Server(server)
    -            if isinstance(server, SEQUENCE_TYPES):
    -                server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True)
    -
    -            if isinstance(server, ServerPool):
    -                self.server_pool = server
    -                self.server_pool.initialize(self)
    -                self.server = self.server_pool.get_current_server(self)
    -            else:
    -                self.server_pool = None
    -                self.server = server
    -
    -            # if self.authentication == SIMPLE and self.user and self.check_names:
    -            #     self.user = safe_dn(self.user)
    -            #     if log_enabled(EXTENDED):
    -            #         log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self)
    -
    -            if self.strategy_type == SYNC:
    -                self.strategy = SyncStrategy(self)
    -            elif self.strategy_type == ASYNC:
    -                self.strategy = AsyncStrategy(self)
    -            elif self.strategy_type == LDIF:
    -                self.strategy = LdifProducerStrategy(self)
    -            elif self.strategy_type == RESTARTABLE:
    -                self.strategy = RestartableStrategy(self)
    -            elif self.strategy_type == REUSABLE:
    -                self.strategy = ReusableStrategy(self)
    -                self.lazy = False
    -            elif self.strategy_type == MOCK_SYNC:
    -                self.strategy = MockSyncStrategy(self)
    -            elif self.strategy_type == MOCK_ASYNC:
    -                self.strategy = MockAsyncStrategy(self)
    -            elif self.strategy_type == ASYNC_STREAM:
    -                self.strategy = AsyncStreamStrategy(self)
    -            else:
    -                self.last_error = 'unknown strategy'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPUnknownStrategyError(self.last_error)
    -
    -            # maps strategy functions to connection functions
    -            self.send = self.strategy.send
    -            self.open = self.strategy.open
    -            self.get_response = self.strategy.get_response
    -            self.post_send_single_response = self.strategy.post_send_single_response
    -            self.post_send_search = self.strategy.post_send_search
    -
    -            if not self.strategy.no_real_dsa:
    -                self.do_auto_bind()
    -            # else:  # for strategies with a fake server set get_info to NONE if server hasn't a schema
    -            #     if self.server and not self.server.schema:
    -            #         self.server.get_info = NONE
    -            if log_enabled(BASIC):
    -                if get_library_log_hide_sensitive_data():
    -                    log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped())
    -                else:
    -                    log(BASIC, 'instantiated Connection: <%r>', self)
    -
    -    def do_auto_bind(self):
    -        if self.auto_bind and self.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
    -            if log_enabled(BASIC):
    -                log(BASIC, 'performing automatic bind for <%s>', self)
    -            if self.closed:
    -                self.open(read_server_info=False)
    -            if self.auto_bind == AUTO_BIND_NO_TLS:
    -                self.bind(read_server_info=True)
    -            elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
    -                self.start_tls(read_server_info=False)
    -                self.bind(read_server_info=True)
    -            elif self.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
    -                self.bind(read_server_info=False)
    -                self.start_tls(read_server_info=True)
    -            if not self.bound:
    -                self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '')
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPBindError(self.last_error)
    -
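`do_auto_bind` distinguishes three orderings: plain bind, TLS before bind, and TLS after bind. For instance, `AUTO_BIND_TLS_BEFORE_BIND` opens the socket, negotiates TLS, then binds (sketch with placeholder credentials):

```python
from ldap3 import Connection, AUTO_BIND_TLS_BEFORE_BIND

conn = Connection('ldap.example.com',
                  user='cn=admin,dc=example,dc=com', password='secret',
                  auto_bind=AUTO_BIND_TLS_BEFORE_BIND)  # open -> start_tls -> bind
```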
    -    def __str__(self):
    -        s = [
    -            str(self.server) if self.server else 'None',
    -            'user: ' + str(self.user),
    -            'lazy' if self.lazy else 'not lazy',
    -            'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'),
    -            'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'),
    -            _format_socket_endpoints(self.socket),
    -            'tls not started' if not self.tls_started else('deferred start_tls' if self._deferred_start_tls else 'tls started'),
    -            'listening' if self.listening else 'not listening',
    -            self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy',
    -            'internal decoder' if self.fast_decoder else 'pyasn1 decoder'
    -        ]
    -        return ' - '.join(s)
    -
    -    def __repr__(self):
    -        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    -        if self.server_pool:
    -            r = 'Connection(server={0.server_pool!r}'.format(self)
    -        else:
    -            r = 'Connection(server={0.server!r}'.format(self)
    -        r += '' if self.user is None else ', user={0.user!r}'.format(self)
    -        r += '' if self.password is None else ', password={0.password!r}'.format(self)
    -        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
    -        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    -        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
    -        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
    -        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
    -        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
    -        r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
    -        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
    -        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
    -        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
    -        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
    -        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
    -        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
    -        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
    -        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
    -        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
    -        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
    -        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
    -        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
    -        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
    -        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
    -        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
    -        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
    -        r += ')'
    -
    -        return r
    -
    -    def repr_with_sensitive_data_stripped(self):
    -        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    -        if self.server_pool:
    -            r = 'Connection(server={0.server_pool!r}'.format(self)
    -        else:
    -            r = 'Connection(server={0.server!r}'.format(self)
    -        r += '' if self.user is None else ', user={0.user!r}'.format(self)
    -        r += '' if self.password is None else ", password='{0}'".format('<stripped %d characters of sensitive data>' % len(self.password))
    -        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
    -        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    -        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
    -        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
    -        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
    -        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
    -        if self.sasl_mechanism == DIGEST_MD5:
    -            r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
    -        else:
    -            r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
    -        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
    -        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
    -        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
    -        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
    -        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
    -        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
    -        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
    -        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
    -        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
    -        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
    -        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
    -        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
    -        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
    -        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
    -        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
    -        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
    -        r += ')'
    -
    -        return r
    -
    -    @property
    -    def stream(self):
    -        """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
    -        :return: reference to the response stream if defined in the strategy.
    -        """
    -        return self.strategy.get_stream() if self.strategy.can_stream else None
    -
    -    @stream.setter
    -    def stream(self, value):
    -        with self.connection_lock:
    -            if self.strategy.can_stream:
    -                self.strategy.set_stream(value)
    -
    -    @property
    -    def usage(self):
    -        """Usage statistics for the connection.
    -        :return: Usage object
    -        """
    -        if not self._usage:
    -            return None
    -        if self.strategy.pooled:  # update master connection usage from pooled connections
    -            self._usage.reset()
    -            for worker in self.strategy.pool.workers:
    -                self._usage += worker.connection.usage
    -            self._usage += self.strategy.pool.terminated_usage
    -        return self._usage
    -
    -    def __enter__(self):
    -        with self.connection_lock:
    -            self._context_state.append((self.bound, self.closed))  # save status out of context as a tuple in a list
    -            if self.auto_bind != AUTO_BIND_NONE:
    -                if self.auto_bind == AUTO_BIND_DEFAULT:
    -                    self.auto_bind = AUTO_BIND_NO_TLS
    -                if self.closed:
    -                    self.open()
    -                if not self.bound:
    -                    self.bind()
    -
    -            return self
    -
    -    # noinspection PyUnusedLocal
    -    def __exit__(self, exc_type, exc_val, exc_tb):
    -        with self.connection_lock:
    -            context_bound, context_closed = self._context_state.pop()
    -            if (not context_bound and self.bound) or self.stream:  # restore status prior to entering context
    -                try:
    -                    self.unbind()
    -                except LDAPExceptionError:
    -                    pass
    -
    -            if not context_closed and self.closed:
    -                self.open()
    -
    -            if exc_type is not None:
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', exc_type, self)
    -                return False  # re-raise LDAPExceptionError
    -
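`__enter__`/`__exit__` above make the connection a context manager: entering opens and binds if needed, and exiting restores the bound/closed state saved in `_context_state`. Sketch:

```python
with Connection('ldap.example.com', user='cn=admin,dc=example,dc=com',
                password='secret') as conn:
    conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
# on exit the connection is unbound again if it was unbound before the block
```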
    -    def bind(self,
    -             read_server_info=True,
    -             controls=None):
    -        """Bind to ldap Server with the authentication method and the user defined in the connection
    -
    -        :param read_server_info: reads info from server
    -        :param controls: LDAP controls to send along with the bind operation
    -        :type controls: list of tuple
    -        :return: bool
    -
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start BIND operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            if self.lazy and not self._executing_deferred:
    -                if self.strategy.pooled:
    -                    self.strategy.validate_bind(controls)
    -                self._deferred_bind = True
    -                self._bind_controls = controls
    -                self.bound = True
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'deferring bind for <%s>', self)
    -            else:
    -                self._deferred_bind = False
    -                self._bind_controls = None
    -                if self.closed:  # try to open connection if closed
    -                    self.open(read_server_info=False)
    -                if self.authentication == ANONYMOUS:
    -                    if log_enabled(PROTOCOL):
    -                        log(PROTOCOL, 'performing anonymous BIND for <%s>', self)
    -                    if not self.strategy.pooled:
    -                        request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode)
    -                        if log_enabled(PROTOCOL):
    -                            log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
    -                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
    -                    else:
    -                        response = self.strategy.validate_bind(controls)  # only for REUSABLE
    -                elif self.authentication == SIMPLE:
    -                    if log_enabled(PROTOCOL):
    -                        log(PROTOCOL, 'performing simple BIND for <%s>', self)
    -                    if not self.strategy.pooled:
    -                        request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode)
    -                        if log_enabled(PROTOCOL):
    -                            log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
    -                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
    -                    else:
    -                        response = self.strategy.validate_bind(controls)  # only for REUSABLE
    -                elif self.authentication == SASL:
    -                    if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS:
    -                        if log_enabled(PROTOCOL):
    -                            log(PROTOCOL, 'performing SASL BIND for <%s>', self)
    -                        if not self.strategy.pooled:
    -                            response = self.do_sasl_bind(controls)
    -                        else:
    -                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
    -                    else:
    -                        self.last_error = 'requested SASL mechanism not supported'
    -                        if log_enabled(ERROR):
    -                            log(ERROR, '%s for <%s>', self.last_error, self)
    -                        raise LDAPSASLMechanismNotSupportedError(self.last_error)
    -                elif self.authentication == NTLM:
    -                    if self.user and self.password and len(self.user.split('\\')) == 2:
    -                        if log_enabled(PROTOCOL):
    -                            log(PROTOCOL, 'performing NTLM BIND for <%s>', self)
    -                        if not self.strategy.pooled:
    -                            response = self.do_ntlm_bind(controls)
    -                        else:
    -                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
    -                    else:  # user or password missing
    -                        self.last_error = 'NTLM needs domain\\username and a password'
    -                        if log_enabled(ERROR):
    -                            log(ERROR, '%s for <%s>', self.last_error, self)
    -                        raise LDAPUnknownAuthenticationMethodError(self.last_error)
    -                else:
    -                    self.last_error = 'unknown authentication method'
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '%s for <%s>', self.last_error, self)
    -                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
    -
    -                if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM):  # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy
    -                    _, result = self.get_response(response)
    -                    if log_enabled(PROTOCOL):
    -                        log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self)
    -                elif self.strategy.sync:
    -                    result = self.result
    -                    if log_enabled(PROTOCOL):
    -                        log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self)
    -                elif self.strategy.pooled or self.authentication in (SASL, NTLM):  # asynchronous SASL and NTLM or reusable strategy get the bind result synchronously
    -                    result = response
    -                else:
    -                    self.last_error = 'unknown authentication method'
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '%s for <%s>', self.last_error, self)
    -                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
    -
    -                if result is None:
    -                    # self.bound = True if self.strategy_type == REUSABLE else False
    -                    self.bound = False
    -                elif result is True:
    -                    self.bound = True
    -                elif result is False:
    -                    self.bound = False
    -                else:
    -                    self.bound = True if result['result'] == RESULT_SUCCESS else False
    -                    if not self.bound and result and result['description'] and not self.last_error:
    -                        self.last_error = result['description']
    -
    -                if read_server_info and self.bound:
    -                    self.refresh_server_info()
    -            self._entries = []
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done BIND operation, result <%s>', self.bound)
    -
    -            return self.bound
    -
    -    def rebind(self,
    -               user=None,
    -               password=None,
    -               authentication=None,
    -               sasl_mechanism=None,
    -               sasl_credentials=None,
    -               read_server_info=True,
    -               controls=None
    -               ):
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start (RE)BIND operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            if user:
    -                self.user = user
    -            if password is not None:
    -                self.password = password
    -            if not authentication and user:
    -                self.authentication = SIMPLE
    -            if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
    -                self.authentication = authentication
    -            elif authentication is not None:
    -                self.last_error = 'unknown authentication method'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPUnknownAuthenticationMethodError(self.last_error)
    -            if sasl_mechanism:
    -                self.sasl_mechanism = sasl_mechanism
    -            if sasl_credentials:
    -                self.sasl_credentials = sasl_credentials
    -
    -            # if self.authentication == SIMPLE and self.user and self.check_names:
    -            #     self.user = safe_dn(self.user)
    -            #     if log_enabled(EXTENDED):
    -            #         log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
    -
    -            if not self.strategy.pooled:
    -                try:
    -                    return self.bind(read_server_info, controls)
    -                except LDAPSocketReceiveError:
    -                    raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
    -            else:
    -                self.strategy.pool.rebind_pool()
    -                return True
    -
    -    def unbind(self,
    -               controls=None):
    -        """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
    -
    -        :param controls: LDAP controls to send along with the bind operation
    -
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start UNBIND operation via <%s>', self)
    -
    -        if self.use_referral_cache:
    -            self.strategy.unbind_referral_cache()
    -
    -        self.last_error = None
    -        with self.connection_lock:
    -            if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open):  # _clear deferred status
    -                self.strategy.close()
    -                self._deferred_open = False
    -                self._deferred_bind = False
    -                self._deferred_start_tls = False
    -            elif not self.closed:
    -                request = unbind_operation()
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'UNBIND request sent via <%s>', self)
    -                self.send('unbindRequest', request, controls)
    -                self.strategy.close()
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done UNBIND operation, result <%s>', True)
    -
    -            return True
    -
    -    def search(self,
    -               search_base,
    -               search_filter,
    -               search_scope=SUBTREE,
    -               dereference_aliases=DEREF_ALWAYS,
    -               attributes=None,
    -               size_limit=0,
    -               time_limit=0,
    -               types_only=False,
    -               get_operational_attributes=False,
    -               controls=None,
    -               paged_size=None,
    -               paged_criticality=False,
    -               paged_cookie=None,
    -               auto_escape=None):
    -        """
    -        Perform an ldap search:
    -
-        - If attributes is empty no attribute is returned
-        - If attributes is ALL_ATTRIBUTES all attributes are returned
-        - If paged_size is an int greater than 0 a simple paged search
-          is tried as described in RFC2696 with the specified size
-        - If paged_size is 0 and a cookie is present the search is
-          abandoned on the server
-        - Cookie is an opaque string received in the last paged search
-          and must be used on the next paged search request
-        - If lazy == True open and bind will be deferred until another
-          LDAP operation is performed
-        - If missing_attributes == True then an attribute not returned by the server is set to None
-        - If auto_escape is set it overrides the Connection auto_escape
    -        """
    -        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start SEARCH operation via <%s>', self)
    -
    -        if self.check_names and search_base:
    -            search_base = safe_dn(search_base)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            if not attributes:
    -                attributes = [NO_ATTRIBUTES]
    -            elif attributes == ALL_ATTRIBUTES:
    -                attributes = [ALL_ATTRIBUTES]
    -
    -            if isinstance(attributes, STRING_TYPES):
    -                attributes = [attributes]
    -
    -            if get_operational_attributes and isinstance(attributes, list):
    -                attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
    -            elif get_operational_attributes and isinstance(attributes, tuple):
    -                attributes += (ALL_OPERATIONAL_ATTRIBUTES, )  # concatenate tuple
    -
    -            if isinstance(paged_size, int):
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self)
    -
    -                if controls is None:
    -                    controls = []
    -                else:
    -                    # Copy the controls to prevent modifying the original object
    -                    controls = list(controls)
    -                controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie))
    -
    -            if self.server and self.server.schema and self.check_names:
    -                for attribute_name in attributes:
    -                    if ';' in attribute_name:  # remove tags
    -                        attribute_name_to_check = attribute_name.split(';')[0]
    -                    else:
    -                        attribute_name_to_check = attribute_name
    -                    if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    -                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    -
    -            request = search_operation(search_base,
    -                                       search_filter,
    -                                       search_scope,
    -                                       dereference_aliases,
    -                                       attributes,
    -                                       size_limit,
    -                                       time_limit,
    -                                       types_only,
    -                                       self.auto_escape if auto_escape is None else auto_escape,
    -                                       self.auto_encode,
    -                                       self.server.schema if self.server else None,
    -                                       validator=self.server.custom_validator,
    -                                       check_names=self.check_names)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self)
    -            response = self.post_send_search(self.send('searchRequest', request, controls))
    -            self._entries = []
    -
    -            if isinstance(response, int):  # asynchronous strategy
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self)
    -            else:
    -                return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -                if log_enabled(PROTOCOL):
    -                    for entry in response:
    -                        if entry['type'] == 'searchResEntry':
    -                            log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self)
    -                        elif entry['type'] == 'searchResRef':
    -                            log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self)
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done SEARCH operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def compare(self,
    -                dn,
    -                attribute,
    -                value,
    -                controls=None):
    -        """
    -        Perform a compare operation
    -        """
    -        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start COMPARE operation via <%s>', self)
    -        self.last_error = None
    -        if self.check_names:
    -            dn = safe_dn(dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self)
    -
    -        if self.server and self.server.schema and self.check_names:
    -            if ';' in attribute:  # remove tags for checking
    -                attribute_name_to_check = attribute.split(';')[0]
    -            else:
    -                attribute_name_to_check = attribute
    -
    -            if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    -                raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    -
    -        if isinstance(value, SEQUENCE_TYPES):  # value can't be a sequence
    -            raise LDAPInvalidValueError('value cannot be a sequence')
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('compareRequest', request, controls))
    -            self._entries = []
    -            if isinstance(response, int):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self)
    -            else:
    -                return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False
    -                if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self)
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done COMPARE operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def add(self,
    -            dn,
    -            object_class=None,
    -            attributes=None,
    -            controls=None):
    -        """
-        Add dn to the DIT; object_class can be None, a class name or a list
-        of class names.
-
-        attributes is a dictionary in the form {'attr': 'val'} or
-        {'attr': ['val1', 'val2', ...]} for multivalued attributes
    -        """
    -        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    -        conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start ADD operation via <%s>', self)
    -        self.last_error = None
    -        _attributes = deepcopy(attributes)  # dict could change when adding objectClass values
    -        if self.check_names:
    -            dn = safe_dn(dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self)
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            attr_object_class = []
    -            if object_class is None:
    -                parm_object_class = []
    -            else:
    -                parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class]
    -
    -            object_class_attr_name = ''
    -            if _attributes:
    -                for attr in _attributes:
    -                    if attr.lower() == 'objectclass':
    -                        object_class_attr_name = attr
    -                        attr_object_class = list(_attributes[object_class_attr_name]) if isinstance(_attributes[object_class_attr_name], SEQUENCE_TYPES) else [_attributes[object_class_attr_name]]
    -                        break
    -            else:
    -                _attributes = dict()
    -
    -            if not object_class_attr_name:
    -                object_class_attr_name = 'objectClass'
    -
    -            attr_object_class = [to_unicode(object_class) for object_class in attr_object_class]  # converts objectclass to unicode in case of bytes value
    -            _attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, [])  # remove duplicate ObjectClasses
    -
    -            if not _attributes[object_class_attr_name]:
    -                self.last_error = 'objectClass attribute is mandatory'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPObjectClassError(self.last_error)
    -
    -            if self.server and self.server.schema and self.check_names:
    -                for object_class_name in _attributes[object_class_attr_name]:
    -                    if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes:
    -                        raise LDAPObjectClassError('invalid object class ' + str(object_class_name))
    -
    -                for attribute_name in _attributes:
    -                    if ';' in attribute_name:  # remove tags for checking
    -                        attribute_name_to_check = attribute_name.split(';')[0]
    -                    else:
    -                        attribute_name_to_check = attribute_name
    -
    -                    if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    -                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    -
    -            request = add_operation(dn, _attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('addRequest', request, controls))
    -            self._entries = []
    -
    -            if isinstance(response, STRING_TYPES + (int, )):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self)
    -            else:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self)
    -                return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done ADD operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def delete(self,
    -               dn,
    -               controls=None):
    -        """
    -        Delete the entry identified by the DN from the DIB.
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start DELETE operation via <%s>', self)
    -        self.last_error = None
    -        if self.check_names:
    -            dn = safe_dn(dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self)
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            if self.read_only:
    -                self.last_error = 'connection is read-only'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPConnectionIsReadOnlyError(self.last_error)
    -
    -            request = delete_operation(dn)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('delRequest', request, controls))
    -            self._entries = []
    -
    -            if isinstance(response, STRING_TYPES + (int, )):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self)
    -            else:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self)
    -                return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done DELETE operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def modify(self,
    -               dn,
    -               changes,
    -               controls=None):
    -        """
-        Modify attributes of an entry
-
-        - changes is a dictionary in the form {'attribute1': change, 'attribute2': [change, change, ...], ...}
-        - change is (operation, [value1, value2, ...])
-        - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE) or 3 (MODIFY_INCREMENT)
    -        """
    -        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start MODIFY operation via <%s>', self)
    -        self.last_error = None
    -        if self.check_names:
    -            dn = safe_dn(dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self)
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            if self.read_only:
    -                self.last_error = 'connection is read-only'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPConnectionIsReadOnlyError(self.last_error)
    -
    -            if not isinstance(changes, dict):
    -                self.last_error = 'changes must be a dictionary'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPChangeError(self.last_error)
    -
    -            if not changes:
    -                self.last_error = 'no changes in modify request'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPChangeError(self.last_error)
    -
    -            for attribute_name in changes:
    -                if self.server and self.server.schema and self.check_names:
    -                    if ';' in attribute_name:  # remove tags for checking
    -                        attribute_name_to_check = attribute_name.split(';')[0]
    -                    else:
    -                        attribute_name_to_check = attribute_name
    -
    -                    if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    -                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    -                change = changes[attribute_name]
    -                if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
    -                    if len(change) != 2:
    -                        self.last_error = 'malformed change'
    -                        if log_enabled(ERROR):
    -                            log(ERROR, '%s for <%s>', self.last_error, self)
    -                        raise LDAPChangeError(self.last_error)
    -
-                    changes[attribute_name] = [change]  # wrap the single change in a list
    -                else:
    -                    for change_operation in change:
    -                        if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
    -                            self.last_error = 'invalid change list'
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '%s for <%s>', self.last_error, self)
    -                            raise LDAPChangeError(self.last_error)
    -            request = modify_operation(dn, changes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('modifyRequest', request, controls))
    -            self._entries = []
    -
    -            if isinstance(response, STRING_TYPES + (int, )):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self)
    -            else:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self)
    -                return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done MODIFY operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def modify_dn(self,
    -                  dn,
    -                  relative_dn,
    -                  delete_old_dn=True,
    -                  new_superior=None,
    -                  controls=None):
    -        """
-        Modify the DN of an entry or perform a move of the entry in the
-        DIT.
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start MODIFY DN operation via <%s>', self)
    -        self.last_error = None
    -        if self.check_names:
    -            dn = safe_dn(dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self)
    -            relative_dn = safe_dn(relative_dn)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self)
    -
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            if self.read_only:
    -                self.last_error = 'connection is read-only'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPConnectionIsReadOnlyError(self.last_error)
    -
    -            if new_superior and not dn.startswith(relative_dn):  # as per RFC4511 (4.9)
-                self.last_error = 'DN cannot change while performing a move'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', self.last_error, self)
    -                raise LDAPChangeError(self.last_error)
    -
    -            request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('modDNRequest', request, controls))
    -            self._entries = []
    -
    -            if isinstance(response, STRING_TYPES + (int, )):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self)
    -            else:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self)
    -                return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done MODIFY DN operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def abandon(self,
    -                message_id,
    -                controls=None):
    -        """
    -        Abandon the operation indicated by message_id
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start ABANDON operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            return_value = False
    -            if self.strategy._outstanding or message_id == 0:
-                # only the current operation can be abandoned; abandon, bind and unbind can never be abandoned.
-                # message ID 0 identifies no operation and is used as a "ping" to keep the connection alive
    -                if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0:
    -                    request = abandon_operation(message_id)
    -                    if log_enabled(PROTOCOL):
    -                        log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self)
    -                    self.send('abandonRequest', request, controls)
    -                    self.result = None
    -                    self.response = None
    -                    self._entries = []
    -                    return_value = True
    -                else:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self)
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done ABANDON operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def extended(self,
    -                 request_name,
    -                 request_value=None,
    -                 controls=None,
    -                 no_encode=None):
    -        """
    -        Performs an extended operation
    -        """
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start EXTENDED operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            self._fire_deferred()
    -            request = extended_operation(request_name, request_value, no_encode=no_encode)
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self)
    -            response = self.post_send_single_response(self.send('extendedReq', request, controls))
    -            self._entries = []
    -            if isinstance(response, int):
    -                return_value = response
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self)
    -            else:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self)
    -                return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False
    -                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    -                    self.last_error = self.result['description']
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done EXTENDED operation, result <%s>', return_value)
    -
    -            return return_value
    -
-    def start_tls(self, read_server_info=True):  # as per RFC4511. Removal of TLS is defined as MAY in RFC4511 so the client can't implement a generic stop_tls method
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start START TLS operation via <%s>', self)
    -
    -        with self.connection_lock:
    -            return_value = False
    -            if not self.server.tls:
    -                self.server.tls = Tls()
    -
    -            if self.lazy and not self._executing_deferred:
    -                self._deferred_start_tls = True
    -                self.tls_started = True
    -                return_value = True
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'deferring START TLS for <%s>', self)
    -            else:
    -                self._deferred_start_tls = False
    -                if self.server.tls.start_tls(self) and self.strategy.sync:  # for asynchronous connections _start_tls is run by the strategy
    -                    if read_server_info:
    -                        self.refresh_server_info()  # refresh server info as per RFC4515 (3.1.5)
    -                    return_value = True
    -                elif not self.strategy.sync:
    -                    return_value = True
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done START TLS operation, result <%s>', return_value)
    -
    -            return return_value
    -
    -    def do_sasl_bind(self,
    -                     controls):
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start SASL BIND operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            result = None
    -
    -            if not self.sasl_in_progress:
    -                self.sasl_in_progress = True
    -                try:
    -                    if self.sasl_mechanism == EXTERNAL:
    -                        result = sasl_external(self, controls)
    -                    elif self.sasl_mechanism == DIGEST_MD5:
    -                        result = sasl_digest_md5(self, controls)
    -                    elif self.sasl_mechanism == GSSAPI:
    -                        from ..protocol.sasl.kerberos import sasl_gssapi  # needs the gssapi package
    -                        result = sasl_gssapi(self, controls)
    -                    elif self.sasl_mechanism == 'PLAIN':
    -                        result = sasl_plain(self, controls)
    -                finally:
    -                    self.sasl_in_progress = False
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done SASL BIND operation, result <%s>', result)
    -
    -            return result
    -
    -    def do_ntlm_bind(self,
    -                     controls):
    -        if log_enabled(BASIC):
    -            log(BASIC, 'start NTLM BIND operation via <%s>', self)
    -        self.last_error = None
    -        with self.connection_lock:
    -            result = None
    -            if not self.sasl_in_progress:
-                self.sasl_in_progress = True  # NTLM is treated like a SASL authentication
    -                # additional import for NTLM
    -                from ..utils.ntlm import NtlmClient
    -                domain_name, user_name = self.user.split('\\', 1)
    -                ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password)
    -
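-                # the full Sicily handshake below is three bind requests
-                # (step names are the request markers used in this module):
-                #   1. SICILY_PACKAGE_DISCOVERY - ask which security packages the server offers
-                #   2. SICILY_NEGOTIATE_NTLM    - send the NTLM negotiate message
-                #   3. SICILY_RESPONSE_NTLM     - answer the server challenge with the NTLM authenticate message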
    -                # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx
    -                # send a sicilyPackageDiscovery request (in the bindRequest)
    -                request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client)
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self)
    -                response = self.post_send_single_response(self.send('bindRequest', request, controls))
    -                if not self.strategy.sync:
    -                    _, result = self.get_response(response)
    -                else:
    -                    result = response[0]
    -                if 'server_creds' in result:
    -                    sicily_packages = result['server_creds'].decode('ascii').split(';')
    -                    if 'NTLM' in sicily_packages:  # NTLM available on server
    -                        request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client)
    -                        if log_enabled(PROTOCOL):
    -                            log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self)
    -                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
    -                        if not self.strategy.sync:
    -                            _, result = self.get_response(response)
    -                        else:
    -                            if log_enabled(PROTOCOL):
    -                                log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0], self)
    -                            result = response[0]
    -
    -                        if result['result'] == RESULT_SUCCESS:
    -                            request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client, result['server_creds'])
    -                            if log_enabled(PROTOCOL):
    -                                log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self)
    -                            response = self.post_send_single_response(self.send('bindRequest', request, controls))
    -                            if not self.strategy.sync:
    -                                _, result = self.get_response(response)
    -                            else:
    -                                if log_enabled(PROTOCOL):
    -                                    log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self)
    -                                result = response[0]
    -                else:
    -                    result = None
    -                self.sasl_in_progress = False
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'done SASL NTLM operation, result <%s>', result)
    -
    -            return result
    -
    -    def refresh_server_info(self):
    -        # if self.strategy.no_real_dsa:  # do not refresh for mock strategies
    -        #     return
    -
    -        if not self.strategy.pooled:
    -            with self.connection_lock:
    -                if not self.closed:
    -                    if log_enabled(BASIC):
    -                        log(BASIC, 'refreshing server info for <%s>', self)
    -                    previous_response = self.response
    -                    previous_result = self.result
    -                    previous_entries = self._entries
    -                    self.server.get_info_from_server(self)
    -                    self.response = previous_response
    -                    self.result = previous_result
    -                    self._entries = previous_entries
    -        else:
    -            if log_enabled(BASIC):
    -                log(BASIC, 'refreshing server info from pool for <%s>', self)
    -            self.strategy.pool.get_info_from_server()
    -
    -    def response_to_ldif(self,
    -                         search_result=None,
    -                         all_base64=False,
    -                         line_separator=None,
    -                         sort_order=None,
    -                         stream=None):
    -        with self.connection_lock:
    -            if search_result is None:
    -                search_result = self.response
    -
    -            if isinstance(search_result, SEQUENCE_TYPES):
    -                ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order)
    -                ldif_lines = add_ldif_header(ldif_lines)
    -                line_separator = line_separator or linesep
    -                ldif_output = line_separator.join(ldif_lines)
    -                if stream:
    -                    if stream.tell() == 0:
    -                        header = add_ldif_header(['-'])[0]
    -                        stream.write(prepare_for_stream(header + line_separator + line_separator))
    -                    stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self)
    -                return ldif_output
    -
    -            return None
    -
    -    def response_to_json(self,
    -                         raw=False,
    -                         search_result=None,
    -                         indent=4,
    -                         sort=True,
    -                         stream=None,
    -                         checked_attributes=True,
    -                         include_empty=True):
    -
    -        with self.connection_lock:
    -            if search_result is None:
    -                search_result = self.response
    -
    -            if isinstance(search_result, SEQUENCE_TYPES):
    -                json_dict = dict()
    -                json_dict['entries'] = []
    -
    -                for response in search_result:
    -                    if response['type'] == 'searchResEntry':
    -                        entry = dict()
    -
    -                        entry['dn'] = response['dn']
    -                        if checked_attributes:
    -                            if not include_empty:
    -                                # needed for python 2.6 compatibility
    -                                entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
    -                            else:
    -                                entry['attributes'] = dict(response['attributes'])
    -                        if raw:
    -                            if not include_empty:
    -                                # needed for python 2.6 compatibility
-                                entry['raw'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
    -                            else:
    -                                entry['raw'] = dict(response['raw_attributes'])
    -                        json_dict['entries'].append(entry)
    -
    -                if str is bytes:  # Python 2
    -                    check_json_dict(json_dict)
    -
    -                json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
    -
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
    -                if stream:
    -                    stream.write(json_output)
    -
    -                return json_output
    -
    -    def response_to_file(self,
    -                         target,
    -                         raw=False,
    -                         indent=4,
    -                         sort=True):
    -        with self.connection_lock:
    -            if self.response:
    -                if isinstance(target, STRING_TYPES):
    -                    target = open(target, 'w+')
    -
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'writing response to file for <%s>', self)
    -
    -                target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
    -                target.close()
    -
    -    def _fire_deferred(self, read_info=True):
    -        with self.connection_lock:
    -            if self.lazy and not self._executing_deferred:
    -                self._executing_deferred = True
    -
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
    -                try:
    -                    if self._deferred_open:
    -                        self.open(read_server_info=False)
    -                    if self._deferred_start_tls:
    -                        self.start_tls(read_server_info=False)
    -                    if self._deferred_bind:
    -                        self.bind(read_server_info=False, controls=self._bind_controls)
    -                    if read_info:
    -                        self.refresh_server_info()
    -                except LDAPExceptionError as e:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '%s for <%s>', e, self)
    -                    raise  # re-raise LDAPExceptionError
    -                finally:
    -                    self._executing_deferred = False
    -
    -    @property
    -    def entries(self):
    -        if self.response:
    -            if not self._entries:
    -                self._entries = self._get_entries(self.response)
    -        return self._entries
    -
    -    def _get_entries(self, search_response):
    -        with self.connection_lock:
    -            from .. import ObjectDef, Reader
    -
-            # build a table of ObjectDefs, grouping the entries found in search_response by their attribute sets; a subset is folded into its superset
    -            attr_sets = []
    -            for response in search_response:
    -                if response['type'] == 'searchResEntry':
    -                    resp_attr_set = set(response['attributes'].keys())
    -                    if resp_attr_set not in attr_sets:
    -                        attr_sets.append(resp_attr_set)
    -            attr_sets.sort(key=lambda x: -len(x))  # sorts the list in descending length order
    -            unique_attr_sets = []
    -            for attr_set in attr_sets:
    -                for unique_set in unique_attr_sets:
    -                    if unique_set >= attr_set:  # checks if unique set is a superset of attr_set
    -                        break
    -                else:  # the attr_set is not a subset of any element in unique_attr_sets
    -                    unique_attr_sets.append(attr_set)
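-            # e.g. attribute sets {a, b, c}, {a, b} and {d} reduce to the
-            # maximal sets {a, b, c} and {d}; an entry whose attributes form
-            # a subset reuses the ObjectDef built for its superset below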
    -            object_defs = []
    -            for attr_set in unique_attr_sets:
    -                object_def = ObjectDef(schema=self.server.schema)
    -                object_def += list(attr_set)  # converts the set in a list to be added to the object definition
    -                object_defs.append((attr_set,
    -                                    object_def,
    -                                    Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set))
    -                                   )  # objects_defs contains a tuple with the set, the ObjectDef and a cursor
    -
    -            entries = []
    -            for response in search_response:
    -                if response['type'] == 'searchResEntry':
    -                    resp_attr_set = set(response['attributes'].keys())
    -                    for object_def in object_defs:
    -                        if resp_attr_set <= object_def[0]:  # finds the ObjectDef for the attribute set of this entry
    -                            entry = object_def[2]._create_entry(response)
    -                            entries.append(entry)
    -                            break
    -                    else:
    -                        if log_enabled(ERROR):
    -                            log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self)
    -                        raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set))
    -
    -        return entries
    +"""
    +"""
    +
    +# Created on 2014.05.31
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +from copy import deepcopy
    +from os import linesep
    +from threading import RLock, Lock
    +from functools import reduce
    +import json
    +
    +from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \
    +    SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \
    +    RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND,\
    +    AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL,\
    +    DIGEST_MD5, GSSAPI, PLAIN
    +
    +from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE
    +from ..extend import ExtendedOperationsRoot
    +from .pooling import ServerPool
    +from .server import Server
    +from ..operation.abandon import abandon_operation, abandon_request_to_dict
    +from ..operation.add import add_operation, add_request_to_dict
    +from ..operation.bind import bind_operation, bind_request_to_dict
    +from ..operation.compare import compare_operation, compare_request_to_dict
    +from ..operation.delete import delete_operation, delete_request_to_dict
    +from ..operation.extended import extended_operation, extended_request_to_dict
    +from ..operation.modify import modify_operation, modify_request_to_dict
    +from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict
    +from ..operation.search import search_operation, search_request_to_dict
    +from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    +from ..protocol.sasl.digestMd5 import sasl_digest_md5
    +from ..protocol.sasl.external import sasl_external
    +from ..protocol.sasl.plain import sasl_plain
    +from ..strategy.sync import SyncStrategy
    +from ..strategy.mockAsync import MockAsyncStrategy
    +from ..strategy.asynchronous import AsyncStrategy
    +from ..strategy.reusable import ReusableStrategy
    +from ..strategy.restartable import RestartableStrategy
    +from ..strategy.ldifProducer import LdifProducerStrategy
    +from ..strategy.mockSync import MockSyncStrategy
    +from ..strategy.asyncStream import AsyncStreamStrategy
    +from ..operation.unbind import unbind_operation
    +from ..protocol.rfc2696 import paged_search_control
    +from .usage import ConnectionUsage
    +from .tls import Tls
    +from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \
    +    LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \
    +    LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError, \
    +    LDAPInvalidPortError
    +
    +from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data
    +from ..utils.dn import safe_dn
    +from ..utils.port_validators import check_port_and_port_list
    +
    +
    +SASL_AVAILABLE_MECHANISMS = [EXTERNAL,
    +                             DIGEST_MD5,
    +                             GSSAPI,
    +                             PLAIN]
    +
    +CLIENT_STRATEGIES = [SYNC,
    +                     ASYNC,
    +                     LDIF,
    +                     RESTARTABLE,
    +                     REUSABLE,
    +                     MOCK_SYNC,
    +                     MOCK_ASYNC,
    +                     ASYNC_STREAM]
    +
    +
    +def _format_socket_endpoint(endpoint):
    +    if endpoint and len(endpoint) == 2:  # IPv4
    +        return str(endpoint[0]) + ':' + str(endpoint[1])
    +    elif endpoint and len(endpoint) == 4:  # IPv6
    +        return '[' + str(endpoint[0]) + ']:' + str(endpoint[1])
    +
    +    try:
    +        return str(endpoint)
    +    except Exception:
    +        return '?'
    +
    +
    +def _format_socket_endpoints(sock):
    +    if sock:
    +        try:
    +            local = sock.getsockname()
    +        except Exception:
    +            local = (None, None, None, None)
    +        try:
    +            remote = sock.getpeername()
    +        except Exception:
    +            remote = (None, None, None, None)
    +
+        return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>'
+    return '<no socket>'
    +
    +
    +# noinspection PyProtectedMember
    +class Connection(object):
    +    """Main ldap connection class.
    +
+    Controls, if used, must be a list of tuples. Each tuple must have 3
+    elements: the control OID, a boolean indicating whether the control is
+    critical, and a value.
    +
    +    If the boolean is set to True the server must honor the control or
    +    refuse the operation
    +
+    Mixing controls is allowed only as defined in the controls
+    specification (as per RFC 4511)
    +
    +    :param server: the Server object to connect to
    +    :type server: Server, str
    +    :param user: the user name for simple authentication
    +    :type user: str
    +    :param password: the password for simple authentication
    +    :type password: str
    +    :param auto_bind: specify if the bind will be performed automatically when defining the Connection object
    +    :type auto_bind: int, can be one of AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3
    +    :param version: LDAP version, default to 3
    +    :type version: int
    +    :param authentication: type of authentication
    +    :type authentication: int, can be one of AUTH_ANONYMOUS, AUTH_SIMPLE or AUTH_SASL, as specified in ldap3
    +    :param client_strategy: communication strategy used in the Connection
    +    :type client_strategy: can be one of STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, STRATEGY_LDIF_PRODUCER, STRATEGY_SYNC_RESTARTABLE, STRATEGY_REUSABLE_THREADED as specified in ldap3
    +    :param auto_referrals: specify if the connection object must automatically follow referrals
    +    :type auto_referrals: bool
    +    :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN'
    +    :type sasl_mechanism: str
    +    :param sasl_credentials: credentials for SASL mechanism
    +    :type sasl_credentials: tuple
    +    :param check_names: if True the library will check names of attributes and object classes against the schema. Also values found in entries will be formatted as indicated by the schema
    +    :type check_names: bool
    +    :param collect_usage: collect usage metrics in the usage attribute
    +    :type collect_usage: bool
    +    :param read_only: disable operations that modify data in the LDAP server
    +    :type read_only: bool
    +    :param lazy: open and bind the connection only when an actual operation is performed
    +    :type lazy: bool
    +    :param raise_exceptions: raise exceptions when operations are not successful; if False, unsuccessful operations return False instead of raising exceptions
    +    :type raise_exceptions: bool
    +    :param pool_name: pool name for pooled strategies
    +    :type pool_name: str
    +    :param pool_size: pool size for pooled strategies
    +    :type pool_size: int
    +    :param pool_lifetime: pool lifetime for pooled strategies
    +    :type pool_lifetime: int
    +    :param cred_store: credential store for gssapi
    +    :type cred_store: dict
    +    :param use_referral_cache: keep referral connections open and reuse them
    +    :type use_referral_cache: bool
    +    :param auto_escape: automatic escaping of filter values
    +    :type auto_escape: bool
    +    :param auto_encode: automatic encoding of attribute values
    +    :type auto_encode: bool
    +    :param source_address: the IP address or hostname to use as the source when opening the connection to the server
    +    :type source_address: str
    +    :param source_port: the source port to use when opening the connection to the server. Cannot be specified with source_port_list
    +    :type source_port: int
    +    :param source_port_list: a list of source ports to choose from when opening the connection to the server. Cannot be specified with source_port
    +    :type source_port_list: list
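    +
    +    Example -- a minimal synchronous sketch; the host name, DNs and
    +    password below are illustrative placeholders::
    +
    +        from ldap3 import Server, Connection
    +
    +        server = Server('ldap.example.com')
    +        conn = Connection(server, user='cn=admin,dc=example,dc=com',
    +                          password='secret', auto_bind=True)
    +        conn.search('dc=example,dc=com', '(objectClass=person)')
    +        conn.unbind()
    +
    +    The Connection is also a context manager; used in a with block it
    +    opens and binds on entry and restores the previous state on exit::
    +
    +        with Connection(server, 'cn=admin,dc=example,dc=com', 'secret') as conn:
    +            conn.search('dc=example,dc=com', '(objectClass=person)')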
    +    """
    +
    +    def __init__(self,
    +                 server,
    +                 user=None,
    +                 password=None,
    +                 auto_bind=AUTO_BIND_DEFAULT,
    +                 version=3,
    +                 authentication=None,
    +                 client_strategy=SYNC,
    +                 auto_referrals=True,
    +                 auto_range=True,
    +                 sasl_mechanism=None,
    +                 sasl_credentials=None,
    +                 check_names=True,
    +                 collect_usage=False,
    +                 read_only=False,
    +                 lazy=False,
    +                 raise_exceptions=False,
    +                 pool_name=None,
    +                 pool_size=None,
    +                 pool_lifetime=None,
    +                 cred_store=None,
    +                 fast_decoder=True,
    +                 receive_timeout=None,
    +                 return_empty_attributes=True,
    +                 use_referral_cache=False,
    +                 auto_escape=True,
    +                 auto_encode=True,
    +                 pool_keepalive=None,
    +                 source_address=None,
    +                 source_port=None,
    +                 source_port_list=None):
    +
    +        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    +        self.connection_lock = RLock()  # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread
    +        with self.connection_lock:
    +            if client_strategy not in CLIENT_STRATEGIES:
    +                self.last_error = 'unknown client connection strategy'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPUnknownStrategyError(self.last_error)
    +
    +            self.strategy_type = client_strategy
    +            self.user = user
    +            self.password = password
    +
    +            if not authentication and self.user:
    +                self.authentication = SIMPLE
    +            elif not authentication:
    +                self.authentication = ANONYMOUS
    +            elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
    +                self.authentication = authentication
    +            else:
    +                self.last_error = 'unknown authentication method'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPUnknownAuthenticationMethodError(self.last_error)
    +
    +            self.version = version
    +            self.auto_referrals = True if auto_referrals else False
    +            self.request = None
    +            self.response = None
    +            self.result = None
    +            self.bound = False
    +            self.listening = False
    +            self.closed = True
    +            self.last_error = None
    +            if auto_bind is False:  # compatibility with older version where auto_bind was a boolean
    +                self.auto_bind = AUTO_BIND_DEFAULT
    +            elif auto_bind is True:
    +                self.auto_bind = AUTO_BIND_NO_TLS
    +            else:
    +                self.auto_bind = auto_bind
    +            self.sasl_mechanism = sasl_mechanism
    +            self.sasl_credentials = sasl_credentials
    +            self._usage = ConnectionUsage() if collect_usage else None
    +            self.socket = None
    +            self.tls_started = False
    +            self.sasl_in_progress = False
    +            self.read_only = read_only
    +            self._context_state = []
    +            self._deferred_open = False
    +            self._deferred_bind = False
    +            self._deferred_start_tls = False
    +            self._bind_controls = None
    +            self._executing_deferred = False
    +            self.lazy = lazy
    +            self.pool_name = pool_name if pool_name else conf_default_pool_name
    +            self.pool_size = pool_size
    +            self.cred_store = cred_store
    +            self.pool_lifetime = pool_lifetime
    +            self.pool_keepalive = pool_keepalive
    +            self.starting_tls = False
    +            self.check_names = check_names
    +            self.raise_exceptions = raise_exceptions
    +            self.auto_range = True if auto_range else False
    +            self.extend = ExtendedOperationsRoot(self)
    +            self._entries = []
    +            self.fast_decoder = fast_decoder
    +            self.receive_timeout = receive_timeout
    +            self.empty_attributes = return_empty_attributes
    +            self.use_referral_cache = use_referral_cache
    +            self.auto_escape = auto_escape
    +            self.auto_encode = auto_encode
    +
    +            port_err = check_port_and_port_list(source_port, source_port_list)
    +            if port_err:
    +                if log_enabled(ERROR):
    +                    log(ERROR, port_err)
    +                raise LDAPInvalidPortError(port_err)
    +            # using an empty string to bind a socket means "use the default as if this wasn't provided" because socket
    +            # binding requires that you pass something for the ip if you want to pass a specific port
    +            self.source_address = source_address if source_address is not None else ''
    +            # using 0 as the source port to bind a socket means "use the default behavior of picking a random port from
    +            # all ports as if this wasn't provided" because socket binding requires that you pass something for the port
    +            # if you want to pass a specific ip
    +            self.source_port_list = [0]
    +            if source_port is not None:
    +                self.source_port_list = [source_port]
    +            elif source_port_list is not None:
    +                self.source_port_list = source_port_list[:]
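    +            # e.g. (illustrative values): Connection(server, source_address='10.0.0.5',
    +            #                                        source_port_list=[5001, 5002, 5003])
    +            # binds the client side of the socket to 10.0.0.5 and one of the listed ports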
    +
    +            if isinstance(server, STRING_TYPES):
    +                server = Server(server)
    +            if isinstance(server, SEQUENCE_TYPES):
    +                server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True)
    +
    +            if isinstance(server, ServerPool):
    +                self.server_pool = server
    +                self.server_pool.initialize(self)
    +                self.server = self.server_pool.get_current_server(self)
    +            else:
    +                self.server_pool = None
    +                self.server = server
    +
    +            # if self.authentication == SIMPLE and self.user and self.check_names:
    +            #     self.user = safe_dn(self.user)
    +            #     if log_enabled(EXTENDED):
    +            #         log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self)
    +
    +            if self.strategy_type == SYNC:
    +                self.strategy = SyncStrategy(self)
    +            elif self.strategy_type == ASYNC:
    +                self.strategy = AsyncStrategy(self)
    +            elif self.strategy_type == LDIF:
    +                self.strategy = LdifProducerStrategy(self)
    +            elif self.strategy_type == RESTARTABLE:
    +                self.strategy = RestartableStrategy(self)
    +            elif self.strategy_type == REUSABLE:
    +                self.strategy = ReusableStrategy(self)
    +                self.lazy = False
    +            elif self.strategy_type == MOCK_SYNC:
    +                self.strategy = MockSyncStrategy(self)
    +            elif self.strategy_type == MOCK_ASYNC:
    +                self.strategy = MockAsyncStrategy(self)
    +            elif self.strategy_type == ASYNC_STREAM:
    +                self.strategy = AsyncStreamStrategy(self)
    +            else:
    +                self.last_error = 'unknown strategy'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPUnknownStrategyError(self.last_error)
    +
    +            # maps strategy functions to connection functions
    +            self.send = self.strategy.send
    +            self.open = self.strategy.open
    +            self.get_response = self.strategy.get_response
    +            self.post_send_single_response = self.strategy.post_send_single_response
    +            self.post_send_search = self.strategy.post_send_search
    +
    +            if not self.strategy.no_real_dsa:
    +                self.do_auto_bind()
    +            # else:  # for strategies with a fake server set get_info to NONE if server hasn't a schema
    +            #     if self.server and not self.server.schema:
    +            #         self.server.get_info = NONE
    +            if log_enabled(BASIC):
    +                if get_library_log_hide_sensitive_data():
    +                    log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped())
    +                else:
    +                    log(BASIC, 'instantiated Connection: <%r>', self)
    +
    +    def do_auto_bind(self):
    +        if self.auto_bind and self.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
    +            if log_enabled(BASIC):
    +                log(BASIC, 'performing automatic bind for <%s>', self)
    +            if self.closed:
    +                self.open(read_server_info=False)
    +            if self.auto_bind == AUTO_BIND_NO_TLS:
    +                self.bind(read_server_info=True)
    +            elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
    +                self.start_tls(read_server_info=False)
    +                self.bind(read_server_info=True)
    +            elif self.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
    +                self.bind(read_server_info=False)
    +                self.start_tls(read_server_info=True)
    +            if not self.bound:
    +                self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '')
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                self.unbind()
    +                raise LDAPBindError(self.last_error)
    +
    +    def __str__(self):
    +        s = [
    +            str(self.server) if self.server else 'None',
    +            'user: ' + str(self.user),
    +            'lazy' if self.lazy else 'not lazy',
    +            'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'),
    +            'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'),
    +            _format_socket_endpoints(self.socket),
    +            'tls not started' if not self.tls_started else ('deferred start_tls' if self._deferred_start_tls else 'tls started'),
    +            'listening' if self.listening else 'not listening',
    +            self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy',
    +            'internal decoder' if self.fast_decoder else 'pyasn1 decoder'
    +        ]
    +        return ' - '.join(s)
    +
    +    def __repr__(self):
    +        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    +        if self.server_pool:
    +            r = 'Connection(server={0.server_pool!r}'.format(self)
    +        else:
    +            r = 'Connection(server={0.server!r}'.format(self)
    +        r += '' if self.user is None else ', user={0.user!r}'.format(self)
    +        r += '' if self.password is None else ', password={0.password!r}'.format(self)
    +        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
    +        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    +        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
    +        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
    +        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
    +        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
    +        r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
    +        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
    +        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
    +        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
    +        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
    +        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
    +        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
    +        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
    +        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
    +        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
    +        r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store))
    +        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
    +        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
    +        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
    +        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
    +        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
    +        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
    +        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
    +        r += ')'
    +
    +        return r
    +
    +    def repr_with_sensitive_data_stripped(self):
    +        conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
    +        if self.server_pool:
    +            r = 'Connection(server={0.server_pool!r}'.format(self)
    +        else:
    +            r = 'Connection(server={0.server!r}'.format(self)
    +        r += '' if self.user is None else ', user={0.user!r}'.format(self)
    +        r += '' if self.password is None else ", password='{0}'".format('' % len(self.password))
    +        r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
    +        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    +        r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
    +        r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
    +        r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
    +        r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
    +        if self.sasl_mechanism == DIGEST_MD5:
    +            r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
    +        else:
    +            r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
    +        r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
    +        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
    +        r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
    +        r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
    +        r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
    +        r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
    +        r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
    +        r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
    +        r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
    +        r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store))
    +        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
    +        r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
    +        r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
    +        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
    +        r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
    +        r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
    +        r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
    +        r += ')'
    +
    +        return r
    +
    +    @property
    +    def stream(self):
    +        """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
    +        :return: reference to the response stream if defined in the strategy.
    +        """
    +        return self.strategy.get_stream() if self.strategy.can_stream else None
    +
    +    @stream.setter
    +    def stream(self, value):
    +        with self.connection_lock:
    +            if self.strategy.can_stream:
    +                self.strategy.set_stream(value)
    +
    +    @property
    +    def usage(self):
    +        """Usage statistics for the connection.
    +        :return: Usage object
    +        """
    +        if not self._usage:
    +            return None
    +        if self.strategy.pooled:  # update master connection usage from pooled connections
    +            self._usage.reset()
    +            for worker in self.strategy.pool.workers:
    +                self._usage += worker.connection.usage
    +            self._usage += self.strategy.pool.terminated_usage
    +        return self._usage
    +
    +    def __enter__(self):
    +        with self.connection_lock:
    +            self._context_state.append((self.bound, self.closed))  # save status out of context as a tuple in a list
    +            if self.auto_bind != AUTO_BIND_NONE:
    +                if self.auto_bind == AUTO_BIND_DEFAULT:
    +                    self.auto_bind = AUTO_BIND_NO_TLS
    +                if self.closed:
    +                    self.open()
    +                if not self.bound:
    +                    self.bind()
    +
    +            return self
    +
    +    # noinspection PyUnusedLocal
    +    def __exit__(self, exc_type, exc_val, exc_tb):
    +        with self.connection_lock:
    +            context_bound, context_closed = self._context_state.pop()
    +            if (not context_bound and self.bound) or self.stream:  # restore status prior to entering context
    +                try:
    +                    self.unbind()
    +                except LDAPExceptionError:
    +                    pass
    +
    +            if not context_closed and self.closed:
    +                self.open()
    +
    +            if exc_type is not None:
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', exc_type, self)
    +                return False  # re-raise LDAPExceptionError
    +
    +    def bind(self,
    +             read_server_info=True,
    +             controls=None):
    +        """Bind to ldap Server with the authentication method and the user defined in the connection
    +
    +        :param read_server_info: reads info from server
    +        :param controls: LDAP controls to send along with the bind operation
    +        :type controls: list of tuple
    +        :return: bool
    +
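    +        Example (a sketch; the server, DN and password are illustrative)::
    +
    +            c = Connection(server, user='cn=admin,dc=example,dc=com', password='secret')
    +            if not c.bind():
    +                print('bind failed:', c.last_error)
    +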
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start BIND operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            if self.lazy and not self._executing_deferred:
    +                if self.strategy.pooled:
    +                    self.strategy.validate_bind(controls)
    +                self._deferred_bind = True
    +                self._bind_controls = controls
    +                self.bound = True
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'deferring bind for <%s>', self)
    +            else:
    +                self._deferred_bind = False
    +                self._bind_controls = None
    +                if self.closed:  # try to open connection if closed
    +                    self.open(read_server_info=False)
    +                if self.authentication == ANONYMOUS:
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'performing anonymous BIND for <%s>', self)
    +                    if not self.strategy.pooled:
    +                        request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode)
    +                        if log_enabled(PROTOCOL):
    +                            log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
    +                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
    +                    else:
    +                        response = self.strategy.validate_bind(controls)  # only for REUSABLE
    +                elif self.authentication == SIMPLE:
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'performing simple BIND for <%s>', self)
    +                    if not self.strategy.pooled:
    +                        request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode)
    +                        if log_enabled(PROTOCOL):
    +                            log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
    +                        response = self.post_send_single_response(self.send('bindRequest', request, controls))
    +                    else:
    +                        response = self.strategy.validate_bind(controls)  # only for REUSABLE
    +                elif self.authentication == SASL:
    +                    if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS:
    +                        if log_enabled(PROTOCOL):
    +                            log(PROTOCOL, 'performing SASL BIND for <%s>', self)
    +                        if not self.strategy.pooled:
    +                            response = self.do_sasl_bind(controls)
    +                        else:
    +                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
    +                    else:
    +                        self.last_error = 'requested SASL mechanism not supported'
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '%s for <%s>', self.last_error, self)
    +                        raise LDAPSASLMechanismNotSupportedError(self.last_error)
    +                elif self.authentication == NTLM:
    +                    if self.user and self.password and len(self.user.split('\\')) == 2:
    +                        if log_enabled(PROTOCOL):
    +                            log(PROTOCOL, 'performing NTLM BIND for <%s>', self)
    +                        if not self.strategy.pooled:
    +                            response = self.do_ntlm_bind(controls)
    +                        else:
    +                            response = self.strategy.validate_bind(controls)  # only for REUSABLE
    +                    else:  # user or password missing
    +                        self.last_error = 'NTLM needs domain\\username and a password'
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '%s for <%s>', self.last_error, self)
    +                        raise LDAPUnknownAuthenticationMethodError(self.last_error)
    +                else:
    +                    self.last_error = 'unknown authentication method'
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '%s for <%s>', self.last_error, self)
    +                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
    +
    +                if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM):  # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy
    +                    _, result = self.get_response(response)
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self)
    +                elif self.strategy.sync:
    +                    result = self.result
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self)
    +                elif self.strategy.pooled or self.authentication in (SASL, NTLM):  # asynchronous SASL and NTLM or reusable strategy get the bind result synchronously
    +                    result = response
    +                else:
    +                    self.last_error = 'unknown authentication method'
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '%s for <%s>', self.last_error, self)
    +                    raise LDAPUnknownAuthenticationMethodError(self.last_error)
    +
    +                if result is None:
    +                    # self.bound = True if self.strategy_type == REUSABLE else False
    +                    self.bound = False
    +                elif result is True:
    +                    self.bound = True
    +                elif result is False:
    +                    self.bound = False
    +                else:
    +                    self.bound = True if result['result'] == RESULT_SUCCESS else False
    +                    if not self.bound and result and result['description'] and not self.last_error:
    +                        self.last_error = result['description']
    +
    +                if read_server_info and self.bound:
    +                    self.refresh_server_info()
    +            self._entries = []
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done BIND operation, result <%s>', self.bound)
    +
    +            return self.bound
    +
    +    def rebind(self,
    +               user=None,
    +               password=None,
    +               authentication=None,
    +               sasl_mechanism=None,
    +               sasl_credentials=None,
    +               read_server_info=True,
    +               controls=None
    +               ):
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start (RE)BIND operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            if user:
    +                self.user = user
    +            if password is not None:
    +                self.password = password
    +            if not authentication and user:
    +                self.authentication = SIMPLE
    +            if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
    +                self.authentication = authentication
    +            elif authentication is not None:
    +                self.last_error = 'unknown authentication method'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPUnknownAuthenticationMethodError(self.last_error)
    +            if sasl_mechanism:
    +                self.sasl_mechanism = sasl_mechanism
    +            if sasl_credentials:
    +                self.sasl_credentials = sasl_credentials
    +
    +            # if self.authentication == SIMPLE and self.user and self.check_names:
    +            #     self.user = safe_dn(self.user)
    +            #     if log_enabled(EXTENDED):
    +            #         log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
    +
    +            if not self.strategy.pooled:
    +                try:
    +                    return self.bind(read_server_info, controls)
    +                except LDAPSocketReceiveError:
    +                    raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
    +            else:
    +                self.strategy.pool.rebind_pool()
    +                return True
    +
    +    def unbind(self,
    +               controls=None):
    +        """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
    +
    +        :param controls: LDAP controls to send along with the bind operation
    +
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start UNBIND operation via <%s>', self)
    +
    +        if self.use_referral_cache:
    +            self.strategy.unbind_referral_cache()
    +
    +        self.last_error = None
    +        with self.connection_lock:
    +            if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open):  # clear deferred status
    +                self.strategy.close()
    +                self._deferred_open = False
    +                self._deferred_bind = False
    +                self._deferred_start_tls = False
    +            elif not self.closed:
    +                request = unbind_operation()
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'UNBIND request sent via <%s>', self)
    +                self.send('unbindRequest', request, controls)
    +                self.strategy.close()
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done UNBIND operation, result <%s>', True)
    +
    +            return True
    +
    +    def search(self,
    +               search_base,
    +               search_filter,
    +               search_scope=SUBTREE,
    +               dereference_aliases=DEREF_ALWAYS,
    +               attributes=None,
    +               size_limit=0,
    +               time_limit=0,
    +               types_only=False,
    +               get_operational_attributes=False,
    +               controls=None,
    +               paged_size=None,
    +               paged_criticality=False,
    +               paged_cookie=None,
    +               auto_escape=None):
    +        """
    +        Perform an ldap search:
    +
    +        - If attributes is empty no attributes are returned (only the
    +          entry DN)
    +        - If attributes is ALL_ATTRIBUTES all attributes are returned
    +        - If paged_size is an int greater than 0 a simple paged search
    +          is tried as described in RFC 2696 with the specified page size
    +        - If paged_size is 0 and a cookie is present the paged search is
    +          abandoned on the server
    +        - Cookie is an opaque string received in the last paged search
    +          response and must be sent back on the next paged search request
    +        - If lazy == True open and bind will be deferred until another
    +          LDAP operation is performed
    +        - If return_empty_attributes == True then an attribute not returned by the server is set to None
    +        - If auto_escape is set it overrides the Connection auto_escape
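    +
    +        Example (a sketch of a simple paged search; base DN and filter are
    +        illustrative, 1.2.840.113556.1.4.319 is the Simple Paged Results
    +        control OID from RFC 2696)::
    +
    +            conn.search('dc=example,dc=com', '(objectClass=person)',
    +                        attributes=['cn', 'mail'], paged_size=100)
    +            cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']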
    +        """
    +        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start SEARCH operation via <%s>', self)
    +
    +        if self.check_names and search_base:
    +            search_base = safe_dn(search_base)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            if not attributes:
    +                attributes = [NO_ATTRIBUTES]
    +            elif attributes == ALL_ATTRIBUTES:
    +                attributes = [ALL_ATTRIBUTES]
    +
    +            if isinstance(attributes, STRING_TYPES):
    +                attributes = [attributes]
    +
    +            if get_operational_attributes and isinstance(attributes, list):
    +                attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
    +            elif get_operational_attributes and isinstance(attributes, tuple):
    +                attributes += (ALL_OPERATIONAL_ATTRIBUTES, )  # concatenate tuple
    +
    +            if isinstance(paged_size, int):
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self)
    +
    +                if controls is None:
    +                    controls = []
    +                else:
    +                    # Copy the controls to prevent modifying the original object
    +                    controls = list(controls)
    +                controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie))
    +
    +            if self.server and self.server.schema and self.check_names:
    +                for attribute_name in attributes:
    +                    if ';' in attribute_name:  # remove tags
    +                        attribute_name_to_check = attribute_name.split(';')[0]
    +                    else:
    +                        attribute_name_to_check = attribute_name
    +                    if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    +                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    +
    +            request = search_operation(search_base,
    +                                       search_filter,
    +                                       search_scope,
    +                                       dereference_aliases,
    +                                       attributes,
    +                                       size_limit,
    +                                       time_limit,
    +                                       types_only,
    +                                       self.auto_escape if auto_escape is None else auto_escape,
    +                                       self.auto_encode,
    +                                       self.server.schema if self.server else None,
    +                                       validator=self.server.custom_validator,
    +                                       check_names=self.check_names)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self)
    +            response = self.post_send_search(self.send('searchRequest', request, controls))
    +            self._entries = []
    +
    +            if isinstance(response, int):  # asynchronous strategy
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self)
    +            else:
    +                return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +                if log_enabled(PROTOCOL):
    +                    for entry in response:
    +                        if entry['type'] == 'searchResEntry':
    +                            log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self)
    +                        elif entry['type'] == 'searchResRef':
    +                            log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self)
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done SEARCH operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def compare(self,
    +                dn,
    +                attribute,
    +                value,
    +                controls=None):
    +        """
    +        Perform a compare operation
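    +
    +        Example (a sketch; the DN and attribute value are illustrative,
    +        the call returns True when the value matches)::
    +
    +            conn.compare('cn=user1,ou=users,dc=example,dc=com', 'sn', 'Smith')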
    +        """
    +        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start COMPARE operation via <%s>', self)
    +        self.last_error = None
    +        if self.check_names:
    +            dn = safe_dn(dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self)
    +
    +        if self.server and self.server.schema and self.check_names:
    +            if ';' in attribute:  # remove tags for checking
    +                attribute_name_to_check = attribute.split(';')[0]
    +            else:
    +                attribute_name_to_check = attribute
    +
    +            if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    +                raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    +
    +        if isinstance(value, SEQUENCE_TYPES):  # value can't be a sequence
    +            raise LDAPInvalidValueError('value cannot be a sequence')
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('compareRequest', request, controls))
    +            self._entries = []
    +            if isinstance(response, int):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self)
    +            else:
    +                return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False
    +                if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self)
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done COMPARE operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def add(self,
    +            dn,
    +            object_class=None,
    +            attributes=None,
    +            controls=None):
    +        """
    +        Add dn to the DIT; object_class can be None, a class name or a
    +        list of class names.
    +
    +        attributes is a dictionary in the form {'attr': 'val'} or
    +        {'attr': ['val1', 'val2', ...]} for multivalued attributes
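    +
    +        Example (a sketch; the DN and attribute values are illustrative)::
    +
    +            conn.add('cn=user1,ou=users,dc=example,dc=com',
    +                     object_class='inetOrgPerson',
    +                     attributes={'sn': 'Smith', 'cn': 'user1',
    +                                 'mail': ['user1@example.com']})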
    +        """
    +        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    +        conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start ADD operation via <%s>', self)
    +        self.last_error = None
    +        _attributes = deepcopy(attributes)  # dict could change when adding objectClass values
    +        if self.check_names:
    +            dn = safe_dn(dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self)
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            attr_object_class = []
    +            if object_class is None:
    +                parm_object_class = []
    +            else:
    +                parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class]
    +
    +            object_class_attr_name = ''
    +            if _attributes:
    +                for attr in _attributes:
    +                    if attr.lower() == 'objectclass':
    +                        object_class_attr_name = attr
    +                        attr_object_class = list(_attributes[object_class_attr_name]) if isinstance(_attributes[object_class_attr_name], SEQUENCE_TYPES) else [_attributes[object_class_attr_name]]
    +                        break
    +            else:
    +                _attributes = dict()
    +
    +            if not object_class_attr_name:
    +                object_class_attr_name = 'objectClass'
    +
    +            attr_object_class = [to_unicode(object_class) for object_class in attr_object_class]  # converts objectclass to unicode in case of bytes value
    +            _attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, [])  # remove duplicate ObjectClasses
    +
    +            if not _attributes[object_class_attr_name]:
    +                self.last_error = 'objectClass attribute is mandatory'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPObjectClassError(self.last_error)
    +
    +            if self.server and self.server.schema and self.check_names:
    +                for object_class_name in _attributes[object_class_attr_name]:
    +                    if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes:
    +                        raise LDAPObjectClassError('invalid object class ' + str(object_class_name))
    +
    +                for attribute_name in _attributes:
    +                    if ';' in attribute_name:  # remove tags for checking
    +                        attribute_name_to_check = attribute_name.split(';')[0]
    +                    else:
    +                        attribute_name_to_check = attribute_name
    +
    +                    if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    +                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    +
    +            request = add_operation(dn, _attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('addRequest', request, controls))
    +            self._entries = []
    +
    +            if isinstance(response, STRING_TYPES + (int, )):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self)
    +            else:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self)
    +                return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done ADD operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def delete(self,
    +               dn,
    +               controls=None):
    +        """
    +        Delete the entry identified by the DN from the DIB.
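    +
    +        Example (a sketch; the DN is illustrative)::
    +
    +            conn.delete('cn=user1,ou=users,dc=example,dc=com')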
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start DELETE operation via <%s>', self)
    +        self.last_error = None
    +        if self.check_names:
    +            dn = safe_dn(dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self)
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            if self.read_only:
    +                self.last_error = 'connection is read-only'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPConnectionIsReadOnlyError(self.last_error)
    +
    +            request = delete_operation(dn)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('delRequest', request, controls))
    +            self._entries = []
    +
    +            if isinstance(response, STRING_TYPES + (int, )):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self)
    +            else:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self)
    +                return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done DELETE operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def modify(self,
    +               dn,
    +               changes,
    +               controls=None):
    +        """
    +        Modify attributes of entry
    +
    +        - changes is a dictionary in the form {'attribute1': change, 'attribute2': [change, change, ...], ...}
    +        - change is (operation, [value1, value2, ...])
    +        - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE), 3 (MODIFY_INCREMENT)
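    +
    +        Example (a sketch using the MODIFY_* constants exported by ldap3;
    +        the DN and values are illustrative)::
    +
    +            conn.modify('cn=user1,ou=users,dc=example,dc=com',
    +                        {'mail': [(MODIFY_REPLACE, ['new@example.com'])],
    +                         'description': [(MODIFY_ADD, ['updated'])]})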
    +        """
    +        conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start MODIFY operation via <%s>', self)
    +        self.last_error = None
    +        if self.check_names:
    +            dn = safe_dn(dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self)
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            if self.read_only:
    +                self.last_error = 'connection is read-only'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPConnectionIsReadOnlyError(self.last_error)
    +
    +            if not isinstance(changes, dict):
    +                self.last_error = 'changes must be a dictionary'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPChangeError(self.last_error)
    +
    +            if not changes:
    +                self.last_error = 'no changes in modify request'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPChangeError(self.last_error)
    +
    +            changelist = dict()
    +            for attribute_name in changes:
    +                if self.server and self.server.schema and self.check_names:
    +                    if ';' in attribute_name:  # remove tags for checking
    +                        attribute_name_to_check = attribute_name.split(';')[0]
    +                    else:
    +                        attribute_name_to_check = attribute_name
    +
    +                    if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
    +                        raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
    +                change = changes[attribute_name]
    +                if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
    +                    if len(change) != 2:
    +                        self.last_error = 'malformed change'
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '%s for <%s>', self.last_error, self)
    +                        raise LDAPChangeError(self.last_error)
    +
    +                    changelist[attribute_name] = [change]  # insert change in a list
    +                else:
    +                    for change_operation in change:
    +                        if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
    +                            self.last_error = 'invalid change list'
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '%s for <%s>', self.last_error, self)
    +                            raise LDAPChangeError(self.last_error)
    +                    changelist[attribute_name] = change
    +            request = modify_operation(dn, changelist, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('modifyRequest', request, controls))
    +            self._entries = []
    +
    +            if isinstance(response, STRING_TYPES + (int, )):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self)
    +            else:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self)
    +                return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done MODIFY operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def modify_dn(self,
    +                  dn,
    +                  relative_dn,
    +                  delete_old_dn=True,
    +                  new_superior=None,
    +                  controls=None):
    +        """
    +        Modify the DN of the entry or perform a move of the entry in the
    +        DIT.
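    +
    +        Example (a sketch; the DNs are illustrative, the first call
    +        renames the entry, the second moves it under a new superior)::
    +
    +            conn.modify_dn('cn=user1,ou=users,dc=example,dc=com', 'cn=user2')
    +            conn.modify_dn('cn=user2,ou=users,dc=example,dc=com', 'cn=user2',
    +                           new_superior='ou=admins,dc=example,dc=com')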
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start MODIFY DN operation via <%s>', self)
    +        self.last_error = None
    +        if self.check_names:
    +            dn = safe_dn(dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self)
    +            relative_dn = safe_dn(relative_dn)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self)
    +
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            if self.read_only:
    +                self.last_error = 'connection is read-only'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', self.last_error, self)
    +                raise LDAPConnectionIsReadOnlyError(self.last_error)
    +
    +            # if new_superior and not dn.startswith(relative_dn):  # as per RFC4511 (4.9)
    +            #     self.last_error = 'DN cannot change while performing moving'
    +            #     if log_enabled(ERROR):
    +            #         log(ERROR, '%s for <%s>', self.last_error, self)
    +            #     raise LDAPChangeError(self.last_error)
    +
    +            request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('modDNRequest', request, controls))
    +            self._entries = []
    +
    +            if isinstance(response, STRING_TYPES + (int, )):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self)
    +            else:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self)
    +                return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done MODIFY DN operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def abandon(self,
    +                message_id,
    +                controls=None):
    +        """
    +        Abandon the operation indicated by message_id
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start ABANDON operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            return_value = False
    +            if self.strategy._outstanding or message_id == 0:
+                # only the current operation can be abandoned; Abandon, Bind and Unbind operations can never be abandoned;
+                # message ID 0 is invalid as an operation id and is used as a 'ping' to keep the connection alive
    +                if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0:
    +                    request = abandon_operation(message_id)
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self)
    +                    self.send('abandonRequest', request, controls)
    +                    self.result = None
    +                    self.response = None
    +                    self._entries = []
    +                    return_value = True
    +                else:
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self)
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done ABANDON operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def extended(self,
    +                 request_name,
    +                 request_value=None,
    +                 controls=None,
    +                 no_encode=None):
    +        """
    +        Performs an extended operation
    +        """
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start EXTENDED operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            self._fire_deferred()
    +            request = extended_operation(request_name, request_value, no_encode=no_encode)
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self)
    +            response = self.post_send_single_response(self.send('extendedReq', request, controls))
    +            self._entries = []
    +            if isinstance(response, int):
    +                return_value = response
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self)
    +            else:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self)
    +                return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False
    +                if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
    +                    self.last_error = self.result['description']
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done EXTENDED operation, result <%s>', return_value)
    +
    +            return return_value
    +
+    def start_tls(self, read_server_info=True):  # as per RFC4511. Removal of TLS is defined as MAY in RFC4511, so the client can't implement a generic stop_tls method
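+        # Usage sketch: upgrade a plain connection to TLS before binding
+        # (assumes a Connection named 'conn'):
+        #   conn = Connection(server)
+        #   conn.open()
+        #   conn.start_tls()
+        #   conn.bind()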
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start START TLS operation via <%s>', self)
    +
    +        with self.connection_lock:
    +            return_value = False
    +            if not self.server.tls:
    +                self.server.tls = Tls()
    +
    +            if self.lazy and not self._executing_deferred:
    +                self._deferred_start_tls = True
    +                self.tls_started = True
    +                return_value = True
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'deferring START TLS for <%s>', self)
    +            else:
    +                self._deferred_start_tls = False
    +                if self.closed:
    +                    self.open()
    +                if self.server.tls.start_tls(self) and self.strategy.sync:  # for asynchronous connections _start_tls is run by the strategy
    +                    if read_server_info:
    +                        self.refresh_server_info()  # refresh server info as per RFC4515 (3.1.5)
    +                    return_value = True
    +                elif not self.strategy.sync:
    +                    return_value = True
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done START TLS operation, result <%s>', return_value)
    +
    +            return return_value
    +
    +    def do_sasl_bind(self,
    +                     controls):
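+        # This helper is driven by bind(); SASL is selected on the Connection
+        # itself (sketch; DIGEST-MD5 credentials as (realm, user, password, authz_id)):
+        #   Connection(server, authentication=SASL, sasl_mechanism=DIGEST_MD5,
+        #              sasl_credentials=(None, 'user', 'password', None))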
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start SASL BIND operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            result = None
    +
    +            if not self.sasl_in_progress:
    +                self.sasl_in_progress = True
    +                try:
    +                    if self.sasl_mechanism == EXTERNAL:
    +                        result = sasl_external(self, controls)
    +                    elif self.sasl_mechanism == DIGEST_MD5:
    +                        result = sasl_digest_md5(self, controls)
    +                    elif self.sasl_mechanism == GSSAPI:
    +                        from ..protocol.sasl.kerberos import sasl_gssapi  # needs the gssapi package
    +                        result = sasl_gssapi(self, controls)
    +                    elif self.sasl_mechanism == 'PLAIN':
    +                        result = sasl_plain(self, controls)
    +                finally:
    +                    self.sasl_in_progress = False
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'done SASL BIND operation, result <%s>', result)
    +
    +            return result
    +
    +    def do_ntlm_bind(self,
    +                     controls):
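+        # Like do_sasl_bind, this is driven by bind(); NTLM is selected on the
+        # Connection itself (sketch; domain and user are illustrative):
+        #   Connection(server, user='DOMAIN\\user', password='password', authentication=NTLM)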
    +        if log_enabled(BASIC):
    +            log(BASIC, 'start NTLM BIND operation via <%s>', self)
    +        self.last_error = None
    +        with self.connection_lock:
    +            result = None
    +            if not self.sasl_in_progress:
+                self.sasl_in_progress = True  # NTLM follows the same flow as SASL authentication
    +                try:
    +                    # additional import for NTLM
    +                    from ..utils.ntlm import NtlmClient
    +                    domain_name, user_name = self.user.split('\\', 1)
    +                    ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password)
    +
    +                    # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx
    +                    # send a sicilyPackageDiscovery request (in the bindRequest)
    +                    request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client)
    +                    if log_enabled(PROTOCOL):
    +                        log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self)
    +                    response = self.post_send_single_response(self.send('bindRequest', request, controls))
    +                    if not self.strategy.sync:
    +                        _, result = self.get_response(response)
    +                    else:
    +                        result = response[0]
    +                    if 'server_creds' in result:
    +                        sicily_packages = result['server_creds'].decode('ascii').split(';')
    +                        if 'NTLM' in sicily_packages:  # NTLM available on server
    +                            request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client)
    +                            if log_enabled(PROTOCOL):
    +                                log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self)
    +                            response = self.post_send_single_response(self.send('bindRequest', request, controls))
    +                            if not self.strategy.sync:
    +                                _, result = self.get_response(response)
    +                            else:
    +                                if log_enabled(PROTOCOL):
    +                                    log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0],
    +                                        self)
    +                                result = response[0]
    +
    +                            if result['result'] == RESULT_SUCCESS:
    +                                request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client,
    +                                                         result['server_creds'])
    +                                if log_enabled(PROTOCOL):
    +                                    log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self)
    +                                response = self.post_send_single_response(self.send('bindRequest', request, controls))
    +                                if not self.strategy.sync:
    +                                    _, result = self.get_response(response)
    +                                else:
    +                                    if log_enabled(PROTOCOL):
    +                                        log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self)
    +                                    result = response[0]
    +                    else:
    +                        result = None
    +                finally:
    +                    self.sasl_in_progress = False
    +
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'done SASL NTLM operation, result <%s>', result)
    +
    +                return result
    +
    +    def refresh_server_info(self):
    +        # if self.strategy.no_real_dsa:  # do not refresh for mock strategies
    +        #     return
    +
    +        if not self.strategy.pooled:
    +            with self.connection_lock:
    +                if not self.closed:
    +                    if log_enabled(BASIC):
    +                        log(BASIC, 'refreshing server info for <%s>', self)
    +                    previous_response = self.response
    +                    previous_result = self.result
    +                    previous_entries = self._entries
    +                    self.server.get_info_from_server(self)
    +                    self.response = previous_response
    +                    self.result = previous_result
    +                    self._entries = previous_entries
    +        else:
    +            if log_enabled(BASIC):
    +                log(BASIC, 'refreshing server info from pool for <%s>', self)
    +            self.strategy.pool.get_info_from_server()
    +
    +    def response_to_ldif(self,
    +                         search_result=None,
    +                         all_base64=False,
    +                         line_separator=None,
    +                         sort_order=None,
    +                         stream=None):
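+        # Usage sketch, after a synchronous search has populated self.response:
+        #   conn.search('o=test', '(objectClass=person)', attributes=['cn'])
+        #   print(conn.response_to_ldif())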
    +        with self.connection_lock:
    +            if search_result is None:
    +                search_result = self.response
    +
    +            if isinstance(search_result, SEQUENCE_TYPES):
    +                ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order)
    +                ldif_lines = add_ldif_header(ldif_lines)
    +                line_separator = line_separator or linesep
    +                ldif_output = line_separator.join(ldif_lines)
    +                if stream:
    +                    if stream.tell() == 0:
    +                        header = add_ldif_header(['-'])[0]
    +                        stream.write(prepare_for_stream(header + line_separator + line_separator))
    +                    stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self)
    +                return ldif_output
    +
    +            return None
    +
    +    def response_to_json(self,
    +                         raw=False,
    +                         search_result=None,
    +                         indent=4,
    +                         sort=True,
    +                         stream=None,
    +                         checked_attributes=True,
    +                         include_empty=True):
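+        # Usage sketch, mirroring response_to_ldif:
+        #   conn.search('o=test', '(objectClass=person)', attributes=['cn'])
+        #   print(conn.response_to_json())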
    +
    +        with self.connection_lock:
    +            if search_result is None:
    +                search_result = self.response
    +
    +            if isinstance(search_result, SEQUENCE_TYPES):
    +                json_dict = dict()
    +                json_dict['entries'] = []
    +
    +                for response in search_result:
    +                    if response['type'] == 'searchResEntry':
    +                        entry = dict()
    +
    +                        entry['dn'] = response['dn']
    +                        if checked_attributes:
    +                            if not include_empty:
    +                                # needed for python 2.6 compatibility
    +                                entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
    +                            else:
    +                                entry['attributes'] = dict(response['attributes'])
    +                        if raw:
    +                            if not include_empty:
    +                                # needed for python 2.6 compatibility
+                                entry['raw'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
    +                            else:
    +                                entry['raw'] = dict(response['raw_attributes'])
    +                        json_dict['entries'].append(entry)
    +
    +                if str is bytes:  # Python 2
    +                    check_json_dict(json_dict)
    +
    +                json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
    +
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
    +                if stream:
    +                    stream.write(json_output)
    +
    +                return json_output
    +
    +    def response_to_file(self,
    +                         target,
    +                         raw=False,
    +                         indent=4,
    +                         sort=True):
    +        with self.connection_lock:
    +            if self.response:
    +                if isinstance(target, STRING_TYPES):
    +                    target = open(target, 'w+')
    +
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'writing response to file for <%s>', self)
    +
    +                target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
    +                target.close()
    +
    +    def _fire_deferred(self, read_info=True):
    +        with self.connection_lock:
    +            if self.lazy and not self._executing_deferred:
    +                self._executing_deferred = True
    +
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
    +                try:
    +                    if self._deferred_open:
    +                        self.open(read_server_info=False)
    +                    if self._deferred_start_tls:
    +                        self.start_tls(read_server_info=False)
    +                    if self._deferred_bind:
    +                        self.bind(read_server_info=False, controls=self._bind_controls)
    +                    if read_info:
    +                        self.refresh_server_info()
    +                except LDAPExceptionError as e:
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '%s for <%s>', e, self)
    +                    raise  # re-raise LDAPExceptionError
    +                finally:
    +                    self._executing_deferred = False
    +
    +    @property
    +    def entries(self):
    +        if self.response:
    +            if not self._entries:
    +                self._entries = self._get_entries(self.response)
    +        return self._entries
    +
    +    def _get_entries(self, search_response):
    +        with self.connection_lock:
    +            from .. import ObjectDef, Reader
    +
+            # build a table of ObjectDefs, grouping the entries found in search_response by their attribute sets; a set that is a subset of another is folded into its superset
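+            # e.g. entries with attribute sets {'cn', 'sn', 'mail'} and {'cn', 'sn'}
+            # share one ObjectDef built on the superset {'cn', 'sn', 'mail'}
+            # (illustrative attribute names)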
    +            attr_sets = []
    +            for response in search_response:
    +                if response['type'] == 'searchResEntry':
    +                    resp_attr_set = set(response['attributes'].keys())
    +                    if resp_attr_set not in attr_sets:
    +                        attr_sets.append(resp_attr_set)
    +            attr_sets.sort(key=lambda x: -len(x))  # sorts the list in descending length order
    +            unique_attr_sets = []
    +            for attr_set in attr_sets:
    +                for unique_set in unique_attr_sets:
    +                    if unique_set >= attr_set:  # checks if unique set is a superset of attr_set
    +                        break
    +                else:  # the attr_set is not a subset of any element in unique_attr_sets
    +                    unique_attr_sets.append(attr_set)
    +            object_defs = []
    +            for attr_set in unique_attr_sets:
    +                object_def = ObjectDef(schema=self.server.schema)
+                object_def += list(attr_set)  # convert the set to a list so it can be added to the object definition
    +                object_defs.append((attr_set,
    +                                    object_def,
    +                                    Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set))
+                                   )  # object_defs contains tuples of (attribute set, ObjectDef, Reader cursor)
    +
    +            entries = []
    +            for response in search_response:
    +                if response['type'] == 'searchResEntry':
    +                    resp_attr_set = set(response['attributes'].keys())
    +                    for object_def in object_defs:
    +                        if resp_attr_set <= object_def[0]:  # finds the ObjectDef for the attribute set of this entry
    +                            entry = object_def[2]._create_entry(response)
    +                            entries.append(entry)
    +                            break
    +                    else:
    +                        if log_enabled(ERROR):
    +                            log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self)
    +                        raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set))
    +
    +        return entries
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/exceptions.py b/server/www/packages/packages-windows/x86/ldap3/core/exceptions.py
    index cfefb6d..29aed26 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/exceptions.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/exceptions.py
    @@ -1,599 +1,609 @@
    -"""
    -"""
    -
    -# Created on 2014.05.14
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from os import sep
    -from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \
    -    RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
    -    RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \
    -    RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \
    -    RESULT_CONSTRAINT_VIOLATION, \
    -    RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \
    -    RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \
    -    RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \
    -    RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \
    -    RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \
    -    RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \
    -    RESULT_NOT_ALLOWED_ON_NON_LEAF, \
    -    RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \
    -    RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \
    -    RESULT_INAPPROPRIATE_AUTHENTICATION
    -import socket
    -
    -
    -# LDAPException hierarchy
    -class LDAPException(Exception):
    -    pass
    -
    -
    -class LDAPOperationResult(LDAPException):
    -    def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None):
    -        if cls is LDAPOperationResult and result and result in exception_table:
    -            exc = super(LDAPOperationResult, exception_table[result]).__new__(
    -                exception_table[result])  # create an exception of the required result error
    -            exc.result = result
    -            exc.description = description
    -            exc.dn = dn
    -            exc.message = message
    -            exc.type = response_type
    -            exc.response = response
    -        else:
    -            exc = super(LDAPOperationResult, cls).__new__(cls)
    -        return exc
    -
    -    def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None):
    -        self.result = result
    -        self.description = description
    -        self.dn = dn
    -        self.message = message
    -        self.type = response_type
    -        self.response = response
    -
    -    def __str__(self):
    -        s = [self.__class__.__name__,
    -             str(self.result) if self.result else None,
    -             self.description if self.description else None,
    -             self.dn if self.dn else None,
    -             self.message if self.message else None,
    -             self.type if self.type else None,
    -             self.response if self.response else None]
    -
    -        return ' - '.join([str(item) for item in s if s is not None])
    -
    -    def __repr__(self):
    -        return self.__str__()
    -
    -
    -class LDAPOperationsErrorResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPProtocolErrorResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPTimeLimitExceededResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPSizeLimitExceededResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAuthMethodNotSupportedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPStrongerAuthRequiredResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPReferralResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAdminLimitExceededResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPConfidentialityRequiredResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPSASLBindInProgressResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNoSuchAttributeResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPUndefinedAttributeTypeResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInappropriateMatchingResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPConstraintViolationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAttributeOrValueExistsResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNoSuchObjectResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAliasProblemResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInvalidDNSyntaxResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAliasDereferencingProblemResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInappropriateAuthenticationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInvalidCredentialsResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPInsufficientAccessRightsResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPBusyResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPUnavailableResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPUnwillingToPerformResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLoopDetectedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNamingViolationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPObjectClassViolationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNotAllowedOnRDNResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPEntryAlreadyExistsResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPObjectClassModsProhibitedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAffectMultipleDSASResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPOtherResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLCUPSecurityViolationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLCUPInvalidDataResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPLCUPReloadRequiredResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPCanceledResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPNoSuchOperationResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPTooLateResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPCannotCancelResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAssertionFailedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPAuthorizationDeniedResult(LDAPOperationResult):
    -    pass
    -
    -
    -class LDAPESyncRefreshRequiredResult(LDAPOperationResult):
    -    pass
    -
    -
    -exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult,
    -                   RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult,
    -                   RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult,
    -                   RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult,
    -                   RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult,
    -                   RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult,
    -                   RESULT_REFERRAL: LDAPReferralResult,
    -                   RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult,
    -                   RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult,
    -                   RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult,
    -                   RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult,
    -                   RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult,
    -                   RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult,
    -                   RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult,
    -                   RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult,
    -                   RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult,
    -                   RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult,
    -                   RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult,
    -                   RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult,
    -                   RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult,
    -                   RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult,
    -                   RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult,
    -                   RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult,
    -                   RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult,
    -                   RESULT_BUSY: LDAPBusyResult,
    -                   RESULT_UNAVAILABLE: LDAPUnavailableResult,
    -                   RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult,
    -                   RESULT_LOOP_DETECTED: LDAPLoopDetectedResult,
    -                   RESULT_NAMING_VIOLATION: LDAPNamingViolationResult,
    -                   RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult,
    -                   RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult,
    -                   RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult,
    -                   RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult,
    -                   RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult,
    -                   RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult,
    -                   RESULT_OTHER: LDAPOtherResult,
    -                   RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult,
    -                   RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult,
    -                   RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult,
    -                   RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult,
    -                   RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult,
    -                   RESULT_CANCELED: LDAPCanceledResult,
    -                   RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult,
    -                   RESULT_TOO_LATE: LDAPTooLateResult,
    -                   RESULT_CANNOT_CANCEL: LDAPCannotCancelResult,
    -                   RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult,
    -                   RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult,
    -                   RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult}
    -
    -
    -class LDAPExceptionError(LDAPException):
    -    pass
    -
    -
    -# configuration exceptions
    -class LDAPConfigurationError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPUnknownStrategyError(LDAPConfigurationError):
    -    pass
    -
    -
    -class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError):
    -    pass
    -
    -
    -class LDAPSSLConfigurationError(LDAPConfigurationError):
    -    pass
    -
    -
    -class LDAPDefinitionError(LDAPConfigurationError):
    -    pass
    -
    -
    -class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError):
    -    pass
    -
    -
    -class LDAPConfigurationParameterError(LDAPConfigurationError):
    -    pass
    -
    -
    -# abstract layer exceptions
    -class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError):
    -    pass
    -
    -
    -class LDAPObjectError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError):
    -    pass
    -
    -
    -class LDAPCursorError(LDAPExceptionError):
    -    pass
    -
    -class LDAPObjectDereferenceError(LDAPExceptionError):
    -    pass
    -
    -# security exceptions
    -class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError):
    -    pass
    -
    -
    -class LDAPInvalidTlsSpecificationError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -# connection exceptions
    -class LDAPBindError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidServerError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPSASLMechanismNotSupportedError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPConnectionIsReadOnlyError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPChangeError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPServerPoolError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPServerPoolExhaustedError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidPortError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPStartTLSError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPCertificateError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPUserNameNotAllowedError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPUserNameIsMandatoryError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPPasswordIsMandatoryError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidFilterError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidScopeError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPInvalidValueError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPControlError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPExtensionError(LDAPExceptionError, ValueError):
    -    pass
    -
    -
    -class LDAPLDIFError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPSchemaError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPSASLPrepError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPSASLBindInProgressError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPMetricsError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPObjectClassError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPInvalidDnError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPResponseTimeoutError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPTransactionError(LDAPExceptionError):
    -    pass
    -
    -
    -# communication exceptions
    -class LDAPCommunicationError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPSocketOpenError(LDAPCommunicationError):
    -    pass
    -
    -
    -class LDAPSocketCloseError(LDAPCommunicationError):
    -    pass
    -
    -
    -class LDAPSocketReceiveError(LDAPCommunicationError, socket.error):
    -    pass
    -
    -
    -class LDAPSocketSendError(LDAPCommunicationError, socket.error):
    -    pass
    -
    -
    -class LDAPSessionTerminatedByServerError(LDAPCommunicationError):
    -    pass
    -
    -
    -class LDAPUnknownResponseError(LDAPCommunicationError):
    -    pass
    -
    -
    -class LDAPUnknownRequestError(LDAPCommunicationError):
    -    pass
    -
    -
    -class LDAPReferralError(LDAPCommunicationError):
    -    pass
    -
    -
    -# pooling exceptions
    -class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError):
    -    pass
    -
    -
    -class LDAPConnectionPoolNotStartedError(LDAPExceptionError):
    -    pass
    -
    -
    -# restartable strategy
    -class LDAPMaximumRetriesError(LDAPExceptionError):
    -    def __str__(self):
    -        s = []
    -        if self.args:
    -            if isinstance(self.args, tuple):
    -                if len(self.args) > 0:
    -                    s.append('LDAPMaximumRetriesError: ' + str(self.args[0]))
    -                if len(self.args) > 1:
    -                    s.append('Exception history:')
    -                    prev_exc = ''
    -                    for i, exc in enumerate(self.args[1]):  # args[1] contains exception history
    -                        # if str(exc[1]) != prev_exc:
    -                        #     s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2])))
    -                        #     prev_exc = str(exc[1])
    -                        if str(exc) != prev_exc:
    -                            s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc)))
    -                            prev_exc = str(exc)
    -                if len(self.args) > 2:
    -                    s.append('Maximum number of retries reached: ' + str(self.args[2]))
    -        else:
    -            s = [LDAPExceptionError.__str__(self)]
    -
    -        return sep.join(s)
    -
    -
    -# exception factories
    -def communication_exception_factory(exc_to_raise, exc):
    -    """
    -    Generates a new exception class of the requested type (subclass of LDAPCommunication) merged with the exception raised by the interpreter
    -    """
    -    if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]:
    -        return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
    -    else:
    -        raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
    -
    -
    -def start_tls_exception_factory(exc_to_raise, exc):
    -    """
    -    Generates a new exception class of the requested type merged with the exception raised by the interpreter
    -    """
    -
    -    if exc_to_raise.__name__ == 'LDAPStartTLSError':
    -        return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
    -    else:
    -        raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
    +"""
    +"""
    +
    +# Created on 2014.05.14
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from os import sep
    +from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \
    +    RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
    +    RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \
    +    RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \
    +    RESULT_CONSTRAINT_VIOLATION, \
    +    RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \
    +    RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \
    +    RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \
    +    RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \
    +    RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \
    +    RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \
    +    RESULT_NOT_ALLOWED_ON_NON_LEAF, \
    +    RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \
    +    RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \
    +    RESULT_INAPPROPRIATE_AUTHENTICATION
    +import socket
    +
    +
    +# LDAPException hierarchy
    +class LDAPException(Exception):
    +    pass
    +
    +
    +class LDAPOperationResult(LDAPException):
    +    def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None):
    +        if cls is LDAPOperationResult and result and result in exception_table:
    +            exc = super(LDAPOperationResult, exception_table[result]).__new__(
    +                exception_table[result])  # create an exception of the required result error
    +            exc.result = result
    +            exc.description = description
    +            exc.dn = dn
    +            exc.message = message
    +            exc.type = response_type
    +            exc.response = response
    +        else:
    +            exc = super(LDAPOperationResult, cls).__new__(cls)
    +        return exc
    +
    +    def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None):
    +        self.result = result
    +        self.description = description
    +        self.dn = dn
    +        self.message = message
    +        self.type = response_type
    +        self.response = response
    +
    +    def __str__(self):
    +        s = [self.__class__.__name__,
    +             str(self.result) if self.result else None,
    +             self.description if self.description else None,
    +             self.dn if self.dn else None,
    +             self.message if self.message else None,
    +             self.type if self.type else None,
    +             self.response if self.response else None]
    +
+        return ' - '.join([str(item) for item in s if item is not None])
    +
    +    def __repr__(self):
    +        return self.__str__()
    +
    +
    +class LDAPOperationsErrorResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPProtocolErrorResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPTimeLimitExceededResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPSizeLimitExceededResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAuthMethodNotSupportedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPStrongerAuthRequiredResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPReferralResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAdminLimitExceededResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPConfidentialityRequiredResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPSASLBindInProgressResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNoSuchAttributeResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPUndefinedAttributeTypeResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInappropriateMatchingResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPConstraintViolationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAttributeOrValueExistsResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNoSuchObjectResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAliasProblemResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInvalidDNSyntaxResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAliasDereferencingProblemResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInappropriateAuthenticationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInvalidCredentialsResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPInsufficientAccessRightsResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPBusyResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPUnavailableResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPUnwillingToPerformResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLoopDetectedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNamingViolationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPObjectClassViolationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNotAllowedOnRDNResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPEntryAlreadyExistsResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPObjectClassModsProhibitedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAffectMultipleDSASResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPOtherResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLCUPSecurityViolationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLCUPInvalidDataResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPLCUPReloadRequiredResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPCanceledResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPNoSuchOperationResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPTooLateResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPCannotCancelResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAssertionFailedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPAuthorizationDeniedResult(LDAPOperationResult):
    +    pass
    +
    +
    +class LDAPESyncRefreshRequiredResult(LDAPOperationResult):
    +    pass
    +
    +
    +exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult,
    +                   RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult,
    +                   RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult,
    +                   RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult,
    +                   RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult,
    +                   RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult,
    +                   RESULT_REFERRAL: LDAPReferralResult,
    +                   RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult,
    +                   RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult,
    +                   RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult,
    +                   RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult,
    +                   RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult,
    +                   RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult,
    +                   RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult,
    +                   RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult,
    +                   RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult,
    +                   RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult,
    +                   RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult,
    +                   RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult,
    +                   RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult,
    +                   RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult,
    +                   RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult,
    +                   RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult,
    +                   RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult,
    +                   RESULT_BUSY: LDAPBusyResult,
    +                   RESULT_UNAVAILABLE: LDAPUnavailableResult,
    +                   RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult,
    +                   RESULT_LOOP_DETECTED: LDAPLoopDetectedResult,
    +                   RESULT_NAMING_VIOLATION: LDAPNamingViolationResult,
    +                   RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult,
    +                   RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult,
    +                   RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult,
    +                   RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult,
    +                   RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult,
    +                   RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult,
    +                   RESULT_OTHER: LDAPOtherResult,
    +                   RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult,
    +                   RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult,
    +                   RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult,
    +                   RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult,
    +                   RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult,
    +                   RESULT_CANCELED: LDAPCanceledResult,
    +                   RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult,
    +                   RESULT_TOO_LATE: LDAPTooLateResult,
    +                   RESULT_CANNOT_CANCEL: LDAPCannotCancelResult,
    +                   RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult,
    +                   RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult,
    +                   RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult}
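+# LDAPOperationResult.__new__ uses this table to dispatch on the numeric result
+# code, so raising the base class yields the specific subclass (sketch):
+#   raise LDAPOperationResult(result=RESULT_INVALID_CREDENTIALS)
+#   # actually raises an LDAPInvalidCredentialsResult instance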
    +
    +
    +class LDAPExceptionError(LDAPException):
    +    pass
    +
    +
    +# configuration exceptions
    +class LDAPConfigurationError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPUnknownStrategyError(LDAPConfigurationError):
    +    pass
    +
    +
    +class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError):
    +    pass
    +
    +
    +class LDAPSSLConfigurationError(LDAPConfigurationError):
    +    pass
    +
    +
    +class LDAPDefinitionError(LDAPConfigurationError):
    +    pass
    +
    +
    +class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError):
    +    pass
    +
    +
    +class LDAPConfigurationParameterError(LDAPConfigurationError):
    +    pass
    +
    +
    +# abstract layer exceptions
    +class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError):
    +    pass
    +
    +
    +class LDAPObjectError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError):
    +    pass
    +
    +
    +class LDAPCursorError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPCursorAttributeError(LDAPCursorError, AttributeError):
    +    pass
    +
    +
    +class LDAPObjectDereferenceError(LDAPExceptionError):
    +    pass
    +
    +
    +# security exceptions
    +class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError):
    +    pass
    +
    +
    +class LDAPInvalidTlsSpecificationError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +# connection exceptions
    +class LDAPBindError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidServerError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPSASLMechanismNotSupportedError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPConnectionIsReadOnlyError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPChangeError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPServerPoolError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPServerPoolExhaustedError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidPortError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPStartTLSError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPCertificateError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPUserNameNotAllowedError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPUserNameIsMandatoryError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPPasswordIsMandatoryError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidFilterError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidScopeError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPInvalidValueError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPControlError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPExtensionError(LDAPExceptionError, ValueError):
    +    pass
    +
    +
    +class LDAPLDIFError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPSchemaError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPSASLPrepError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPSASLBindInProgressError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPMetricsError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPObjectClassError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInvalidDnError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPResponseTimeoutError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPTransactionError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPInfoError(LDAPExceptionError):
    +    pass
    +
    +
    +# communication exceptions
    +class LDAPCommunicationError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPSocketOpenError(LDAPCommunicationError):
    +    pass
    +
    +
    +class LDAPSocketCloseError(LDAPCommunicationError):
    +    pass
    +
    +
    +class LDAPSocketReceiveError(LDAPCommunicationError, socket.error):
    +    pass
    +
    +
    +class LDAPSocketSendError(LDAPCommunicationError, socket.error):
    +    pass
    +
    +
    +class LDAPSessionTerminatedByServerError(LDAPCommunicationError):
    +    pass
    +
    +
    +class LDAPUnknownResponseError(LDAPCommunicationError):
    +    pass
    +
    +
    +class LDAPUnknownRequestError(LDAPCommunicationError):
    +    pass
    +
    +
    +class LDAPReferralError(LDAPCommunicationError):
    +    pass
    +
    +
    +# pooling exceptions
    +class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError):
    +    pass
    +
    +
    +class LDAPConnectionPoolNotStartedError(LDAPExceptionError):
    +    pass
    +
    +
    +# restartable strategy
    +class LDAPMaximumRetriesError(LDAPExceptionError):
    +    def __str__(self):
    +        s = []
    +        if self.args:
    +            if isinstance(self.args, tuple):
    +                if len(self.args) > 0:
    +                    s.append('LDAPMaximumRetriesError: ' + str(self.args[0]))
    +                if len(self.args) > 1:
    +                    s.append('Exception history:')
    +                    prev_exc = ''
    +                    for i, exc in enumerate(self.args[1]):  # args[1] contains exception history
    +                        # if str(exc[1]) != prev_exc:
    +                        #     s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2])))
    +                        #     prev_exc = str(exc[1])
    +                        if str(exc) != prev_exc:
    +                            s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc)))
    +                            prev_exc = str(exc)
    +                if len(self.args) > 2:
    +                    s.append('Maximum number of retries reached: ' + str(self.args[2]))
    +        else:
    +            s = [LDAPExceptionError.__str__(self)]
    +
    +        return sep.join(s)
    +
    +
    +# exception factories
    +def communication_exception_factory(exc_to_raise, exc):
    +    """
+    Generates a new exception class of the requested type (subclass of LDAPCommunicationError) merged with the exception raised by the interpreter
    +    """
    +    if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]:
    +        return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
    +    else:
    +        raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
    +
    +
    +def start_tls_exception_factory(exc_to_raise, exc):
    +    """
    +    Generates a new exception class of the requested type merged with the exception raised by the interpreter
    +    """
    +
    +    if exc_to_raise.__name__ == 'LDAPStartTLSError':
    +        return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
    +    else:
    +        raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
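
The two factories above never raise the merged class themselves; they synthesize it on the fly with type(), so the resulting exception is simultaneously an ldap3 error and the interpreter's original error, and callers can catch it under either name. A minimal sketch of the pattern (the TEST-NET address 192.0.2.1 is a hypothetical endpoint used only to provoke a socket failure):

import socket
from ldap3.core.exceptions import communication_exception_factory, LDAPSocketOpenError

try:
    socket.create_connection(('192.0.2.1', 389), timeout=0.2)  # expected to fail
except OSError as exc:
    # build a class deriving from both LDAPSocketOpenError and type(exc)
    merged = communication_exception_factory(LDAPSocketOpenError, exc)
    error = merged('socket open failed')
    assert isinstance(error, LDAPSocketOpenError)  # catchable as an ldap3 error
    assert isinstance(error, type(exc))            # ...and as the OS-level error
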
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/pooling.py b/server/www/packages/packages-windows/x86/ldap3/core/pooling.py
    index 66a0bbd..24a5b0f 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/pooling.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/pooling.py
    @@ -1,306 +1,329 @@
    -"""
    -"""
    -
    -# Created on 2014.03.14
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from datetime import datetime, MINYEAR
    -from os import linesep
    -from random import randint
    -from time import sleep
    -
    -from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
    -from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError
    -from .server import Server
    -from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
    -
    -POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM]
    -
    -
    -class ServerPoolState(object):
    -    def __init__(self, server_pool):
    -        self.servers = []  # each element is a list: [server, last_checked_time, available]
    -        self.strategy = server_pool.strategy
    -        self.server_pool = server_pool
    -        self.last_used_server = 0
    -        self.refresh()
    -        self.initialize_time = datetime.now()
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated ServerPoolState: <%r>', self)
    -
    -    def __str__(self):
    -        s = 'servers: ' + linesep
    -        if self.servers:
    -            for server in self.servers:
    -                s += str(server[0]) + linesep
    -        else:
    -            s += 'None' + linesep
    -        s += 'Pool strategy: ' + str(self.strategy) + linesep
    -        s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.servers[self.last_used_server][0]))
    -
    -        return s
    -
    -    def refresh(self):
    -        self.servers = []
    -        for server in self.server_pool.servers:
    -            self.servers.append([server, datetime(MINYEAR, 1, 1), True])  # server, smallest date ever, supposed available
    -        self.last_used_server = randint(0, len(self.servers) - 1)
    -
    -    def get_current_server(self):
    -        return self.servers[self.last_used_server][0]
    -
    -    def get_server(self):
    -        if self.servers:
    -            if self.server_pool.strategy == FIRST:
    -                if self.server_pool.active:
    -                    # returns the first active server
    -                    self.last_used_server = self.find_active_server(starting=0)
    -                else:
    -                    # returns always the first server - no pooling
    -                    self.last_used_server = 0
    -            elif self.server_pool.strategy == ROUND_ROBIN:
    -                if self.server_pool.active:
    -                    # returns the next active server in a circular range
    -                    self.last_used_server = self.find_active_server(self.last_used_server + 1)
    -                else:
    -                    # returns the next server in a circular range
    -                    self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.servers) else 0
    -            elif self.server_pool.strategy == RANDOM:
    -                if self.server_pool.active:
    -                    self.last_used_server = self.find_active_random_server()
    -                else:
    -                    # returns a random server in the pool
    -                    self.last_used_server = randint(0, len(self.servers) - 1)
    -            else:
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy)
    -                raise LDAPUnknownStrategyError('unknown server pooling strategy')
    -            if log_enabled(BASIC):
    -                log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server)
    -            return self.servers[self.last_used_server][0]
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'no servers in Server Pool <%s>', self)
    -            raise LDAPServerPoolError('no servers in server pool')
    -
    -    def find_active_random_server(self):
    -        counter = self.server_pool.active  # can be True for "forever" or the number of cycles to try
    -        while counter:
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'entering loop for finding active server in pool <%s>', self)
    -            temp_list = self.servers[:]  # copy
    -            while temp_list:
    -                # pops a random server from a temp list and checks its
    -                # availability, if not available tries another one
    -                server = temp_list.pop(randint(0, len(temp_list) - 1))
    -                if not server[2]:  # server is offline
    -                    if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server[1]).seconds < self.server_pool.exhaust:  # keeps server offline
    -                        if log_enabled(NETWORK):
    -                            log(NETWORK, 'server <%s> excluded from checking because it is offline', server[0])
    -                        continue
    -                    if log_enabled(NETWORK):
    -                            log(NETWORK, 'server <%s> reinserted in pool', server[0])
    -                server[1] = datetime.now()
    -                if log_enabled(NETWORK):
    -                    log(NETWORK, 'checking server <%s> for availability', server[0])
    -                if server[0].check_availability():
    -                    # returns a random active server in the pool
    -                    server[2] = True
    -                    return self.servers.index(server)
    -                else:
    -                    server[2] = False
    -            if not isinstance(self.server_pool.active, bool):
    -                counter -= 1
    -        if log_enabled(ERROR):
    -            log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self)
    -        raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries')
    -
    -    def find_active_server(self, starting):
    -        conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT')
    -        counter = self.server_pool.active  # can be True for "forever" or the number of cycles to try
    -        if starting >= len(self.servers):
    -            starting = 0
    -
    -        while counter:
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self)
    -            index = -1
    -            pool_size = len(self.servers)
    -            while index < pool_size - 1:
    -                index += 1
    -                offset = index + starting if index + starting < pool_size else index + starting - pool_size
    -                if not self.servers[offset][2]:  # server is offline
    -                    if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - self.servers[offset][1]).seconds < self.server_pool.exhaust:  # keeps server offline
    -                        if log_enabled(NETWORK):
    -                            if isinstance(self.server_pool.exhaust, bool):
    -                                log(NETWORK, 'server <%s> excluded from checking because is offline', self.servers[offset][0])
    -                            else:
    -                                log(NETWORK, 'server <%s> excluded from checking because is offline for %d seconds', self.servers[offset][0], (self.server_pool.exhaust - (datetime.now() - self.servers[offset][1]).seconds))
    -                        continue
    -                    if log_enabled(NETWORK):
    -                            log(NETWORK, 'server <%s> reinserted in pool', self.servers[offset][0])
    -                self.servers[offset][1] = datetime.now()
    -                if log_enabled(NETWORK):
    -                    log(NETWORK, 'checking server <%s> for availability', self.servers[offset][0])
    -                if self.servers[offset][0].check_availability():
    -                    self.servers[offset][2] = True
    -                    return offset
    -                else:
    -                    self.servers[offset][2] = False  # sets server offline
    -
    -            if not isinstance(self.server_pool.active, bool):
    -                counter -= 1
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout)
    -            sleep(conf_pool_timeout)
    -
    -        if log_enabled(ERROR):
    -            log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self)
    -        raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries')
    -
    -    def __len__(self):
    -        return len(self.servers)
    -
    -
    -class ServerPool(object):
    -    def __init__(self,
    -                 servers=None,
    -                 pool_strategy=ROUND_ROBIN,
    -                 active=True,
    -                 exhaust=False):
    -
    -        if pool_strategy not in POOLING_STRATEGIES:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'unknown pooling strategy <%s>', pool_strategy)
    -            raise LDAPUnknownStrategyError('unknown pooling strategy')
    -        if exhaust and not active:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'cannot instantiate pool with exhaust and not active')
    -            raise LDAPServerPoolError('pools can be exhausted only when checking for active servers')
    -        self.servers = []
    -        self.pool_states = dict()
    -        self.active = active
    -        self.exhaust = exhaust
    -        if isinstance(servers, SEQUENCE_TYPES + (Server, )):
    -            self.add(servers)
    -        elif isinstance(servers, STRING_TYPES):
    -            self.add(Server(servers))
    -        self.strategy = pool_strategy
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated ServerPool: <%r>', self)
    -
    -    def __str__(self):
    -            s = 'servers: ' + linesep
    -            if self.servers:
    -                for server in self.servers:
    -                    s += str(server) + linesep
    -            else:
    -                s += 'None' + linesep
    -            s += 'Pool strategy: ' + str(self.strategy)
    -            s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False')
    -            s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False')
    -            return s
    -
    -    def __repr__(self):
    -        r = 'ServerPool(servers='
    -        if self.servers:
    -            r += '['
    -            for server in self.servers:
    -                r += server.__repr__() + ', '
    -            r = r[:-2] + ']'
    -        else:
    -            r += 'None'
    -        r += ', pool_strategy={0.strategy!r}'.format(self)
    -        r += ', active={0.active!r}'.format(self)
    -        r += ', exhaust={0.exhaust!r}'.format(self)
    -        r += ')'
    -
    -        return r
    -
    -    def __len__(self):
    -        return len(self.servers)
    -
    -    def __getitem__(self, item):
    -        return self.servers[item]
    -
    -    def __iter__(self):
    -        return self.servers.__iter__()
    -
    -    def add(self, servers):
    -        if isinstance(servers, Server):
    -            if servers not in self.servers:
    -                self.servers.append(servers)
    -        elif isinstance(servers, STRING_TYPES):
    -            self.servers.append(Server(servers))
    -        elif isinstance(servers, SEQUENCE_TYPES):
    -            for server in servers:
    -                if isinstance(server, Server):
    -                    self.servers.append(server)
    -                elif isinstance(server, STRING_TYPES):
    -                    self.servers.append(Server(server))
    -                else:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'element must be a server in Server Pool <%s>', self)
    -                    raise LDAPServerPoolError('server in ServerPool must be a Server')
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'server must be a Server of a list of Servers when adding to Server Pool <%s>', self)
    -            raise LDAPServerPoolError('server must be a Server or a list of Server')
    -
    -        for connection in self.pool_states:
    -            # notifies connections using this pool to refresh
    -            self.pool_states[connection].refresh()
    -
    -    def remove(self, server):
    -        if server in self.servers:
    -            self.servers.remove(server)
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self)
    -            raise LDAPServerPoolError('server not in server pool')
    -
    -        for connection in self.pool_states:
    -            # notifies connections using this pool to refresh
    -            self.pool_states[connection].refresh()
    -
    -    def initialize(self, connection):
    -        pool_state = ServerPoolState(self)
    -        # registers pool_state in ServerPool object
    -        self.pool_states[connection] = pool_state
    -
    -    def get_server(self, connection):
    -        if connection in self.pool_states:
    -            return self.pool_states[connection].get_server()
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
    -            raise LDAPServerPoolError('connection not in ServerPoolState')
    -
    -    def get_current_server(self, connection):
    -        if connection in self.pool_states:
    -            return self.pool_states[connection].get_current_server()
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
    -            raise LDAPServerPoolError('connection not in ServerPoolState')
    +"""
    +"""
    +
    +# Created on 2014.03.14
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from datetime import datetime, MINYEAR
    +from os import linesep
    +from random import randint
    +from time import sleep
    +
    +from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
    +from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError
    +from .server import Server
    +from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
    +
    +POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM]
    +
    +
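+# Tracks one pooled server's state: the Server object, the time it was last checked, and its availability flag.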
    +class ServerState(object):
    +    def __init__(self, server, last_checked_time, available):
    +        self.server = server
    +        self.last_checked_time = last_checked_time
    +        self.available = available
    +
    +
    +class ServerPoolState(object):
    +    def __init__(self, server_pool):
    +        self.server_states = []  # each element is a ServerState
    +        self.strategy = server_pool.strategy
    +        self.server_pool = server_pool
    +        self.last_used_server = 0
    +        self.refresh()
    +        self.initialize_time = datetime.now()
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated ServerPoolState: <%r>', self)
    +
    +    def __str__(self):
    +        s = 'servers: ' + linesep
    +        if self.server_states:
    +            for state in self.server_states:
    +                s += str(state.server) + linesep
    +        else:
    +            s += 'None' + linesep
    +        s += 'Pool strategy: ' + str(self.strategy) + linesep
    +        s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.server_states[self.last_used_server].server))
    +
    +        return s
    +
    +    def refresh(self):
    +        self.server_states = []
    +        for server in self.server_pool.servers:
    +            self.server_states.append(ServerState(server, datetime(MINYEAR, 1, 1), True))  # server, smallest date ever, supposed available
    +        self.last_used_server = randint(0, len(self.server_states) - 1)
    +
    +    def get_current_server(self):
    +        return self.server_states[self.last_used_server].server
    +
    +    def get_server(self):
    +        if self.server_states:
    +            if self.server_pool.strategy == FIRST:
    +                if self.server_pool.active:
    +                    # returns the first active server
    +                    self.last_used_server = self.find_active_server(starting=0)
    +                else:
    +                    # returns always the first server - no pooling
    +                    self.last_used_server = 0
    +            elif self.server_pool.strategy == ROUND_ROBIN:
    +                if self.server_pool.active:
    +                    # returns the next active server in a circular range
    +                    self.last_used_server = self.find_active_server(self.last_used_server + 1)
    +                else:
    +                    # returns the next server in a circular range
    +                    self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.server_states) else 0
    +            elif self.server_pool.strategy == RANDOM:
    +                if self.server_pool.active:
    +                    self.last_used_server = self.find_active_random_server()
    +                else:
    +                    # returns a random server in the pool
    +                    self.last_used_server = randint(0, len(self.server_states) - 1)
    +            else:
    +                if log_enabled(ERROR):
    +                    log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy)
    +                raise LDAPUnknownStrategyError('unknown server pooling strategy')
    +            if log_enabled(BASIC):
    +                log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server)
    +            return self.server_states[self.last_used_server].server
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'no servers in Server Pool <%s>', self)
    +            raise LDAPServerPoolError('no servers in server pool')
    +
    +    def find_active_random_server(self):
    +        counter = self.server_pool.active  # can be True for "forever" or the number of cycles to try
    +        while counter:
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'entering loop for finding active server in pool <%s>', self)
    +            temp_list = self.server_states[:]  # copy
    +            while temp_list:
    +                # pops a random server from a temp list and checks its
    +                # availability, if not available tries another one
    +                server_state = temp_list.pop(randint(0, len(temp_list) - 1))
    +                if not server_state.available:  # server is offline
    +                    if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust:  # keeps server offline
    +                        if log_enabled(NETWORK):
    +                            log(NETWORK, 'server <%s> excluded from checking because it is offline', server_state.server)
    +                        continue
+                    if log_enabled(NETWORK):
+                        log(NETWORK, 'server <%s> reinserted in pool', server_state.server)
    +                server_state.last_checked_time = datetime.now()
    +                if log_enabled(NETWORK):
    +                    log(NETWORK, 'checking server <%s> for availability', server_state.server)
    +                if server_state.server.check_availability():
    +                    # returns a random active server in the pool
    +                    server_state.available = True
    +                    return self.server_states.index(server_state)
    +                else:
    +                    server_state.available = False
    +            if not isinstance(self.server_pool.active, bool):
    +                counter -= 1
    +        if log_enabled(ERROR):
    +            log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self)
    +        raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries')
    +
    +    def find_active_server(self, starting):
    +        conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT')
    +        counter = self.server_pool.active  # can be True for "forever" or the number of cycles to try
    +        if starting >= len(self.server_states):
    +            starting = 0
    +
    +        while counter:
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self)
    +            index = -1
    +            pool_size = len(self.server_states)
    +            while index < pool_size - 1:
    +                index += 1
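+                # circular scan: wrap (index + starting) back into [0, pool_size) so the probe continues from 'starting'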
    +                offset = index + starting if index + starting < pool_size else index + starting - pool_size
    +                server_state = self.server_states[offset]
    +                if not server_state.available:  # server is offline
    +                    if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust:  # keeps server offline
    +                        if log_enabled(NETWORK):
    +                            if isinstance(self.server_pool.exhaust, bool):
+                                log(NETWORK, 'server <%s> excluded from checking because it is offline', server_state.server)
    +                            else:
+                                log(NETWORK, 'server <%s> excluded from checking because it is offline for %d seconds', server_state.server, (self.server_pool.exhaust - (datetime.now() - server_state.last_checked_time).seconds))
    +                        continue
+                    if log_enabled(NETWORK):
+                        log(NETWORK, 'server <%s> reinserted in pool', server_state.server)
    +                server_state.last_checked_time = datetime.now()
    +                if log_enabled(NETWORK):
    +                    log(NETWORK, 'checking server <%s> for availability', server_state.server)
    +                if server_state.server.check_availability():
    +                    server_state.available = True
    +                    return offset
    +                else:
    +                    server_state.available = False  # sets server offline
    +
    +            if not isinstance(self.server_pool.active, bool):
    +                counter -= 1
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout)
    +            sleep(conf_pool_timeout)
    +
    +        if log_enabled(ERROR):
    +            log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self)
    +        raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries')
    +
    +    def __len__(self):
    +        return len(self.server_states)
    +
    +
    +class ServerPool(object):
    +    def __init__(self,
    +                 servers=None,
    +                 pool_strategy=ROUND_ROBIN,
    +                 active=True,
    +                 exhaust=False,
    +                 single_state=True):
    +
    +        if pool_strategy not in POOLING_STRATEGIES:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'unknown pooling strategy <%s>', pool_strategy)
    +            raise LDAPUnknownStrategyError('unknown pooling strategy')
    +        if exhaust and not active:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'cannot instantiate pool with exhaust and not active')
    +            raise LDAPServerPoolError('pools can be exhausted only when checking for active servers')
    +        self.servers = []
    +        self.pool_states = dict()
    +        self.active = active
    +        self.exhaust = exhaust
+        self.single = single_state  # when True, one ServerPoolState is shared by every connection using this pool
+        self._pool_state = None  # used for storing the global state of the pool
    +        if isinstance(servers, SEQUENCE_TYPES + (Server, )):
    +            self.add(servers)
    +        elif isinstance(servers, STRING_TYPES):
    +            self.add(Server(servers))
    +        self.strategy = pool_strategy
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated ServerPool: <%r>', self)
    +
    +    def __str__(self):
    +            s = 'servers: ' + linesep
    +            if self.servers:
    +                for server in self.servers:
    +                    s += str(server) + linesep
    +            else:
    +                s += 'None' + linesep
    +            s += 'Pool strategy: ' + str(self.strategy)
    +            s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False')
    +            s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False')
    +            return s
    +
    +    def __repr__(self):
    +        r = 'ServerPool(servers='
    +        if self.servers:
    +            r += '['
    +            for server in self.servers:
    +                r += server.__repr__() + ', '
    +            r = r[:-2] + ']'
    +        else:
    +            r += 'None'
    +        r += ', pool_strategy={0.strategy!r}'.format(self)
    +        r += ', active={0.active!r}'.format(self)
    +        r += ', exhaust={0.exhaust!r}'.format(self)
    +        r += ')'
    +
    +        return r
    +
    +    def __len__(self):
    +        return len(self.servers)
    +
    +    def __getitem__(self, item):
    +        return self.servers[item]
    +
    +    def __iter__(self):
    +        return self.servers.__iter__()
    +
    +    def add(self, servers):
    +        if isinstance(servers, Server):
    +            if servers not in self.servers:
    +                self.servers.append(servers)
    +        elif isinstance(servers, STRING_TYPES):
    +            self.servers.append(Server(servers))
    +        elif isinstance(servers, SEQUENCE_TYPES):
    +            for server in servers:
    +                if isinstance(server, Server):
    +                    self.servers.append(server)
    +                elif isinstance(server, STRING_TYPES):
    +                    self.servers.append(Server(server))
    +                else:
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'element must be a server in Server Pool <%s>', self)
    +                    raise LDAPServerPoolError('server in ServerPool must be a Server')
    +        else:
    +            if log_enabled(ERROR):
+                log(ERROR, 'server must be a Server or a list of Servers when adding to Server Pool <%s>', self)
    +            raise LDAPServerPoolError('server must be a Server or a list of Server')
    +
    +        if self.single:
    +            if self._pool_state:
    +                self._pool_state.refresh()
    +        else:
    +            for connection in self.pool_states:
    +                # notifies connections using this pool to refresh
    +                self.pool_states[connection].refresh()
    +
    +    def remove(self, server):
    +        if server in self.servers:
    +            self.servers.remove(server)
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self)
    +            raise LDAPServerPoolError('server not in server pool')
    +
    +        if self.single:
    +            if self._pool_state:
    +                self._pool_state.refresh()
    +        else:
    +            for connection in self.pool_states:
    +                # notifies connections using this pool to refresh
    +                self.pool_states[connection].refresh()
    +
    +    def initialize(self, connection):
    +        # registers pool_state in ServerPool object
    +        if self.single:
    +            if not self._pool_state:
    +                self._pool_state = ServerPoolState(self)
    +            self.pool_states[connection] = self._pool_state
    +        else:
    +            self.pool_states[connection] = ServerPoolState(self)
    +
    +    def get_server(self, connection):
    +        if connection in self.pool_states:
    +            return self.pool_states[connection].get_server()
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
    +            raise LDAPServerPoolError('connection not in ServerPoolState')
    +
    +    def get_current_server(self, connection):
    +        if connection in self.pool_states:
    +            return self.pool_states[connection].get_current_server()
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
    +            raise LDAPServerPoolError('connection not in ServerPoolState')
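
A usage sketch for the reworked pool (hostnames and credentials here are hypothetical). With the new single_state parameter left at its default of True, every Connection created from the pool shares one ServerPoolState, so a server marked offline stays out of rotation for all connections; exhaust=60 keeps a failed server excluded for 60 seconds before it is probed again:

from ldap3 import Server, ServerPool, Connection, ROUND_ROBIN

pool = ServerPool([Server('ldap1.example.com'), Server('ldap2.example.com')],
                  pool_strategy=ROUND_ROBIN,
                  active=True,   # keep cycling until an available server is found
                  exhaust=60,    # seconds an offline server stays excluded
                  single_state=True)
conn = Connection(pool, user='cn=admin,dc=example,dc=com', password='secret')
conn.bind()
print(pool.get_current_server(conn))  # the server the pool actually picked
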
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/results.py b/server/www/packages/packages-windows/x86/ldap3/core/results.py
    index 6f10643..14f8f73 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/results.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/results.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -134,4 +134,4 @@ RESULT_CODES = {
     }
     
     # do not raise exception for (in raise_exceptions connection mode)
    -DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS]
    +DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS, RESULT_SIZE_LIMIT_EXCEEDED, RESULT_TIME_LIMIT_EXCEEDED]
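
The whitelist above matters only for connections opened with raise_exceptions=True: any result code outside DO_NOT_RAISE_EXCEPTIONS makes the operation raise the corresponding LDAPOperationResult subclass. After this change, a search truncated by sizeLimitExceeded (4) or timeLimitExceeded (3) hands back its partial results instead of raising. A sketch with a hypothetical host and base DN:

from ldap3 import Server, Connection, SUBTREE

conn = Connection(Server('ldap.example.com'), auto_bind=True, raise_exceptions=True)
conn.search('dc=example,dc=com', '(objectClass=person)', SUBTREE,
            attributes=['cn'], size_limit=5)
print(conn.result['description'])  # e.g. 'sizeLimitExceeded' - no longer raised
print(len(conn.entries))           # entries received before the limit was hit
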
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/server.py b/server/www/packages/packages-windows/x86/ldap3/core/server.py
    index 36c782b..43189ef 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/server.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/server.py
    @@ -1,572 +1,663 @@
    -"""
    -"""
    -
    -# Created on 2014.05.31
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -import socket
    -from threading import Lock
    -from datetime import datetime, MINYEAR
    -
    -from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
    -from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError
    -from ..protocol.formatters.standard import format_attribute_values
    -from ..protocol.rfc4511 import LDAP_MAX_INT
    -from ..protocol.rfc4512 import SchemaInfo, DsaInfo
    -from .tls import Tls
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL
    -from ..utils.conv import to_unicode
    -
    -try:
    -    from urllib.parse import unquote  # Python 3
    -except ImportError:
    -    from urllib import unquote  # Python 2
    -
    -try:  # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
    -    # noinspection PyUnresolvedReferences
    -    from socket import AF_UNIX
    -    unix_socket_available = True
    -except ImportError:
    -    unix_socket_available = False
    -
    -
    -class Server(object):
    -    """
    -    LDAP Server definition class
    -
    -    Allowed_referral_hosts can be None (default), or a list of tuples of
    -    allowed servers ip address or names to contact while redirecting
    -    search to referrals.
    -
    -    The second element of the tuple is a boolean to indicate if
    -    authentication to that server is allowed; if False only anonymous
    -    bind will be used.
    -
    -    Per RFC 4516. Use [('*', False)] to allow any host with anonymous
    -    bind, use [('*', True)] to allow any host with same authentication of
    -    Server.
    -    """
    -
    -    _message_counter = 0
    -    _message_id_lock = Lock()  # global lock for message_id shared by all Server objects
    -
    -
    -    def __init__(self,
    -                 host,
    -                 port=None,
    -                 use_ssl=False,
    -                 allowed_referral_hosts=None,
    -                 get_info=SCHEMA,
    -                 tls=None,
    -                 formatter=None,
    -                 connect_timeout=None,
    -                 mode=IP_V6_PREFERRED,
    -                 validator=None):
    -
    -        self.ipc = False
    -        url_given = False
    -        host = host.strip()
    -        if host.lower().startswith('ldap://'):
    -            self.host = host[7:]
    -            use_ssl = False
    -            url_given = True
    -        elif host.lower().startswith('ldaps://'):
    -            self.host = host[8:]
    -            use_ssl = True
    -            url_given = True
    -        elif host.lower().startswith('ldapi://') and unix_socket_available:
    -            self.ipc = True
    -            use_ssl = False
    -            url_given = True
    -        elif host.lower().startswith('ldapi://') and not unix_socket_available:
    -            raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets non present')
    -        else:
    -            self.host = host
    -
    -        if self.ipc:
    -            if str is bytes:  # Python 2
    -                self.host = unquote(host[7:]).decode('utf-8')
    -            else:  # Python 3
    -                self.host = unquote(host[7:])  # encoding defaults to utf-8 in python3
    -            self.port = None
    -        elif ':' in self.host and self.host.count(':') == 1:
    -            hostname, _, hostport = self.host.partition(':')
    -            try:
    -                port = int(hostport) or port
    -            except ValueError:
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'port <%s> must be an integer', port)
    -                raise LDAPInvalidPortError('port must be an integer')
    -            self.host = hostname
    -        elif url_given and self.host.startswith('['):
    -            hostname, sep, hostport = self.host[1:].partition(']')
    -            if sep != ']' or not self._is_ipv6(hostname):
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
    -                raise LDAPInvalidServerError()
    -            if len(hostport):
    -                if not hostport.startswith(':'):
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'invalid URL in server name for <%s>', self.host)
    -                    raise LDAPInvalidServerError('invalid URL in server name')
    -                if not hostport[1:].isdecimal():
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'port must be an integer for <%s>', self.host)
    -                    raise LDAPInvalidPortError('port must be an integer')
    -                port = int(hostport[1:])
    -            self.host = hostname
    -        elif not url_given and self._is_ipv6(self.host):
    -            pass
    -        elif self.host.count(':') > 1:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid server address for <%s>', self.host)
    -            raise LDAPInvalidServerError()
    -
    -        if not self.ipc:
    -            self.host.rstrip('/')
    -            if not use_ssl and not port:
    -                port = 389
    -            elif use_ssl and not port:
    -                port = 636
    -
    -            if isinstance(port, int):
    -                if port in range(0, 65535):
    -                    self.port = port
    -                else:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'port <%s> must be in range from 0 to 65535', port)
    -                    raise LDAPInvalidPortError('port must in range from 0 to 65535')
    -            else:
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'port <%s> must be an integer', port)
    -                raise LDAPInvalidPortError('port must be an integer')
    -
    -        if allowed_referral_hosts is None:  # defaults to any server with authentication
    -            allowed_referral_hosts = [('*', True)]
    -
    -        if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
    -            self.allowed_referral_hosts = []
    -            for referral_host in allowed_referral_hosts:
    -                if isinstance(referral_host, tuple):
    -                    if isinstance(referral_host[1], bool):
    -                        self.allowed_referral_hosts.append(referral_host)
    -        elif isinstance(allowed_referral_hosts, tuple):
    -            if isinstance(allowed_referral_hosts[1], bool):
    -                self.allowed_referral_hosts = [allowed_referral_hosts]
    -        else:
    -            self.allowed_referral_hosts = []
    -
    -        self.ssl = True if use_ssl else False
    -        if tls and not isinstance(tls, Tls):
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid tls specification: <%s>', tls)
    -            raise LDAPInvalidTlsSpecificationError('invalid Tls object')
    -
    -        self.tls = Tls() if self.ssl and not tls else tls
    -
    -        if not self.ipc:
    -            if self._is_ipv6(self.host):
    -                self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
    -            else:
    -                self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
    -        else:
    -            self.name = host
    -
    -        self.get_info = get_info
    -        self._dsa_info = None
    -        self._schema_info = None
    -        self.dit_lock = Lock()
    -        self.custom_formatter = formatter
    -        self.custom_validator = validator
    -        self._address_info = []  # property self.address_info resolved at open time (or when check_availability is called)
    -        self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date ever
    -        self.current_address = None
    -        self.connect_timeout = connect_timeout
    -        self.mode = mode
    -
    -        self.get_info_from_server(None)  # load offline schema if needed
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Server: <%r>', self)
    -
    -    @staticmethod
    -    def _is_ipv6(host):
    -        try:
    -            socket.inet_pton(socket.AF_INET6, host)
    -        except (socket.error, AttributeError, ValueError):
    -            return False
    -        return True
    -
    -    def __str__(self):
    -        if self.host:
    -            s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
    -        else:
    -            s = object.__str__(self)
    -        return s
    -
    -    def __repr__(self):
    -        r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
    -        r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
    -        r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
    -        r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
    -        r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
    -        r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
    -        r += ')'
    -
    -        return r
    -
    -    @property
    -    def address_info(self):
    -        conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
    -        if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
    -            # converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time
    -            addresses = None
    -            try:
    -                if self.ipc:
    -                    addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
    -                else:
    -                    addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
    -            except (socket.gaierror, AttributeError):
    -                pass
    -
    -            if not addresses:  # if addresses not found or raised an exception (for example for bad flags) tries again without flags
    -                try:
    -                    addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    -                except socket.gaierror:
    -                    pass
    -
    -            if addresses:
    -                self._address_info = [list(address) + [None, None] for address in addresses]
    -                self._address_info_resolved_time = datetime.now()
    -            else:
    -                self._address_info = []
    -                self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date
    -
    -            if log_enabled(BASIC):
    -                for address in self._address_info:
    -                    log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
    -        return self._address_info
    -
    -    def update_availability(self, address, available):
    -        cont = 0
    -        while cont < len(self._address_info):
    -            if self.address_info[cont] == address:
    -                self._address_info[cont][5] = True if available else False
    -                self._address_info[cont][6] = datetime.now()
    -                break
    -            cont += 1
    -
    -    def reset_availability(self):
    -        for address in self._address_info:
    -            address[5] = None
    -            address[6] = None
    -
    -    def check_availability(self):
    -        """
    -        Tries to open, connect and close a socket to specified address
    -        and port to check availability. Timeout in seconds is specified in CHECK_AVAILABITY_TIMEOUT if not specified in
    -        the Server object
    -        """
    -        conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT')
    -        available = False
    -        self.reset_availability()
    -        for address in self.candidate_addresses():
    -            available = True
    -            try:
    -                temp_socket = socket.socket(*address[:3])
    -                if self.connect_timeout:
    -                    temp_socket.settimeout(self.connect_timeout)
    -                else:
    -                    temp_socket.settimeout(conf_availability_timeout)  # set timeout for checking availability to default
    -                try:
    -                    temp_socket.connect(address[4])
    -                except socket.error:
    -                    available = False
    -                finally:
    -                    try:
    -                        temp_socket.shutdown(socket.SHUT_RDWR)
    -                    except socket.error:
    -                        available = False
    -                    finally:
    -                        temp_socket.close()
    -            except socket.gaierror:
    -                available = False
    -
    -            if available:
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'server <%s> available at <%r>', self, address)
    -                self.update_availability(address, True)
    -                break  # if an available address is found exits immediately
    -            else:
    -                self.update_availability(address, False)
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'server <%s> not available at <%r>', self, address)
    -
    -        return available
    -
    -    @staticmethod
    -    def next_message_id():
    -        """
    -        LDAP messageId is unique for all connections to same server
    -        """
    -        with Server._message_id_lock:
    -            Server._message_counter += 1
    -            if Server._message_counter >= LDAP_MAX_INT:
    -                Server._message_counter = 1
    -            if log_enabled(PROTOCOL):
    -                log(PROTOCOL, 'new message id <%d> generated', Server._message_counter)
    -
    -        return Server._message_counter
    -
    -    def _get_dsa_info(self, connection):
    -        """
    -        Retrieve DSE operational attribute as per RFC4512 (5.1).
    -        """
    -        if connection.strategy.no_real_dsa:  # do not try for mock strategies
    -            return
    -
    -        if not connection.strategy.pooled:  # in pooled strategies get_dsa_info is performed by the worker threads
    -            result = connection.search(search_base='',
    -                                       search_filter='(objectClass=*)',
    -                                       search_scope=BASE,
    -                                       attributes=['altServer',  # requests specific dsa info attributes
    -                                                   'namingContexts',
    -                                                   'supportedControl',
    -                                                   'supportedExtension',
    -                                                   'supportedFeatures',
    -                                                   'supportedCapabilities',
    -                                                   'supportedLdapVersion',
    -                                                   'supportedSASLMechanisms',
    -                                                   'vendorName',
    -                                                   'vendorVersion',
    -                                                   'subschemaSubentry',
    -                                                   '*',
    -                                                   '+'],  # requests all remaining attributes (other),
    -                                       get_operational_attributes=True)
    -
    -            with self.dit_lock:
    -                if isinstance(result, bool):  # sync request
    -                    self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info
    -                elif result:  # asynchronous request, must check if attributes in response
    -                    results, _ = connection.get_response(result)
    -                    if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
    -                        self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes'])
    -
    -            if log_enabled(BASIC):
    -                log(BASIC, 'DSA info read for <%s> via <%s>', self, connection)
    -
    -    def _get_schema_info(self, connection, entry=''):
    -        """
    -        Retrieve schema from subschemaSubentry DSE attribute, per RFC
    -        4512 (4.4 and 5.1); entry = '' means DSE.
    -        """
    -        if connection.strategy.no_real_dsa:  # do not try for mock strategies
    -            return
    -
    -        schema_entry = None
    -        if self._dsa_info and entry == '':  # subschemaSubentry already present in dsaInfo
    -            if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES):
    -                schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None
    -            else:
    -                schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None
    -        else:
    -            result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True)
    -            if isinstance(result, bool):  # sync request
    -                if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']:
    -                    if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0:
    -                        schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0]
    -            else:  # asynchronous request, must check if subschemaSubentry in attributes
    -                results, _ = connection.get_response(result)
    -                if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']:
    -                    if len(results[0]['raw_attributes']['subschemaSubentry']) > 0:
    -                        schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0]
    -
    -        if schema_entry and not connection.strategy.pooled:  # in pooled strategies get_schema_info is performed by the worker threads
    -            if isinstance(schema_entry, bytes) and str is not bytes:  # Python 3
    -                schema_entry = to_unicode(schema_entry, from_server=True)
    -            result = connection.search(schema_entry,
    -                                       search_filter='(objectClass=subschema)',
    -                                       search_scope=BASE,
    -                                       attributes=['objectClasses',  # requests specific subschema attributes
    -                                                   'attributeTypes',
    -                                                   'ldapSyntaxes',
    -                                                   'matchingRules',
    -                                                   'matchingRuleUse',
    -                                                   'dITContentRules',
    -                                                   'dITStructureRules',
    -                                                   'nameForms',
    -                                                   'createTimestamp',
    -                                                   'modifyTimestamp',
    -                                                   '*'],  # requests all remaining attributes (other)
    -                                       get_operational_attributes=True
    -                                       )
    -            with self.dit_lock:
    -                self._schema_info = None
    -                if result:
    -                    if isinstance(result, bool):  # sync request
    -                        self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None
    -                    else:  # asynchronous request, must check if attributes in response
    -                        results, result = connection.get_response(result)
    -                        if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
    -                            self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes'])
    -                    if self._schema_info and not self._schema_info.is_valid():  # flaky servers can return an empty schema, checks if it is so and set schema to None
    -                        self._schema_info = None
    -                    if self._schema_info:  # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info
    -                        for attribute in self._schema_info.other:
    -                            self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter)
    -                        if self._dsa_info:  # try to apply formatter to the "other" dict with dsa info raw values
    -                            for attribute in self._dsa_info.other:
    -                                self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter)
    -            if log_enabled(BASIC):
    -                log(BASIC, 'schema read for <%s> via <%s>', self, connection)
    -
    -    def get_info_from_server(self, connection):
    -        """
    -        reads info from DSE and from subschema
    -        """
    -        if connection and not connection.closed:
    -            if self.get_info in [DSA, ALL]:
    -                self._get_dsa_info(connection)
    -            if self.get_info in [SCHEMA, ALL]:
    -                    self._get_schema_info(connection)
    -        elif self.get_info == OFFLINE_EDIR_8_8_8:
    -            from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
    -            self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema))
    -            self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info))
    -        elif self.get_info == OFFLINE_AD_2012_R2:
    -            from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info
    -            self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema))
    -            self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info))
    -        elif self.get_info == OFFLINE_SLAPD_2_4:
    -            from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info
    -            self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema))
    -            self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info))
    -        elif self.get_info == OFFLINE_DS389_1_3_3:
    -            from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info
    -            self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema))
    -            self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info))
    -
    -    def attach_dsa_info(self, dsa_info=None):
    -        if isinstance(dsa_info, DsaInfo):
    -            self._dsa_info = dsa_info
    -            if log_enabled(BASIC):
    -                log(BASIC, 'attached DSA info to Server <%s>', self)
    -
    -    def attach_schema_info(self, dsa_schema=None):
    -        if isinstance(dsa_schema, SchemaInfo):
    -            self._schema_info = dsa_schema
    -        if log_enabled(BASIC):
    -            log(BASIC, 'attached schema info to Server <%s>', self)
    -
    -    @property
    -    def info(self):
    -        return self._dsa_info
    -
    -    @property
    -    def schema(self):
    -        return self._schema_info
    -
    -    @staticmethod
    -    def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None):
    -        """
    -        Define a dummy server with preloaded schema and info
    -        :param host: host name
    -        :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name
    -        :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name
    -        :param port: dummy port
    -        :param use_ssl: use_ssl
    -        :param formatter: custom formatter
    -        :return: Server object
    -        """
    -        if isinstance(host, SEQUENCE_TYPES):
    -            dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)  # for ServerPool object
    -        else:
    -            dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)
    -        if isinstance(dsa_info, DsaInfo):
    -            dummy._dsa_info = dsa_info
    -        elif isinstance(dsa_info, STRING_TYPES):
    -            try:
    -                dummy._dsa_info = DsaInfo.from_json(dsa_info)  # tries to use dsa_info as a json configuration string
    -            except Exception:
    -                dummy._dsa_info = DsaInfo.from_file(dsa_info)  # tries to use dsa_info as a file name
    -
    -        if not dummy.info:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid DSA info for %s', host)
    -            raise LDAPDefinitionError('invalid dsa info')
    -
    -        if isinstance(dsa_schema, SchemaInfo):
    -            dummy._schema_info = dsa_schema
    -        elif isinstance(dsa_schema, STRING_TYPES):
    -            try:
    -                dummy._schema_info = SchemaInfo.from_json(dsa_schema)
    -            except Exception:
    -                dummy._schema_info = SchemaInfo.from_file(dsa_schema)
    -
    -        if not dummy.schema:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid schema info for %s', host)
    -            raise LDAPDefinitionError('invalid schema info')
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'created server <%s> from definition', dummy)
    -
    -        return dummy
    -
    -    def candidate_addresses(self):
    -        conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT')
    -        if self.ipc:
    -            candidates = self.address_info
    -            if log_enabled(BASIC):
    -                log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name)
    -        else:
    -            # checks reset availability timeout
    -            for address in self.address_info:
    -                if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout):
    -                    address[5] = None
    -                    address[6] = None
    -
    -            # selects server address based on server mode and availability (in address[5])
    -            addresses = self.address_info[:]  # copy to avoid refreshing while searching candidates
    -            candidates = []
    -            if addresses:
    -                if self.mode == IP_SYSTEM_DEFAULT:
    -                    candidates.append(addresses[0])
    -                elif self.mode == IP_V4_ONLY:
    -                    candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    -                elif self.mode == IP_V6_ONLY:
    -                    candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    -                elif self.mode == IP_V4_PREFERRED:
    -                    candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    -                    candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    -                elif self.mode == IP_V6_PREFERRED:
    -                    candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    -                    candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    -                else:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'invalid server mode for <%s>', self)
    -                    raise LDAPInvalidServerError('invalid server mode')
    -
    -            if log_enabled(BASIC):
    -                for candidate in candidates:
    -                    log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode)
    -        return candidates
    +"""
    +"""
    +
    +# Created on 2014.05.31
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +import socket
    +from threading import Lock
    +from datetime import datetime, MINYEAR
    +
    +from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_EDIR_9_1_4, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
    +from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError, LDAPInfoError
    +from ..protocol.formatters.standard import format_attribute_values
    +from ..protocol.rfc4511 import LDAP_MAX_INT
    +from ..protocol.rfc4512 import SchemaInfo, DsaInfo
    +from .tls import Tls
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK
    +from ..utils.conv import to_unicode
    +from ..utils.port_validators import check_port, check_port_and_port_list
    +
    +try:
    +    from urllib.parse import unquote  # Python 3
    +except ImportError:
    +    from urllib import unquote  # Python 2
    +
    +try:  # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
    +    # noinspection PyUnresolvedReferences
    +    from socket import AF_UNIX
    +    unix_socket_available = True
    +except ImportError:
    +    unix_socket_available = False
    +
    +
    +class Server(object):
    +    """
    +    LDAP Server definition class
    +
+    Allowed_referral_hosts can be None (default), or a list of tuples of
+    allowed server IP addresses or names to contact when a search is
+    redirected to a referral.
+
+    The second element of each tuple is a boolean indicating whether
+    authentication to that server is allowed; if False, only anonymous
+    bind will be used.
    +
+    Per RFC 4516. Use [('*', False)] to allow any host with anonymous
+    bind, or [('*', True)] to allow any host with the same authentication
+    as the Server.
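+
+    A minimal usage sketch (host name and port are placeholders):
+
+        server = Server('ldaps://ldap.example.com:636', get_info=ALL)
+        server.name   # 'ldaps://ldap.example.com:636'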
    +    """
    +
    +    _message_counter = 0
    +    _message_id_lock = Lock()  # global lock for message_id shared by all Server objects
    +
    +    def __init__(self,
    +                 host,
    +                 port=None,
    +                 use_ssl=False,
    +                 allowed_referral_hosts=None,
    +                 get_info=SCHEMA,
    +                 tls=None,
    +                 formatter=None,
    +                 connect_timeout=None,
    +                 mode=IP_V6_PREFERRED,
    +                 validator=None):
    +
    +        self.ipc = False
    +        url_given = False
    +        host = host.strip()
    +        if host.lower().startswith('ldap://'):
    +            self.host = host[7:]
    +            use_ssl = False
    +            url_given = True
    +        elif host.lower().startswith('ldaps://'):
    +            self.host = host[8:]
    +            use_ssl = True
    +            url_given = True
    +        elif host.lower().startswith('ldapi://') and unix_socket_available:
    +            self.ipc = True
    +            use_ssl = False
    +            url_given = True
    +        elif host.lower().startswith('ldapi://') and not unix_socket_available:
+            raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets not present')
    +        else:
    +            self.host = host
    +
    +        if self.ipc:
    +            if str is bytes:  # Python 2
    +                self.host = unquote(host[7:]).decode('utf-8')
    +            else:  # Python 3
    +                self.host = unquote(host[7:])  # encoding defaults to utf-8 in python3
    +            self.port = None
    +        elif ':' in self.host and self.host.count(':') == 1:
    +            hostname, _, hostport = self.host.partition(':')
    +            try:
    +                port = int(hostport) or port
    +            except ValueError:
    +                if log_enabled(ERROR):
+                    log(ERROR, 'port <%s> must be an integer', hostport)  # hostport holds the value that failed to parse
    +                raise LDAPInvalidPortError('port must be an integer')
    +            self.host = hostname
    +        elif url_given and self.host.startswith('['):
    +            hostname, sep, hostport = self.host[1:].partition(']')
    +            if sep != ']' or not self._is_ipv6(hostname):
    +                if log_enabled(ERROR):
    +                    log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
    +                raise LDAPInvalidServerError()
    +            if len(hostport):
    +                if not hostport.startswith(':'):
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'invalid URL in server name for <%s>', self.host)
    +                    raise LDAPInvalidServerError('invalid URL in server name')
    +                if not hostport[1:].isdecimal():
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'port must be an integer for <%s>', self.host)
    +                    raise LDAPInvalidPortError('port must be an integer')
    +                port = int(hostport[1:])
    +            self.host = hostname
    +        elif not url_given and self._is_ipv6(self.host):
    +            pass
    +        elif self.host.count(':') > 1:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid server address for <%s>', self.host)
    +            raise LDAPInvalidServerError()
    +
    +        if not self.ipc:
+            self.host = self.host.rstrip('/')  # rstrip returns a new string; without the assignment a trailing slash would survive
    +            if not use_ssl and not port:
    +                port = 389
    +            elif use_ssl and not port:
    +                port = 636
    +
    +            port_err = check_port(port)
    +            if port_err:
    +                if log_enabled(ERROR):
    +                    log(ERROR, port_err)
    +                raise LDAPInvalidPortError(port_err)
    +            self.port = port
    +
    +        if allowed_referral_hosts is None:  # defaults to any server with authentication
    +            allowed_referral_hosts = [('*', True)]
    +
    +        if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
    +            self.allowed_referral_hosts = []
    +            for referral_host in allowed_referral_hosts:
    +                if isinstance(referral_host, tuple):
    +                    if isinstance(referral_host[1], bool):
    +                        self.allowed_referral_hosts.append(referral_host)
    +        elif isinstance(allowed_referral_hosts, tuple):
    +            if isinstance(allowed_referral_hosts[1], bool):
    +                self.allowed_referral_hosts = [allowed_referral_hosts]
    +        else:
    +            self.allowed_referral_hosts = []
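+        # Normalization note: only (host, bool) tuples survive the checks
+        # above, e.g. [('referral.example.com', True)]; any malformed value
+        # ends up as an empty list, so no referral host is contacted.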
    +
    +        self.ssl = True if use_ssl else False
    +        if tls and not isinstance(tls, Tls):
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid tls specification: <%s>', tls)
    +            raise LDAPInvalidTlsSpecificationError('invalid Tls object')
    +
    +        self.tls = Tls() if self.ssl and not tls else tls
    +
    +        if not self.ipc:
    +            if self._is_ipv6(self.host):
    +                self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
    +            else:
    +                self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
    +        else:
    +            self.name = host
    +
    +        self.get_info = get_info
    +        self._dsa_info = None
    +        self._schema_info = None
    +        self.dit_lock = Lock()
    +        self.custom_formatter = formatter
    +        self.custom_validator = validator
    +        self._address_info = []  # property self.address_info resolved at open time (or when check_availability is called)
    +        self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date ever
    +        self.current_address = None
    +        self.connect_timeout = connect_timeout
    +        self.mode = mode
    +
    +        self.get_info_from_server(None)  # load offline schema if needed
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Server: <%r>', self)
    +
    +    @staticmethod
    +    def _is_ipv6(host):
    +        try:
    +            socket.inet_pton(socket.AF_INET6, host)
+        except (socket.error, AttributeError, ValueError):  # AttributeError covers platforms where inet_pton is missing
    +            return False
    +        return True
    +
    +    def __str__(self):
    +        if self.host:
    +            s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
    +        else:
    +            s = object.__str__(self)
    +        return s
    +
    +    def __repr__(self):
    +        r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
    +        r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
    +        r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
    +        r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
    +        r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
    +        r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
    +        r += ')'
    +
    +        return r
    +
    +    @property
    +    def address_info(self):
    +        conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
    +        if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
+            # converts each getaddrinfo() tuple to a list and appends a 6th element for availability (None = not checked, True = available, False = not available) and a 7th element for the checking time
    +            addresses = None
    +            try:
    +                if self.ipc:
    +                    addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
    +                else:
    +                    if self.mode == IP_V4_ONLY:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
    +                    elif self.mode == IP_V6_ONLY:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
    +                    else:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
    +            except (socket.gaierror, AttributeError):
    +                pass
    +
+            if not addresses:  # if no addresses were found, or an exception was raised (for example for bad flags), tries again without flags
    +                try:
    +                    if self.mode == IP_V4_ONLY:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    +                    elif self.mode == IP_V6_ONLY:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    +                    else:
    +                        addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    +                except socket.gaierror:
    +                    pass
    +
    +            if addresses:
    +                self._address_info = [list(address) + [None, None] for address in addresses]
    +                self._address_info_resolved_time = datetime.now()
    +            else:
    +                self._address_info = []
    +                self._address_info_resolved_time = datetime(MINYEAR, 1, 1)  # smallest date
    +
    +            if log_enabled(BASIC):
    +                for address in self._address_info:
    +                    log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
    +        return self._address_info
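+    # Each element of _address_info is a socket.getaddrinfo() 5-tuple
+    # (family, type, proto, canonname, sockaddr) converted to a list and
+    # extended with the two slots used by the availability logic, e.g. with
+    # placeholder values:
+    #   [AF_INET, SOCK_STREAM, IPPROTO_TCP, '', ('192.0.2.1', 389), None, None]
+    # where index 5 is availability (None = not yet checked) and index 6 is
+    # the time of the last check.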
    +
    +    def update_availability(self, address, available):
    +        cont = 0
    +        while cont < len(self._address_info):
    +            if self.address_info[cont] == address:
    +                self._address_info[cont][5] = True if available else False
    +                self._address_info[cont][6] = datetime.now()
    +                break
    +            cont += 1
    +
    +    def reset_availability(self):
    +        for address in self._address_info:
    +            address[5] = None
    +            address[6] = None
    +
    +    def check_availability(self, source_address=None, source_port=None, source_port_list=None):
    +        """
+        Tries to open, connect and close a socket to the specified address and port to check availability.
+        Timeout in seconds is specified in CHECK_AVAILABILITY_TIMEOUT if not specified in
    +        the Server object.
    +        If specified, use a specific address, port, or list of possible ports, when attempting to check availability.
    +        NOTE: This will only consider multiple ports from the source port list if the first ones we try to bind to are
    +              already in use. This will not attempt using different ports in the list if the server is unavailable,
    +              as that could result in the runtime of check_availability significantly exceeding the connection timeout.
    +        """
    +        source_port_err = check_port_and_port_list(source_port, source_port_list)
    +        if source_port_err:
    +            if log_enabled(ERROR):
    +                log(ERROR, source_port_err)
    +            raise LDAPInvalidPortError(source_port_err)
    +
    +        # using an empty string to bind a socket means "use the default as if this wasn't provided" because socket
    +        # binding requires that you pass something for the ip if you want to pass a specific port
    +        bind_address = source_address if source_address is not None else ''
    +        # using 0 as the source port to bind a socket means "use the default behavior of picking a random port from
    +        # all ports as if this wasn't provided" because socket binding requires that you pass something for the port
    +        # if you want to pass a specific ip
    +        candidate_bind_ports = [0]
    +
    +        # if we have either a source port or source port list, convert that into our candidate list
    +        if source_port is not None:
    +            candidate_bind_ports = [source_port]
    +        elif source_port_list is not None:
    +            candidate_bind_ports = source_port_list[:]
    +
    +        conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT')
    +        available = False
    +        self.reset_availability()
    +        for address in self.candidate_addresses():
    +            available = True
    +            try:
    +                temp_socket = socket.socket(*address[:3])
    +
    +                # Go through our candidate bind ports and try to bind our socket to our source address with them.
    +                # if no source address or ports were specified, this will have the same success/fail result as if we
    +                # tried to connect to the remote server without binding locally first.
    +                # This is actually a little bit better, as it lets us distinguish the case of "issue binding the socket
    +                # locally" from "remote server is unavailable" with more clarity, though this will only really be an
    +                # issue when no source address/port is specified if the system checking server availability is running
    +                # as a very unprivileged user.
    +                last_bind_exc = None
    +                socket_bind_succeeded = False
    +                for bind_port in candidate_bind_ports:
    +                    try:
    +                        temp_socket.bind((bind_address, bind_port))
    +                        socket_bind_succeeded = True
    +                        break
    +                    except Exception as bind_ex:
    +                        last_bind_exc = bind_ex
    +                        if log_enabled(NETWORK):
    +                            log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>',
    +                                bind_address, bind_port, bind_ex)
    +                if not socket_bind_succeeded:
    +                    if log_enabled(ERROR):
+                        log(ERROR, 'Unable to bind to local address <%s> with any of the source ports <%s> due to <%s>',
    +                            bind_address, candidate_bind_ports, last_bind_exc)
    +                    raise LDAPSocketOpenError('Unable to bind socket locally to address {} with any of the source ports {} due to {}'
    +                                              .format(bind_address, candidate_bind_ports, last_bind_exc))
    +
    +                if self.connect_timeout:
    +                    temp_socket.settimeout(self.connect_timeout)
    +                else:
    +                    temp_socket.settimeout(conf_availability_timeout)  # set timeout for checking availability to default
    +                try:
    +                    temp_socket.connect(address[4])
    +                except socket.error:
    +                    available = False
    +                finally:
    +                    try:
    +                        temp_socket.shutdown(socket.SHUT_RDWR)
    +                    except socket.error:
    +                        available = False
    +                    finally:
    +                        temp_socket.close()
    +            except socket.gaierror:
    +                available = False
    +
    +            if available:
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'server <%s> available at <%r>', self, address)
    +                self.update_availability(address, True)
    +                break  # if an available address is found exits immediately
    +            else:
    +                self.update_availability(address, False)
    +                if log_enabled(ERROR):
    +                    log(ERROR, 'server <%s> not available at <%r>', self, address)
    +
    +        return available
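+    # Availability-check sketch (host name is a placeholder):
+    #   server = Server('ldap.example.com', connect_timeout=5)
+    #   if server.check_availability():
+    #       ...  # at least one resolved address accepted a TCP connection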
    +
    +    @staticmethod
    +    def next_message_id():
    +        """
+        LDAP messageId is unique for all connections to the same server
    +        """
    +        with Server._message_id_lock:
    +            Server._message_counter += 1
    +            if Server._message_counter >= LDAP_MAX_INT:
    +                Server._message_counter = 1
    +            if log_enabled(PROTOCOL):
    +                log(PROTOCOL, 'new message id <%d> generated', Server._message_counter)
    +
    +        return Server._message_counter
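+    # The counter is a class attribute guarded by a class-level lock, so ids
+    # are unique across every Server instance in the process; e.g. in a fresh
+    # process:
+    #   Server.next_message_id()  # -> 1
+    #   Server.next_message_id()  # -> 2 (wraps back to 1 at LDAP_MAX_INT)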
    +
    +    def _get_dsa_info(self, connection):
    +        """
    +        Retrieve DSE operational attribute as per RFC4512 (5.1).
    +        """
    +        if connection.strategy.no_real_dsa:  # do not try for mock strategies
    +            return
    +
    +        if not connection.strategy.pooled:  # in pooled strategies get_dsa_info is performed by the worker threads
    +            result = connection.search(search_base='',
    +                                       search_filter='(objectClass=*)',
    +                                       search_scope=BASE,
    +                                       attributes=['altServer',  # requests specific dsa info attributes
    +                                                   'namingContexts',
    +                                                   'supportedControl',
    +                                                   'supportedExtension',
    +                                                   'supportedFeatures',
    +                                                   'supportedCapabilities',
    +                                                   'supportedLdapVersion',
    +                                                   'supportedSASLMechanisms',
    +                                                   'vendorName',
    +                                                   'vendorVersion',
    +                                                   'subschemaSubentry',
    +                                                   '*',
+                                                   '+'],  # requests all remaining attributes (other)
    +                                       get_operational_attributes=True)
    +
    +            with self.dit_lock:
    +                if isinstance(result, bool):  # sync request
    +                    self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info
    +                elif result:  # asynchronous request, must check if attributes in response
    +                    results, _ = connection.get_response(result)
    +                    if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
    +                        self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes'])
    +
    +            if log_enabled(BASIC):
    +                log(BASIC, 'DSA info read for <%s> via <%s>', self, connection)
    +
    +    def _get_schema_info(self, connection, entry=''):
    +        """
    +        Retrieve schema from subschemaSubentry DSE attribute, per RFC
    +        4512 (4.4 and 5.1); entry = '' means DSE.
    +        """
    +        if connection.strategy.no_real_dsa:  # do not try for mock strategies
    +            return
    +
    +        schema_entry = None
    +        if self._dsa_info and entry == '':  # subschemaSubentry already present in dsaInfo
    +            if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES):
    +                schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None
    +            else:
    +                schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None
    +        else:
    +            result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True)
    +            if isinstance(result, bool):  # sync request
    +                if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']:
    +                    if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0:
    +                        schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0]
    +            else:  # asynchronous request, must check if subschemaSubentry in attributes
    +                results, _ = connection.get_response(result)
    +                if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']:
    +                    if len(results[0]['raw_attributes']['subschemaSubentry']) > 0:
    +                        schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0]
    +
    +        if schema_entry and not connection.strategy.pooled:  # in pooled strategies get_schema_info is performed by the worker threads
    +            if isinstance(schema_entry, bytes) and str is not bytes:  # Python 3
    +                schema_entry = to_unicode(schema_entry, from_server=True)
    +            result = connection.search(schema_entry,
    +                                       search_filter='(objectClass=subschema)',
    +                                       search_scope=BASE,
    +                                       attributes=['objectClasses',  # requests specific subschema attributes
    +                                                   'attributeTypes',
    +                                                   'ldapSyntaxes',
    +                                                   'matchingRules',
    +                                                   'matchingRuleUse',
    +                                                   'dITContentRules',
    +                                                   'dITStructureRules',
    +                                                   'nameForms',
    +                                                   'createTimestamp',
    +                                                   'modifyTimestamp',
    +                                                   '*'],  # requests all remaining attributes (other)
    +                                       get_operational_attributes=True
    +                                       )
    +            with self.dit_lock:
    +                self._schema_info = None
    +                if result:
    +                    if isinstance(result, bool):  # sync request
    +                        self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None
    +                    else:  # asynchronous request, must check if attributes in response
    +                        results, result = connection.get_response(result)
    +                        if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
    +                            self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes'])
    +                    if self._schema_info and not self._schema_info.is_valid():  # flaky servers can return an empty schema, checks if it is so and set schema to None
    +                        self._schema_info = None
    +                    if self._schema_info:  # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info
    +                        for attribute in self._schema_info.other:
    +                            self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter)
    +                        if self._dsa_info:  # try to apply formatter to the "other" dict with dsa info raw values
    +                            for attribute in self._dsa_info.other:
    +                                self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter)
    +            if log_enabled(BASIC):
    +                log(BASIC, 'schema read for <%s> via <%s>', self, connection)
    +
    +    def get_info_from_server(self, connection):
    +        """
+        Reads info from the DSE and from the subschema.
    +        """
    +        if connection and not connection.closed:
    +            if self.get_info in [DSA, ALL]:
    +                self._get_dsa_info(connection)
    +            if self.get_info in [SCHEMA, ALL]:
+                self._get_schema_info(connection)
    +        elif self.get_info == OFFLINE_EDIR_8_8_8:
    +            from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
    +            self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema))
    +            self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info))
    +        elif self.get_info == OFFLINE_EDIR_9_1_4:
    +            from ..protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info
    +            self.attach_schema_info(SchemaInfo.from_json(edir_9_1_4_schema))
    +            self.attach_dsa_info(DsaInfo.from_json(edir_9_1_4_dsa_info))
    +        elif self.get_info == OFFLINE_AD_2012_R2:
    +            from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info
    +            self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema))
    +            self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info))
    +        elif self.get_info == OFFLINE_SLAPD_2_4:
    +            from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info
    +            self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema))
    +            self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info))
    +        elif self.get_info == OFFLINE_DS389_1_3_3:
    +            from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info
    +            self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema))
    +            self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info))
    +
    +    def attach_dsa_info(self, dsa_info=None):
    +        if isinstance(dsa_info, DsaInfo):
    +            self._dsa_info = dsa_info
    +            if log_enabled(BASIC):
    +                log(BASIC, 'attached DSA info to Server <%s>', self)
    +
    +    def attach_schema_info(self, dsa_schema=None):
    +        if isinstance(dsa_schema, SchemaInfo):
    +            self._schema_info = dsa_schema
    +        if log_enabled(BASIC):
    +            log(BASIC, 'attached schema info to Server <%s>', self)
    +
    +    @property
    +    def info(self):
    +        return self._dsa_info
    +
    +    @property
    +    def schema(self):
    +        return self._schema_info
    +
    +    @staticmethod
    +    def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None):
    +        """
    +        Define a dummy server with preloaded schema and info
    +        :param host: host name
    +        :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name
    +        :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name
+        :param port: dummy port
+        :param use_ssl: use_ssl
+        :param formatter: custom formatter
+        :param validator: custom validator
    +        :return: Server object
    +        """
    +        if isinstance(host, SEQUENCE_TYPES):
    +            dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)  # for ServerPool object
    +        else:
    +            dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)
    +        if isinstance(dsa_info, DsaInfo):
    +            dummy._dsa_info = dsa_info
    +        elif isinstance(dsa_info, STRING_TYPES):
    +            try:
    +                dummy._dsa_info = DsaInfo.from_json(dsa_info)  # tries to use dsa_info as a json configuration string
    +            except Exception:
    +                dummy._dsa_info = DsaInfo.from_file(dsa_info)  # tries to use dsa_info as a file name
    +
    +        if not dummy.info:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid DSA info for %s', host)
    +            raise LDAPDefinitionError('invalid dsa info')
    +
    +        if isinstance(dsa_schema, SchemaInfo):
    +            dummy._schema_info = dsa_schema
    +        elif isinstance(dsa_schema, STRING_TYPES):
    +            try:
    +                dummy._schema_info = SchemaInfo.from_json(dsa_schema)
    +            except Exception:
    +                dummy._schema_info = SchemaInfo.from_file(dsa_schema)
    +
    +        if not dummy.schema:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid schema info for %s', host)
    +            raise LDAPDefinitionError('invalid schema info')
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'created server <%s> from definition', dummy)
    +
    +        return dummy
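+    # Offline-definition sketch (file names are hypothetical):
+    #   offline = Server.from_definition('my_fake_server',
+    #                                    'my_dsa_info.json',
+    #                                    'my_schema.json')
+    # The resulting object can back a mocked Connection (for example with the
+    # MOCK_SYNC client strategy) without contacting a real DSA.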
    +
    +    def candidate_addresses(self):
    +        conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT')
    +        if self.ipc:
    +            candidates = self.address_info
    +            if log_enabled(BASIC):
    +                log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name)
    +        else:
+            # resets stored availability when the reset-availability timeout has expired
    +            for address in self.address_info:
    +                if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout):
    +                    address[5] = None
    +                    address[6] = None
    +
    +            # selects server address based on server mode and availability (in address[5])
    +            addresses = self.address_info[:]  # copy to avoid refreshing while searching candidates
    +            candidates = []
    +            if addresses:
    +                if self.mode == IP_SYSTEM_DEFAULT:
    +                    candidates.append(addresses[0])
    +                elif self.mode == IP_V4_ONLY:
    +                    candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    +                elif self.mode == IP_V6_ONLY:
    +                    candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    +                elif self.mode == IP_V4_PREFERRED:
    +                    candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    +                    candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    +                elif self.mode == IP_V6_PREFERRED:
    +                    candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
    +                    candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
    +                else:
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'invalid server mode for <%s>', self)
    +                    raise LDAPInvalidServerError('invalid server mode')
    +
    +            if log_enabled(BASIC):
    +                for candidate in candidates:
    +                    log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode)
    +        return candidates
    +
    +    def _check_info_property(self, kind, name):
    +        if not self._dsa_info:
    +            raise LDAPInfoError('server info not loaded')
    +
    +        if kind == 'control':
    +            properties = self.info.supported_controls
    +        elif kind == 'extension':
    +            properties = self.info.supported_extensions
    +        elif kind == 'feature':
    +            properties = self.info.supported_features
    +        else:
    +            raise LDAPInfoError('invalid info category')
    +
    +        for prop in properties:
+            if name == prop[0] or (prop[2] and name.lower() == prop[2].lower()):  # checks oid and description
+                return True
    +
    +        return False
    +
    +    def has_control(self, control):
    +        return self._check_info_property('control', control)
    +
    +    def has_extension(self, extension):
    +        return self._check_info_property('extension', extension)
    +
    +    def has_feature(self, feature):
    +        return self._check_info_property('feature', feature)
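+    # Lookup sketch: _check_info_property matches both OIDs and descriptions,
+    # and raises LDAPInfoError if server info has not been loaded. The OID
+    # below is the Password Modify extended operation from RFC 3062:
+    #   if server.has_extension('1.3.6.1.4.1.4203.1.11.1'):
+    #       ...  # server advertises password modify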
    +
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/timezone.py b/server/www/packages/packages-windows/x86/ldap3/core/timezone.py
    index 728f73b..0c24a77 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/timezone.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/timezone.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/tls.py b/server/www/packages/packages-windows/x86/ldap3/core/tls.py
    index befb019..1539b9f 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/tls.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/tls.py
    @@ -1,321 +1,327 @@
    -"""
    -"""
    -
    -# Created on 2013.08.05
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2013 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory
    -from .. import SEQUENCE_TYPES
    -from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
    -
    -try:
    -    # noinspection PyUnresolvedReferences
    -    import ssl
    -except ImportError:
    -    if log_enabled(ERROR):
    -        log(ERROR, 'SSL not supported in this Python interpreter')
    -    raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter')
    -
    -try:
    -    from ssl import match_hostname, CertificateError  # backport for python2 missing ssl functionalities
    -except ImportError:
    -    from ..utils.tls_backport import CertificateError
    -    from ..utils.tls_backport import match_hostname
    -    if log_enabled(BASIC):
    -        log(BASIC, 'using tls_backport')
    -
    -try:  # try to use SSLContext
    -    # noinspection PyUnresolvedReferences
    -    from ssl import create_default_context, Purpose  # defined in Python 3.4 and Python 2.7.9
    -    use_ssl_context = True
    -except ImportError:
    -    use_ssl_context = False
    -    if log_enabled(BASIC):
    -        log(BASIC, 'SSLContext unavailable')
    -
    -from os import path
    -
    -
    -# noinspection PyProtectedMember
    -class Tls(object):
    -    """
    -    tls/ssl configuration for Server object
    -    Starting from python 2.7.9 and python 3.4 uses the SSLContext object
    -    that tries to read the CAs defined at system level
    -    ca_certs_path and ca_certs_data are valid only when using SSLContext
    -    local_private_key_password is valid only when using SSLContext
    -    sni is the server name for Server Name Indication (when available)
    -    """
    -
    -    def __init__(self,
    -                 local_private_key_file=None,
    -                 local_certificate_file=None,
    -                 validate=ssl.CERT_NONE,
    -                 version=None,
    -                 ca_certs_file=None,
    -                 valid_names=None,
    -                 ca_certs_path=None,
    -                 ca_certs_data=None,
    -                 local_private_key_password=None,
    -                 ciphers=None,
    -                 sni=None):
    -
    -        if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]:
    -            self.validate = validate
    -        elif validate:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid validate parameter <%s>', validate)
    -            raise LDAPSSLConfigurationError('invalid validate parameter')
    -        if ca_certs_file and path.exists(ca_certs_file):
    -            self.ca_certs_file = ca_certs_file
    -        elif ca_certs_file:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid CA public key file <%s>', ca_certs_file)
    -            raise LDAPSSLConfigurationError('invalid CA public key file')
    -        else:
    -            self.ca_certs_file = None
    -
    -        if ca_certs_path and use_ssl_context and path.exists(ca_certs_path):
    -            self.ca_certs_path = ca_certs_path
    -        elif ca_certs_path and not use_ssl_context:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'cannot use CA public keys path, SSLContext not available')
    -            raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available')
    -        elif ca_certs_path:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path)
    -            raise LDAPSSLConfigurationError('invalid CA public keys path')
    -        else:
    -            self.ca_certs_path = None
    -
    -        if ca_certs_data and use_ssl_context:
    -            self.ca_certs_data = ca_certs_data
    -        elif ca_certs_data:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'cannot use CA data, SSLContext not available')
    -            raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available')
    -        else:
    -            self.ca_certs_data = None
    -
    -        if local_private_key_password and use_ssl_context:
    -            self.private_key_password = local_private_key_password
    -        elif local_private_key_password:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'cannot use local private key password, SSLContext not available')
    -            raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available')
    -        else:
    -            self.private_key_password = None
    -
    -        self.version = version
    -        self.private_key_file = local_private_key_file
    -        self.certificate_file = local_certificate_file
    -        self.valid_names = valid_names
    -        self.ciphers = ciphers
    -        self.sni = sni
    -
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated Tls: <%r>' % self)
    -
    -    def __str__(self):
    -        s = [
    -            'protocol: ' + str(self.version),
    -            'client private key: ' + ('present ' if self.private_key_file else 'not present'),
    -            'client certificate: ' + ('present ' if self.certificate_file else 'not present'),
    -            'private key password: ' + ('present ' if self.private_key_password else 'not present'),
    -            'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'),
    -            'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'),
    -            'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'),
    -            'verify mode: ' + str(self.validate),
    -            'valid names: ' + str(self.valid_names),
    -            'ciphers: ' + str(self.ciphers),
    -            'sni: ' + str(self.sni)
    -        ]
    -        return ' - '.join(s)
    -
    -    def __repr__(self):
    -        r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self)
    -        r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self)
    -        r += '' if self.validate is None else ', validate={0.validate!r}'.format(self)
    -        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    -        r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self)
    -        r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self)
    -        r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self)
    -        r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self)
    -        r += '' if self.sni is None else ', sni={0.sni!r}'.format(self)
    -        r = 'Tls(' + r[2:] + ')'
    -        return r
    -
    -    def wrap_socket(self, connection, do_handshake=False):
    -        """
    -        Adds TLS to the connection socket
    -        """
    -        if use_ssl_context:
    -            if self.version is None:  # uses the default ssl context for reasonable security
    -                ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH,
    -                                                     cafile=self.ca_certs_file,
    -                                                     capath=self.ca_certs_path,
    -                                                     cadata=self.ca_certs_data)
    -            else:  # code from create_default_context in the Python standard library 3.5.1, creates a ssl context with the specificd protocol version
    -                ssl_context = ssl.SSLContext(self.version)
    -                if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data:
    -                    ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data)
    -                elif self.validate != ssl.CERT_NONE:
    -                    ssl_context.load_default_certs(Purpose.SERVER_AUTH)
    -
    -            if self.certificate_file:
    -                ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password)
    -            ssl_context.check_hostname = False
    -            ssl_context.verify_mode = self.validate
    -
    -            if self.ciphers:
    -                try:
    -                    ssl_context.set_ciphers(self.ciphers)
    -                except ssl.SSLError:
    -                    pass
    -
    -            if self.sni:
    -                wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni)
    -            else:
    -                wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake)
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection)
    -        else:
    -            if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'):
    -                self.version = ssl.PROTOCOL_SSLv23
    -            if self.ciphers:
    -                try:
    -
    -                    wrapped_socket = ssl.wrap_socket(connection.socket,
    -                                                     keyfile=self.private_key_file,
    -                                                     certfile=self.certificate_file,
    -                                                     server_side=False,
    -                                                     cert_reqs=self.validate,
    -                                                     ssl_version=self.version,
    -                                                     ca_certs=self.ca_certs_file,
    -                                                     do_handshake_on_connect=do_handshake,
    -                                                     ciphers=self.ciphers)
    -                except ssl.SSLError:
    -                    raise
    -                except TypeError:  # in python2.6 no ciphers argument is present, failback to self.ciphers=None
    -                    self.ciphers = None
    -
    -            if not self.ciphers:
    -                wrapped_socket = ssl.wrap_socket(connection.socket,
    -                                                 keyfile=self.private_key_file,
    -                                                 certfile=self.certificate_file,
    -                                                 server_side=False,
    -                                                 cert_reqs=self.validate,
    -                                                 ssl_version=self.version,
    -                                                 ca_certs=self.ca_certs_file,
    -                                                 do_handshake_on_connect=do_handshake)
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'socket wrapped with SSL for <%s>', connection)
    -
    -        if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL):
    -            check_hostname(wrapped_socket, connection.server.host, self.valid_names)
    -
    -        connection.socket = wrapped_socket
    -        return
    -
    -    def start_tls(self, connection):
    -        if connection.server.ssl:  # ssl already established at server level
    -            return False
    -
    -        if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress:
    -            # Per RFC 4513 (3.1.1)
    -            if log_enabled(ERROR):
    -                log(ERROR, "can't start tls because operations are in progress for <%s>", self)
    -            return False
    -        connection.starting_tls = True
    -        if log_enabled(BASIC):
    -            log(BASIC, 'starting tls for <%s>', connection)
    -        if not connection.strategy.sync:
    -            connection._awaiting_for_async_start_tls = True  # some flaky servers (OpenLDAP) doesn't return the extended response name in response
    -        result = connection.extended('1.3.6.1.4.1.1466.20037')
    -        if not connection.strategy.sync:
    -            # asynchronous - _start_tls must be executed by the strategy
    -            response = connection.get_response(result)
    -            if response != (None, None):
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'tls started for <%s>', connection)
    -                return True
    -            else:
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'tls not started for <%s>', connection)
    -                return False
    -        else:
    -            if connection.result['description'] not in ['success']:
    -                # startTLS failed
    -                connection.last_error = 'startTLS failed - ' + str(connection.result['description'])
    -                if log_enabled(ERROR):
    -                    log(ERROR, '%s for <%s>', connection.last_error, connection)
    -                raise LDAPStartTLSError(connection.last_error)
    -            if log_enabled(BASIC):
    -                log(BASIC, 'tls started for <%s>', connection)
    -            return self._start_tls(connection)
    -
    -    def _start_tls(self, connection):
    -        try:
    -            self.wrap_socket(connection, do_handshake=True)
    -        except Exception as e:
    -            connection.last_error = 'wrap socket error: ' + str(e)
    -            if log_enabled(ERROR):
    -                log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection)
    -            raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error)
    -        finally:
    -            connection.starting_tls = False
    -
    -        if connection.usage:
    -            connection._usage.wrapped_sockets += 1
    -        connection.tls_started = True
    -        return True
    -
    -
    -def check_hostname(sock, server_name, additional_names):
    -    server_certificate = sock.getpeercert()
    -    if log_enabled(NETWORK):
    -        log(NETWORK, 'certificate found for %s: %s', sock, server_certificate)
    -    if additional_names:
    -        host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names])
    -    else:
    -        host_names = [server_name]
    -
    -    for host_name in host_names:
    -        if not host_name:
    -            continue
    -        elif host_name == '*':
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'certificate matches * wildcard')
    -            return  # valid
    -
    -        try:
    -            match_hostname(server_certificate, host_name)  # raise CertificateError if certificate doesn't match server name
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'certificate matches host name <%s>', host_name)
    -            return  # valid
    -        except CertificateError as e:
    -            if log_enabled(NETWORK):
    -                log(NETWORK, str(e))
    -
    -    if log_enabled(ERROR):
    -        log(ERROR, "hostname doesn't match certificate")
    -    raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names)))
    +"""
    +"""
    +
    +# Created on 2013.08.05
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2013 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory
    +from .. import SEQUENCE_TYPES
    +from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
    +
    +try:
    +    # noinspection PyUnresolvedReferences
    +    import ssl
    +except ImportError:
    +    if log_enabled(ERROR):
    +        log(ERROR, 'SSL not supported in this Python interpreter')
    +    raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter')
    +
    +try:
    +    from ssl import match_hostname, CertificateError  # backport for python2 missing ssl functionalities
    +except ImportError:
    +    from ..utils.tls_backport import CertificateError
    +    from ..utils.tls_backport import match_hostname
    +    if log_enabled(BASIC):
    +        log(BASIC, 'using tls_backport')
    +
    +try:  # try to use SSLContext
    +    # noinspection PyUnresolvedReferences
    +    from ssl import create_default_context, Purpose  # defined in Python 3.4 and Python 2.7.9
    +    use_ssl_context = True
    +except ImportError:
    +    use_ssl_context = False
    +    if log_enabled(BASIC):
    +        log(BASIC, 'SSLContext unavailable')
    +
    +from os import path
    +
    +
    +# noinspection PyProtectedMember
    +class Tls(object):
    +    """
    +    tls/ssl configuration for Server object
+    From Python 2.7.9 and Python 3.4 onwards it uses the SSLContext object,
+    which tries to read the CAs defined at system level
    +    ca_certs_path and ca_certs_data are valid only when using SSLContext
    +    local_private_key_password is valid only when using SSLContext
    +    ssl_options is valid only when using SSLContext
    +    sni is the server name for Server Name Indication (when available)
    +    """
    +
    +    def __init__(self,
    +                 local_private_key_file=None,
    +                 local_certificate_file=None,
    +                 validate=ssl.CERT_NONE,
    +                 version=None,
    +                 ssl_options=None,
    +                 ca_certs_file=None,
    +                 valid_names=None,
    +                 ca_certs_path=None,
    +                 ca_certs_data=None,
    +                 local_private_key_password=None,
    +                 ciphers=None,
    +                 sni=None):
    +        if ssl_options is None:
    +            ssl_options = []
    +        self.ssl_options = ssl_options
    +        if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]:
    +            self.validate = validate
    +        elif validate:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid validate parameter <%s>', validate)
    +            raise LDAPSSLConfigurationError('invalid validate parameter')
    +        if ca_certs_file and path.exists(ca_certs_file):
    +            self.ca_certs_file = ca_certs_file
    +        elif ca_certs_file:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid CA public key file <%s>', ca_certs_file)
    +            raise LDAPSSLConfigurationError('invalid CA public key file')
    +        else:
    +            self.ca_certs_file = None
    +
    +        if ca_certs_path and use_ssl_context and path.exists(ca_certs_path):
    +            self.ca_certs_path = ca_certs_path
    +        elif ca_certs_path and not use_ssl_context:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'cannot use CA public keys path, SSLContext not available')
    +            raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available')
    +        elif ca_certs_path:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path)
    +            raise LDAPSSLConfigurationError('invalid CA public keys path')
    +        else:
    +            self.ca_certs_path = None
    +
    +        if ca_certs_data and use_ssl_context:
    +            self.ca_certs_data = ca_certs_data
    +        elif ca_certs_data:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'cannot use CA data, SSLContext not available')
    +            raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available')
    +        else:
    +            self.ca_certs_data = None
    +
    +        if local_private_key_password and use_ssl_context:
    +            self.private_key_password = local_private_key_password
    +        elif local_private_key_password:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'cannot use local private key password, SSLContext not available')
    +            raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available')
    +        else:
    +            self.private_key_password = None
    +
    +        self.version = version
    +        self.private_key_file = local_private_key_file
    +        self.certificate_file = local_certificate_file
    +        self.valid_names = valid_names
    +        self.ciphers = ciphers
    +        self.sni = sni
    +
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated Tls: <%r>' % self)
    +
    +    def __str__(self):
    +        s = [
    +            'protocol: ' + str(self.version),
    +            'client private key: ' + ('present ' if self.private_key_file else 'not present'),
    +            'client certificate: ' + ('present ' if self.certificate_file else 'not present'),
    +            'private key password: ' + ('present ' if self.private_key_password else 'not present'),
    +            'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'),
    +            'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'),
    +            'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'),
    +            'verify mode: ' + str(self.validate),
    +            'valid names: ' + str(self.valid_names),
    +            'ciphers: ' + str(self.ciphers),
    +            'sni: ' + str(self.sni)
    +        ]
    +        return ' - '.join(s)
    +
    +    def __repr__(self):
    +        r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self)
    +        r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self)
    +        r += '' if self.validate is None else ', validate={0.validate!r}'.format(self)
    +        r += '' if self.version is None else ', version={0.version!r}'.format(self)
    +        r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self)
    +        r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self)
    +        r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self)
    +        r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self)
    +        r += '' if self.sni is None else ', sni={0.sni!r}'.format(self)
    +        r = 'Tls(' + r[2:] + ')'
    +        return r
    +
    +    def wrap_socket(self, connection, do_handshake=False):
    +        """
    +        Adds TLS to the connection socket
    +        """
    +        if use_ssl_context:
    +            if self.version is None:  # uses the default ssl context for reasonable security
    +                ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH,
    +                                                     cafile=self.ca_certs_file,
    +                                                     capath=self.ca_certs_path,
    +                                                     cadata=self.ca_certs_data)
+            else:  # code from create_default_context in the Python standard library 3.5.1, creates an SSL context with the specified protocol version
    +                ssl_context = ssl.SSLContext(self.version)
    +                if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data:
    +                    ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data)
    +                elif self.validate != ssl.CERT_NONE:
    +                    ssl_context.load_default_certs(Purpose.SERVER_AUTH)
    +
    +            if self.certificate_file:
    +                ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password)
    +            ssl_context.check_hostname = False
    +            ssl_context.verify_mode = self.validate
    +            for option in self.ssl_options:
    +                ssl_context.options |= option
    +
    +            if self.ciphers:
    +                try:
    +                    ssl_context.set_ciphers(self.ciphers)
    +                except ssl.SSLError:
    +                    pass
    +
    +            if self.sni:
    +                wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni)
    +            else:
    +                wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake)
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection)
    +        else:
    +            if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'):
    +                self.version = ssl.PROTOCOL_SSLv23
    +            if self.ciphers:
+                try:
    +                    wrapped_socket = ssl.wrap_socket(connection.socket,
    +                                                     keyfile=self.private_key_file,
    +                                                     certfile=self.certificate_file,
    +                                                     server_side=False,
    +                                                     cert_reqs=self.validate,
    +                                                     ssl_version=self.version,
    +                                                     ca_certs=self.ca_certs_file,
    +                                                     do_handshake_on_connect=do_handshake,
    +                                                     ciphers=self.ciphers)
    +                except ssl.SSLError:
    +                    raise
+                except TypeError:  # in python 2.6 no ciphers argument is present, fall back to self.ciphers=None
    +                    self.ciphers = None
    +
    +            if not self.ciphers:
    +                wrapped_socket = ssl.wrap_socket(connection.socket,
    +                                                 keyfile=self.private_key_file,
    +                                                 certfile=self.certificate_file,
    +                                                 server_side=False,
    +                                                 cert_reqs=self.validate,
    +                                                 ssl_version=self.version,
    +                                                 ca_certs=self.ca_certs_file,
    +                                                 do_handshake_on_connect=do_handshake)
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'socket wrapped with SSL for <%s>', connection)
    +
    +        if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL):
    +            check_hostname(wrapped_socket, connection.server.host, self.valid_names)
    +
    +        connection.socket = wrapped_socket
    +        return
    +
    +    def start_tls(self, connection):
    +        if connection.server.ssl:  # ssl already established at server level
    +            return False
    +
    +        if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress:
    +            # Per RFC 4513 (3.1.1)
    +            if log_enabled(ERROR):
    +                log(ERROR, "can't start tls because operations are in progress for <%s>", self)
    +            return False
    +        connection.starting_tls = True
    +        if log_enabled(BASIC):
    +            log(BASIC, 'starting tls for <%s>', connection)
    +        if not connection.strategy.sync:
+            connection._awaiting_for_async_start_tls = True  # some flaky servers (OpenLDAP) don't return the extended response name in the response
    +        result = connection.extended('1.3.6.1.4.1.1466.20037')
    +        if not connection.strategy.sync:
    +            # asynchronous - _start_tls must be executed by the strategy
    +            response = connection.get_response(result)
    +            if response != (None, None):
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'tls started for <%s>', connection)
    +                return True
    +            else:
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'tls not started for <%s>', connection)
    +                return False
    +        else:
    +            if connection.result['description'] not in ['success']:
    +                # startTLS failed
    +                connection.last_error = 'startTLS failed - ' + str(connection.result['description'])
    +                if log_enabled(ERROR):
    +                    log(ERROR, '%s for <%s>', connection.last_error, connection)
    +                raise LDAPStartTLSError(connection.last_error)
    +            if log_enabled(BASIC):
    +                log(BASIC, 'tls started for <%s>', connection)
    +            return self._start_tls(connection)
    +
    +    def _start_tls(self, connection):
    +        try:
    +            self.wrap_socket(connection, do_handshake=True)
    +        except Exception as e:
    +            connection.last_error = 'wrap socket error: ' + str(e)
    +            if log_enabled(ERROR):
    +                log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection)
    +            raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error)
    +        finally:
    +            connection.starting_tls = False
    +
    +        if connection.usage:
    +            connection._usage.wrapped_sockets += 1
    +        connection.tls_started = True
    +        return True
    +
    +
    +def check_hostname(sock, server_name, additional_names):
    +    server_certificate = sock.getpeercert()
    +    if log_enabled(NETWORK):
    +        log(NETWORK, 'certificate found for %s: %s', sock, server_certificate)
    +    if additional_names:
    +        host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names])
    +    else:
    +        host_names = [server_name]
    +
    +    for host_name in host_names:
    +        if not host_name:
    +            continue
    +        elif host_name == '*':
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'certificate matches * wildcard')
    +            return  # valid
    +
    +        try:
    +            match_hostname(server_certificate, host_name)  # raise CertificateError if certificate doesn't match server name
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'certificate matches host name <%s>', host_name)
    +            return  # valid
    +        except CertificateError as e:
    +            if log_enabled(NETWORK):
    +                log(NETWORK, str(e))
    +
    +    if log_enabled(ERROR):
    +        log(ERROR, "hostname doesn't match certificate")
    +    raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names)))
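
The rewritten tls.py above folds the new ssl_options list into the SSLContext (the ssl_context.options |= option loop). A minimal usage sketch, not part of the patch, with a hypothetical host, credentials and CA bundle:

    import ssl
    from ldap3 import Server, Connection, Tls

    # hypothetical values throughout; ssl_options is the parameter added in this patch
    tls = Tls(validate=ssl.CERT_REQUIRED,
              version=ssl.PROTOCOL_TLSv1_2,
              ca_certs_file='ca.pem',
              ssl_options=[ssl.OP_NO_COMPRESSION])
    server = Server('ldap.example.com', port=389, tls=tls)
    conn = Connection(server, 'cn=admin,dc=example,dc=com', 'secret')
    conn.open()
    conn.start_tls()  # issues the 1.3.6.1.4.1.1466.20037 extended operation, then wraps the socket
    conn.bind()
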
    diff --git a/server/www/packages/packages-windows/x86/ldap3/core/usage.py b/server/www/packages/packages-windows/x86/ldap3/core/usage.py
    index 187d415..7748c76 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/core/usage.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/core/usage.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/__init__.py b/server/www/packages/packages-windows/x86/ldap3/extend/__init__.py
    index 24f426e..32795ef 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/__init__.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/__init__.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -169,6 +169,33 @@ class StandardExtendedOperations(ExtendedOperationContainer):
                                     streaming,
                                     callback)
     
    +    def funnel_search(self,
    +                      search_base='',
    +                      search_filter='',
    +                      search_scope=SUBTREE,
    +                      dereference_aliases=DEREF_NEVER,
    +                      attributes=ALL_ATTRIBUTES,
    +                      size_limit=0,
    +                      time_limit=0,
    +                      controls=None,
    +                      streaming=False,
    +                      callback=None
    +                      ):
    +        return PersistentSearch(self._connection,
    +                                search_base,
    +                                search_filter,
    +                                search_scope,
    +                                dereference_aliases,
    +                                attributes,
    +                                size_limit,
    +                                time_limit,
    +                                controls,
    +                                None,
    +                                None,
    +                                None,
    +                                streaming,
    +                                callback)
    +
     
     class NovellExtendedOperations(ExtendedOperationContainer):
         def get_bind_dn(self, controls=None):
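
The funnel_search() helper added above reuses the PersistentSearch plumbing but passes None for events_type, changes_only and notifications, so (with the guard added to PersistentSearch further down) no persistent-search control is attached and the server simply streams ordinary search results to the callback. A sketch of a call, assuming a hypothetical server and an ASYNC_STREAM connection:

    from ldap3 import Server, Connection, ASYNC_STREAM

    def on_entry(entry):
        print(entry)  # invoked for each entry as it arrives

    conn = Connection(Server('ldap.example.com'),
                      'cn=admin,dc=example,dc=com', 'secret',
                      client_strategy=ASYNC_STREAM, auto_bind=True)
    searcher = conn.extend.standard.funnel_search(
        search_base='dc=example,dc=com',
        search_filter='(objectClass=person)',
        callback=on_entry)
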
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/addMembersToGroups.py b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/addMembersToGroups.py
    index 28c409f..eaf6cfd 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/addMembersToGroups.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/addMembersToGroups.py
    @@ -1,81 +1,93 @@
    -"""
    -"""
    -
    -# Created on 2016.12.26
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2016 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -from ...core.exceptions import LDAPInvalidDnError
    -from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
    -
    -
    -def ad_add_members_to_groups(connection,
    -                             members_dn,
    -                             groups_dn,
    -                             fix=True):
    -    """
    -    :param connection: a bound Connection object
    -    :param members_dn: the list of members to add to groups
    -    :param groups_dn: the list of groups where members are to be added
    -    :param fix: checks for group existence and already assigned members
    -    :return: a boolean where True means that the operation was successful and False means an error has happened
    -    Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups.
    -    Raises LDAPInvalidDnError if members or groups are not found in the DIT.
    -    """
    -
    -    if not isinstance(members_dn, SEQUENCE_TYPES):
    -        members_dn = [members_dn]
    -
    -    if not isinstance(groups_dn, SEQUENCE_TYPES):
    -        groups_dn = [groups_dn]
    -
    -    error = False
    -    for group in groups_dn:
    -        if fix:  # checks for existance of group and for already assigned members
    -            result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
    -
    -            if not connection.strategy.sync:
    -                response, result = connection.get_response(result)
    -            else:
    -                response, result = connection.response, connection.result
    -
    -            if not result['description'] == 'success':
    -                raise LDAPInvalidDnError(group + ' not found')
    -
    -            existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
    -            existing_members = [element.lower() for element in existing_members]
    -        else:
    -            existing_members = []
    -
    -        changes = dict()
    -        member_to_add = [element for element in members_dn if element.lower() not in existing_members]
    -        if member_to_add:
    -            changes['member'] = (MODIFY_ADD, member_to_add)
    -        if changes:
    -            result = connection.modify(group, changes)
    -            if not connection.strategy.sync:
    -                _, result = connection.get_response(result)
    -            else:
    -                result = connection.result
    -            if result['description'] != 'success':
    -                error = True
    -                break
    -
    -    return not error  # returns True if no error is raised in the LDAP operations
    +"""
    +"""
    +
    +# Created on 2016.12.26
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2016 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
    +from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult
    +from ...utils.dn import safe_dn
    +
    +
    +def ad_add_members_to_groups(connection,
    +                             members_dn,
    +                             groups_dn,
    +                             fix=True,
    +                             raise_error=False):
    +    """
    +    :param connection: a bound Connection object
    +    :param members_dn: the list of members to add to groups
    +    :param groups_dn: the list of groups where members are to be added
    +    :param fix: checks for group existence and already assigned members
+    :param raise_error: if the operation fails, raise an exception instead of returning False
    +    :return: a boolean where True means that the operation was successful and False means an error has happened
    +    Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups.
    +    Raises LDAPInvalidDnError if members or groups are not found in the DIT.
    +    """
    +
    +    if not isinstance(members_dn, SEQUENCE_TYPES):
    +        members_dn = [members_dn]
    +
    +    if not isinstance(groups_dn, SEQUENCE_TYPES):
    +        groups_dn = [groups_dn]
    +
    +    if connection.check_names:  # builds new lists with sanitized dn
    +        members_dn = [safe_dn(member_dn) for member_dn in members_dn]
    +        groups_dn = [safe_dn(group_dn) for group_dn in groups_dn]
    +
    +    error = False
    +    for group in groups_dn:
+        if fix:  # checks for existence of the group and for already assigned members
    +            result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER,
    +                                       attributes=['member'])
    +
    +            if not connection.strategy.sync:
    +                response, result = connection.get_response(result)
    +            else:
    +                response, result = connection.response, connection.result
    +
    +            if not result['description'] == 'success':
    +                raise LDAPInvalidDnError(group + ' not found')
    +
    +            existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
    +            existing_members = [element.lower() for element in existing_members]
    +        else:
    +            existing_members = []
    +
    +        changes = dict()
    +        member_to_add = [element for element in members_dn if element.lower() not in existing_members]
    +        if member_to_add:
    +            changes['member'] = (MODIFY_ADD, member_to_add)
    +        if changes:
    +            result = connection.modify(group, changes)
    +            if not connection.strategy.sync:
    +                _, result = connection.get_response(result)
    +            else:
    +                result = connection.result
    +            if result['description'] != 'success':
    +                error = True
    +                result_error_params = ['result', 'description', 'dn', 'message']
    +                if raise_error:
    +                    raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params])
    +                break
    +
    +    return not error  # returns True if no error is raised in the LDAP operations
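
With the new raise_error flag a failed modify can surface as an LDAPOperationsErrorResult instead of a silent False. A hedged sketch, assuming a bound Connection conn and placeholder DNs:

    from ldap3.core.exceptions import LDAPOperationsErrorResult
    from ldap3.extend.microsoft.addMembersToGroups import ad_add_members_to_groups

    try:
        ok = ad_add_members_to_groups(conn,
                                      'cn=jdoe,ou=users,dc=example,dc=com',
                                      'cn=staff,ou=groups,dc=example,dc=com',
                                      fix=True, raise_error=True)
    except LDAPOperationsErrorResult as e:
        print('add failed:', e)  # built from the result/description/dn/message items
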
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/dirSync.py b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/dirSync.py
    index cb18e7a..db403a1 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/dirSync.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/dirSync.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/modifyPassword.py b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/modifyPassword.py
    index 4a17fb0..0bf1c06 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/modifyPassword.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/modifyPassword.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/removeMembersFromGroups.py b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/removeMembersFromGroups.py
    index 1b7feb3..0998713 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/removeMembersFromGroups.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/removeMembersFromGroups.py
    @@ -1,93 +1,92 @@
    -"""
    -"""
    -
    -# Created on 2016.12.26
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2016 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -from ...core.exceptions import LDAPInvalidDnError
    -from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
    -from ...utils.dn import safe_dn
    -
    -
    -def ad_remove_members_from_groups(connection,
    -                                  members_dn,
    -                                  groups_dn,
    -                                  fix):
    -    """
    -    :param connection: a bound Connection object
    -    :param members_dn: the list of members to remove from groups
    -    :param groups_dn: the list of groups where members are to be removed
    -    :param fix: checks for group existence and existing members
    -    :return: a boolean where True means that the operation was successful and False means an error has happened
    -    Removes users-groups relations following the Activwe Directory rules: users are removed from groups' member attribute
    -
    -    """
    -    if not isinstance(members_dn, SEQUENCE_TYPES):
    -        members_dn = [members_dn]
    -
    -    if not isinstance(groups_dn, SEQUENCE_TYPES):
    -        groups_dn = [groups_dn]
    -
    -    if connection.check_names:  # builds new lists with sanitized dn
    -        safe_members_dn = []
    -        safe_groups_dn = []
    -        for member_dn in members_dn:
    -            safe_members_dn.append(safe_dn(member_dn))
    -        for group_dn in groups_dn:
    -            safe_groups_dn.append(safe_dn(group_dn))
    -
    -        members_dn = safe_members_dn
    -        groups_dn = safe_groups_dn
    -
    -    error = False
    -
    -    for group in groups_dn:
    -        if fix:  # checks for existance of group and for already assigned members
    -            result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
    -
    -            if not connection.strategy.sync:
    -                response, result = connection.get_response(result)
    -            else:
    -                response, result = connection.response, connection.result
    -
    -            if not result['description'] == 'success':
    -                raise LDAPInvalidDnError(group + ' not found')
    -
    -            existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
    -        else:
    -            existing_members = members_dn
    -
    -        existing_members = [element.lower() for element in existing_members]
    -        changes = dict()
    -        member_to_remove = [element for element in members_dn if element.lower() in existing_members]
    -        if member_to_remove:
    -            changes['member'] = (MODIFY_DELETE, member_to_remove)
    -        if changes:
    -            result = connection.modify(group, changes)
    -            if not connection.strategy.sync:
    -                _, result = connection.get_response(result)
    -            else:
    -                result = connection.result
    -            if result['description'] != 'success':
    -                error = True
    -                break
    -
    -    return not error
    +"""
    +"""
    +
    +# Created on 2016.12.26
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2016 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult
    +from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
    +from ...utils.dn import safe_dn
    +
    +
    +def ad_remove_members_from_groups(connection,
    +                                  members_dn,
    +                                  groups_dn,
    +                                  fix,
    +                                  raise_error=False):
    +    """
    +    :param connection: a bound Connection object
    +    :param members_dn: the list of members to remove from groups
    +    :param groups_dn: the list of groups where members are to be removed
    +    :param fix: checks for group existence and existing members
+    :param raise_error: if the operation fails, raise an exception instead of returning False
    +    :return: a boolean where True means that the operation was successful and False means an error has happened
+    Removes users-groups relations following the Active Directory rules: users are removed from the groups' member attribute
    +
    +    """
    +    if not isinstance(members_dn, SEQUENCE_TYPES):
    +        members_dn = [members_dn]
    +
    +    if not isinstance(groups_dn, SEQUENCE_TYPES):
    +        groups_dn = [groups_dn]
    +
    +    if connection.check_names:  # builds new lists with sanitized dn
    +        members_dn = [safe_dn(member_dn) for member_dn in members_dn]
    +        groups_dn = [safe_dn(group_dn) for group_dn in groups_dn]
    +
    +    error = False
    +
    +    for group in groups_dn:
+        if fix:  # checks for existence of the group and for already assigned members
    +            result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
    +
    +            if not connection.strategy.sync:
    +                response, result = connection.get_response(result)
    +            else:
    +                response, result = connection.response, connection.result
    +
    +            if not result['description'] == 'success':
    +                raise LDAPInvalidDnError(group + ' not found')
    +
    +            existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
    +        else:
    +            existing_members = members_dn
    +
    +        existing_members = [element.lower() for element in existing_members]
    +        changes = dict()
    +        member_to_remove = [element for element in members_dn if element.lower() in existing_members]
    +        if member_to_remove:
    +            changes['member'] = (MODIFY_DELETE, member_to_remove)
    +        if changes:
    +            result = connection.modify(group, changes)
    +            if not connection.strategy.sync:
    +                _, result = connection.get_response(result)
    +            else:
    +                result = connection.result
    +            if result['description'] != 'success':
    +                error = True
    +                result_error_params = ['result', 'description', 'dn', 'message']
    +                if raise_error:
    +                    raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params])
    +                break
    +
    +    return not error
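
The removal helper mirrors the pattern: DNs are sanitized through safe_dn when connection.check_names is set, and the same optional raise_error escape hatch applies. For illustration, with the same assumed conn and placeholder DNs:

    from ldap3.extend.microsoft.removeMembersFromGroups import ad_remove_members_from_groups

    ok = ad_remove_members_from_groups(conn,
                                       'cn=jdoe,ou=users,dc=example,dc=com',
                                       'cn=staff,ou=groups,dc=example,dc=com',
                                       fix=True, raise_error=False)  # False: failures are reported via the return value
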
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/unlockAccount.py b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/unlockAccount.py
    index 393e08c..bc59b58 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/unlockAccount.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/microsoft/unlockAccount.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/addMembersToGroups.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/addMembersToGroups.py
    index 5583549..d649dc8 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/addMembersToGroups.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/addMembersToGroups.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/checkGroupsMemberships.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/checkGroupsMemberships.py
    index 1013fde..c51dbf2 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/checkGroupsMemberships.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/checkGroupsMemberships.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/endTransaction.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/endTransaction.py
    index 0e9a58c..18bc041 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/endTransaction.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/endTransaction.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/getBindDn.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/getBindDn.py
    index 39fae2b..492bcdd 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/getBindDn.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/getBindDn.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/listReplicas.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/listReplicas.py
    index fdc6d08..8ccf2ff 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/listReplicas.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/listReplicas.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -45,6 +45,6 @@ class ListReplicas(ExtendedOperation):
     
         def populate_result(self):
             try:
    -            self.result['replicas'] = str(self.decoded_response['replicaList']) if self.decoded_response['replicaList'] else None
    +            self.result['replicas'] = [str(replica) for replica in self.decoded_response] if self.decoded_response else None
             except TypeError:
                 self.result['replicas'] = None
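
Note the behavioural change in populate_result above: result['replicas'] is now a list of strings built from the whole decoded response instead of one stringified replicaList value. A sketch of a caller, assuming a bound conn, a placeholder server DN, and that list_replicas hands back the populated 'replicas' value:

    replicas = conn.extend.novell.list_replicas('cn=myserver,o=example')
    for replica in replicas or []:  # None when the server reports no replicas
        print(replica)
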
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasGetUniversalPassword.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasGetUniversalPassword.py
    index b8b045b..291ae92 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasGetUniversalPassword.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasGetUniversalPassword.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasSetUniversalPassword.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasSetUniversalPassword.py
    index 65ea0d6..dadab59 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasSetUniversalPassword.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/nmasSetUniversalPassword.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/partition_entry_count.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/partition_entry_count.py
    index 8218aea..3d46c7a 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/partition_entry_count.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/partition_entry_count.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/removeMembersFromGroups.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/removeMembersFromGroups.py
    index df493ba..c46c275 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/removeMembersFromGroups.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/removeMembersFromGroups.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/replicaInfo.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/replicaInfo.py
    index 45bd0e9..057f934 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/replicaInfo.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/replicaInfo.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/novell/startTransaction.py b/server/www/packages/packages-windows/x86/ldap3/extend/novell/startTransaction.py
    index 2ed21c2..6179cb0 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/novell/startTransaction.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/novell/startTransaction.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/operation.py b/server/www/packages/packages-windows/x86/ldap3/extend/operation.py
    index 9906885..c1d478c 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/operation.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/operation.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/standard/PagedSearch.py b/server/www/packages/packages-windows/x86/ldap3/extend/standard/PagedSearch.py
    index 1b5df49..f8bc7e6 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/standard/PagedSearch.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/standard/PagedSearch.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -47,7 +47,11 @@ def paged_search_generator(connection,
             search_base = safe_dn(search_base)
     
         responses = []
    -    cookie = True  # performs search at least one time
    +    original_connection = None
    +    original_auto_referrals = connection.auto_referrals
+    connection.auto_referrals = False  # disable auto referrals because referral chasing cannot handle paged searches
+    cookie = True  # perform the search operation at least once
    +    cachekey = None  # for referrals cache
         while cookie:
             result = connection.search(search_base,
                                        search_filter,
    @@ -69,13 +73,19 @@ def paged_search_generator(connection,
                 response = connection.response
                 result = connection.result
     
+        if result['referrals'] and original_auto_referrals:  # if referrals are returned, start the loop over with a new connection to the referral
    +            if not original_connection:
    +                original_connection = connection
+            _, connection, cachekey = connection.strategy.create_referral_connection(result['referrals'])   # switch the connection to a valid referral server
    +            continue
    +
             responses.extend(response)
             try:
                 cookie = result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
             except KeyError:
                 cookie = None
     
    -        if result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
    +        if connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
                 if log_enabled(PROTOCOL):
                     log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection)
                 if result['result'] == RESULT_SIZE_LIMIT_EXCEEDED:
    @@ -86,6 +96,14 @@ def paged_search_generator(connection,
             while responses:
                 yield responses.pop()
     
    +    if original_connection:
    +        connection = original_connection
    +        if connection.use_referral_cache and cachekey:
    +            connection.strategy.referral_cache[cachekey] = connection
    +        else:
    +            connection.unbind()
    +
    +    connection.auto_referrals = original_auto_referrals
         connection.response = None
     
     
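The hunk above changes paged_search_generator to chase referrals itself: auto_referrals is switched off for the duration of the paged search, referral responses get a dedicated connection, and the original connection (and referral cache) is restored once the generator is exhausted. A minimal usage sketch of the public wrapper around this generator (server address, credentials and base DN are hypothetical):

    from ldap3 import Server, Connection, SUBTREE

    # assumption: a reachable directory and valid simple-bind credentials
    conn = Connection(Server('ldap://ldap.example.com'),
                      'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
    entries = conn.extend.standard.paged_search('dc=example,dc=com',
                                                '(objectClass=person)',
                                                search_scope=SUBTREE,
                                                attributes=['cn'],
                                                paged_size=100,
                                                generator=True)
    for entry in entries:  # pages are fetched lazily via the 1.2.840.113556.1.4.319 cookie
        print(entry['dn'])
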
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/standard/PersistentSearch.py b/server/www/packages/packages-windows/x86/ldap3/extend/standard/PersistentSearch.py
    index 62286e1..b25ec68 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/standard/PersistentSearch.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/standard/PersistentSearch.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -80,7 +80,8 @@ class PersistentSearch(object):
             else:
                 self.controls = controls
     
    -        self.controls.append(persistent_search_control(events_type, changes_only, notifications))
    +        if events_type and changes_only and notifications:
    +            self.controls.append(persistent_search_control(events_type, changes_only, notifications))
             self.start()
     
         def start(self):
    @@ -101,9 +102,10 @@ class PersistentSearch(object):
                                                          controls=self.controls)
                 self.connection.strategy.persistent_search_message_id = self.message_id
     
    -    def stop(self):
    +    def stop(self, unbind=True):
             self.connection.abandon(self.message_id)
    -        self.connection.unbind()
    +        if unbind:
    +            self.connection.unbind()
             if self.message_id in self.connection.strategy._responses:
                 del self.connection.strategy._responses[self.message_id]
         if hasattr(self.connection.strategy, '_requests') and self.message_id in self.connection.strategy._requests:  # asynchronous strategies keep a dict of requests that could be returned by get_response()
    @@ -111,11 +113,25 @@ class PersistentSearch(object):
             self.connection.strategy.persistent_search_message_id = None
             self.message_id = None
     
    -    def next(self):
    +    def next(self, block=False, timeout=None):
             if not self.connection.strategy.streaming and not self.connection.strategy.callback:
                 try:
    -                return self.connection.strategy.events.get_nowait()
    +                return self.connection.strategy.events.get(block, timeout)
                 except Empty:
                     return None
     
             raise LDAPExtensionError('Persistent search is not accumulating events in queue')
    +
    +    def funnel(self, block=False, timeout=None):
    +        done = False
    +        while not done:
+            try:
+                entry = self.connection.strategy.events.get(block, timeout)
+            except Empty:
+                yield None
+                continue  # no event yet: 'entry' would be unbound below
+            if entry['type'] == 'searchResEntry':
    +                yield entry
    +            else:
    +                done = True
    +
    +        yield entry
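
With the changes above, next() can block on the event queue with an optional timeout, stop() can leave the connection bound, and funnel() yields entries until a non-entry message arrives. A sketch, assuming a directory that supports the persistent search control and a connection set up for it (names are illustrative):

    # monitor changes under a hypothetical base DN
    monitor = conn.extend.standard.persistent_search('dc=example,dc=com',
                                                     '(objectClass=person)',
                                                     streaming=False)
    event = monitor.next(block=True, timeout=5)  # wait up to 5 seconds for one event
    if event:
        print(event['dn'])
    monitor.stop(unbind=False)  # new: abandon the search but keep the connection bound
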
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/standard/modifyPassword.py b/server/www/packages/packages-windows/x86/ldap3/extend/standard/modifyPassword.py
    index 167816e..7837355 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/standard/modifyPassword.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/standard/modifyPassword.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -67,6 +67,6 @@ class ModifyPassword(ExtendedOperation):
                     self.result[self.response_attribute] = True
                 else:  # change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result']
                     self.result[self.response_attribute] = False
    -                if not self.connection.raise_exceptions:
    +                if self.connection.raise_exceptions:
                         from ...core.exceptions import LDAPOperationResult
                         raise LDAPOperationResult(result=self.result['result'], description=self.result['description'], dn=self.result['dn'], message=self.result['message'], response_type=self.result['type'])
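
The fix above inverts the test so that a failed password modify raises LDAPOperationResult only when the connection was opened with raise_exceptions=True; otherwise the operation result is simply reported as False. A sketch (DN and passwords are placeholders):

    # with raise_exceptions=False (the default) a failure returns False instead of raising
    ok = conn.extend.standard.modify_password('cn=user,dc=example,dc=com',
                                              old_password='old-secret',
                                              new_password='new-secret')
    print(ok, conn.result['description'])
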
    diff --git a/server/www/packages/packages-windows/x86/ldap3/extend/standard/whoAmI.py b/server/www/packages/packages-windows/x86/ldap3/extend/standard/whoAmI.py
    index 0eda5c4..a6c08a8 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/extend/standard/whoAmI.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/extend/standard/whoAmI.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -24,11 +24,9 @@
 # If not, see <http://www.gnu.org/licenses/>.
     
     # implements RFC4532
    -from pyasn1.type.univ import NoValue
    -
     from ...extend.operation import ExtendedOperation
     from ...utils.conv import to_unicode
    -from ...protocol.rfc4511 import OctetString
    +
     
     class WhoAmI(ExtendedOperation):
         def config(self):
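
WhoAmI implements the RFC 4532 "Who am I?" extended operation; the hunk above only drops imports the class never used. A usage sketch:

    # returns the authorization identity of the bound user (format is server-dependent)
    authz_id = conn.extend.standard.who_am_i()
    print(authz_id)  # e.g. 'dn:cn=admin,dc=example,dc=com'
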
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/abandon.py b/server/www/packages/packages-windows/x86/ldap3/operation/abandon.py
    index ccc3e88..66fcb6c 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/abandon.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/abandon.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/add.py b/server/www/packages/packages-windows/x86/ldap3/operation/add.py
    index a08e463..d0b95b4 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/add.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/add.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/bind.py b/server/www/packages/packages-windows/x86/ldap3/operation/bind.py
    index 0eecc4e..43ad1fb 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/bind.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/bind.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -122,7 +122,7 @@ def bind_response_to_dict(response):
                 'description': ResultCode().getNamedValues().getName(response['resultCode']),
                 'dn': str(response['matchedDN']),
                 'message': str(response['diagnosticMessage']),
    -            'referrals': referrals_to_list(response['referral']),
    +            'referrals': referrals_to_list(response['referral']) if response['referral'] is not None and response['referral'].hasValue() else [],
                 'saslCreds': bytes(response['serverSaslCreds']) if response['serverSaslCreds'] is not None and response['serverSaslCreds'].hasValue() else None}
     
     
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/compare.py b/server/www/packages/packages-windows/x86/ldap3/operation/compare.py
    index 5ee03d5..2232f61 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/compare.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/compare.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/delete.py b/server/www/packages/packages-windows/x86/ldap3/operation/delete.py
    index df0aee8..2db40f4 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/delete.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/delete.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/extended.py b/server/www/packages/packages-windows/x86/ldap3/operation/extended.py
    index 3bbdd87..4b1ebc7 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/extended.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/extended.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/modify.py b/server/www/packages/packages-windows/x86/ldap3/operation/modify.py
    index 363e1ef..31867e9 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/modify.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/modify.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/modifyDn.py b/server/www/packages/packages-windows/x86/ldap3/operation/modifyDn.py
    index 174bb36..73c6da3 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/modifyDn.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/modifyDn.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/search.py b/server/www/packages/packages-windows/x86/ldap3/operation/search.py
    index 7cf2fb3..b78d86d 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/search.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/search.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -38,7 +38,7 @@ from ..operation.bind import referrals_to_list
     from ..protocol.convert import ava_to_dict, attributes_to_list, search_refs_to_list, validate_assertion_value, prepare_filter_for_sending, search_refs_to_list_fast
     from ..protocol.formatters.standard import format_attribute_values
     from ..utils.conv import to_unicode, to_raw
    -
    +from pyasn1.error import PyAsn1UnicodeDecodeError
     
     ROOT = 0
     AND = 1
    @@ -379,8 +379,10 @@ def search_operation(search_base,
     
     
     def decode_vals(vals):
    -    return [str(val) for val in vals if val] if vals else None
    -
    +    try:
    +        return [str(val) for val in vals if val] if vals else None
    +    except PyAsn1UnicodeDecodeError:
    +        return decode_raw_vals(vals)
     
     def decode_vals_fast(vals):
         try:
    @@ -393,8 +395,7 @@ def attributes_to_dict(attribute_list):
         conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
         attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
         for attribute in attribute_list:
    -        attributes[str(attribute['type'])] = decode_vals(attribute['vals'])
    -
+        attributes[str(attribute['type'])] = decode_vals(attribute['vals'])
         return attributes
     
     
    @@ -525,10 +526,11 @@ def search_result_entry_response_to_dict(response, schema, custom_formatter, che
         entry = dict()
         # entry['dn'] = str(response['object'])
         if response['object']:
    -        entry['raw_dn'] = to_raw(response['object'])
             if isinstance(response['object'], STRING_TYPES):  # mock strategies return string not a PyAsn1 object
    +            entry['raw_dn'] = to_raw(response['object'])
                 entry['dn'] = to_unicode(response['object'])
             else:
    +            entry['raw_dn'] = str(response['object'])
                 entry['dn'] = to_unicode(bytes(response['object']), from_server=True)
         else:
             entry['raw_dn'] = b''
    @@ -555,6 +557,8 @@ def search_result_done_response_to_dict(response):
                 result['controls'][control[0]] = control[1]
     
         return result
    +
    +
     def search_result_reference_response_to_dict(response):
         return {'uri': search_refs_to_list(response)}
     
    diff --git a/server/www/packages/packages-windows/x86/ldap3/operation/unbind.py b/server/www/packages/packages-windows/x86/ldap3/operation/unbind.py
    index 6f1e713..4d418fb 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/operation/unbind.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/operation/unbind.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/controls.py b/server/www/packages/packages-windows/x86/ldap3/protocol/controls.py
    index 197777e..658867b 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/controls.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/controls.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/convert.py b/server/www/packages/packages-windows/x86/ldap3/protocol/convert.py
    index 319f36d..af3a6f8 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/convert.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/convert.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -37,6 +37,7 @@ def attribute_to_dict(attribute):
         except PyAsn1Error:  # invalid encoding, return bytes value
             return {'type': str(attribute['type']), 'values': [bytes(val) for val in attribute['vals']]}
     
    +
     def attributes_to_dict(attributes):
         attributes_dict = dict()
         for attribute in attributes:
    @@ -46,7 +47,10 @@ def attributes_to_dict(attributes):
     
     
     def referrals_to_list(referrals):
    -    return [str(referral) for referral in referrals if referral] if referrals else None
    +    if isinstance(referrals, list):
    +        return [str(referral) for referral in referrals if referral] if referrals else None
    +    else:
    +        return [str(referral) for referral in referrals if referral] if referrals is not None and referrals.hasValue() else None
     
     
     def search_refs_to_list(search_refs):
    @@ -93,6 +97,7 @@ def ava_to_dict(ava):
             except Exception:
                 return {'attribute': str(ava['attributeDesc']), 'value': bytes(ava['assertionValue'])}
     
    +
     def substring_to_dict(substring):
         return {'initial': substring['initial'] if substring['initial'] else '', 'any': [middle for middle in substring['any']] if substring['any'] else '', 'final': substring['final'] if substring['final'] else ''}
     
    @@ -183,7 +188,7 @@ def prepare_filter_for_sending(raw_string):
         ints = []
         raw_string = to_raw(raw_string)
         while i < len(raw_string):
    -        if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2:  # 92 is backslash
    +        if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2:  # 92 (0x5C) is backslash
                 try:
                     ints.append(int(raw_string[i + 1: i + 3], 16))
                     i += 2
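
The comment fix above pins down that byte 92 (0x5C, backslash) introduces the \XX hex escapes that prepare_filter_for_sending converts back into raw bytes before the filter goes on the wire. Producing such escapes on the caller side is done with the library's filter-escaping helper (a usage sketch):

    from ldap3.utils.conv import escape_filter_chars

    # RFC 4515 requires *, (, ), \ and NUL in assertion values to be sent as \XX pairs
    value = escape_filter_chars('a*b(c)d\\e')
    search_filter = '(cn=%s)' % value  # e.g. '(cn=a\2ab\28c\29d\5ce)'
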
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/formatters.py b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/formatters.py
    index 36cd9c8..2638d52 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/formatters.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/formatters.py
    @@ -1,407 +1,436 @@
    -"""
    -"""
    -
    -# Created on 2014.10.28
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -import re
    -
    -from binascii import hexlify
    -from uuid import UUID
    -from datetime import datetime, timedelta
    -from ...utils.conv import to_unicode
    -
    -from ...core.timezone import OffsetTzInfo
    -
    -def format_unicode(raw_value):
    -    try:
    -        if str is not bytes:  # Python 3
    -            return str(raw_value, 'utf-8', errors='strict')
    -        else:  # Python 2
    -            return unicode(raw_value, 'utf-8', errors='strict')
    -    except (TypeError, UnicodeDecodeError):
    -        pass
    -
    -    return raw_value
    -
    -
    -def format_integer(raw_value):
    -    try:
    -        return int(raw_value)
    -    except (TypeError, ValueError):  # expected exceptions
    -        pass
    -    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    -        pass
    -
    -    return raw_value
    -
    -
    -def format_binary(raw_value):
    -    try:
    -        return bytes(raw_value)
    -    except TypeError:  # expected exceptions
    -        pass
    -    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    -        pass
    -
    -    return raw_value
    -
    -
    -def format_uuid(raw_value):
    -    try:
    -        return str(UUID(bytes=raw_value))
    -    except (TypeError, ValueError):
    -        return format_unicode(raw_value)
    -    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    -        pass
    -
    -    return raw_value
    -
    -
    -def format_uuid_le(raw_value):
    -    try:
    -        return '{' + str(UUID(bytes_le=raw_value)) + '}'
    -    except (TypeError, ValueError):
    -        return format_unicode(raw_value)
    -    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    -        pass
    -
    -    return raw_value
    -
    -
    -def format_boolean(raw_value):
    -    if raw_value in [b'TRUE', b'true', b'True']:
    -        return True
    -    if raw_value in [b'FALSE', b'false', b'False']:
    -        return False
    -
    -    return raw_value
    -
    -
    -def format_ad_timestamp(raw_value):
    -    """
    -    Active Directory stores date/time values as the number of 100-nanosecond intervals
    -    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
    -    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
    -    """
    -    if raw_value == b'9223372036854775807':  # max value to be stored in a 64 bit signed int
    -        return datetime.max  # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999)
    -    try:
    -        timestamp = int(raw_value)
    -        if timestamp < 0:  # ad timestamp cannot be negative
    -            return raw_value
    -    except Exception:
    -        return raw_value
    -
    -    try:
    -        return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600, tz=OffsetTzInfo(0, 'UTC'))  # forces true division in python 2
    -    except (OSError, OverflowError, ValueError):  # on Windows backwards timestamps are not allowed
    -        try:
    -            unix_epoch = datetime.fromtimestamp(0, tz=OffsetTzInfo(0, 'UTC'))
    -            diff_seconds = timedelta(seconds=timestamp/10000000.0 - 11644473600)
    -            return unix_epoch + diff_seconds
    -        except Exception:
    -            pass
    -    except Exception:
    -        pass
    -
    -    return raw_value
    -
    -
    -try:  # uses regular expressions and the timezone class (python3.2 and later)
    -    from datetime import timezone
    -    time_format = re.compile(
    -        r'''
    -        ^
-        (?P<Year>[0-9]{4})
-        (?P<Month>0[1-9]|1[0-2])
-        (?P<Day>0[1-9]|[12][0-9]|3[01])
-        (?P<Hour>[01][0-9]|2[0-3])
-        (?:
-          (?P<Minute>[0-5][0-9])
-          (?P<Second>[0-5][0-9]|60)?
-        )?
-        (?:
-          [.,]
-          (?P<Fraction>[0-9]+)
-        )?
-        (?:
-          Z
-          |
-          (?:
-            (?P<Offset>[+-])
-            (?P<OffHour>[01][0-9]|2[0-3])
-            (?P<OffMinute>[0-5][0-9])?
    -          )
    -        )
    -        $
    -        ''',
    -        re.VERBOSE
    -    )
    -
    -    def format_time(raw_value):
    -        try:
    -            match = time_format.fullmatch(to_unicode(raw_value))
    -            if match is None:
    -                return raw_value
    -            matches = match.groupdict()
    -
    -            offset = timedelta(
    -                hours=int(matches['OffHour'] or 0),
    -                minutes=int(matches['OffMinute'] or 0)
    -            )
    -
    -            if matches['Offset'] == '-':
    -                offset *= -1
    -
    -            # Python does not support leap second in datetime (!)
    -            if matches['Second'] == '60':
    -                matches['Second'] = '59'
    -
    -            # According to RFC, fraction may be applied to an Hour/Minute (!)
    -            fraction = float('0.' + (matches['Fraction'] or '0'))
    -
    -            if matches['Minute'] is None:
    -                fraction *= 60
    -                minute = int(fraction)
    -                fraction -= minute
    -            else:
    -                minute = int(matches['Minute'])
    -
    -            if matches['Second'] is None:
    -                fraction *= 60
    -                second = int(fraction)
    -                fraction -= second
    -            else:
    -                second = int(matches['Second'])
    -
    -            microseconds = int(fraction * 1000000)
    -
    -            return datetime(
    -                int(matches['Year']),
    -                int(matches['Month']),
    -                int(matches['Day']),
    -                int(matches['Hour']),
    -                minute,
    -                second,
    -                microseconds,
    -                timezone(offset),
    -            )
    -        except Exception:  # exceptions should be investigated, anyway the formatter return the raw_value
    -            pass
    -        return raw_value
    -
    -except ImportError:
    -    def format_time(raw_value):
    -        """
    -        From RFC4517:
    -        A value of the Generalized Time syntax is a character string
    -        representing a date and time. The LDAP-specific encoding of a value
    -        of this syntax is a restriction of the format defined in [ISO8601],
    -        and is described by the following ABNF:
    -    
    -        GeneralizedTime = century year month day hour
    -                           [ minute [ second / leap-second ] ]
    -                           [ fraction ]
    -                           g-time-zone
    -    
    -        century = 2(%x30-39) ; "00" to "99"
    -        year    = 2(%x30-39) ; "00" to "99"
    -        month   =   ( %x30 %x31-39 ) ; "01" (January) to "09"
    -                / ( %x31 %x30-32 ) ; "10" to "12"
    -        day     =   ( %x30 %x31-39 )    ; "01" to "09"
    -                / ( %x31-32 %x30-39 ) ; "10" to "29"
    -                / ( %x33 %x30-31 )    ; "30" to "31"
    -        hour    = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23"
    -        minute  = %x30-35 %x30-39                        ; "00" to "59"
    -        second      = ( %x30-35 %x30-39 ) ; "00" to "59"
    -        leap-second = ( %x36 %x30 )       ; "60"
    -        fraction        = ( DOT / COMMA ) 1*(%x30-39)
    -        g-time-zone     = %x5A  ; "Z"
    -                        / g-differential
    -        g-differential  = ( MINUS / PLUS ) hour [ minute ]
    -            MINUS           = %x2D  ; minus sign ("-")
    -        """
    -
    -        if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (b'Z' in raw_value and not raw_value.endswith(b'Z')):  # first ten characters are mandatory and must be numeric or timezone or fraction
    -            return raw_value
    -
    -        # sets position for fixed values
    -        year = int(raw_value[0: 4])
    -        month = int(raw_value[4: 6])
    -        day = int(raw_value[6: 8])
    -        hour = int(raw_value[8: 10])
    -        minute = 0
    -        second = 0
    -        microsecond = 0
    -
    -        remain = raw_value[10:]
    -        if remain and remain.endswith(b'Z'):  # uppercase 'Z'
    -            sep = b'Z'
    -        elif b'+' in remain:  # timezone can be specified with +hh[mm] or -hh[mm]
    -            sep = b'+'
    -        elif b'-' in remain:
    -            sep = b'-'
    -        else:  # timezone not specified
    -            return raw_value
    -
    -        time, _, offset = remain.partition(sep)
    -
    -        if time and (b'.' in time or b',' in time):
    -            # fraction time
    -            if time[0] in b',.':
    -                minute = 6 * int(time[1] if str is bytes else chr(time[1]))  # Python 2 / Python 3
    -            elif time[2] in b',.':
    -                minute = int(raw_value[10: 12])
    -                second = 6 * int(time[3] if str is bytes else chr(time[3]))  # Python 2 / Python 3
    -            elif time[4] in b',.':
    -                minute = int(raw_value[10: 12])
    -                second = int(raw_value[12: 14])
    -                microsecond = 100000 * int(time[5] if str is bytes else chr(time[5]))  # Python 2 / Python 3
    -        elif len(time) == 2:  # mmZ format
    -            minute = int(raw_value[10: 12])
    -        elif len(time) == 0:  # Z format
    -            pass
    -        elif len(time) == 4:  # mmssZ
    -            minute = int(raw_value[10: 12])
    -            second = int(raw_value[12: 14])
    -        else:
    -            return raw_value
    -
    -        if sep == b'Z':  # UTC
    -            timezone = OffsetTzInfo(0, 'UTC')
    -        else:  # build timezone
    -            try:
    -                if len(offset) == 2:
    -                    timezone_hour = int(offset[:2])
    -                    timezone_minute = 0
    -                elif len(offset) == 4:
    -                    timezone_hour = int(offset[:2])
    -                    timezone_minute = int(offset[2:4])
    -                else:  # malformed timezone
    -                    raise ValueError
    -            except ValueError:
    -                return raw_value
    -            if timezone_hour > 23 or timezone_minute > 59:  # invalid timezone
    -                return raw_value
    -
    -            if str is not bytes:  # Python 3
    -                timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), 'UTC' + str(sep + offset, encoding='utf-8'))
    -            else:  # Python 2
    -                timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), unicode('UTC' + sep + offset, encoding='utf-8'))
    -
    -        try:
    -            return datetime(year=year,
    -                            month=month,
    -                            day=day,
    -                            hour=hour,
    -                            minute=minute,
    -                            second=second,
    -                            microsecond=microsecond,
    -                            tzinfo=timezone)
    -        except (TypeError, ValueError):
    -            pass
    -
    -        return raw_value
    -
    -
    -def format_time_with_0_year(raw_value):
    -    try:
    -        if raw_value.startswith(b'0000'):
    -            return raw_value
    -    except Exception:
    -        try:
    -            if raw_value.startswith('0000'):
    -                return raw_value
    -        except Exception:
    -            pass
    -
    -    return format_time(raw_value)
    -
    -
    -def format_sid(raw_value):
    -    """
    -    SID= "S-1-" IdentifierAuthority 1*SubAuthority
    -           IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
    -              ; If the identifier authority is < 2^32, the
    -              ; identifier authority is represented as a decimal
    -              ; number
    -              ; If the identifier authority is >= 2^32,
    -              ; the identifier authority is represented in
    -              ; hexadecimal
    -            IdentifierAuthorityDec =  1*10DIGIT
    -              ; IdentifierAuthorityDec, top level authority of a
    -              ; security identifier is represented as a decimal number
    -            IdentifierAuthorityHex = "0x" 12HEXDIG
    -              ; IdentifierAuthorityHex, the top-level authority of a
    -              ; security identifier is represented as a hexadecimal number
    -            SubAuthority= "-" 1*10DIGIT
    -              ; Sub-Authority is always represented as a decimal number
    -              ; No leading "0" characters are allowed when IdentifierAuthority
    -              ; or SubAuthority is represented as a decimal number
    -              ; All hexadecimal digits must be output in string format,
    -              ; pre-pended by "0x"
    -
    -    Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
    -    SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
    -    IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
    -    SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
    -    """
    -    try:
    -        if raw_value.startswith(b'S-1-'):
    -            return raw_value
    -    except Exception:
    -        try:
    -            if raw_value.startswith('S-1-'):
    -                return raw_value
    -        except Exception:
    -            pass
    -    try:
    -        if str is not bytes:  # Python 3
    -            revision = int(raw_value[0])
    -            sub_authority_count = int(raw_value[1])
    -            identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
    -            if identifier_authority >= 4294967296:  # 2 ^ 32
    -                identifier_authority = hex(identifier_authority)
    -
    -            sub_authority = ''
    -            i = 0
    -            while i < sub_authority_count:
    -                sub_authority += '-' + str(int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little'))  # little endian
    -                i += 1
    -        else:  # Python 2
    -            revision = int(ord(raw_value[0]))
    -            sub_authority_count = int(ord(raw_value[1]))
    -            identifier_authority = int(hexlify(raw_value[2:8]), 16)
    -            if identifier_authority >= 4294967296:  # 2 ^ 32
    -                identifier_authority = hex(identifier_authority)
    -
    -            sub_authority = ''
    -            i = 0
    -            while i < sub_authority_count:
    -                sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16))  # little endian
    -                i += 1
    -        return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
    -    except Exception:  # any exception should be investigated, anyway the formatter return the raw_value
    -        pass
    -
    -    return raw_value
    +"""
    +"""
    +
    +# Created on 2014.10.28
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +import re
    +
    +from binascii import hexlify
    +from uuid import UUID
    +from datetime import datetime, timedelta
    +from ...utils.conv import to_unicode
    +
    +from ...core.timezone import OffsetTzInfo
    +
    +
    +def format_unicode(raw_value):
    +    try:
    +        if str is not bytes:  # Python 3
    +            return str(raw_value, 'utf-8', errors='strict')
    +        else:  # Python 2
    +            return unicode(raw_value, 'utf-8', errors='strict')
    +    except (TypeError, UnicodeDecodeError):
    +        pass
    +
    +    return raw_value
    +
    +
    +def format_integer(raw_value):
    +    try:
    +        return int(raw_value)
    +    except (TypeError, ValueError):  # expected exceptions
    +        pass
    +    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    +        pass
    +
    +    return raw_value
    +
    +
    +def format_binary(raw_value):
    +    try:
    +        return bytes(raw_value)
    +    except TypeError:  # expected exceptions
    +        pass
    +    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    +        pass
    +
    +    return raw_value
    +
    +
    +def format_uuid(raw_value):
    +    try:
    +        return str(UUID(bytes=raw_value))
    +    except (TypeError, ValueError):
    +        return format_unicode(raw_value)
    +    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    +        pass
    +
    +    return raw_value
    +
    +
    +def format_uuid_le(raw_value):
    +    try:
    +        return '{' + str(UUID(bytes_le=raw_value)) + '}'
    +    except (TypeError, ValueError):
    +        return format_unicode(raw_value)
    +    except Exception:  # any other exception should be investigated, anyway the formatter return the raw_value
    +        pass
    +
    +    return raw_value
    +
    +
    +def format_boolean(raw_value):
    +    if raw_value in [b'TRUE', b'true', b'True']:
    +        return True
    +    if raw_value in [b'FALSE', b'false', b'False']:
    +        return False
    +
    +    return raw_value
    +
    +
    +def format_ad_timestamp(raw_value):
    +    """
    +    Active Directory stores date/time values as the number of 100-nanosecond intervals
    +    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
    +    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
    +    """
    +    utc_timezone = OffsetTzInfo(0, 'UTC')
    +    if raw_value == b'9223372036854775807':  # max value to be stored in a 64 bit signed int
    +        return datetime.max.replace(tzinfo=utc_timezone)  # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, tzinfo=OffsetTzInfo(offset=0, name='UTC'))
    +    try:
    +        timestamp = int(raw_value)
+        if timestamp < 0:  # negative values encode durations (see format_ad_timedelta); use the absolute value
    +            timestamp = timestamp * -1
    +    except Exception:
    +        return raw_value
    +
    +    try:
    +        return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600,
    +                                      tz=utc_timezone)  # forces true division in python 2
    +    except (OSError, OverflowError, ValueError):  # on Windows backwards timestamps are not allowed
    +        try:
    +            unix_epoch = datetime.fromtimestamp(0, tz=utc_timezone)
    +            diff_seconds = timedelta(seconds=timestamp / 10000000.0 - 11644473600)
    +            return unix_epoch + diff_seconds
    +        except Exception:
    +            pass
    +    except Exception:
    +        pass
    +
    +    return raw_value
    +
    +
    +try:  # uses regular expressions and the timezone class (python3.2 and later)
    +    from datetime import timezone
    +
    +    time_format = re.compile(
    +        r'''
    +        ^
+        (?P<Year>[0-9]{4})
+        (?P<Month>0[1-9]|1[0-2])
+        (?P<Day>0[1-9]|[12][0-9]|3[01])
+        (?P<Hour>[01][0-9]|2[0-3])
+        (?:
+          (?P<Minute>[0-5][0-9])
+          (?P<Second>[0-5][0-9]|60)?
+        )?
+        (?:
+          [.,]
+          (?P<Fraction>[0-9]+)
+        )?
+        (?:
+          Z
+          |
+          (?:
+            (?P<Offset>[+-])
+            (?P<OffHour>[01][0-9]|2[0-3])
+            (?P<OffMinute>[0-5][0-9])?
    +          )
    +        )
    +        $
    +        ''',
    +        re.VERBOSE
    +    )
    +
    +
    +    def format_time(raw_value):
    +        try:
    +            match = time_format.fullmatch(to_unicode(raw_value))
    +            if match is None:
    +                return raw_value
    +            matches = match.groupdict()
    +
    +            offset = timedelta(
    +                hours=int(matches['OffHour'] or 0),
    +                minutes=int(matches['OffMinute'] or 0)
    +            )
    +
    +            if matches['Offset'] == '-':
    +                offset *= -1
    +
    +            # Python does not support leap second in datetime (!)
    +            if matches['Second'] == '60':
    +                matches['Second'] = '59'
    +
    +            # According to RFC, fraction may be applied to an Hour/Minute (!)
    +            fraction = float('0.' + (matches['Fraction'] or '0'))
    +
    +            if matches['Minute'] is None:
    +                fraction *= 60
    +                minute = int(fraction)
    +                fraction -= minute
    +            else:
    +                minute = int(matches['Minute'])
    +
    +            if matches['Second'] is None:
    +                fraction *= 60
    +                second = int(fraction)
    +                fraction -= second
    +            else:
    +                second = int(matches['Second'])
    +
    +            microseconds = int(fraction * 1000000)
    +
    +            return datetime(
    +                int(matches['Year']),
    +                int(matches['Month']),
    +                int(matches['Day']),
    +                int(matches['Hour']),
    +                minute,
    +                second,
    +                microseconds,
    +                timezone(offset),
    +            )
    +        except Exception:  # exceptions should be investigated, anyway the formatter return the raw_value
    +            pass
    +        return raw_value
    +
    +except ImportError:
    +    def format_time(raw_value):
    +        """
    +        From RFC4517:
    +        A value of the Generalized Time syntax is a character string
    +        representing a date and time. The LDAP-specific encoding of a value
    +        of this syntax is a restriction of the format defined in [ISO8601],
    +        and is described by the following ABNF:
    +
    +        GeneralizedTime = century year month day hour
    +                           [ minute [ second / leap-second ] ]
    +                           [ fraction ]
    +                           g-time-zone
    +
    +        century = 2(%x30-39) ; "00" to "99"
    +        year    = 2(%x30-39) ; "00" to "99"
    +        month   =   ( %x30 %x31-39 ) ; "01" (January) to "09"
    +                / ( %x31 %x30-32 ) ; "10" to "12"
    +        day     =   ( %x30 %x31-39 )    ; "01" to "09"
    +                / ( %x31-32 %x30-39 ) ; "10" to "29"
    +                / ( %x33 %x30-31 )    ; "30" to "31"
    +        hour    = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23"
    +        minute  = %x30-35 %x30-39                        ; "00" to "59"
    +        second      = ( %x30-35 %x30-39 ) ; "00" to "59"
    +        leap-second = ( %x36 %x30 )       ; "60"
    +        fraction        = ( DOT / COMMA ) 1*(%x30-39)
    +        g-time-zone     = %x5A  ; "Z"
    +                        / g-differential
    +        g-differential  = ( MINUS / PLUS ) hour [ minute ]
    +            MINUS           = %x2D  ; minus sign ("-")
    +        """
    +
    +        if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (
    +                b'Z' in raw_value and not raw_value.endswith(
    +                b'Z')):  # first ten characters are mandatory and must be numeric or timezone or fraction
    +            return raw_value
    +
    +        # sets position for fixed values
    +        year = int(raw_value[0: 4])
    +        month = int(raw_value[4: 6])
    +        day = int(raw_value[6: 8])
    +        hour = int(raw_value[8: 10])
    +        minute = 0
    +        second = 0
    +        microsecond = 0
    +
    +        remain = raw_value[10:]
    +        if remain and remain.endswith(b'Z'):  # uppercase 'Z'
    +            sep = b'Z'
    +        elif b'+' in remain:  # timezone can be specified with +hh[mm] or -hh[mm]
    +            sep = b'+'
    +        elif b'-' in remain:
    +            sep = b'-'
    +        else:  # timezone not specified
    +            return raw_value
    +
    +        time, _, offset = remain.partition(sep)
    +
    +        if time and (b'.' in time or b',' in time):
    +            # fraction time
    +            if time[0] in b',.':
    +                minute = 6 * int(time[1] if str is bytes else chr(time[1]))  # Python 2 / Python 3
    +            elif time[2] in b',.':
    +                minute = int(raw_value[10: 12])
    +                second = 6 * int(time[3] if str is bytes else chr(time[3]))  # Python 2 / Python 3
    +            elif time[4] in b',.':
    +                minute = int(raw_value[10: 12])
    +                second = int(raw_value[12: 14])
    +                microsecond = 100000 * int(time[5] if str is bytes else chr(time[5]))  # Python 2 / Python 3
    +        elif len(time) == 2:  # mmZ format
    +            minute = int(raw_value[10: 12])
    +        elif len(time) == 0:  # Z format
    +            pass
    +        elif len(time) == 4:  # mmssZ
    +            minute = int(raw_value[10: 12])
    +            second = int(raw_value[12: 14])
    +        else:
    +            return raw_value
    +
    +        if sep == b'Z':  # UTC
    +            timezone = OffsetTzInfo(0, 'UTC')
    +        else:  # build timezone
    +            try:
    +                if len(offset) == 2:
    +                    timezone_hour = int(offset[:2])
    +                    timezone_minute = 0
    +                elif len(offset) == 4:
    +                    timezone_hour = int(offset[:2])
    +                    timezone_minute = int(offset[2:4])
    +                else:  # malformed timezone
    +                    raise ValueError
    +            except ValueError:
    +                return raw_value
    +            if timezone_hour > 23 or timezone_minute > 59:  # invalid timezone
    +                return raw_value
    +
    +            if str is not bytes:  # Python 3
    +                timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1),
    +                                        'UTC' + str(sep + offset, encoding='utf-8'))
    +            else:  # Python 2
    +                timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1),
    +                                        unicode('UTC' + sep + offset, encoding='utf-8'))
    +
    +        try:
    +            return datetime(year=year,
    +                            month=month,
    +                            day=day,
    +                            hour=hour,
    +                            minute=minute,
    +                            second=second,
    +                            microsecond=microsecond,
    +                            tzinfo=timezone)
    +        except (TypeError, ValueError):
    +            pass
    +
    +        return raw_value
    +
    +
    +def format_ad_timedelta(raw_value):
    +    """
    +    Convert a negative filetime value to a timedelta.
    +    """
    +    # Active Directory stores attributes like "minPwdAge" as a negative
    +    # "filetime" timestamp, which is the number of 100-nanosecond intervals that
    +    # have elapsed since the 0 hour on January 1, 1601.
    +    #
    +    # Handle the minimum value that can be stored in a 64 bit signed integer.
    +    # See https://docs.microsoft.com/en-us/dotnet/api/system.int64.minvalue
    +    # In attributes like "maxPwdAge", this signifies never.
    +    if raw_value == b'-9223372036854775808':
    +        return timedelta.max
    +    # We can reuse format_ad_timestamp to get a datetime object from the
    +    # timestamp. Afterwards, we can subtract a datetime representing 0 hour on
    +    # January 1, 1601 from the returned datetime to get the timedelta.
    +    return format_ad_timestamp(raw_value) - format_ad_timestamp(0)
    +
    +
    +def format_time_with_0_year(raw_value):
    +    try:
    +        if raw_value.startswith(b'0000'):
    +            return raw_value
    +    except Exception:
    +        try:
    +            if raw_value.startswith('0000'):
    +                return raw_value
    +        except Exception:
    +            pass
    +
    +    return format_time(raw_value)
    +
    +
    +def format_sid(raw_value):
    +    """
    +    SID= "S-1-" IdentifierAuthority 1*SubAuthority
    +           IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
    +              ; If the identifier authority is < 2^32, the
    +              ; identifier authority is represented as a decimal
    +              ; number
    +              ; If the identifier authority is >= 2^32,
    +              ; the identifier authority is represented in
    +              ; hexadecimal
    +            IdentifierAuthorityDec =  1*10DIGIT
    +              ; IdentifierAuthorityDec, top level authority of a
    +              ; security identifier is represented as a decimal number
    +            IdentifierAuthorityHex = "0x" 12HEXDIG
    +              ; IdentifierAuthorityHex, the top-level authority of a
    +              ; security identifier is represented as a hexadecimal number
    +            SubAuthority= "-" 1*10DIGIT
    +              ; Sub-Authority is always represented as a decimal number
    +              ; No leading "0" characters are allowed when IdentifierAuthority
    +              ; or SubAuthority is represented as a decimal number
    +              ; All hexadecimal digits must be output in string format,
    +              ; pre-pended by "0x"
    +
    +    Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
    +    SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
    +    IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
    +    SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
    +    """
    +    try:
    +        if raw_value.startswith(b'S-1-'):
    +            return raw_value
    +    except Exception:
    +        try:
    +            if raw_value.startswith('S-1-'):
    +                return raw_value
    +        except Exception:
    +            pass
    +    try:
    +        if str is not bytes:  # Python 3
    +            revision = int(raw_value[0])
    +            sub_authority_count = int(raw_value[1])
    +            identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
    +            if identifier_authority >= 4294967296:  # 2 ^ 32
    +                identifier_authority = hex(identifier_authority)
    +
    +            sub_authority = ''
    +            i = 0
    +            while i < sub_authority_count:
    +                sub_authority += '-' + str(
    +                    int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little'))  # little endian
    +                i += 1
    +        else:  # Python 2
    +            revision = int(ord(raw_value[0]))
    +            sub_authority_count = int(ord(raw_value[1]))
    +            identifier_authority = int(hexlify(raw_value[2:8]), 16)
    +            if identifier_authority >= 4294967296:  # 2 ^ 32
    +                identifier_authority = hex(identifier_authority)
    +
    +            sub_authority = ''
    +            i = 0
    +            while i < sub_authority_count:
    +                sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16))  # little endian
    +                i += 1
    +        return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
    +    except Exception:  # any exception should be investigated, anyway the formatter return the raw_value
    +        pass
    +
    +    return raw_value
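
The new format_ad_timedelta leans on the arithmetic already in format_ad_timestamp: an AD filetime counts 100-nanosecond ticks since 1601-01-01 UTC, and 11644473600 is the number of seconds between that epoch and the Unix epoch. A worked sketch of the same conversion (the sample values are hypothetical):

    from datetime import datetime, timedelta, timezone

    EPOCH_GAP_S = 11644473600  # seconds from 1601-01-01 to 1970-01-01

    filetime = 132223104000000000  # 100-ns ticks since 1601-01-01
    dt = datetime.fromtimestamp(filetime / 10000000.0 - EPOCH_GAP_S, tz=timezone.utc)
    print(dt)  # 2020-01-01 00:00:00+00:00

    # durations such as minPwdAge are stored as negated tick counts;
    # abs(-864000000000) ticks = 86400 s = 1 day
    print(timedelta(seconds=abs(-864000000000) / 10000000.0))  # 1 day, 0:00:00
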
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/standard.py b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/standard.py
    index 77f7b2e..42f6c26 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/standard.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/standard.py
    @@ -1,232 +1,238 @@
    -"""
    -"""
    -
    -# Created on 2014.10.28
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from ... import SEQUENCE_TYPES
    -from .formatters import format_ad_timestamp, format_binary, format_boolean,\
    -    format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year
    -from .validators import validate_integer, validate_time, always_valid,\
    -    validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\
    -    validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year
    -
    -# for each syntax can be specified a format function and a input validation function
    -
    -standard_formatter = {
    -    '1.2.840.113556.1.4.903': (format_binary, None),  # Object (DN-binary) - Microsoft
    -    '1.2.840.113556.1.4.904': (format_unicode, None),  # Object (DN-string) - Microsoft
    -    '1.2.840.113556.1.4.905': (format_unicode, None),  # String (Teletex) - Microsoft
    -    '1.2.840.113556.1.4.906': (format_integer, validate_integer),  # Large integer - Microsoft
    -    '1.2.840.113556.1.4.907': (format_binary, None),  # String (NT-sec-desc) - Microsoft
    -    '1.2.840.113556.1.4.1221': (format_binary, None),  # Object (OR-name) - Microsoft
    -    '1.2.840.113556.1.4.1362': (format_unicode, None),  # String (Case) - Microsoft
    -    '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None),  # ACI item [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None),  # Access point [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None),  # Attribute type description
    -    '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None),  # Audio [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None),  # Binary [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None),  # Bit String
    -    '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean),  # Boolean
    -    '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None),  # Certificate [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None),  # Certificate List [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None),  # Certificate Pair [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None),  # Country String
    -    '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None),  # Distinguished name (DN)
    -    '1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None),  # Data Quality Syntax [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None),  # Delivery method
    -    '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None),  # Directory string
    -    '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None),  # DIT Content Rule Description
    -    '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None),  # DIT Structure Rule Description
    -    '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None),  # DL Submit Permission [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None),  # DSA Quality Syntax [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None),  # DSE Type [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None),  # Enhanced Guide
    -    '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None),  # Facsimile Telephone Number
    -    '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None),  # Fax
    -    '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time),  # Generalized time
    -    '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None),  # Guide [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None),  # IA5 string
    -    '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer),  # Integer
    -    '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None),  # JPEG
    -    '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None),  # Master and Shadow Access Points [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None),  # Matching rule description
    -    '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None),  # Matching rule use description
    -    '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None),  # Mail Preference [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None),  # MHS OR Address [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None),  # Name and optional UID
    -    '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None),  # Name form description
    -    '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None),  # Numeric string
    -    '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None),  # Object class description
    -    '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None),  # OID
    -    '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None),  # Other mailbox
    -    '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None),  # Octet string
    -    '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None),  # Postal address
    -    '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None),  # Protocol Information [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None),  # Presentation Address [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None),  # Printable string
    -    '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None),  # Subtree specification [OBSOLETE
    -    '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None),  # Supplier Information [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None),  # Supplier Or Consumer [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None),  # Supplier And Consumer [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None),  # Supported Algorithm [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None),  # Telephone number
    -    '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None),  # Teletex terminal identifier
    -    '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None),  # Teletex number
    -    '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time),  # Utc time  (deprecated)
    -    '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None),  # LDAP syntax description
    -    '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None),  # Modify rights [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None),  # LDAP Schema Definition [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None),  # LDAP Schema Description [OBSOLETE]
    -    '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None),  # Substring assertion
    -    '1.3.6.1.1.16.1': (format_uuid, validate_uuid),  # UUID
    -    '1.3.6.1.1.16.4': (format_uuid, validate_uuid),  # entryUUID (RFC 4530)
    -    '2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid),  # GUID (Novell)
    -    '2.16.840.1.113719.1.1.5.1.0': (format_binary, None),  # Unknown (Novell)
    -    '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None),  # Case Ignore List (Novell)
    -    '2.16.840.1.113719.1.1.5.1.12': (format_binary, None),  # Tagged Data (Novell)
    -    '2.16.840.1.113719.1.1.5.1.13': (format_binary, None),  # Octet List (Novell)
    -    '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None),  # Tagged String (Novell)
    -    '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None),  # Tagged Name And String (Novell)
    -    '2.16.840.1.113719.1.1.5.1.16': (format_binary, None),  # NDS Replica Pointer (Novell)
    -    '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None),  # NDS ACL (Novell)
    -    '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time),  # NDS Timestamp (Novell)
    -    '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer),  # Counter (Novell)
    -    '2.16.840.1.113719.1.1.5.1.23': (format_unicode, None),  # Tagged Name (Novell)
    -    '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None),  # Typed Name (Novell)
    -    'supportedldapversion': (format_integer, None),  # supportedLdapVersion (Microsoft)
    -    'octetstring': (format_binary, validate_uuid_le),  # octect string (Microsoft)
    -    '1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le),  # object guid (Microsoft)
    -    '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp),  # builtinCreationTime (Microsoft)
    -    '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp),  # creationTime (Microsoft)
    -    '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp),  # badPasswordTime (Microsoft)
    -    '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp),  # lastLogoff (Microsoft)
    -    '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp),  # lastLogon (Microsoft)
    -    '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int),  # pwdLastSet (Microsoft, can be set to -1 only)
    -    '1.2.840.113556.1.4.146': (format_sid, validate_sid),  # objectSid (Microsoft)
    -    '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp),  # accountExpires (Microsoft)
    -    '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp),  # lockoutTime (Microsoft)
    -    '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp),  # lastLogonTimestamp (Microsoft)
    -    '1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year)  # pwdAccountLockedTime (Novell)
    -}
    -
    -
    -def find_attribute_helpers(attr_type, name, custom_formatter):
    -    """
    -    Tries to format following the OIDs info and format_helper specification.
    -    Search for attribute oid, then attribute name (can be multiple), then attribute syntax
    -    Precedence is:
    -    1. attribute name
    -    2. attribute oid(from schema)
    -    3. attribute names (from oid_info)
    -    4. attribute syntax (from schema)
    -    Custom formatters can be defined in Server object and have precedence over the standard_formatters
    -    If no formatter is found the raw_value is returned as bytes.
    -    Attributes defined as SINGLE_VALUE in schema are returned as a single object, otherwise are returned as a list of object
    -    Formatter functions can return any kind of object
    -    return a tuple (formatter, validator)
    -    """
    -    formatter = None
    -    if custom_formatter and isinstance(custom_formatter, dict):  # if custom formatters are defined they have precedence over the standard formatters
    -        if name in custom_formatter:  # search for attribute name, as returned by the search operation
    -            formatter = custom_formatter[name]
    -
    -        if not formatter and attr_type and attr_type.oid in custom_formatter:  # search for attribute oid as returned by schema
    -            formatter = custom_formatter[attr_type.oid]
    -        if not formatter and attr_type and attr_type.oid_info:
    -            if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES):  # search for multiple names defined in oid_info
    -                for attr_name in attr_type.oid_info[2]:
    -                    if attr_name in custom_formatter:
    -                        formatter = custom_formatter[attr_name]
    -                        break
    -            elif attr_type.oid_info[2] in custom_formatter:  # search for name defined in oid_info
    -                formatter = custom_formatter[attr_type.oid_info[2]]
    -
    -        if not formatter and attr_type and attr_type.syntax in custom_formatter:  # search for syntax defined in schema
    -            formatter = custom_formatter[attr_type.syntax]
    -
    -    if not formatter and name in standard_formatter:  # search for attribute name, as returned by the search operation
    -        formatter = standard_formatter[name]
    -
    -    if not formatter and attr_type and attr_type.oid in standard_formatter:  # search for attribute oid as returned by schema
    -        formatter = standard_formatter[attr_type.oid]
    -
    -    if not formatter and attr_type and attr_type.oid_info:
    -        if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES):  # search for multiple names defined in oid_info
    -            for attr_name in attr_type.oid_info[2]:
    -                if attr_name in standard_formatter:
    -                    formatter = standard_formatter[attr_name]
    -                    break
    -        elif attr_type.oid_info[2] in standard_formatter:  # search for name defined in oid_info
    -            formatter = standard_formatter[attr_type.oid_info[2]]
    -    if not formatter and attr_type and attr_type.syntax in standard_formatter:  # search for syntax defined in schema
    -        formatter = standard_formatter[attr_type.syntax]
    -
    -    if formatter is None:
    -        return None, None
    -
    -    return formatter
    -
    -
    -def format_attribute_values(schema, name, values, custom_formatter):
    -    if not values:  # RFCs states that attributes must always have values, but a flaky server returns empty values too
    -        return []
    -
    -    if not isinstance(values, SEQUENCE_TYPES):
    -        values = [values]
    -
    -    if schema and schema.attribute_types and name in schema.attribute_types:
    -        attr_type = schema.attribute_types[name]
    -    else:
    -        attr_type = None
    -
    -    attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter)
    -    if not isinstance(attribute_helpers, tuple):  # custom formatter
    -        formatter = attribute_helpers
    -    else:
    -        formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0]
    -
    -    formatted_values = [formatter(raw_value) for raw_value in values]  # executes formatter
    -    if formatted_values:
    -        return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values
    -    else:  # RFCs states that attributes must always have values, but AD return empty values in DirSync
    -        return []
    -
    -
    -def find_attribute_validator(schema, name, custom_validator):
    -    if schema and schema.attribute_types and name in schema.attribute_types:
    -        attr_type = schema.attribute_types[name]
    -    else:
    -        attr_type = None
    -
    -    attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
    -    if not isinstance(attribute_helpers, tuple):  # custom validator
    -        validator = attribute_helpers
    -    else:
    -        if not attribute_helpers[1]:
    -            if attr_type and attr_type.single_value:
    -                validator = validate_generic_single_value  # validate only single value
    -            else:
    -                validator = always_valid  # unknown syntax, accepts single and multi value
    -        else:
    -            validator = attribute_helpers[1]
    -    return validator
    +"""
    +"""
    +
    +# Created on 2014.10.28
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from ... import SEQUENCE_TYPES
    +from .formatters import format_ad_timestamp, format_binary, format_boolean,\
    +    format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year,\
    +    format_ad_timedelta
    +from .validators import validate_integer, validate_time, always_valid,\
    +    validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\
    +    validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year,\
    +    validate_ad_timedelta
    +
+# for each syntax a format function and an input validation function can be specified
    +
    +standard_formatter = {
    +    '1.2.840.113556.1.4.903': (format_binary, None),  # Object (DN-binary) - Microsoft
    +    '1.2.840.113556.1.4.904': (format_unicode, None),  # Object (DN-string) - Microsoft
    +    '1.2.840.113556.1.4.905': (format_unicode, None),  # String (Teletex) - Microsoft
    +    '1.2.840.113556.1.4.906': (format_integer, validate_integer),  # Large integer - Microsoft
    +    '1.2.840.113556.1.4.907': (format_binary, None),  # String (NT-sec-desc) - Microsoft
    +    '1.2.840.113556.1.4.1221': (format_binary, None),  # Object (OR-name) - Microsoft
    +    '1.2.840.113556.1.4.1362': (format_unicode, None),  # String (Case) - Microsoft
    +    '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None),  # ACI item [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None),  # Access point [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None),  # Attribute type description
    +    '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None),  # Audio [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None),  # Binary [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None),  # Bit String
    +    '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean),  # Boolean
    +    '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None),  # Certificate [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None),  # Certificate List [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None),  # Certificate Pair [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None),  # Country String
    +    '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None),  # Distinguished name (DN)
    +    '1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None),  # Data Quality Syntax [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None),  # Delivery method
    +    '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None),  # Directory string
    +    '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None),  # DIT Content Rule Description
    +    '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None),  # DIT Structure Rule Description
    +    '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None),  # DL Submit Permission [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None),  # DSA Quality Syntax [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None),  # DSE Type [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None),  # Enhanced Guide
    +    '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None),  # Facsimile Telephone Number
    +    '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None),  # Fax
    +    '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time),  # Generalized time
    +    '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None),  # Guide [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None),  # IA5 string
    +    '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer),  # Integer
    +    '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None),  # JPEG
    +    '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None),  # Master and Shadow Access Points [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None),  # Matching rule description
    +    '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None),  # Matching rule use description
    +    '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None),  # Mail Preference [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None),  # MHS OR Address [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None),  # Name and optional UID
    +    '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None),  # Name form description
    +    '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None),  # Numeric string
    +    '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None),  # Object class description
    +    '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None),  # OID
    +    '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None),  # Other mailbox
    +    '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None),  # Octet string
    +    '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None),  # Postal address
    +    '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None),  # Protocol Information [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None),  # Presentation Address [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None),  # Printable string
+    '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None),  # Subtree specification [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None),  # Supplier Information [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None),  # Supplier Or Consumer [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None),  # Supplier And Consumer [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None),  # Supported Algorithm [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None),  # Telephone number
    +    '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None),  # Teletex terminal identifier
    +    '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None),  # Teletex number
    +    '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time),  # Utc time  (deprecated)
    +    '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None),  # LDAP syntax description
    +    '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None),  # Modify rights [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None),  # LDAP Schema Definition [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None),  # LDAP Schema Description [OBSOLETE]
    +    '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None),  # Substring assertion
    +    '1.3.6.1.1.16.1': (format_uuid, validate_uuid),  # UUID
    +    '1.3.6.1.1.16.4': (format_uuid, validate_uuid),  # entryUUID (RFC 4530)
    +    '2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid),  # GUID (Novell)
    +    '2.16.840.1.113719.1.1.5.1.0': (format_binary, None),  # Unknown (Novell)
    +    '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None),  # Case Ignore List (Novell)
    +    '2.16.840.1.113719.1.1.5.1.12': (format_binary, None),  # Tagged Data (Novell)
    +    '2.16.840.1.113719.1.1.5.1.13': (format_binary, None),  # Octet List (Novell)
    +    '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None),  # Tagged String (Novell)
    +    '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None),  # Tagged Name And String (Novell)
    +    '2.16.840.1.113719.1.1.5.1.16': (format_binary, None),  # NDS Replica Pointer (Novell)
    +    '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None),  # NDS ACL (Novell)
    +    '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time),  # NDS Timestamp (Novell)
    +    '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer),  # Counter (Novell)
    +    '2.16.840.1.113719.1.1.5.1.23': (format_unicode, None),  # Tagged Name (Novell)
    +    '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None),  # Typed Name (Novell)
    +    'supportedldapversion': (format_integer, None),  # supportedLdapVersion (Microsoft)
+    'octetstring': (format_binary, validate_uuid_le),  # octet string (Microsoft)
    +    '1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le),  # objectGUID (Microsoft)
    +    '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp),  # builtinCreationTime (Microsoft)
    +    '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp),  # creationTime (Microsoft)
    +    '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp),  # badPasswordTime (Microsoft)
    +    '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp),  # lastLogoff (Microsoft)
    +    '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp),  # lastLogon (Microsoft)
    +    '1.2.840.113556.1.4.60': (format_ad_timedelta, validate_ad_timedelta),  # lockoutDuration (Microsoft)
    +    '1.2.840.113556.1.4.61': (format_ad_timedelta, validate_ad_timedelta),  # lockOutObservationWindow (Microsoft)
    +    '1.2.840.113556.1.4.74': (format_ad_timedelta, validate_ad_timedelta),  # maxPwdAge (Microsoft)
    +    '1.2.840.113556.1.4.78': (format_ad_timedelta, validate_ad_timedelta),  # minPwdAge (Microsoft)
    +    '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int),  # pwdLastSet (Microsoft, can be set to -1 only)
    +    '1.2.840.113556.1.4.146': (format_sid, validate_sid),  # objectSid (Microsoft)
    +    '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp),  # accountExpires (Microsoft)
    +    '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp),  # lockoutTime (Microsoft)
    +    '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp),  # lastLogonTimestamp (Microsoft)
    +    '1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year)  # pwdAccountLockedTime (Novell)
    +}
    +
    +
    +def find_attribute_helpers(attr_type, name, custom_formatter):
    +    """
    +    Tries to format following the OIDs info and format_helper specification.
    +    Search for attribute oid, then attribute name (can be multiple), then attribute syntax
    +    Precedence is:
    +    1. attribute name
    +    2. attribute oid(from schema)
    +    3. attribute names (from oid_info)
    +    4. attribute syntax (from schema)
    +    Custom formatters can be defined in Server object and have precedence over the standard_formatters
    +    If no formatter is found the raw_value is returned as bytes.
+    Attributes defined as SINGLE_VALUE in schema are returned as a single object, otherwise they are returned as a list of objects
    +    Formatter functions can return any kind of object
    +    return a tuple (formatter, validator)
    +    """
    +    formatter = None
    +    if custom_formatter and isinstance(custom_formatter, dict):  # if custom formatters are defined they have precedence over the standard formatters
    +        if name in custom_formatter:  # search for attribute name, as returned by the search operation
    +            formatter = custom_formatter[name]
    +
    +        if not formatter and attr_type and attr_type.oid in custom_formatter:  # search for attribute oid as returned by schema
    +            formatter = custom_formatter[attr_type.oid]
    +        if not formatter and attr_type and attr_type.oid_info:
    +            if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES):  # search for multiple names defined in oid_info
    +                for attr_name in attr_type.oid_info[2]:
    +                    if attr_name in custom_formatter:
    +                        formatter = custom_formatter[attr_name]
    +                        break
    +            elif attr_type.oid_info[2] in custom_formatter:  # search for name defined in oid_info
    +                formatter = custom_formatter[attr_type.oid_info[2]]
    +
    +        if not formatter and attr_type and attr_type.syntax in custom_formatter:  # search for syntax defined in schema
    +            formatter = custom_formatter[attr_type.syntax]
    +
    +    if not formatter and name in standard_formatter:  # search for attribute name, as returned by the search operation
    +        formatter = standard_formatter[name]
    +
    +    if not formatter and attr_type and attr_type.oid in standard_formatter:  # search for attribute oid as returned by schema
    +        formatter = standard_formatter[attr_type.oid]
    +
    +    if not formatter and attr_type and attr_type.oid_info:
    +        if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES):  # search for multiple names defined in oid_info
    +            for attr_name in attr_type.oid_info[2]:
    +                if attr_name in standard_formatter:
    +                    formatter = standard_formatter[attr_name]
    +                    break
    +        elif attr_type.oid_info[2] in standard_formatter:  # search for name defined in oid_info
    +            formatter = standard_formatter[attr_type.oid_info[2]]
    +    if not formatter and attr_type and attr_type.syntax in standard_formatter:  # search for syntax defined in schema
    +        formatter = standard_formatter[attr_type.syntax]
    +
    +    if formatter is None:
    +        return None, None
    +
    +    return formatter
    +
    +
    +def format_attribute_values(schema, name, values, custom_formatter):
+    if not values:  # RFCs state that attributes must always have values, but a flaky server returns empty values too
    +        return []
    +
    +    if not isinstance(values, SEQUENCE_TYPES):
    +        values = [values]
    +
    +    if schema and schema.attribute_types and name in schema.attribute_types:
    +        attr_type = schema.attribute_types[name]
    +    else:
    +        attr_type = None
    +
    +    attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter)
    +    if not isinstance(attribute_helpers, tuple):  # custom formatter
    +        formatter = attribute_helpers
    +    else:
    +        formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0]
    +
    +    formatted_values = [formatter(raw_value) for raw_value in values]  # executes formatter
    +    if formatted_values:
    +        return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values
+    else:  # RFCs state that attributes must always have values, but AD returns empty values in DirSync
    +        return []
    +
    +
    +def find_attribute_validator(schema, name, custom_validator):
    +    if schema and schema.attribute_types and name in schema.attribute_types:
    +        attr_type = schema.attribute_types[name]
    +    else:
    +        attr_type = None
    +
    +    attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
    +    if not isinstance(attribute_helpers, tuple):  # custom validator
    +        validator = attribute_helpers
    +    else:
    +        if not attribute_helpers[1]:
    +            if attr_type and attr_type.single_value:
    +                validator = validate_generic_single_value  # validate only single value
    +            else:
    +                validator = always_valid  # unknown syntax, accepts single and multi value
    +        else:
    +            validator = attribute_helpers[1]
    +    return validator
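
The find_attribute_helpers function above resolves a helper by consulting any custom table first and then standard_formatter, in each case trying the attribute name, the schema OID, the names from oid_info, and finally the schema syntax. A hypothetical, self-contained sketch of that precedence; the table contents and the find_helper function are stand-ins for illustration, not part of the ldap3 API:

# Hypothetical sketch of the lookup precedence used by find_attribute_helpers;
# 'standard' stands in for standard_formatter and find_helper is not a real
# ldap3 function (the oid_info name lookup is omitted for brevity).
standard = {
    'objectSid': 'standard name formatter',
    '1.2.840.113556.1.4.146': 'standard oid formatter',
    '1.3.6.1.4.1.1466.115.121.1.40': 'standard syntax formatter',
}

def find_helper(name, oid=None, syntax=None, custom=None):
    if custom:  # custom formatters always have precedence
        for key in (name, oid, syntax):
            if key in custom:
                return custom[key]
    for key in (name, oid, syntax):  # then name, oid, syntax, in that order
        if key in standard:
            return standard[key]
    return None  # caller falls back to format_unicode

assert find_helper('objectSid', '1.2.840.113556.1.4.146') == 'standard name formatter'
assert find_helper('unknownAttr', '1.2.840.113556.1.4.146') == 'standard oid formatter'
assert find_helper('objectSid', custom={'objectSid': 'my formatter'}) == 'my formatter'
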
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/validators.py b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/validators.py
    index fff2198..3ab300d 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/validators.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/formatters/validators.py
    @@ -1,461 +1,503 @@
    -"""
    -"""
    -
    -# Created on 2016.08.09
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2016 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -from binascii import a2b_hex
    -from datetime import datetime
    -from calendar import timegm
    -from uuid import UUID
    -from struct import pack
    -
    -
    -from ... import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES
    -from .formatters import format_time, format_ad_timestamp
    -from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes
    -
    -# Validators return True if value is valid, False if value is not valid,
    -# or a value different from True and False that is a valid value to substitute to the input value
    -
    -
    -def check_type(input_value, value_type):
    -    if isinstance(input_value, value_type):
    -        return True
    -
    -    if isinstance(input_value, SEQUENCE_TYPES):
    -        for value in input_value:
    -            if not isinstance(value, value_type):
    -                return False
    -        return True
    -
    -    return False
    -
    -
    -# noinspection PyUnusedLocal
    -def always_valid(input_value):
    -    return True
    -
    -
    -def validate_generic_single_value(input_value):
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        return True
    -
    -    try:  # object couldn't have a __len__ method
    -        if len(input_value) == 1:
    -            return True
    -    except Exception:
    -        pass
    -
    -    return False
    -
    -
    -def validate_zero_and_minus_one_and_positive_int(input_value):
    -    """Accept -1 only (used by pwdLastSet in AD)
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES):
    -            return True if int(input_value) >= -1 else False
    -        return False
    -    else:
    -        if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)):
    -            return True if int(input_value[0]) >= -1 else False
    -
    -    return False
    -
    -
    -def validate_integer(input_value):
    -    if check_type(input_value, (float, bool)):
    -        return False
    -    if check_type(input_value, INTEGER_TYPES):
    -        return True
    -
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []  # builds a list of valid int values
    -    from decimal import Decimal, InvalidOperation
    -    for element in input_value:
    -        try:  # try to convert any type to int, an invalid conversion raise TypeError or ValueError, doublecheck with Decimal type, if both are valid and equal then then int() value is used
    -            value = to_unicode(element) if isinstance(element, bytes) else element
    -            decimal_value = Decimal(value)
    -            int_value = int(value)
    -            if decimal_value == int_value:
    -                valid_values.append(int_value)
    -            else:
    -                return False
    -        except (ValueError, TypeError, InvalidOperation):
    -            return False
    -
    -    if sequence:
    -        return valid_values
    -    else:
    -        return valid_values[0]
    -
    -
    -def validate_bytes(input_value):
    -    return check_type(input_value, bytes)
    -
    -
    -def validate_boolean(input_value):
    -    # it could be a real bool or the string TRUE or FALSE, # only a single valued is allowed
    -    if validate_generic_single_value(input_value):  # valid only if a single value or a sequence with a single element
    -        if isinstance(input_value, SEQUENCE_TYPES):
    -            input_value = input_value[0]
    -        if isinstance(input_value, bool):
    -            if input_value:
    -                return 'TRUE'
    -            else:
    -                return 'FALSE'
    -        if str is not bytes and isinstance(input_value, bytes):  # python3 try to converts bytes to string
    -            input_value = to_unicode(input_value)
    -        if isinstance(input_value, STRING_TYPES):
    -            if input_value.lower() == 'true':
    -                return 'TRUE'
    -            elif input_value.lower() == 'false':
    -                return 'FALSE'
    -    return False
    -
    -
    -def validate_time_with_0_year(input_value):
    -    # validates generalized time but accept a 0000 year too
    -    # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
    -            element = to_unicode(element)
    -        if isinstance(element, STRING_TYPES):  # tries to check if it is already be a Generalized Time
    -            if element.startswith('0000') or isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
    -                valid_values.append(element)
    -            else:
    -                return False
    -        elif isinstance(element, datetime):
    -            changed = True
    -            if element.tzinfo:  # a datetime with a timezone
    -                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
    -            else:  # datetime without timezone, assumed local and adjusted to UTC
    -                offset = datetime.now() - datetime.utcnow()
    -                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -
    -def validate_time(input_value):
    -    # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
    -            element = to_unicode(element)
    -        if isinstance(element, STRING_TYPES):  # tries to check if it is already be a Generalized Time
    -            if isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
    -                valid_values.append(element)
    -            else:
    -                return False
    -        elif isinstance(element, datetime):
    -            changed = True
    -            if element.tzinfo:  # a datetime with a timezone
    -                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
    -            else:  # datetime without timezone, assumed local and adjusted to UTC
    -                offset = datetime.now() - datetime.utcnow()
    -                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -
    -def validate_ad_timestamp(input_value):
    -    """
    -    Active Directory stores date/time values as the number of 100-nanosecond intervals
    -    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
    -    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if str is not bytes and isinstance(element, bytes):  # python3 try to converts bytes to string
    -            element = to_unicode(element)
    -        if isinstance(element, NUMERIC_TYPES):
    -            if 0 <= element <= 9223372036854775807:  # min and max for the AD timestamp starting from 12:00 AM January 1, 1601
    -                valid_values.append(element)
    -            else:
    -                return False
    -        elif isinstance(element, STRING_TYPES):  # tries to check if it is already be a AD timestamp
    -            if isinstance(format_ad_timestamp(to_raw(element)), datetime):  # valid Generalized Time string
    -                valid_values.append(element)
    -            else:
    -                return False
    -        elif isinstance(element, datetime):
    -            changed = True
    -            if element.tzinfo:  # a datetime with a timezone
    -                valid_values.append(to_raw((timegm(element.utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
    -            else:  # datetime without timezone, assumed local and adjusted to UTC
    -                offset = datetime.now() - datetime.utcnow()
    -                valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -
    -def validate_guid(input_value):
    -    """
    -    object guid in uuid format (Novell eDirectory)
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if isinstance(element,  STRING_TYPES):
    -            try:
    -                valid_values.append(UUID(element).bytes)
    -                changed = True
    -            except ValueError: # try if the value is an escaped byte sequence
    -                try:
    -                    valid_values.append(UUID(element.replace('\\', '')).bytes)
    -                    changed = True
    -                    continue
    -                except ValueError:
    -                    if str is not bytes:  # python 3
    -                        pass
    -                    else:
    -                        valid_values.append(element)
    -                        continue
    -                return False
    -        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
    -            valid_values.append(element)
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -def validate_uuid(input_value):
    -    """
    -    object entryUUID in uuid format
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if isinstance(element,  STRING_TYPES):
    -            try:
    -                valid_values.append(str(UUID(element)))
    -                changed = True
    -            except ValueError: # try if the value is an escaped byte sequence
    -                try:
    -                    valid_values.append(str(UUID(element.replace('\\', ''))))
    -                    changed = True
    -                    continue
    -                except ValueError:
    -                    if str is not bytes:  # python 3
    -                        pass
    -                    else:
    -                        valid_values.append(element)
    -                        continue
    -                return False
    -        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
    -            valid_values.append(element)
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -
    -def validate_uuid_le(input_value):
    -    """
    -    Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP:
    -    "{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles)
    -    "689e030773434d26a7a007039e684373": packet representation, already in little endian
    -    "\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian
    -    byte sequence: already in little endian
    -
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if isinstance(element, STRING_TYPES):
    -            if element[0] == '{' and element[-1] == '}':
    -                valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
    -                changed = True
    -            elif '-' in element:
    -                valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
    -                changed = True
    -            elif '\\' in element:
    -                valid_values.append(UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le)  # byte representation, value in little endian
    -                changed = True
    -            elif '-' not in element: # value in little endian
    -                valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le)  # packet representation, value in little endian, converts to little endian
    -                changed = True
    -        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid uuid
    -            valid_values.append(element)  # value is untouched, must be in little endian
    -        else:
    -            return False
    -
    -    if changed:
    -        if sequence:
    -            return valid_values
    -        else:
    -            return valid_values[0]
    -    else:
    -        return True
    -
    -
    -def validate_sid(input_value):
    -    """
    -        SID= "S-1-" IdentifierAuthority 1*SubAuthority
    -               IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
    -                  ; If the identifier authority is < 2^32, the
    -                  ; identifier authority is represented as a decimal
    -                  ; number
    -                  ; If the identifier authority is >= 2^32,
    -                  ; the identifier authority is represented in
    -                  ; hexadecimal
    -                IdentifierAuthorityDec =  1*10DIGIT
    -                  ; IdentifierAuthorityDec, top level authority of a
    -                  ; security identifier is represented as a decimal number
    -                IdentifierAuthorityHex = "0x" 12HEXDIG
    -                  ; IdentifierAuthorityHex, the top-level authority of a
    -                  ; security identifier is represented as a hexadecimal number
    -                SubAuthority= "-" 1*10DIGIT
    -                  ; Sub-Authority is always represented as a decimal number
    -                  ; No leading "0" characters are allowed when IdentifierAuthority
    -                  ; or SubAuthority is represented as a decimal number
    -                  ; All hexadecimal digits must be output in string format,
    -                  ; pre-pended by "0x"
    -
    -        Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
    -        SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
    -        IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
    -        SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
    -
    -        If you have a SID like S-a-b-c-d-e-f-g-...
    -
    -        Then the bytes are
    -        a 	(revision)
    -        N 	(number of dashes minus two)
    -        bbbbbb 	(six bytes of "b" treated as a 48-bit number in big-endian format)
    -        cccc 	(four bytes of "c" treated as a 32-bit number in little-endian format)
    -        dddd 	(four bytes of "d" treated as a 32-bit number in little-endian format)
    -        eeee 	(four bytes of "e" treated as a 32-bit number in little-endian format)
    -        ffff 	(four bytes of "f" treated as a 32-bit number in little-endian format)
    -
    -    """
    -    if not isinstance(input_value, SEQUENCE_TYPES):
    -        sequence = False
    -        input_value = [input_value]
    -    else:
    -        sequence = True  # indicates if a sequence must be returned
    -
    -    valid_values = []
    -    changed = False
    -    for element in input_value:
    -        if isinstance(element, STRING_TYPES):
    -            if element.startswith('S-'):
    -                parts = element.split('-')
-                sid_bytes = pack('<q', int(parts[1]))[0:1]  # revision
-                sid_bytes += pack('<q', len(parts[3:]))[0:1]  # sub-authorities count
-                if len(parts[2]) <= 10:
-                    sid_bytes += pack('>q', int(parts[2]))[2:]  # authority (in dec)
-                else:
-                    sid_bytes += pack('>q', int(parts[2], 16))[2:]  # authority (in hex)
-                for sub_auth in parts[3:]:
-                    sid_bytes += pack('<q', int(sub_auth))[0:4]  # sub-authorities
-                valid_values.append(sid_bytes)
-                changed = True
-        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
-            valid_values.append(element)
-        else:
-            return False
-
-    if changed:
-        if sequence:
-            return valid_values
-        else:
-            return valid_values[0]
-    else:
-        return True
+"""
+"""
+
+# Created on 2016.08.09
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2016 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +from binascii import a2b_hex, hexlify
    +from datetime import datetime
    +from calendar import timegm
    +from uuid import UUID
    +from struct import pack
    +
    +
    +from ... import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES
    +from .formatters import format_time, format_ad_timestamp
    +from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes, escape_bytes
    +
    +# Validators return True if value is valid, False if value is not valid,
+# or a value different from True and False that is a valid value to substitute for the input value
    +
    +
    +def check_backslash(value):
    +    if isinstance(value, (bytearray, bytes)):
    +        if b'\\' in value:
    +            value = value.replace(b'\\', b'\\5C')
    +    elif isinstance(value, STRING_TYPES):
    +        if '\\' in value:
    +            value = value.replace('\\', '\\5C')
    +    return value
    +
    +
    +def check_type(input_value, value_type):
    +    if isinstance(input_value, value_type):
    +        return True
    +
    +    if isinstance(input_value, SEQUENCE_TYPES):
    +        for value in input_value:
    +            if not isinstance(value, value_type):
    +                return False
    +        return True
    +
    +    return False
    +
    +
    +# noinspection PyUnusedLocal
    +def always_valid(input_value):
    +    return True
    +
    +
    +def validate_generic_single_value(input_value):
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        return True
    +
    +    try:  # object couldn't have a __len__ method
    +        if len(input_value) == 1:
    +            return True
    +    except Exception:
    +        pass
    +
    +    return False
    +
    +
    +def validate_zero_and_minus_one_and_positive_int(input_value):
    +    """Accept -1 and 0 only (used by pwdLastSet in AD)
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES):
    +            return True if int(input_value) >= -1 else False
    +        return False
    +    else:
    +        if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)):
    +            return True if int(input_value[0]) >= -1 else False
    +
    +    return False
    +
    +
    +def validate_integer(input_value):
    +    if check_type(input_value, (float, bool)):
    +        return False
    +    if check_type(input_value, INTEGER_TYPES):
    +        return True
    +
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []  # builds a list of valid int values
    +    from decimal import Decimal, InvalidOperation
    +    for element in input_value:
+        try:  # try to convert any type to int; an invalid conversion raises TypeError or ValueError; doublecheck with Decimal type, if both are valid and equal then the int() value is used
    +            value = to_unicode(element) if isinstance(element, bytes) else element
    +            decimal_value = Decimal(value)
    +            int_value = int(value)
    +            if decimal_value == int_value:
    +                valid_values.append(int_value)
    +            else:
    +                return False
    +        except (ValueError, TypeError, InvalidOperation):
    +            return False
    +
    +    if sequence:
    +        return valid_values
    +    else:
    +        return valid_values[0]
    +
    +
    +def validate_bytes(input_value):
    +    return check_type(input_value, bytes)
    +
    +
    +def validate_boolean(input_value):
+    # it could be a real bool or the string TRUE or FALSE; only a single value is allowed
    +    if validate_generic_single_value(input_value):  # valid only if a single value or a sequence with a single element
    +        if isinstance(input_value, SEQUENCE_TYPES):
    +            input_value = input_value[0]
    +        if isinstance(input_value, bool):
    +            if input_value:
    +                return 'TRUE'
    +            else:
    +                return 'FALSE'
+        if str is not bytes and isinstance(input_value, bytes):  # python3: try to convert bytes to string
    +            input_value = to_unicode(input_value)
    +        if isinstance(input_value, STRING_TYPES):
    +            if input_value.lower() == 'true':
    +                return 'TRUE'
    +            elif input_value.lower() == 'false':
    +                return 'FALSE'
    +    return False
    +
    +
    +def validate_time_with_0_year(input_value):
    +    # validates generalized time but accept a 0000 year too
    +    # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
+        if str is not bytes and isinstance(element, bytes):  # python3: try to convert bytes to string
    +            element = to_unicode(element)
+        if isinstance(element, STRING_TYPES):  # tries to check if it is already a Generalized Time
    +            if element.startswith('0000') or isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
    +                valid_values.append(element)
    +            else:
    +                return False
    +        elif isinstance(element, datetime):
    +            changed = True
    +            if element.tzinfo:  # a datetime with a timezone
    +                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
    +            else:  # datetime without timezone, assumed local and adjusted to UTC
    +                offset = datetime.now() - datetime.utcnow()
    +                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
    +        else:
    +            return False
    +
    +    if changed:
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
    +
    +def validate_time(input_value):
    +    # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
+        if str is not bytes and isinstance(element, bytes):  # python3: try to convert bytes to string
+            element = to_unicode(element)
+        if isinstance(element, STRING_TYPES):  # checks whether the value is already a Generalized Time string
    +            if isinstance(format_time(to_raw(element)), datetime):  # valid Generalized Time string
    +                valid_values.append(element)
    +            else:
    +                return False
    +        elif isinstance(element, datetime):
    +            changed = True
    +            if element.tzinfo:  # a datetime with a timezone
    +                valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
    +            else:  # datetime without timezone, assumed local and adjusted to UTC
    +                offset = datetime.now() - datetime.utcnow()
    +                valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
    +        else:
    +            return False
    +
    +    if changed:
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
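Both time validators above share the same naive-datetime rule: a datetime without tzinfo is treated as local time and shifted to UTC via the now()/utcnow() difference before formatting as Generalized Time; a small illustration:

    from datetime import datetime

    naive = datetime(2020, 6, 1, 12, 0, 0)       # no tzinfo: treated as local
    offset = datetime.now() - datetime.utcnow()  # local offset from UTC
    print((naive - offset).strftime('%Y%m%d%H%M%SZ'))
    # '20200601100000Z' on a UTC+2 host (result depends on the local timezone)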
    +
    +def validate_ad_timestamp(input_value):
    +    """
    +    Active Directory stores date/time values as the number of 100-nanosecond intervals
    +    that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
    +    The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
+        if str is not bytes and isinstance(element, bytes):  # python3: try to convert bytes to string
    +            element = to_unicode(element)
    +        if isinstance(element, NUMERIC_TYPES):
    +            if 0 <= element <= 9223372036854775807:  # min and max for the AD timestamp starting from 12:00 AM January 1, 1601
    +                valid_values.append(element)
    +            else:
    +                return False
+        elif isinstance(element, STRING_TYPES):  # checks whether the value is already an AD timestamp
+            if isinstance(format_ad_timestamp(to_raw(element)), datetime):  # valid AD timestamp string
    +                valid_values.append(element)
    +            else:
    +                return False
    +        elif isinstance(element, datetime):
    +            changed = True
    +            if element.tzinfo:  # a datetime with a timezone
    +                valid_values.append(to_raw((timegm(element.utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
    +            else:  # datetime without timezone, assumed local and adjusted to UTC
    +                offset = datetime.now() - datetime.utcnow()
    +                valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
    +        else:
    +            return False
    +
    +    if changed:
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
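The constants above encode the Windows epoch: 11644473600 seconds separate 1601-01-01 from the Unix epoch, and the factor 10000000 converts seconds to 100-nanosecond intervals; a worked conversion:

    from calendar import timegm
    from datetime import datetime, timezone

    dt = datetime(2020, 1, 1, tzinfo=timezone.utc)
    ad_ts = (timegm(dt.utctimetuple()) + 11644473600) * 10000000
    print(ad_ts)  # 132223104000000000, i.e. 2020-01-01 00:00 UTC as an AD timestamp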
    +
    +def validate_ad_timedelta(input_value):
    +    """
    +    Should be validated like an AD timestamp except that since it is a time
    +    delta, it is stored as a negative number.
    +    """
    +    if not isinstance(input_value, INTEGER_TYPES) or input_value > 0:
    +        return False
    +    return validate_ad_timestamp(input_value * -1)
    +
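For context, AD interval attributes such as maxPwdAge or lockoutDuration hold these negative values; e.g. a 42-day maximum password age would be stored as:

    print(-42 * 24 * 60 * 60 * 10000000)  # -36288000000000 (negated 100-ns units)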
    +
    +def validate_guid(input_value):
    +    """
    +    object guid in uuid format (Novell eDirectory)
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
+        if isinstance(element, STRING_TYPES):
+            try:
+                valid_values.append(UUID(element).bytes)
+                changed = True
+            except ValueError:  # check whether the value is an escaped byte sequence
    +                try:
    +                    valid_values.append(UUID(element.replace('\\', '')).bytes)
    +                    changed = True
    +                    continue
    +                except ValueError:
    +                    if str is not bytes:  # python 3
    +                        pass
    +                    else:
    +                        valid_values.append(element)
    +                        continue
    +                return False
    +        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
    +            valid_values.append(element)
    +        else:
    +            return False
    +
    +    if changed:
    +        valid_values = [check_backslash(value) for value in valid_values]
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
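The escaped-byte fallback above simply strips LDAP backslash escaping and re-parses the remaining hex digits; a sketch of the idea (the GUID value is illustrative):

    from uuid import UUID

    escaped = '\\07\\03\\9e\\68\\43\\73\\26\\4d\\a0\\a7\\07\\03\\9e\\68\\43\\73'
    print(UUID(escaped.replace('\\', '')))  # 07039e68-4373-264d-a0a7-07039e684373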
    +
    +def validate_uuid(input_value):
    +    """
    +    object entryUUID in uuid format
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
+        if isinstance(element, STRING_TYPES):
+            try:
+                valid_values.append(str(UUID(element)))
+                changed = True
+            except ValueError:  # check whether the value is an escaped byte sequence
    +                try:
    +                    valid_values.append(str(UUID(element.replace('\\', ''))))
    +                    changed = True
    +                    continue
    +                except ValueError:
    +                    if str is not bytes:  # python 3
    +                        pass
    +                    else:
    +                        valid_values.append(element)
    +                        continue
    +                return False
    +        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
    +            valid_values.append(element)
    +        else:
    +            return False
    +
    +    if changed:
    +        valid_values = [check_backslash(value) for value in valid_values]
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
    +
    +def validate_uuid_le(input_value):
    +    """
    +    Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP:
    +    "{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles)
    +    "689e030773434d26a7a007039e684373": packet representation, already in little endian
    +    "\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian
    +    byte sequence: already in little endian
    +
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
    +        error = False
    +        if isinstance(element, STRING_TYPES):
    +            if element[0] == '{' and element[-1] == '}':
    +                try:
    +                    valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
    +                    changed = True
    +                except ValueError:
    +                    error = True
    +            elif '-' in element:
    +                try:
    +                    valid_values.append(UUID(hex=element).bytes_le)  # string representation, value in big endian, converts to little endian
    +                    changed = True
    +                except ValueError:
    +                    error = True
    +            elif '\\' in element:
    +                try:
    +                    uuid = UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le
    +                    uuid = escape_bytes(uuid)
    +                    valid_values.append(uuid)  # byte representation, value in little endian
    +                    changed = True
    +                except ValueError:
    +                    error = True
    +            elif '-' not in element:  # value in little endian
    +                try:
    +                    valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le)  # packet representation, value in little endian, converts to little endian
    +                    changed = True
    +                except ValueError:
    +                    error = True
+            if error and str == bytes:  # python2 only: assume the value is bytes and valid
    +                valid_values.append(element)  # value is untouched, must be in little endian
    +        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid uuid
    +            valid_values.append(element)  # value is untouched, must be in little endian
    +        else:
    +            return False
    +
    +    if changed:
    +        valid_values = [check_backslash(value) for value in valid_values]
    +        if sequence:
    +            return valid_values
    +        else:
    +            return valid_values[0]
    +    else:
    +        return True
    +
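The bytes_le conversions above rely on the standard-library UUID distinction between the two byte orders: only the first three fields (time_low, time_mid, time_hi_and_version) are byte-swapped in the little-endian form. For example:

    from uuid import UUID

    u = UUID('07039e68-4373-264d-a0a7-07039e684373')
    print(u.bytes.hex())     # 07039e684373264da0a707039e684373  (big endian)
    print(u.bytes_le.hex())  # 689e030773434d26a0a707039e684373  (first three fields swapped)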
    +
    +def validate_sid(input_value):
    +    """
    +        SID= "S-1-" IdentifierAuthority 1*SubAuthority
    +               IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
    +                  ; If the identifier authority is < 2^32, the
    +                  ; identifier authority is represented as a decimal
    +                  ; number
    +                  ; If the identifier authority is >= 2^32,
    +                  ; the identifier authority is represented in
    +                  ; hexadecimal
    +                IdentifierAuthorityDec =  1*10DIGIT
    +                  ; IdentifierAuthorityDec, top level authority of a
    +                  ; security identifier is represented as a decimal number
    +                IdentifierAuthorityHex = "0x" 12HEXDIG
    +                  ; IdentifierAuthorityHex, the top-level authority of a
    +                  ; security identifier is represented as a hexadecimal number
    +                SubAuthority= "-" 1*10DIGIT
    +                  ; Sub-Authority is always represented as a decimal number
    +                  ; No leading "0" characters are allowed when IdentifierAuthority
    +                  ; or SubAuthority is represented as a decimal number
    +                  ; All hexadecimal digits must be output in string format,
    +                  ; pre-pended by "0x"
    +
    +        Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
    +        SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
    +        IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
    +        SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
    +
    +        If you have a SID like S-a-b-c-d-e-f-g-...
    +
    +        Then the bytes are
    +        a 	(revision)
    +        N 	(number of dashes minus two)
    +        bbbbbb 	(six bytes of "b" treated as a 48-bit number in big-endian format)
    +        cccc 	(four bytes of "c" treated as a 32-bit number in little-endian format)
    +        dddd 	(four bytes of "d" treated as a 32-bit number in little-endian format)
    +        eeee 	(four bytes of "e" treated as a 32-bit number in little-endian format)
    +        ffff 	(four bytes of "f" treated as a 32-bit number in little-endian format)
    +
    +    """
    +    if not isinstance(input_value, SEQUENCE_TYPES):
    +        sequence = False
    +        input_value = [input_value]
    +    else:
    +        sequence = True  # indicates if a sequence must be returned
    +
    +    valid_values = []
    +    changed = False
    +    for element in input_value:
    +        if isinstance(element, STRING_TYPES):
    +            if element.startswith('S-'):
    +                parts = element.split('-')
+                sid_bytes = pack('<q', int(parts[1]))[0:1]  # revision number
+                sid_bytes += pack('<q', len(parts[3:]))[0:1]  # number of sub authorities
+                if len(parts[2]) <= 10:
+                    sid_bytes += pack('>q', int(parts[2]))[2:]  # authority (in dec)
+                else:
+                    sid_bytes += pack('>q', int(parts[2], 16))[2:]  # authority (in hex)
+                for sub_auth in parts[3:]:
+                    sid_bytes += pack('<q', int(sub_auth))[0:4]  # sub-authorities
+                valid_values.append(sid_bytes)
+                changed = True
+        elif isinstance(element, (bytes, bytearray)):  # assumes bytes are valid
+            valid_values.append(element)
+        else:
+            return False
+
+    if changed:
+        if sequence:
+            return valid_values
+        else:
+            return valid_values[0]
+    else:
+        return True
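A worked example of the packing implemented above, for the well-known SID S-1-5-32-544 (BUILTIN\Administrators):

    from struct import pack

    parts = 'S-1-5-32-544'.split('-')
    sid = pack('<q', int(parts[1]))[0:1]        # revision (0x01)
    sid += pack('<q', len(parts[3:]))[0:1]      # sub-authority count (2)
    sid += pack('>q', int(parts[2]))[2:]        # 48-bit authority 5, big endian
    for sub_auth in parts[3:]:
        sid += pack('<q', int(sub_auth))[0:4]   # 32-bit sub-authorities, little endian
    print(sid.hex())  # 01020000000000052000000020020000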
diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/kerberos.py b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/kerberos.py
--- a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/kerberos.py
+++ b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/kerberos.py
@@ ... @@ def sasl_gssapi(connection, controls):
     target_name = None
     authz_id = b""
+    raw_creds = None
     if connection.sasl_credentials:
         if len(connection.sasl_credentials) >= 1 and connection.sasl_credentials[0]:
             if connection.sasl_credentials[0] is True:
    @@ -70,9 +75,15 @@ def sasl_gssapi(connection, controls):
                     target_name = gssapi.Name('ldap@' + connection.sasl_credentials[0], gssapi.NameType.hostbased_service)
             if len(connection.sasl_credentials) >= 2 and connection.sasl_credentials[1]:
                 authz_id = connection.sasl_credentials[1].encode("utf-8")
    +        if len(connection.sasl_credentials) >= 3 and connection.sasl_credentials[2]:
    +            raw_creds = connection.sasl_credentials[2]
         if target_name is None:
             target_name = gssapi.Name('ldap@' + connection.server.host, gssapi.NameType.hostbased_service)
    -    creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate') if connection.user else None
    +
    +    if raw_creds is not None:
    +        creds = gssapi.Credentials(base=raw_creds, usage='initiate', store=connection.cred_store)
    +    else:
    +        creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate', store=connection.cred_store) if connection.user else None
         ctx = gssapi.SecurityContext(name=target_name, mech=gssapi.MechType.kerberos, creds=creds)
         in_token = None
         try:
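For context, the hunk above lets a caller pass a pre-built gssapi credential as a third element of sasl_credentials (and a credential store via the connection's cred_store) instead of relying on the default ccache; a hypothetical usage sketch with placeholder server and credential values:

    import gssapi
    from ldap3 import Connection, Server, SASL, KERBEROS

    delegated = gssapi.Credentials(usage='initiate')  # e.g. obtained via delegation
    conn = Connection(Server('ldap.example.com'),
                      authentication=SASL, sasl_mechanism=KERBEROS,
                      sasl_credentials=(None, None, delegated))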
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/plain.py b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/plain.py
    index 1de2a36..f7f7456 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/plain.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/plain.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/sasl.py b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/sasl.py
    index 375b235..30fe0e9 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/sasl.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/sasl/sasl.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ad2012R2.py b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ad2012R2.py
    index f583973..1712613 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ad2012R2.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ad2012R2.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ds389.py b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ds389.py
    index 0ede92f..f0e19dc 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ds389.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/ds389.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir888.py b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir888.py
    index 630d7dc..8243a7e 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir888.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir888.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -939,12 +939,7 @@ edir_8_8_8_dsa_info = """
             "addEntryOps": [
                 "947"
             ],
    -        "altServer": [
    -            "ldap://192.168.137.102:389/",
    -            "ldaps://192.168.137.102:636/",
    -            "ldap://192.168.137.103:389/",
    -            "ldaps://192.168.137.103:636/"
    -        ],
    +        "altServer": [],
             "bindSecurityErrors": [
                 "3"
             ],
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py
    new file mode 100644
    index 0000000..0a1d2e6
    --- /dev/null
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py
    @@ -0,0 +1,1157 @@
    +"""
    +"""
    +
    +# Created on 2019.08.31
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +edir_9_1_4_schema = """
    +{
    +    "raw": {
    +        "attributeTypes": [
    +            "( 2.5.4.35 NAME 'userPassword' DESC 'Internal NDS policy forces this to be single-valued' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} USAGE directoryOperation )",
    +            "( 2.5.18.1 NAME 'createTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.5.18.2 NAME 'modifyTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.5.18.10 NAME 'subschemaSubentry' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation )",
    +            "( 2.5.21.9 NAME 'structuralObjectClass' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.16.840.1.113719.1.27.4.49 NAME 'subordinateCount' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.16.840.1.113719.1.27.4.48 NAME 'entryFlags' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.16.840.1.113719.1.27.4.51 NAME 'federationBoundary' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.5.21.5 NAME 'attributeTypes' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.3 USAGE directoryOperation )",
    +            "( 2.5.21.6 NAME 'objectClasses' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.37 USAGE directoryOperation )",
    +            "( 1.3.6.1.1.20 NAME 'entryDN' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.16.840.1.113719.1.1.4.1.2 NAME 'ACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.5.4.1 NAME 'aliasedObjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Aliased Object Name' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.6 NAME 'backLink' SYNTAX 2.16.840.1.113719.1.1.5.1.23 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Back Link' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.8 NAME 'binderyProperty' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Property' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.7 NAME 'binderyObjectRestriction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Object Restriction' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.9 NAME 'binderyType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Type' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.11 NAME 'cAPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.12 NAME 'cAPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.10 NAME 'Cartridge' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.3 NAME ( 'cn' 'commonName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'CN' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.78 NAME 'printerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Printer Configuration' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.15 NAME 'Convergence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{1} SINGLE-VALUE X-NDS_UPPER_BOUND '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.6 NAME ( 'c' 'countryName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{2} SINGLE-VALUE X-NDS_NAME 'C' X-NDS_LOWER_BOUND '2' X-NDS_UPPER_BOUND '2' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.18 NAME 'defaultQueue' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Default Queue' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.13 NAME ( 'description' 'multiLineDescription' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} X-NDS_NAME 'Description' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '1024' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.64 NAME 'partitionCreationTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Creation Time' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.5.4.23 NAME 'facsimileTelephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.22{64512} X-NDS_NAME 'Facsimile Telephone Number' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.117 NAME 'highConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'High Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.25 NAME 'groupMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Group Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.26 NAME 'ndsHomeDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{255} SINGLE-VALUE X-NDS_NAME 'Home Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.27 NAME 'hostDevice' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Device' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.28 NAME 'hostResourceName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Host Resource Name' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.29 NAME 'hostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Server' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.30 NAME 'inheritedACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Inherited ACL' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.5.4.7 NAME ( 'l' 'localityname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'L' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.39 NAME 'loginAllowedTimeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{42} SINGLE-VALUE X-NDS_NAME 'Login Allowed Time Map' X-NDS_LOWER_BOUND '42' X-NDS_UPPER_BOUND '42' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.40 NAME 'loginDisabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Login Disabled' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.41 NAME 'loginExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Expiration Time' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.42 NAME 'loginGraceLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Grace Limit' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.43 NAME 'loginGraceRemaining' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Grace Remaining' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.44 NAME 'loginIntruderAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 SINGLE-VALUE X-NDS_NAME 'Login Intruder Address' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.45 NAME 'loginIntruderAttempts' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Intruder Attempts' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.46 NAME 'loginIntruderLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Intruder Limit' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.31 NAME 'intruderAttemptResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Attempt Reset Interval' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.47 NAME 'loginIntruderResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Intruder Reset Time' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.48 NAME 'loginMaximumSimultaneous' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Maximum Simultaneous' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.49 NAME 'loginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Login Script' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.50 NAME 'loginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.31 NAME ( 'member' 'uniqueMember' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Member' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.52 NAME 'Memory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.22 NAME 'eMailAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.14{64512} X-NDS_NAME 'EMail Address' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.55 NAME 'networkAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.56 NAME 'networkAddressRestriction' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address Restriction' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.57 NAME 'notify' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Notify' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.114 NAME 'Obituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.5.4.0 NAME 'objectClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_NAME 'Object Class' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.59 NAME 'operator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Operator' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'OU' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.10 NAME ( 'o' 'organizationname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'O' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.32 NAME 'owner' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Owner' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.63 NAME 'pageDescriptionLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Page Description Language' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.65 NAME 'passwordsUsed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'Passwords Used' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.66 NAME 'passwordAllowChange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Allow Change' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.67 NAME 'passwordExpirationInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Password Expiration Interval' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.68 NAME 'passwordExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Password Expiration Time' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.69 NAME 'passwordMinimumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Password Minimum Length' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.70 NAME 'passwordRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Required' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.71 NAME 'passwordUniqueRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Unique Required' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.72 NAME 'path' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Path' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.19 NAME 'physicalDeliveryOfficeName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'Physical Delivery Office Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.16 NAME 'postalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'Postal Address' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.17 NAME 'postalCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Code' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.18 NAME 'postOfficeBox' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Office Box' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.80 NAME 'printJobConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Print Job Configuration' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.79 NAME 'printerControl' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Printer Control' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.82 NAME 'privateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.83 NAME 'Profile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.84 NAME 'publicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.85 NAME 'queue' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Queue' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.86 NAME 'queueDirectory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{255} SINGLE-VALUE X-NDS_NAME 'Queue Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.115 NAME 'Reference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.88 NAME 'Replica' SYNTAX 2.16.840.1.113719.1.1.5.1.16{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.89 NAME 'Resource' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.33 NAME 'roleOccupant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Role Occupant' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.116 NAME 'higherPrivileges' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Higher Privileges' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.92 NAME 'securityEquals' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Security Equals' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.5.4.34 NAME 'seeAlso' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'See Also' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.5 NAME 'serialNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Serial Number' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.95 NAME 'server' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Server' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'S' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.98 NAME 'status' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Status' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
    +            "( 2.5.4.9 NAME 'street' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'SA' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.102 NAME 'supportedTypefaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Typefaces' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.101 NAME 'supportedServices' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Services' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.4 NAME ( 'sn' 'surname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Surname' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.20 NAME 'telephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} X-NDS_NAME 'Telephone Number' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.12 NAME 'title' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Title' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.111 NAME 'User' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.112 NAME 'Version' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.1 NAME 'accountBalance' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Account Balance' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.4 NAME 'allowUnlimitedCredit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Allow Unlimited Credit' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.118 NAME 'lowConvergenceResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Low Convergence Reset Time' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.54 NAME 'minimumAccountBalance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Minimum Account Balance' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.104 NAME 'lowConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Low Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.21 NAME 'Device' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.53 NAME 'messageServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Message Server' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.34 NAME 'Language' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.100 NAME 'supportedConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Supported Connections' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.107 NAME 'typeCreatorMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Type Creator Map' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.108 NAME 'ndsUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'UID' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.24 NAME 'groupID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'GID' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.110 NAME 'unknownBaseClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Unknown Base Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.87 NAME 'receivedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.33 NAME 'synchronizedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Synchronized Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.5 NAME 'authorityRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Authority Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.13 NAME 'certificateRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Certificate Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.17 NAME 'ndsCrossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.37 NAME 'lockedByIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Locked By Intruder' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.77 NAME 'printer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Printer' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.20 NAME 'detectIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Detect Intruder' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.38 NAME 'lockoutAfterDetection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Lockout After Detection' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.32 NAME 'intruderLockoutResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Lockout Reset Interval' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.96 NAME 'serverHolds' SYNTAX 2.16.840.1.113719.1.1.5.1.26 X-NDS_NAME 'Server Holds' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.91 NAME 'sAPName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{47} SINGLE-VALUE X-NDS_NAME 'SAP Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '47' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.113 NAME 'Volume' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.35 NAME 'lastLoginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Last Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.81 NAME 'printServer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 SINGLE-VALUE X-NDS_NAME 'Print Server' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.119 NAME 'nNSDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'NNS Domain' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.120 NAME 'fullName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{127} X-NDS_NAME 'Full Name' X-NDS_UPPER_BOUND '127' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.121 NAME 'partitionControl' SYNTAX 2.16.840.1.113719.1.1.5.1.25 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Control' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.122 NAME 'revision' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Revision' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.123 NAME 'certificateValidityInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'Certificate Validity Interval' X-NDS_LOWER_BOUND '60' X-NDS_UPPER_BOUND '-1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.124 NAME 'externalSynchronizer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'External Synchronizer' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.125 NAME 'messagingDatabaseLocation' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Messaging Database Location' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.126 NAME 'messageRoutingGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Message Routing Group' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.127 NAME 'messagingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Messaging Server' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.128 NAME 'Postmaster' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.162 NAME 'mailboxLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Mailbox Location' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.163 NAME 'mailboxID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Mailbox ID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.164 NAME 'externalName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'External Name' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.165 NAME 'securityFlags' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Security Flags' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.166 NAME 'messagingServerType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE X-NDS_NAME 'Messaging Server Type' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.167 NAME 'lastReferencedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Last Referenced Time' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.5.4.42 NAME 'givenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_NAME 'Given Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.43 NAME 'initials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} X-NDS_NAME 'Initials' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.4.44 NAME 'generationQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Generational Qualifier' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.171 NAME 'profileMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Profile Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.172 NAME 'dsRevision' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'DS Revision' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.173 NAME 'supportedGateway' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{4096} X-NDS_NAME 'Supported Gateway' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '4096' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.174 NAME 'equivalentToMe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Equivalent To Me' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.175 NAME 'replicaUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Replica Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.176 NAME 'partitionStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Status' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.177 NAME 'permanentConfigParms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Permanent Config Parms' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.178 NAME 'Timezone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.179 NAME 'binderyRestrictionLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Bindery Restriction Level' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.180 NAME 'transitiveVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Transitive Vector' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.181 NAME 'T' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.183 NAME 'purgeVector' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Purge Vector' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.184 NAME 'synchronizationTolerance' SYNTAX 2.16.840.1.113719.1.1.5.1.19 USAGE directoryOperation X-NDS_NAME 'Synchronization Tolerance' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.185 NAME 'passwordManagement' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Password Management' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.186 NAME 'usedBy' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Used By' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.187 NAME 'Uses' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.500 NAME 'obituaryNotify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Obituary Notify' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.501 NAME 'GUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.502 NAME 'otherGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} USAGE directoryOperation X-NDS_NAME 'Other GUID' X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.503 NAME 'auxiliaryClassFlag' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Auxiliary Class Flag' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.504 NAME 'unknownAuxiliaryClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} USAGE directoryOperation X-NDS_NAME 'Unknown Auxiliary Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 0.9.2342.19200300.100.1.1 NAME ( 'uid' 'userId' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'uniqueID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 0.9.2342.19200300.100.1.25 NAME 'dc' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64} X-NDS_NAME 'dc' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.507 NAME 'auxClassObjectClassBackup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'AuxClass Object Class Backup' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.508 NAME 'localReceivedUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Local Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.141.4.4 NAME 'federationControl' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.141.4.2 NAME 'federationSearchPath' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.141.4.3 NAME 'federationDNSName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.141.4.1 NAME 'federationBoundaryType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.14.4.1.4 NAME 'DirXML-Associations' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
    +            "( 2.5.18.3 NAME 'creatorsName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.5.18.4 NAME 'modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.300 NAME 'languageId' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.35 NAME 'ndsPredicate' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.36 NAME 'ndsPredicateState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.37 NAME 'ndsPredicateFlush' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.38 NAME 'ndsPredicateTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.40 NAME 'ndsPredicateStatsDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.39 NAME 'ndsPredicateUseValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.601 NAME 'syncPanePoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.600 NAME 'syncWindowVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.602 NAME 'objectVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.27.4.52 NAME 'memberQueryURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'memberQuery' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.302 NAME 'excludedMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.525 NAME 'auxClassCompatibility' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.518 NAME 'ndsAgentPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.519 NAME 'ndsOperationCheckpoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.520 NAME 'localReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.521 NAME 'treeReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.522 NAME 'schemaResetLock' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.523 NAME 'modifiedACLEntry' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.524 NAME 'monitoredConnection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.526 NAME 'localFederationBoundary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.527 NAME 'replicationFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.721 NAME 'ServerEBAEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.716 NAME 'EBATreeConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.722 NAME 'EBAPartitionConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.723 NAME 'EBAServerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.296 NAME 'loginActivationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.687 NAME 'UpdateInProgress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.720 NAME 'dsContainerReadyAttrs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.400.1 NAME 'edirSchemaFlagVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.512 NAME 'indexDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.513 NAME 'ndsStatusRepair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.514 NAME 'ndsStatusExternalReference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.515 NAME 'ndsStatusObituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.516 NAME 'ndsStatusSchema' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.517 NAME 'ndsStatusLimber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.511 NAME 'authoritative' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113730.3.1.34 NAME 'ref' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.546 NAME 'CachedAttrsOnExtRefs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.547 NAME 'ExtRefLastUpdatedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.688 NAME 'NCPKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.1.4.713 NAME 'UTF8LoginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.714 NAME 'loginScriptCharset' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.721 NAME 'NDSRightsToMonitor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.1.192 NAME 'lDAPLogLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Log Level' X-NDS_UPPER_BOUND '32768' )",
    +            "( 2.16.840.1.113719.1.27.4.12 NAME 'lDAPUDPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP UDP Port' X-NDS_UPPER_BOUND '65535' )",
    +            "( 2.16.840.1.113719.1.1.4.1.204 NAME 'lDAPLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Log Filename' )",
    +            "( 2.16.840.1.113719.1.1.4.1.205 NAME 'lDAPBackupLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Backup Log Filename' )",
    +            "( 2.16.840.1.113719.1.1.4.1.206 NAME 'lDAPLogSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Log Size Limit' X-NDS_LOWER_BOUND '2048' X-NDS_UPPER_BOUND '-1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.194 NAME 'lDAPSearchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Size Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
    +            "( 2.16.840.1.113719.1.1.4.1.195 NAME 'lDAPSearchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Time Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
    +            "( 2.16.840.1.113719.1.1.4.1.207 NAME 'lDAPSuffix' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Suffix' )",
    +            "( 2.16.840.1.113719.1.27.4.70 NAME 'ldapConfigVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.14 NAME 'ldapReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Referral' )",
    +            "( 2.16.840.1.113719.1.27.4.73 NAME 'ldapDefaultReferralBehavior' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.23 NAME 'ldapSearchReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:searchReferralUsage' )",
    +            "( 2.16.840.1.113719.1.27.4.24 NAME 'lDAPOtherReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:otherReferralUsage' )",
    +            "( 2.16.840.1.113719.1.27.4.1 NAME 'ldapHostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Host Server' )",
    +            "( 2.16.840.1.113719.1.27.4.2 NAME 'ldapGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Group' )",
    +            "( 2.16.840.1.113719.1.27.4.3 NAME 'ldapTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Screen Level' X-NDS_UPPER_BOUND '32768' )",
    +            "( 2.16.840.1.113719.1.27.4.4 NAME 'searchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
    +            "( 2.16.840.1.113719.1.27.4.5 NAME 'searchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
    +            "( 2.16.840.1.113719.1.27.4.6 NAME 'ldapServerBindLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Bind Limit' X-NDS_UPPER_BOUND '-1' )",
    +            "( 2.16.840.1.113719.1.27.4.7 NAME 'ldapServerIdleTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Idle Timeout' X-NDS_UPPER_BOUND '-1' )",
    +            "( 2.16.840.1.113719.1.27.4.8 NAME 'ldapEnableTCP' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable TCP' )",
    +            "( 2.16.840.1.113719.1.27.4.10 NAME 'ldapEnableSSL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable SSL' )",
    +            "( 2.16.840.1.113719.1.27.4.11 NAME 'ldapTCPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP TCP Port' X-NDS_UPPER_BOUND '65535' )",
    +            "( 2.16.840.1.113719.1.27.4.13 NAME 'ldapSSLPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP SSL Port' X-NDS_UPPER_BOUND '65535' )",
    +            "( 2.16.840.1.113719.1.27.4.21 NAME 'filteredReplicaUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.22 NAME 'ldapKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP:keyMaterialName' )",
    +            "( 2.16.840.1.113719.1.27.4.42 NAME 'extensionInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.27.4.45 NAME 'nonStdClientSchemaCompatMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.46 NAME 'sslEnableMutualAuthentication' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.62 NAME 'ldapEnablePSearch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.63 NAME 'ldapMaximumPSearchOperations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.64 NAME 'ldapIgnorePSearchLimitsForEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.65 NAME 'ldapTLSTrustedRootContainer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.27.4.66 NAME 'ldapEnableMonitorEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.67 NAME 'ldapMaximumMonitorEventsLoad' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.68 NAME 'ldapTLSRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.69 NAME 'ldapTLSVerifyClientCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.71 NAME 'ldapDerefAlias' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.72 NAME 'ldapNonStdAllUserAttrsMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.75 NAME 'ldapBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.79 NAME 'ldapInterfaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.27.4.80 NAME 'ldapChainSecureRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.82 NAME 'ldapStdCompliance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.83 NAME 'ldapDerefAliasOnAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.84 NAME 'ldapGeneralizedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.85 NAME 'ldapPermissiveModify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.86 NAME 'ldapSSLConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.27.4.15 NAME 'ldapServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server List' )",
    +            "( 2.16.840.1.113719.1.27.4.16 NAME 'ldapAttributeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Attribute Map v11' )",
    +            "( 2.16.840.1.113719.1.27.4.17 NAME 'ldapClassMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Class Map v11' )",
    +            "( 2.16.840.1.113719.1.27.4.18 NAME 'ldapAllowClearTextPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Allow Clear Text Password' )",
    +            "( 2.16.840.1.113719.1.27.4.19 NAME 'ldapAnonymousIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Anonymous Identity' )",
    +            "( 2.16.840.1.113719.1.27.4.52 NAME 'ldapAttributeList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
    +            "( 2.16.840.1.113719.1.27.4.53 NAME 'ldapClassList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
    +            "( 2.16.840.1.113719.1.27.4.56 NAME 'transitionGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.74 NAME 'ldapTransitionBackLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.78 NAME 'ldapLBURPNumWriterThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.27.4.20 NAME 'ldapServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server' )",
    +            "( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'Internet EMail Address' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113730.3.1.3 NAME 'employeeNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'NSCP:employeeNumber' )",
    +            "( 2.16.840.1.113719.1.27.4.76 NAME 'referralExcludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.27.4.77 NAME 'referralIncludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.5.4.36 NAME 'userCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'userCertificate' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.37 NAME 'cACertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'cACertificate' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.40 NAME 'crossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'crossCertificatePair' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.58 NAME 'attributeCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.2 NAME 'knowledgeInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
    +            "( 2.5.4.14 NAME 'searchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.25{64512} X-NDS_NAME 'searchGuide' )",
    +            "( 2.5.4.15 NAME 'businessCategory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
    +            "( 2.5.4.21 NAME 'telexNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.52{64512} X-NDS_NAME 'telexNumber' )",
    +            "( 2.5.4.22 NAME 'teletexTerminalIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.51{64512} X-NDS_NAME 'teletexTerminalIdentifier' )",
    +            "( 2.5.4.24 NAME 'x121Address' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '15' )",
    +            "( 2.5.4.25 NAME 'internationaliSDNNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '16' )",
    +            "( 2.5.4.26 NAME 'registeredAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'registeredAddress' )",
    +            "( 2.5.4.27 NAME 'destinationIndicator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
    +            "( 2.5.4.28 NAME 'preferredDeliveryMethod' SYNTAX 1.3.6.1.4.1.1466.115.121.1.14{64512} SINGLE-VALUE X-NDS_NAME 'preferredDeliveryMethod' )",
    +            "( 2.5.4.29 NAME 'presentationAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.43{64512} SINGLE-VALUE X-NDS_NAME 'presentationAddress' )",
    +            "( 2.5.4.30 NAME 'supportedApplicationContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38{64512} X-NDS_NAME 'supportedApplicationContext' )",
    +            "( 2.5.4.45 NAME 'x500UniqueIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.6{64512} X-NDS_NAME 'x500UniqueIdentifier' )",
    +            "( 2.5.4.46 NAME 'dnQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64512} )",
    +            "( 2.5.4.47 NAME 'enhancedSearchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.21{64512} X-NDS_NAME 'enhancedSearchGuide' )",
    +            "( 2.5.4.48 NAME 'protocolInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.42{64512} X-NDS_NAME 'protocolInformation' )",
    +            "( 2.5.4.51 NAME 'houseIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
    +            "( 2.5.4.52 NAME 'supportedAlgorithms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.49{64512} X-NDS_NAME 'supportedAlgorithms' )",
    +            "( 2.5.4.54 NAME 'dmdName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
    +            "( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.38 NAME 'associatedName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.5.4.49 NAME 'dn' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.1 NAME 'httpServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.3.4.2 NAME 'httpHostServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.3 NAME 'httpThreadsPerCPU' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.4 NAME 'httpIOBufferSize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.5 NAME 'httpRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.6 NAME 'httpKeepAliveRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.7 NAME 'httpSessionTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.8 NAME 'httpKeyMaterialObject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.9 NAME 'httpTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.10 NAME 'httpAuthRequiresTLS' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.11 NAME 'httpDefaultClearPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.12 NAME 'httpDefaultTLSPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.3.4.13 NAME 'httpBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.295 NAME 'emboxConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.54.4.1.1 NAME 'trusteesOfNewObject' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'Trustees Of New Object' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.55.4.1.1 NAME 'newObjectSDSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's DS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.56.4.1.1 NAME 'newObjectSFSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'New Object's FS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.57.4.1.1 NAME 'setupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.58.4.1.1 NAME 'runSetupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Run Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.59.4.1.1 NAME 'membersOfTemplate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Members Of Template' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.60.4.1.1 NAME 'volumeSpaceRestrictions' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Volume Space Restrictions' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.61.4.1.1 NAME 'setPasswordAfterCreate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Set Password After Create' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.62.4.1.1 NAME 'homeDirectoryRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NAME 'Home Directory Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.63.4.1.1 NAME 'newObjectSSelfRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's Self Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.8.4.1 NAME 'digitalMeID' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.8.4.2 NAME 'assistant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.8.4.3 NAME 'assistantPhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.4 NAME 'city' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.5 NAME 'company' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.43 NAME 'co' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.6 NAME 'directReports' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 0.9.2342.19200300.100.1.10 NAME 'manager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.8.4.7 NAME 'mailstop' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 0.9.2342.19200300.100.1.40 NAME 'personalTitle' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.42 NAME 'pager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.8 NAME 'workforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.9 NAME 'instantMessagingID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.10 NAME 'preferredName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.7 NAME 'photo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.11 NAME 'jobCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.12 NAME 'siteLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.13 NAME 'employeeStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113730.3.1.4 NAME 'employeeType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.14 NAME 'costCenter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.15 NAME 'costCenterDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.16 NAME 'tollFreePhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.17 NAME 'otherPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.18 NAME 'managerWorkforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.19 NAME 'jackNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.20 NAME 'vehicleInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.21 NAME 'accessCardNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.32 NAME 'isManager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.8.4.22 NAME 'homeCity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.23 NAME 'homeEmailAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 1.3.6.1.4.1.1466.101.120.31 NAME 'homeFax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 0.9.2342.19200300.100.1.20 NAME 'homePhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.24 NAME 'homeState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.39 NAME 'homePostalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.25 NAME 'homeZipCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.26 NAME 'personalMobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.27 NAME 'children' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.28 NAME 'spouse' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.29 NAME 'vendorName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.30 NAME 'vendorAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.8.4.31 NAME 'vendorPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
    +            "( 2.16.840.1.113719.1.1.4.1.303 NAME 'dgIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME_VALUE_ACCESS '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.304 NAME 'dgTimeOut' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.305 NAME 'dgAllowUnknown' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.306 NAME 'dgAllowDuplicates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.546 NAME 'allowAliasToAncestor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.39.4.1.1 NAME 'sASSecurityDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Security DN' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.4.1.2 NAME 'sASServiceDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Service DN' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.4.1.3 NAME 'sASSecretStore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:SecretStore' )",
    +            "( 2.16.840.1.113719.1.39.4.1.4 NAME 'sASSecretStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Key' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.4.1.5 NAME 'sASSecretStoreData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Data' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.4.1.6 NAME 'sASPKIStoreKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:PKIStore:Keys' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.1 NAME 'nDSPKIPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.2 NAME 'nDSPKIPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key' )",
    +            "( 2.16.840.1.113719.1.48.4.1.3 NAME 'nDSPKIPublicKeyCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.4 NAME 'nDSPKICertificateChain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.16 NAME 'nDSPKIPublicKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key EC' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.17 NAME 'nDSPKIPrivateKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key EC' )",
    +            "( 2.16.840.1.113719.1.48.4.1.18 NAME 'nDSPKIPublicKeyCertificateEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate EC' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.19 NAME 'crossCertificatePairEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair EC' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKICertificateChainEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain EC' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.5 NAME 'nDSPKIParentCA' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA' )",
    +            "( 2.16.840.1.113719.1.48.4.1.6 NAME 'nDSPKIParentCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA DN' )",
    +            "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKISuiteBMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'NDSPKI:SuiteBMode' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.7 NAME 'nDSPKIKeyFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Key File' )",
    +            "( 2.16.840.1.113719.1.48.4.1.8 NAME 'nDSPKISubjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Subject Name' )",
    +            "( 2.16.840.1.113719.1.48.4.1.11 NAME 'nDSPKIGivenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Given Name' )",
    +            "( 2.16.840.1.113719.1.48.4.1.9 NAME 'nDSPKIKeyMaterialDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Key Material DN' )",
    +            "( 2.16.840.1.113719.1.48.4.1.10 NAME 'nDSPKITreeCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Tree CA DN' )",
    +            "( 2.5.4.59 NAME 'cAECCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.12 NAME 'nDSPKIUserCertificateInfo' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'NDSPKI:userCertificateInfo' )",
    +            "( 2.16.840.1.113719.1.48.4.1.13 NAME 'nDSPKITrustedRootCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Trusted Root Certificate' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.14 NAME 'nDSPKINotBefore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not Before' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.15 NAME 'nDSPKINotAfter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not After' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.101 NAME 'nDSPKISDKeyServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:SD Key Server DN' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.102 NAME 'nDSPKISDKeyStruct' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:SD Key Struct' )",
    +            "( 2.16.840.1.113719.1.48.4.1.103 NAME 'nDSPKISDKeyCert' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key Cert' )",
    +            "( 2.16.840.1.113719.1.48.4.1.104 NAME 'nDSPKISDKeyID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key ID' )",
    +            "( 2.16.840.1.113719.1.39.4.1.105 NAME 'nDSPKIKeystore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'NDSPKI:Keystore' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.4.1.106 NAME 'ndspkiAdditionalRoots' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.2.3 NAME 'masvLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.2.4 NAME 'masvProposedLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.2.5 NAME 'masvDefaultRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.2.6 NAME 'masvAuthorizedRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.2.7 NAME 'masvDomainPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.8 NAME 'masvClearanceNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.9 NAME 'masvLabelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.10 NAME 'masvLabelSecrecyLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.11 NAME 'masvLabelSecrecyCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.12 NAME 'masvLabelIntegrityLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.13 NAME 'masvLabelIntegrityCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.14 NAME 'masvPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.16 NAME 'masvNDSAttributeLabels' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.31.4.1.15 NAME 'masvPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.2 NAME 'sASLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'SAS:Login Sequence' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.8 NAME 'sASLoginPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy Update' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'sasNMASProductOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.74 NAME 'sasAuditConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.14 NAME 'sASNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.15 NAME 'sASPolicyCredentials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Policy Credentials' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.16 NAME 'sASPolicyMethods' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Methods' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.17 NAME 'sASPolicyObjectVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Policy Object Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.18 NAME 'sASPolicyServiceSubtypes' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Service Subtypes' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.19 NAME 'sASPolicyServices' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Services' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.20 NAME 'sASPolicyUsers' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Users' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.21 NAME 'sASAllowNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:Allow NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.9 NAME 'sASMethodIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Identifier' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.10 NAME 'sASMethodVendor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Vendor' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.11 NAME 'sASAdvisoryMethodGrade' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Advisory Method Grade' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.12 NAME 'sASVendorSupport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Vendor Support' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.13 NAME 'sasCertificateSearchContainers' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.70 NAME 'sasNMASMethodConfigData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.22 NAME 'sASLoginClientMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.23 NAME 'sASLoginServerMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.24 NAME 'sASLoginClientMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.25 NAME 'sASLoginServerMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.26 NAME 'sasLoginClientMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.27 NAME 'sasLoginServerMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.28 NAME 'sasLoginClientMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.29 NAME 'sasLoginServerMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.30 NAME 'sasLoginClientMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.31 NAME 'sasLoginServerMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.32 NAME 'sasLoginClientMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.33 NAME 'sasLoginServerMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.34 NAME 'sasLoginClientMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.35 NAME 'sasLoginServerMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1000 NAME 'sasLoginClientMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1001 NAME 'sasLoginServerMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1002 NAME 'sasLoginClientMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1003 NAME 'sasLoginServerMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1004 NAME 'sasLoginClientMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1005 NAME 'sasLoginServerMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1006 NAME 'sasLoginClientMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1007 NAME 'sasLoginServerMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1008 NAME 'sasLoginClientMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1009 NAME 'sasLoginServerMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasLoginServerMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasLoginClientMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.78 NAME 'sasUnsignedMethodModules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.79 NAME 'sasServerModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.80 NAME 'sasServerModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.81 NAME 'sasSASLMechanismName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.82 NAME 'sasSASLMechanismEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.83 NAME 'sasClientModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.84 NAME 'sasClientModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.36 NAME 'sASLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Method Container DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.37 NAME 'sASLoginPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.63 NAME 'sasPostLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'rADIUSActiveConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Active Connections' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.39 NAME 'rADIUSAgedInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Aged Interval' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.40 NAME 'rADIUSAttributeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Attribute List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.41 NAME 'rADIUSAttributeLists' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Attribute Lists' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.42 NAME 'rADIUSClient' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Client' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.43 NAME 'rADIUSCommonNameResolution' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Common Name Resolution' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.44 NAME 'rADIUSConcurrentLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Concurrent Limit' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.45 NAME 'rADIUSConnectionHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Connection History' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.46 NAME 'rADIUSDASVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:DAS Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.47 NAME 'rADIUSDefaultProfile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Default Profile' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.48 NAME 'rADIUSDialAccessGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'RADIUS:Dial Access Group' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.49 NAME 'rADIUSEnableCommonNameLogin' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Common Name Login' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.50 NAME 'rADIUSEnableDialAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Dial Access' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.51 NAME 'rADIUSInterimAcctingTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Interim Accting Timeout' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.52 NAME 'rADIUSLookupContexts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'RADIUS:Lookup Contexts' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.53 NAME 'rADIUSMaxDASHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Max DAS History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.54 NAME 'rADIUSMaximumHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Maximum History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.55 NAME 'rADIUSPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Password' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.56 NAME 'rADIUSPasswordPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Password Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.57 NAME 'rADIUSPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Private Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.58 NAME 'rADIUSProxyContext' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Proxy Context' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.59 NAME 'rADIUSProxyDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Domain' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.60 NAME 'rADIUSProxyTarget' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Target' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.61 NAME 'rADIUSPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Public Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.62 NAME 'rADIUSServiceList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Service List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.3 NAME 'sASLoginSecret' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.4 NAME 'sASLoginSecretKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret Key' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.5 NAME 'sASEncryptionType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Encryption Type' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.6 NAME 'sASLoginConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.7 NAME 'sASLoginConfigurationKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration Key' X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.73 NAME 'sasDefaultLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.64 NAME 'sasAuthorizedLoginSequences' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.69 NAME 'sasAllowableSubjectNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.71 NAME 'sasLoginFailureDelay' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.72 NAME 'sasMethodVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1010 NAME 'sasUpdateLoginInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasOTPEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasOTPCounter' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1013 NAME 'sasOTPLookAheadWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1014 NAME 'sasOTPDigits' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1015 NAME 'sasOTPReSync' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.42.1.0.1016 NAME 'sasUpdateLoginTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.6.4.1 NAME 'snmpGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.6.4.2 NAME 'snmpServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.6.4.3 NAME 'snmpTrapConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.6.4.4 NAME 'snmpTrapDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.6.4.5 NAME 'snmpTrapInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.6.4.6 NAME 'snmpTrapDisable' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.528 NAME 'ndapPartitionPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.529 NAME 'ndapClassPasswordMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.530 NAME 'ndapPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.537 NAME 'ndapPartitionLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.538 NAME 'ndapClassLoginMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.539 NAME 'ndapLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.1 NAME 'nspmPasswordKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.2 NAME 'nspmPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.3 NAME 'nspmDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.4 NAME 'nspmPasswordHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.5 NAME 'nspmAdministratorChangeCount' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.6 NAME 'nspmPasswordPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.7 NAME 'nspmPreviousDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.8 NAME 'nspmDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 1.3.6.1.4.1.42.2.27.8.1.16 NAME 'pwdChangedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 1.3.6.1.4.1.42.2.27.8.1.17 NAME 'pwdAccountLockedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 1.3.6.1.4.1.42.2.27.8.1.19 NAME 'pwdFailureTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
    +            "( 2.16.840.1.113719.1.39.43.4.100 NAME 'nspmConfigurationOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.102 NAME 'nspmChangePasswordMessage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.103 NAME 'nspmPasswordHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.104 NAME 'nspmPasswordHistoryExpiration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 1.3.6.1.4.1.42.2.27.8.1.4 NAME 'pwdInHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.105 NAME 'nspmMinPasswordLifetime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.106 NAME 'nspmAdminsDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.107 NAME 'nspmPasswordACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 )",
    +            "( 2.16.840.1.113719.1.39.43.4.200 NAME 'nspmMaximumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.201 NAME 'nspmMinUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.202 NAME 'nspmMaxUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.203 NAME 'nspmMinLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.204 NAME 'nspmMaxLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.205 NAME 'nspmNumericCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.206 NAME 'nspmNumericAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.207 NAME 'nspmNumericAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.208 NAME 'nspmMinNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.209 NAME 'nspmMaxNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.210 NAME 'nspmSpecialCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.211 NAME 'nspmSpecialAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.212 NAME 'nspmSpecialAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.213 NAME 'nspmMinSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.214 NAME 'nspmMaxSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.215 NAME 'nspmMaxRepeatedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.216 NAME 'nspmMaxConsecutiveCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.217 NAME 'nspmMinUniqueCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.218 NAME 'nspmDisallowedAttributeValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.219 NAME 'nspmExcludeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.220 NAME 'nspmCaseSensitive' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.221 NAME 'nspmPolicyPrecedence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.222 NAME 'nspmExtendedCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.223 NAME 'nspmExtendedAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.224 NAME 'nspmExtendedAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.225 NAME 'nspmMinExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.226 NAME 'nspmMaxExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.227 NAME 'nspmUpperAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.228 NAME 'nspmUpperAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.229 NAME 'nspmLowerAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.230 NAME 'nspmLowerAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.231 NAME 'nspmComplexityRules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.233 NAME 'nspmAD2K8Syntax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.234 NAME 'nspmAD2K8maxViolation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.235 NAME 'nspmXCharLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.236 NAME 'nspmXCharHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.237 NAME 'nspmUnicodeAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.238 NAME 'nspmNonAlphaCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.239 NAME 'nspmMinNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.240 NAME 'nspmMaxNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.241 NAME 'nspmGraceLoginHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.300 NAME 'nspmPolicyAgentContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.301 NAME 'nspmPolicyAgentNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.302 NAME 'nspmPolicyAgentWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.303 NAME 'nspmPolicyAgentSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.304 NAME 'nspmPolicyAgentLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.305 NAME 'nspmPolicyAgentAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.43.4.306 NAME 'nspmPolicyAgentHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 0.9.2342.19200300.100.1.55 NAME 'audio' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113730.3.1.1 NAME 'carLicense' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113730.3.1.241 NAME 'displayName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 1.3.6.1.4.1.250.1.57 NAME 'labeledUri' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 0.9.2342.19200300.100.1.7 NAME 'ldapPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
    +            "( 0.9.2342.19200300.100.1.21 NAME 'secretary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113730.3.1.40 NAME 'userSMIMECertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.12.4.1.0 NAME 'auditAEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:A Encryption Key' )",
    +            "( 2.16.840.1.113719.1.12.4.2.0 NAME 'auditBEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:B Encryption Key' )",
    +            "( 2.16.840.1.113719.1.12.4.3.0 NAME 'auditContents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Contents' )",
    +            "( 2.16.840.1.113719.1.12.4.4.0 NAME 'auditType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Type' )",
    +            "( 2.16.840.1.113719.1.12.4.5.0 NAME 'auditCurrentEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Current Encryption Key' )",
    +            "( 2.16.840.1.113719.1.12.4.6.0 NAME 'auditFileLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Audit:File Link' )",
    +            "( 2.16.840.1.113719.1.12.4.7.0 NAME 'auditLinkList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Audit:Link List' )",
    +            "( 2.16.840.1.113719.1.12.4.8.0 NAME 'auditPath' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Path' )",
    +            "( 2.16.840.1.113719.1.12.4.9.0 NAME 'auditPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Policy' )",
    +            "( 2.16.840.1.113719.1.38.4.1.1 NAME 'wANMANWANPolicy' SYNTAX 2.16.840.1.113719.1.1.5.1.13{64512} X-NDS_NAME 'WANMAN:WAN Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.38.4.1.2 NAME 'wANMANLANAreaMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'WANMAN:LAN Area Membership' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.38.4.1.3 NAME 'wANMANCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'WANMAN:Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.38.4.1.4 NAME 'wANMANDefaultCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'WANMAN:Default Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.135.4.30 NAME 'rbsAssignedRoles' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.31 NAME 'rbsContent' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.32 NAME 'rbsContentMembership' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.33 NAME 'rbsEntryPoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.34 NAME 'rbsMember' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.35 NAME 'rbsOwnedCollections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.135.4.36 NAME 'rbsPath' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.37 NAME 'rbsParameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
    +            "( 2.16.840.1.113719.1.135.4.38 NAME 'rbsTaskRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.135.4.39 NAME 'rbsTrusteeOf' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.40 NAME 'rbsType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '256' )",
    +            "( 2.16.840.1.113719.1.135.4.41 NAME 'rbsURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.42 NAME 'rbsTaskTemplates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.135.4.43 NAME 'rbsTaskTemplatesURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.44 NAME 'rbsGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.45 NAME 'rbsPageMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
    +            "( 2.16.840.1.113719.1.135.4.46 NAME 'rbsTargetObjectType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.135.4.47 NAME 'rbsContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.48 NAME 'rbsXMLInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.135.4.51 NAME 'rbsAssignedRoles2' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
    +            "( 2.16.840.1.113719.1.135.4.52 NAME 'rbsOwnedCollections2' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.1.4.1.540 NAME 'prSyncPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.541 NAME 'prSyncAttributes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.542 NAME 'dsEncryptedReplicationConfig' SYNTAX 2.16.840.1.113719.1.1.5.1.19 )",
    +            "( 2.16.840.1.113719.1.1.4.1.543 NAME 'encryptionPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.544 NAME 'attrEncryptionRequiresSecure' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.545 NAME 'attrEncryptionDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.16 NAME 'ndspkiCRLFileName' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.17 NAME 'ndspkiStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.18 NAME 'ndspkiIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.19 NAME 'ndspkiNextIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.20 NAME 'ndspkiAttemptTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.21 NAME 'ndspkiTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.22 NAME 'ndspkiCRLMaxProcessingInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.23 NAME 'ndspkiCRLNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.24 NAME 'ndspkiDistributionPoints' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.25 NAME 'ndspkiCRLProcessData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.26 NAME 'ndspkiCRLConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.27 NAME 'ndspkiCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.28 NAME 'ndspkiCRLContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.29 NAME 'ndspkiIssuedCertContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.30 NAME 'ndspkiDistributionPointDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.31 NAME 'ndspkiCRLConfigurationDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.32 NAME 'ndspkiDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} )",
    +            "( 2.5.4.38 NAME 'authorityRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiAuthorityRevocationList' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.39 NAME 'certificateRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiCertificateRevocationList' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.5.4.53 NAME 'deltaRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiDeltaRevocationList' X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.36 NAME 'ndspkiTrustedRootList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.37 NAME 'ndspkiSecurityRightsLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.48.4.1.38 NAME 'ndspkiKMOExport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.48.4.1.39 NAME 'ndspkiCRLECConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.40 NAME 'ndspkiCRLType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.41 NAME 'ndspkiCRLExtendValidity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.42 NAME 'ndspkiDefaultRSAKeySize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.43 NAME 'ndspkiDefaultECCurve' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.48.4.1.44 NAME 'ndspkiDefaultCertificateLife' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.7.4.1 NAME 'notfSMTPEmailHost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.7.4.2 NAME 'notfSMTPEmailFrom' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.7.4.3 NAME 'notfSMTPEmailUserName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.7.4.5 NAME 'notfMergeTemplateData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.7.4.6 NAME 'notfMergeTemplateSubject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.1 NAME 'nsimRequiredQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.2 NAME 'nsimRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.3 NAME 'nsimNumberRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.4 NAME 'nsimMinResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.5 NAME 'nsimMaxResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.6 NAME 'nsimForgottenLoginConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.7 NAME 'nsimForgottenAction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.8 NAME 'nsimAssignments' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.9 NAME 'nsimChallengeSetDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.10 NAME 'nsimChallengeSetGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.11 NAME 'nsimPwdRuleEnforcement' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.12 NAME 'nsimHint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.39.44.4.13 NAME 'nsimPasswordReminder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.4 NAME 'sssProxyStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.266.4.5 NAME 'sssProxyStoreSecrets' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.266.4.6 NAME 'sssActiveServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.266.4.7 NAME 'sssCacheRefreshInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.8 NAME 'sssAdminList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.266.4.9 NAME 'sssAdminGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.10 NAME 'sssEnableReadTimestamps' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.11 NAME 'sssDisableMasterPasswords' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.12 NAME 'sssEnableAdminAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.266.4.13 NAME 'sssReadSecretPolicies' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
    +            "( 2.16.840.1.113719.1.266.4.14 NAME 'sssServerPolicyOverrideDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.1.531 NAME 'eDirCloneSource' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.532 NAME 'eDirCloneKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_HIDDEN '1' )",
    +            "( 2.16.840.1.113719.1.1.4.1.533 NAME 'eDirCloneLock' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
    +            "( 2.16.840.1.113719.1.1.4.711 NAME 'groupMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
    +            "( 2.16.840.1.113719.1.1.4.712 NAME 'nestedConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
    +            "( 2.16.840.1.113719.1.1.4.717 NAME 'xdasDSConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.1.4.718 NAME 'xdasConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.1.4.719 NAME 'xdasVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )",
    +            "( 2.16.840.1.113719.1.347.4.79 NAME 'NAuditInstrumentation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.347.4.2 NAME 'NAuditLoggingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
    +            "( 2.16.840.1.113719.1.1.4.724 NAME 'cefConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
    +            "( 2.16.840.1.113719.1.1.4.725 NAME 'cefVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )"
    +        ],
    +        "createTimestamp": [],
    +        "dITContentRules": [],
    +        "dITStructureRules": [],
    +        "ldapSyntaxes": [
    +            "( 1.3.6.1.4.1.1466.115.121.1.1 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.2 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.3 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.4 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.5 X-NDS_SYNTAX '21' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.6 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.7 X-NDS_SYNTAX '7' )",
    +            "( 2.16.840.1.113719.1.1.5.1.6 X-NDS_SYNTAX '6' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.8 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.9 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.10 X-NDS_SYNTAX '9' )",
    +            "( 2.16.840.1.113719.1.1.5.1.22 X-NDS_SYNTAX '22' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.11 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SYNTAX '1' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.13 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.14 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.15 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.16 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.17 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.18 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.19 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.20 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.21 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.22 X-NDS_SYNTAX '11' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.23 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.24 X-NDS_SYNTAX '24' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.25 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.26 X-NDS_SYNTAX '2' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_SYNTAX '8' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.28 X-NDS_SYNTAX '9' )",
    +            "( 1.2.840.113556.1.4.906 X-NDS_SYNTAX '29' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.54 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.56 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.57 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.29 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.30 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.31 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.32 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.33 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.55 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.34 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.35 X-NDS_SYNTAX '3' )",
    +            "( 2.16.840.1.113719.1.1.5.1.19 X-NDS_SYNTAX '19' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.36 X-NDS_SYNTAX '5' )",
    +            "( 2.16.840.1.113719.1.1.5.1.17 X-NDS_SYNTAX '17' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.37 X-NDS_SYNTAX '3' )",
    +            "( 2.16.840.1.113719.1.1.5.1.13 X-NDS_SYNTAX '13' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.40 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_SYNTAX '20' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.39 X-NDS_SYNTAX '3' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.41 X-NDS_SYNTAX '18' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.43 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.44 X-NDS_SYNTAX '4' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.42 X-NDS_SYNTAX '9' )",
    +            "( 2.16.840.1.113719.1.1.5.1.16 X-NDS_SYNTAX '16' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.58 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.45 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.46 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.47 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.48 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.49 X-NDS_SYNTAX '9' )",
    +            "( 2.16.840.1.113719.1.1.5.1.12 X-NDS_SYNTAX '12' )",
    +            "( 2.16.840.1.113719.1.1.5.1.23 X-NDS_SYNTAX '23' )",
    +            "( 2.16.840.1.113719.1.1.5.1.15 X-NDS_SYNTAX '15' )",
    +            "( 2.16.840.1.113719.1.1.5.1.14 X-NDS_SYNTAX '14' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.50 X-NDS_SYNTAX '10' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.51 X-NDS_SYNTAX '9' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.52 X-NDS_SYNTAX '9' )",
    +            "( 2.16.840.1.113719.1.1.5.1.25 X-NDS_SYNTAX '25' )",
    +            "( 1.3.6.1.4.1.1466.115.121.1.53 X-NDS_SYNTAX '9' )",
    +            "( 2.16.840.1.113719.1.1.5.1.26 X-NDS_SYNTAX '26' )",
    +            "( 2.16.840.1.113719.1.1.5.1.27 X-NDS_SYNTAX '27' )"
    +        ],
    +        "matchingRuleUse": [],
    +        "matchingRules": [],
    +        "modifyTimestamp": [
    +            "20190831135835Z"
    +        ],
    +        "nameForms": [],
    +        "objectClass": [
    +            "top",
    +            "subschema"
    +        ],
    +        "objectClasses": [
    +            "( 2.5.6.0 NAME 'Top' STRUCTURAL MUST objectClass MAY ( cAPublicKey $ cAPrivateKey $ certificateValidityInterval $ authorityRevocation $ lastReferencedTime $ equivalentToMe $ ACL $ backLink $ binderyProperty $ Obituary $ Reference $ revision $ ndsCrossCertificatePair $ certificateRevocation $ usedBy $ GUID $ otherGUID $ DirXML-Associations $ creatorsName $ modifiersName $ objectVersion $ auxClassCompatibility $ unknownBaseClass $ unknownAuxiliaryClass $ masvProposedLabel $ masvDefaultRange $ masvAuthorizedRange $ auditFileLink $ rbsAssignedRoles $ rbsOwnedCollections $ rbsAssignedRoles2 $ rbsOwnedCollections2 ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '16#subtree#[Creator]#[Entry Rights]' )",
    +            "( 1.3.6.1.4.1.42.2.27.1.2.1 NAME 'aliasObject' SUP Top STRUCTURAL MUST aliasedObjectName X-NDS_NAME 'Alias' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.2 NAME 'Country' SUP Top STRUCTURAL MUST c MAY ( description $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING 'c' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'domain' ) X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.3 NAME 'Locality' SUP Top STRUCTURAL MAY ( description $ l $ seeAlso $ st $ street $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING ( 'l' 'st' ) X-NDS_CONTAINMENT ( 'Country' 'organizationalUnit' 'Locality' 'Organization' 'domain' ) X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.4 NAME 'Organization' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST o MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'o' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'domain' ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
    +            "( 2.5.6.5 NAME 'organizationalUnit' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST ou MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'ou' X-NDS_CONTAINMENT ( 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Unit' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
    +            "( 2.5.6.8 NAME 'organizationalRole' SUP Top STRUCTURAL MUST cn MAY ( description $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ roleOccupant $ seeAlso $ st $ street $ telephoneNumber $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Role' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.9 NAME ( 'groupOfNames' 'group' 'groupOfUniqueNames' ) SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ owner $ seeAlso $ groupID $ fullName $ eMailAddress $ mailboxLocation $ mailboxID $ Profile $ profileMembership $ loginScript $ businessCategory $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.6 NAME 'Person' SUP ndsLoginProperties STRUCTURAL MUST ( cn $ sn ) MAY ( description $ seeAlso $ telephoneNumber $ fullName $ givenName $ initials $ generationQualifier $ uid $ assistant $ assistantPhone $ city $ st $ company $ co $ directReports $ manager $ mailstop $ mobile $ personalTitle $ pager $ workforceID $ instantMessagingID $ preferredName $ photo $ jobCode $ siteLocation $ employeeStatus $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ otherPhoneNumber $ managerWorkforceID $ roomNumber $ jackNumber $ departmentNumber $ vehicleInformation $ accessCardNumber $ isManager $ userPassword ) X-NDS_NAMING ( 'cn' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.5.6.7 NAME 'organizationalPerson' SUP Person STRUCTURAL MAY ( facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ mailboxLocation $ mailboxID $ uid $ mail $ employeeNumber $ destinationIndicator $ internationaliSDNNumber $ preferredDeliveryMethod $ registeredAddress $ teletexTerminalIdentifier $ telexNumber $ x121Address $ businessCategory $ roomNumber $ x500UniqueIdentifier ) X-NDS_NAMING ( 'cn' 'ou' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Person' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' SUP organizationalPerson STRUCTURAL MAY ( groupMembership $ ndsHomeDirectory $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ printJobConfiguration $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ messageServer $ Language $ ndsUID $ lockedByIntruder $ serverHolds $ lastLoginTime $ typeCreatorMap $ higherPrivileges $ printerControl $ securityFlags $ profileMembership $ Timezone $ sASServiceDN $ sASSecretStore $ sASSecretStoreKey $ sASSecretStoreData $ sASPKIStoreKeys $ userCertificate $ nDSPKIUserCertificateInfo $ nDSPKIKeystore $ rADIUSActiveConnections $ rADIUSAttributeLists $ rADIUSConcurrentLimit $ rADIUSConnectionHistory $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSPassword $ rADIUSServiceList $ audio $ businessCategory $ carLicense $ departmentNumber $ employeeNumber $ employeeType $ displayName $ givenName $ homePhone $ homePostalAddress $ initials $ jpegPhoto $ labeledUri $ mail $ manager $ mobile $ o $ pager $ ldapPhoto $ preferredLanguage $ roomNumber $ secretary $ uid $ userSMIMECertificate $ x500UniqueIdentifier $ userPKCS12 $ sssProxyStoreKey $ sssProxyStoreSecrets $ sssServerPolicyOverrideDN ) X-NDS_NAME 'User' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#loginScript' '1#subtree#[Root Template]#[Entry Rights]' '2#entry#[Public]#messageServer' '2#entry#[Root Template]#groupMembership' '6#entry#[Self]#printJobConfiguration' '2#entry#[Root Template]#networkAddress') )",
    +            "( 2.5.6.14 NAME 'Device' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ networkAddress $ ou $ o $ owner $ seeAlso $ serialNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.4 NAME 'Computer' SUP Device STRUCTURAL MAY ( operator $ server $ status ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.17 NAME 'Printer' SUP Device STRUCTURAL MAY ( Cartridge $ printerConfiguration $ defaultQueue $ hostDevice $ printServer $ Memory $ networkAddressRestriction $ notify $ operator $ pageDescriptionLanguage $ queue $ status $ supportedTypefaces ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.21 NAME 'Resource' SUP Top ABSTRACT MUST cn MAY ( description $ hostResourceName $ l $ ou $ o $ seeAlso $ Uses ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.20 NAME 'Queue' SUP Resource STRUCTURAL MUST queueDirectory MAY ( Device $ operator $ server $ User $ networkAddress $ Volume $ hostServer ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
    +            "( 2.16.840.1.113719.1.1.6.1.3 NAME 'binderyQueue' SUP Queue STRUCTURAL MUST binderyType X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_NAME 'Bindery Queue' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
    +            "( 2.16.840.1.113719.1.1.6.1.26 NAME 'Volume' SUP Resource STRUCTURAL MUST hostServer MAY status X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Root Template]#hostResourceName' '2#entry#[Root Template]#hostServer') )",
    +            "( 2.16.840.1.113719.1.1.6.1.7 NAME 'directoryMap' SUP Resource STRUCTURAL MUST hostServer MAY path X-NDS_NAME 'Directory Map' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.19 NAME 'Profile' SUP Top STRUCTURAL MUST ( cn $ loginScript ) MAY ( description $ l $ ou $ o $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.22 NAME 'Server' SUP Top ABSTRACT MUST cn MAY ( description $ hostDevice $ l $ ou $ o $ privateKey $ publicKey $ Resource $ seeAlso $ status $ User $ Version $ networkAddress $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ fullName $ securityEquals $ securityFlags $ Timezone $ ndapClassPasswordMgmt $ ndapClassLoginMgmt ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Public]#networkAddress' '16#subtree#[Self]#[Entry Rights]') )",
    +            "( 2.16.840.1.113719.1.1.6.1.10 NAME 'ncpServer' SUP Server STRUCTURAL MAY ( operator $ supportedServices $ messagingServer $ dsRevision $ permanentConfigParms $ ndsPredicateStatsDN $ languageId $ indexDefinition $ CachedAttrsOnExtRefs $ NCPKeyMaterialName $ NDSRightsToMonitor $ ldapServerDN $ httpServerDN $ emboxConfig $ sASServiceDN $ cACertificate $ cAECCertificate $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKICertificateChain $ nDSPKIParentCADN $ nDSPKISDKeyID $ nDSPKISDKeyStruct $ snmpGroupDN $ wANMANWANPolicy $ wANMANLANAreaMembership $ wANMANCost $ wANMANDefaultCost $ encryptionPolicyDN $ eDirCloneSource $ eDirCloneLock $ xdasDSConfiguration $ xdasConfiguration $ xdasVersion $ NAuditLoggingServer $ NAuditInstrumentation $ cefConfiguration $ cefVersion ) X-NDS_NAME 'NCP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#messagingServer' )",
    +            "( 2.16.840.1.113719.1.1.6.1.18 NAME 'printServer' SUP Server STRUCTURAL MAY ( operator $ printer $ sAPName ) X-NDS_NAME 'Print Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
    +            "( 2.16.840.1.113719.1.1.6.1.31 NAME 'CommExec' SUP Server STRUCTURAL MAY networkAddressRestriction X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.2 NAME 'binderyObject' SUP Top STRUCTURAL MUST ( binderyObjectRestriction $ binderyType $ cn ) X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Bindery Object' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.15 NAME 'Partition' AUXILIARY MAY ( Convergence $ partitionCreationTime $ Replica $ inheritedACL $ lowConvergenceSyncInterval $ receivedUpTo $ synchronizedUpTo $ authorityRevocation $ certificateRevocation $ cAPrivateKey $ cAPublicKey $ ndsCrossCertificatePair $ lowConvergenceResetTime $ highConvergenceSyncInterval $ partitionControl $ replicaUpTo $ partitionStatus $ transitiveVector $ purgeVector $ synchronizationTolerance $ obituaryNotify $ localReceivedUpTo $ federationControl $ syncPanePoint $ syncWindowVector $ EBAPartitionConfiguration $ authoritative $ allowAliasToAncestor $ sASSecurityDN $ masvLabel $ ndapPartitionPasswordMgmt $ ndapPartitionLoginMgmt $ prSyncPolicyDN $ dsEncryptedReplicationConfig ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.0 NAME 'aFPServer' SUP Server STRUCTURAL MAY ( serialNumber $ supportedConnections ) X-NDS_NAME 'AFP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.27 NAME 'messagingServer' SUP Server STRUCTURAL MAY ( messagingDatabaseLocation $ messageRoutingGroup $ Postmaster $ supportedServices $ messagingServerType $ supportedGateway ) X-NDS_NAME 'Messaging Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#status' '2#entry#[Public]#messagingServerType' '2#entry#[Public]#messagingDatabaseLocation') )",
    +            "( 2.16.840.1.113719.1.1.6.1.28 NAME 'messageRoutingGroup' SUP groupOfNames STRUCTURAL X-NDS_NAME 'Message Routing Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]') )",
    +            "( 2.16.840.1.113719.1.1.6.1.29 NAME 'externalEntity' SUP Top STRUCTURAL MUST cn MAY ( description $ seeAlso $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ externalName $ mailboxLocation $ mailboxID ) X-NDS_NAMING ( 'cn' 'ou' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'External Entity' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#externalName' )",
    +            "( 2.16.840.1.113719.1.1.6.1.30 NAME 'List' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ eMailAddress $ mailboxLocation $ mailboxID $ owner $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Root Template]#member' )",
    +            "( 2.16.840.1.113719.1.1.6.1.32 NAME 'treeRoot' SUP Top STRUCTURAL MUST T MAY ( EBATreeConfiguration $ sssActiveServerList ) X-NDS_NAMING 'T' X-NDS_NAME 'Tree Root' X-NDS_NONREMOVABLE '1' )",
    +            "( 0.9.2342.19200300.100.4.13 NAME 'domain' SUP ( Top $ ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST dc MAY ( searchGuide $ o $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ associatedName $ description $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'dc' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NONREMOVABLE '1' )",
    +            "( 1.3.6.1.4.1.1466.344 NAME 'dcObject' AUXILIARY MUST dc X-NDS_NAMING 'dc' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.33 NAME 'ndsLoginProperties' SUP Top ABSTRACT MAY ( groupMembership $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ Language $ lockedByIntruder $ serverHolds $ lastLoginTime $ higherPrivileges $ securityFlags $ profileMembership $ Timezone $ loginActivationTime $ UTF8LoginScript $ loginScriptCharset $ sASNDSPasswordWindow $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAllowableSubjectNames $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPCounter $ sasOTPDigits $ sasOTPReSync $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordKey $ nspmPassword $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ nsimHint $ nsimPasswordReminder $ userPassword ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.141.6.1 NAME 'federationBoundary' AUXILIARY MUST federationBoundaryType MAY ( federationControl $ federationDNSName $ federationSearchPath ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.34 NAME 'ndsContainerLoginProperties' SUP Top ABSTRACT MAY ( loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPDigits $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordPolicyDN ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.27.6.3 NAME 'ndsPredicateStats' SUP Top STRUCTURAL MUST ( cn $ ndsPredicateState $ ndsPredicateFlush ) MAY ( ndsPredicate $ ndsPredicateTimeout $ ndsPredicateUseValues ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.400.1 NAME 'edirSchemaVersion' SUP Top ABSTRACT MAY edirSchemaFlagVersion X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.47 NAME 'immediateSuperiorReference' AUXILIARY MAY ref X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.27.6.1 NAME 'ldapServer' SUP Top STRUCTURAL MUST cn MAY ( ldapHostServer $ ldapGroupDN $ ldapTraceLevel $ ldapServerBindLimit $ ldapServerIdleTimeout $ lDAPUDPPort $ lDAPSearchSizeLimit $ lDAPSearchTimeLimit $ lDAPLogLevel $ lDAPLogFilename $ lDAPBackupLogFilename $ lDAPLogSizeLimit $ Version $ searchSizeLimit $ searchTimeLimit $ ldapEnableTCP $ ldapTCPPort $ ldapEnableSSL $ ldapSSLPort $ ldapKeyMaterialName $ filteredReplicaUsage $ extensionInfo $ nonStdClientSchemaCompatMode $ sslEnableMutualAuthentication $ ldapEnablePSearch $ ldapMaximumPSearchOperations $ ldapIgnorePSearchLimitsForEvents $ ldapTLSTrustedRootContainer $ ldapEnableMonitorEvents $ ldapMaximumMonitorEventsLoad $ ldapTLSRequired $ ldapTLSVerifyClientCertificate $ ldapConfigVersion $ ldapDerefAlias $ ldapNonStdAllUserAttrsMode $ ldapBindRestrictions $ ldapDefaultReferralBehavior $ ldapReferral $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ ldapLBURPNumWriterThreads $ ldapInterfaces $ ldapChainSecureRequired $ ldapStdCompliance $ ldapDerefAliasOnAuth $ ldapGeneralizedTime $ ldapPermissiveModify $ ldapSSLConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Server' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.27.6.2 NAME 'ldapGroup' SUP Top STRUCTURAL MUST cn MAY ( ldapReferral $ ldapServerList $ ldapAllowClearTextPassword $ ldapAnonymousIdentity $ lDAPSuffix $ ldapAttributeMap $ ldapClassMap $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ transitionGroupDN $ ldapAttributeList $ ldapClassList $ ldapConfigVersion $ Version $ ldapDefaultReferralBehavior $ ldapTransitionBackLink $ ldapSSLConfig $ referralIncludeFilter $ referralExcludeFilter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Group' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.22 NAME 'pkiCA' AUXILIARY MAY ( cACertificate $ certificateRevocationList $ authorityRevocationList $ crossCertificatePair $ attributeCertificate $ publicKey $ privateKey $ networkAddress $ loginTime $ lastLoginTime $ cAECCertificate $ crossCertificatePairEC ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.21 NAME 'pkiUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.15 NAME 'strongAuthenticationUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.11 NAME 'applicationProcess' SUP Top STRUCTURAL MUST cn MAY ( seeAlso $ ou $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
    +            "( 2.5.6.12 NAME 'applicationEntity' SUP Top STRUCTURAL MUST ( presentationAddress $ cn ) MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
    +            "( 2.5.6.13 NAME 'dSA' SUP applicationEntity STRUCTURAL MAY knowledgeInformation X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
    +            "( 2.5.6.16 NAME 'certificationAuthority' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY crossCertificatePair X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.18 NAME 'userSecurityInformation' AUXILIARY MAY supportedAlgorithms X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.20 NAME 'dmd' SUP ndsLoginProperties AUXILIARY MUST dmdName MAY ( searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ description $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.5.6.16.2 NAME 'certificationAuthority-V2' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY ( crossCertificatePair $ deltaRevocationList ) X-NDS_NAME 'certificationAuthorityVer2' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.3.6.1 NAME 'httpServer' SUP Top STRUCTURAL MUST cn MAY ( httpHostServerDN $ httpThreadsPerCPU $ httpIOBufferSize $ httpRequestTimeout $ httpKeepAliveRequestTimeout $ httpSessionTimeout $ httpKeyMaterialObject $ httpTraceLevel $ httpAuthRequiresTLS $ httpDefaultClearPort $ httpDefaultTLSPort $ httpBindRestrictions ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.64.6.1.1 NAME 'Template' SUP Top STRUCTURAL MUST cn MAY ( trusteesOfNewObject $ newObjectSDSRights $ newObjectSFSRights $ setupScript $ runSetupScript $ membersOfTemplate $ volumeSpaceRestrictions $ setPasswordAfterCreate $ homeDirectoryRights $ accountBalance $ allowUnlimitedCredit $ description $ eMailAddress $ facsimileTelephoneNumber $ groupMembership $ higherPrivileges $ ndsHomeDirectory $ l $ Language $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginMaximumSimultaneous $ loginScript $ mailboxID $ mailboxLocation $ member $ messageServer $ minimumAccountBalance $ networkAddressRestriction $ newObjectSSelfRights $ ou $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ Profile $ st $ street $ securityEquals $ securityFlags $ seeAlso $ telephoneNumber $ title $ assistant $ assistantPhone $ city $ company $ co $ manager $ managerWorkforceID $ mailstop $ siteLocation $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ departmentNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.8.6.1 NAME 'homeInfo' AUXILIARY MAY ( homeCity $ homeEmailAddress $ homeFax $ homePhone $ homeState $ homePostalAddress $ homeZipCode $ personalMobile $ spouse $ children ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.8.6.2 NAME 'contingentWorker' AUXILIARY MAY ( vendorName $ vendorAddress $ vendorPhoneNumber ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.45 NAME 'dynamicGroup' SUP ( groupOfNames $ ndsLoginProperties ) STRUCTURAL MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.46 NAME 'dynamicGroupAux' SUP ( groupOfNames $ ndsLoginProperties ) AUXILIARY MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.6.1.1 NAME 'sASSecurity' SUP Top STRUCTURAL MUST cn MAY ( nDSPKITreeCADN $ masvPolicyDN $ sASLoginPolicyDN $ sASLoginMethodContainerDN $ sasPostLoginMethodContainerDN $ nspmPolicyAgentContainerDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Organization' 'domain' ) X-NDS_NAME 'SAS:Security' )",
    +            "( 2.16.840.1.113719.1.39.6.1.2 NAME 'sASService' SUP Resource STRUCTURAL MAY ( hostServer $ privateKey $ publicKey $ allowUnlimitedCredit $ fullName $ lastLoginTime $ lockedByIntruder $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginTime $ networkAddress $ networkAddressRestriction $ notify $ operator $ owner $ path $ securityEquals $ securityFlags $ status $ Version $ nDSPKIKeyMaterialDN $ ndspkiKMOExport ) X-NDS_NAMING 'cn' X-NDS_NAME 'SAS:Service' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.1 NAME 'nDSPKICertificateAuthority' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKICertificateChainEC $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ nDSPKIPublicKeyEC $ nDSPKIPrivateKeyEC $ nDSPKIPublicKeyCertificateEC $ crossCertificatePairEC $ nDSPKISuiteBMode $ cACertificate $ cAECCertificate $ ndspkiCRLContainerDN $ ndspkiIssuedCertContainerDN $ ndspkiCRLConfigurationDNList $ ndspkiCRLECConfigurationDNList $ ndspkiSecurityRightsLevel $ ndspkiDefaultRSAKeySize $ ndspkiDefaultECCurve $ ndspkiDefaultCertificateLife ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:Certificate Authority' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.2 NAME 'nDSPKIKeyMaterial' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIKeyFile $ nDSPKIPrivateKey $ nDSPKIPublicKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKISubjectName $ nDSPKIGivenName $ ndspkiAdditionalRoots $ nDSPKINotBefore $ nDSPKINotAfter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'NDSPKI:Key Material' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.3 NAME 'nDSPKITrustedRoot' SUP Top STRUCTURAL MUST cn MAY ndspkiTrustedRootList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NAME 'NDSPKI:Trusted Root' )",
    +            "( 2.16.840.1.113719.1.48.6.1.4 NAME 'nDSPKITrustedRootObject' SUP Top STRUCTURAL MUST ( cn $ nDSPKITrustedRootCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKITrustedRoot' X-NDS_NAME 'NDSPKI:Trusted Root Object' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.101 NAME 'nDSPKISDKeyAccessPartition' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:SD Key Access Partition' )",
    +            "( 2.16.840.1.113719.1.48.6.1.102 NAME 'nDSPKISDKeyList' SUP Top STRUCTURAL MUST cn MAY ( nDSPKISDKeyServerDN $ nDSPKISDKeyStruct $ nDSPKISDKeyCert ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKISDKeyAccessPartition' X-NDS_NAME 'NDSPKI:SD Key List' )",
    +            "( 2.16.840.1.113719.1.31.6.2.1 NAME 'mASVSecurityPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ masvDomainPolicy $ masvPolicyUpdate $ masvClearanceNames $ masvLabelNames $ masvLabelSecrecyLevelNames $ masvLabelSecrecyCategoryNames $ masvLabelIntegrityLevelNames $ masvLabelIntegrityCategoryNames $ masvNDSAttributeLabels ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'MASV:Security Policy' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.1 NAME 'sASLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'SAS:Login Method Container' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.4 NAME 'sASLoginPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ privateKey $ publicKey $ sASAllowNDSPasswordWindow $ sASPolicyCredentials $ sASPolicyMethods $ sASPolicyObjectVersion $ sASPolicyServiceSubtypes $ sASPolicyServices $ sASPolicyUsers $ sASLoginSequence $ sASLoginPolicyUpdate $ sasNMASProductOptions $ sasPolicyMethods $ sasPolicyServices $ sasPolicyUsers $ sasAllowNDSPasswordWindow $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAuditConfiguration $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPLookAheadWindow $ sasOTPDigits $ sasUpdateLoginTimeInterval $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'SAS:Login Policy' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.7 NAME 'sASNMASBaseLoginMethod' SUP Top ABSTRACT MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasCertificateSearchContainers $ sasNMASMethodConfigData $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASLoginMethodContainer' X-NDS_NAME 'SAS:NMAS Base Login Method' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.8 NAME 'sASNMASLoginMethod' SUP sASNMASBaseLoginMethod STRUCTURAL X-NDS_NAME 'SAS:NMAS Login Method' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.9 NAME 'rADIUSDialAccessSystem' SUP Top STRUCTURAL MUST cn MAY ( publicKey $ privateKey $ rADIUSAgedInterval $ rADIUSClient $ rADIUSCommonNameResolution $ rADIUSConcurrentLimit $ rADIUSDASVersion $ rADIUSEnableCommonNameLogin $ rADIUSEnableDialAccess $ rADIUSInterimAcctingTimeout $ rADIUSLookupContexts $ rADIUSMaxDASHistoryRecord $ rADIUSMaximumHistoryRecord $ rADIUSPasswordPolicy $ rADIUSPrivateKey $ rADIUSProxyContext $ rADIUSProxyDomain $ rADIUSProxyTarget $ rADIUSPublicKey $ sASLoginConfiguration $ sASLoginConfigurationKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Dial Access System' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.10 NAME 'rADIUSProfile' SUP Top STRUCTURAL MUST cn MAY rADIUSAttributeList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Profile' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.11 NAME 'sasPostLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
    +            "( 2.16.840.1.113719.1.39.42.2.0.12 NAME 'sasPostLoginMethod' SUP Top STRUCTURAL MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sasPostLoginMethodContainer' )",
    +            "( 2.16.840.1.113719.1.6.6.1 NAME 'snmpGroup' SUP Top STRUCTURAL MUST cn MAY ( Version $ snmpServerList $ snmpTrapDisable $ snmpTrapInterval $ snmpTrapDescription $ snmpTrapConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.43.6.2 NAME 'nspmPasswordPolicyContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) )",
    +            "( 2.16.840.1.113719.1.39.43.6.3 NAME 'nspmPolicyAgent' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyAgentNetWare $ nspmPolicyAgentWINNT $ nspmPolicyAgentSolaris $ nspmPolicyAgentLinux $ nspmPolicyAgentAIX $ nspmPolicyAgentHPUX ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nspmPasswordPolicyContainer' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.43.6.1 NAME 'nspmPasswordPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyPrecedence $ nspmConfigurationOptions $ nspmChangePasswordMessage $ passwordExpirationInterval $ loginGraceLimit $ nspmMinPasswordLifetime $ passwordUniqueRequired $ nspmPasswordHistoryLimit $ nspmPasswordHistoryExpiration $ passwordAllowChange $ passwordRequired $ passwordMinimumLength $ nspmMaximumLength $ nspmCaseSensitive $ nspmMinUpperCaseCharacters $ nspmMaxUpperCaseCharacters $ nspmMinLowerCaseCharacters $ nspmMaxLowerCaseCharacters $ nspmNumericCharactersAllowed $ nspmNumericAsFirstCharacter $ nspmNumericAsLastCharacter $ nspmMinNumericCharacters $ nspmMaxNumericCharacters $ nspmSpecialCharactersAllowed $ nspmSpecialAsFirstCharacter $ nspmSpecialAsLastCharacter $ nspmMinSpecialCharacters $ nspmMaxSpecialCharacters $ nspmMaxRepeatedCharacters $ nspmMaxConsecutiveCharacters $ nspmMinUniqueCharacters $ nspmDisallowedAttributeValues $ nspmExcludeList $ nspmExtendedCharactersAllowed $ nspmExtendedAsFirstCharacter $ nspmExtendedAsLastCharacter $ nspmMinExtendedCharacters $ nspmMaxExtendedCharacters $ nspmUpperAsFirstCharacter $ nspmUpperAsLastCharacter $ nspmLowerAsFirstCharacter $ nspmLowerAsLastCharacter $ nspmComplexityRules $ nspmAD2K8Syntax $ nspmAD2K8maxViolation $ nspmXCharLimit $ nspmXCharHistoryLimit $ nspmUnicodeAllowed $ nspmNonAlphaCharactersAllowed $ nspmMinNonAlphaCharacters $ nspmMaxNonAlphaCharacters $ pwdInHistory $ nspmAdminsDoNotExpirePassword $ nspmPasswordACL $ nsimChallengeSetDN $ nsimForgottenAction $ nsimForgottenLoginConfig $ nsimAssignments $ nsimChallengeSetGUID $ nsimPwdRuleEnforcement ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'domain' 'Locality' 'Organization' 'organizationalUnit' 'Country' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.43.6.4 NAME 'nspmPasswordAux' AUXILIARY MAY ( publicKey $ privateKey $ loginGraceLimit $ loginGraceRemaining $ passwordExpirationTime $ passwordRequired $ nspmPasswordKey $ nspmPassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.12.6.1.0 NAME 'auditFileObject' SUP Top STRUCTURAL MUST ( cn $ auditPolicy $ auditContents ) MAY ( description $ auditPath $ auditLinkList $ auditType $ auditCurrentEncryptionKey $ auditAEncryptionKey $ auditBEncryptionKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'treeRoot' 'domain' ) X-NDS_NAME 'Audit:File Object' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.38.6.1.4 NAME 'wANMANLANArea' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ o $ ou $ owner $ seeAlso $ wANMANWANPolicy $ wANMANCost $ wANMANDefaultCost ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NAME 'WANMAN:LAN Area' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.37.1 NAME 'rbsCollection' SUP Top STRUCTURAL MUST cn MAY ( owner $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
    +            "( 2.16.840.1.113719.1.135.6.30.1 NAME 'rbsExternalScope' SUP Top ABSTRACT MUST cn MAY ( rbsURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.31.1 NAME 'rbsModule' SUP Top STRUCTURAL MUST cn MAY ( rbsURL $ rbsPath $ rbsType $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
    +            "( 2.16.840.1.113719.1.135.6.32.1 NAME 'rbsRole' SUP Top STRUCTURAL MUST cn MAY ( rbsContent $ rbsMember $ rbsTrusteeOf $ rbsGALabel $ rbsParameters $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
    +            "( 2.16.840.1.113719.1.135.6.33.1 NAME 'rbsTask' SUP Top STRUCTURAL MUST cn MAY ( rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ rbsTaskTemplates $ rbsTaskTemplatesURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.34.1 NAME 'rbsBook' SUP rbsTask STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.35.1 NAME 'rbsScope' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.45.1 NAME 'rbsCollection2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsParameters $ owner $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
    +            "( 2.16.840.1.113719.1.135.6.38.1 NAME 'rbsExternalScope2' SUP Top ABSTRACT MUST cn MAY ( rbsXMLInfo $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.39.1 NAME 'rbsModule2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsPath $ rbsType $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
    +            "( 2.16.840.1.113719.1.135.6.40.1 NAME 'rbsRole2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContent $ rbsMember $ rbsTrusteeOf $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
    +            "( 2.16.840.1.113719.1.135.6.41.1 NAME 'rbsTask2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule2' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.42.1 NAME 'rbsBook2' SUP rbsTask2 STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.135.6.43.1 NAME 'rbsScope2' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole2' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.49 NAME 'prSyncPolicy' SUP Top STRUCTURAL MUST cn MAY prSyncAttributes X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.50 NAME 'encryptionPolicy' SUP Top STRUCTURAL MUST cn MAY ( attrEncryptionDefinition $ attrEncryptionRequiresSecure ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.5 NAME 'ndspkiContainer' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'ndspkiContainer' 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'nDSPKITrustedRoot' ) )",
    +            "( 2.16.840.1.113719.1.48.6.1.6 NAME 'ndspkiCertificate' SUP Top STRUCTURAL MUST ( cn $ userCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'ndspkiContainer' 'nDSPKITrustedRoot' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.48.6.1.7 NAME 'ndspkiCRLConfiguration' SUP Top STRUCTURAL MUST cn MAY ( ndspkiCRLFileName $ ndspkiDirectory $ ndspkiStatus $ ndspkiIssueTime $ ndspkiNextIssueTime $ ndspkiAttemptTime $ ndspkiTimeInterval $ ndspkiCRLMaxProcessingInterval $ ndspkiCRLNumber $ ndspkiDistributionPoints $ ndspkiDistributionPointDN $ ndspkiCADN $ ndspkiCRLProcessData $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ cACertificate $ hostServer $ ndspkiCRLType $ ndspkiCRLExtendValidity ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'ndspkiContainer' )",
    +            "( 2.5.6.19 NAME 'cRLDistributionPoint' SUP Top STRUCTURAL MUST cn MAY ( authorityRevocationList $ authorityRevocationList $ cACertificate $ certificateRevocationList $ certificateRevocationList $ crossCertificatePair $ deltaRevocationList $ deltaRevocationList $ ndspkiCRLConfigurationDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'sASSecurity' 'domain' 'ndspkiCRLConfiguration' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.7.6.1 NAME 'notfTemplateCollection' SUP Top STRUCTURAL MUST cn MAY ( notfSMTPEmailHost $ notfSMTPEmailFrom $ notfSMTPEmailUserName $ sASSecretStore ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
    +            "( 2.16.840.1.113719.1.7.6.2 NAME 'notfMergeTemplate' SUP Top STRUCTURAL MUST cn MAY ( notfMergeTemplateData $ notfMergeTemplateSubject ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'notfTemplateCollection' X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.39.44.6.1 NAME 'nsimChallengeSet' SUP Top STRUCTURAL MUST cn MAY ( description $ nsimRequiredQuestions $ nsimRandomQuestions $ nsimNumberRandomQuestions $ nsimMinResponseLength $ nsimMaxResponseLength ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.266.6.1 NAME 'sssServerPolicies' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
    +            "( 2.16.840.1.113719.1.266.6.2 NAME 'sssServerPolicyOverride' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sssServerPolicies' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NOT_CONTAINER '1' )",
    +            "( 2.16.840.1.113719.1.1.6.1.91 NAME 'nestedGroupAux' AUXILIARY MAY ( groupMember $ excludedMember $ nestedConfig $ groupMembership ) X-NDS_NOT_CONTAINER '1' )"
    +        ]
    +    },
    +    "schema_entry": "cn=schema",
    +    "type": "SchemaInfo"
    +}
    +"""
    +
    +edir_9_1_4_dsa_info = """
    +{
    +    "raw": {
    +        "abandonOps": [
    +            "0"
    +        ],
    +        "addEntryOps": [
    +            "0"
    +        ],
    +        "altServer": [],
    +        "bindSecurityErrors": [
    +            "0"
    +        ],
    +        "chainings": [
    +            "0"
    +        ],
    +        "compareOps": [
    +            "0"
    +        ],
    +        "directoryTreeName": [
    +            "TEST_TREE"
    +        ],
    +        "dsaName": [
    +            "cn=MYSERVER,o=resources"
    +        ],
    +        "errors": [
    +            "0"
    +        ],
    +        "extendedOps": [
    +            "0"
    +        ],
    +        "inBytes": [
    +            "293"
    +        ],
    +        "inOps": [
    +            "3"
    +        ],
    +        "listOps": [
    +            "0"
    +        ],
    +        "modifyEntryOps": [
    +            "0"
    +        ],
    +        "modifyRDNOps": [
    +            "0"
    +        ],
    +        "namingContexts": [
    +            ""
    +        ],
    +        "oneLevelSearchOps": [
    +            "0"
    +        ],
    +        "outBytes": [
    +            "14"
    +        ],
    +        "readOps": [
    +            "1"
    +        ],
    +        "referralsReturned": [
    +            "0"
    +        ],
    +        "removeEntryOps": [
    +            "0"
    +        ],
    +        "repUpdatesIn": [
    +            "0"
    +        ],
    +        "repUpdatesOut": [
    +            "0"
    +        ],
    +        "searchOps": [
    +            "1"
    +        ],
    +        "securityErrors": [
    +            "0"
    +        ],
    +        "simpleAuthBinds": [
    +            "1"
    +        ],
    +        "strongAuthBinds": [
    +            "0"
    +        ],
    +        "subschemaSubentry": [
    +            "cn=schema"
    +        ],
    +        "supportedCapabilities": [],
    +        "supportedControl": [
    +            "2.16.840.1.113719.1.27.101.6",
    +            "2.16.840.1.113719.1.27.101.5",
    +            "1.2.840.113556.1.4.319",
    +            "2.16.840.1.113730.3.4.3",
    +            "2.16.840.1.113730.3.4.2",
    +            "2.16.840.1.113719.1.27.101.57",
    +            "2.16.840.1.113719.1.27.103.7",
    +            "2.16.840.1.113719.1.27.101.40",
    +            "2.16.840.1.113719.1.27.101.41",
    +            "1.2.840.113556.1.4.1413",
    +            "1.2.840.113556.1.4.805",
    +            "2.16.840.1.113730.3.4.18",
    +            "1.2.840.113556.1.4.529"
    +        ],
    +        "supportedExtension": [
    +            "2.16.840.1.113719.1.148.100.1",
    +            "2.16.840.1.113719.1.148.100.3",
    +            "2.16.840.1.113719.1.148.100.5",
    +            "2.16.840.1.113719.1.148.100.7",
    +            "2.16.840.1.113719.1.148.100.9",
    +            "2.16.840.1.113719.1.148.100.11",
    +            "2.16.840.1.113719.1.148.100.13",
    +            "2.16.840.1.113719.1.148.100.15",
    +            "2.16.840.1.113719.1.148.100.17",
    +            "2.16.840.1.113719.1.39.42.100.1",
    +            "2.16.840.1.113719.1.39.42.100.3",
    +            "2.16.840.1.113719.1.39.42.100.5",
    +            "2.16.840.1.113719.1.39.42.100.7",
    +            "2.16.840.1.113719.1.39.42.100.9",
    +            "2.16.840.1.113719.1.39.42.100.11",
    +            "2.16.840.1.113719.1.39.42.100.13",
    +            "2.16.840.1.113719.1.39.42.100.15",
    +            "2.16.840.1.113719.1.39.42.100.17",
    +            "2.16.840.1.113719.1.39.42.100.19",
    +            "2.16.840.1.113719.1.39.42.100.21",
    +            "2.16.840.1.113719.1.39.42.100.23",
    +            "2.16.840.1.113719.1.39.42.100.25",
    +            "2.16.840.1.113719.1.39.42.100.27",
    +            "2.16.840.1.113719.1.39.42.100.29",
    +            "1.3.6.1.4.1.4203.1.11.1",
    +            "2.16.840.1.113719.1.27.100.1",
    +            "2.16.840.1.113719.1.27.100.3",
    +            "2.16.840.1.113719.1.27.100.5",
    +            "2.16.840.1.113719.1.27.100.7",
    +            "2.16.840.1.113719.1.27.100.11",
    +            "2.16.840.1.113719.1.27.100.13",
    +            "2.16.840.1.113719.1.27.100.15",
    +            "2.16.840.1.113719.1.27.100.17",
    +            "2.16.840.1.113719.1.27.100.19",
    +            "2.16.840.1.113719.1.27.100.21",
    +            "2.16.840.1.113719.1.27.100.23",
    +            "2.16.840.1.113719.1.27.100.25",
    +            "2.16.840.1.113719.1.27.100.27",
    +            "2.16.840.1.113719.1.27.100.29",
    +            "2.16.840.1.113719.1.27.100.31",
    +            "2.16.840.1.113719.1.27.100.33",
    +            "2.16.840.1.113719.1.27.100.35",
    +            "2.16.840.1.113719.1.27.100.37",
    +            "2.16.840.1.113719.1.27.100.39",
    +            "2.16.840.1.113719.1.27.100.41",
    +            "2.16.840.1.113719.1.27.100.96",
    +            "2.16.840.1.113719.1.27.100.98",
    +            "2.16.840.1.113719.1.27.100.101",
    +            "2.16.840.1.113719.1.27.100.103",
    +            "2.16.840.1.113719.1.142.100.1",
    +            "2.16.840.1.113719.1.142.100.4",
    +            "2.16.840.1.113719.1.142.100.6",
    +            "2.16.840.1.113719.1.27.100.9",
    +            "2.16.840.1.113719.1.27.100.43",
    +            "2.16.840.1.113719.1.27.100.45",
    +            "2.16.840.1.113719.1.27.100.47",
    +            "2.16.840.1.113719.1.27.100.49",
    +            "2.16.840.1.113719.1.27.100.51",
    +            "2.16.840.1.113719.1.27.100.53",
    +            "2.16.840.1.113719.1.27.100.55",
    +            "1.3.6.1.4.1.1466.20037",
    +            "2.16.840.1.113719.1.27.100.79",
    +            "2.16.840.1.113719.1.27.100.84",
    +            "2.16.840.1.113719.1.27.103.1",
    +            "2.16.840.1.113719.1.27.103.2"
    +        ],
    +        "supportedFeatures": [
    +            "1.3.6.1.4.1.4203.1.5.1",
    +            "2.16.840.1.113719.1.27.99.1"
    +        ],
    +        "supportedGroupingTypes": [
    +            "2.16.840.1.113719.1.27.103.8"
    +        ],
    +        "supportedLDAPVersion": [
    +            "2",
    +            "3"
    +        ],
    +        "supportedSASLMechanisms": [
    +            "NMAS_LOGIN"
    +        ],
    +        "unAuthBinds": [
    +            "0"
    +        ],
    +        "vendorName": [
    +            "NetIQ Corporation"
    +        ],
    +        "vendorVersion": [
    +            "LDAP Agent for NetIQ eDirectory 9.1.4 (40105.09)"
    +        ],
    +        "wholeSubtreeSearchOps": [
    +            "0"
    +        ]
    +    },
    +    "type": "DsaInfo"
    +}
    +"""
    \ No newline at end of file
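The schema and DSA-info blobs above let ldap3 populate a Server's schema and root-DSE data without querying a live eDirectory instance. A minimal usage sketch, assuming the OFFLINE_EDIR_9_1_4 constant that ldap3 exposes alongside these definitions (host and credentials are placeholders):

    from ldap3 import Server, Connection, OFFLINE_EDIR_9_1_4

    # get_info=OFFLINE_EDIR_9_1_4 makes ldap3 parse the JSON baked into this
    # module instead of reading cn=schema and the root DSE over the wire
    # (assumes an ldap3 release that ships these eDirectory 9.1.4 blobs).
    server = Server('edir.example.com', use_ssl=True, get_info=OFFLINE_EDIR_9_1_4)
    conn = Connection(server, user='cn=admin,o=resources', password='secret')
    if conn.bind():              # the offline info is attached when the connection opens
        print(server.info)       # DsaInfo parsed from edir_9_1_4_dsa_info
        print(server.schema)     # SchemaInfo parsed from the schema blob above
        conn.unbind()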
    diff --git a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/slapd24.py b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/slapd24.py
    index 30e1795..1c66332 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/slapd24.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/protocol/schemas/slapd24.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/asyncStream.py b/server/www/packages/packages-windows/x86/ldap3/strategy/asyncStream.py
    index 7977d7e..631331c 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/asyncStream.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/asyncStream.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -55,7 +55,11 @@ class AsyncStreamStrategy(AsyncStrategy):
             self.persistent_search_message_id = None
             self.streaming = False
             self.callback = None
    -        self.events = Queue()
    +        if ldap_connection.pool_size:
    +            self.events = Queue(ldap_connection.pool_size)
    +        else:
    +            self.events = Queue()
    +
             del self._requests  # remove _requests dict from Async Strategy
     
         def _start_listen(self):
    @@ -77,7 +81,6 @@ class AsyncStreamStrategy(AsyncStrategy):
                     if not self._header_added and self.stream.tell() == 0:
                         header = add_ldif_header(['-'])[0]
                         self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
    -
                     ldif_lines = persistent_search_response_to_ldif(change)
                     if self.stream and ldif_lines and not self.connection.closed:
                         fragment = self.line_separator.join(ldif_lines)
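For context on the asyncStream.py hunk above: bounding the persistent-search event queue at the connection's pool_size means a slow consumer now exerts back-pressure instead of letting events pile up without limit. A minimal sketch of the resulting semantics, using only the standard library (the pool_size value is illustrative):

    from queue import Queue, Full

    pool_size = 2  # stand-in for ldap_connection.pool_size
    # Mirrors the patched constructor: bounded when pool_size is truthy,
    # unbounded otherwise (Queue() defaults to maxsize=0, i.e. infinite).
    events = Queue(pool_size) if pool_size else Queue()

    events.put('change-1')
    events.put('change-2')
    try:
        events.put('change-3', block=False)  # the queue is full now
    except Full:
        print('queue full; a blocking put() would wait until a consumer get()s')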
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/asynchronous.py b/server/www/packages/packages-windows/x86/ldap3/strategy/asynchronous.py
    index 8ac79ee..b772ad2 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/asynchronous.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/asynchronous.py
    @@ -1,221 +1,253 @@
    -"""
    -"""
    -
    -# Created on 2013.07.15
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2013 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see .
    -
    -from threading import Thread, Lock
    -import socket
    -
    -from .. import get_config_parameter
    -from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult
    -from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE
    -from ..protocol.rfc4511 import LDAPMessage
    -from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED
    -from ..utils.asn1 import decoder, decode_message_fast
    -
    -
    -# noinspection PyProtectedMember
    -class AsyncStrategy(BaseStrategy):
    -    """
    -    This strategy is asynchronous. You send the request and get the messageId of the request sent
    -    Receiving data from socket is managed in a separated thread in a blocking mode
    -    Requests return an int value to indicate the messageId of the requested Operation
    -    You get the response with get_response, it has a timeout to wait for response to appear
    -    Connection.response will contain the whole LDAP response for the messageId requested in a dict form
    -    Connection.request will contain the result LDAP message in a dict form
    -    Response appear in strategy._responses dictionary
    -    """
    -
    -    # noinspection PyProtectedMember
    -    class ReceiverSocketThread(Thread):
    -        """
    -        The thread that actually manage the receiver socket
    -        """
    -
    -        def __init__(self, ldap_connection):
    -            Thread.__init__(self)
    -            self.connection = ldap_connection
    -            self.socket_size = get_config_parameter('SOCKET_SIZE')
    -
    -        def run(self):
    -            """
    -            Wait for data on socket, compute the length of the message and wait for enough bytes to decode the message
    -            Message are appended to strategy._responses
    -            """
    -            unprocessed = b''
    -            get_more_data = True
    -            listen = True
    -            data = b''
    -            while listen:
    -                if get_more_data:
    -                    try:
    -                        data = self.connection.socket.recv(self.socket_size)
    -                    except (OSError, socket.error, AttributeError):
    -                        if self.connection.receive_timeout:  # a receive timeout has been detected - keep kistening on the socket
    -                            continue
    -                    except Exception as e:
    -                        if log_enabled(ERROR):
    -                            log(ERROR, '<%s> for <%s>', str(e), self.connection)
    -                        raise  # unexpected exception - re-raise
    -                    if len(data) > 0:
    -                        unprocessed += data
    -                        data = b''
    -                    else:
    -                        listen = False
    -                length = BaseStrategy.compute_ldap_message_size(unprocessed)
    -                if length == -1 or len(unprocessed) < length:
    -                    get_more_data = True
    -                elif len(unprocessed) >= length:  # add message to message list
    -                    if self.connection.usage:
    -                        self.connection._usage.update_received_message(length)
    -                        if log_enabled(NETWORK):
    -                            log(NETWORK, 'received %d bytes via <%s>', length, self.connection)
    -                    if self.connection.fast_decoder:
    -                        ldap_resp = decode_message_fast(unprocessed[:length])
    -                        dict_response = self.connection.strategy.decode_response_fast(ldap_resp)
    -                    else:
    -                        ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0]
    -                        dict_response = self.connection.strategy.decode_response(ldap_resp)
    -                    message_id = int(ldap_resp['messageID'])
    -                    if log_enabled(NETWORK):
    -                        log(NETWORK, 'received 1 ldap message via <%s>', self.connection)
    -                    if log_enabled(EXTENDED):
    -                        log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
    -                    if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')):
    -                        if dict_response['result'] == 0:  # StartTls in progress
    -                            if self.connection.server.tls:
    -                                self.connection.server.tls._start_tls(self.connection)
    -                            else:
    -                                self.connection.last_error = 'no Tls object defined in Server'
    -                                if log_enabled(ERROR):
    -                                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                                raise LDAPSSLConfigurationError(self.connection.last_error)
    -                        else:
    -                            self.connection.last_error = 'asynchronous StartTls failed'
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                            raise LDAPStartTLSError(self.connection.last_error)
    -                        del self.connection._awaiting_for_async_start_tls
    -                    if message_id != 0:  # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
    -                        with self.connection.strategy.async_lock:
    -                            if message_id in self.connection.strategy._responses:
    -                                self.connection.strategy._responses[message_id].append(dict_response)
    -                            else:
    -                                self.connection.strategy._responses[message_id] = [dict_response]
    -                            if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
    -                                self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE)
    -                        if self.connection.strategy.can_stream:  # for AsyncStreamStrategy, used for PersistentSearch
    -                            self.connection.strategy.accumulate_stream(message_id, dict_response)
    -                        unprocessed = unprocessed[length:]
    -                        get_more_data = False if unprocessed else True
    -                        listen = True if self.connection.listening or unprocessed else False
    -                    else:  # Unsolicited Notification
    -                        if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036':  # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
    -                            listen = False
    -                        else:
    -                            self.connection.last_error = 'unknown unsolicited notification from server'
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                            raise LDAPStartTLSError(self.connection.last_error)
    -            self.connection.strategy.close()
    -
    -    def __init__(self, ldap_connection):
    -        BaseStrategy.__init__(self, ldap_connection)
    -        self.sync = False
    -        self.no_real_dsa = False
    -        self.pooled = False
    -        self._responses = None
    -        self._requests = None
    -        self.can_stream = False
    -        self.receiver = None
    -        self.async_lock = Lock()
    -
    -    def open(self, reset_usage=True, read_server_info=True):
    -        """
    -        Open connection and start listen on the socket in a different thread
    -        """
    -        with self.connection.connection_lock:
    -            self._responses = dict()
    -            self._requests = dict()
    -            BaseStrategy.open(self, reset_usage, read_server_info)
    -
    -        if read_server_info:
    -            try:
    -                self.connection.refresh_server_info()
    -            except LDAPOperationResult:  # catch errors from server if raise_exception = True
    -                self.connection.server._dsa_info = None
    -                self.connection.server._schema_info = None
    -
    -    def close(self):
    -        """
    -        Close connection and stop socket thread
    -        """
    -        with self.connection.connection_lock:
    -            BaseStrategy.close(self)
    -
    -    def post_send_search(self, message_id):
    -        """
    -        Clears connection.response and returns messageId
    -        """
    -        self.connection.response = None
    -        self.connection.request = None
    -        self.connection.result = None
    -        return message_id
    -
    -    def post_send_single_response(self, message_id):
    -        """
    -        Clears connection.response and returns messageId.
    -        """
    -        self.connection.response = None
    -        self.connection.request = None
    -        self.connection.result = None
    -        return message_id
    -
    -    def _start_listen(self):
    -        """
    -        Start thread in daemon mode
    -        """
    -        if not self.connection.listening:
    -            self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection)
    -            self.connection.listening = True
    -            self.receiver.daemon = True
    -            self.receiver.start()
    -
    -    def _get_response(self, message_id):
    -        """
    -        Performs the capture of LDAP response for this strategy
    -        Checks lock to avoid race condition with receiver thread
    -        """
    -        with self.async_lock:
    -            responses = self._responses.pop(message_id) if message_id in self._responses and self._responses[message_id][-1] == RESPONSE_COMPLETE else None
    -
    -        return responses
    -
    -    def receiving(self):
    -        raise NotImplementedError
    -
    -    def get_stream(self):
    -        raise NotImplementedError
    -
    -    def set_stream(self, value):
    -        raise NotImplementedError
    +"""
    +"""
    +
    +# Created on 2013.07.15
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2013 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
    +# If not, see .
    +
    +from threading import Thread, Lock, Event
    +import socket
    +
    +from .. import get_config_parameter
    +from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult
    +from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE
    +from ..protocol.rfc4511 import LDAPMessage
    +from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED
    +from ..utils.asn1 import decoder, decode_message_fast
    +
    +
    +# noinspection PyProtectedMember
    +class AsyncStrategy(BaseStrategy):
    +    """
+    This strategy is asynchronous. You send the request and get the messageId of the request sent
+    Receiving data from the socket is managed in a separate thread in blocking mode
+    Requests return an int value to indicate the messageId of the requested Operation
+    You get the response with get_response, which has a timeout to wait for the response to appear
+    Connection.response will contain the whole LDAP response for the messageId requested in dict form
+    Connection.request will contain the result LDAP message in dict form
+    Responses appear in the strategy._responses dictionary
    +    """
    +
    +    # noinspection PyProtectedMember
    +    class ReceiverSocketThread(Thread):
    +        """
+        The thread that actually manages the receiver socket
    +        """
    +
    +        def __init__(self, ldap_connection):
    +            Thread.__init__(self)
    +            self.connection = ldap_connection
    +            self.socket_size = get_config_parameter('SOCKET_SIZE')
    +
    +        def run(self):
    +            """
+            Waits for data on the socket, computes the length of the message and waits for enough bytes to decode the message
+            Messages are appended to strategy._responses
    +            """
    +            unprocessed = b''
    +            get_more_data = True
    +            listen = True
    +            data = b''
    +            while listen:
    +                if get_more_data:
    +                    try:
    +                        data = self.connection.socket.recv(self.socket_size)
    +                    except (OSError, socket.error, AttributeError):
+                        if self.connection.receive_timeout:  # a receive timeout has been detected - keep listening on the socket
    +                            continue
    +                    except Exception as e:
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '<%s> for <%s>', str(e), self.connection)
    +                        raise  # unexpected exception - re-raise
    +                    if len(data) > 0:
    +                        unprocessed += data
    +                        data = b''
    +                    else:
    +                        listen = False
    +                length = BaseStrategy.compute_ldap_message_size(unprocessed)
    +                if length == -1 or len(unprocessed) < length:
    +                    get_more_data = True
    +                elif len(unprocessed) >= length:  # add message to message list
    +                    if self.connection.usage:
    +                        self.connection._usage.update_received_message(length)
    +                        if log_enabled(NETWORK):
    +                            log(NETWORK, 'received %d bytes via <%s>', length, self.connection)
    +                    if self.connection.fast_decoder:
    +                        ldap_resp = decode_message_fast(unprocessed[:length])
    +                        dict_response = self.connection.strategy.decode_response_fast(ldap_resp)
    +                    else:
    +                        ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0]
    +                        dict_response = self.connection.strategy.decode_response(ldap_resp)
    +                    message_id = int(ldap_resp['messageID'])
    +                    if log_enabled(NETWORK):
    +                        log(NETWORK, 'received 1 ldap message via <%s>', self.connection)
    +                    if log_enabled(EXTENDED):
    +                        log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
    +                    if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')):
    +                        if dict_response['result'] == 0:  # StartTls in progress
    +                            if self.connection.server.tls:
    +                                self.connection.server.tls._start_tls(self.connection)
    +                            else:
    +                                self.connection.last_error = 'no Tls object defined in Server'
    +                                if log_enabled(ERROR):
    +                                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                                raise LDAPSSLConfigurationError(self.connection.last_error)
    +                        else:
    +                            self.connection.last_error = 'asynchronous StartTls failed'
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                            raise LDAPStartTLSError(self.connection.last_error)
    +                        del self.connection._awaiting_for_async_start_tls
    +                    if message_id != 0:  # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
    +                        with self.connection.strategy.async_lock:
    +                            if message_id in self.connection.strategy._responses:
    +                                self.connection.strategy._responses[message_id].append(dict_response)
    +                            else:
    +                                self.connection.strategy._responses[message_id] = [dict_response]
    +                            if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
    +                                self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE)
    +                                self.connection.strategy.set_event_for_message(message_id)
    +
    +                        if self.connection.strategy.can_stream:  # for AsyncStreamStrategy, used for PersistentSearch
    +                            self.connection.strategy.accumulate_stream(message_id, dict_response)
    +                        unprocessed = unprocessed[length:]
    +                        get_more_data = False if unprocessed else True
    +                        listen = True if self.connection.listening or unprocessed else False
    +                    else:  # Unsolicited Notification
    +                        if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036':  # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
    +                            listen = False
    +                        else:
    +                            self.connection.last_error = 'unknown unsolicited notification from server'
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                            raise LDAPStartTLSError(self.connection.last_error)
    +            self.connection.strategy.close()
    +
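The extendedResp branch above completes an asynchronous StartTLS: a success response to OID 1.3.6.1.4.1.1466.20037 hands the socket to the server's Tls object via _start_tls(), and a missing Tls object raises LDAPSSLConfigurationError. A minimal client-side sketch of the same flow; the host name and CA bundle file are placeholders:

import ssl
from ldap3 import Server, Connection, Tls

# validate and ca_certs_file are the same Tls parameters used for referral
# connections later in this diff; 'ca.pem' is a placeholder file name
tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file='ca.pem')
server = Server('ldap.example.com', tls=tls)
conn = Connection(server)
conn.open()
conn.start_tls()  # without a Tls object on the Server this fails, as in the branch above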
    +    def __init__(self, ldap_connection):
    +        BaseStrategy.__init__(self, ldap_connection)
    +        self.sync = False
    +        self.no_real_dsa = False
    +        self.pooled = False
    +        self._responses = None
    +        self._requests = None
    +        self.can_stream = False
    +        self.receiver = None
    +        self.async_lock = Lock()
    +        self.event_lock = Lock()
    +        self._events = {}
    +
    +    def open(self, reset_usage=True, read_server_info=True):
    +        """
+        Open the connection and start listening on the socket in a separate thread
    +        """
    +        with self.connection.connection_lock:
    +            self._responses = dict()
    +            self._requests = dict()
    +            BaseStrategy.open(self, reset_usage, read_server_info)
    +
    +        if read_server_info:
    +            try:
    +                self.connection.refresh_server_info()
    +            except LDAPOperationResult:  # catch errors from server if raise_exception = True
    +                self.connection.server._dsa_info = None
    +                self.connection.server._schema_info = None
    +
    +    def close(self):
    +        """
    +        Close connection and stop socket thread
    +        """
    +        with self.connection.connection_lock:
    +            BaseStrategy.close(self)
    +
    +    def _add_event_for_message(self, message_id):
    +        with self.event_lock:
+            # The check is needed here because the receiver thread may have created it already
    +            if message_id not in self._events:
    +                self._events[message_id] = Event()
    +
    +    def set_event_for_message(self, message_id):
    +        with self.event_lock:
+            # The receiver thread may receive the response before the sender sets the event for the message_id,
    +            # so we have to check if the event exists
    +            if message_id not in self._events:
    +                self._events[message_id] = Event()
    +            self._events[message_id].set()
    +
    +    def _get_event_for_message(self, message_id):
    +        with self.event_lock:
    +            if message_id not in self._events:
    +                raise RuntimeError('Event for message[{}] should have been created before accessing'.format(message_id))
    +            return self._events[message_id]
    +
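The three helpers above implement a get-or-create handshake on one threading.Event per message id, so it does not matter whether the sender or the receiver thread touches a message id first. A standalone sketch of the same pattern; the helper names _event_for, complete and wait_for are illustrative, not part of ldap3:

from threading import Event, Lock, Thread

_events = {}
_events_lock = Lock()

def _event_for(message_id):
    # creation is idempotent, so sender and receiver can race safely
    with _events_lock:
        if message_id not in _events:
            _events[message_id] = Event()
        return _events[message_id]

def complete(message_id):                 # receiver side: response is ready
    _event_for(message_id).set()

def wait_for(message_id, timeout=None):   # sender side: block until ready
    return _event_for(message_id).wait(timeout)

Thread(target=complete, args=(1,)).start()
print(wait_for(1, timeout=5))  # True as soon as complete() has run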
    +    def post_send_search(self, message_id):
    +        """
    +        Clears connection.response and returns messageId
    +        """
    +        self.connection.response = None
    +        self.connection.request = None
    +        self.connection.result = None
    +        self._add_event_for_message(message_id)
    +        return message_id
    +
    +    def post_send_single_response(self, message_id):
    +        """
    +        Clears connection.response and returns messageId.
    +        """
    +        self.connection.response = None
    +        self.connection.request = None
    +        self.connection.result = None
    +        self._add_event_for_message(message_id)
    +        return message_id
    +
    +    def _start_listen(self):
    +        """
    +        Start thread in daemon mode
    +        """
    +        if not self.connection.listening:
    +            self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection)
    +            self.connection.listening = True
    +            self.receiver.daemon = True
    +            self.receiver.start()
    +
    +    def _get_response(self, message_id, timeout):
    +        """
    +        Performs the capture of LDAP response for this strategy
+        The response is only complete after the event has been set
    +        """
    +        event = self._get_event_for_message(message_id)
    +        flag = event.wait(timeout)
    +        if not flag:
    +            # timeout
    +            return None
    +
+        # At this stage the response is guaranteed to be there
    +        self._events.pop(message_id)
    +        with self.async_lock:
    +            return self._responses.pop(message_id)
    +
    +    def receiving(self):
    +        raise NotImplementedError
    +
    +    def get_stream(self):
    +        raise NotImplementedError
    +
    +    def set_stream(self, value):
    +        raise NotImplementedError
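For orientation, a sketch of how this strategy is driven from client code: search() returns the message id (see post_send_search above) and get_response() blocks on the per-message event until the response is complete. Host, bind DN and password are placeholders:

from ldap3 import Server, Connection, ASYNC

server = Server('ldap.example.com')
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                  client_strategy=ASYNC)
conn.bind()

# with an asynchronous strategy the operation returns its message id at once
msg_id = conn.search('dc=example,dc=com', '(objectclass=person)', attributes=['cn'])

# _get_response() above waits on the event set by the receiver thread and
# returns the accumulated messages as a (responses, result) tuple
responses, result = conn.get_response(msg_id)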
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/base.py b/server/www/packages/packages-windows/x86/ldap3/strategy/base.py
    index 0506703..568459e 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/base.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/base.py
    @@ -1,867 +1,902 @@
    -"""
    -"""
    -
    -# Created on 2013.07.15
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2013 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -import socket
    -from struct import pack
    -from platform import system
    -from time import sleep
    -from random import choice
    -from datetime import datetime
    -
    -from .. import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
    -from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL
    -from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\
    -    LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \
    -    LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError
    -from ..utils.uri import parse_uri
    -from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry
    -from ..operation.add import add_response_to_dict, add_request_to_dict
    -from ..operation.modify import modify_request_to_dict, modify_response_to_dict
    -from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\
    -    search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\
    -    search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast
    -from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \
    -    sicily_bind_response_to_dict_fast
    -from ..operation.compare import compare_response_to_dict, compare_request_to_dict
    -from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast
    -from ..core.server import Server
    -from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict
    -from ..operation.delete import delete_response_to_dict, delete_request_to_dict
    -from ..protocol.convert import prepare_changes_for_request, build_controls_list
    -from ..operation.abandon import abandon_request_to_dict
    -from ..core.tls import Tls
    -from ..protocol.oid import Oids
    -from ..protocol.rfc2696 import RealSearchControlValue
    -from ..protocol.microsoft import DirSyncControlResponseValue
    -from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message
    -from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence
    -from ..utils.conv import to_unicode
    -
    -SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER'
    -TRANSACTION_ERROR = 'TRANSACTION_ERROR'
    -RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE'
    -
    -
    -# noinspection PyProtectedMember
    -class BaseStrategy(object):
    -    """
    -    Base class for connection strategy
    -    """
    -
    -    def __init__(self, ldap_connection):
    -        self.connection = ldap_connection
    -        self._outstanding = None
    -        self._referrals = []
    -        self.sync = None  # indicates a synchronous connection
    -        self.no_real_dsa = None  # indicates a connection to a fake LDAP server
    -        self.pooled = None  # Indicates a connection with a connection pool
    -        self.can_stream = None  # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). Stream must be initialized and closed in _start_listen() and _stop_listen()
    -        self.referral_cache = {}
    -        if log_enabled(BASIC):
    -            log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
    -
    -    def __str__(self):
    -        s = [
    -            str(self.connection) if self.connection else 'None',
    -            'sync' if self.sync else 'async',
    -            'no real DSA' if self.no_real_dsa else 'real DSA',
    -            'pooled' if self.pooled else 'not pooled',
    -            'can stream output' if self.can_stream else 'cannot stream output',
    -        ]
    -        return ' - '.join(s)
    -
    -    def open(self, reset_usage=True, read_server_info=True):
    -        """
    -        Open a socket to a server. Choose a server from the server pool if available
    -        """
    -        if log_enabled(NETWORK):
    -            log(NETWORK, 'opening connection for <%s>', self.connection)
    -        if self.connection.lazy and not self.connection._executing_deferred:
    -            self.connection._deferred_open = True
    -            self.connection.closed = False
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'deferring open connection for <%s>', self.connection)
    -        else:
    -            if not self.connection.closed and not self.connection._executing_deferred:  # try to close connection if still open
    -                self.close()
    -
    -            self._outstanding = dict()
    -            if self.connection.usage:
    -                if reset_usage or not self.connection._usage.initial_connection_start_time:
    -                    self.connection._usage.start()
    -
    -            if self.connection.server_pool:
    -                new_server = self.connection.server_pool.get_server(self.connection)  # get a server from the server_pool if available
    -                if self.connection.server != new_server:
    -                    self.connection.server = new_server
    -                    if self.connection.usage:
    -                        self.connection._usage.servers_from_pool += 1
    -
    -            exception_history = []
    -            if not self.no_real_dsa:  # tries to connect to a real server
    -                for candidate_address in self.connection.server.candidate_addresses():
    -                    try:
    -                        if log_enabled(BASIC):
    -                            log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
    -                        self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
    -                        self.connection.server.current_address = candidate_address
    -                        self.connection.server.update_availability(candidate_address, True)
    -                        break
    -                    except Exception as e:
    -                        self.connection.server.update_availability(candidate_address, False)
    -                        # exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4]))
    -                        exception_history.append((type(e)(str(e)), candidate_address[4]))
    -                if not self.connection.server.current_address and exception_history:
    -                    # if len(exception_history) == 1:  # only one exception, reraise
    -                    #     if log_enabled(ERROR):
    -                    #         log(ERROR, '<%s> for <%s>', exception_history[0][1](exception_history[0][2]), self.connection)
    -                    #     raise exception_history[0][1](exception_history[0][2])
    -                    # else:
    -                    #     if log_enabled(ERROR):
    -                    #         log(ERROR, 'unable to open socket for <%s>', self.connection)
    -                    #     raise LDAPSocketOpenError('unable to open socket', exception_history)
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'unable to open socket for <%s>', self.connection)
    -                    raise LDAPSocketOpenError('unable to open socket', exception_history)
    -                elif not self.connection.server.current_address:
    -                    if log_enabled(ERROR):
    -                        log(ERROR, 'invalid server address for <%s>', self.connection)
    -                    raise LDAPSocketOpenError('invalid server address')
    -
    -            self.connection._deferred_open = False
    -            self._start_listen()
    -            # self.connection.do_auto_bind()
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'connection open for <%s>', self.connection)
    -
    -    def close(self):
    -        """
    -        Close connection
    -        """
    -        if log_enabled(NETWORK):
    -            log(NETWORK, 'closing connection for <%s>', self.connection)
    -        if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
    -            self.connection.listening = False
    -            self.connection.closed = True
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'deferred connection closed for <%s>', self.connection)
    -        else:
    -            if not self.connection.closed:
    -                self._stop_listen()
-                if not self.no_real_dsa:
    -                    self._close_socket()
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'connection closed for <%s>', self.connection)
    -
    -        self.connection.bound = False
    -        self.connection.request = None
    -        self.connection.response = None
    -        self.connection.tls_started = False
    -        self._outstanding = None
    -        self._referrals = []
    -
    -        if not self.connection.strategy.no_real_dsa:
    -            self.connection.server.current_address = None
    -        if self.connection.usage:
    -            self.connection._usage.stop()
    -
    -    def _open_socket(self, address, use_ssl=False, unix_socket=False):
    -        """
    -        Tries to open and connect a socket to a Server
    -        raise LDAPExceptionError if unable to open or connect socket
    -        """
    -        try:
    -            self.connection.socket = socket.socket(*address[:3])
    -        except Exception as e:
    -            self.connection.last_error = 'socket creation error: ' + str(e)
    -            if log_enabled(ERROR):
    -                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    -            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    -        try:  # set socket timeout for opening connection
    -            if self.connection.server.connect_timeout:
    -                self.connection.socket.settimeout(self.connection.server.connect_timeout)
    -            self.connection.socket.connect(address[4])
    -        except socket.error as e:
    -            self.connection.last_error = 'socket connection error while opening: ' + str(e)
    -            if log_enabled(ERROR):
    -                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    -            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    -
    -        # Set connection recv timeout (must be set after connect,
    -        # because socket.settimeout() affects both, connect() as
    -        # well as recv(). Set it before tls.wrap_socket() because
    -        # the recv timeout should take effect during the TLS
    -        # handshake.
    -        if self.connection.receive_timeout is not None:
    -            try:  # set receive timeout for the connection socket
    -                self.connection.socket.settimeout(self.connection.receive_timeout)
    -                if system().lower() == 'windows':
    -                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
    -                else:
    -                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
    -            except socket.error as e:
    -                self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
    -
    -        # if exc:
    -        #     if log_enabled(ERROR):
    -        #         log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -        #     raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    -                if log_enabled(ERROR):
    -                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    -
    -        if use_ssl:
    -            try:
    -                self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
    -                if self.connection.usage:
    -                    self.connection._usage.wrapped_sockets += 1
    -            except Exception as e:
    -                self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
    -                if log_enabled(ERROR):
    -                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    -                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    -        if self.connection.usage:
    -            self.connection._usage.open_sockets += 1
    -
    -        self.connection.closed = False
    -
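Both timeouts handled in _open_socket() are exposed on the public API: connect_timeout on the Server bounds only the initial connect(), while receive_timeout on the Connection is applied after connect (and before the TLS handshake, as the comment above explains) so that it covers every recv(). A sketch with a placeholder host and values:

from ldap3 import Server, Connection

server = Server('ldap.example.com', connect_timeout=5)  # seconds for connect()
conn = Connection(server, receive_timeout=10)           # seconds for each recv()
conn.open()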
    -    def _close_socket(self):
    -        """
    -        Try to close a socket
    -        don't raise exception if unable to close socket, assume socket is already closed
    -        """
    -
    -        try:
    -            self.connection.socket.shutdown(socket.SHUT_RDWR)
    -        except Exception:
    -            pass
    -
    -        try:
    -            self.connection.socket.close()
    -        except Exception:
    -            pass
    -
    -        self.connection.socket = None
    -        self.connection.closed = True
    -
    -        if self.connection.usage:
    -            self.connection._usage.closed_sockets += 1
    -
    -    def _stop_listen(self):
    -        self.connection.listening = False
    -
    -    def send(self, message_type, request, controls=None):
    -        """
    -        Send an LDAP message
    -        Returns the message_id
    -        """
    -        self.connection.request = None
    -        if self.connection.listening:
    -            if self.connection.sasl_in_progress and message_type not in ['bindRequest']:  # as per RFC4511 (4.2.1)
    -                self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                raise LDAPSASLBindInProgressError(self.connection.last_error)
    -            message_id = self.connection.server.next_message_id()
    -            ldap_message = LDAPMessage()
    -            ldap_message['messageID'] = MessageID(message_id)
    -            ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
    -            message_controls = build_controls_list(controls)
    -            if message_controls is not None:
    -                ldap_message['controls'] = message_controls
    -            self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
    -            self._outstanding[message_id] = self.connection.request
    -            self.sending(ldap_message)
    -        else:
    -            self.connection.last_error = 'unable to send message, socket is not open'
    -            if log_enabled(ERROR):
    -                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -            raise LDAPSocketOpenError(self.connection.last_error)
    -
    -        return message_id
    -
    -    def get_response(self, message_id, timeout=None, get_request=False):
    -        """
    -        Get response LDAP messages
    -        Responses are returned by the underlying connection strategy
    -        Check if message_id LDAP message is still outstanding and wait for timeout to see if it appears in _get_response
    -        Result is stored in connection.result
-        Responses without a result are stored in connection.response
    -        A tuple (responses, result) is returned
    -        """
    -        conf_sleep_interval = get_config_parameter('RESPONSE_SLEEPTIME')
    -        if timeout is None:
    -            timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
    -        response = None
    -        result = None
    -        request = None
    -        if self._outstanding and message_id in self._outstanding:
    -            while timeout >= 0:  # waiting for completed message to appear in responses
    -                responses = self._get_response(message_id)
    -                if not responses:
    -                    sleep(conf_sleep_interval)
    -                    timeout -= conf_sleep_interval
    -                    continue
    -
    -                if responses == SESSION_TERMINATED_BY_SERVER:
    -                    try:  # try to close the session but don't raise any error if server has already closed the session
    -                        self.close()
    -                    except (socket.error, LDAPExceptionError):
    -                        pass
    -                    self.connection.last_error = 'session terminated by server'
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                    raise LDAPSessionTerminatedByServerError(self.connection.last_error)
    -                elif responses == TRANSACTION_ERROR:  # Novell LDAP Transaction unsolicited notification
    -                    self.connection.last_error = 'transaction error'
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                    raise LDAPTransactionError(self.connection.last_error)
    -
    -                # if referral in response opens a new connection to resolve referrals if requested
    -
    -                if responses[-2]['result'] == RESULT_REFERRAL:
    -                    if self.connection.usage:
    -                        self.connection._usage.referrals_received += 1
    -                    if self.connection.auto_referrals:
    -                        ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals'])
    -                        if ref_response is not None:
    -                            responses = ref_response + [ref_result]
    -                            responses.append(RESPONSE_COMPLETE)
    -                        elif ref_result is not None:
    -                            responses = [ref_result, RESPONSE_COMPLETE]
    -
    -                        self._referrals = []
    -
    -                if responses:
    -                    result = responses[-2]
    -                    response = responses[:-2]
    -                    self.connection.result = None
    -                    self.connection.response = None
    -                    break
    -
    -            if timeout <= 0:
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
    -                raise LDAPResponseTimeoutError('no response from server')
    -
    -            if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
    -                self._outstanding.pop(message_id)
    -                self.connection.result = result.copy()
    -                raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
    -
    -            # checks if any response has a range tag
    -            # self._auto_range_searching is set as a flag to avoid recursive searches
    -            if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)):
    -                self._auto_range_searching = result.copy()
    -                temp_response = response[:]  # copy
    -                if self.do_search_on_auto_range(self._outstanding[message_id], response):
    -                    for resp in temp_response:
    -                        if resp['type'] == 'searchResEntry':
    -                            keys = [key for key in resp['raw_attributes'] if ';range=' in key]
    -                            for key in keys:
    -                                del resp['raw_attributes'][key]
    -                                del resp['attributes'][key]
    -                    response = temp_response
    -                    result = self._auto_range_searching
    -                del self._auto_range_searching
    -
    -            if self.connection.empty_attributes:
    -                for entry in response:
    -                    if entry['type'] == 'searchResEntry':
    -                        for attribute_type in self._outstanding[message_id]['attributes']:
    -                            if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
    -                                entry['raw_attributes'][attribute_type] = list()
    -                                entry['attributes'][attribute_type] = list()
    -                                if log_enabled(PROTOCOL):
    -                                    log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self)
    -                        if not self.connection.auto_range:
    -                            attrs_to_remove = []
    -                            # removes original empty attribute in case a range tag is returned
    -                            for attribute_type in entry['attributes']:
    -                                if ';range' in attribute_type.lower():
    -                                    orig_attr, _, _ = attribute_type.partition(';')
    -                                    attrs_to_remove.append(orig_attr)
    -                            for attribute_type in attrs_to_remove:
    -                                if log_enabled(PROTOCOL):
    -                                    log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self)
    -                                del entry['raw_attributes'][attribute_type]
    -                                del entry['attributes'][attribute_type]
    -
    -            request = self._outstanding.pop(message_id)
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'message id not in outstanding queue for <%s>', self.connection)
-            raise LDAPResponseTimeoutError('message id not in outstanding queue')
    -
    -        if get_request:
    -            return response, result, request
    -        else:
    -            return response, result
    -
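get_response() polls in RESPONSE_SLEEPTIME steps and raises LDAPResponseTimeoutError once the timeout is exhausted, so callers passing their own timeout should be ready to catch it; conn and msg_id are as in the earlier sketch:

from ldap3.core.exceptions import LDAPResponseTimeoutError

try:
    # timeout is in seconds; if omitted it defaults to the
    # RESPONSE_WAITING_TIMEOUT configuration parameter
    responses, result = conn.get_response(msg_id, timeout=5)
except LDAPResponseTimeoutError:
    responses, result = None, None  # no complete response within 5 seconds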
    -    @staticmethod
    -    def compute_ldap_message_size(data):
    -        """
    -        Compute LDAP Message size according to BER definite length rules
-        Returns -1 if there is not enough data to compute the message length
    -        """
    -        if isinstance(data, str):  # fix for Python 2, data is string not bytes
    -            data = bytearray(data)  # Python 2 bytearray is equivalent to Python 3 bytes
    -
    -        ret_value = -1
    -        if len(data) > 2:
    -            if data[1] <= 127:  # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
    -                ret_value = data[1] + 2
    -            else:  # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
    -                bytes_length = data[1] - 128
    -                if len(data) >= bytes_length + 2:
    -                    value_length = 0
    -                    cont = bytes_length
    -                    for byte in data[2:2 + bytes_length]:
    -                        cont -= 1
    -                        value_length += byte * (256 ** cont)
    -                    ret_value = value_length + 2 + bytes_length
    -
    -        return ret_value
    -
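A worked example of the two BER definite-length forms handled above, calling the staticmethod directly on synthetic byte strings:

from ldap3.strategy.base import BaseStrategy

# short form: second byte 0x0c <= 127, so message size = 0x0c + 2 = 14
assert BaseStrategy.compute_ldap_message_size(b'\x30\x0c' + b'\x00' * 12) == 14

# long form: 0x82 says two length octets follow; value length = 0x01 * 256 + 0x0f = 271,
# total = 271 + 2 header bytes + 2 length octets = 275
assert BaseStrategy.compute_ldap_message_size(b'\x30\x82\x01\x0f' + b'\x00' * 271) == 275

# fewer than 3 bytes available: not enough data, so -1
assert BaseStrategy.compute_ldap_message_size(b'\x30') == -1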
    -    def decode_response(self, ldap_message):
    -        """
    -        Convert received LDAPMessage to a dict
    -        """
    -        message_type = ldap_message.getComponentByName('protocolOp').getName()
    -        component = ldap_message['protocolOp'].getComponent()
    -        controls = ldap_message['controls']
    -        if message_type == 'bindResponse':
    -            if not bytes(component['matchedDN']).startswith(b'NTLM'):  # patch for microsoft ntlm authentication
    -                result = bind_response_to_dict(component)
    -            else:
    -                result = sicily_bind_response_to_dict(component)
    -        elif message_type == 'searchResEntry':
    -            result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
    -        elif message_type == 'searchResDone':
    -            result = search_result_done_response_to_dict(component)
    -        elif message_type == 'searchResRef':
    -            result = search_result_reference_response_to_dict(component)
    -        elif message_type == 'modifyResponse':
    -            result = modify_response_to_dict(component)
    -        elif message_type == 'addResponse':
    -            result = add_response_to_dict(component)
    -        elif message_type == 'delResponse':
    -            result = delete_response_to_dict(component)
    -        elif message_type == 'modDNResponse':
    -            result = modify_dn_response_to_dict(component)
    -        elif message_type == 'compareResponse':
    -            result = compare_response_to_dict(component)
    -        elif message_type == 'extendedResp':
    -            result = extended_response_to_dict(component)
    -        elif message_type == 'intermediateResponse':
    -            result = intermediate_response_to_dict(component)
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection)
    -            raise LDAPUnknownResponseError('unknown response')
    -        result['type'] = message_type
    -        if controls:
    -            result['controls'] = dict()
    -            for control in controls:
    -                decoded_control = self.decode_control(control)
    -                result['controls'][decoded_control[0]] = decoded_control[1]
    -        return result
    -
    -    def decode_response_fast(self, ldap_message):
    -        """
    -        Convert received LDAPMessage from fast ber decoder to a dict
    -        """
    -        if ldap_message['protocolOp'] == 1:  # bindResponse
    -            if not ldap_message['payload'][1][3].startswith(b'NTLM'):  # patch for microsoft ntlm authentication
    -                result = bind_response_to_dict_fast(ldap_message['payload'])
    -            else:
    -                result = sicily_bind_response_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'bindResponse'
-        elif ldap_message['protocolOp'] == 4:  # searchResEntry
    -            result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
    -            result['type'] = 'searchResEntry'
    -        elif ldap_message['protocolOp'] == 5:  # searchResDone
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'searchResDone'
    -        elif ldap_message['protocolOp'] == 19:  # searchResRef
    -            result = search_result_reference_response_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'searchResRef'
    -        elif ldap_message['protocolOp'] == 7:  # modifyResponse
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'modifyResponse'
    -        elif ldap_message['protocolOp'] == 9:  # addResponse
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'addResponse'
    -        elif ldap_message['protocolOp'] == 11:  # delResponse
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'delResponse'
    -        elif ldap_message['protocolOp'] == 13:  # modDNResponse
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'modDNResponse'
    -        elif ldap_message['protocolOp'] == 15:  # compareResponse
    -            result = ldap_result_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'compareResponse'
    -        elif ldap_message['protocolOp'] == 24:  # extendedResp
    -            result = extended_response_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'extendedResp'
    -        elif ldap_message['protocolOp'] == 25:  # intermediateResponse
    -            result = intermediate_response_to_dict_fast(ldap_message['payload'])
    -            result['type'] = 'intermediateResponse'
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection)
    -            raise LDAPUnknownResponseError('unknown response')
    -        if ldap_message['controls']:
    -            result['controls'] = dict()
    -            for control in ldap_message['controls']:
    -                decoded_control = self.decode_control_fast(control[3])
    -                result['controls'][decoded_control[0]] = decoded_control[1]
    -        return result
    -
    -    @staticmethod
    -    def decode_control(control):
    -        """
    -        decode control, return a 2-element tuple where the first element is the control oid
    -        and the second element is a dictionary with description (from Oids), criticality and decoded control value
    -        """
    -        control_type = str(control['controlType'])
    -        criticality = bool(control['criticality'])
    -        control_value = bytes(control['controlValue'])
    -        unprocessed = None
    -        if control_type == '1.2.840.113556.1.4.319':  # simple paged search as per RFC2696
    -            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue())
    -            control_value = dict()
    -            control_value['size'] = int(control_resp['size'])
    -            control_value['cookie'] = bytes(control_resp['cookie'])
    -        elif control_type == '1.2.840.113556.1.4.841':  # DirSync AD
    -            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue())
    -            control_value = dict()
    -            control_value['more_results'] = bool(control_resp['MoreResults'])  # more_result if nonzero
    -            control_value['cookie'] = bytes(control_resp['CookieServer'])
    -        elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2':  # Pre-Read control, Post-Read Control as per RFC 4527
    -            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry())
    -            control_value = dict()
    -            control_value['result'] = attributes_to_dict(control_resp['attributes'])
    -        if unprocessed:
-            if log_enabled(ERROR):
-                log(ERROR, 'unprocessed control response in substrate')
-            raise LDAPControlError('unprocessed control response in substrate')
    -        return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
    -
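The first branch above decodes the RFC 2696 simple-paged-results control; on the client side the decoded cookie drives the next page. A sketch of the usual loop over a synchronous connection (host, credentials and search parameters are placeholders):

from ldap3 import Server, Connection

PAGED_OID = '1.2.840.113556.1.4.319'  # RFC 2696 simple paged results

conn = Connection(Server('ldap.example.com'), user='cn=admin,dc=example,dc=com',
                  password='secret', auto_bind=True)
cookie = None
while True:
    conn.search('dc=example,dc=com', '(objectclass=person)', attributes=['cn'],
                paged_size=100, paged_cookie=cookie)
    # decode_control() above put the decoded value under result['controls']
    cookie = conn.result['controls'][PAGED_OID]['value']['cookie']
    if not cookie:  # an empty cookie marks the last page
        break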
    -    @staticmethod
    -    def decode_control_fast(control):
    -        """
    -        decode control, return a 2-element tuple where the first element is the control oid
    -        and the second element is a dictionary with description (from Oids), criticality and decoded control value
    -        """
    -        control_type = str(to_unicode(control[0][3], from_server=True))
    -        criticality = False
    -        control_value = None
    -        for r in control[1:]:
    -            if r[2] == 4:  # controlValue
    -                control_value = r[3]
    -            else:
-                criticality = False if r[3] == 0 else True  # criticality (boolean, defaults to False)
    -        if control_type == '1.2.840.113556.1.4.319':  # simple paged search as per RFC2696
    -            control_resp = decode_sequence(control_value, 0, len(control_value))
    -            control_value = dict()
    -            control_value['size'] = int(control_resp[0][3][0][3])
    -            control_value['cookie'] = bytes(control_resp[0][3][1][3])
    -        elif control_type == '1.2.840.113556.1.4.841':  # DirSync AD
    -            control_resp = decode_sequence(control_value, 0, len(control_value))
    -            control_value = dict()
    -            control_value['more_results'] = True if control_resp[0][3][0][3] else False  # more_result if nonzero
    -            control_value['cookie'] = control_resp[0][3][2][3]
    -        elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2':  # Pre-Read control, Post-Read Control as per RFC 4527
    -            control_resp = decode_sequence(control_value, 0, len(control_value))
    -            control_value = dict()
    -            control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
    -        return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
    -
    -    @staticmethod
    -    def decode_request(message_type, component, controls=None):
    -        # message_type = ldap_message.getComponentByName('protocolOp').getName()
    -        # component = ldap_message['protocolOp'].getComponent()
    -        if message_type == 'bindRequest':
    -            result = bind_request_to_dict(component)
    -        elif message_type == 'unbindRequest':
    -            result = dict()
    -        elif message_type == 'addRequest':
    -            result = add_request_to_dict(component)
    -        elif message_type == 'compareRequest':
    -            result = compare_request_to_dict(component)
    -        elif message_type == 'delRequest':
    -            result = delete_request_to_dict(component)
    -        elif message_type == 'extendedReq':
    -            result = extended_request_to_dict(component)
    -        elif message_type == 'modifyRequest':
    -            result = modify_request_to_dict(component)
    -        elif message_type == 'modDNRequest':
    -            result = modify_dn_request_to_dict(component)
    -        elif message_type == 'searchRequest':
    -            result = search_request_to_dict(component)
    -        elif message_type == 'abandonRequest':
    -            result = abandon_request_to_dict(component)
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'unknown request <%s>', message_type)
    -            raise LDAPUnknownRequestError('unknown request')
    -        result['type'] = message_type
    -        result['controls'] = controls
    -
    -        return result
    -
    -    def valid_referral_list(self, referrals):
    -        referral_list = []
    -        for referral in referrals:
    -            candidate_referral = parse_uri(referral)
    -            if candidate_referral:
    -                for ref_host in self.connection.server.allowed_referral_hosts:
    -                    if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
    -                        if candidate_referral['host'] not in self._referrals:
    -                            candidate_referral['anonymousBindOnly'] = not ref_host[1]
    -                            referral_list.append(candidate_referral)
    -                            break
    -
    -        return referral_list
    -
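valid_referral_list() keeps only referrals whose host matches the server's allowed_referral_hosts whitelist; the second element of each tuple decides whether the referral connection may reuse the original credentials (False forces an anonymous bind via the anonymousBindOnly flag above). For example, with a placeholder host:

from ldap3 import Server

# '*' accepts any referral host; True allows binding with the original
# credentials, False would force an anonymous bind on the referral connection
server = Server('ldap.example.com', allowed_referral_hosts=[('*', True)])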
    -    def do_next_range_search(self, request, response, attr_name):
    -        done = False
    -        current_response = response
    -        while not done:
    -            attr_type, _, returned_range = attr_name.partition(';range=')
    -            _, _, high_range = returned_range.partition('-')
    -            response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
    -            response['attributes'][attr_type] += current_response['attributes'][attr_name]
    -            if high_range != '*':
    -                if log_enabled(PROTOCOL):
    -                    log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection)
    -                requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*'
    -                result = self.connection.search(search_base=response['dn'],
    -                                                search_filter='(objectclass=*)',
    -                                                search_scope=BASE,
    -                                                dereference_aliases=request['dereferenceAlias'],
    -                                                attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*'])
    -                if isinstance(result, bool):
    -                    if result:
    -                        current_response = self.connection.response[0]
    -                    else:
    -                        done = True
    -                else:
    -                    current_response, _ = self.get_response(result)
    -                    current_response = current_response[0]
    -
    -                if not done:
    -                    if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0:
    -                        del current_response['raw_attributes'][requested_range]
    -                        del current_response['attributes'][requested_range]
    -                    attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0]
    -                    continue
    -
    -            done = True
    -
    -    def do_search_on_auto_range(self, request, response):
    -        for resp in [r for r in response if r['type'] == 'searchResEntry']:
    -            for attr_name in list(resp['raw_attributes'].keys()):  # generate list to avoid changing of dict size error
    -                if ';range=' in attr_name:
    -                    attr_type, _, range_values = attr_name.partition(';range=')
    -                    if range_values in ('1-1', '0-0'):  # DirSync returns these values for adding and removing members
    -                        return False
    -                    if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None:
    -                        resp['raw_attributes'][attr_type] = list()
    -                    if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None:
    -                        resp['attributes'][attr_type] = list()
    -                    self.do_next_range_search(request, resp, attr_name)
    -        return True
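do_search_on_auto_range() and do_next_range_search() above transparently collect range-tagged attributes (e.g. an Active Directory server returning member;range=0-1499) by issuing follow-up 'attr;range=N-*' searches and merging the values. The behaviour is enabled per connection; a sketch with a placeholder host:

from ldap3 import Server, Connection

# with auto_range=True the range fragments are merged back into a single
# attribute before the response is handed to the caller
conn = Connection(Server('ldap.example.com'), auto_range=True)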
-
-    def do_operation_on_referral(self, request, referrals):
    -        if log_enabled(PROTOCOL):
    -            log(PROTOCOL, 'following referral for <%s>', self.connection)
    -        valid_referral_list = self.valid_referral_list(referrals)
    -        if valid_referral_list:
    -            preferred_referral_list = [referral for referral in valid_referral_list if referral['ssl'] == self.connection.server.ssl]
    -            selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(valid_referral_list)
    -
    -            cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl'])
    -            if self.connection.use_referral_cache and cachekey in self.referral_cache:
    -                referral_connection = self.referral_cache[cachekey]
    -            else:
    -                referral_server = Server(host=selected_referral['host'],
    -                                         port=selected_referral['port'] or self.connection.server.port,
    -                                         use_ssl=selected_referral['ssl'],
    -                                         get_info=self.connection.server.get_info,
    -                                         formatter=self.connection.server.custom_formatter,
    -                                         connect_timeout=self.connection.server.connect_timeout,
    -                                         mode=self.connection.server.mode,
    -                                         allowed_referral_hosts=self.connection.server.allowed_referral_hosts,
    -                                         tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file,
    -                                                 local_certificate_file=self.connection.server.tls.certificate_file,
    -                                                 validate=self.connection.server.tls.validate,
    -                                                 version=self.connection.server.tls.version,
    -                                                 ca_certs_file=self.connection.server.tls.ca_certs_file) if selected_referral['ssl'] else None)
    -
    -                from ..core.connection import Connection
    -
    -                referral_connection = Connection(server=referral_server,
    -                                                 user=self.connection.user if not selected_referral['anonymousBindOnly'] else None,
    -                                                 password=self.connection.password if not selected_referral['anonymousBindOnly'] else None,
    -                                                 version=self.connection.version,
    -                                                 authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS,
    -                                                 client_strategy=SYNC,
    -                                                 auto_referrals=True,
    -                                                 read_only=self.connection.read_only,
    -                                                 check_names=self.connection.check_names,
    -                                                 raise_exceptions=self.connection.raise_exceptions,
    -                                                 fast_decoder=self.connection.fast_decoder,
    -                                                 receive_timeout=self.connection.receive_timeout,
    -                                                 sasl_mechanism=self.connection.sasl_mechanism,
    -                                                 sasl_credentials=self.connection.sasl_credentials)
    -
    -                if self.connection.usage:
    -                    self.connection._usage.referrals_connections += 1
    -
    -                referral_connection.open()
    -                referral_connection.strategy._referrals = self._referrals
    -                if self.connection.tls_started and not referral_server.ssl:  # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection
    -                    referral_connection.start_tls()
    -
    -                if self.connection.bound:
    -                    referral_connection.bind()
    -
    -            if self.connection.usage:
    -                self.connection._usage.referrals_followed += 1
    -
    -            if request['type'] == 'searchRequest':
    -                referral_connection.search(selected_referral['base'] or request['base'],
    -                                           selected_referral['filter'] or request['filter'],
    -                                           selected_referral['scope'] or request['scope'],
    -                                           request['dereferenceAlias'],
    -                                           selected_referral['attributes'] or request['attributes'],
    -                                           request['sizeLimit'],
    -                                           request['timeLimit'],
    -                                           request['typesOnly'],
    -                                           controls=request['controls'])
    -            elif request['type'] == 'addRequest':
    -                referral_connection.add(selected_referral['base'] or request['entry'],
    -                                        None,
    -                                        request['attributes'],
    -                                        controls=request['controls'])
    -            elif request['type'] == 'compareRequest':
    -                referral_connection.compare(selected_referral['base'] or request['entry'],
    -                                            request['attribute'],
    -                                            request['value'],
    -                                            controls=request['controls'])
    -            elif request['type'] == 'delRequest':
    -                referral_connection.delete(selected_referral['base'] or request['entry'],
    -                                           controls=request['controls'])
    -            elif request['type'] == 'extendedReq':
    -                referral_connection.extended(request['name'],
    -                                             request['value'],
    -                                             controls=request['controls'],
    -                                             no_encode=True
    -                                             )
    -            elif request['type'] == 'modifyRequest':
    -                referral_connection.modify(selected_referral['base'] or request['entry'],
    -                                           prepare_changes_for_request(request['changes']),
    -                                           controls=request['controls'])
    -            elif request['type'] == 'modDNRequest':
    -                referral_connection.modify_dn(selected_referral['base'] or request['entry'],
    -                                              request['newRdn'],
    -                                              request['deleteOldRdn'],
    -                                              request['newSuperior'],
    -                                              controls=request['controls'])
    -            else:
    -                self.connection.last_error = 'referral operation not permitted'
    -                if log_enabled(ERROR):
    -                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                raise LDAPReferralError(self.connection.last_error)
    -
    -            response = referral_connection.response
    -            result = referral_connection.result
    -            if self.connection.use_referral_cache:
    -                self.referral_cache[cachekey] = referral_connection
    -            else:
    -                referral_connection.unbind()
    -        else:
    -            response = None
    -            result = None
    -
    -        return response, result
    -
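do_operation_on_referral() replays the original operation against a referral server, reusing credentials where the whitelist allows it. auto_referrals turns the behaviour on, and use_referral_cache keeps referral connections open for reuse (they are released in unbind_referral_cache() below). A sketch with placeholder host and credentials:

from ldap3 import Server, Connection

conn = Connection(Server('ldap.example.com'),
                  user='cn=admin,dc=example,dc=com', password='secret',
                  auto_referrals=True,       # follow referrals automatically
                  use_referral_cache=True)   # reuse referral connections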
    -    def sending(self, ldap_message):
    -        if log_enabled(NETWORK):
    -            log(NETWORK, 'sending 1 ldap message for <%s>', self.connection)
    -        try:
    -            encoded_message = encode(ldap_message)
    -            self.connection.socket.sendall(encoded_message)
    -            if log_enabled(EXTENDED):
    -                log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>'))
    -            if log_enabled(NETWORK):
    -                log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection)
    -        except socket.error as e:
-            self.connection.last_error = 'socket sending error: ' + str(e)
    -            encoded_message = None
    -            if log_enabled(ERROR):
    -                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -            # raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
    -            raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error)
    -        if self.connection.usage:
    -            self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
    -
    -    def _start_listen(self):
    -        # overridden on strategy class
    -        raise NotImplementedError
    -
    -    def _get_response(self, message_id):
    -        # overridden in strategy class
    -        raise NotImplementedError
    -
    -    def receiving(self):
    -        # overridden in strategy class
    -        raise NotImplementedError
    -
    -    def post_send_single_response(self, message_id):
    -        # overridden in strategy class
    -        raise NotImplementedError
    -
    -    def post_send_search(self, message_id):
    -        # overridden in strategy class
    -        raise NotImplementedError
    -
    -    def get_stream(self):
    -        raise NotImplementedError
    -
    -    def set_stream(self, value):
    -        raise NotImplementedError
    -
    -    def unbind_referral_cache(self):
    -        while len(self.referral_cache) > 0:
    -            cachekey, referral_connection = self.referral_cache.popitem()
    -            referral_connection.unbind()
    +"""
    +"""
    +
    +# Created on 2013.07.15
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2013 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +import socket
    +from struct import pack
    +from platform import system
    +from time import sleep
    +from random import choice
    +
    +from .. import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
    +from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL
    +from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\
    +    LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \
    +    LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError
    +from ..utils.uri import parse_uri
    +from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry
    +from ..operation.add import add_response_to_dict, add_request_to_dict
    +from ..operation.modify import modify_request_to_dict, modify_response_to_dict
    +from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\
    +    search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\
    +    search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast
    +from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \
    +    sicily_bind_response_to_dict_fast
    +from ..operation.compare import compare_response_to_dict, compare_request_to_dict
    +from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast
    +from ..core.server import Server
    +from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict
    +from ..operation.delete import delete_response_to_dict, delete_request_to_dict
    +from ..protocol.convert import prepare_changes_for_request, build_controls_list
    +from ..operation.abandon import abandon_request_to_dict
    +from ..core.tls import Tls
    +from ..protocol.oid import Oids
    +from ..protocol.rfc2696 import RealSearchControlValue
    +from ..protocol.microsoft import DirSyncControlResponseValue
    +from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message
    +from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence
    +from ..utils.conv import to_unicode
    +
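+# Sentinel values exchanged with the receiving layer: the first two are turned
+# into exceptions by get_response() below, while RESPONSE_COMPLETE marks the
+# end of a response stream.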
    +SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER'
    +TRANSACTION_ERROR = 'TRANSACTION_ERROR'
    +RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE'
    +
    +
    +# noinspection PyProtectedMember
    +class BaseStrategy(object):
    +    """
    +    Base class for connection strategy
    +    """
    +
    +    def __init__(self, ldap_connection):
    +        self.connection = ldap_connection
    +        self._outstanding = None
    +        self._referrals = []
    +        self.sync = None  # indicates a synchronous connection
    +        self.no_real_dsa = None  # indicates a connection to a fake LDAP server
+        self.pooled = None  # indicates a connection with a connection pool
    +        self.can_stream = None  # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). Stream must be initialized and closed in _start_listen() and _stop_listen()
    +        self.referral_cache = {}
    +        if log_enabled(BASIC):
    +            log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
    +
    +    def __str__(self):
    +        s = [
    +            str(self.connection) if self.connection else 'None',
    +            'sync' if self.sync else 'async',
    +            'no real DSA' if self.no_real_dsa else 'real DSA',
    +            'pooled' if self.pooled else 'not pooled',
    +            'can stream output' if self.can_stream else 'cannot stream output',
    +        ]
    +        return ' - '.join(s)
    +
    +    def open(self, reset_usage=True, read_server_info=True):
    +        """
    +        Open a socket to a server. Choose a server from the server pool if available
    +        """
    +        if log_enabled(NETWORK):
    +            log(NETWORK, 'opening connection for <%s>', self.connection)
    +        if self.connection.lazy and not self.connection._executing_deferred:
    +            self.connection._deferred_open = True
    +            self.connection.closed = False
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'deferring open connection for <%s>', self.connection)
    +        else:
    +            if not self.connection.closed and not self.connection._executing_deferred:  # try to close connection if still open
    +                self.close()
    +
    +            self._outstanding = dict()
    +            if self.connection.usage:
    +                if reset_usage or not self.connection._usage.initial_connection_start_time:
    +                    self.connection._usage.start()
    +
    +            if self.connection.server_pool:
    +                new_server = self.connection.server_pool.get_server(self.connection)  # get a server from the server_pool if available
    +                if self.connection.server != new_server:
    +                    self.connection.server = new_server
    +                    if self.connection.usage:
    +                        self.connection._usage.servers_from_pool += 1
    +
    +            exception_history = []
    +            if not self.no_real_dsa:  # tries to connect to a real server
    +                for candidate_address in self.connection.server.candidate_addresses():
    +                    try:
    +                        if log_enabled(BASIC):
    +                            log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
    +                        self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
    +                        self.connection.server.current_address = candidate_address
    +                        self.connection.server.update_availability(candidate_address, True)
    +                        break
    +                    except Exception as e:
    +                        self.connection.server.update_availability(candidate_address, False)
    +                        # exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4]))
    +                        exception_history.append((type(e)(str(e)), candidate_address[4]))
    +                if not self.connection.server.current_address and exception_history:
    +                    if len(exception_history) == 1:  # only one exception, reraise
    +                        if log_enabled(ERROR):
    +                            log(ERROR, '<%s> for <%s>', str(exception_history[0][0]) + ' ' + str((exception_history[0][1])), self.connection)
    +                        raise exception_history[0][0]
    +                    else:
    +                        if log_enabled(ERROR):
    +                            log(ERROR, 'unable to open socket for <%s>', self.connection)
    +                        raise LDAPSocketOpenError('unable to open socket', exception_history)
    +                elif not self.connection.server.current_address:
    +                    if log_enabled(ERROR):
    +                        log(ERROR, 'invalid server address for <%s>', self.connection)
    +                    raise LDAPSocketOpenError('invalid server address')
    +
    +            self.connection._deferred_open = False
    +            self._start_listen()
    +            # self.connection.do_auto_bind()
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'connection open for <%s>', self.connection)
    +
    +    def close(self):
    +        """
    +        Close connection
    +        """
    +        if log_enabled(NETWORK):
    +            log(NETWORK, 'closing connection for <%s>', self.connection)
    +        if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
    +            self.connection.listening = False
    +            self.connection.closed = True
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'deferred connection closed for <%s>', self.connection)
    +        else:
    +            if not self.connection.closed:
    +                self._stop_listen()
+                if not self.no_real_dsa:
    +                    self._close_socket()
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'connection closed for <%s>', self.connection)
    +
    +        self.connection.bound = False
    +        self.connection.request = None
    +        self.connection.response = None
    +        self.connection.tls_started = False
    +        self._outstanding = None
    +        self._referrals = []
    +
    +        if not self.connection.strategy.no_real_dsa:
    +            self.connection.server.current_address = None
    +        if self.connection.usage:
    +            self.connection._usage.stop()
    +
    +    def _open_socket(self, address, use_ssl=False, unix_socket=False):
    +        """
    +        Tries to open and connect a socket to a Server
    +        raise LDAPExceptionError if unable to open or connect socket
    +        """
    +        try:
    +            self.connection.socket = socket.socket(*address[:3])
    +        except Exception as e:
    +            self.connection.last_error = 'socket creation error: ' + str(e)
    +            if log_enabled(ERROR):
    +                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    +            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    +
    +        # Try to bind the socket locally before connecting to the remote address
    +        # We go through our connection's source ports and try to bind our socket to our connection's source address
    +        # with them.
    +        # If no source address or ports were specified, this will have the same success/fail result as if we
    +        # tried to connect to the remote server without binding locally first.
    +        # This is actually a little bit better, as it lets us distinguish the case of "issue binding the socket
    +        # locally" from "remote server is unavailable" with more clarity, though this will only really be an
    +        # issue when no source address/port is specified if the system checking server availability is running
    +        # as a very unprivileged user.
    +        last_bind_exc = None
    +        socket_bind_succeeded = False
    +        for source_port in self.connection.source_port_list:
    +            try:
    +                self.connection.socket.bind((self.connection.source_address, source_port))
    +                socket_bind_succeeded = True
    +                break
    +            except Exception as bind_ex:
    +                last_bind_exc = bind_ex
    +                # we'll always end up logging at error level if we cannot bind any ports to the address locally.
    +                # but if some work and some don't you probably don't want the ones that don't at ERROR level
    +                if log_enabled(NETWORK):
    +                    log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>',
    +                        self.connection.source_address, source_port, bind_ex)
    +        if not socket_bind_succeeded:
    +            self.connection.last_error = 'socket connection error while locally binding: ' + str(last_bind_exc)
    +            if log_enabled(ERROR):
    +                log(ERROR, 'Unable to locally bind to local address <%s> with any of the source ports <%s> for connection <%s due to <%s>',
    +                    self.connection.source_address, self.connection.source_port_list, self.connection, last_bind_exc)
    +            raise communication_exception_factory(LDAPSocketOpenError, type(last_bind_exc)(str(last_bind_exc)))(last_bind_exc)
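+        # Illustrative sketch (hypothetical values): the local side is typically
+        # pinned via the Connection constructor, e.g.
+        #     Connection(server, source_address='10.0.0.5', source_port_list=[5000, 5001])
+        # so the loop above tries to bind 10.0.0.5:5000 first, then 10.0.0.5:5001.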
    +
    +        try:  # set socket timeout for opening connection
    +            if self.connection.server.connect_timeout:
    +                self.connection.socket.settimeout(self.connection.server.connect_timeout)
    +            self.connection.socket.connect(address[4])
    +        except socket.error as e:
    +            self.connection.last_error = 'socket connection error while opening: ' + str(e)
    +            if log_enabled(ERROR):
    +                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +            # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    +            raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    +
    +        # Set connection recv timeout (must be set after connect,
    +        # because socket.settimeout() affects both, connect() as
    +        # well as recv(). Set it before tls.wrap_socket() because
    +        # the recv timeout should take effect during the TLS
    +        # handshake.
    +        if self.connection.receive_timeout is not None:
    +            try:  # set receive timeout for the connection socket
    +                self.connection.socket.settimeout(self.connection.receive_timeout)
    +                if system().lower() == 'windows':
    +                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
    +                else:
    +                    self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
    +            except socket.error as e:
    +                self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    +
    +        if use_ssl:
    +            try:
    +                self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
    +                if self.connection.usage:
    +                    self.connection._usage.wrapped_sockets += 1
    +            except Exception as e:
    +                self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
    +                raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
    +        if self.connection.usage:
    +            self.connection._usage.open_sockets += 1
    +
    +        self.connection.closed = False
    +
    +    def _close_socket(self):
    +        """
    +        Try to close a socket
    +        don't raise exception if unable to close socket, assume socket is already closed
    +        """
    +
    +        try:
    +            self.connection.socket.shutdown(socket.SHUT_RDWR)
    +        except Exception:
    +            pass
    +
    +        try:
    +            self.connection.socket.close()
    +        except Exception:
    +            pass
    +
    +        self.connection.socket = None
    +        self.connection.closed = True
    +
    +        if self.connection.usage:
    +            self.connection._usage.closed_sockets += 1
    +
    +    def _stop_listen(self):
    +        self.connection.listening = False
    +
    +    def send(self, message_type, request, controls=None):
    +        """
    +        Send an LDAP message
    +        Returns the message_id
    +        """
    +        self.connection.request = None
    +        if self.connection.listening:
    +            if self.connection.sasl_in_progress and message_type not in ['bindRequest']:  # as per RFC4511 (4.2.1)
    +                self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                raise LDAPSASLBindInProgressError(self.connection.last_error)
    +            message_id = self.connection.server.next_message_id()
    +            ldap_message = LDAPMessage()
    +            ldap_message['messageID'] = MessageID(message_id)
    +            ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
    +            message_controls = build_controls_list(controls)
    +            if message_controls is not None:
    +                ldap_message['controls'] = message_controls
    +            self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
    +            self._outstanding[message_id] = self.connection.request
    +            self.sending(ldap_message)
    +        else:
    +            self.connection.last_error = 'unable to send message, socket is not open'
    +            if log_enabled(ERROR):
    +                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +            raise LDAPSocketOpenError(self.connection.last_error)
    +
    +        return message_id
    +
    +    def get_response(self, message_id, timeout=None, get_request=False):
    +        """
    +        Get response LDAP messages
    +        Responses are returned by the underlying connection strategy
+        Checks whether the LDAP message with message_id is still outstanding, waiting up to timeout for it to appear in _get_response
+        The result is stored in connection.result
+        Responses without a result are stored in connection.response
    +        A tuple (responses, result) is returned
    +        """
    +        if timeout is None:
    +            timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
    +        response = None
    +        result = None
    +        request = None
    +        if self._outstanding and message_id in self._outstanding:
    +            responses = self._get_response(message_id, timeout)
    +
    +            if not responses:
    +                if log_enabled(ERROR):
    +                    log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
    +                raise LDAPResponseTimeoutError('no response from server')
    +
    +            if responses == SESSION_TERMINATED_BY_SERVER:
    +                try:  # try to close the session but don't raise any error if server has already closed the session
    +                    self.close()
    +                except (socket.error, LDAPExceptionError):
    +                    pass
    +                self.connection.last_error = 'session terminated by server'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                raise LDAPSessionTerminatedByServerError(self.connection.last_error)
    +            elif responses == TRANSACTION_ERROR:  # Novell LDAP Transaction unsolicited notification
    +                self.connection.last_error = 'transaction error'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                raise LDAPTransactionError(self.connection.last_error)
    +
    +            # if referral in response opens a new connection to resolve referrals if requested
    +
    +            if responses[-2]['result'] == RESULT_REFERRAL:
    +                if self.connection.usage:
    +                    self.connection._usage.referrals_received += 1
    +                if self.connection.auto_referrals:
    +                    ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals'])
    +                    if ref_response is not None:
    +                        responses = ref_response + [ref_result]
    +                        responses.append(RESPONSE_COMPLETE)
    +                    elif ref_result is not None:
    +                        responses = [ref_result, RESPONSE_COMPLETE]
    +
    +                    self._referrals = []
    +
    +            if responses:
    +                result = responses[-2]
    +                response = responses[:-2]
    +                self.connection.result = None
    +                self.connection.response = None
    +
    +            if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
    +                self._outstanding.pop(message_id)
    +                self.connection.result = result.copy()
    +                raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
    +
    +            # checks if any response has a range tag
    +            # self._auto_range_searching is set as a flag to avoid recursive searches
    +            if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)):
    +                self._auto_range_searching = result.copy()
    +                temp_response = response[:]  # copy
    +                if self.do_search_on_auto_range(self._outstanding[message_id], response):
    +                    for resp in temp_response:
    +                        if resp['type'] == 'searchResEntry':
    +                            keys = [key for key in resp['raw_attributes'] if ';range=' in key]
    +                            for key in keys:
    +                                del resp['raw_attributes'][key]
    +                                del resp['attributes'][key]
    +                    response = temp_response
    +                    result = self._auto_range_searching
    +                del self._auto_range_searching
    +
    +            if self.connection.empty_attributes:
    +                for entry in response:
    +                    if entry['type'] == 'searchResEntry':
    +                        for attribute_type in self._outstanding[message_id]['attributes']:
    +                            if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
    +                                entry['raw_attributes'][attribute_type] = list()
    +                                entry['attributes'][attribute_type] = list()
    +                                if log_enabled(PROTOCOL):
    +                                    log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self)
    +                        if not self.connection.auto_range:
    +                            attrs_to_remove = []
    +                            # removes original empty attribute in case a range tag is returned
    +                            for attribute_type in entry['attributes']:
    +                                if ';range' in attribute_type.lower():
    +                                    orig_attr, _, _ = attribute_type.partition(';')
    +                                    attrs_to_remove.append(orig_attr)
    +                            for attribute_type in attrs_to_remove:
    +                                if log_enabled(PROTOCOL):
    +                                    log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self)
    +                                del entry['raw_attributes'][attribute_type]
    +                                del entry['attributes'][attribute_type]
    +
    +            request = self._outstanding.pop(message_id)
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'message id not in outstanding queue for <%s>', self.connection)
+            raise LDAPResponseTimeoutError('message id not in outstanding queue')
    +
    +        if get_request:
    +            return response, result, request
    +        else:
    +            return response, result
    +
    +    @staticmethod
    +    def compute_ldap_message_size(data):
    +        """
    +        Compute LDAP Message size according to BER definite length rules
    +        Returns -1 if too few data to compute message length
    +        """
    +        if isinstance(data, str):  # fix for Python 2, data is string not bytes
    +            data = bytearray(data)  # Python 2 bytearray is equivalent to Python 3 bytes
    +
    +        ret_value = -1
    +        if len(data) > 2:
    +            if data[1] <= 127:  # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
    +                ret_value = data[1] + 2
    +            else:  # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
    +                bytes_length = data[1] - 128
    +                if len(data) >= bytes_length + 2:
    +                    value_length = 0
    +                    cont = bytes_length
    +                    for byte in data[2:2 + bytes_length]:
    +                        cont -= 1
    +                        value_length += byte * (256 ** cont)
    +                    ret_value = value_length + 2 + bytes_length
    +
    +        return ret_value
    +
    +    def decode_response(self, ldap_message):
    +        """
    +        Convert received LDAPMessage to a dict
    +        """
    +        message_type = ldap_message.getComponentByName('protocolOp').getName()
    +        component = ldap_message['protocolOp'].getComponent()
    +        controls = ldap_message['controls'] if ldap_message['controls'].hasValue() else None
    +        if message_type == 'bindResponse':
    +            if not bytes(component['matchedDN']).startswith(b'NTLM'):  # patch for microsoft ntlm authentication
    +                result = bind_response_to_dict(component)
    +            else:
    +                result = sicily_bind_response_to_dict(component)
    +        elif message_type == 'searchResEntry':
    +            result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
    +        elif message_type == 'searchResDone':
    +            result = search_result_done_response_to_dict(component)
    +        elif message_type == 'searchResRef':
    +            result = search_result_reference_response_to_dict(component)
    +        elif message_type == 'modifyResponse':
    +            result = modify_response_to_dict(component)
    +        elif message_type == 'addResponse':
    +            result = add_response_to_dict(component)
    +        elif message_type == 'delResponse':
    +            result = delete_response_to_dict(component)
    +        elif message_type == 'modDNResponse':
    +            result = modify_dn_response_to_dict(component)
    +        elif message_type == 'compareResponse':
    +            result = compare_response_to_dict(component)
    +        elif message_type == 'extendedResp':
    +            result = extended_response_to_dict(component)
    +        elif message_type == 'intermediateResponse':
    +            result = intermediate_response_to_dict(component)
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection)
    +            raise LDAPUnknownResponseError('unknown response')
    +        result['type'] = message_type
    +        if controls:
    +            result['controls'] = dict()
    +            for control in controls:
    +                decoded_control = self.decode_control(control)
    +                result['controls'][decoded_control[0]] = decoded_control[1]
    +        return result
    +
    +    def decode_response_fast(self, ldap_message):
    +        """
    +        Convert received LDAPMessage from fast ber decoder to a dict
    +        """
    +        if ldap_message['protocolOp'] == 1:  # bindResponse
    +            if not ldap_message['payload'][1][3].startswith(b'NTLM'):  # patch for microsoft ntlm authentication
    +                result = bind_response_to_dict_fast(ldap_message['payload'])
    +            else:
    +                result = sicily_bind_response_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'bindResponse'
+        elif ldap_message['protocolOp'] == 4:  # searchResEntry
    +            result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
    +            result['type'] = 'searchResEntry'
    +        elif ldap_message['protocolOp'] == 5:  # searchResDone
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'searchResDone'
    +        elif ldap_message['protocolOp'] == 19:  # searchResRef
    +            result = search_result_reference_response_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'searchResRef'
    +        elif ldap_message['protocolOp'] == 7:  # modifyResponse
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'modifyResponse'
    +        elif ldap_message['protocolOp'] == 9:  # addResponse
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'addResponse'
    +        elif ldap_message['protocolOp'] == 11:  # delResponse
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'delResponse'
    +        elif ldap_message['protocolOp'] == 13:  # modDNResponse
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'modDNResponse'
    +        elif ldap_message['protocolOp'] == 15:  # compareResponse
    +            result = ldap_result_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'compareResponse'
    +        elif ldap_message['protocolOp'] == 24:  # extendedResp
    +            result = extended_response_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'extendedResp'
    +        elif ldap_message['protocolOp'] == 25:  # intermediateResponse
    +            result = intermediate_response_to_dict_fast(ldap_message['payload'])
    +            result['type'] = 'intermediateResponse'
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection)
    +            raise LDAPUnknownResponseError('unknown response')
    +        if ldap_message['controls']:
    +            result['controls'] = dict()
    +            for control in ldap_message['controls']:
    +                decoded_control = self.decode_control_fast(control[3])
    +                result['controls'][decoded_control[0]] = decoded_control[1]
    +        return result
    +
    +    @staticmethod
    +    def decode_control(control):
    +        """
    +        decode control, return a 2-element tuple where the first element is the control oid
    +        and the second element is a dictionary with description (from Oids), criticality and decoded control value
    +        """
    +        control_type = str(control['controlType'])
    +        criticality = bool(control['criticality'])
    +        control_value = bytes(control['controlValue'])
    +        unprocessed = None
    +        if control_type == '1.2.840.113556.1.4.319':  # simple paged search as per RFC2696
    +            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue())
    +            control_value = dict()
    +            control_value['size'] = int(control_resp['size'])
    +            control_value['cookie'] = bytes(control_resp['cookie'])
    +        elif control_type == '1.2.840.113556.1.4.841':  # DirSync AD
    +            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue())
    +            control_value = dict()
+            control_value['more_results'] = bool(control_resp['MoreResults'])  # more_results if nonzero
    +            control_value['cookie'] = bytes(control_resp['CookieServer'])
    +        elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2':  # Pre-Read control, Post-Read Control as per RFC 4527
    +            control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry())
    +            control_value = dict()
    +            control_value['result'] = attributes_to_dict(control_resp['attributes'])
    +        if unprocessed:
+            if log_enabled(ERROR):
+                log(ERROR, 'unprocessed control response in substrate')
+            raise LDAPControlError('unprocessed control response in substrate')
    +        return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
    +
    +    @staticmethod
    +    def decode_control_fast(control, from_server=True):
    +        """
    +        decode control, return a 2-element tuple where the first element is the control oid
    +        and the second element is a dictionary with description (from Oids), criticality and decoded control value
    +        """
    +        control_type = str(to_unicode(control[0][3], from_server=from_server))
    +        criticality = False
    +        control_value = None
    +        for r in control[1:]:
    +            if r[2] == 4:  # controlValue
    +                control_value = r[3]
    +            else:
+                criticality = False if r[3] == 0 else True  # criticality (boolean, defaults to False)
    +        if control_type == '1.2.840.113556.1.4.319':  # simple paged search as per RFC2696
    +            control_resp = decode_sequence(control_value, 0, len(control_value))
    +            control_value = dict()
    +            control_value['size'] = int(control_resp[0][3][0][3])
    +            control_value['cookie'] = bytes(control_resp[0][3][1][3])
    +        elif control_type == '1.2.840.113556.1.4.841':  # DirSync AD
    +            control_resp = decode_sequence(control_value, 0, len(control_value))
    +            control_value = dict()
+            control_value['more_results'] = True if control_resp[0][3][0][3] else False  # more_results if nonzero
    +            control_value['cookie'] = control_resp[0][3][2][3]
    +        elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2':  # Pre-Read control, Post-Read Control as per RFC 4527
    +            control_resp = decode_sequence(control_value, 0, len(control_value))
    +            control_value = dict()
    +            control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
    +        return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
    +
    +    @staticmethod
    +    def decode_request(message_type, component, controls=None):
    +        # message_type = ldap_message.getComponentByName('protocolOp').getName()
    +        # component = ldap_message['protocolOp'].getComponent()
    +        if message_type == 'bindRequest':
    +            result = bind_request_to_dict(component)
    +        elif message_type == 'unbindRequest':
    +            result = dict()
    +        elif message_type == 'addRequest':
    +            result = add_request_to_dict(component)
    +        elif message_type == 'compareRequest':
    +            result = compare_request_to_dict(component)
    +        elif message_type == 'delRequest':
    +            result = delete_request_to_dict(component)
    +        elif message_type == 'extendedReq':
    +            result = extended_request_to_dict(component)
    +        elif message_type == 'modifyRequest':
    +            result = modify_request_to_dict(component)
    +        elif message_type == 'modDNRequest':
    +            result = modify_dn_request_to_dict(component)
    +        elif message_type == 'searchRequest':
    +            result = search_request_to_dict(component)
    +        elif message_type == 'abandonRequest':
    +            result = abandon_request_to_dict(component)
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'unknown request <%s>', message_type)
    +            raise LDAPUnknownRequestError('unknown request')
    +        result['type'] = message_type
    +        result['controls'] = controls
    +
    +        return result
    +
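+    # allowed_referral_hosts on the Server is a list of (host, allow_bind) tuples,
+    # e.g. [('*', True)] to follow any referral reusing the current credentials;
+    # a False second element restricts that host to anonymous binds (illustrative).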
    +    def valid_referral_list(self, referrals):
    +        referral_list = []
    +        for referral in referrals:
    +            candidate_referral = parse_uri(referral)
    +            if candidate_referral:
    +                for ref_host in self.connection.server.allowed_referral_hosts:
    +                    if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
    +                        if candidate_referral['host'] not in self._referrals:
    +                            candidate_referral['anonymousBindOnly'] = not ref_host[1]
    +                            referral_list.append(candidate_referral)
    +                            break
    +
    +        return referral_list
    +
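+    # Active Directory returns large multi-valued attributes in ranged fragments,
+    # e.g. 'member;range=0-1499'; the helper below keeps issuing BASE searches for
+    # 'member;range=1500-*' (and so on) until the server replies with a '*' upper
+    # bound, then merges the fragments back into the plain 'member' attribute.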
    +    def do_next_range_search(self, request, response, attr_name):
    +        done = False
    +        current_response = response
    +        while not done:
    +            attr_type, _, returned_range = attr_name.partition(';range=')
    +            _, _, high_range = returned_range.partition('-')
    +            response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
    +            response['attributes'][attr_type] += current_response['attributes'][attr_name]
    +            if high_range != '*':
    +                if log_enabled(PROTOCOL):
    +                    log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection)
    +                requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*'
    +                result = self.connection.search(search_base=response['dn'],
    +                                                search_filter='(objectclass=*)',
    +                                                search_scope=BASE,
    +                                                dereference_aliases=request['dereferenceAlias'],
    +                                                attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*'])
    +                if isinstance(result, bool):
    +                    if result:
    +                        current_response = self.connection.response[0]
    +                    else:
    +                        done = True
    +                else:
    +                    current_response, _ = self.get_response(result)
    +                    current_response = current_response[0]
    +
    +                if not done:
    +                    if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0:
    +                        del current_response['raw_attributes'][requested_range]
    +                        del current_response['attributes'][requested_range]
    +                    attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0]
    +                    continue
    +
    +            done = True
    +
    +    def do_search_on_auto_range(self, request, response):
    +        for resp in [r for r in response if r['type'] == 'searchResEntry']:
    +            for attr_name in list(resp['raw_attributes'].keys()):  # generate list to avoid changing of dict size error
    +                if ';range=' in attr_name:
    +                    attr_type, _, range_values = attr_name.partition(';range=')
    +                    if range_values in ('1-1', '0-0'):  # DirSync returns these values for adding and removing members
    +                        return False
    +                    if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None:
    +                        resp['raw_attributes'][attr_type] = list()
    +                    if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None:
    +                        resp['attributes'][attr_type] = list()
    +                    self.do_next_range_search(request, resp, attr_name)
    +        return True
    +
    +    def create_referral_connection(self, referrals):
    +        referral_connection = None
    +        selected_referral = None
    +        cachekey = None
    +        valid_referral_list = self.valid_referral_list(referrals)
    +        if valid_referral_list:
    +            preferred_referral_list = [referral for referral in valid_referral_list if
    +                                       referral['ssl'] == self.connection.server.ssl]
    +            selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(
    +                valid_referral_list)
    +
    +            cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl'])
    +            if self.connection.use_referral_cache and cachekey in self.referral_cache:
    +                referral_connection = self.referral_cache[cachekey]
    +            else:
    +                referral_server = Server(host=selected_referral['host'],
    +                                         port=selected_referral['port'] or self.connection.server.port,
    +                                         use_ssl=selected_referral['ssl'],
    +                                         get_info=self.connection.server.get_info,
    +                                         formatter=self.connection.server.custom_formatter,
    +                                         connect_timeout=self.connection.server.connect_timeout,
    +                                         mode=self.connection.server.mode,
    +                                         allowed_referral_hosts=self.connection.server.allowed_referral_hosts,
    +                                         tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file,
    +                                                 local_certificate_file=self.connection.server.tls.certificate_file,
    +                                                 validate=self.connection.server.tls.validate,
    +                                                 version=self.connection.server.tls.version,
    +                                                 ca_certs_file=self.connection.server.tls.ca_certs_file) if
    +                                         selected_referral['ssl'] else None)
    +
    +                from ..core.connection import Connection
    +
    +                referral_connection = Connection(server=referral_server,
    +                                                 user=self.connection.user if not selected_referral['anonymousBindOnly'] else None,
    +                                                 password=self.connection.password if not selected_referral['anonymousBindOnly'] else None,
    +                                                 version=self.connection.version,
    +                                                 authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS,
    +                                                 client_strategy=SYNC,
    +                                                 auto_referrals=True,
    +                                                 read_only=self.connection.read_only,
    +                                                 check_names=self.connection.check_names,
    +                                                 raise_exceptions=self.connection.raise_exceptions,
    +                                                 fast_decoder=self.connection.fast_decoder,
    +                                                 receive_timeout=self.connection.receive_timeout,
    +                                                 sasl_mechanism=self.connection.sasl_mechanism,
    +                                                 sasl_credentials=self.connection.sasl_credentials)
    +
    +                if self.connection.usage:
    +                    self.connection._usage.referrals_connections += 1
    +
    +                referral_connection.open()
    +                referral_connection.strategy._referrals = self._referrals
    +                if self.connection.tls_started and not referral_server.ssl:  # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection
    +                    referral_connection.start_tls()
    +
    +                if self.connection.bound:
    +                    referral_connection.bind()
    +
    +            if self.connection.usage:
    +                self.connection._usage.referrals_followed += 1
    +
    +        return selected_referral, referral_connection, cachekey
    +
    +    def do_operation_on_referral(self, request, referrals):
    +        if log_enabled(PROTOCOL):
    +            log(PROTOCOL, 'following referral for <%s>', self.connection)
    +        selected_referral, referral_connection, cachekey = self.create_referral_connection(referrals)
    +        if selected_referral:
    +            if request['type'] == 'searchRequest':
    +                referral_connection.search(selected_referral['base'] or request['base'],
    +                                           selected_referral['filter'] or request['filter'],
    +                                           selected_referral['scope'] or request['scope'],
    +                                           request['dereferenceAlias'],
    +                                           selected_referral['attributes'] or request['attributes'],
    +                                           request['sizeLimit'],
    +                                           request['timeLimit'],
    +                                           request['typesOnly'],
    +                                           controls=request['controls'])
    +            elif request['type'] == 'addRequest':
    +                referral_connection.add(selected_referral['base'] or request['entry'],
    +                                        None,
    +                                        request['attributes'],
    +                                        controls=request['controls'])
    +            elif request['type'] == 'compareRequest':
    +                referral_connection.compare(selected_referral['base'] or request['entry'],
    +                                            request['attribute'],
    +                                            request['value'],
    +                                            controls=request['controls'])
    +            elif request['type'] == 'delRequest':
    +                referral_connection.delete(selected_referral['base'] or request['entry'],
    +                                           controls=request['controls'])
    +            elif request['type'] == 'extendedReq':
    +                referral_connection.extended(request['name'],
    +                                             request['value'],
    +                                             controls=request['controls'],
    +                                             no_encode=True
    +                                             )
    +            elif request['type'] == 'modifyRequest':
    +                referral_connection.modify(selected_referral['base'] or request['entry'],
    +                                           prepare_changes_for_request(request['changes']),
    +                                           controls=request['controls'])
    +            elif request['type'] == 'modDNRequest':
    +                referral_connection.modify_dn(selected_referral['base'] or request['entry'],
    +                                              request['newRdn'],
    +                                              request['deleteOldRdn'],
    +                                              request['newSuperior'],
    +                                              controls=request['controls'])
    +            else:
    +                self.connection.last_error = 'referral operation not permitted'
    +                if log_enabled(ERROR):
    +                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                raise LDAPReferralError(self.connection.last_error)
    +
    +            response = referral_connection.response
    +            result = referral_connection.result
    +            if self.connection.use_referral_cache:
    +                self.referral_cache[cachekey] = referral_connection
    +            else:
    +                referral_connection.unbind()
    +        else:
    +            response = None
    +            result = None
    +
    +        return response, result
    +
    +    def sending(self, ldap_message):
    +        if log_enabled(NETWORK):
    +            log(NETWORK, 'sending 1 ldap message for <%s>', self.connection)
    +        try:
    +            encoded_message = encode(ldap_message)
    +            self.connection.socket.sendall(encoded_message)
    +            if log_enabled(EXTENDED):
    +                log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>'))
    +            if log_enabled(NETWORK):
    +                log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection)
    +        except socket.error as e:
+            self.connection.last_error = 'socket sending error: ' + str(e)
    +            encoded_message = None
    +            if log_enabled(ERROR):
    +                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +            # raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
    +            raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error)
    +        if self.connection.usage:
    +            self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
    +
    +    def _start_listen(self):
    +        # overridden on strategy class
    +        raise NotImplementedError
    +
    +    def _get_response(self, message_id, timeout):
    +        # overridden in strategy class
    +        raise NotImplementedError
    +
    +    def receiving(self):
    +        # overridden in strategy class
    +        raise NotImplementedError
    +
    +    def post_send_single_response(self, message_id):
    +        # overridden in strategy class
    +        raise NotImplementedError
    +
    +    def post_send_search(self, message_id):
    +        # overridden in strategy class
    +        raise NotImplementedError
    +
    +    def get_stream(self):
    +        raise NotImplementedError
    +
    +    def set_stream(self, value):
    +        raise NotImplementedError
    +
    +    def unbind_referral_cache(self):
    +        while len(self.referral_cache) > 0:
    +            cachekey, referral_connection = self.referral_cache.popitem()
    +            referral_connection.unbind()
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/ldifProducer.py b/server/www/packages/packages-windows/x86/ldap3/strategy/ldifProducer.py
    index 119e172..392239e 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/ldifProducer.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/ldifProducer.py
    @@ -1,148 +1,150 @@
    -"""
    -"""
    -
    -# Created on 2013.07.15
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2013 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see <http://www.gnu.org/licenses/>.
    -
    -from io import StringIO
    -from os import linesep
    -import random
    -
    -from ..core.exceptions import LDAPLDIFError
    -from ..utils.conv import prepare_for_stream
    -from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT
    -from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    -from ..protocol.convert import build_controls_list
    -from .base import BaseStrategy
    -
    -
    -class LdifProducerStrategy(BaseStrategy):
    -    """
    -    This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations.
    -    You send the request and get back the request in its ldif-change representation.
    -    NO OPERATION IS SENT TO THE LDAP SERVER!
    -    Connection.request will contain the result LDAP message in a dict form
    -    Connection.response will contain the ldif-change format of the requested operation if available
    -    You don't need a real server to connect to for this strategy
    -    """
    -
    -    def __init__(self, ldap_connection):
    -        BaseStrategy.__init__(self, ldap_connection)
    -        self.sync = True
    -        self.no_real_dsa = True
    -        self.pooled = False
    -        self.can_stream = True
    -        self.line_separator = linesep
    -        self.all_base64 = False
    -        self.stream = None
    -        self.order = dict()
    -        self._header_added = False
    -        random.seed()
    -
    -    def _open_socket(self, address, use_ssl=False, unix_socket=False):  # fake open socket
    -        self.connection.socket = NotImplemented  # placeholder for a dummy socket
    -        if self.connection.usage:
    -            self.connection._usage.open_sockets += 1
    -
    -        self.connection.closed = False
    -
    -    def _close_socket(self):
    -        if self.connection.usage:
    -            self.connection._usage.closed_sockets += 1
    -
    -        self.connection.socket = None
    -        self.connection.closed = True
    -
    -    def _start_listen(self):
    -        self.connection.listening = True
    -        self.connection.closed = False
    -        self._header_added = False
    -        if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
    -            self.set_stream(StringIO())
    -
    -    def _stop_listen(self):
    -        self.stream.close()
    -        self.connection.listening = False
    -        self.connection.closed = True
    -
    -    def receiving(self):
    -        return None
    -
    -    def send(self, message_type, request, controls=None):
    -        """
    -        Build the LDAPMessage without sending to server
    -        """
    -        message_id = random.randint(0, LDAP_MAX_INT)
    -        ldap_message = LDAPMessage()
    -        ldap_message['messageID'] = MessageID(message_id)
    -        ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
    -        message_controls = build_controls_list(controls)
    -        if message_controls is not None:
    -            ldap_message['controls'] = message_controls
    -
    -        self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
    -        self.connection.request['controls'] = controls
    -        self._outstanding[message_id] = self.connection.request
    -        return message_id
    -
    -    def post_send_single_response(self, message_id):
    -        self.connection.response = None
    -        self.connection.result = None
    -        if self._outstanding and message_id in self._outstanding:
    -            request = self._outstanding.pop(message_id)
    -            ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type']))
    -            if self.stream and ldif_lines and not self.connection.closed:
    -                self.accumulate_stream(self.line_separator.join(ldif_lines))
    -            ldif_lines = add_ldif_header(ldif_lines)
    -            self.connection.response = self.line_separator.join(ldif_lines)
    -            return self.connection.response
    -
    -        return None
    -
    -    def post_send_search(self, message_id):
    -        raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations')
    -
    -    def _get_response(self, message_id):
    -        pass
    -
    -    def accumulate_stream(self, fragment):
    -        if not self._header_added and self.stream.tell() == 0:
    -            self._header_added = True
    -            header = add_ldif_header(['-'])[0]
    -            self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
    -        self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
    -
    -    def get_stream(self):
    -        return self.stream
    -
    -    def set_stream(self, value):
    -        error = False
    -        try:
    -            if not value.writable():
    -                error = True
    -        except (ValueError, AttributeError):
    -            error = True
    -
    -        if error:
    -            raise LDAPLDIFError('stream must be writable')
    -
    -        self.stream = value
    +"""
    +"""
    +
    +# Created on 2013.07.15
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2013 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
    +# If not, see <http://www.gnu.org/licenses/>.
    +
    +from io import StringIO
    +from os import linesep
    +import random
    +
    +from ..core.exceptions import LDAPLDIFError
    +from ..utils.conv import prepare_for_stream
    +from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT
    +from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
    +from ..protocol.convert import build_controls_list
    +from .base import BaseStrategy
    +
    +
    +class LdifProducerStrategy(BaseStrategy):
    +    """
    +    This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations.
    +    You send the request and get back the request in its ldif-change representation.
    +    NO OPERATION IS SENT TO THE LDAP SERVER!
    +    Connection.request will contain the result LDAP message in a dict form
    +    Connection.response will contain the ldif-change format of the requested operation if available
    +    You don't need a real server to connect to for this strategy
    +    """
    +
    +    def __init__(self, ldap_connection):
    +        BaseStrategy.__init__(self, ldap_connection)
    +        self.sync = True
    +        self.no_real_dsa = True
    +        self.pooled = False
    +        self.can_stream = True
    +        self.line_separator = linesep
    +        self.all_base64 = False
    +        self.stream = None
    +        self.order = dict()
    +        self._header_added = False
    +        random.seed()
    +
    +    def _open_socket(self, address, use_ssl=False, unix_socket=False):  # fake open socket
    +        self.connection.socket = NotImplemented  # placeholder for a dummy socket
    +        if self.connection.usage:
    +            self.connection._usage.open_sockets += 1
    +
    +        self.connection.closed = False
    +
    +    def _close_socket(self):
    +        if self.connection.usage:
    +            self.connection._usage.closed_sockets += 1
    +
    +        self.connection.socket = None
    +        self.connection.closed = True
    +
    +    def _start_listen(self):
    +        self.connection.listening = True
    +        self.connection.closed = False
    +        self._header_added = False
    +        if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
    +            self.set_stream(StringIO())
    +
    +    def _stop_listen(self):
    +        self.stream.close()
    +        self.connection.listening = False
    +        self.connection.closed = True
    +
    +    def receiving(self):
    +        return None
    +
    +    def send(self, message_type, request, controls=None):
    +        """
    +        Build the LDAPMessage without sending to server
    +        """
    +        message_id = random.randint(0, LDAP_MAX_INT)
    +        ldap_message = LDAPMessage()
    +        ldap_message['messageID'] = MessageID(message_id)
    +        ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
    +        message_controls = build_controls_list(controls)
    +        if message_controls is not None:
    +            ldap_message['controls'] = message_controls
    +
    +        self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
    +        self.connection.request['controls'] = controls
    +        if self._outstanding is None:
    +            self._outstanding = dict()
    +        self._outstanding[message_id] = self.connection.request
    +        return message_id
    +
    +    def post_send_single_response(self, message_id):
    +        self.connection.response = None
    +        self.connection.result = None
    +        if self._outstanding and message_id in self._outstanding:
    +            request = self._outstanding.pop(message_id)
    +            ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type']))
    +            if self.stream and ldif_lines and not self.connection.closed:
    +                self.accumulate_stream(self.line_separator.join(ldif_lines))
    +            ldif_lines = add_ldif_header(ldif_lines)
    +            self.connection.response = self.line_separator.join(ldif_lines)
    +            return self.connection.response
    +
    +        return None
    +
    +    def post_send_search(self, message_id):
    +        raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations')
    +
    +    def _get_response(self, message_id, timeout):
    +        pass
    +
    +    def accumulate_stream(self, fragment):
    +        if not self._header_added and self.stream.tell() == 0:
    +            self._header_added = True
    +            header = add_ldif_header(['-'])[0]
    +            self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
    +        self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
    +
    +    def get_stream(self):
    +        return self.stream
    +
    +    def set_stream(self, value):
    +        error = False
    +        try:
    +            if not value.writable():
    +                error = True
    +        except (ValueError, AttributeError):
    +            error = True
    +
    +        if error:
    +            raise LDAPLDIFError('stream must be writable')
    +
    +        self.stream = value
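
Since LdifProducerStrategy never contacts a server, the file above is easiest to follow with its intended use in mind: every operation is rendered as ldif-change text into Connection.response (and into the stream, if one is attached). A minimal sketch, with an invented DN and attributes; LDIF is the strategy constant exported by the package:

    # Sketch only: the server is never contacted and the entry is invented.
    from ldap3 import Server, Connection, LDIF

    server = Server('fake_server')  # placeholder, no packets are sent
    conn = Connection(server, client_strategy=LDIF)
    conn.open()  # fake socket, see _open_socket() above
    conn.add('cn=test,ou=people,dc=example,dc=com',
             'inetOrgPerson',
             {'sn': 'test', 'cn': 'test'})
    print(conn.response)  # the ldif-change representation of the Add
    conn.unbind()
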
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/mockAsync.py b/server/www/packages/packages-windows/x86/ldap3/strategy/mockAsync.py
    index 2891506..f9965dc 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/mockAsync.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/mockAsync.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/mockBase.py b/server/www/packages/packages-windows/x86/ldap3/strategy/mockBase.py
    index f07c7c2..7acf706 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/mockBase.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/mockBase.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2016 - 2018 Giovanni Cannata
    +# Copyright 2016 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -26,7 +26,6 @@
     import json
     import re
     
    -from threading import Lock
     from random import SystemRandom
     
     from pyasn1.type.univ import OctetString
    @@ -224,7 +223,7 @@ class MockBaseStrategy(object):
                             return False
                         if attribute.lower() == 'objectclass' and self.connection.server.schema:  # builds the objectClass hierarchy only if schema is present
                             class_set = set()
    -                        for object_class in attributes['objectClass']:
    +                        for object_class in attributes[attribute]:
                                 if self.connection.server.schema.object_classes and object_class not in self.connection.server.schema.object_classes:
                                     return False
                                 # walks up the class hierarchy and builds a set of all classes in it
    @@ -654,6 +653,7 @@ class MockBaseStrategy(object):
             if '+' in attributes:  # operational attributes requested
                 attributes.extend(self.operational_attributes)
                 attributes.remove('+')
    +
             attributes = [attr.lower() for attr in request['attributes']]
     
             filter_root = parse_filter(request['filter'], self.connection.server.schema, auto_escape=True, auto_encode=False, validator=self.connection.server.custom_validator, check_names=self.connection.check_names)
    @@ -687,7 +687,11 @@ class MockBaseStrategy(object):
                                            for attribute in self.connection.server.dit[match]
                                            if attribute.lower() in attributes or ALL_ATTRIBUTES in attributes]
                         })
    -
    +                    if '+' not in attributes:  # remove operational attributes
    +                        for op_attr in self.operational_attributes:
    +                            for i, attr in enumerate(responses[len(responses)-1]['attributes']):
    +                                if attr['type'] == op_attr:
    +                                    del responses[len(responses)-1]['attributes'][i]
                     result_code = 0
                     message = ''
     
    @@ -724,12 +728,12 @@ class MockBaseStrategy(object):
                         if extension[0] == '2.16.840.1.113719.1.27.100.31':  # getBindDNRequest [NOVELL]
                             result_code = 0
                             message = ''
    -                        response_name = '2.16.840.1.113719.1.27.100.32'  # getBindDNResponse [NOVELL]
    +                        response_name = OctetString('2.16.840.1.113719.1.27.100.32')  # getBindDNResponse [NOVELL]
                             response_value = OctetString(self.bound)
                         elif extension[0] == '1.3.6.1.4.1.4203.1.11.3':  # WhoAmI [RFC4532]
                             result_code = 0
                             message = ''
    -                        response_name = '1.3.6.1.4.1.4203.1.11.3'  # WhoAmI [RFC4532]
    +                        response_name = OctetString('1.3.6.1.4.1.4203.1.11.3')  # WhoAmI [RFC4532]
                             response_value = OctetString(self.bound)
                         break
     
    @@ -845,7 +849,6 @@ class MockBaseStrategy(object):
                 attr_name = node.assertion['attr']
                 attr_value = node.assertion['value']
                 for candidate in candidates:
    -                # if attr_name in self.connection.server.dit[candidate] and attr_value in self.connection.server.dit[candidate][attr_name]:
                     if attr_name in self.connection.server.dit[candidate] and self.equal(candidate, attr_name, attr_value):
                         node.matched.add(candidate)
                     else:
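
The MockBaseStrategy changes above (matching on the requested attribute instead of hardcoding 'objectClass', wrapping the extended-response names in OctetString, and stripping operational attributes unless '+' was requested) all act on the in-memory DIT used for offline testing. A minimal sketch of that workflow, following the library's documented mocking pattern; server name, DN and values are invented:

    # Sketch only: everything is served from the in-memory mock DIT.
    from ldap3 import Server, Connection, MOCK_SYNC

    server = Server('my_fake_server')
    conn = Connection(server, user='cn=my_user,ou=test,o=lab',
                      password='my_password', client_strategy=MOCK_SYNC)
    # the bind user must exist in the DIT with a matching userPassword
    conn.strategy.add_entry('cn=my_user,ou=test,o=lab',
                            {'userPassword': 'my_password', 'sn': 'user_sn', 'revision': 0})
    conn.bind()
    conn.search('ou=test,o=lab', '(sn=user_sn)', attributes=['sn'])
    print(conn.entries)
    conn.unbind()
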
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/mockSync.py b/server/www/packages/packages-windows/x86/ldap3/strategy/mockSync.py
    index b155781..efd2c15 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/mockSync.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/mockSync.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/restartable.py b/server/www/packages/packages-windows/x86/ldap3/strategy/restartable.py
    index 77ef4cd..d739f41 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/restartable.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/restartable.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
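
restartable.py itself only gets a copyright bump, but its SyncWaitRestartable connections are exactly what the reworked reusable.py below keeps in its pool. For orientation, a pooled connection is configured like this (sketch with invented host, credentials and sizing; the pool_* keywords are the Connection attributes the pool reads, and pool_name is mandatory, as the strategy constructor below enforces):

    # Sketch only: host, credentials and pool sizing are invented.
    from ldap3 import Server, Connection, REUSABLE

    server = Server('ldap://dc.example.com')
    conn = Connection(server,
                      user='cn=admin,dc=example,dc=com', password='secret',
                      client_strategy=REUSABLE,
                      pool_name='tp_pool',  # required, or LDAPConnectionPoolNameIsMandatoryError is raised
                      pool_size=4,          # one worker thread per pooled connection
                      pool_lifetime=3600)   # seconds before a worker is unbound and respawned
    conn.bind()
    msg_id = conn.search('dc=example,dc=com', '(objectClass=person)')  # returns a counter
    response, result = conn.get_response(msg_id)  # fetched from the pool's _incoming dict
    conn.unbind()
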
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/reusable.py b/server/www/packages/packages-windows/x86/ldap3/strategy/reusable.py
    index d70c4d9..01bd9d3 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/reusable.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/reusable.py
    @@ -1,493 +1,495 @@
    -"""
    -"""
    -
    -# Created on 2014.03.23
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see <http://www.gnu.org/licenses/>.
    -
    -from datetime import datetime
    -from os import linesep
    -from threading import Thread, Lock
    -from time import sleep
    -
    -from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND
    -from .base import BaseStrategy
    -from ..core.usage import ConnectionUsage
    -from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError
    -from ..utils.log import log, log_enabled, ERROR, BASIC
    -from ..protocol.rfc4511 import LDAP_MAX_INT
    -
    -TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION'
    -
    -BOGUS_BIND = -1
    -BOGUS_UNBIND = -2
    -BOGUS_EXTENDED = -3
    -BOGUS_ABANDON = -4
    -
    -try:
    -    from queue import Queue, Empty
    -except ImportError:  # Python 2
    -    # noinspection PyUnresolvedReferences
    -    from Queue import Queue, Empty
    -
    -
    -# noinspection PyProtectedMember
    -class ReusableStrategy(BaseStrategy):
    -    """
    -    A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime.
    -    The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of
    -    connections that can be used as needed. Each connection lives in its own thread and has a busy/available status.
    -    The strategy performs the requested operation on the first available connection.
    -    The pool of connections is instantiated at strategy initialization.
    -    The strategy has two customizable properties: the total number of connections in the pool and the lifetime of each connection.
    -    When the lifetime expires the connection is closed and will be opened again when needed.
    -    """
    -    pools = dict()
    -
    -    def receiving(self):
    -        raise NotImplementedError
    -
    -    def _start_listen(self):
    -        raise NotImplementedError
    -
    -    def _get_response(self, message_id):
    -        raise NotImplementedError
    -
    -    def get_stream(self):
    -        raise NotImplementedError
    -
    -    def set_stream(self, value):
    -        raise NotImplementedError
    -
    -    # noinspection PyProtectedMember
    -    class ConnectionPool(object):
    -        """
    -        Container for the Connection Threads
    -        """
    -        def __new__(cls, connection):
    -            if connection.pool_name in ReusableStrategy.pools:  # returns existing connection pool
    -                pool = ReusableStrategy.pools[connection.pool_name]
    -                if not pool.started:  # if pool is not started remove it from the pools singleton and create a new one
    -                    del ReusableStrategy.pools[connection.pool_name]
    -                    return object.__new__(cls)
    -                if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive:  # change keepalive
    -                    pool.keepalive = connection.pool_keepalive
    -                if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime:  # change lifetime
    -                    pool.lifetime = connection.pool_lifetime
    -                if connection.pool_size and pool.pool_size != connection.pool_size:  # if pool size has changed terminate and recreate the connections
    -                    pool.terminate_pool()
    -                    pool.pool_size = connection.pool_size
    -                return pool
    -            else:
    -                return object.__new__(cls)
    -
    -        def __init__(self, connection):
    -            if not hasattr(self, 'workers'):
    -                self.name = connection.pool_name
    -                self.master_connection = connection
    -                self.workers = []
    -                self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE')
    -                self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME')
    -                self.keepalive = connection.pool_keepalive
    -                self.request_queue = Queue()
    -                self.open_pool = False
    -                self.bind_pool = False
    -                self.tls_pool = False
    -                self._incoming = dict()
    -                self.counter = 0
    -                self.terminated_usage = ConnectionUsage() if connection._usage else None
    -                self.terminated = False
    -                self.pool_lock = Lock()
    -                ReusableStrategy.pools[self.name] = self
    -                self.started = False
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'instantiated ConnectionPool: <%r>', self)
    -
    -        def __str__(self):
    -            s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated')
    -            s += ' - responses in queue: ' + str(len(self._incoming))
    -            s += ' - pool size: ' + str(self.pool_size)
    -            s += ' - lifetime: ' + str(self.lifetime)
    -            s += ' - keepalive: ' + str(self.keepalive)
    -            s += ' - open: ' + str(self.open_pool)
    -            s += ' - bind: ' + str(self.bind_pool)
    -            s += ' - tls: ' + str(self.tls_pool) + linesep
    -            s += 'MASTER CONN: ' + str(self.master_connection) + linesep
    -            s += 'WORKERS:'
    -            if self.workers:
    -                for i, worker in enumerate(self.workers):
    -                    s += linesep + str(i).rjust(5) + ': ' + str(worker)
    -            else:
    -                s += linesep + '    no active workers in pool'
    -
    -            return s
    -
    -        def __repr__(self):
    -            return self.__str__()
    -
    -        def get_info_from_server(self):
    -            for worker in self.workers:
    -                with worker.worker_lock:
    -                    if not worker.connection.server.schema or not worker.connection.server.info:
    -                        worker.get_info_from_server = True
    -                    else:
    -                        worker.get_info_from_server = False
    -
    -        def rebind_pool(self):
    -            for worker in self.workers:
    -                with worker.worker_lock:
    -                    worker.connection.rebind(self.master_connection.user,
    -                                             self.master_connection.password,
    -                                             self.master_connection.authentication,
    -                                             self.master_connection.sasl_mechanism,
    -                                             self.master_connection.sasl_credentials)
    -
    -        def start_pool(self):
    -            if not self.started:
    -                self.create_pool()
    -                for worker in self.workers:
    -                    with worker.worker_lock:
    -                        worker.thread.start()
    -                self.started = True
    -                self.terminated = False
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'worker started for pool <%s>', self)
    -                return True
    -            return False
    -
    -        def create_pool(self):
    -            if log_enabled(BASIC):
    -                log(BASIC, 'created pool <%s>', self)
    -            self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)]
    -
    -        def terminate_pool(self):
    -            if not self.terminated:
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'terminating pool <%s>', self)
    -                self.started = False
    -                self.request_queue.join()  # waits for all queue pending operations
    -                for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])):  # put a TERMINATE signal on the queue for each active thread
    -                    self.request_queue.put((TERMINATE_REUSABLE, None, None, None))
    -                self.request_queue.join()  # waits for all queue terminate operations
    -                self.terminated = True
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'pool terminated for <%s>', self)
    -
    -    class PooledConnectionThread(Thread):
    -        """
    -        The thread that holds the Reusable connection and receives operation requests via the queue
    -        Results are sent back in the pool._incoming dict when ready
    -        """
    -        def __init__(self, worker, master_connection):
    -            Thread.__init__(self)
    -            self.daemon = True
    -            self.worker = worker
    -            self.master_connection = master_connection
    -            if log_enabled(BASIC):
    -                log(BASIC, 'instantiated PooledConnectionThread: <%r>', self)
    -
    -        # noinspection PyProtectedMember
    -        def run(self):
    -            self.worker.running = True
    -            terminate = False
    -            pool = self.master_connection.strategy.pool
    -            while not terminate:
    -                try:
    -                    counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive)
    -                except Empty:  # issue an Abandon(0) operation to keep the connection alive - Abandon(0) is a harmless operation
    -                    if not self.worker.connection.closed:
    -                        self.worker.connection.abandon(0)
    -                    continue
    -
    -                with self.worker.worker_lock:
    -                    self.worker.busy = True
    -                    if counter == TERMINATE_REUSABLE:
    -                        terminate = True
    -                        if self.worker.connection.bound:
    -                            try:
    -                                self.worker.connection.unbind()
    -                                if log_enabled(BASIC):
    -                                    log(BASIC, 'thread terminated')
    -                            except LDAPExceptionError:
    -                                pass
    -                    else:
    -                        if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime:  # destroy and create a new connection
    -                            try:
    -                                self.worker.connection.unbind()
    -                            except LDAPExceptionError:
    -                                pass
    -                            self.worker.new_connection()
    -                            if log_enabled(BASIC):
    -                                log(BASIC, 'thread respawn')
    -                        if message_type not in ['bindRequest', 'unbindRequest']:
    -                            try:
    -                                if pool.open_pool and self.worker.connection.closed:
    -                                    self.worker.connection.open(read_server_info=False)
    -                                    if pool.tls_pool and not self.worker.connection.tls_started:
    -                                        self.worker.connection.start_tls(read_server_info=False)
    -                                    if pool.bind_pool and not self.worker.connection.bound:
    -                                        self.worker.connection.bind(read_server_info=False)
    -                                elif pool.open_pool and not self.worker.connection.closed:  # connection already open, issues a start_tls
    -                                    if pool.tls_pool and not self.worker.connection.tls_started:
    -                                        self.worker.connection.start_tls(read_server_info=False)
    -                                if self.worker.get_info_from_server and counter:
    -                                    self.worker.connection._fire_deferred()
    -                                    self.worker.get_info_from_server = False
    -                                response = None
    -                                result = None
    -                                if message_type == 'searchRequest':
    -                                    response = self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls))
    -                                else:
    -                                    response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls))
    -                                result = self.worker.connection.result
    -                                with pool.pool_lock:
    -                                    pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
    -                            except LDAPOperationResult as e:  # raise_exceptions has raised an exception. It must be redirected to the original connection thread
    -                                with pool.pool_lock:
    -                                    pool._incoming[counter] = (type(e)(str(e)), None, None)
    -                            # except LDAPOperationResult as e:  # raise_exceptions has raised an exception. It must be redirected to the original connection thread
    -                            #     exc = e
    -                            # with pool.pool_lock:
    -                            #     if exc:
    -                            #         pool._incoming[counter] = (exc, None, None)
    -                            #     else:
    -                            #         pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
    -
    -                    self.worker.busy = False
    -                    pool.request_queue.task_done()
    -                    self.worker.task_counter += 1
    -            if log_enabled(BASIC):
    -                log(BASIC, 'thread terminated')
    -            if self.master_connection.usage:
    -                pool.terminated_usage += self.worker.connection.usage
    -            self.worker.running = False
    -
    -    class PooledConnectionWorker(object):
    -        """
    -        Container for the restartable connection. It includes a thread and a lock to execute operations on the connection in the pool
    -        """
    -        def __init__(self, connection, request_queue):
    -            self.master_connection = connection
    -            self.request_queue = request_queue
    -            self.running = False
    -            self.busy = False
    -            self.get_info_from_server = False
    -            self.connection = None
    -            self.creation_time = None
    -            self.task_counter = 0
    -            self.new_connection()
    -            self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection)
    -            self.worker_lock = Lock()
    -            if log_enabled(BASIC):
    -                log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self)
    -
    -        def __str__(self):
    -            s = 'CONN: ' + str(self.connection) + linesep + '       THREAD: '
    -            s += 'running' if self.running else 'halted'
    -            s += ' - ' + ('busy' if self.busy else 'available')
    -            s += ' - ' + ('created at: ' + self.creation_time.isoformat())
    -            s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds)
    -            s += ' - requests served: ' + str(self.task_counter)
    -
    -            return s
    -
    -        def new_connection(self):
    -            from ..core.connection import Connection
    -            # noinspection PyProtectedMember
    -            self.creation_time = datetime.now()
    -            self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server,
    -                                         user=self.master_connection.user,
    -                                         password=self.master_connection.password,
    -                                         auto_bind=AUTO_BIND_NONE,  # do not perform auto_bind because it reads again the schema
    -                                         version=self.master_connection.version,
    -                                         authentication=self.master_connection.authentication,
    -                                         client_strategy=RESTARTABLE,
    -                                         auto_referrals=self.master_connection.auto_referrals,
    -                                         auto_range=self.master_connection.auto_range,
    -                                         sasl_mechanism=self.master_connection.sasl_mechanism,
    -                                         sasl_credentials=self.master_connection.sasl_credentials,
    -                                         check_names=self.master_connection.check_names,
    -                                         collect_usage=self.master_connection._usage,
    -                                         read_only=self.master_connection.read_only,
    -                                         raise_exceptions=self.master_connection.raise_exceptions,
    -                                         lazy=False,
    -                                         fast_decoder=self.master_connection.fast_decoder,
    -                                         receive_timeout=self.master_connection.receive_timeout,
    -                                         return_empty_attributes=self.master_connection.empty_attributes)
    -
    -            # simulates auto_bind, always with read_server_info=False
    -            if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
    -                if log_enabled(BASIC):
    -                    log(BASIC, 'performing automatic bind for <%s>', self.connection)
    -                self.connection.open(read_server_info=False)
    -                if self.master_connection.auto_bind == AUTO_BIND_NO_TLS:
    -                    self.connection.bind(read_server_info=False)
    -                elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
    -                    self.connection.start_tls(read_server_info=False)
    -                    self.connection.bind(read_server_info=False)
    -                elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
    -                    self.connection.bind(read_server_info=False)
    -                    self.connection.start_tls(read_server_info=False)
    -
    -            if self.master_connection.server_pool:
    -                self.connection.server_pool = self.master_connection.server_pool
    -                self.connection.server_pool.initialize(self.connection)
    -
    -    # ReusableStrategy methods
    -    def __init__(self, ldap_connection):
    -        BaseStrategy.__init__(self, ldap_connection)
    -        self.sync = False
    -        self.no_real_dsa = False
    -        self.pooled = True
    -        self.can_stream = False
    -        if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name:
    -            self.pool = ReusableStrategy.ConnectionPool(ldap_connection)
    -        else:
    -            if log_enabled(ERROR):
    -                log(ERROR, 'reusable connection must have a pool_name')
    -            raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name')
    -
    -    def open(self, reset_usage=True, read_server_info=True):
    -        # read_server_info not used
    -        self.pool.open_pool = True
    -        self.pool.start_pool()
    -        self.connection.closed = False
    -        if self.connection.usage:
    -            if reset_usage or not self.connection._usage.initial_connection_start_time:
    -                self.connection._usage.start()
    -
    -    def terminate(self):
    -        self.pool.terminate_pool()
    -        self.pool.open_pool = False
    -        self.connection.bound = False
    -        self.connection.closed = True
    -        self.pool.bind_pool = False
    -        self.pool.tls_pool = False
    -
    -    def _close_socket(self):
    -        """
    -        Doesn't really close the socket
    -        """
    -        self.connection.closed = True
    -
    -        if self.connection.usage:
    -            self.connection._usage.closed_sockets += 1
    -
    -    def send(self, message_type, request, controls=None):
    -        if self.pool.started:
    -            if message_type == 'bindRequest':
    -                self.pool.bind_pool = True
    -                counter = BOGUS_BIND
    -            elif message_type == 'unbindRequest':
    -                self.pool.bind_pool = False
    -                counter = BOGUS_UNBIND
    -            elif message_type == 'abandonRequest':
    -                counter = BOGUS_ABANDON
    -            elif message_type == 'extendedReq' and self.connection.starting_tls:
    -                self.pool.tls_pool = True
    -                counter = BOGUS_EXTENDED
    -            else:
    -                with self.pool.pool_lock:
    -                    self.pool.counter += 1
    -                    if self.pool.counter > LDAP_MAX_INT:
    -                        self.pool.counter = 1
    -                    counter = self.pool.counter
    -                self.pool.request_queue.put((counter, message_type, request, controls))
    -            return counter
    -        if log_enabled(ERROR):
    -            log(ERROR, 'reusable connection pool not started')
    -        raise LDAPConnectionPoolNotStartedError('reusable connection pool not started')
    -
    -    def validate_bind(self, controls):
    -        # in case of a new connection or different credentials
    -        if (self.connection.user != self.pool.master_connection.user or
    -                self.connection.password != self.pool.master_connection.password or
    -                self.connection.authentication != self.pool.master_connection.authentication or
    -                self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or
    -                self.connection.sasl_credentials != self.pool.master_connection.sasl_credentials):
    -            self.pool.master_connection.user = self.connection.user
    -            self.pool.master_connection.password = self.connection.password
    -            self.pool.master_connection.authentication = self.connection.authentication
    -            self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism
    -            self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials
    -            self.pool.rebind_pool()
    -        temp_connection = self.pool.workers[0].connection
    -        temp_connection.lazy = False
    -        if not self.connection.server.schema or not self.connection.server.info:
    -            result = self.pool.workers[0].connection.bind(controls=controls)
    -        else:
    -            result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False)
    -
    -        temp_connection.unbind()
    -        temp_connection.lazy = True
    -        if result:
    -            self.pool.bind_pool = True  # bind pool if bind is validated
    -        return result
    -
    -    def get_response(self, counter, timeout=None, get_request=False):
    -        sleeptime = get_config_parameter('RESPONSE_SLEEPTIME')
    -        request=None
    -        if timeout is None:
    -            timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
    -        if counter == BOGUS_BIND:  # send a bogus bindResponse
    -            response = list()
    -            result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '', 'saslCreds': None}
    -        elif counter == BOGUS_UNBIND:  # bogus unbind response
    -            response = None
    -            result = None
    -        elif counter == BOGUS_ABANDON:  # abandon cannot be executed because of multiple connections
    -            response = list()
    -            result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''}
    -        elif counter == BOGUS_EXTENDED:  # bogus startTls extended response
    -            response = list()
    -            result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''}
    -            self.connection.starting_tls = False
    -        else:
    -            response = None
    -            result = None
    -            while timeout >= 0:  # waiting for completed message to appear in _incoming
    -                try:
    -                    with self.connection.strategy.pool.pool_lock:
    -                        response, result, request = self.connection.strategy.pool._incoming.pop(counter)
    -                except KeyError:
    -                    sleep(sleeptime)
    -                    timeout -= sleeptime
    -                    continue
    -                break
    -
    -            if timeout <= 0:
    -                if log_enabled(ERROR):
    -                    log(ERROR, 'no response from worker threads in Reusable connection')
    -                raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection')
    -
    -        if isinstance(response, LDAPOperationResult):
    -            raise response  # an exception has been raised with raise_exceptions
    -
    -        if get_request:
    -            return response, result, request
    -
    -        return response, result
    -
    -    def post_send_single_response(self, counter):
    -        return counter
    -
    -    def post_send_search(self, counter):
    -        return counter
    +"""
    +"""
    +
    +# Created on 2014.03.23
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
    +# If not, see <http://www.gnu.org/licenses/>.
    +
    +from datetime import datetime
    +from os import linesep
    +from threading import Thread, Lock
    +from time import sleep
    +
    +from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND
    +from .base import BaseStrategy
    +from ..core.usage import ConnectionUsage
    +from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError
    +from ..utils.log import log, log_enabled, ERROR, BASIC
    +from ..protocol.rfc4511 import LDAP_MAX_INT
    +
    +TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION'
    +
    +BOGUS_BIND = -1
    +BOGUS_UNBIND = -2
    +BOGUS_EXTENDED = -3
    +BOGUS_ABANDON = -4
    +
    +try:
    +    from queue import Queue, Empty
    +except ImportError:  # Python 2
    +    # noinspection PyUnresolvedReferences
    +    from Queue import Queue, Empty
    +
    +
    +# noinspection PyProtectedMember
    +class ReusableStrategy(BaseStrategy):
    +    """
    +    A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime.
    +    The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of
    +    connections that can be used as needed. Each connection lives in its own thread and has a busy/available status.
    +    The strategy performs the requested operation on the first available connection.
    +    The pool of connections is instantiated at strategy initialization.
    +    The strategy has two customizable properties: the total number of connections in the pool and the lifetime of each connection.
    +    When the lifetime expires the connection is closed and will be opened again when needed.
    +    """
    +    pools = dict()
    +
    +    def receiving(self):
    +        raise NotImplementedError
    +
    +    def _start_listen(self):
    +        raise NotImplementedError
    +
    +    def _get_response(self, message_id, timeout):
    +        raise NotImplementedError
    +
    +    def get_stream(self):
    +        raise NotImplementedError
    +
    +    def set_stream(self, value):
    +        raise NotImplementedError
    +
    +    # noinspection PyProtectedMember
    +    class ConnectionPool(object):
    +        """
    +        Container for the Connection Threads
    +        """
    +        def __new__(cls, connection):
    +            if connection.pool_name in ReusableStrategy.pools:  # returns existing connection pool
    +                pool = ReusableStrategy.pools[connection.pool_name]
    +                if not pool.started:  # if pool is not started remove it from the pools singleton and create a new one
    +                    del ReusableStrategy.pools[connection.pool_name]
    +                    return object.__new__(cls)
    +                if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive:  # change keepalive
    +                    pool.keepalive = connection.pool_keepalive
    +                if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime:  # change lifetime
    +                    pool.lifetime = connection.pool_lifetime
    +                if connection.pool_size and pool.pool_size != connection.pool_size:  # if pool size has changed terminate and recreate the connections
    +                    pool.terminate_pool()
    +                    pool.pool_size = connection.pool_size
    +                return pool
    +            else:
    +                return object.__new__(cls)
    +
    +        def __init__(self, connection):
    +            if not hasattr(self, 'workers'):
    +                self.name = connection.pool_name
    +                self.master_connection = connection
    +                self.workers = []
    +                self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE')
    +                self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME')
    +                self.keepalive = connection.pool_keepalive
    +                self.request_queue = Queue()
    +                self.open_pool = False
    +                self.bind_pool = False
    +                self.tls_pool = False
    +                self._incoming = dict()
    +                self.counter = 0
    +                self.terminated_usage = ConnectionUsage() if connection._usage else None
    +                self.terminated = False
    +                self.pool_lock = Lock()
    +                ReusableStrategy.pools[self.name] = self
    +                self.started = False
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'instantiated ConnectionPool: <%r>', self)
    +
    +        def __str__(self):
    +            s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated')
    +            s += ' - responses in queue: ' + str(len(self._incoming))
    +            s += ' - pool size: ' + str(self.pool_size)
    +            s += ' - lifetime: ' + str(self.lifetime)
    +            s += ' - keepalive: ' + str(self.keepalive)
    +            s += ' - open: ' + str(self.open_pool)
    +            s += ' - bind: ' + str(self.bind_pool)
    +            s += ' - tls: ' + str(self.tls_pool) + linesep
    +            s += 'MASTER CONN: ' + str(self.master_connection) + linesep
    +            s += 'WORKERS:'
    +            if self.workers:
    +                for i, worker in enumerate(self.workers):
    +                    s += linesep + str(i).rjust(5) + ': ' + str(worker)
    +            else:
    +                s += linesep + '    no active workers in pool'
    +
    +            return s
    +
    +        def __repr__(self):
    +            return self.__str__()
    +
    +        def get_info_from_server(self):
    +            for worker in self.workers:
    +                with worker.worker_lock:
    +                    if not worker.connection.server.schema or not worker.connection.server.info:
    +                        worker.get_info_from_server = True
    +                    else:
    +                        worker.get_info_from_server = False
    +
    +        def rebind_pool(self):
    +            for worker in self.workers:
    +                with worker.worker_lock:
    +                    worker.connection.rebind(self.master_connection.user,
    +                                             self.master_connection.password,
    +                                             self.master_connection.authentication,
    +                                             self.master_connection.sasl_mechanism,
    +                                             self.master_connection.sasl_credentials)
    +
    +        def start_pool(self):
    +            if not self.started:
    +                self.create_pool()
    +                for worker in self.workers:
    +                    with worker.worker_lock:
    +                        worker.thread.start()
    +                self.started = True
    +                self.terminated = False
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'worker started for pool <%s>', self)
    +                return True
    +            return False
    +
    +        def create_pool(self):
    +            if log_enabled(BASIC):
    +                log(BASIC, 'created pool <%s>', self)
    +            self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)]
    +
    +        def terminate_pool(self):
    +            if not self.terminated:
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'terminating pool <%s>', self)
    +                self.started = False
    +                self.request_queue.join()  # waits for all queue pending operations
    +                for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])):  # put a TERMINATE signal on the queue for each active thread
    +                    self.request_queue.put((TERMINATE_REUSABLE, None, None, None))
    +                self.request_queue.join()  # waits for all queue terminate operations
    +                self.terminated = True
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'pool terminated for <%s>', self)
    +
    +    class PooledConnectionThread(Thread):
    +        """
    +        The thread that holds the Reusable connection and receives operation requests via the queue
    +        Results are sent back in the pool._incoming dict when ready
    +        """
    +        def __init__(self, worker, master_connection):
    +            Thread.__init__(self)
    +            self.daemon = True
    +            self.worker = worker
    +            self.master_connection = master_connection
    +            if log_enabled(BASIC):
    +                log(BASIC, 'instantiated PooledConnectionThread: <%r>', self)
    +
    +        # noinspection PyProtectedMember
    +        def run(self):
    +            self.worker.running = True
    +            terminate = False
    +            pool = self.master_connection.strategy.pool
    +            while not terminate:
    +                try:
    +                    counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive)
    +                except Empty:  # issue an Abandon(0) operation to keep the connection alive - Abandon(0) is a harmless operation
    +                    if not self.worker.connection.closed:
    +                        self.worker.connection.abandon(0)
    +                    continue
    +
    +                with self.worker.worker_lock:
    +                    self.worker.busy = True
    +                    if counter == TERMINATE_REUSABLE:
    +                        terminate = True
    +                        if self.worker.connection.bound:
    +                            try:
    +                                self.worker.connection.unbind()
    +                                if log_enabled(BASIC):
    +                                    log(BASIC, 'thread terminated')
    +                            except LDAPExceptionError:
    +                                pass
    +                    else:
+                        if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime:  # destroy the connection and create a new one
    +                            try:
    +                                self.worker.connection.unbind()
    +                            except LDAPExceptionError:
    +                                pass
    +                            self.worker.new_connection()
    +                            if log_enabled(BASIC):
    +                                log(BASIC, 'thread respawn')
    +                        if message_type not in ['bindRequest', 'unbindRequest']:
    +                            try:
    +                                if pool.open_pool and self.worker.connection.closed:
    +                                    self.worker.connection.open(read_server_info=False)
    +                                    if pool.tls_pool and not self.worker.connection.tls_started:
    +                                        self.worker.connection.start_tls(read_server_info=False)
    +                                    if pool.bind_pool and not self.worker.connection.bound:
    +                                        self.worker.connection.bind(read_server_info=False)
    +                                elif pool.open_pool and not self.worker.connection.closed:  # connection already open, issues a start_tls
    +                                    if pool.tls_pool and not self.worker.connection.tls_started:
    +                                        self.worker.connection.start_tls(read_server_info=False)
    +                                if self.worker.get_info_from_server and counter:
    +                                    self.worker.connection.refresh_server_info()
    +                                    self.worker.get_info_from_server = False
    +                                response = None
    +                                result = None
    +                                if message_type == 'searchRequest':
    +                                    response = self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls))
    +                                else:
    +                                    response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls))
    +                                result = self.worker.connection.result
    +                                with pool.pool_lock:
    +                                    pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
    +                            except LDAPOperationResult as e:  # raise_exceptions has raised an exception. It must be redirected to the original connection thread
    +                                with pool.pool_lock:
    +                                    pool._incoming[counter] = (e, None, None)
    +                                    # pool._incoming[counter] = (type(e)(str(e)), None, None)
    +                            # except LDAPOperationResult as e:  # raise_exceptions has raised an exception. It must be redirected to the original connection thread
    +                            #     exc = e
    +                            # with pool.pool_lock:
    +                            #     if exc:
    +                            #         pool._incoming[counter] = (exc, None, None)
    +                            #     else:
    +                            #         pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
    +
    +                    self.worker.busy = False
    +                    pool.request_queue.task_done()
    +                    self.worker.task_counter += 1
    +            if log_enabled(BASIC):
    +                log(BASIC, 'thread terminated')
    +            if self.master_connection.usage:
    +                pool.terminated_usage += self.worker.connection.usage
    +            self.worker.running = False
    +
    +    class PooledConnectionWorker(object):
    +        """
+        Container for the restartable connection. It includes a thread and a lock to execute operations on the connection in the pool
    +        """
    +        def __init__(self, connection, request_queue):
    +            self.master_connection = connection
    +            self.request_queue = request_queue
    +            self.running = False
    +            self.busy = False
    +            self.get_info_from_server = False
    +            self.connection = None
    +            self.creation_time = None
    +            self.task_counter = 0
    +            self.new_connection()
    +            self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection)
    +            self.worker_lock = Lock()
    +            if log_enabled(BASIC):
    +                log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self)
    +
    +        def __str__(self):
    +            s = 'CONN: ' + str(self.connection) + linesep + '       THREAD: '
    +            s += 'running' if self.running else 'halted'
    +            s += ' - ' + ('busy' if self.busy else 'available')
    +            s += ' - ' + ('created at: ' + self.creation_time.isoformat())
    +            s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds)
    +            s += ' - requests served: ' + str(self.task_counter)
    +
    +            return s
    +
    +        def new_connection(self):
    +            from ..core.connection import Connection
    +            # noinspection PyProtectedMember
    +            self.creation_time = datetime.now()
    +            self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server,
    +                                         user=self.master_connection.user,
    +                                         password=self.master_connection.password,
+                                         auto_bind=AUTO_BIND_NONE,  # do not perform auto_bind because it would read the schema again
    +                                         version=self.master_connection.version,
    +                                         authentication=self.master_connection.authentication,
    +                                         client_strategy=RESTARTABLE,
    +                                         auto_referrals=self.master_connection.auto_referrals,
    +                                         auto_range=self.master_connection.auto_range,
    +                                         sasl_mechanism=self.master_connection.sasl_mechanism,
    +                                         sasl_credentials=self.master_connection.sasl_credentials,
    +                                         check_names=self.master_connection.check_names,
    +                                         collect_usage=self.master_connection._usage,
    +                                         read_only=self.master_connection.read_only,
    +                                         raise_exceptions=self.master_connection.raise_exceptions,
    +                                         lazy=False,
    +                                         fast_decoder=self.master_connection.fast_decoder,
    +                                         receive_timeout=self.master_connection.receive_timeout,
    +                                         return_empty_attributes=self.master_connection.empty_attributes)
    +
    +            # simulates auto_bind, always with read_server_info=False
    +            if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
    +                if log_enabled(BASIC):
    +                    log(BASIC, 'performing automatic bind for <%s>', self.connection)
    +                self.connection.open(read_server_info=False)
    +                if self.master_connection.auto_bind == AUTO_BIND_NO_TLS:
    +                    self.connection.bind(read_server_info=False)
    +                elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
    +                    self.connection.start_tls(read_server_info=False)
    +                    self.connection.bind(read_server_info=False)
    +                elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
    +                    self.connection.bind(read_server_info=False)
    +                    self.connection.start_tls(read_server_info=False)
    +
    +            if self.master_connection.server_pool:
    +                self.connection.server_pool = self.master_connection.server_pool
    +                self.connection.server_pool.initialize(self.connection)
    +
    +    # ReusableStrategy methods
    +    def __init__(self, ldap_connection):
    +        BaseStrategy.__init__(self, ldap_connection)
    +        self.sync = False
    +        self.no_real_dsa = False
    +        self.pooled = True
    +        self.can_stream = False
    +        if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name:
    +            self.pool = ReusableStrategy.ConnectionPool(ldap_connection)
    +        else:
    +            if log_enabled(ERROR):
    +                log(ERROR, 'reusable connection must have a pool_name')
    +            raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name')
    +
    +    def open(self, reset_usage=True, read_server_info=True):
    +        # read_server_info not used
    +        self.pool.open_pool = True
    +        self.pool.start_pool()
    +        self.connection.closed = False
    +        if self.connection.usage:
    +            if reset_usage or not self.connection._usage.initial_connection_start_time:
    +                self.connection._usage.start()
    +
    +    def terminate(self):
    +        self.pool.terminate_pool()
    +        self.pool.open_pool = False
    +        self.connection.bound = False
    +        self.connection.closed = True
    +        self.pool.bind_pool = False
    +        self.pool.tls_pool = False
    +
    +    def _close_socket(self):
    +        """
    +        Doesn't really close the socket
    +        """
    +        self.connection.closed = True
    +
    +        if self.connection.usage:
    +            self.connection._usage.closed_sockets += 1
    +
    +    def send(self, message_type, request, controls=None):
    +        if self.pool.started:
    +            if message_type == 'bindRequest':
    +                self.pool.bind_pool = True
    +                counter = BOGUS_BIND
    +            elif message_type == 'unbindRequest':
    +                self.pool.bind_pool = False
    +                counter = BOGUS_UNBIND
    +            elif message_type == 'abandonRequest':
    +                counter = BOGUS_ABANDON
    +            elif message_type == 'extendedReq' and self.connection.starting_tls:
    +                self.pool.tls_pool = True
    +                counter = BOGUS_EXTENDED
    +            else:
    +                with self.pool.pool_lock:
    +                    self.pool.counter += 1
    +                    if self.pool.counter > LDAP_MAX_INT:
    +                        self.pool.counter = 1
    +                    counter = self.pool.counter
    +                self.pool.request_queue.put((counter, message_type, request, controls))
    +            return counter
    +        if log_enabled(ERROR):
    +            log(ERROR, 'reusable connection pool not started')
    +        raise LDAPConnectionPoolNotStartedError('reusable connection pool not started')
    +
    +    def validate_bind(self, controls):
    +        # in case of a new connection or different credentials
    +        if (self.connection.user != self.pool.master_connection.user or
    +                self.connection.password != self.pool.master_connection.password or
    +                self.connection.authentication != self.pool.master_connection.authentication or
    +                self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or
    +                self.connection.sasl_credentials != self.pool.master_connection.sasl_credentials):
    +            self.pool.master_connection.user = self.connection.user
    +            self.pool.master_connection.password = self.connection.password
    +            self.pool.master_connection.authentication = self.connection.authentication
    +            self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism
    +            self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials
    +            self.pool.rebind_pool()
    +        temp_connection = self.pool.workers[0].connection
    +        old_lazy = temp_connection.lazy
    +        temp_connection.lazy = False
+        if not self.connection.server.schema or not self.connection.server.info:
+            result = temp_connection.bind(controls=controls)
+        else:
+            result = temp_connection.bind(controls=controls, read_server_info=False)
    +
    +        temp_connection.unbind()
    +        temp_connection.lazy = old_lazy
    +        if result:
    +            self.pool.bind_pool = True  # bind pool if bind is validated
    +        return result
    +
    +    def get_response(self, counter, timeout=None, get_request=False):
    +        sleeptime = get_config_parameter('RESPONSE_SLEEPTIME')
+        request = None
    +        if timeout is None:
    +            timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
    +        if counter == BOGUS_BIND:  # send a bogus bindResponse
    +            response = list()
    +            result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '', 'saslCreds': None}
    +        elif counter == BOGUS_UNBIND:  # bogus unbind response
    +            response = None
    +            result = None
    +        elif counter == BOGUS_ABANDON:  # abandon cannot be executed because of multiple connections
    +            response = list()
    +            result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''}
    +        elif counter == BOGUS_EXTENDED:  # bogus startTls extended response
    +            response = list()
    +            result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': ''}
    +            self.connection.starting_tls = False
    +        else:
    +            response = None
    +            result = None
+            while timeout >= 0:  # waiting for completed message to appear in _incoming
+                try:
+                    with self.connection.strategy.pool.pool_lock:
+                        response, result, request = self.connection.strategy.pool._incoming.pop(counter)
+                except KeyError:
+                    sleep(sleeptime)
+                    timeout -= sleeptime
+                    continue
+                break
+            else:  # the timeout expired without a worker posting a response
+                if log_enabled(ERROR):
+                    log(ERROR, 'no response from worker threads in Reusable connection')
+                raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection')
    +
    +        if isinstance(response, LDAPOperationResult):
    +            raise response  # an exception has been raised with raise_exceptions
    +
    +        if get_request:
    +            return response, result, request
    +
    +        return response, result
    +
    +    def post_send_single_response(self, counter):
    +        return counter
    +
    +    def post_send_search(self, counter):
    +        return counter
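
For orientation, a minimal sketch of how the pooled strategy added above is typically driven from client code; the host, DN and password are hypothetical, and it assumes the documented ldap3 behavior that REUSABLE operations return a counter which is later passed to get_response:

    from ldap3 import Server, Connection, REUSABLE

    server = Server('ldap.example.com')  # hypothetical server
    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                      client_strategy=REUSABLE, pool_name='demo-pool', pool_size=4)
    conn.open()                                                    # starts the worker pool
    counter = conn.search('dc=example,dc=com', '(objectClass=*)')  # queued to a pooled worker
    response, result = conn.get_response(counter)                  # waits for a worker to post the result
    conn.unbind()                                                  # terminates the pool
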
    diff --git a/server/www/packages/packages-windows/x86/ldap3/strategy/sync.py b/server/www/packages/packages-windows/x86/ldap3/strategy/sync.py
    index b2c0257..fdb1441 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/strategy/sync.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/strategy/sync.py
    @@ -1,212 +1,212 @@
    -"""
    -"""
    -
    -# Created on 2013.07.15
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2013 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see .
    -
    -import socket
    -
    -from .. import SEQUENCE_TYPES, get_config_parameter
    -from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult
    -from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR
    -from ..protocol.rfc4511 import LDAPMessage
    -from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message
    -from ..utils.asn1 import decoder, decode_message_fast
    -
    -LDAP_MESSAGE_TEMPLATE = LDAPMessage()
    -
    -
    -# noinspection PyProtectedMember
    -class SyncStrategy(BaseStrategy):
    -    """
    -    This strategy is synchronous. You send the request and get the response
    -    Requests return a boolean value to indicate the result of the requested Operation
    -    Connection.response will contain the whole LDAP response for the messageId requested in a dict form
    -    Connection.request will contain the result LDAP message in a dict form
    -    """
    -
    -    def __init__(self, ldap_connection):
    -        BaseStrategy.__init__(self, ldap_connection)
    -        self.sync = True
    -        self.no_real_dsa = False
    -        self.pooled = False
    -        self.can_stream = False
    -        self.socket_size = get_config_parameter('SOCKET_SIZE')
    -
    -    def open(self, reset_usage=True, read_server_info=True):
    -        BaseStrategy.open(self, reset_usage, read_server_info)
    -        if read_server_info:
    -            try:
    -                self.connection.refresh_server_info()
    -            except LDAPOperationResult:  # catch errors from server if raise_exception = True
    -                self.connection.server._dsa_info = None
    -                self.connection.server._schema_info = None
    -
    -    def _start_listen(self):
    -        if not self.connection.listening and not self.connection.closed:
    -            self.connection.listening = True
    -
    -    def receiving(self):
    -        """
    -        Receive data over the socket
    -        Checks if the socket is closed
    -        """
    -        messages = []
    -        receiving = True
    -        unprocessed = b''
    -        data = b''
    -        get_more_data = True
    -        exc = None
    -        while receiving:
    -            if get_more_data:
    -                try:
    -                    data = self.connection.socket.recv(self.socket_size)
    -                except (OSError, socket.error, AttributeError) as e:
    -                    self.connection.last_error = 'error receiving data: ' + str(e)
    -                    try:  # try to close the connection before raising exception
    -                        self.close()
    -                    except (socket.error, LDAPExceptionError):
    -                        pass
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                    # raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error)
    -                    raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error)
    -                unprocessed += data
    -            if len(data) > 0:
    -                length = BaseStrategy.compute_ldap_message_size(unprocessed)
    -                if length == -1:  # too few data to decode message length
    -                    get_more_data = True
    -                    continue
    -                if len(unprocessed) < length:
    -                    get_more_data = True
    -                else:
    -                    if log_enabled(NETWORK):
    -                        log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection)
    -                    messages.append(unprocessed[:length])
    -                    unprocessed = unprocessed[length:]
    -                    get_more_data = False
    -                    if len(unprocessed) == 0:
    -                        receiving = False
    -            else:
    -                receiving = False
    -
    -        if log_enabled(NETWORK):
    -            log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection)
    -        return messages
    -
    -    def post_send_single_response(self, message_id):
    -        """
    -        Executed after an Operation Request (except Search)
    -        Returns the result message or None
    -        """
    -        responses, result = self.get_response(message_id)
    -        self.connection.result = result
    -        if result['type'] == 'intermediateResponse':  # checks that all responses are intermediates (there should be only one)
    -            for response in responses:
    -                if response['type'] != 'intermediateResponse':
    -                    self.connection.last_error = 'multiple messages received error'
    -                    if log_enabled(ERROR):
    -                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                    raise LDAPSocketReceiveError(self.connection.last_error)
    -
    -        responses.append(result)
    -        return responses
    -
    -    def post_send_search(self, message_id):
    -        """
    -        Executed after a search request
    -        Returns the result message and store in connection.response the objects found
    -        """
    -        responses, result = self.get_response(message_id)
    -        self.connection.result = result
    -        if isinstance(responses, SEQUENCE_TYPES):
    -            self.connection.response = responses[:]  # copy search result entries
    -            return responses
    -
    -        self.connection.last_error = 'error receiving response'
    -        if log_enabled(ERROR):
    -            log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -        raise LDAPSocketReceiveError(self.connection.last_error)
    -
    -    def _get_response(self, message_id):
    -        """
    -        Performs the capture of LDAP response for SyncStrategy
    -        """
    -        ldap_responses = []
    -        response_complete = False
    -        while not response_complete:
    -            responses = self.receiving()
    -            if responses:
    -                for response in responses:
    -                    if len(response) > 0:
    -                        if self.connection.usage:
    -                            self.connection._usage.update_received_message(len(response))
    -                        if self.connection.fast_decoder:
    -                            ldap_resp = decode_message_fast(response)
    -                            dict_response = self.decode_response_fast(ldap_resp)
    -                        else:
    -                            ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE)  # unprocessed unused because receiving() waits for the whole message
    -                            dict_response = self.decode_response(ldap_resp)
    -                        if log_enabled(EXTENDED):
    -                            log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
    -                        if int(ldap_resp['messageID']) == message_id:
    -                            ldap_responses.append(dict_response)
    -                            if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
    -                                response_complete = True
    -                        elif int(ldap_resp['messageID']) == 0:  # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
    -                            if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036':  # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
    -                                return SESSION_TERMINATED_BY_SERVER
    -                            elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4':  # Novell LDAP transaction error unsolicited notification
    -                                return TRANSACTION_ERROR
    -                            else:
    -                                self.connection.last_error = 'unknown unsolicited notification from server'
    -                                if log_enabled(ERROR):
    -                                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                                raise LDAPSocketReceiveError(self.connection.last_error)
    -                        elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp':
    -                            self.connection.last_error = 'multiple extended responses to a single extended request'
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                            raise LDAPExtensionError(self.connection.last_error)
    -                            # pass  # ignore message with invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP server do it
    -                        else:
    -                            self.connection.last_error = 'invalid messageId received'
    -                            if log_enabled(ERROR):
    -                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                            raise LDAPSocketReceiveError(self.connection.last_error)
    -                        # response = unprocessed
    -                        # if response:  # if this statement is removed unprocessed data will be processed as another message
    -                        #     self.connection.last_error = 'unprocessed substrate error'
    -                        #     if log_enabled(ERROR):
    -                        #         log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    -                        #     raise LDAPSocketReceiveError(self.connection.last_error)
    -            else:
    -                return SESSION_TERMINATED_BY_SERVER
    -        ldap_responses.append(RESPONSE_COMPLETE)
    -
    -        return ldap_responses
    -
    -    def set_stream(self, value):
    -        raise NotImplementedError
    -
    -    def get_stream(self):
    -        raise NotImplementedError
    +"""
    +"""
    +
    +# Created on 2013.07.15
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2013 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
    +# If not, see .
    +
    +import socket
    +
    +from .. import SEQUENCE_TYPES, get_config_parameter
    +from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult
    +from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR
    +from ..protocol.rfc4511 import LDAPMessage
    +from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message
    +from ..utils.asn1 import decoder, decode_message_fast
    +
    +LDAP_MESSAGE_TEMPLATE = LDAPMessage()
    +
    +
    +# noinspection PyProtectedMember
    +class SyncStrategy(BaseStrategy):
    +    """
    +    This strategy is synchronous. You send the request and get the response
    +    Requests return a boolean value to indicate the result of the requested Operation
    +    Connection.response will contain the whole LDAP response for the messageId requested in a dict form
    +    Connection.request will contain the result LDAP message in a dict form
    +    """
    +
    +    def __init__(self, ldap_connection):
    +        BaseStrategy.__init__(self, ldap_connection)
    +        self.sync = True
    +        self.no_real_dsa = False
    +        self.pooled = False
    +        self.can_stream = False
    +        self.socket_size = get_config_parameter('SOCKET_SIZE')
    +
    +    def open(self, reset_usage=True, read_server_info=True):
    +        BaseStrategy.open(self, reset_usage, read_server_info)
    +        if read_server_info:
    +            try:
    +                self.connection.refresh_server_info()
+            except LDAPOperationResult:  # catch errors from server if raise_exceptions = True
    +                self.connection.server._dsa_info = None
    +                self.connection.server._schema_info = None
    +
    +    def _start_listen(self):
    +        if not self.connection.listening and not self.connection.closed:
    +            self.connection.listening = True
    +
    +    def receiving(self):
    +        """
    +        Receives data over the socket
    +        Checks if the socket is closed
    +        """
    +        messages = []
    +        receiving = True
    +        unprocessed = b''
    +        data = b''
    +        get_more_data = True
    +        exc = None
    +        while receiving:
    +            if get_more_data:
    +                try:
    +                    data = self.connection.socket.recv(self.socket_size)
    +                except (OSError, socket.error, AttributeError) as e:
    +                    self.connection.last_error = 'error receiving data: ' + str(e)
    +                    try:  # try to close the connection before raising exception
    +                        self.close()
    +                    except (socket.error, LDAPExceptionError):
    +                        pass
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                    # raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error)
    +                    raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error)
    +                unprocessed += data
    +            if len(data) > 0:
    +                length = BaseStrategy.compute_ldap_message_size(unprocessed)
+                if length == -1:  # too little data to decode the message length
    +                    get_more_data = True
    +                    continue
    +                if len(unprocessed) < length:
    +                    get_more_data = True
    +                else:
    +                    if log_enabled(NETWORK):
    +                        log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection)
    +                    messages.append(unprocessed[:length])
    +                    unprocessed = unprocessed[length:]
    +                    get_more_data = False
    +                    if len(unprocessed) == 0:
    +                        receiving = False
    +            else:
    +                receiving = False
    +
    +        if log_enabled(NETWORK):
    +            log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection)
    +        return messages
    +
    +    def post_send_single_response(self, message_id):
    +        """
    +        Executed after an Operation Request (except Search)
    +        Returns the result message or None
    +        """
    +        responses, result = self.get_response(message_id)
    +        self.connection.result = result
    +        if result['type'] == 'intermediateResponse':  # checks that all responses are intermediates (there should be only one)
    +            for response in responses:
    +                if response['type'] != 'intermediateResponse':
    +                    self.connection.last_error = 'multiple messages received error'
    +                    if log_enabled(ERROR):
    +                        log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                    raise LDAPSocketReceiveError(self.connection.last_error)
    +
    +        responses.append(result)
    +        return responses
    +
    +    def post_send_search(self, message_id):
    +        """
    +        Executed after a search request
+        Returns the result message and stores the objects found in connection.response
    +        """
    +        responses, result = self.get_response(message_id)
    +        self.connection.result = result
    +        if isinstance(responses, SEQUENCE_TYPES):
    +            self.connection.response = responses[:]  # copy search result entries
    +            return responses
    +
    +        self.connection.last_error = 'error receiving response'
    +        if log_enabled(ERROR):
    +            log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +        raise LDAPSocketReceiveError(self.connection.last_error)
    +
    +    def _get_response(self, message_id, timeout):
    +        """
    +        Performs the capture of LDAP response for SyncStrategy
    +        """
    +        ldap_responses = []
    +        response_complete = False
    +        while not response_complete:
    +            responses = self.receiving()
    +            if responses:
    +                for response in responses:
    +                    if len(response) > 0:
    +                        if self.connection.usage:
    +                            self.connection._usage.update_received_message(len(response))
    +                        if self.connection.fast_decoder:
    +                            ldap_resp = decode_message_fast(response)
    +                            dict_response = self.decode_response_fast(ldap_resp)
    +                        else:
    +                            ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE)  # unprocessed unused because receiving() waits for the whole message
    +                            dict_response = self.decode_response(ldap_resp)
    +                        if log_enabled(EXTENDED):
    +                            log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
    +                        if int(ldap_resp['messageID']) == message_id:
    +                            ldap_responses.append(dict_response)
    +                            if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
    +                                response_complete = True
    +                        elif int(ldap_resp['messageID']) == 0:  # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
    +                            if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036':  # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
    +                                return SESSION_TERMINATED_BY_SERVER
    +                            elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4':  # Novell LDAP transaction error unsolicited notification
    +                                return TRANSACTION_ERROR
    +                            else:
    +                                self.connection.last_error = 'unknown unsolicited notification from server'
    +                                if log_enabled(ERROR):
    +                                    log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                                raise LDAPSocketReceiveError(self.connection.last_error)
    +                        elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp':
    +                            self.connection.last_error = 'multiple extended responses to a single extended request'
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                            raise LDAPExtensionError(self.connection.last_error)
+                            # pass  # ignore messages with an invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP servers do it
    +                        else:
    +                            self.connection.last_error = 'invalid messageId received'
    +                            if log_enabled(ERROR):
    +                                log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                            raise LDAPSocketReceiveError(self.connection.last_error)
    +                        # response = unprocessed
    +                        # if response:  # if this statement is removed unprocessed data will be processed as another message
    +                        #     self.connection.last_error = 'unprocessed substrate error'
    +                        #     if log_enabled(ERROR):
    +                        #         log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
    +                        #     raise LDAPSocketReceiveError(self.connection.last_error)
    +            else:
    +                return SESSION_TERMINATED_BY_SERVER
    +        ldap_responses.append(RESPONSE_COMPLETE)
    +
    +        return ldap_responses
    +
    +    def set_stream(self, value):
    +        raise NotImplementedError
    +
    +    def get_stream(self):
    +        raise NotImplementedError
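
By contrast, the SyncStrategy re-added above returns results directly on the calling thread; a minimal sketch (hypothetical server; SYNC is ldap3's default client strategy):

    from ldap3 import Server, Connection, SYNC

    conn = Connection(Server('ldap.example.com'), client_strategy=SYNC, auto_bind=True)
    if conn.search('dc=example,dc=com', '(objectClass=person)'):  # operations return a boolean
        print(conn.result)    # whole LDAP result of the operation, in dict form
        print(conn.response)  # search result entries copied by post_send_search()
    conn.unbind()
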
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/asn1.py b/server/www/packages/packages-windows/x86/ldap3/utils/asn1.py
    index 6b0b0bb..1b6091d 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/asn1.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/asn1.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/ciDict.py b/server/www/packages/packages-windows/x86/ldap3/utils/ciDict.py
    index f81ba1b..c51d7ff 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/ciDict.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/ciDict.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2014 - 2018 Giovanni Cannata
    +# Copyright 2014 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -143,7 +143,7 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
                 if ci_key in self._aliases:
                     self.remove_alias(ci_key)
     
    -    def set_alias(self, key, alias):
    +    def set_alias(self, key, alias, ignore_duplicates=False):
             if not isinstance(alias, SEQUENCE_TYPES):
                 alias = [alias]
             for alias_to_add in alias:
    @@ -153,23 +153,28 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
                 if ci_alias not in self._case_insensitive_keymap:  # checks if alias is used as a key
                         if ci_alias not in self._aliases:  # checks if alias is used as another alias
                             self._aliases[ci_alias] = ci_key
    -                        if ci_key in self._alias_keymap:  # extend alias keymap
    +                        if ci_key in self._alias_keymap:  # extends alias keymap
                                 self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
                             else:
                                 self._alias_keymap[ci_key] = list()
                                 self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
                         else:
    -                        if ci_key == self._ci_key(self._alias_keymap[ci_alias]):  # passes if alias is already defined to the same key
    +                        if ci_key in self._alias_keymap and ci_alias in self._alias_keymap[ci_key]:  # passes if alias is already defined to the same key
                                 pass
    -                        else:
    +                        elif not ignore_duplicates:
                                 raise KeyError('\'' + str(alias_to_add) + '\' already used as alias')
                     else:
                         if ci_key == self._ci_key(self._case_insensitive_keymap[ci_alias]):  # passes if alias is already defined to the same key
                             pass
    -                    else:
    +                    elif not ignore_duplicates:
                             raise KeyError('\'' + str(alias_to_add) + '\' already used as key')
                 else:
    -                raise KeyError('\'' + str(ci_key) + '\' is not an existing key')
    +                for keymap in self._alias_keymap:
+                    if ci_key in self._alias_keymap[keymap]:  # key is already aliased
    +                        self.set_alias(keymap, alias + [ci_key], ignore_duplicates=ignore_duplicates)
    +                        break
    +                else:
    +                    raise KeyError('\'' + str(ci_key) + '\' is not an existing alias or key')
     
         def remove_alias(self, alias):
             if not isinstance(alias, SEQUENCE_TYPES):
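
A small illustration of what the set_alias changes above enable: duplicate aliases can now be ignored instead of raising, and aliasing an existing alias re-attaches the new alias to the underlying key. A sketch under those assumptions:

    from ldap3.utils.ciDict import CaseInsensitiveWithAliasDict

    d = CaseInsensitiveWithAliasDict()
    d['cn'] = 'common name'
    d['sn'] = 'surname'
    d.set_alias('cn', 'commonName')
    d.set_alias('sn', 'commonName', ignore_duplicates=True)  # silently skipped; previously raised KeyError
    d.set_alias('commonName', 'cname')  # alias of an alias is re-attached to the key 'cn'
    print(d['CNAME'])  # 'common name' - lookup stays case insensitive through aliases
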
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/config.py b/server/www/packages/packages-windows/x86/ldap3/utils/config.py
    index 36b57a9..e3edbf8 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/config.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/config.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2013 - 2018 Giovanni Cannata
    +# Copyright 2013 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/conv.py b/server/www/packages/packages-windows/x86/ldap3/utils/conv.py
    index ee90c66..b000e30 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/conv.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/conv.py
    @@ -1,278 +1,270 @@
    -"""
    -"""
    -
    -# Created on 2014.04.26
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
    -# If not, see .
    -
    -from base64 import b64encode, b64decode
    -import datetime
    -import re
    -
    -from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter
    -from ..utils.ciDict import CaseInsensitiveDict
    -from ..core.exceptions import LDAPDefinitionError
    -
    -
    -def to_unicode(obj, encoding=None, from_server=False):
    -    """Try to convert bytes (and str in python2) to unicode.
    -     Return object unmodified if python3 string, else raise an exception
    -    """
    -    conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING')
    -    conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING')
    -    conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS')
    -    conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS')
    -    if isinstance(obj, NUMERIC_TYPES):
    -        obj = str(obj)
    -
    -    if isinstance(obj, (bytes, bytearray)):
    -        if from_server:  # data from server
    -            if encoding is None:
    -                encoding = conf_default_server_encoding
    -            try:
    -                return obj.decode(encoding)
    -            except UnicodeDecodeError:
    -                for encoding in conf_additional_server_encodings:  # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510)
    -                    try:
    -                        return obj.decode(encoding)
    -                    except UnicodeDecodeError:
    -                        pass
    -                raise UnicodeError("Unable to convert server data to unicode: %r" % obj)
    -        else:  # data from client
    -            if encoding is None:
    -                encoding = conf_default_client_encoding
    -            try:
    -                return obj.decode(encoding)
    -            except UnicodeDecodeError:
    -                for encoding in conf_additional_client_encodings:  # tries additional encodings
    -                    try:
    -                        return obj.decode(encoding)
    -                    except UnicodeDecodeError:
    -                        pass
    -                raise UnicodeError("Unable to convert client data to unicode: %r" % obj)
    -
    -    if isinstance(obj, STRING_TYPES):  # python3 strings, python 2 unicode
    -        return obj
    -
    -    raise UnicodeError("Unable to convert type %s to unicode: %r" % (type(obj).__class__.__name__, obj))
    -
    -
    -def to_raw(obj, encoding='utf-8'):
    -    """Tries to convert to raw bytes from unicode"""
    -    if isinstance(obj, NUMERIC_TYPES):
    -        obj = str(obj)
    -
    -    if not (isinstance(obj, bytes)):
    -        if isinstance(obj, SEQUENCE_TYPES):
    -            return [to_raw(element) for element in obj]
    -        elif isinstance(obj, STRING_TYPES):
    -            return obj.encode(encoding)
    -    return obj
    -
    -
    -def escape_filter_chars(text, encoding=None):
    -    """ Escape chars mentioned in RFC4515. """
    -    if encoding is None:
    -        encoding = get_config_parameter('DEFAULT_ENCODING')
    -
    -    try:
    -        text = to_unicode(text, encoding)
    -        escaped = text.replace('\\', '\\5c')
    -        escaped = escaped.replace('*', '\\2a')
    -        escaped = escaped.replace('(', '\\28')
    -        escaped = escaped.replace(')', '\\29')
    -        escaped = escaped.replace('\x00', '\\00')
    -    except Exception:  # probably raw bytes values, return escaped bytes value
    -        escaped = to_unicode(escape_bytes(text))
    -        # escape all octets greater than 0x7F that are not part of a valid UTF-8
    -        # escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped)
    -    return escaped
    -
    -
    -def unescape_filter_chars(text, encoding=None):
    -    """ unescape chars mentioned in RFC4515. """
    -    if encoding is None:
    -        encoding = get_config_parameter('DEFAULT_ENCODING')
    -
    -    unescaped = to_raw(text, encoding)
    -    unescaped = unescaped.replace(b'\\5c', b'\\')
    -    unescaped = unescaped.replace(b'\\5C', b'\\')
    -    unescaped = unescaped.replace(b'\\2a', b'*')
    -    unescaped = unescaped.replace(b'\\2A', b'*')
    -    unescaped = unescaped.replace(b'\\28', b'(')
    -    unescaped = unescaped.replace(b'\\29', b')')
    -    unescaped = unescaped.replace(b'\\00', b'\x00')
    -    return unescaped
    -
    -
    -def escape_bytes(bytes_value):
    -    """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string"""
    -    if bytes_value:
    -        if str is not bytes:  # Python 3
    -            if isinstance(bytes_value, str):
    -                bytes_value = bytearray(bytes_value, encoding='utf-8')
    -            escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value])
    -        else:  # Python 2
    -            if isinstance(bytes_value, unicode):
    -                bytes_value = bytes_value.encode('utf-8')
    -            escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value])
    -    else:
    -        escaped = ''
    -
    -    return ('\\' + escaped) if escaped else ''
    -
    -
    -def prepare_for_stream(value):
    -    if str is not bytes:  # Python 3
    -        return value
    -    else:  # Python 2
    -        return value.decode()
    -
    -def json_encode_b64(obj):
    -    try:
    -        return dict(encoding='base64', encoded=b64encode(obj))
    -    except Exception as e:
    -        raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e))
    -
    -
    -# noinspection PyProtectedMember
    -def check_json_dict(json_dict):
    -    # needed for python 2
    -
    -    for k, v in json_dict.items():
    -        if isinstance(v, dict):
    -            check_json_dict(v)
    -        elif isinstance(v, CaseInsensitiveDict):
    -            check_json_dict(v._store)
    -        elif isinstance(v, SEQUENCE_TYPES):
    -            for i, e in enumerate(v):
    -                if isinstance(e, dict):
    -                    check_json_dict(e)
    -                elif isinstance(e, CaseInsensitiveDict):
    -                    check_json_dict(e._store)
    -                else:
    -                    v[i] = format_json(e)
    -        else:
    -            json_dict[k] = format_json(v)
    -
    -
    -def json_hook(obj):
    -    if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys():
    -        return b64decode(obj['encoded'])
    -
    -    return obj
    -
    -
    -# noinspection PyProtectedMember
    -def format_json(obj):
    -    if isinstance(obj, CaseInsensitiveDict):
    -        return obj._store
    -
    -    if isinstance(obj, datetime.datetime):
    -        return str(obj)
    -
    -    if isinstance(obj, int):
    -        return obj
    -
    -    if str is bytes:  # Python 2
    -        if isinstance(obj, long):  # long exists only in python2
    -            return obj
    -
    -    try:
    -        if str is not bytes:  # Python 3
    -            if isinstance(obj, bytes):
    -                # return check_escape(str(obj, 'utf-8', errors='strict'))
    -                return str(obj, 'utf-8', errors='strict')
    -            raise LDAPDefinitionError('unable to serialize ' + str(obj))
    -        else:  # Python 2
    -            if isinstance(obj, unicode):
    -                return obj
    -            else:
    -                # return unicode(check_escape(obj))
    -                return unicode(obj)
    -    except (TypeError, UnicodeDecodeError):
    -        pass
    -
    -    try:
    -        return json_encode_b64(bytes(obj))
    -    except Exception:
    -        pass
    -
    -    raise LDAPDefinitionError('unable to serialize ' + str(obj))
    -
    -
    -def is_filter_escaped(text):
    -    if not type(text) == ((str is not bytes) and str or unicode):  # requires str for Python 3 and unicode for Python 2
    -        raise ValueError('unicode input expected')
    -
    -    return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text)
    -
    -
    -# def ldap_escape_to_bytes(text):
    -#     bytesequence = bytearray()
    -#     if text.startswith('\\'):
    -#         byte_values = text.split('\\')
    -#         for value in byte_values[1:]:
    -#             if len(value) != 2 and not value.isdigit():
    -#                 raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
    -#             bytesequence.append(int(value, 16))
    -#         return bytes(bytesequence)
    -#     raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
    -
    -
    -def ldap_escape_to_bytes(text):
    -    bytesequence = bytearray()
    -    i = 0
    -    try:
    -        if isinstance(text, STRING_TYPES):
    -            while i < len(text):
    -                if text[i] == '\\':
    -                    if len(text) > i + 2:
    -                        try:
    -                            bytesequence.append(int(text[i+1:i+3], 16))
    -                            i += 3
    -                            continue
    -                        except ValueError:
    -                            pass
    -                    bytesequence.append(92)  # "\" ASCII code
    -                else:
    -                    raw = to_raw(text[i])
    -                    for c in raw:
    -                        bytesequence.append(c)
    -                i += 1
    -        elif isinstance(text, (bytes, bytearray)):
    -            while i < len(text):
    -                if text[i] == 92:  # "\" ASCII code
    -                    if len(text) > i + 2:
    -                        try:
    -                            bytesequence.append(int(text[i + 1:i + 3], 16))
    -                            i += 3
    -                            continue
    -                        except ValueError:
    -                            pass
    -                    bytesequence.append(92)  # "\" ASCII code
    -                else:
    -                    bytesequence.append(text[i])
    -                i += 1
    -    except Exception:
    -        raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
    -
    -    return bytes(bytesequence)
    +"""
    +"""
    +
    +# Created on 2014.04.26
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from base64 import b64encode, b64decode
    +import datetime
    +import re
    +
    +from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter
    +from ..utils.ciDict import CaseInsensitiveDict
    +from ..core.exceptions import LDAPDefinitionError
    +
    +
    +def to_unicode(obj, encoding=None, from_server=False):
    +    """Try to convert bytes (and str in python2) to unicode.
+    Return the object unmodified if it is already a python3 string, else raise an exception
    +    """
    +    conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING')
    +    conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING')
    +    conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS')
    +    conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS')
    +    if isinstance(obj, NUMERIC_TYPES):
    +        obj = str(obj)
    +
    +    if isinstance(obj, (bytes, bytearray)):
    +        if from_server:  # data from server
    +            if encoding is None:
    +                encoding = conf_default_server_encoding
    +            try:
    +                return obj.decode(encoding)
    +            except UnicodeDecodeError:
    +                for encoding in conf_additional_server_encodings:  # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510)
    +                    try:
    +                        return obj.decode(encoding)
    +                    except UnicodeDecodeError:
    +                        pass
    +                raise UnicodeError("Unable to convert server data to unicode: %r" % obj)
    +        else:  # data from client
    +            if encoding is None:
    +                encoding = conf_default_client_encoding
    +            try:
    +                return obj.decode(encoding)
    +            except UnicodeDecodeError:
    +                for encoding in conf_additional_client_encodings:  # tries additional encodings
    +                    try:
    +                        return obj.decode(encoding)
    +                    except UnicodeDecodeError:
    +                        pass
    +                raise UnicodeError("Unable to convert client data to unicode: %r" % obj)
    +
    +    if isinstance(obj, STRING_TYPES):  # python3 strings, python 2 unicode
    +        return obj
    +
    +    raise UnicodeError("Unable to convert type %s to unicode: %r" % (obj.__class__.__name__, obj))
    +
    +
    +def to_raw(obj, encoding='utf-8'):
    +    """Tries to convert to raw bytes from unicode"""
    +    if isinstance(obj, NUMERIC_TYPES):
    +        obj = str(obj)
    +
    +    if not (isinstance(obj, bytes)):
    +        if isinstance(obj, SEQUENCE_TYPES):
    +            return [to_raw(element) for element in obj]
    +        elif isinstance(obj, STRING_TYPES):
    +            return obj.encode(encoding)
    +    return obj
    +
    +
    +def escape_filter_chars(text, encoding=None):
    +    """ Escape chars mentioned in RFC4515. """
    +    if encoding is None:
    +        encoding = get_config_parameter('DEFAULT_ENCODING')
    +
    +    try:
    +        text = to_unicode(text, encoding)
    +        escaped = text.replace('\\', '\\5c')
    +        escaped = escaped.replace('*', '\\2a')
    +        escaped = escaped.replace('(', '\\28')
    +        escaped = escaped.replace(')', '\\29')
    +        escaped = escaped.replace('\x00', '\\00')
    +    except Exception:  # probably raw bytes values, return escaped bytes value
    +        escaped = to_unicode(escape_bytes(text))
    +        # escape all octets greater than 0x7F that are not part of a valid UTF-8
    +        # escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped)
    +    return escaped
    +
    +
    +def unescape_filter_chars(text, encoding=None):
    +    """ unescape chars mentioned in RFC4515. """
    +    if encoding is None:
    +        encoding = get_config_parameter('DEFAULT_ENCODING')
    +
    +    unescaped = to_raw(text, encoding)
    +    unescaped = unescaped.replace(b'\\5c', b'\\')
    +    unescaped = unescaped.replace(b'\\5C', b'\\')
    +    unescaped = unescaped.replace(b'\\2a', b'*')
    +    unescaped = unescaped.replace(b'\\2A', b'*')
    +    unescaped = unescaped.replace(b'\\28', b'(')
    +    unescaped = unescaped.replace(b'\\29', b')')
    +    unescaped = unescaped.replace(b'\\00', b'\x00')
    +    return unescaped
    +
    +
    +def escape_bytes(bytes_value):
    +    """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string"""
    +    if bytes_value:
    +        if str is not bytes:  # Python 3
    +            if isinstance(bytes_value, str):
    +                bytes_value = bytearray(bytes_value, encoding='utf-8')
    +            escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value])
    +        else:  # Python 2
    +            if isinstance(bytes_value, unicode):
    +                bytes_value = bytes_value.encode('utf-8')
    +            escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value])
    +    else:
    +        escaped = ''
    +
    +    return ('\\' + escaped) if escaped else ''
    +
    +
    +def prepare_for_stream(value):
    +    if str is not bytes:  # Python 3
    +        return value
    +    else:  # Python 2
    +        return value.decode()
    +
    +
    +def json_encode_b64(obj):
    +    try:
    +        return dict(encoding='base64', encoded=b64encode(obj))
    +    except Exception as e:
    +        raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e))
    +
    +
    +# noinspection PyProtectedMember
    +def check_json_dict(json_dict):
    +    # needed for python 2
    +
    +    for k, v in json_dict.items():
    +        if isinstance(v, dict):
    +            check_json_dict(v)
    +        elif isinstance(v, CaseInsensitiveDict):
    +            check_json_dict(v._store)
    +        elif isinstance(v, SEQUENCE_TYPES):
    +            for i, e in enumerate(v):
    +                if isinstance(e, dict):
    +                    check_json_dict(e)
    +                elif isinstance(e, CaseInsensitiveDict):
    +                    check_json_dict(e._store)
    +                else:
    +                    v[i] = format_json(e)
    +        else:
    +            json_dict[k] = format_json(v)
    +
    +
    +def json_hook(obj):
    +    if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys():
    +        return b64decode(obj['encoded'])
    +
    +    return obj
    +
    +
    +# noinspection PyProtectedMember
    +def format_json(obj):
    +    if isinstance(obj, CaseInsensitiveDict):
    +        return obj._store
    +
    +    if isinstance(obj, datetime.datetime):
    +        return str(obj)
    +
    +    if isinstance(obj, int):
    +        return obj
    +
    +    if isinstance(obj, datetime.timedelta):
    +        return str(obj)
    +
    +    if str is bytes:  # Python 2
    +        if isinstance(obj, long):  # long exists only in python2
    +            return obj
    +
    +    try:
    +        if str is not bytes:  # Python 3
    +            if isinstance(obj, bytes):
    +                # return check_escape(str(obj, 'utf-8', errors='strict'))
    +                return str(obj, 'utf-8', errors='strict')
    +            raise LDAPDefinitionError('unable to serialize ' + str(obj))
    +        else:  # Python 2
    +            if isinstance(obj, unicode):
    +                return obj
    +            else:
    +                # return unicode(check_escape(obj))
    +                return unicode(obj)
    +    except (TypeError, UnicodeDecodeError):
    +        pass
    +
    +    try:
    +        return json_encode_b64(bytes(obj))
    +    except Exception:
    +        pass
    +
    +    raise LDAPDefinitionError('unable to serialize ' + str(obj))
    +
    +
    +def is_filter_escaped(text):
    +    if not type(text) == ((str is not bytes) and str or unicode):  # requires str for Python 3 and unicode for Python 2
    +        raise ValueError('unicode input expected')
    +
    +    return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text)
    +
    +
    +def ldap_escape_to_bytes(text):
    +    bytesequence = bytearray()
    +    i = 0
    +    try:
    +        if isinstance(text, STRING_TYPES):
    +            while i < len(text):
    +                if text[i] == '\\':
    +                    if len(text) > i + 2:
    +                        try:
    +                            bytesequence.append(int(text[i+1:i+3], 16))
    +                            i += 3
    +                            continue
    +                        except ValueError:
    +                            pass
    +                    bytesequence.append(92)  # "\" ASCII code
    +                else:
    +                    raw = to_raw(text[i])
    +                    for c in raw:
    +                        bytesequence.append(c)
    +                i += 1
    +        elif isinstance(text, (bytes, bytearray)):
    +            while i < len(text):
    +                if text[i] == 92:  # "\" ASCII code
    +                    if len(text) > i + 2:
    +                        try:
    +                            bytesequence.append(int(text[i + 1:i + 3], 16))
    +                            i += 3
    +                            continue
    +                        except ValueError:
    +                            pass
    +                    bytesequence.append(92)  # "\" ASCII code
    +                else:
    +                    bytesequence.append(text[i])
    +                i += 1
    +    except Exception:
    +        raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
    +
    +    return bytes(bytesequence)
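
For reference, a minimal usage sketch of the conv.py helpers revised above; this is not part of the patch, and it assumes the vendored ldap3 package is importable from this tree:

# Round-trip through the RFC 4515 filter-escaping helpers shown above.
from ldap3.utils.conv import (escape_filter_chars, unescape_filter_chars,
                              escape_bytes, ldap_escape_to_bytes)

# Special filter characters become \XX hex escapes, preventing filter injection.
assert escape_filter_chars('(admin)*') == '\\28admin\\29\\2a'

# unescape_filter_chars reverses the mapping, returning raw bytes.
assert unescape_filter_chars('\\28admin\\29\\2a') == b'(admin)*'

# escape_bytes renders arbitrary bytes as BACKSLASH HEX HEX pairs,
# and ldap_escape_to_bytes parses that representation back to bytes.
assert escape_bytes(b'\x01\x02') == '\\01\\02'
assert ldap_escape_to_bytes('\\01\\02') == b'\x01\x02'
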
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/dn.py b/server/www/packages/packages-windows/x86/ldap3/utils/dn.py
    index d1a50a9..c2a1e66 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/dn.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/dn.py
    @@ -1,375 +1,405 @@
    -"""
    -"""
    -
    -# Created on 2014.09.08
    -#
    -# Author: Giovanni Cannata
    -#
    -# Copyright 2014 - 2018 Giovanni Cannata
    -#
    -# This file is part of ldap3.
    -#
    -# ldap3 is free software: you can redistribute it and/or modify
    -# it under the terms of the GNU Lesser General Public License as published
    -# by the Free Software Foundation, either version 3 of the License, or
    -# (at your option) any later version.
    -#
    -# ldap3 is distributed in the hope that it will be useful,
    -# but WITHOUT ANY WARRANTY; without even the implied warranty of
    -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    -# GNU Lesser General Public License for more details.
    -#
    -# You should have received a copy of the GNU Lesser General Public License
    -# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
    -
    -from string import hexdigits, ascii_letters, digits
    -
    -from .. import SEQUENCE_TYPES
    -from ..core.exceptions import LDAPInvalidDnError
    -
    -
    -STATE_ANY = 0
    -STATE_ESCAPE = 1
    -STATE_ESCAPE_HEX = 2
    -
    -
    -def _add_ava(ava, decompose, remove_space, space_around_equal):
    -    if not ava:
    -        return ''
    -
    -    space = ' ' if space_around_equal else ''
    -    attr_name, _, value = ava.partition('=')
    -    if decompose:
    -        if remove_space:
    -            component = (attr_name.strip(), value.strip())
    -        else:
    -            component = (attr_name, value)
    -    else:
    -        if remove_space:
    -            component = attr_name.strip() + space + '=' + space + value.strip()
    -        else:
    -            component = attr_name + space + '=' + space + value
    -
    -    return component
    -
    -
    -def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False):
    -    """
    -    Convert an iterator to a list of dn parts
    -    if decompose=True return a list of tuple (one for each dn component) else return a list of strings
    -    if remove_space=True removes unneeded spaces
    -    if space_around_equal=True add spaces around equal in returned strings
    -    if separate_rdn=True consider multiple RDNs as different component of DN
    -    """
    -    dn = []
    -    component = ''
    -    escape_sequence = False
    -    for c in iterator:
    -        if c == '\\':  # escape sequence
    -            escape_sequence = True
    -        elif escape_sequence and c != ' ':
    -            escape_sequence = False
    -        elif c == '+' and separate_rdn:
    -            dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    -            component = ''
    -            continue
    -        elif c == ',':
    -            if '=' in component:
    -                dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    -                component = ''
    -                continue
    -
    -        component += c
    -
    -    dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    -    return dn
    -
    -
    -def _find_first_unescaped(dn, char, pos):
    -    while True:
    -        pos = dn.find(char, pos)
    -        if pos == -1:
    -            break  # no char found
    -        if pos > 0 and dn[pos - 1] != '\\':  # unescaped char
    -            break
    -
    -        pos += 1
    -
    -    return pos
    -
    -
    -def _find_last_unescaped(dn, char, start, stop=0):
    -    while True:
    -        stop = dn.rfind(char, start, stop)
    -        if stop == -1:
    -            break
    -        if stop >= 0 and dn[stop - 1] != '\\':
    -            break
    -
    -        if stop < start:
    -            stop = -1
    -            break
    -
    -    return stop
    -
    -
    -def _get_next_ava(dn):
    -    comma = _find_first_unescaped(dn, ',', 0)
    -    plus = _find_first_unescaped(dn, '+', 0)
    -
    -    if plus > 0 and (plus < comma or comma == -1):
    -        equal = _find_first_unescaped(dn, '=', plus + 1)
    -        if equal > plus + 1:
    -            plus = _find_last_unescaped(dn, '+', plus, equal)
    -            return dn[:plus], '+'
    -
    -    if comma > 0:
    -        equal = _find_first_unescaped(dn, '=', comma + 1)
    -        if equal > comma + 1:
    -            comma = _find_last_unescaped(dn, ',', comma, equal)
    -            return dn[:comma], ','
    -
    -    return dn, ''
    -
    -
    -def _split_ava(ava, escape=False, strip=True):
    -    equal = ava.find('=')
    -    while equal > 0:  # not first character
    -        if ava[equal - 1] != '\\':  # not an escaped equal so it must be an ava separator
    -            # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal]
    -            if strip:
    -                attribute_type = ava[0:equal].strip()
    -                attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip()
    -            else:
    -                attribute_type = ava[0:equal]
    -                attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:]
    -
    -            return attribute_type, attribute_value
    -        equal = ava.find('=', equal + 1)
    -
    -    return '', (ava.strip if strip else ava)  # if no equal found return only value
    -
    -
    -def _validate_attribute_type(attribute_type):
    -    if not attribute_type:
    -        raise LDAPInvalidDnError('attribute type not present')
    -
-    if attribute_type == '<GUID':  # only valid case for a key starting with '<'
-        return True
-
-    for c in attribute_type:
-        if c not in ascii_letters and c not in digits and c != '-':  # allowed only letters, digits and hyphen as per RFC 4512
-            raise LDAPInvalidDnError('character ' + c + ' not allowed in attribute type')
-
-    if attribute_type[0] in digits or attribute_type[0] == '-':  # digits and hyphen not allowed as first character
-        raise LDAPInvalidDnError('character ' + attribute_type[0] + ' not allowed as first character of attribute type')
-
-    return True
-
-
-def _validate_attribute_value(attribute_value):
-    if not attribute_value:
-        return False
-
-    if attribute_value[0] == '#':  # only hex characters are valid
-        for c in attribute_value[1:]:
-            if c not in hexdigits:  # allowed only hex digits as per RFC 4514
-                raise LDAPInvalidDnError('character ' + c + ' not allowed in hex representation of attribute value')
-        if len(attribute_value) % 2 == 0:  # string must be # + HEX HEX (an odd number of chars)
-            raise LDAPInvalidDnError('hex representation must be in the form of <HEX><HEX> pairs')
    -    if attribute_value[0] == ' ':  # space cannot be used as first or last character
    -        raise LDAPInvalidDnError('SPACE not allowed as first character of attribute value')
    -    if attribute_value[-1] == ' ':
    -        raise LDAPInvalidDnError('SPACE not allowed as last character of attribute value')
    -
    -    state = STATE_ANY
    -    for c in attribute_value:
    -        if state == STATE_ANY:
    -            if c == '\\':
    -                state = STATE_ESCAPE
    -            elif c in '"#+,;<=>\00':
    -                raise LDAPInvalidDnError('special characters ' + c + ' must be escaped')
    -        elif state == STATE_ESCAPE:
    -            if c in hexdigits:
    -                state = STATE_ESCAPE_HEX
    -            elif c in ' "#+,;<=>\\\00':
    -                state = STATE_ANY
    -            else:
    -                raise LDAPInvalidDnError('invalid escaped character ' + c)
    -        elif state == STATE_ESCAPE_HEX:
    -            if c in hexdigits:
    -                state = STATE_ANY
    -            else:
    -                raise LDAPInvalidDnError('invalid escaped character ' + c)
    -
    -    # final state
    -    if state != STATE_ANY:
    -        raise LDAPInvalidDnError('invalid final character')
    -
    -    return True
    -
    -
    -def _escape_attribute_value(attribute_value):
    -    if not attribute_value:
    -        return ''
    -
    -    if attribute_value[0] == '#':  # with leading SHARP only pairs of hex characters are valid
    -        valid_hex = True
    -        if len(attribute_value) % 2 == 0:  # string must be # + HEX HEX (an odd number of chars)
    -            valid_hex = False
    -
    -        if valid_hex:
    -            for c in attribute_value:
    -                if c not in hexdigits:  # allowed only hex digits as per RFC 4514
    -                    valid_hex = False
    -                    break
    -
    -        if valid_hex:
    -            return attribute_value
    -
    -    state = STATE_ANY
    -    escaped = ''
    -    tmp_buffer = ''
    -    for c in attribute_value:
    -        if state == STATE_ANY:
    -            if c == '\\':
    -                state = STATE_ESCAPE
    -            elif c in '"#+,;<=>\00':
    -                escaped += '\\' + c
    -            else:
    -                escaped += c
    -        elif state == STATE_ESCAPE:
    -            if c in hexdigits:
    -                tmp_buffer = c
    -                state = STATE_ESCAPE_HEX
    -            elif c in ' "#+,;<=>\\\00':
    -                escaped += '\\' + c
    -                state = STATE_ANY
    -            else:
    -                escaped += '\\\\' + c
    -        elif state == STATE_ESCAPE_HEX:
    -            if c in hexdigits:
    -                escaped += '\\' + tmp_buffer + c
    -            else:
    -                escaped += '\\\\' + tmp_buffer + c
    -            tmp_buffer = ''
    -            state = STATE_ANY
    -
    -    # final state
    -    if state == STATE_ESCAPE:
    -        escaped += '\\\\'
    -    elif state == STATE_ESCAPE_HEX:
    -        escaped += '\\\\' + tmp_buffer
    -
    -    if escaped[0] == ' ':  # leading SPACE must be escaped
    -        escaped = '\\' + escaped
    -
    -    if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\':  # trailing SPACE must be escaped
    -        escaped = escaped[:-1] + '\\ '
    -
    -    return escaped
    -
    -
    -def parse_dn(dn, escape=False, strip=True):
    -    rdns = []
    -    avas = []
    -    while dn:
    -        ava, separator = _get_next_ava(dn)  # if returned ava doesn't containg any unescaped equal it'a appended to last ava in avas
    -
    -        dn = dn[len(ava) + 1:]
    -        if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0:
    -            avas.append((ava, separator))
    -        else:
    -            avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator)
    -
    -    for ava, separator in avas:
    -        attribute_type, attribute_value = _split_ava(ava, escape, strip)
    -
    -        if not _validate_attribute_type(attribute_type):
    -            raise LDAPInvalidDnError('unable to validate attribute type in ' + ava)
    -
    -        if not _validate_attribute_value(attribute_value):
    -            raise LDAPInvalidDnError('unable to validate attribute value in ' + ava)
    -
    -        rdns.append((attribute_type, attribute_value, separator))
    -        dn = dn[len(ava) + 1:]
    -
    -    if not rdns:
    -        raise LDAPInvalidDnError('empty dn')
    -
    -    return rdns
    -
    -
    -def safe_dn(dn, decompose=False, reverse=False):
    -    """
    -    normalize and escape a dn, if dn is a sequence it is joined.
    -    the reverse parameter changes the join direction of the sequence
    -    """
    -    if isinstance(dn, SEQUENCE_TYPES):
    -        components = [rdn for rdn in dn]
    -        if reverse:
    -            dn = ','.join(reversed(components))
    -        else:
    -            dn = ','.join(components)
    -    if decompose:
    -        escaped_dn = []
    -    else:
    -        escaped_dn = ''
    -
-    if dn.startswith('<GUID='):  # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '<GUID=...>')
    -        escaped_dn = dn
    -    elif '@' not in dn and '\\' not in dn:  # active directory UPN (User Principal Name) consist of an account, the at sign (@) and a domain, or the domain level logn name domain\username
    -        for component in parse_dn(dn, escape=True):
    -            if decompose:
    -                escaped_dn.append((component[0], component[1], component[2]))
    -            else:
    -                escaped_dn += component[0] + '=' + component[1] + component[2]
    -    elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2:
    -        raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain')
    -    elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2:
    -        raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain')
    -    else:
    -        escaped_dn = dn
    -
    -    return escaped_dn
    -
    -
    -def safe_rdn(dn, decompose=False):
    -    """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used"""
    -    escaped_rdn = []
    -    one_more = True
    -    for component in parse_dn(dn, escape=True):
    -        if component[2] == '+' or one_more:
    -            if decompose:
    -                escaped_rdn.append((component[0], component[1]))
    -            else:
    -                escaped_rdn.append(component[0] + '=' + component[1])
    -            if component[2] == '+':
    -                one_more = True
    -            else:
    -                one_more = False
    -                break
    -
    -    if one_more:
    -        raise LDAPInvalidDnError('bad dn ' + str(dn))
    -
    -    return escaped_rdn
    -
    -
    -def escape_rdn(rdn):
    -    """
    -    Escape rdn characters to prevent injection according to RFC 4514.
    -    """
    -
    -    # '/' must be handled first or the escape slashes will be escaped!
    -    for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']:
    -        rdn = rdn.replace(char, '\\' + char)
    -
    -    if rdn[0] == '#' or rdn[0] == ' ':
    -        rdn = ''.join(('\\', rdn))
    -
    -    if rdn[-1] == ' ':
    -        rdn = ''.join((rdn[:-1], '\\ '))
    -
    -    return rdn
    +"""
    +"""
    +
    +# Created on 2014.09.08
    +#
    +# Author: Giovanni Cannata
    +#
    +# Copyright 2014 - 2020 Giovanni Cannata
    +#
    +# This file is part of ldap3.
    +#
    +# ldap3 is free software: you can redistribute it and/or modify
    +# it under the terms of the GNU Lesser General Public License as published
    +# by the Free Software Foundation, either version 3 of the License, or
    +# (at your option) any later version.
    +#
    +# ldap3 is distributed in the hope that it will be useful,
    +# but WITHOUT ANY WARRANTY; without even the implied warranty of
    +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    +# GNU Lesser General Public License for more details.
    +#
    +# You should have received a copy of the GNU Lesser General Public License
    +# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
    +
    +from string import hexdigits, ascii_letters, digits
    +
    +from .. import SEQUENCE_TYPES
    +from ..core.exceptions import LDAPInvalidDnError
    +
    +
    +STATE_ANY = 0
    +STATE_ESCAPE = 1
    +STATE_ESCAPE_HEX = 2
    +
    +
    +def _add_ava(ava, decompose, remove_space, space_around_equal):
    +    if not ava:
    +        return ''
    +
    +    space = ' ' if space_around_equal else ''
    +    attr_name, _, value = ava.partition('=')
    +    if decompose:
    +        if remove_space:
    +            component = (attr_name.strip(), value.strip())
    +        else:
    +            component = (attr_name, value)
    +    else:
    +        if remove_space:
    +            component = attr_name.strip() + space + '=' + space + value.strip()
    +        else:
    +            component = attr_name + space + '=' + space + value
    +
    +    return component
    +
    +
    +def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False):
    +    """
    +    Convert an iterator to a list of dn parts
+    if decompose=True return a list of tuples (one for each dn component) else return a list of strings
    +    if remove_space=True removes unneeded spaces
    +    if space_around_equal=True add spaces around equal in returned strings
    +    if separate_rdn=True consider multiple RDNs as different component of DN
    +    """
    +    dn = []
    +    component = ''
    +    escape_sequence = False
    +    for c in iterator:
    +        if c == '\\':  # escape sequence
    +            escape_sequence = True
    +        elif escape_sequence and c != ' ':
    +            escape_sequence = False
    +        elif c == '+' and separate_rdn:
    +            dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    +            component = ''
    +            continue
    +        elif c == ',':
    +            if '=' in component:
    +                dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    +                component = ''
    +                continue
    +
    +        component += c
    +
    +    dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
    +    return dn
    +
    +
    +def _find_first_unescaped(dn, char, pos):
    +    while True:
    +        pos = dn.find(char, pos)
    +        if pos == -1:
    +            break  # no char found
    +        if pos > 0 and dn[pos - 1] != '\\':  # unescaped char
    +            break
    +        elif pos > 1 and dn[pos - 1] == '\\':  # may be unescaped
    +            escaped = True
    +            for c in dn[pos - 2:0:-1]:
    +                if c == '\\':
    +                    escaped = not escaped
    +                else:
    +                    break
    +            if not escaped:
    +                break
    +        pos += 1
    +
    +    return pos
    +
    +
    +def _find_last_unescaped(dn, char, start, stop=0):
    +    while True:
    +        stop = dn.rfind(char, start, stop)
    +        if stop == -1:
    +            break
    +        if stop >= 0 and dn[stop - 1] != '\\':
    +            break
    +        elif stop > 1 and dn[stop - 1] == '\\':  # may be unescaped
    +            escaped = True
    +            for c in dn[stop - 2:0:-1]:
    +                if c == '\\':
    +                    escaped = not escaped
    +                else:
    +                    break
    +            if not escaped:
    +                break
    +        if stop < start:
    +            stop = -1
    +            break
    +
    +    return stop
    +
    +
    +def _get_next_ava(dn):
    +    comma = _find_first_unescaped(dn, ',', 0)
    +    plus = _find_first_unescaped(dn, '+', 0)
    +
    +    if plus > 0 and (plus < comma or comma == -1):
    +        equal = _find_first_unescaped(dn, '=', plus + 1)
    +        if equal > plus + 1:
    +            plus = _find_last_unescaped(dn, '+', plus, equal)
    +            return dn[:plus], '+'
    +
    +    if comma > 0:
    +        equal = _find_first_unescaped(dn, '=', comma + 1)
    +        if equal > comma + 1:
    +            comma = _find_last_unescaped(dn, ',', comma, equal)
    +            return dn[:comma], ','
    +
    +    return dn, ''
    +
    +
    +def _split_ava(ava, escape=False, strip=True):
    +    equal = ava.find('=')
    +    while equal > 0:  # not first character
    +        if ava[equal - 1] != '\\':  # not an escaped equal so it must be an ava separator
    +            # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal]
    +            if strip:
    +                attribute_type = ava[0:equal].strip()
    +                attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip()
    +            else:
    +                attribute_type = ava[0:equal]
    +                attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:]
    +
    +            return attribute_type, attribute_value
    +        equal = ava.find('=', equal + 1)
    +
+    return '', (ava.strip() if strip else ava)  # if no equal found return only value
    +
    +
    +def _validate_attribute_type(attribute_type):
    +    if not attribute_type:
    +        raise LDAPInvalidDnError('attribute type not present')
    +
+    if attribute_type == '<GUID':  # only valid case for a key starting with '<'
+        return True
+
+    for c in attribute_type:
+        if c not in ascii_letters and c not in digits and c != '-':  # allowed only letters, digits and hyphen as per RFC 4512
+            raise LDAPInvalidDnError('character ' + c + ' not allowed in attribute type')
+
+    if attribute_type[0] in digits or attribute_type[0] == '-':  # digits and hyphen not allowed as first character
+        raise LDAPInvalidDnError('character ' + attribute_type[0] + ' not allowed as first character of attribute type')
+
+    return True
+
+
+def _validate_attribute_value(attribute_value):
+    if not attribute_value:
+        return False
+
+    if attribute_value[0] == '#':  # only hex characters are valid
+        for c in attribute_value[1:]:
+            if c not in hexdigits:  # allowed only hex digits as per RFC 4514
+                raise LDAPInvalidDnError('character ' + c + ' not allowed in hex representation of attribute value')
+        if len(attribute_value) % 2 == 0:  # string must be # + HEX HEX (an odd number of chars)
+            raise LDAPInvalidDnError('hex representation must be in the form of <HEX><HEX> pairs')
    +    if attribute_value[0] == ' ':  # unescaped space cannot be used as leading or last character
    +        raise LDAPInvalidDnError('SPACE must be escaped as leading character of attribute value')
    +    if attribute_value.endswith(' ') and not attribute_value.endswith('\\ '):
    +        raise LDAPInvalidDnError('SPACE must be escaped as trailing character of attribute value')
    +
    +    state = STATE_ANY
    +    for c in attribute_value:
    +        if state == STATE_ANY:
    +            if c == '\\':
    +                state = STATE_ESCAPE
    +            elif c in '"#+,;<=>\00':
    +                raise LDAPInvalidDnError('special character ' + c + ' must be escaped')
    +        elif state == STATE_ESCAPE:
    +            if c in hexdigits:
    +                state = STATE_ESCAPE_HEX
    +            elif c in ' "#+,;<=>\\\00':
    +                state = STATE_ANY
    +            else:
    +                raise LDAPInvalidDnError('invalid escaped character ' + c)
    +        elif state == STATE_ESCAPE_HEX:
    +            if c in hexdigits:
    +                state = STATE_ANY
    +            else:
    +                raise LDAPInvalidDnError('invalid escaped character ' + c)
    +
    +    # final state
    +    if state != STATE_ANY:
    +        raise LDAPInvalidDnError('invalid final character')
    +
    +    return True
    +
    +
    +def _escape_attribute_value(attribute_value):
    +    if not attribute_value:
    +        return ''
    +
    +    if attribute_value[0] == '#':  # with leading SHARP only pairs of hex characters are valid
    +        valid_hex = True
    +        if len(attribute_value) % 2 == 0:  # string must be # + HEX HEX (an odd number of chars)
    +            valid_hex = False
    +
    +        if valid_hex:
    +            for c in attribute_value:
    +                if c not in hexdigits:  # allowed only hex digits as per RFC 4514
    +                    valid_hex = False
    +                    break
    +
    +        if valid_hex:
    +            return attribute_value
    +
    +    state = STATE_ANY
    +    escaped = ''
    +    tmp_buffer = ''
    +    for c in attribute_value:
    +        if state == STATE_ANY:
    +            if c == '\\':
    +                state = STATE_ESCAPE
    +            elif c in '"#+,;<=>\00':
    +                escaped += '\\' + c
    +            else:
    +                escaped += c
    +        elif state == STATE_ESCAPE:
    +            if c in hexdigits:
    +                tmp_buffer = c
    +                state = STATE_ESCAPE_HEX
    +            elif c in ' "#+,;<=>\\\00':
    +                escaped += '\\' + c
    +                state = STATE_ANY
    +            else:
    +                escaped += '\\\\' + c
    +        elif state == STATE_ESCAPE_HEX:
    +            if c in hexdigits:
    +                escaped += '\\' + tmp_buffer + c
    +            else:
    +                escaped += '\\\\' + tmp_buffer + c
    +            tmp_buffer = ''
    +            state = STATE_ANY
    +
    +    # final state
    +    if state == STATE_ESCAPE:
    +        escaped += '\\\\'
    +    elif state == STATE_ESCAPE_HEX:
    +        escaped += '\\\\' + tmp_buffer
    +
    +    if escaped[0] == ' ':  # leading SPACE must be escaped
    +        escaped = '\\' + escaped
    +
    +    if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\':  # trailing SPACE must be escaped
    +        escaped = escaped[:-1] + '\\ '
    +
    +    return escaped
    +
    +
    +def parse_dn(dn, escape=False, strip=False):
    +    """
    +    Parses a DN into syntactic components
    +    :param dn:
    +    :param escape:
    +    :param strip:
    +    :return:
+    a list of triples representing `attributeTypeAndValue` elements,
+    each containing `attributeType`, `attributeValue` and the following separator (`COMMA` or `PLUS`) if given, else an empty `str`,
+    in their original representation, still containing escapes or encoded as hex.
    +    """
    +    rdns = []
    +    avas = []
    +    while dn:
+        ava, separator = _get_next_ava(dn)  # if returned ava doesn't contain any unescaped equal it's appended to the last ava in avas
    +
    +        dn = dn[len(ava) + 1:]
    +        if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0:
    +            avas.append((ava, separator))
    +        else:
    +            avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator)
    +
    +    for ava, separator in avas:
    +        attribute_type, attribute_value = _split_ava(ava, escape, strip)
    +
    +        if not _validate_attribute_type(attribute_type):
    +            raise LDAPInvalidDnError('unable to validate attribute type in ' + ava)
    +
    +        if not _validate_attribute_value(attribute_value):
    +            raise LDAPInvalidDnError('unable to validate attribute value in ' + ava)
    +
    +        rdns.append((attribute_type, attribute_value, separator))
    +        dn = dn[len(ava) + 1:]
    +
    +    if not rdns:
    +        raise LDAPInvalidDnError('empty dn')
    +
    +    return rdns
    +
    +
    +def safe_dn(dn, decompose=False, reverse=False):
    +    """
    +    normalize and escape a dn, if dn is a sequence it is joined.
    +    the reverse parameter changes the join direction of the sequence
    +    """
    +    if isinstance(dn, SEQUENCE_TYPES):
    +        components = [rdn for rdn in dn]
    +        if reverse:
    +            dn = ','.join(reversed(components))
    +        else:
    +            dn = ','.join(components)
    +    if decompose:
    +        escaped_dn = []
    +    else:
    +        escaped_dn = ''
    +
+    if dn.startswith('<GUID='):  # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '<GUID=...>')
+        escaped_dn = dn
+    elif dn.startswith('<WKGUID='):  # Active Directory allows Binding to Well-Known Objects Using WKGUID in a specially-formatted DN (e.g. '<WKGUID=...,dc=fabrikam,dc=com>')
+        escaped_dn = dn
+    elif dn.startswith('<SID='):  # Active Directory allows looking up objects by putting its security identifier (SID) in a specially-formatted DN (e.g. '<SID=...>')
+        escaped_dn = dn
+    elif '@' not in dn:  # active directory UPN (User Principal Name) consist of an account, the at sign (@) and a domain, or the domain level logon name domain\username
    +        for component in parse_dn(dn, escape=True):
    +            if decompose:
    +                escaped_dn.append((component[0], component[1], component[2]))
    +            else:
    +                escaped_dn += component[0] + '=' + component[1] + component[2]
    +    elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2:
    +        raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain')
    +    elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2:
    +        raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain')
    +    else:
    +        escaped_dn = dn
    +
    +    return escaped_dn
    +
    +
    +def safe_rdn(dn, decompose=False):
    +    """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used"""
    +    escaped_rdn = []
    +    one_more = True
    +    for component in parse_dn(dn, escape=True):
    +        if component[2] == '+' or one_more:
    +            if decompose:
    +                escaped_rdn.append((component[0], component[1]))
    +            else:
    +                escaped_rdn.append(component[0] + '=' + component[1])
    +            if component[2] == '+':
    +                one_more = True
    +            else:
    +                one_more = False
    +                break
    +
    +    if one_more:
    +        raise LDAPInvalidDnError('bad dn ' + str(dn))
    +
    +    return escaped_rdn
    +
    +
    +def escape_rdn(rdn):
    +    """
    +    Escape rdn characters to prevent injection according to RFC 4514.
    +    """
    +
    +    # '/' must be handled first or the escape slashes will be escaped!
    +    for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']:
    +        rdn = rdn.replace(char, '\\' + char)
    +
    +    if rdn[0] == '#' or rdn[0] == ' ':
    +        rdn = ''.join(('\\', rdn))
    +
    +    if rdn[-1] == ' ':
    +        rdn = ''.join((rdn[:-1], '\\ '))
    +
    +    return rdn
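
For reference, a minimal usage sketch of the dn.py helpers revised above; this is not part of the patch, and it assumes the vendored ldap3 package is importable from this tree:

# parse_dn yields (attributeType, attributeValue, separator) triples;
# '+' marks a multi-valued RDN, ',' separates RDNs.
from ldap3.utils.dn import parse_dn, safe_rdn, escape_rdn

assert parse_dn('cn=admin+uid=1,dc=example,dc=org') == [
    ('cn', 'admin', '+'),
    ('uid', '1', ','),
    ('dc', 'example', ','),
    ('dc', 'org', ''),
]

# safe_rdn keeps only the leading (possibly multi-valued) RDN.
assert safe_rdn('cn=admin,dc=example,dc=org') == ['cn=admin']

# escape_rdn escapes RFC 4514 special characters to prevent DN injection.
assert escape_rdn('admin,ou=users') == 'admin\\,ou\\=users'
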
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/hashed.py b/server/www/packages/packages-windows/x86/ldap3/utils/hashed.py
    index 33a2b89..e58d67d 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/hashed.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/hashed.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/log.py b/server/www/packages/packages-windows/x86/ldap3/utils/log.py
    index d65cc1b..228c745 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/log.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/log.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    diff --git a/server/www/packages/packages-windows/x86/ldap3/utils/ntlm.py b/server/www/packages/packages-windows/x86/ldap3/utils/ntlm.py
    index 54efaae..f91776d 100644
    --- a/server/www/packages/packages-windows/x86/ldap3/utils/ntlm.py
    +++ b/server/www/packages/packages-windows/x86/ldap3/utils/ntlm.py
    @@ -5,7 +5,7 @@
     #
     # Author: Giovanni Cannata
     #
    -# Copyright 2015 - 2018 Giovanni Cannata
    +# Copyright 2015 - 2020 Giovanni Cannata
     #
     # This file is part of ldap3.
     #
    @@ -483,7 +483,7 @@ class NtlmClient(object):
             temp += self.server_target_info_raw
         temp += pack('<I', 0)
diff --git a/server/www/packages/packages-windows/x86/mako/__init__.py b/server/www/packages/packages-windows/x86/mako/__init__.py
--- a/server/www/packages/packages-windows/x86/mako/__init__.py
+++ b/server/www/packages/packages-windows/x86/mako/__init__.py
@@ -1,8 +1,8 @@
 # mako/__init__.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
+# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     
    -__version__ = '1.0.7'
    +__version__ = '1.1.3'
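
For reference, a minimal usage sketch of the slimmed-down mako _ast_util module whose diff follows; this is not part of the patch, and the expected-output comment is illustrative:

from mako import _ast_util

# parse() compiles source text straight to an AST node via PyCF_ONLY_AST.
tree = _ast_util.parse('x = 1 + 2')
assign = tree.body[0]

# iter_fields() yields only the fields actually present on a node,
# e.g. {'targets': [<ast.Name ...>], 'value': <ast.BinOp ...>, ...}
print(dict(_ast_util.iter_fields(assign)))
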
    diff --git a/server/www/packages/packages-windows/x86/mako/_ast_util.py b/server/www/packages/packages-windows/x86/mako/_ast_util.py
    index c410287..bdcdbf6 100644
    --- a/server/www/packages/packages-windows/x86/mako/_ast_util.py
    +++ b/server/www/packages/packages-windows/x86/mako/_ast_util.py
    @@ -1,5 +1,5 @@
     # mako/_ast_util.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
+# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -8,69 +8,77 @@
         ast
         ~~~
     
    -    The `ast` module helps Python applications to process trees of the Python
    -    abstract syntax grammar.  The abstract syntax itself might change with
    -    each Python release; this module helps to find out programmatically what
    -    the current grammar looks like and allows modifications of it.
    -
    -    An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
    -    a flag to the `compile()` builtin function or by using the `parse()`
    -    function from this module.  The result will be a tree of objects whose
    -    classes all inherit from `ast.AST`.
    -
    -    A modified abstract syntax tree can be compiled into a Python code object
    -    using the built-in `compile()` function.
    -
    -    Additionally various helper functions are provided that make working with
    -    the trees simpler.  The main intention of the helper functions and this
    -    module in general is to provide an easy to use interface for libraries
    -    that work tightly with the python syntax (template engines for example).
    -
    +    This is a stripped down version of Armin Ronacher's ast module.
     
         :copyright: Copyright 2008 by Armin Ronacher.
         :license: Python License.
     """
    -from _ast import *  # noqa
    +
    +
    +from _ast import Add
    +from _ast import And
    +from _ast import AST
    +from _ast import BitAnd
    +from _ast import BitOr
    +from _ast import BitXor
    +from _ast import Div
    +from _ast import Eq
    +from _ast import FloorDiv
    +from _ast import Gt
    +from _ast import GtE
    +from _ast import If
    +from _ast import In
    +from _ast import Invert
    +from _ast import Is
    +from _ast import IsNot
    +from _ast import LShift
    +from _ast import Lt
    +from _ast import LtE
    +from _ast import Mod
    +from _ast import Mult
    +from _ast import Name
    +from _ast import Not
    +from _ast import NotEq
    +from _ast import NotIn
    +from _ast import Or
    +from _ast import PyCF_ONLY_AST
    +from _ast import RShift
    +from _ast import Sub
    +from _ast import UAdd
    +from _ast import USub
    +
     from mako.compat import arg_stringname
     
    -BOOLOP_SYMBOLS = {
    -    And: 'and',
    -    Or: 'or'
    -}
    +BOOLOP_SYMBOLS = {And: "and", Or: "or"}
     
     BINOP_SYMBOLS = {
    -    Add: '+',
    -    Sub: '-',
    -    Mult: '*',
    -    Div: '/',
    -    FloorDiv: '//',
    -    Mod: '%',
    -    LShift: '<<',
    -    RShift: '>>',
    -    BitOr: '|',
    -    BitAnd: '&',
    -    BitXor: '^'
    +    Add: "+",
    +    Sub: "-",
    +    Mult: "*",
    +    Div: "/",
    +    FloorDiv: "//",
    +    Mod: "%",
    +    LShift: "<<",
    +    RShift: ">>",
    +    BitOr: "|",
    +    BitAnd: "&",
    +    BitXor: "^",
     }
     
     CMPOP_SYMBOLS = {
    -    Eq: '==',
    -    Gt: '>',
    -    GtE: '>=',
    -    In: 'in',
    -    Is: 'is',
    -    IsNot: 'is not',
    -    Lt: '<',
    -    LtE: '<=',
    -    NotEq: '!=',
    -    NotIn: 'not in'
    +    Eq: "==",
    +    Gt: ">",
    +    GtE: ">=",
    +    In: "in",
    +    Is: "is",
    +    IsNot: "is not",
    +    Lt: "<",
    +    LtE: "<=",
    +    NotEq: "!=",
    +    NotIn: "not in",
     }
     
    -UNARYOP_SYMBOLS = {
    -    Invert: '~',
    -    Not: 'not',
    -    UAdd: '+',
    -    USub: '-'
    -}
    +UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
     
     ALL_SYMBOLS = {}
     ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
    @@ -79,105 +87,15 @@ ALL_SYMBOLS.update(CMPOP_SYMBOLS)
     ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
     
     
-def parse(expr, filename='<unknown>', mode='exec'):
+def parse(expr, filename="<unknown>", mode="exec"):
         """Parse an expression into an AST node."""
         return compile(expr, filename, mode, PyCF_ONLY_AST)
     
     
    -def to_source(node, indent_with=' ' * 4):
    -    """
    -    This function can convert a node tree back into python sourcecode.  This
    -    is useful for debugging purposes, especially if you're dealing with custom
    -    asts not generated by python itself.
    -
    -    It could be that the sourcecode is evaluable when the AST itself is not
    -    compilable / evaluable.  The reason for this is that the AST contains some
    -    more data than regular sourcecode does, which is dropped during
    -    conversion.
    -
    -    Each level of indentation is replaced with `indent_with`.  Per default this
    -    parameter is equal to four spaces as suggested by PEP 8, but it might be
    -    adjusted to match the application's styleguide.
    -    """
    -    generator = SourceGenerator(indent_with)
    -    generator.visit(node)
    -    return ''.join(generator.result)
    -
    -
    -def dump(node):
    -    """
    -    A very verbose representation of the node passed.  This is useful for
    -    debugging purposes.
    -    """
    -    def _format(node):
    -        if isinstance(node, AST):
    -            return '%s(%s)' % (node.__class__.__name__,
    -                               ', '.join('%s=%s' % (a, _format(b))
    -                                         for a, b in iter_fields(node)))
    -        elif isinstance(node, list):
    -            return '[%s]' % ', '.join(_format(x) for x in node)
    -        return repr(node)
    -    if not isinstance(node, AST):
    -        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    -    return _format(node)
    -
    -
    -def copy_location(new_node, old_node):
    -    """
    -    Copy the source location hint (`lineno` and `col_offset`) from the
    -    old to the new node if possible and return the new one.
    -    """
    -    for attr in 'lineno', 'col_offset':
    -        if attr in old_node._attributes and attr in new_node._attributes \
    -           and hasattr(old_node, attr):
    -            setattr(new_node, attr, getattr(old_node, attr))
    -    return new_node
    -
    -
    -def fix_missing_locations(node):
    -    """
    -    Some nodes require a line number and the column offset.  Without that
    -    information the compiler will abort the compilation.  Because it can be
    -    a dull task to add appropriate line numbers and column offsets when
    -    adding new nodes this function can help.  It copies the line number and
    -    column offset of the parent node to the child nodes without this
    -    information.
    -
    -    Unlike `copy_location` this works recursive and won't touch nodes that
    -    already have a location information.
    -    """
    -    def _fix(node, lineno, col_offset):
    -        if 'lineno' in node._attributes:
    -            if not hasattr(node, 'lineno'):
    -                node.lineno = lineno
    -            else:
    -                lineno = node.lineno
    -        if 'col_offset' in node._attributes:
    -            if not hasattr(node, 'col_offset'):
    -                node.col_offset = col_offset
    -            else:
    -                col_offset = node.col_offset
    -        for child in iter_child_nodes(node):
    -            _fix(child, lineno, col_offset)
    -    _fix(node, 1, 0)
    -    return node
    -
    -
    -def increment_lineno(node, n=1):
    -    """
    -    Increment the line numbers of all nodes by `n` if they have line number
    -    attributes.  This is useful to "move code" to a different location in a
    -    file.
    -    """
    -    for node in zip((node,), walk(node)):
    -        if 'lineno' in node._attributes:
    -            node.lineno = getattr(node, 'lineno', 0) + n
    -
    -
     def iter_fields(node):
         """Iterate over all fields of a node, only yielding existing fields."""
         # CPython 2.5 compat
    -    if not hasattr(node, '_fields') or not node._fields:
    +    if not hasattr(node, "_fields") or not node._fields:
             return
         for field in node._fields:
             try:
    @@ -186,65 +104,6 @@ def iter_fields(node):
                 pass
     
     
    -def get_fields(node):
    -    """Like `iter_fields` but returns a dict."""
    -    return dict(iter_fields(node))
    -
    -
    -def iter_child_nodes(node):
    -    """Iterate over all child nodes or a node."""
    -    for name, field in iter_fields(node):
    -        if isinstance(field, AST):
    -            yield field
    -        elif isinstance(field, list):
    -            for item in field:
    -                if isinstance(item, AST):
    -                    yield item
    -
    -
    -def get_child_nodes(node):
    -    """Like `iter_child_nodes` but returns a list."""
    -    return list(iter_child_nodes(node))
    -
    -
    -def get_compile_mode(node):
    -    """
    -    Get the mode for `compile` of a given node.  If the node is not a `mod`
    -    node (`Expression`, `Module` etc.) a `TypeError` is thrown.
    -    """
    -    if not isinstance(node, mod):
    -        raise TypeError('expected mod node, got %r' % node.__class__.__name__)
    -    return {
    -        Expression: 'eval',
    -        Interactive: 'single'
    -    }.get(node.__class__, 'expr')
    -
    -
    -def get_docstring(node):
    -    """
    -    Return the docstring for the given node or `None` if no docstring can be
    -    found.  If the node provided does not accept docstrings a `TypeError`
    -    will be raised.
    -    """
    -    if not isinstance(node, (FunctionDef, ClassDef, Module)):
    -        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    -    if node.body and isinstance(node.body[0], Str):
    -        return node.body[0].s
    -
    -
    -def walk(node):
    -    """
    -    Iterate over all nodes.  This is useful if you only want to modify nodes in
    -    place and don't care about the context or the order the nodes are returned.
    -    """
    -    from collections import deque
    -    todo = deque([node])
    -    while todo:
    -        node = todo.popleft()
    -        todo.extend(iter_child_nodes(node))
    -        yield node
    -
    -
     class NodeVisitor(object):
     
         """
    @@ -269,7 +128,7 @@ class NodeVisitor(object):
             exists for this node.  In that case the generic visit function is
             used instead.
             """
    -        method = 'visit_' + node.__class__.__name__
    +        method = "visit_" + node.__class__.__name__
             return getattr(self, method, None)
     
         def visit(self, node):
    @@ -367,7 +226,7 @@ class SourceGenerator(NodeVisitor):
         def write(self, x):
             if self.new_lines:
                 if self.result:
    -                self.result.append('\n' * self.new_lines)
    +                self.result.append("\n" * self.new_lines)
                 self.result.append(self.indent_with * self.indentation)
                 self.new_lines = 0
             self.result.append(x)
    @@ -386,7 +245,7 @@ class SourceGenerator(NodeVisitor):
             self.body(node.body)
             if node.orelse:
                 self.newline()
    -            self.write('else:')
    +            self.write("else:")
                 self.body(node.orelse)
     
         def signature(self, node):
    @@ -394,7 +253,7 @@ class SourceGenerator(NodeVisitor):
     
             def write_comma():
                 if want_comma:
    -                self.write(', ')
    +                self.write(", ")
                 else:
                     want_comma.append(True)
     
    @@ -403,19 +262,19 @@ class SourceGenerator(NodeVisitor):
                 write_comma()
                 self.visit(arg)
                 if default is not None:
    -                self.write('=')
    +                self.write("=")
                     self.visit(default)
             if node.vararg is not None:
                 write_comma()
    -            self.write('*' + arg_stringname(node.vararg))
    +            self.write("*" + arg_stringname(node.vararg))
             if node.kwarg is not None:
                 write_comma()
    -            self.write('**' + arg_stringname(node.kwarg))
    +            self.write("**" + arg_stringname(node.kwarg))
     
         def decorators(self, node):
             for decorator in node.decorator_list:
                 self.newline()
    -            self.write('@')
    +            self.write("@")
                 self.visit(decorator)
     
         # Statements
    @@ -424,29 +283,29 @@ class SourceGenerator(NodeVisitor):
             self.newline()
             for idx, target in enumerate(node.targets):
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(target)
    -        self.write(' = ')
    +        self.write(" = ")
             self.visit(node.value)
     
         def visit_AugAssign(self, node):
             self.newline()
             self.visit(node.target)
    -        self.write(BINOP_SYMBOLS[type(node.op)] + '=')
    +        self.write(BINOP_SYMBOLS[type(node.op)] + "=")
             self.visit(node.value)
     
         def visit_ImportFrom(self, node):
             self.newline()
    -        self.write('from %s%s import ' % ('.' * node.level, node.module))
    +        self.write("from %s%s import " % ("." * node.level, node.module))
             for idx, item in enumerate(node.names):
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.write(item)
     
         def visit_Import(self, node):
             self.newline()
             for item in node.names:
    -            self.write('import ')
    +            self.write("import ")
                 self.visit(item)
     
         def visit_Expr(self, node):
    @@ -457,9 +316,9 @@ class SourceGenerator(NodeVisitor):
             self.newline(n=2)
             self.decorators(node)
             self.newline()
    -        self.write('def %s(' % node.name)
    +        self.write("def %s(" % node.name)
             self.signature(node.args)
    -        self.write('):')
    +        self.write("):")
             self.body(node.body)
     
         def visit_ClassDef(self, node):
    @@ -467,200 +326,200 @@ class SourceGenerator(NodeVisitor):
     
             def paren_or_comma():
                 if have_args:
    -                self.write(', ')
    +                self.write(", ")
                 else:
                     have_args.append(True)
    -                self.write('(')
    +                self.write("(")
     
             self.newline(n=3)
             self.decorators(node)
             self.newline()
    -        self.write('class %s' % node.name)
    +        self.write("class %s" % node.name)
             for base in node.bases:
                 paren_or_comma()
                 self.visit(base)
             # XXX: the if here is used to keep this module compatible
             #      with python 2.6.
    -        if hasattr(node, 'keywords'):
    +        if hasattr(node, "keywords"):
                 for keyword in node.keywords:
                     paren_or_comma()
    -                self.write(keyword.arg + '=')
    +                self.write(keyword.arg + "=")
                     self.visit(keyword.value)
                 if getattr(node, "starargs", None):
                     paren_or_comma()
    -                self.write('*')
    +                self.write("*")
                     self.visit(node.starargs)
                 if getattr(node, "kwargs", None):
                     paren_or_comma()
    -                self.write('**')
    +                self.write("**")
                     self.visit(node.kwargs)
    -        self.write(have_args and '):' or ':')
    +        self.write(have_args and "):" or ":")
             self.body(node.body)
     
         def visit_If(self, node):
             self.newline()
    -        self.write('if ')
    +        self.write("if ")
             self.visit(node.test)
    -        self.write(':')
    +        self.write(":")
             self.body(node.body)
             while True:
                 else_ = node.orelse
                 if len(else_) == 1 and isinstance(else_[0], If):
                     node = else_[0]
                     self.newline()
    -                self.write('elif ')
    +                self.write("elif ")
                     self.visit(node.test)
    -                self.write(':')
    +                self.write(":")
                     self.body(node.body)
                 else:
                     self.newline()
    -                self.write('else:')
    +                self.write("else:")
                     self.body(else_)
                     break
     
         def visit_For(self, node):
             self.newline()
    -        self.write('for ')
    +        self.write("for ")
             self.visit(node.target)
    -        self.write(' in ')
    +        self.write(" in ")
             self.visit(node.iter)
    -        self.write(':')
    +        self.write(":")
             self.body_or_else(node)
     
         def visit_While(self, node):
             self.newline()
    -        self.write('while ')
    +        self.write("while ")
             self.visit(node.test)
    -        self.write(':')
    +        self.write(":")
             self.body_or_else(node)
     
         def visit_With(self, node):
             self.newline()
    -        self.write('with ')
    +        self.write("with ")
             self.visit(node.context_expr)
             if node.optional_vars is not None:
    -            self.write(' as ')
    +            self.write(" as ")
                 self.visit(node.optional_vars)
    -        self.write(':')
    +        self.write(":")
             self.body(node.body)
     
         def visit_Pass(self, node):
             self.newline()
    -        self.write('pass')
    +        self.write("pass")
     
         def visit_Print(self, node):
             # XXX: python 2.6 only
             self.newline()
    -        self.write('print ')
    +        self.write("print ")
             want_comma = False
             if node.dest is not None:
    -            self.write(' >> ')
    +            self.write(" >> ")
                 self.visit(node.dest)
                 want_comma = True
             for value in node.values:
                 if want_comma:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(value)
                 want_comma = True
             if not node.nl:
    -            self.write(',')
    +            self.write(",")
     
         def visit_Delete(self, node):
             self.newline()
    -        self.write('del ')
    +        self.write("del ")
             for idx, target in enumerate(node):
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(target)
     
         def visit_TryExcept(self, node):
             self.newline()
    -        self.write('try:')
    +        self.write("try:")
             self.body(node.body)
             for handler in node.handlers:
                 self.visit(handler)
     
         def visit_TryFinally(self, node):
             self.newline()
    -        self.write('try:')
    +        self.write("try:")
             self.body(node.body)
             self.newline()
    -        self.write('finally:')
    +        self.write("finally:")
             self.body(node.finalbody)
     
         def visit_Global(self, node):
             self.newline()
    -        self.write('global ' + ', '.join(node.names))
    +        self.write("global " + ", ".join(node.names))
     
         def visit_Nonlocal(self, node):
             self.newline()
    -        self.write('nonlocal ' + ', '.join(node.names))
    +        self.write("nonlocal " + ", ".join(node.names))
     
         def visit_Return(self, node):
             self.newline()
    -        self.write('return ')
    +        self.write("return ")
             self.visit(node.value)
     
         def visit_Break(self, node):
             self.newline()
    -        self.write('break')
    +        self.write("break")
     
         def visit_Continue(self, node):
             self.newline()
    -        self.write('continue')
    +        self.write("continue")
     
         def visit_Raise(self, node):
             # XXX: Python 2.6 / 3.0 compatibility
             self.newline()
    -        self.write('raise')
    -        if hasattr(node, 'exc') and node.exc is not None:
    -            self.write(' ')
    +        self.write("raise")
    +        if hasattr(node, "exc") and node.exc is not None:
    +            self.write(" ")
                 self.visit(node.exc)
                 if node.cause is not None:
    -                self.write(' from ')
    +                self.write(" from ")
                     self.visit(node.cause)
    -        elif hasattr(node, 'type') and node.type is not None:
    +        elif hasattr(node, "type") and node.type is not None:
                 self.visit(node.type)
                 if node.inst is not None:
    -                self.write(', ')
    +                self.write(", ")
                     self.visit(node.inst)
                 if node.tback is not None:
    -                self.write(', ')
    +                self.write(", ")
                     self.visit(node.tback)
     
         # Expressions
     
         def visit_Attribute(self, node):
             self.visit(node.value)
    -        self.write('.' + node.attr)
    +        self.write("." + node.attr)
     
         def visit_Call(self, node):
             want_comma = []
     
             def write_comma():
                 if want_comma:
    -                self.write(', ')
    +                self.write(", ")
                 else:
                     want_comma.append(True)
     
             self.visit(node.func)
    -        self.write('(')
    +        self.write("(")
             for arg in node.args:
                 write_comma()
                 self.visit(arg)
             for keyword in node.keywords:
                 write_comma()
    -            self.write(keyword.arg + '=')
    +            self.write(keyword.arg + "=")
                 self.visit(keyword.value)
             if getattr(node, "starargs", None):
                 write_comma()
    -            self.write('*')
    +            self.write("*")
                 self.visit(node.starargs)
             if getattr(node, "kwargs", None):
                 write_comma()
    -            self.write('**')
    +            self.write("**")
                 self.visit(node.kwargs)
    -        self.write(')')
    +        self.write(")")
     
         def visit_Name(self, node):
             self.write(node.id)
    @@ -680,106 +539,111 @@ class SourceGenerator(NodeVisitor):
         def visit_Num(self, node):
             self.write(repr(node.n))
     
    +    # newly needed in Python 3.8
    +    def visit_Constant(self, node):
    +        self.write(repr(node.value))
    +
         def visit_Tuple(self, node):
    -        self.write('(')
    +        self.write("(")
             idx = -1
             for idx, item in enumerate(node.elts):
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(item)
    -        self.write(idx and ')' or ',)')
    +        self.write(idx and ")" or ",)")
     
         def sequence_visit(left, right):
             def visit(self, node):
                 self.write(left)
                 for idx, item in enumerate(node.elts):
                     if idx:
    -                    self.write(', ')
    +                    self.write(", ")
                     self.visit(item)
                 self.write(right)
    +
             return visit
     
    -    visit_List = sequence_visit('[', ']')
    -    visit_Set = sequence_visit('{', '}')
    +    visit_List = sequence_visit("[", "]")
    +    visit_Set = sequence_visit("{", "}")
         del sequence_visit
     
         def visit_Dict(self, node):
    -        self.write('{')
    +        self.write("{")
             for idx, (key, value) in enumerate(zip(node.keys, node.values)):
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(key)
    -            self.write(': ')
    +            self.write(": ")
                 self.visit(value)
    -        self.write('}')
    +        self.write("}")
     
         def visit_BinOp(self, node):
    -        self.write('(')
    +        self.write("(")
             self.visit(node.left)
    -        self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
    +        self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
             self.visit(node.right)
    -        self.write(')')
    +        self.write(")")
     
         def visit_BoolOp(self, node):
    -        self.write('(')
    +        self.write("(")
             for idx, value in enumerate(node.values):
                 if idx:
    -                self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
    +                self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
                 self.visit(value)
    -        self.write(')')
    +        self.write(")")
     
         def visit_Compare(self, node):
    -        self.write('(')
    +        self.write("(")
             self.visit(node.left)
             for op, right in zip(node.ops, node.comparators):
    -            self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
    +            self.write(" %s " % CMPOP_SYMBOLS[type(op)])
                 self.visit(right)
    -        self.write(')')
    +        self.write(")")
     
         def visit_UnaryOp(self, node):
    -        self.write('(')
    +        self.write("(")
             op = UNARYOP_SYMBOLS[type(node.op)]
             self.write(op)
    -        if op == 'not':
    -            self.write(' ')
    +        if op == "not":
    +            self.write(" ")
             self.visit(node.operand)
    -        self.write(')')
    +        self.write(")")
     
         def visit_Subscript(self, node):
             self.visit(node.value)
    -        self.write('[')
    +        self.write("[")
             self.visit(node.slice)
    -        self.write(']')
    +        self.write("]")
     
         def visit_Slice(self, node):
             if node.lower is not None:
                 self.visit(node.lower)
    -        self.write(':')
    +        self.write(":")
             if node.upper is not None:
                 self.visit(node.upper)
             if node.step is not None:
    -            self.write(':')
    -            if not (isinstance(node.step, Name) and node.step.id == 'None'):
    +            self.write(":")
    +            if not (isinstance(node.step, Name) and node.step.id == "None"):
                     self.visit(node.step)
     
         def visit_ExtSlice(self, node):
             for idx, item in node.dims:
                 if idx:
    -                self.write(', ')
    +                self.write(", ")
                 self.visit(item)
     
         def visit_Yield(self, node):
    -        self.write('yield ')
    +        self.write("yield ")
             self.visit(node.value)
     
         def visit_Lambda(self, node):
    -        self.write('lambda ')
    +        self.write("lambda ")
             self.signature(node.args)
    -        self.write(': ')
    +        self.write(": ")
             self.visit(node.body)
     
         def visit_Ellipsis(self, node):
    -        self.write('Ellipsis')
    +        self.write("Ellipsis")
     
         def generator_visit(left, right):
             def visit(self, node):
    @@ -788,64 +652,65 @@ class SourceGenerator(NodeVisitor):
                 for comprehension in node.generators:
                     self.visit(comprehension)
                 self.write(right)
    +
             return visit
     
    -    visit_ListComp = generator_visit('[', ']')
    -    visit_GeneratorExp = generator_visit('(', ')')
    -    visit_SetComp = generator_visit('{', '}')
    +    visit_ListComp = generator_visit("[", "]")
    +    visit_GeneratorExp = generator_visit("(", ")")
    +    visit_SetComp = generator_visit("{", "}")
         del generator_visit
     
         def visit_DictComp(self, node):
    -        self.write('{')
    +        self.write("{")
             self.visit(node.key)
    -        self.write(': ')
    +        self.write(": ")
             self.visit(node.value)
             for comprehension in node.generators:
                 self.visit(comprehension)
    -        self.write('}')
    +        self.write("}")
     
         def visit_IfExp(self, node):
             self.visit(node.body)
    -        self.write(' if ')
    +        self.write(" if ")
             self.visit(node.test)
    -        self.write(' else ')
    +        self.write(" else ")
             self.visit(node.orelse)
     
         def visit_Starred(self, node):
    -        self.write('*')
    +        self.write("*")
             self.visit(node.value)
     
         def visit_Repr(self, node):
             # XXX: python 2.6 only
    -        self.write('`')
    +        self.write("`")
             self.visit(node.value)
    -        self.write('`')
    +        self.write("`")
     
         # Helper Nodes
     
         def visit_alias(self, node):
             self.write(node.name)
             if node.asname is not None:
    -            self.write(' as ' + node.asname)
    +            self.write(" as " + node.asname)
     
         def visit_comprehension(self, node):
    -        self.write(' for ')
    +        self.write(" for ")
             self.visit(node.target)
    -        self.write(' in ')
    +        self.write(" in ")
             self.visit(node.iter)
             if node.ifs:
                 for if_ in node.ifs:
    -                self.write(' if ')
    +                self.write(" if ")
                     self.visit(if_)
     
         def visit_excepthandler(self, node):
             self.newline()
    -        self.write('except')
    +        self.write("except")
             if node.type is not None:
    -            self.write(' ')
    +            self.write(" ")
                 self.visit(node.type)
                 if node.name is not None:
    -                self.write(' as ')
    +                self.write(" as ")
                     self.visit(node.name)
    -        self.write(':')
    +        self.write(":")
             self.body(node.body)
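
The hunk above is mostly quote-style churn from a code formatter, but it carries one behavioral change: the new visit_Constant handler. From Python 3.8 on, ast.parse() produces ast.Constant nodes where older interpreters produced ast.Num, ast.Str, ast.Bytes, ast.NameConstant and ast.Ellipsis, so a NodeVisitor without that method falls through to generic_visit and drops literals from the generated source. A minimal standalone sketch (not part of the patch) showing the node-type change:

    import ast
    import sys

    # Parse a bare literal and inspect the node type of the expression.
    # On Python 3.8+ this prints "Constant", which is why the
    # SourceGenerator above needed a visit_Constant method.
    node = ast.parse("42").body[0].value
    print(sys.version_info[:2], type(node).__name__)
    # 3.7 and earlier -> "Num"; 3.8 and later -> "Constant"
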
    diff --git a/server/www/packages/packages-windows/x86/mako/ast.py b/server/www/packages/packages-windows/x86/mako/ast.py
    index 8d2d150..cfae280 100644
    --- a/server/www/packages/packages-windows/x86/mako/ast.py
    +++ b/server/www/packages/packages-windows/x86/mako/ast.py
    @@ -1,5 +1,5 @@
     # mako/ast.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -7,9 +7,12 @@
     """utilities for analyzing expressions and blocks of Python
     code, as well as generating Python from AST nodes"""
     
    -from mako import exceptions, pyparser, compat
     import re
     
    +from mako import compat
    +from mako import exceptions
    +from mako import pyparser
    +
     
     class PythonCode(object):
     
    @@ -72,36 +75,39 @@ class PythonFragment(PythonCode):
         """extends PythonCode to provide identifier lookups in partial control
         statements
     
    -    e.g.
    +    e.g.::
    +
             for x in 5:
             elif y==9:
             except (MyException, e):
    -    etc.
    +
         """
     
         def __init__(self, code, **exception_kwargs):
    -        m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
    +        m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
             if not m:
                 raise exceptions.CompileException(
    -                "Fragment '%s' is not a partial control statement" %
    -                code, **exception_kwargs)
    +                "Fragment '%s' is not a partial control statement" % code,
    +                **exception_kwargs
    +            )
             if m.group(3):
    -            code = code[:m.start(3)]
    +            code = code[: m.start(3)]
             (keyword, expr) = m.group(1, 2)
    -        if keyword in ['for', 'if', 'while']:
    +        if keyword in ["for", "if", "while"]:
                 code = code + "pass"
    -        elif keyword == 'try':
    +        elif keyword == "try":
                 code = code + "pass\nexcept:pass"
    -        elif keyword == 'elif' or keyword == 'else':
    +        elif keyword == "elif" or keyword == "else":
                 code = "if False:pass\n" + code + "pass"
    -        elif keyword == 'except':
    +        elif keyword == "except":
                 code = "try:pass\n" + code + "pass"
    -        elif keyword == 'with':
    +        elif keyword == "with":
                 code = code + "pass"
             else:
                 raise exceptions.CompileException(
    -                "Unsupported control keyword: '%s'" %
    -                keyword, **exception_kwargs)
    +                "Unsupported control keyword: '%s'" % keyword,
    +                **exception_kwargs
    +            )
             super(PythonFragment, self).__init__(code, **exception_kwargs)
     
     
    @@ -115,14 +121,17 @@ class FunctionDecl(object):
     
             f = pyparser.ParseFunc(self, **exception_kwargs)
             f.visit(expr)
    -        if not hasattr(self, 'funcname'):
    +        if not hasattr(self, "funcname"):
                 raise exceptions.CompileException(
                     "Code '%s' is not a function declaration" % code,
    -                **exception_kwargs)
    +                **exception_kwargs
    +            )
             if not allow_kwargs and self.kwargs:
                 raise exceptions.CompileException(
    -                "'**%s' keyword argument not allowed here" %
    -                self.kwargnames[-1], **exception_kwargs)
    +                "'**%s' keyword argument not allowed here"
    +                % self.kwargnames[-1],
    +                **exception_kwargs
    +            )
     
         def get_argument_expressions(self, as_call=False):
             """Return the argument declarations of this FunctionDecl as a printable
    @@ -157,8 +166,10 @@ class FunctionDecl(object):
                         # `def foo(*, a=1, b, c=3)`
                         namedecls.append(name)
                     else:
    -                    namedecls.append("%s=%s" % (
    -                        name, pyparser.ExpressionGenerator(default).value()))
    +                    namedecls.append(
    +                        "%s=%s"
    +                        % (name, pyparser.ExpressionGenerator(default).value())
    +                    )
                 else:
                     namedecls.append(name)
     
    @@ -171,8 +182,10 @@ class FunctionDecl(object):
                     namedecls.append(name)
                 else:
                     default = defaults.pop(0)
    -                namedecls.append("%s=%s" % (
    -                    name, pyparser.ExpressionGenerator(default).value()))
    +                namedecls.append(
    +                    "%s=%s"
    +                    % (name, pyparser.ExpressionGenerator(default).value())
    +                )
     
             namedecls.reverse()
             return namedecls
    @@ -187,5 +200,6 @@ class FunctionArgs(FunctionDecl):
         """the argument portion of a function declaration"""
     
         def __init__(self, code, **kwargs):
    -        super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
    -                                           **kwargs)
    +        super(FunctionArgs, self).__init__(
    +            "def ANON(%s):pass" % code, **kwargs
    +        )
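
The PythonFragment rewrite above preserves a small parsing trick worth spelling out: a partial control statement such as "elif y == 9:" is not valid Python on its own, so it is padded into a syntactically complete (if meaningless) block before being handed to the parser for identifier analysis. A self-contained sketch of the same completion rules; the helper name complete_fragment is ours, not Mako's:

    import ast
    import re

    def complete_fragment(code):
        # Mirrors the keyword handling in PythonFragment.__init__ above.
        m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
        if not m:
            raise ValueError("not a partial control statement: %r" % code)
        keyword = m.group(1)
        if keyword in ("for", "if", "while", "with"):
            return code + "pass"
        elif keyword == "try":
            return code + "pass\nexcept:pass"
        elif keyword in ("elif", "else"):
            return "if False:pass\n" + code + "pass"
        elif keyword == "except":
            return "try:pass\n" + code + "pass"
        raise ValueError("unsupported control keyword: %r" % keyword)

    # "elif y == 9:" alone is a SyntaxError; completed, it parses fine.
    tree = ast.parse(complete_fragment("elif y == 9:"))
    print(ast.dump(tree.body[0].test))
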
    diff --git a/server/www/packages/packages-windows/x86/mako/cache.py b/server/www/packages/packages-windows/x86/mako/cache.py
    index 1af17dd..26aa93e 100644
    --- a/server/www/packages/packages-windows/x86/mako/cache.py
    +++ b/server/www/packages/packages-windows/x86/mako/cache.py
    @@ -1,10 +1,11 @@
     # mako/cache.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
    -from mako import compat, util
    +from mako import compat
    +from mako import util
     
     _cache_plugins = util.PluginLoader("mako.cache")
     
    @@ -90,9 +91,8 @@ class Cache(object):
                 return creation_function()
     
             return self.impl.get_or_create(
    -            key,
    -            creation_function,
    -            **self._get_cache_kw(kw, context))
    +            key, creation_function, **self._get_cache_kw(kw, context)
    +        )
     
         def set(self, key, value, **kw):
             r"""Place a value in the cache.
    @@ -141,7 +141,7 @@ class Cache(object):
             template.
     
             """
    -        self.invalidate('render_body', __M_defname='render_body')
    +        self.invalidate("render_body", __M_defname="render_body")
     
         def invalidate_def(self, name):
             """Invalidate the cached content of a particular ``<%def>`` within this
    @@ -149,7 +149,7 @@ class Cache(object):
     
             """
     
    -        self.invalidate('render_%s' % name, __M_defname='render_%s' % name)
    +        self.invalidate("render_%s" % name, __M_defname="render_%s" % name)
     
         def invalidate_closure(self, name):
             """Invalidate a nested ``<%def>`` within this template.
    @@ -165,7 +165,7 @@ class Cache(object):
             self.invalidate(name, __M_defname=name)
     
         def _get_cache_kw(self, kw, context):
    -        defname = kw.pop('__M_defname', None)
    +        defname = kw.pop("__M_defname", None)
             if not defname:
                 tmpl_kw = self.template.cache_args.copy()
                 tmpl_kw.update(kw)
    @@ -177,7 +177,7 @@ class Cache(object):
                 self._def_regions[defname] = tmpl_kw
             if context and self.impl.pass_context:
                 tmpl_kw = tmpl_kw.copy()
    -            tmpl_kw.setdefault('context', context)
    +            tmpl_kw.setdefault("context", context)
             return tmpl_kw
     
     
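
Nothing in the cache.py contract changes above: the Cache object still resolves keys such as "render_body" (the whole template) or "render_<defname>" (a single <%def>) and delegates to a backend's get_or_create(). A toy, dictionary-backed illustration of that compute-on-miss contract; this backend is our own sketch, real ones are loaded through the mako.cache plugin loader:

    class ToyCacheImpl(object):
        """Illustrative stand-in for a mako.cache backend plugin."""

        def __init__(self):
            self._store = {}

        def get_or_create(self, key, creation_function, **kw):
            # kw would carry backend arguments (timeouts, regions, the
            # optional pass-through 'context'); this toy ignores them.
            if key not in self._store:
                self._store[key] = creation_function()
            return self._store[key]

        def invalidate(self, key, **kw):
            self._store.pop(key, None)

    impl = ToyCacheImpl()
    print(impl.get_or_create("render_body", lambda: "rendered once"))
    print(impl.get_or_create("render_body", lambda: "never built"))
    impl.invalidate("render_body")  # what invalidate_body() boils down to
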
    diff --git a/server/www/packages/packages-windows/x86/mako/cmd.py b/server/www/packages/packages-windows/x86/mako/cmd.py
    index 8db1346..c0f2c75 100644
    --- a/server/www/packages/packages-windows/x86/mako/cmd.py
    +++ b/server/www/packages/packages-windows/x86/mako/cmd.py
    @@ -1,14 +1,17 @@
     # mako/cmd.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     from argparse import ArgumentParser
    -from os.path import isfile, dirname
    +import io
    +from os.path import dirname
    +from os.path import isfile
     import sys
    -from mako.template import Template
    -from mako.lookup import TemplateLookup
    +
     from mako import exceptions
    +from mako.lookup import TemplateLookup
    +from mako.template import Template
     
     
     def varsplit(var):
    @@ -24,25 +27,47 @@ def _exit():
     
     def cmdline(argv=None):
     
    -    parser = ArgumentParser("usage: %prog [FILENAME]")
    +    parser = ArgumentParser()
         parser.add_argument(
    -        "--var", default=[], action="append",
    -        help="variable (can be used multiple times, use name=value)")
    +        "--var",
    +        default=[],
    +        action="append",
    +        help="variable (can be used multiple times, use name=value)",
    +    )
         parser.add_argument(
    -        "--template-dir", default=[], action="append",
    +        "--template-dir",
    +        default=[],
    +        action="append",
             help="Directory to use for template lookup (multiple "
             "directories may be provided). If not given then if the "
             "template is read from stdin, the value defaults to be "
             "the current directory, otherwise it defaults to be the "
    -        "parent directory of the file provided.")
    -    parser.add_argument('input', nargs='?', default='-')
    +        "parent directory of the file provided.",
    +    )
    +    parser.add_argument(
    +        "--output-encoding", default=None, help="force output encoding"
    +    )
    +    parser.add_argument(
    +        "--output-file",
    +        default=None,
    +        help="Write to file upon successful render instead of stdout",
    +    )
    +    parser.add_argument("input", nargs="?", default="-")
     
         options = parser.parse_args(argv)
    -    if options.input == '-':
    +
    +    output_encoding = options.output_encoding
    +    output_file = options.output_file
    +
    +    if options.input == "-":
             lookup_dirs = options.template_dir or ["."]
             lookup = TemplateLookup(lookup_dirs)
             try:
    -            template = Template(sys.stdin.read(), lookup=lookup)
    +            template = Template(
    +                sys.stdin.read(),
    +                lookup=lookup,
    +                output_encoding=output_encoding,
    +            )
             except:
                 _exit()
         else:
    @@ -52,15 +77,26 @@ def cmdline(argv=None):
             lookup_dirs = options.template_dir or [dirname(filename)]
             lookup = TemplateLookup(lookup_dirs)
             try:
    -            template = Template(filename=filename, lookup=lookup)
    +            template = Template(
    +                filename=filename,
    +                lookup=lookup,
    +                output_encoding=output_encoding,
    +            )
             except:
                 _exit()
     
         kw = dict([varsplit(var) for var in options.var])
         try:
    -        sys.stdout.write(template.render(**kw))
    +        rendered = template.render(**kw)
         except:
             _exit()
    +    else:
    +        if output_file:
    +            io.open(output_file, "wt", encoding=output_encoding).write(
    +                rendered
    +            )
    +        else:
    +            sys.stdout.write(rendered)
     
     
     if __name__ == "__main__":
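
The cmd.py hunks are the one user-visible feature change in this batch: the command-line front end gains --output-encoding and --output-file, and render errors are now kept apart from output writing by the try/except/else split. A hedged usage sketch driving the updated cmdline() programmatically; the template path and variable are made up:

    from mako.cmd import cmdline

    # Equivalent to the mako-render console script invocation:
    #   mako-render --output-encoding utf-8 --output-file hello.html \
    #               --var name=world hello.mako
    cmdline([
        "--output-encoding", "utf-8",   # force encoding of the result
        "--output-file", "hello.html",  # write here instead of stdout
        "--var", "name=world",          # forwarded to template.render()
        "hello.mako",                   # hypothetical template file
    ])
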
    diff --git a/server/www/packages/packages-windows/x86/mako/codegen.py b/server/www/packages/packages-windows/x86/mako/codegen.py
    index d4ecbe8..a9ae55b 100644
    --- a/server/www/packages/packages-windows/x86/mako/codegen.py
    +++ b/server/www/packages/packages-windows/x86/mako/codegen.py
    @@ -1,5 +1,5 @@
     # mako/codegen.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -7,11 +7,17 @@
     """provides functionality for rendering a parsetree constructing into module
     source code."""
     
    -import time
    +import json
     import re
    -from mako.pygen import PythonPrinter
    -from mako import util, ast, parsetree, filters, exceptions
    +import time
    +
    +from mako import ast
     from mako import compat
    +from mako import exceptions
    +from mako import filters
    +from mako import parsetree
    +from mako import util
    +from mako.pygen import PythonPrinter
     
     
     MAGIC_NUMBER = 10
    @@ -20,22 +26,24 @@ MAGIC_NUMBER = 10
     # template and are not accessed via the
     # context itself
     TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"])
    -RESERVED_NAMES = set(['context', 'loop']).union(TOPLEVEL_DECLARED)
    +RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED)
     
     
    -def compile(node,
    -            uri,
    -            filename=None,
    -            default_filters=None,
    -            buffer_filters=None,
    -            imports=None,
    -            future_imports=None,
    -            source_encoding=None,
    -            generate_magic_comment=True,
    -            disable_unicode=False,
    -            strict_undefined=False,
    -            enable_loop=True,
    -            reserved_names=frozenset()):
    +def compile(  # noqa
    +    node,
    +    uri,
    +    filename=None,
    +    default_filters=None,
    +    buffer_filters=None,
    +    imports=None,
    +    future_imports=None,
    +    source_encoding=None,
    +    generate_magic_comment=True,
    +    disable_unicode=False,
    +    strict_undefined=False,
    +    enable_loop=True,
    +    reserved_names=frozenset(),
    +):
         """Generate module source code given a parsetree node,
           uri, and optional source filename"""
     
    @@ -49,38 +57,43 @@ def compile(node,
         buf = util.FastEncodingBuffer()
     
         printer = PythonPrinter(buf)
    -    _GenerateRenderMethod(printer,
    -                          _CompileContext(uri,
    -                                          filename,
    -                                          default_filters,
    -                                          buffer_filters,
    -                                          imports,
    -                                          future_imports,
    -                                          source_encoding,
    -                                          generate_magic_comment,
    -                                          disable_unicode,
    -                                          strict_undefined,
    -                                          enable_loop,
    -                                          reserved_names),
    -                          node)
    +    _GenerateRenderMethod(
    +        printer,
    +        _CompileContext(
    +            uri,
    +            filename,
    +            default_filters,
    +            buffer_filters,
    +            imports,
    +            future_imports,
    +            source_encoding,
    +            generate_magic_comment,
    +            disable_unicode,
    +            strict_undefined,
    +            enable_loop,
    +            reserved_names,
    +        ),
    +        node,
    +    )
         return buf.getvalue()
     
     
     class _CompileContext(object):
    -
    -    def __init__(self,
    -                 uri,
    -                 filename,
    -                 default_filters,
    -                 buffer_filters,
    -                 imports,
    -                 future_imports,
    -                 source_encoding,
    -                 generate_magic_comment,
    -                 disable_unicode,
    -                 strict_undefined,
    -                 enable_loop,
    -                 reserved_names):
    +    def __init__(
    +        self,
    +        uri,
    +        filename,
    +        default_filters,
    +        buffer_filters,
    +        imports,
    +        future_imports,
    +        source_encoding,
    +        generate_magic_comment,
    +        disable_unicode,
    +        strict_undefined,
    +        enable_loop,
    +        reserved_names,
    +    ):
             self.uri = uri
             self.filename = filename
             self.default_filters = default_filters
    @@ -113,12 +126,12 @@ class _GenerateRenderMethod(object):
                 name = "render_%s" % node.funcname
                 args = node.get_argument_expressions()
                 filtered = len(node.filter_args.args) > 0
    -            buffered = eval(node.attributes.get('buffered', 'False'))
    -            cached = eval(node.attributes.get('cached', 'False'))
    +            buffered = eval(node.attributes.get("buffered", "False"))
    +            cached = eval(node.attributes.get("cached", "False"))
                 defs = None
                 pagetag = None
                 if node.is_block and not node.is_anonymous:
    -                args += ['**pageargs']
    +                args += ["**pageargs"]
             else:
                 defs = self.write_toplevel()
                 pagetag = self.compiler.pagetag
    @@ -126,25 +139,23 @@ class _GenerateRenderMethod(object):
                 if pagetag is not None:
                     args = pagetag.body_decl.get_argument_expressions()
                     if not pagetag.body_decl.kwargs:
    -                    args += ['**pageargs']
    -                cached = eval(pagetag.attributes.get('cached', 'False'))
    +                    args += ["**pageargs"]
    +                cached = eval(pagetag.attributes.get("cached", "False"))
                     self.compiler.enable_loop = self.compiler.enable_loop or eval(
    -                    pagetag.attributes.get(
    -                        'enable_loop', 'False')
    +                    pagetag.attributes.get("enable_loop", "False")
                     )
                 else:
    -                args = ['**pageargs']
    +                args = ["**pageargs"]
                     cached = False
                 buffered = filtered = False
             if args is None:
    -            args = ['context']
    +            args = ["context"]
             else:
    -            args = [a for a in ['context'] + args]
    +            args = [a for a in ["context"] + args]
     
             self.write_render_callable(
    -            pagetag or node,
    -            name, args,
    -            buffered, filtered, cached)
    +            pagetag or node, name, args, buffered, filtered, cached
    +        )
     
             if defs is not None:
                 for node in defs:
    @@ -154,8 +165,9 @@ class _GenerateRenderMethod(object):
                 self.write_metadata_struct()
     
         def write_metadata_struct(self):
    -        self.printer.source_map[self.printer.lineno] = \
    -            max(self.printer.source_map)
    +        self.printer.source_map[self.printer.lineno] = max(
    +            self.printer.source_map
    +        )
             struct = {
                 "filename": self.compiler.filename,
                 "uri": self.compiler.uri,
    @@ -164,10 +176,9 @@ class _GenerateRenderMethod(object):
             }
             self.printer.writelines(
                 '"""',
    -            '__M_BEGIN_METADATA',
    -            compat.json.dumps(struct),
    -            '__M_END_METADATA\n'
    -            '"""'
    +            "__M_BEGIN_METADATA",
    +            json.dumps(struct),
    +            "__M_END_METADATA\n" '"""',
             )
     
         @property
    @@ -186,7 +197,6 @@ class _GenerateRenderMethod(object):
             self.compiler.pagetag = None
     
             class FindTopLevel(object):
    -
                 def visitInheritTag(s, node):
                     inherit.append(node)
     
    @@ -214,14 +224,19 @@ class _GenerateRenderMethod(object):
             module_identifiers.declared = module_ident
     
             # module-level names, python code
    -        if self.compiler.generate_magic_comment and \
    -                self.compiler.source_encoding:
    -            self.printer.writeline("# -*- coding:%s -*-" %
    -                                   self.compiler.source_encoding)
    +        if (
    +            self.compiler.generate_magic_comment
    +            and self.compiler.source_encoding
    +        ):
    +            self.printer.writeline(
    +                "# -*- coding:%s -*-" % self.compiler.source_encoding
    +            )
     
             if self.compiler.future_imports:
    -            self.printer.writeline("from __future__ import %s" %
    -                                   (", ".join(self.compiler.future_imports),))
    +            self.printer.writeline(
    +                "from __future__ import %s"
    +                % (", ".join(self.compiler.future_imports),)
    +            )
             self.printer.writeline("from mako import runtime, filters, cache")
             self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
             self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
    @@ -231,36 +246,41 @@ class _GenerateRenderMethod(object):
             self.printer.writeline("_modified_time = %r" % time.time())
             self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
             self.printer.writeline(
    -            "_template_filename = %r" % self.compiler.filename)
    +            "_template_filename = %r" % self.compiler.filename
    +        )
             self.printer.writeline("_template_uri = %r" % self.compiler.uri)
             self.printer.writeline(
    -            "_source_encoding = %r" % self.compiler.source_encoding)
    +            "_source_encoding = %r" % self.compiler.source_encoding
    +        )
             if self.compiler.imports:
    -            buf = ''
    +            buf = ""
                 for imp in self.compiler.imports:
                     buf += imp + "\n"
                     self.printer.writeline(imp)
                 impcode = ast.PythonCode(
                     buf,
    -                source='', lineno=0,
    +                source="",
    +                lineno=0,
                     pos=0,
    -                filename='template defined imports')
    +                filename="template defined imports",
    +            )
             else:
                 impcode = None
     
             main_identifiers = module_identifiers.branch(self.node)
    -        module_identifiers.topleveldefs = \
    -            module_identifiers.topleveldefs.\
    -            union(main_identifiers.topleveldefs)
    +        mit = module_identifiers.topleveldefs
    +        module_identifiers.topleveldefs = mit.union(
    +            main_identifiers.topleveldefs
    +        )
             module_identifiers.declared.update(TOPLEVEL_DECLARED)
             if impcode:
                 module_identifiers.declared.update(impcode.declared_identifiers)
     
             self.compiler.identifiers = module_identifiers
    -        self.printer.writeline("_exports = %r" %
    -                               [n.name for n in
    -                                main_identifiers.topleveldefs.values()]
    -                               )
    +        self.printer.writeline(
    +            "_exports = %r"
    +            % [n.name for n in main_identifiers.topleveldefs.values()]
    +        )
             self.printer.write_blanks(2)
     
             if len(module_code):
    @@ -274,8 +294,9 @@ class _GenerateRenderMethod(object):
     
             return list(main_identifiers.topleveldefs.values())
     
    -    def write_render_callable(self, node, name, args, buffered, filtered,
    -                              cached):
    +    def write_render_callable(
    +        self, node, name, args, buffered, filtered, cached
    +    ):
             """write a top-level render callable.
     
             this could be the main render() method or that of a top-level def."""
    @@ -284,32 +305,38 @@ class _GenerateRenderMethod(object):
                 decorator = node.decorator
                 if decorator:
                     self.printer.writeline(
    -                    "@runtime._decorate_toplevel(%s)" % decorator)
    +                    "@runtime._decorate_toplevel(%s)" % decorator
    +                )
     
             self.printer.start_source(node.lineno)
             self.printer.writelines(
    -            "def %s(%s):" % (name, ','.join(args)),
    +            "def %s(%s):" % (name, ",".join(args)),
                 # push new frame, assign current frame to __M_caller
                 "__M_caller = context.caller_stack._push_frame()",
    -            "try:"
    +            "try:",
             )
             if buffered or filtered or cached:
                 self.printer.writeline("context._push_buffer()")
     
             self.identifier_stack.append(
    -            self.compiler.identifiers.branch(self.node))
    -        if (not self.in_def or self.node.is_block) and '**pageargs' in args:
    -            self.identifier_stack[-1].argument_declared.add('pageargs')
    +            self.compiler.identifiers.branch(self.node)
    +        )
    +        if (not self.in_def or self.node.is_block) and "**pageargs" in args:
    +            self.identifier_stack[-1].argument_declared.add("pageargs")
     
             if not self.in_def and (
    -            len(self.identifiers.locally_assigned) > 0 or
    -            len(self.identifiers.argument_declared) > 0
    +            len(self.identifiers.locally_assigned) > 0
    +            or len(self.identifiers.argument_declared) > 0
             ):
    -            self.printer.writeline("__M_locals = __M_dict_builtin(%s)" %
    -                                   ','.join([
    -                                       "%s=%s" % (x, x) for x in
    -                                            self.identifiers.argument_declared
    -                                            ]))
    +            self.printer.writeline(
    +                "__M_locals = __M_dict_builtin(%s)"
    +                % ",".join(
    +                    [
    +                        "%s=%s" % (x, x)
    +                        for x in self.identifiers.argument_declared
    +                    ]
    +                )
    +            )
     
             self.write_variable_declares(self.identifiers, toplevel=True)
     
    @@ -321,16 +348,14 @@ class _GenerateRenderMethod(object):
             self.printer.write_blanks(2)
             if cached:
                 self.write_cache_decorator(
    -                node, name,
    -                args, buffered,
    -                self.identifiers, toplevel=True)
    +                node, name, args, buffered, self.identifiers, toplevel=True
    +            )
     
         def write_module_code(self, module_code):
             """write module-level template code, i.e. that which
             is enclosed in <%! %> tags in the template."""
             for n in module_code:
    -            self.printer.start_source(n.lineno)
    -            self.printer.write_indented_block(n.text)
    +            self.printer.write_indented_block(n.text, starting_lineno=n.lineno)
     
         def write_inherit(self, node):
             """write the module-level inheritance-determination callable."""
    @@ -338,9 +363,9 @@ class _GenerateRenderMethod(object):
             self.printer.writelines(
                 "def _mako_inherit(template, context):",
                 "_mako_generate_namespaces(context)",
    -            "return runtime._inherit_from(context, %s, _template_uri)" %
    -            (node.parsed_attributes['file']),
    -            None
    +            "return runtime._inherit_from(context, %s, _template_uri)"
    +            % (node.parsed_attributes["file"]),
    +            None,
             )
     
         def write_namespaces(self, namespaces):
    @@ -352,12 +377,13 @@ class _GenerateRenderMethod(object):
                 "except KeyError:",
                 "_mako_generate_namespaces(context)",
                 "return context.namespaces[(__name__, name)]",
    -            None, None
    +            None,
    +            None,
             )
             self.printer.writeline("def _mako_generate_namespaces(context):")
     
             for node in namespaces.values():
    -            if 'import' in node.attributes:
    +            if "import" in node.attributes:
                     self.compiler.has_ns_imports = True
                 self.printer.start_source(node.lineno)
                 if len(node.nodes):
    @@ -367,7 +393,6 @@ class _GenerateRenderMethod(object):
                     self.in_def = True
     
                     class NSDefVisitor(object):
    -
                         def visitDefTag(s, node):
                             s.visitDefOrBase(node)
     
    @@ -383,56 +408,54 @@ class _GenerateRenderMethod(object):
                                 )
                             self.write_inline_def(node, identifiers, nested=False)
                             export.append(node.funcname)
    +
                     vis = NSDefVisitor()
                     for n in node.nodes:
                         n.accept_visitor(vis)
    -                self.printer.writeline("return [%s]" % (','.join(export)))
    +                self.printer.writeline("return [%s]" % (",".join(export)))
                     self.printer.writeline(None)
                     self.in_def = False
                     callable_name = "make_namespace()"
                 else:
                     callable_name = "None"
     
    -            if 'file' in node.parsed_attributes:
    +            if "file" in node.parsed_attributes:
                     self.printer.writeline(
                         "ns = runtime.TemplateNamespace(%r,"
                         " context._clean_inheritance_tokens(),"
                         " templateuri=%s, callables=%s, "
    -                    " calling_uri=_template_uri)" %
    -                    (
    +                    " calling_uri=_template_uri)"
    +                    % (
                             node.name,
    -                        node.parsed_attributes.get('file', 'None'),
    +                        node.parsed_attributes.get("file", "None"),
                             callable_name,
                         )
                     )
    -            elif 'module' in node.parsed_attributes:
    +            elif "module" in node.parsed_attributes:
                     self.printer.writeline(
                         "ns = runtime.ModuleNamespace(%r,"
                         " context._clean_inheritance_tokens(),"
                         " callables=%s, calling_uri=_template_uri,"
    -                    " module=%s)" %
    -                    (
    +                    " module=%s)"
    +                    % (
                             node.name,
                             callable_name,
    -                        node.parsed_attributes.get(
    -                            'module', 'None')
    +                        node.parsed_attributes.get("module", "None"),
                         )
                     )
                 else:
                     self.printer.writeline(
                         "ns = runtime.Namespace(%r,"
                         " context._clean_inheritance_tokens(),"
    -                    " callables=%s, calling_uri=_template_uri)" %
    -                    (
    -                        node.name,
    -                        callable_name,
    -                    )
    +                    " callables=%s, calling_uri=_template_uri)"
    +                    % (node.name, callable_name)
                     )
    -            if eval(node.attributes.get('inheritable', "False")):
    +            if eval(node.attributes.get("inheritable", "False")):
                     self.printer.writeline("context['self'].%s = ns" % (node.name))
     
                 self.printer.writeline(
    -                "context.namespaces[(__name__, %s)] = ns" % repr(node.name))
    +                "context.namespaces[(__name__, %s)] = ns" % repr(node.name)
    +            )
                 self.printer.write_blanks(1)
             if not len(namespaces):
                 self.printer.writeline("pass")
    @@ -468,7 +491,8 @@ class _GenerateRenderMethod(object):
             # write closure functions for closures that we define
             # right here
             to_write = to_write.union(
    -            [c.funcname for c in identifiers.closuredefs.values()])
    +            [c.funcname for c in identifiers.closuredefs.values()]
    +        )
     
             # remove identifiers that are declared in the argument
             # signature of the callable
    @@ -492,23 +516,22 @@ class _GenerateRenderMethod(object):
             if limit is not None:
                 to_write = to_write.intersection(limit)
     
    -        if toplevel and getattr(self.compiler, 'has_ns_imports', False):
    +        if toplevel and getattr(self.compiler, "has_ns_imports", False):
                 self.printer.writeline("_import_ns = {}")
                 self.compiler.has_imports = True
                 for ident, ns in self.compiler.namespaces.items():
    -                if 'import' in ns.attributes:
    +                if "import" in ns.attributes:
                         self.printer.writeline(
                             "_mako_get_namespace(context, %r)."
    -                        "_populate(_import_ns, %r)" %
    -                        (
    +                        "_populate(_import_ns, %r)"
    +                        % (
                                 ident,
    -                            re.split(r'\s*,\s*', ns.attributes['import'])
    -                        ))
    +                            re.split(r"\s*,\s*", ns.attributes["import"]),
    +                        )
    +                    )
     
             if has_loop:
    -            self.printer.writeline(
    -                'loop = __M_loop = runtime.LoopStack()'
    -            )
    +            self.printer.writeline("loop = __M_loop = runtime.LoopStack()")
     
             for ident in to_write:
                 if ident in comp_idents:
    @@ -526,37 +549,36 @@ class _GenerateRenderMethod(object):
     
                 elif ident in self.compiler.namespaces:
                     self.printer.writeline(
    -                    "%s = _mako_get_namespace(context, %r)" %
    -                    (ident, ident)
    +                    "%s = _mako_get_namespace(context, %r)" % (ident, ident)
                     )
                 else:
    -                if getattr(self.compiler, 'has_ns_imports', False):
    +                if getattr(self.compiler, "has_ns_imports", False):
                         if self.compiler.strict_undefined:
                             self.printer.writelines(
    -                            "%s = _import_ns.get(%r, UNDEFINED)" %
    -                            (ident, ident),
    +                            "%s = _import_ns.get(%r, UNDEFINED)"
    +                            % (ident, ident),
                                 "if %s is UNDEFINED:" % ident,
                                 "try:",
                                 "%s = context[%r]" % (ident, ident),
                                 "except KeyError:",
    -                            "raise NameError(\"'%s' is not defined\")" %
    -                            ident,
    -                            None, None
    +                            "raise NameError(\"'%s' is not defined\")" % ident,
    +                            None,
    +                            None,
                             )
                         else:
                             self.printer.writeline(
                                 "%s = _import_ns.get"
    -                            "(%r, context.get(%r, UNDEFINED))" %
    -                            (ident, ident, ident))
    +                            "(%r, context.get(%r, UNDEFINED))"
    +                            % (ident, ident, ident)
    +                        )
                     else:
                         if self.compiler.strict_undefined:
                             self.printer.writelines(
                                 "try:",
                                 "%s = context[%r]" % (ident, ident),
                                 "except KeyError:",
    -                            "raise NameError(\"'%s' is not defined\")" %
    -                            ident,
    -                            None
    +                            "raise NameError(\"'%s' is not defined\")" % ident,
    +                            None,
                             )
                         else:
                             self.printer.writeline(
    @@ -572,14 +594,16 @@ class _GenerateRenderMethod(object):
             nameargs = node.get_argument_expressions(as_call=True)
     
             if not self.in_def and (
    -                len(self.identifiers.locally_assigned) > 0 or
    -                len(self.identifiers.argument_declared) > 0):
    -            nameargs.insert(0, 'context._locals(__M_locals)')
    +            len(self.identifiers.locally_assigned) > 0
    +            or len(self.identifiers.argument_declared) > 0
    +        ):
    +            nameargs.insert(0, "context._locals(__M_locals)")
             else:
    -            nameargs.insert(0, 'context')
    +            nameargs.insert(0, "context")
             self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
             self.printer.writeline(
    -            "return render_%s(%s)" % (funcname, ",".join(nameargs)))
    +            "return render_%s(%s)" % (funcname, ",".join(nameargs))
    +        )
             self.printer.writeline(None)
     
         def write_inline_def(self, node, identifiers, nested):
    @@ -590,21 +614,21 @@ class _GenerateRenderMethod(object):
             decorator = node.decorator
             if decorator:
                 self.printer.writeline(
    -                "@runtime._decorate_inline(context, %s)" % decorator)
    +                "@runtime._decorate_inline(context, %s)" % decorator
    +            )
             self.printer.writeline(
    -            "def %s(%s):" % (node.funcname, ",".join(namedecls)))
    +            "def %s(%s):" % (node.funcname, ",".join(namedecls))
    +        )
             filtered = len(node.filter_args.args) > 0
    -        buffered = eval(node.attributes.get('buffered', 'False'))
    -        cached = eval(node.attributes.get('cached', 'False'))
    +        buffered = eval(node.attributes.get("buffered", "False"))
    +        cached = eval(node.attributes.get("cached", "False"))
             self.printer.writelines(
                 # push new frame, assign current frame to __M_caller
                 "__M_caller = context.caller_stack._push_frame()",
    -            "try:"
    +            "try:",
             )
             if buffered or filtered or cached:
    -            self.printer.writelines(
    -                "context._push_buffer()",
    -            )
    +            self.printer.writelines("context._push_buffer()")
     
             identifiers = identifiers.branch(node, nested=nested)
     
    @@ -618,12 +642,19 @@ class _GenerateRenderMethod(object):
             self.write_def_finish(node, buffered, filtered, cached)
             self.printer.writeline(None)
             if cached:
    -            self.write_cache_decorator(node, node.funcname,
    -                                       namedecls, False, identifiers,
    -                                       inline=True, toplevel=False)
    +            self.write_cache_decorator(
    +                node,
    +                node.funcname,
    +                namedecls,
    +                False,
    +                identifiers,
    +                inline=True,
    +                toplevel=False,
    +            )
     
    -    def write_def_finish(self, node, buffered, filtered, cached,
    -                         callstack=True):
    +    def write_def_finish(
    +        self, node, buffered, filtered, cached, callstack=True
    +    ):
             """write the end section of a rendering function, either outermost or
             inline.
     
    @@ -636,9 +667,7 @@ class _GenerateRenderMethod(object):
                 self.printer.writeline("return ''")
                 if callstack:
                     self.printer.writelines(
    -                    "finally:",
    -                    "context.caller_stack._pop_frame()",
    -                    None
    +                    "finally:", "context.caller_stack._pop_frame()", None
                     )
     
             if buffered or filtered or cached:
    @@ -648,13 +677,12 @@ class _GenerateRenderMethod(object):
                     # implementation might be using a context with no
                     # extra buffers
                     self.printer.writelines(
    -                    "finally:",
    -                    "__M_buf = context._pop_buffer()"
    +                    "finally:", "__M_buf = context._pop_buffer()"
                     )
                 else:
                     self.printer.writelines(
                         "finally:",
    -                    "__M_buf, __M_writer = context._pop_buffer_and_writer()"
    +                    "__M_buf, __M_writer = context._pop_buffer_and_writer()",
                     )
     
                 if callstack:
    @@ -662,89 +690,100 @@ class _GenerateRenderMethod(object):
     
                 s = "__M_buf.getvalue()"
                 if filtered:
    -                s = self.create_filter_callable(node.filter_args.args, s,
    -                                                False)
    +                s = self.create_filter_callable(
    +                    node.filter_args.args, s, False
    +                )
                 self.printer.writeline(None)
                 if buffered and not cached:
    -                s = self.create_filter_callable(self.compiler.buffer_filters,
    -                                                s, False)
    +                s = self.create_filter_callable(
    +                    self.compiler.buffer_filters, s, False
    +                )
                 if buffered or cached:
                     self.printer.writeline("return %s" % s)
                 else:
    -                self.printer.writelines(
    -                    "__M_writer(%s)" % s,
    -                    "return ''"
    -                )
    +                self.printer.writelines("__M_writer(%s)" % s, "return ''")
     
    -    def write_cache_decorator(self, node_or_pagetag, name,
    -                              args, buffered, identifiers,
    -                              inline=False, toplevel=False):
    +    def write_cache_decorator(
    +        self,
    +        node_or_pagetag,
    +        name,
    +        args,
    +        buffered,
    +        identifiers,
    +        inline=False,
    +        toplevel=False,
    +    ):
             """write a post-function decorator to replace a rendering
                 callable with a cached version of itself."""
     
             self.printer.writeline("__M_%s = %s" % (name, name))
    -        cachekey = node_or_pagetag.parsed_attributes.get('cache_key',
    -                                                         repr(name))
    +        cachekey = node_or_pagetag.parsed_attributes.get(
    +            "cache_key", repr(name)
    +        )
     
             cache_args = {}
             if self.compiler.pagetag is not None:
                 cache_args.update(
    -                (
    -                    pa[6:],
    -                    self.compiler.pagetag.parsed_attributes[pa]
    -                )
    +                (pa[6:], self.compiler.pagetag.parsed_attributes[pa])
                     for pa in self.compiler.pagetag.parsed_attributes
    -                if pa.startswith('cache_') and pa != 'cache_key'
    +                if pa.startswith("cache_") and pa != "cache_key"
                 )
             cache_args.update(
    -            (
    -                pa[6:],
    -                node_or_pagetag.parsed_attributes[pa]
    -            ) for pa in node_or_pagetag.parsed_attributes
    -            if pa.startswith('cache_') and pa != 'cache_key'
    +            (pa[6:], node_or_pagetag.parsed_attributes[pa])
    +            for pa in node_or_pagetag.parsed_attributes
    +            if pa.startswith("cache_") and pa != "cache_key"
             )
    -        if 'timeout' in cache_args:
    -            cache_args['timeout'] = int(eval(cache_args['timeout']))
    +        if "timeout" in cache_args:
    +            cache_args["timeout"] = int(eval(cache_args["timeout"]))
     
    -        self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
    +        self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
     
             # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
             pass_args = [
    -            "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a
    -            for a in args
    +            "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
             ]
     
             self.write_variable_declares(
                 identifiers,
                 toplevel=toplevel,
    -            limit=node_or_pagetag.undeclared_identifiers()
    +            limit=node_or_pagetag.undeclared_identifiers(),
             )
             if buffered:
    -            s = "context.get('local')."\
    -                "cache._ctx_get_or_create("\
    -                "%s, lambda:__M_%s(%s),  context, %s__M_defname=%r)" % (
    -                    cachekey, name, ','.join(pass_args),
    -                    ''.join(["%s=%s, " % (k, v)
    -                             for k, v in cache_args.items()]),
    -                    name
    +            s = (
    +                "context.get('local')."
    +                "cache._ctx_get_or_create("
    +                "%s, lambda:__M_%s(%s),  context, %s__M_defname=%r)"
    +                % (
    +                    cachekey,
    +                    name,
    +                    ",".join(pass_args),
    +                    "".join(
    +                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
    +                    ),
    +                    name,
                     )
    +            )
                 # apply buffer_filters
    -            s = self.create_filter_callable(self.compiler.buffer_filters, s,
    -                                            False)
    +            s = self.create_filter_callable(
    +                self.compiler.buffer_filters, s, False
    +            )
                 self.printer.writelines("return " + s, None)
             else:
                 self.printer.writelines(
                     "__M_writer(context.get('local')."
                     "cache._ctx_get_or_create("
    -                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" %
    -                (
    -                    cachekey, name, ','.join(pass_args),
    -                    ''.join(["%s=%s, " % (k, v)
    -                             for k, v in cache_args.items()]),
    +                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
    +                % (
    +                    cachekey,
    +                    name,
    +                    ",".join(pass_args),
    +                    "".join(
    +                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
    +                    ),
                         name,
                     ),
                     "return ''",
    -                None
    +                None,
                 )
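
    # The net effect of write_cache_decorator: the generated module rebinds
    # the plain callable and wraps it with a cache lookup. The output is
    # shaped roughly like this sketch ("render_body" and "timeout=30" are
    # illustrative, assuming a body def with a cache_timeout attribute;
    # not verbatim compiler output):
    #
    #     __M_render_body = render_body
    #     def render_body(context):
    #         return context.get('local').cache._ctx_get_or_create(
    #             'render_body', lambda: __M_render_body(context), context,
    #             timeout=30, __M_defname='render_body')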
     
         def create_filter_callable(self, args, target, is_expression):
    @@ -753,24 +792,24 @@ class _GenerateRenderMethod(object):
             'default' filter aliases as needed."""
     
             def locate_encode(name):
    -            if re.match(r'decode\..+', name):
    +            if re.match(r"decode\..+", name):
                     return "filters." + name
                 elif self.compiler.disable_unicode:
                     return filters.NON_UNICODE_ESCAPES.get(name, name)
                 else:
                     return filters.DEFAULT_ESCAPES.get(name, name)
     
    -        if 'n' not in args:
    +        if "n" not in args:
                 if is_expression:
                     if self.compiler.pagetag:
                         args = self.compiler.pagetag.filter_args.args + args
    -                if self.compiler.default_filters:
    +                if self.compiler.default_filters and "n" not in args:
                         args = self.compiler.default_filters + args
             for e in args:
                 # if filter given as a function, get just the identifier portion
    -            if e == 'n':
    +            if e == "n":
                     continue
    -            m = re.match(r'(.+?)(\(.*\))', e)
    +            m = re.match(r"(.+?)(\(.*\))", e)
                 if m:
                     ident, fargs = m.group(1, 2)
                     f = locate_encode(ident)
    @@ -783,15 +822,18 @@ class _GenerateRenderMethod(object):
     
         def visitExpression(self, node):
             self.printer.start_source(node.lineno)
    -        if len(node.escapes) or \
    -                (
    -                    self.compiler.pagetag is not None and
    -                    len(self.compiler.pagetag.filter_args.args)
    -        ) or \
    -                len(self.compiler.default_filters):
    +        if (
    +            len(node.escapes)
    +            or (
    +                self.compiler.pagetag is not None
    +                and len(self.compiler.pagetag.filter_args.args)
    +            )
    +            or len(self.compiler.default_filters)
    +        ):
     
    -            s = self.create_filter_callable(node.escapes_code.args,
    -                                            "%s" % node.text, True)
    +            s = self.create_filter_callable(
    +                node.escapes_code.args, "%s" % node.text, True
    +            )
                 self.printer.writeline("__M_writer(%s)" % s)
             else:
                 self.printer.writeline("__M_writer(%s)" % node.text)
    @@ -800,12 +842,12 @@ class _GenerateRenderMethod(object):
             if node.isend:
                 self.printer.writeline(None)
                 if node.has_loop_context:
    -                self.printer.writeline('finally:')
    +                self.printer.writeline("finally:")
                     self.printer.writeline("loop = __M_loop._exit()")
                     self.printer.writeline(None)
             else:
                 self.printer.start_source(node.lineno)
    -            if self.compiler.enable_loop and node.keyword == 'for':
    +            if self.compiler.enable_loop and node.keyword == "for":
                     text = mangle_mako_loop(node, self.printer)
                 else:
                     text = node.text
    @@ -817,12 +859,16 @@ class _GenerateRenderMethod(object):
                 #          and end control lines, and
                 #    3) any control line with no content other than comments
                 if not children or (
    -                    compat.all(isinstance(c, (parsetree.Comment,
    -                                              parsetree.ControlLine))
    -                               for c in children) and
    -                    compat.all((node.is_ternary(c.keyword) or c.isend)
    -                               for c in children
    -                               if isinstance(c, parsetree.ControlLine))):
    +                compat.all(
    +                    isinstance(c, (parsetree.Comment, parsetree.ControlLine))
    +                    for c in children
    +                )
    +                and compat.all(
    +                    (node.is_ternary(c.keyword) or c.isend)
    +                    for c in children
    +                    if isinstance(c, parsetree.ControlLine)
    +                )
    +            ):
                     self.printer.writeline("pass")
     
         def visitText(self, node):
    @@ -833,8 +879,7 @@ class _GenerateRenderMethod(object):
             filtered = len(node.filter_args.args) > 0
             if filtered:
                 self.printer.writelines(
    -                "__M_writer = context._push_writer()",
    -                "try:",
    +                "__M_writer = context._push_writer()", "try:"
                 )
             for n in node.nodes:
                 n.accept_visitor(self)
    @@ -842,18 +887,18 @@ class _GenerateRenderMethod(object):
                 self.printer.writelines(
                     "finally:",
                     "__M_buf, __M_writer = context._pop_buffer_and_writer()",
    -                "__M_writer(%s)" %
    -                self.create_filter_callable(
    -                    node.filter_args.args,
    -                    "__M_buf.getvalue()",
    -                    False),
    -                None
    +                "__M_writer(%s)"
    +                % self.create_filter_callable(
    +                    node.filter_args.args, "__M_buf.getvalue()", False
    +                ),
    +                None,
                 )
     
         def visitCode(self, node):
             if not node.ismodule:
    -            self.printer.start_source(node.lineno)
    -            self.printer.write_indented_block(node.text)
    +            self.printer.write_indented_block(
    +                node.text, starting_lineno=node.lineno
    +            )
     
                 if not self.in_def and len(self.identifiers.locally_assigned) > 0:
                     # if we are the "template" def, fudge locally
    @@ -861,24 +906,28 @@ class _GenerateRenderMethod(object):
                     # which is used for def calls within the same template,
                     # to simulate "enclosing scope"
                     self.printer.writeline(
    -                    '__M_locals_builtin_stored = __M_locals_builtin()')
    +                    "__M_locals_builtin_stored = __M_locals_builtin()"
    +                )
                     self.printer.writeline(
    -                    '__M_locals.update(__M_dict_builtin([(__M_key,'
    -                    ' __M_locals_builtin_stored[__M_key]) for __M_key in'
    -                    ' [%s] if __M_key in __M_locals_builtin_stored]))' %
    -                    ','.join([repr(x) for x in node.declared_identifiers()]))
    +                    "__M_locals.update(__M_dict_builtin([(__M_key,"
    +                    " __M_locals_builtin_stored[__M_key]) for __M_key in"
    +                    " [%s] if __M_key in __M_locals_builtin_stored]))"
    +                    % ",".join([repr(x) for x in node.declared_identifiers()])
    +                )
     
         def visitIncludeTag(self, node):
             self.printer.start_source(node.lineno)
    -        args = node.attributes.get('args')
    +        args = node.attributes.get("args")
             if args:
                 self.printer.writeline(
    -                "runtime._include_file(context, %s, _template_uri, %s)" %
    -                (node.parsed_attributes['file'], args))
    +                "runtime._include_file(context, %s, _template_uri, %s)"
    +                % (node.parsed_attributes["file"], args)
    +            )
             else:
                 self.printer.writeline(
    -                "runtime._include_file(context, %s, _template_uri)" %
    -                (node.parsed_attributes['file']))
    +                "runtime._include_file(context, %s, _template_uri)"
    +                % (node.parsed_attributes["file"])
    +            )
     
         def visitNamespaceTag(self, node):
             pass
    @@ -891,13 +940,14 @@ class _GenerateRenderMethod(object):
                 self.printer.writeline("%s()" % node.funcname)
             else:
                 nameargs = node.get_argument_expressions(as_call=True)
    -            nameargs += ['**pageargs']
    +            nameargs += ["**pageargs"]
                 self.printer.writeline(
                     "if 'parent' not in context._data or "
    -                "not hasattr(context._data['parent'], '%s'):"
    -                % node.funcname)
    +                "not hasattr(context._data['parent'], '%s'):" % node.funcname
    +            )
                 self.printer.writeline(
    -                "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)))
    +                "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
    +            )
                 self.printer.writeline("\n")
     
         def visitCallNamespaceTag(self, node):
    @@ -908,19 +958,18 @@ class _GenerateRenderMethod(object):
     
         def visitCallTag(self, node):
             self.printer.writeline("def ccall(caller):")
    -        export = ['body']
    +        export = ["body"]
             callable_identifiers = self.identifiers.branch(node, nested=True)
             body_identifiers = callable_identifiers.branch(node, nested=False)
             # we want the 'caller' passed to ccall to be used
             # for the body() function, but for other non-body()
             # <%def>s within <%call> we want the current caller
             # off the call stack (if any)
    -        body_identifiers.add_declared('caller')
    +        body_identifiers.add_declared("caller")
     
             self.identifier_stack.append(body_identifiers)
     
             class DefVisitor(object):
    -
                 def visitDefTag(s, node):
                     s.visitDefOrBase(node)
     
    @@ -942,16 +991,13 @@ class _GenerateRenderMethod(object):
             self.identifier_stack.pop()
     
             bodyargs = node.body_decl.get_argument_expressions()
    -        self.printer.writeline("def body(%s):" % ','.join(bodyargs))
    +        self.printer.writeline("def body(%s):" % ",".join(bodyargs))
     
             # TODO: figure out best way to specify
             # buffering/nonbuffering (at call time would be better)
             buffered = False
             if buffered:
    -            self.printer.writelines(
    -                "context._push_buffer()",
    -                "try:"
    -            )
    +            self.printer.writelines("context._push_buffer()", "try:")
             self.write_variable_declares(body_identifiers)
             self.identifier_stack.append(body_identifiers)
     
    @@ -960,25 +1006,22 @@ class _GenerateRenderMethod(object):
             self.identifier_stack.pop()
     
             self.write_def_finish(node, buffered, False, False, callstack=False)
    -        self.printer.writelines(
    -            None,
    -            "return [%s]" % (','.join(export)),
    -            None
    -        )
    +        self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
     
             self.printer.writelines(
                 # push on caller for nested call
                 "context.caller_stack.nextcaller = "
                 "runtime.Namespace('caller', context, "
                 "callables=ccall(__M_caller))",
    -            "try:")
    +            "try:",
    +        )
             self.printer.start_source(node.lineno)
             self.printer.writelines(
    -            "__M_writer(%s)" % self.create_filter_callable(
    -                [], node.expression, True),
    +            "__M_writer(%s)"
    +            % self.create_filter_callable([], node.expression, True),
                 "finally:",
                 "context.caller_stack.nextcaller = None",
    -            None
    +            None,
             )
     
     
    @@ -996,10 +1039,12 @@ class _Identifiers(object):
                 else:
                     # things that have already been declared
                     # in an enclosing namespace (i.e. names we can just use)
    -                self.declared = set(parent.declared).\
    -                    union([c.name for c in parent.closuredefs.values()]).\
    -                    union(parent.locally_declared).\
    -                    union(parent.argument_declared)
    +                self.declared = (
    +                    set(parent.declared)
    +                    .union([c.name for c in parent.closuredefs.values()])
    +                    .union(parent.locally_declared)
    +                    .union(parent.argument_declared)
    +                )
     
                     # if these identifiers correspond to a "nested"
                     # scope, it means whatever the parent identifiers
    @@ -1043,11 +1088,13 @@ class _Identifiers(object):
                 node.accept_visitor(self)
     
             illegal_names = self.compiler.reserved_names.intersection(
    -            self.locally_declared)
    +            self.locally_declared
    +        )
             if illegal_names:
                 raise exceptions.NameConflictError(
    -                "Reserved words declared in template: %s" %
    -                ", ".join(illegal_names))
    +                "Reserved words declared in template: %s"
    +                % ", ".join(illegal_names)
    +            )
     
         def branch(self, node, **kwargs):
             """create a new Identifiers for a new Node, with
    @@ -1060,24 +1107,28 @@ class _Identifiers(object):
             return set(self.topleveldefs.union(self.closuredefs).values())
     
         def __repr__(self):
    -        return "Identifiers(declared=%r, locally_declared=%r, "\
    -            "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\
    -            "argumentdeclared=%r)" %\
    -            (
    +        return (
    +            "Identifiers(declared=%r, locally_declared=%r, "
    +            "undeclared=%r, topleveldefs=%r, closuredefs=%r, "
    +            "argumentdeclared=%r)"
    +            % (
                     list(self.declared),
                     list(self.locally_declared),
                     list(self.undeclared),
                     [c.name for c in self.topleveldefs.values()],
                     [c.name for c in self.closuredefs.values()],
    -                self.argument_declared)
    +                self.argument_declared,
    +            )
    +        )
     
         def check_declared(self, node):
             """update the state of this Identifiers with the undeclared
                 and declared identifiers of the given node."""
     
             for ident in node.undeclared_identifiers():
    -            if ident != 'context' and\
    -                    ident not in self.declared.union(self.locally_declared):
    +            if ident != "context" and ident not in self.declared.union(
    +                self.locally_declared
    +            ):
                     self.undeclared.add(ident)
             for ident in node.declared_identifiers():
                 self.locally_declared.add(ident)
    @@ -1097,7 +1148,8 @@ class _Identifiers(object):
             if not node.ismodule:
                 self.check_declared(node)
                 self.locally_assigned = self.locally_assigned.union(
    -                node.declared_identifiers())
    +                node.declared_identifiers()
    +            )
     
         def visitNamespaceTag(self, node):
             # only traverse into the sub-elements of a
    @@ -1110,13 +1162,16 @@ class _Identifiers(object):
         def _check_name_exists(self, collection, node):
             existing = collection.get(node.funcname)
             collection[node.funcname] = node
    -        if existing is not None and \
    -                existing is not node and \
    -                (node.is_block or existing.is_block):
    +        if (
    +            existing is not None
    +            and existing is not node
    +            and (node.is_block or existing.is_block)
    +        ):
                 raise exceptions.CompileException(
                     "%%def or %%block named '%s' already "
    -                "exists in this template." %
    -                node.funcname, **node.exception_kwargs)
    +                "exists in this template." % node.funcname,
    +                **node.exception_kwargs
    +            )
     
         def visitDefTag(self, node):
             if node.is_root() and not node.is_anonymous:
    @@ -1125,8 +1180,9 @@ class _Identifiers(object):
                 self._check_name_exists(self.closuredefs, node)
     
             for ident in node.undeclared_identifiers():
    -            if ident != 'context' and \
    -                    ident not in self.declared.union(self.locally_declared):
    +            if ident != "context" and ident not in self.declared.union(
    +                self.locally_declared
    +            ):
                     self.undeclared.add(ident)
     
             # visit defs only one level deep
    @@ -1143,16 +1199,22 @@ class _Identifiers(object):
                 if isinstance(self.node, parsetree.DefTag):
                     raise exceptions.CompileException(
                         "Named block '%s' not allowed inside of def '%s'"
    -                    % (node.name, self.node.name), **node.exception_kwargs)
    -            elif isinstance(self.node,
    -                            (parsetree.CallTag, parsetree.CallNamespaceTag)):
    +                    % (node.name, self.node.name),
    +                    **node.exception_kwargs
    +                )
    +            elif isinstance(
    +                self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
    +            ):
                     raise exceptions.CompileException(
                         "Named block '%s' not allowed inside of <%%call> tag"
    -                    % (node.name, ), **node.exception_kwargs)
    +                    % (node.name,),
    +                    **node.exception_kwargs
    +                )
     
             for ident in node.undeclared_identifiers():
    -            if ident != 'context' and \
    -                    ident not in self.declared.union(self.locally_declared):
    +            if ident != "context" and ident not in self.declared.union(
    +                self.locally_declared
    +            ):
                     self.undeclared.add(ident)
     
             if not node.is_anonymous:
    @@ -1167,8 +1229,9 @@ class _Identifiers(object):
     
         def visitTextTag(self, node):
             for ident in node.undeclared_identifiers():
    -            if ident != 'context' and \
    -                    ident not in self.declared.union(self.locally_declared):
    +            if ident != "context" and ident not in self.declared.union(
    +                self.locally_declared
    +            ):
                     self.undeclared.add(ident)
     
         def visitIncludeTag(self, node):
    @@ -1185,9 +1248,9 @@ class _Identifiers(object):
         def visitCallTag(self, node):
             if node is self.node:
                 for ident in node.undeclared_identifiers():
    -                if ident != 'context' and \
    -                        ident not in self.declared.union(
    -                            self.locally_declared):
    +                if ident != "context" and ident not in self.declared.union(
    +                    self.locally_declared
    +                ):
                         self.undeclared.add(ident)
                 for ident in node.declared_identifiers():
                     self.argument_declared.add(ident)
    @@ -1195,15 +1258,15 @@ class _Identifiers(object):
                     n.accept_visitor(self)
             else:
                 for ident in node.undeclared_identifiers():
    -                if ident != 'context' and \
    -                        ident not in self.declared.union(
    -                            self.locally_declared):
    +                if ident != "context" and ident not in self.declared.union(
    +                    self.locally_declared
    +                ):
                         self.undeclared.add(ident)
     
     
     _FOR_LOOP = re.compile(
    -    r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*'
    -    r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):'
    +    r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
    +    r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
     )
     
     
    @@ -1218,11 +1281,11 @@ def mangle_mako_loop(node, printer):
             match = _FOR_LOOP.match(node.text)
             if match:
                 printer.writelines(
    -                'loop = __M_loop._enter(%s)' % match.group(2),
    -                'try:'
    +                "loop = __M_loop._enter(%s)" % match.group(2),
    +                "try:"
                     # 'with __M_loop(%s) as loop:' % match.group(2)
                 )
    -            text = 'for %s in loop:' % match.group(1)
    +            text = "for %s in loop:" % match.group(1)
             else:
                 raise SyntaxError("Couldn't apply loop context: %s" % node.text)
         else:
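
    # Concrete example of the mangling above: a control line such as
    # "% for x in seq:" whose body references the implicit loop variable is
    # rewritten so the generated module reads roughly (sketch assembled from
    # the writelines above and the matching _exit() in visitControlLine):
    #
    #     loop = __M_loop._enter(seq)
    #     try:
    #         for x in loop:
    #             ...
    #     finally:
    #         loop = __M_loop._exit()
    #
    # _FOR_LOOP's two groups capture the assignment target ("x") and the
    # iterable expression ("seq") from the original statement text.
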
    @@ -1239,7 +1302,7 @@ class LoopVariable(object):
             self.detected = False
     
         def _loop_reference_detected(self, node):
    -        if 'loop' in node.undeclared_identifiers():
    +        if "loop" in node.undeclared_identifiers():
                 self.detected = True
             else:
                 for n in node.get_children():
    diff --git a/server/www/packages/packages-windows/x86/mako/compat.py b/server/www/packages/packages-windows/x86/mako/compat.py
    index a2ab243..9aac98c 100644
    --- a/server/www/packages/packages-windows/x86/mako/compat.py
    +++ b/server/www/packages/packages-windows/x86/mako/compat.py
    @@ -1,34 +1,52 @@
    +# mako/compat.py
    +# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
    +#
    +# This module is part of Mako and is released under
    +# the MIT License: http://www.opensource.org/licenses/mit-license.php
    +
    +import collections
    +import inspect
     import sys
    -import time
     
     py3k = sys.version_info >= (3, 0)
    -py33 = sys.version_info >= (3, 3)
     py2k = sys.version_info < (3,)
    -py26 = sys.version_info >= (2, 6)
     py27 = sys.version_info >= (2, 7)
    -jython = sys.platform.startswith('java')
    -win32 = sys.platform.startswith('win')
    -pypy = hasattr(sys, 'pypy_version_info')
    +jython = sys.platform.startswith("java")
    +win32 = sys.platform.startswith("win")
    +pypy = hasattr(sys, "pypy_version_info")
     
    -if py3k:
    -    # create a "getargspec" from getfullargspec(), which is not deprecated
    -    # in Py3K; getargspec() has started to emit warnings as of Py3.5.
    -    # As of Py3.4, now they are trying to move from getfullargspec()
    -    # to "signature()", but getfullargspec() is not deprecated, so stick
    -    # with that for now.
    +ArgSpec = collections.namedtuple(
    +    "ArgSpec", ["args", "varargs", "keywords", "defaults"]
    +)
     
    -    import collections
    -    ArgSpec = collections.namedtuple(
    -        "ArgSpec",
    -        ["args", "varargs", "keywords", "defaults"])
    -    from inspect import getfullargspec as inspect_getfullargspec
     
    -    def inspect_getargspec(func):
    -        return ArgSpec(
    -            *inspect_getfullargspec(func)[0:4]
    -        )
    -else:
    -    from inspect import getargspec as inspect_getargspec  # noqa
    +def inspect_getargspec(func):
    +    """getargspec based on fully vendored getfullargspec from Python 3.3."""
    +
    +    if inspect.ismethod(func):
    +        func = func.__func__
    +    if not inspect.isfunction(func):
    +        raise TypeError("{!r} is not a Python function".format(func))
    +
    +    co = func.__code__
    +    if not inspect.iscode(co):
    +        raise TypeError("{!r} is not a code object".format(co))
    +
    +    nargs = co.co_argcount
    +    names = co.co_varnames
    +    nkwargs = co.co_kwonlyargcount if py3k else 0
    +    args = list(names[:nargs])
    +
    +    nargs += nkwargs
    +    varargs = None
    +    if co.co_flags & inspect.CO_VARARGS:
    +        varargs = co.co_varnames[nargs]
    +        nargs = nargs + 1
    +    varkw = None
    +    if co.co_flags & inspect.CO_VARKEYWORDS:
    +        varkw = co.co_varnames[nargs]
    +
    +    return ArgSpec(args, varargs, varkw, func.__defaults__)
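
    # A quick sanity check of the vendored inspect_getargspec above,
    # assuming this module is importable as mako.compat (Python 3 shown):
    from mako.compat import inspect_getargspec

    def _sample(a, b=2, *args, **kw):
        pass

    _spec = inspect_getargspec(_sample)
    assert _spec.args == ["a", "b"]
    assert _spec.varargs == "args" and _spec.keywords == "kw"
    assert _spec.defaults == (2,)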
     
     
     if py3k:
    @@ -36,7 +54,8 @@ if py3k:
         import builtins as compat_builtins
         from urllib.parse import quote_plus, unquote_plus
         from html.entities import codepoint2name, name2codepoint
    -    string_types = str,
    +
    +    string_types = (str,)
         binary_type = bytes
         text_type = str
     
    @@ -51,8 +70,10 @@ if py3k:
         def octal(lit):
             return eval("0o" + lit)
     
    +
     else:
         import __builtin__ as compat_builtins  # noqa
    +
         try:
             from cStringIO import StringIO
         except:
    @@ -62,7 +83,8 @@ else:
     
         from urllib import quote_plus, unquote_plus  # noqa
         from htmlentitydefs import codepoint2name, name2codepoint  # noqa
    -    string_types = basestring,  # noqa
    +
    +    string_types = (basestring,)  # noqa
         binary_type = str
         text_type = unicode  # noqa
     
    @@ -76,16 +98,18 @@ else:
             return eval("0" + lit)
     
     
    -if py33:
    +if py3k:
         from importlib import machinery
     
         def load_module(module_id, path):
             return machinery.SourceFileLoader(module_id, path).load_module()
    +
    +
     else:
         import imp
     
         def load_module(module_id, path):
    -        fp = open(path, 'rb')
    +        fp = open(path, "rb")
             try:
                 return imp.load_source(module_id, path, fp)
             finally:
    @@ -93,93 +117,32 @@ else:
     
     
     if py3k:
    +
         def reraise(tp, value, tb=None, cause=None):
             if cause is not None:
                 value.__cause__ = cause
             if value.__traceback__ is not tb:
                 raise value.with_traceback(tb)
             raise value
    +
    +
     else:
    -    exec("def reraise(tp, value, tb=None, cause=None):\n"
    -         "    raise tp, value, tb\n")
    +    exec(
    +        "def reraise(tp, value, tb=None, cause=None):\n"
    +        "    raise tp, value, tb\n"
    +    )
     
     
     def exception_as():
         return sys.exc_info()[1]
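
    # Typical use of the helpers above: capture an exception in one frame,
    # then re-raise it later with the original traceback preserved
    # (assuming this module is importable as mako.compat):
    import sys

    from mako.compat import reraise

    try:
        raise ValueError("boom")
    except ValueError:
        tp, value, tb = sys.exc_info()
    reraise(tp, value, tb)  # raises ValueError with the original traceback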
     
    -try:
    -    import threading
    -    if py3k:
    -        import _thread as thread
    -    else:
    -        import thread
    -except ImportError:
    -    import dummy_threading as threading  # noqa
    -    if py3k:
    -        import _dummy_thread as thread
    -    else:
    -        import dummy_thread as thread  # noqa
     
    -if win32 or jython:
    -    time_func = time.clock
    -else:
    -    time_func = time.time
    -
    -try:
    -    from functools import partial
    -except:
    -    def partial(func, *args, **keywords):
    -        def newfunc(*fargs, **fkeywords):
    -            newkeywords = keywords.copy()
    -            newkeywords.update(fkeywords)
    -            return func(*(args + fargs), **newkeywords)
    -        return newfunc
    -
    -
    -all = all
    -import json  # noqa
    +all = all  # noqa
     
     
     def exception_name(exc):
         return exc.__class__.__name__
     
    -try:
    -    from inspect import CO_VARKEYWORDS, CO_VARARGS
    -
    -    def inspect_func_args(fn):
    -        if py3k:
    -            co = fn.__code__
    -        else:
    -            co = fn.func_code
    -
    -        nargs = co.co_argcount
    -        names = co.co_varnames
    -        args = list(names[:nargs])
    -
    -        varargs = None
    -        if co.co_flags & CO_VARARGS:
    -            varargs = co.co_varnames[nargs]
    -            nargs = nargs + 1
    -        varkw = None
    -        if co.co_flags & CO_VARKEYWORDS:
    -            varkw = co.co_varnames[nargs]
    -
    -        if py3k:
    -            return args, varargs, varkw, fn.__defaults__
    -        else:
    -            return args, varargs, varkw, fn.func_defaults
    -except ImportError:
    -    import inspect
    -
    -    def inspect_func_args(fn):
    -        return inspect.getargspec(fn)
    -
    -if py3k:
    -    def callable(fn):
    -        return hasattr(fn, '__call__')
    -else:
    -    callable = callable
    -
     
     ################################################
     # cross-compatible metaclass implementation
    @@ -187,6 +150,8 @@ else:
     def with_metaclass(meta, base=object):
         """Create a base class with a metaclass."""
         return meta("%sBase" % meta.__name__, (base,), {})
    +
    +
     ################################################
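
    # Usage sketch for with_metaclass: a single class statement that picks
    # up a custom metaclass under both py2 and py3 syntax rules (assuming
    # this module is importable as mako.compat):
    from mako.compat import with_metaclass

    class _Meta(type):
        pass

    class _WithMeta(with_metaclass(_Meta)):
        pass

    assert type(_WithMeta) is _Meta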
     
     
    @@ -195,7 +160,7 @@ def arg_stringname(func_arg):
         In Python3.4 a function's args are
         of _ast.arg type not _ast.name
         """
    -    if hasattr(func_arg, 'arg'):
    +    if hasattr(func_arg, "arg"):
             return func_arg.arg
         else:
             return str(func_arg)
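
    # arg_stringname in action: on py3 the parsed argument node is an
    # ast.arg carrying an .arg attribute, and the helper returns the plain
    # string name (py3 shown, assuming mako.compat is importable):
    import ast

    from mako.compat import arg_stringname

    _node = ast.parse("def f(x): pass").body[0].args.args[0]
    assert arg_stringname(_node) == "x"
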
    diff --git a/server/www/packages/packages-windows/x86/mako/exceptions.py b/server/www/packages/packages-windows/x86/mako/exceptions.py
    index cb6fb3f..ea7b20d 100644
    --- a/server/www/packages/packages-windows/x86/mako/exceptions.py
    +++ b/server/www/packages/packages-windows/x86/mako/exceptions.py
    @@ -1,14 +1,16 @@
     # mako/exceptions.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
    +# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     """exception classes"""
     
    -import traceback
     import sys
    -from mako import util, compat
    +import traceback
    +
    +from mako import compat
    +from mako import util
     
     
     class MakoException(Exception):
    @@ -27,11 +29,10 @@ def _format_filepos(lineno, pos, filename):
     
     
     class CompileException(MakoException):
    -
         def __init__(self, message, source, lineno, pos, filename):
             MakoException.__init__(
    -            self,
    -            message + _format_filepos(lineno, pos, filename))
    +            self, message + _format_filepos(lineno, pos, filename)
    +        )
             self.lineno = lineno
             self.pos = pos
             self.filename = filename
    @@ -39,11 +40,10 @@ class CompileException(MakoException):
     
     
     class SyntaxException(MakoException):
    -
         def __init__(self, message, source, lineno, pos, filename):
             MakoException.__init__(
    -            self,
    -            message + _format_filepos(lineno, pos, filename))
    +            self, message + _format_filepos(lineno, pos, filename)
    +        )
             self.lineno = lineno
             self.pos = pos
             self.filename = filename
    @@ -115,7 +115,7 @@ class RichTraceback(object):
                     # str(Exception(u'\xe6')) work in Python < 2.6
                     self.message = self.error.args[0]
             if not isinstance(self.message, compat.text_type):
    -            self.message = compat.text_type(self.message, 'ascii', 'replace')
    +            self.message = compat.text_type(self.message, "ascii", "replace")
     
         def _get_reformatted_records(self, records):
             for rec in records:
    @@ -151,25 +151,28 @@ class RichTraceback(object):
             source, and code line from that line number of the template."""
     
             import mako.template
    +
             mods = {}
             rawrecords = traceback.extract_tb(trcback)
             new_trcback = []
             for filename, lineno, function, line in rawrecords:
                 if not line:
    -                line = ''
    +                line = ""
                 try:
    -                (line_map, template_lines) = mods[filename]
    +                (line_map, template_lines, template_filename) = mods[filename]
                 except KeyError:
                     try:
                         info = mako.template._get_module_info(filename)
                         module_source = info.code
                         template_source = info.source
    -                    template_filename = info.template_filename or filename
    +                    template_filename = (
    +                        info.template_filename or info.template_uri or filename
    +                    )
                     except KeyError:
                         # A normal .py file (not a Template)
                         if not compat.py3k:
                             try:
    -                            fp = open(filename, 'rb')
    +                            fp = open(filename, "rb")
                                 encoding = util.parse_encoding(fp)
                                 fp.close()
                             except IOError:
    @@ -177,21 +180,33 @@ class RichTraceback(object):
                             if encoding:
                                 line = line.decode(encoding)
                             else:
    -                            line = line.decode('ascii', 'replace')
    -                    new_trcback.append((filename, lineno, function, line,
    -                                        None, None, None, None))
    +                            line = line.decode("ascii", "replace")
    +                    new_trcback.append(
    +                        (
    +                            filename,
    +                            lineno,
    +                            function,
    +                            line,
    +                            None,
    +                            None,
    +                            None,
    +                            None,
    +                        )
    +                    )
                         continue
     
                     template_ln = 1
     
    -                source_map = mako.template.ModuleInfo.\
    -                    get_module_source_metadata(
    -                        module_source, full_line_map=True)
    -                line_map = source_map['full_line_map']
    +                mtm = mako.template.ModuleInfo
    +                source_map = mtm.get_module_source_metadata(
    +                    module_source, full_line_map=True
    +                )
    +                line_map = source_map["full_line_map"]
     
    -                template_lines = [line_ for line_ in
    -                                  template_source.split("\n")]
    -                mods[filename] = (line_map, template_lines)
    +                template_lines = [
    +                    line_ for line_ in template_source.split("\n")
    +                ]
    +                mods[filename] = (line_map, template_lines, template_filename)
     
                 template_ln = line_map[lineno - 1]
     
    @@ -199,9 +214,18 @@ class RichTraceback(object):
                     template_line = template_lines[template_ln - 1]
                 else:
                     template_line = None
    -            new_trcback.append((filename, lineno, function,
    -                                line, template_filename, template_ln,
    -                                template_line, template_source))
    +            new_trcback.append(
    +                (
    +                    filename,
    +                    lineno,
    +                    function,
    +                    line,
    +                    template_filename,
    +                    template_ln,
    +                    template_line,
    +                    template_source,
    +                )
    +            )
             if not self.source:
                 for l in range(len(new_trcback) - 1, 0, -1):
                     if new_trcback[l][5]:
    @@ -212,15 +236,17 @@ class RichTraceback(object):
                     if new_trcback:
                         try:
                             # A normal .py file (not a Template)
    -                        fp = open(new_trcback[-1][0], 'rb')
    +                        fp = open(new_trcback[-1][0], "rb")
                             encoding = util.parse_encoding(fp)
    +                        if compat.py3k and not encoding:
    +                            encoding = "utf-8"
                             fp.seek(0)
                             self.source = fp.read()
                             fp.close()
                             if encoding:
                                 self.source = self.source.decode(encoding)
                         except IOError:
    -                        self.source = ''
    +                        self.source = ""
                         self.lineno = new_trcback[-1][1]
             return new_trcback
     
    @@ -233,7 +259,9 @@ def text_error_template(lookup=None):
     
         """
         import mako.template
    -    return mako.template.Template(r"""
    +
    +    return mako.template.Template(
    +        r"""
     <%page args="error=None, traceback=None"/>
     <%!
         from mako.exceptions import RichTraceback
    @@ -247,7 +275,8 @@ Traceback (most recent call last):
         ${line | trim}
     % endfor
     ${tback.errorname}: ${tback.message}
    -""")
    +"""
    +    )
     
     
     def _install_pygments():
    @@ -259,9 +288,10 @@ def _install_pygments():
     def _install_fallback():
         global syntax_highlight, pygments_html_formatter
         from mako.filters import html_escape
    +
         pygments_html_formatter = None
     
    -    def syntax_highlight(filename='', language=None):
    +    def syntax_highlight(filename="", language=None):
             return html_escape
     
     
    @@ -270,6 +300,8 @@ def _install_highlighting():
             _install_pygments()
         except ImportError:
             _install_fallback()
    +
    +
     _install_highlighting()
     
     
    @@ -287,7 +319,9 @@ def html_error_template():
     
         """
         import mako.template
    -    return mako.template.Template(r"""
    +
    +    return mako.template.Template(
    +        r"""
     <%!
         from mako.exceptions import RichTraceback, syntax_highlight,\
                 pygments_html_formatter
    @@ -390,5 +424,7 @@ def html_error_template():
     
     
     % endif
    -""", output_encoding=sys.getdefaultencoding(),
    -                                  encoding_errors='htmlentityreplace')
    +""",
    +        output_encoding=sys.getdefaultencoding(),
    +        encoding_errors="htmlentityreplace",
    +    )
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/autohandler.py b/server/www/packages/packages-windows/x86/mako/ext/autohandler.py
    index 9d1c911..8b1324e 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/autohandler.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/autohandler.py
    @@ -1,5 +1,5 @@
     # ext/autohandler.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
    +# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -8,29 +8,29 @@
     
     requires that the TemplateLookup class is used with templates.
     
    -usage:
    +usage::
     
    -<%!
    -    from mako.ext.autohandler import autohandler
    -%>
    -<%inherit file="${autohandler(template, context)}"/>
    +    <%!
    +        from mako.ext.autohandler import autohandler
    +    %>
    +    <%inherit file="${autohandler(template, context)}"/>
     
     
    -or with custom autohandler filename:
    +or with custom autohandler filename::
     
    -<%!
    -    from mako.ext.autohandler import autohandler
    -%>
    -<%inherit file="${autohandler(template, context, name='somefilename')}"/>
    +    <%!
    +        from mako.ext.autohandler import autohandler
    +    %>
    +    <%inherit file="${autohandler(template, context, name='somefilename')}"/>
     
     """
     
    -import posixpath
     import os
    +import posixpath
     import re
     
     
    -def autohandler(template, context, name='autohandler'):
    +def autohandler(template, context, name="autohandler"):
         lookup = context.lookup
         _template_uri = template.module._template_uri
         if not lookup.filesystem_checks:
    @@ -39,13 +39,14 @@ def autohandler(template, context, name='autohandler'):
             except KeyError:
                 pass
     
    -    tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
    +    tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
         while len(tokens):
    -        path = '/' + '/'.join(tokens)
    +        path = "/" + "/".join(tokens)
             if path != _template_uri and _file_exists(lookup, path):
                 if not lookup.filesystem_checks:
                     return lookup._uri_cache.setdefault(
    -                    (autohandler, _template_uri, name), path)
    +                    (autohandler, _template_uri, name), path
    +                )
                 else:
                     return path
             if len(tokens) == 1:
    @@ -54,15 +55,16 @@ def autohandler(template, context, name='autohandler'):
     
         if not lookup.filesystem_checks:
             return lookup._uri_cache.setdefault(
    -            (autohandler, _template_uri, name), None)
    +            (autohandler, _template_uri, name), None
    +        )
         else:
             return None
     
     
     def _file_exists(lookup, path):
    -    psub = re.sub(r'^/', '', path)
    +    psub = re.sub(r"^/", "", path)
         for d in lookup.directories:
    -        if os.path.exists(d + '/' + psub):
    +        if os.path.exists(d + "/" + psub):
                 return True
         else:
             return False
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/babelplugin.py b/server/www/packages/packages-windows/x86/mako/ext/babelplugin.py
    index 0b5e84f..76bbc5b 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/babelplugin.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/babelplugin.py
    @@ -1,23 +1,24 @@
     # ext/babelplugin.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
    +# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     """gettext message extraction via Babel: http://babel.edgewall.org/"""
     from babel.messages.extract import extract_python
    +
     from mako.ext.extract import MessageExtractor
     
     
     class BabelMakoExtractor(MessageExtractor):
    -
         def __init__(self, keywords, comment_tags, options):
             self.keywords = keywords
             self.options = options
             self.config = {
    -            'comment-tags': u' '.join(comment_tags),
    -            'encoding': options.get('input_encoding',
    -                                    options.get('encoding', None)),
    +            "comment-tags": u" ".join(comment_tags),
    +            "encoding": options.get(
    +                "input_encoding", options.get("encoding", None)
    +            ),
             }
             super(BabelMakoExtractor, self).__init__()
     
    @@ -25,12 +26,19 @@ class BabelMakoExtractor(MessageExtractor):
             return self.process_file(fileobj)
     
         def process_python(self, code, code_lineno, translator_strings):
    -        comment_tags = self.config['comment-tags']
    -        for lineno, funcname, messages, python_translator_comments \
    -                in extract_python(code,
    -                                  self.keywords, comment_tags, self.options):
    -            yield (code_lineno + (lineno - 1), funcname, messages,
    -                   translator_strings + python_translator_comments)
    +        comment_tags = self.config["comment-tags"]
    +        for (
    +            lineno,
    +            funcname,
    +            messages,
    +            python_translator_comments,
    +        ) in extract_python(code, self.keywords, comment_tags, self.options):
    +            yield (
    +                code_lineno + (lineno - 1),
    +                funcname,
    +                messages,
    +                translator_strings + python_translator_comments,
    +            )
     
     
     def extract(fileobj, keywords, comment_tags, options):
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/beaker_cache.py b/server/www/packages/packages-windows/x86/mako/ext/beaker_cache.py
    index c7c260d..f65ce43 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/beaker_cache.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/beaker_cache.py
    @@ -1,7 +1,12 @@
    +# ext/beaker_cache.py
    +# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
    +#
    +# This module is part of Mako and is released under
    +# the MIT License: http://www.opensource.org/licenses/mit-license.php
    +
     """Provide a :class:`.CacheImpl` for the Beaker caching system."""
     
     from mako import exceptions
    -
     from mako.cache import CacheImpl
     
     try:
    @@ -27,36 +32,37 @@ class BeakerCacheImpl(CacheImpl):
         def __init__(self, cache):
             if not has_beaker:
                 raise exceptions.RuntimeException(
    -                "Can't initialize Beaker plugin; Beaker is not installed.")
    +                "Can't initialize Beaker plugin; Beaker is not installed."
    +            )
             global _beaker_cache
             if _beaker_cache is None:
    -            if 'manager' in cache.template.cache_args:
    -                _beaker_cache = cache.template.cache_args['manager']
    +            if "manager" in cache.template.cache_args:
    +                _beaker_cache = cache.template.cache_args["manager"]
                 else:
                     _beaker_cache = beaker_cache.CacheManager()
             super(BeakerCacheImpl, self).__init__(cache)
     
         def _get_cache(self, **kw):
    -        expiretime = kw.pop('timeout', None)
    -        if 'dir' in kw:
    -            kw['data_dir'] = kw.pop('dir')
    +        expiretime = kw.pop("timeout", None)
    +        if "dir" in kw:
    +            kw["data_dir"] = kw.pop("dir")
             elif self.cache.template.module_directory:
    -            kw['data_dir'] = self.cache.template.module_directory
    +            kw["data_dir"] = self.cache.template.module_directory
     
    -        if 'manager' in kw:
    -            kw.pop('manager')
    +        if "manager" in kw:
    +            kw.pop("manager")
     
    -        if kw.get('type') == 'memcached':
    -            kw['type'] = 'ext:memcached'
    +        if kw.get("type") == "memcached":
    +            kw["type"] = "ext:memcached"
     
    -        if 'region' in kw:
    -            region = kw.pop('region')
    +        if "region" in kw:
    +            region = kw.pop("region")
                 cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
             else:
                 cache = _beaker_cache.get_cache(self.cache.id, **kw)
    -        cache_args = {'starttime': self.cache.starttime}
    +        cache_args = {"starttime": self.cache.starttime}
             if expiretime:
    -            cache_args['expiretime'] = expiretime
    +            cache_args["expiretime"] = expiretime
             return cache, cache_args
     
         def get_or_create(self, key, creation_function, **kw):
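
Note: the _get_cache() hunk above translates Mako's template cache arguments into Beaker's keyword names. A minimal sketch of that translation on plain dicts (sample values are illustrative):

    # Mirrors the kwarg renaming in BeakerCacheImpl._get_cache above.
    kw = {"timeout": 300, "dir": "/tmp/mako", "type": "memcached"}
    expiretime = kw.pop("timeout", None)  # Mako 'timeout' -> Beaker 'expiretime'
    if "dir" in kw:
        kw["data_dir"] = kw.pop("dir")    # 'dir' -> 'data_dir'
    if kw.get("type") == "memcached":
        kw["type"] = "ext:memcached"      # memcached needs the ext: prefix
    print(expiretime, kw)  # 300 {'type': 'ext:memcached', 'data_dir': '/tmp/mako'}
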
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/extract.py b/server/www/packages/packages-windows/x86/mako/ext/extract.py
    index d777ea8..ad2348a 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/extract.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/extract.py
    @@ -1,30 +1,39 @@
    +# ext/extract.py
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
    +#
    +# This module is part of Mako and is released under
    +# the MIT License: http://www.opensource.org/licenses/mit-license.php
    +
     import re
    +
     from mako import compat
     from mako import lexer
     from mako import parsetree
     
     
     class MessageExtractor(object):
    -
         def process_file(self, fileobj):
             template_node = lexer.Lexer(
    -            fileobj.read(),
    -            input_encoding=self.config['encoding']).parse()
    +            fileobj.read(), input_encoding=self.config["encoding"]
    +        ).parse()
             for extracted in self.extract_nodes(template_node.get_children()):
                 yield extracted
     
         def extract_nodes(self, nodes):
             translator_comments = []
             in_translator_comments = False
    -        input_encoding = self.config['encoding'] or 'ascii'
    +        input_encoding = self.config["encoding"] or "ascii"
             comment_tags = list(
    -            filter(None, re.split(r'\s+', self.config['comment-tags'])))
    +            filter(None, re.split(r"\s+", self.config["comment-tags"]))
    +        )
     
             for node in nodes:
                 child_nodes = None
    -            if in_translator_comments and \
    -                    isinstance(node, parsetree.Text) and \
    -                    not node.content.strip():
    +            if (
    +                in_translator_comments
    +                and isinstance(node, parsetree.Text)
    +                and not node.content.strip()
    +            ):
                     # Ignore whitespace within translator comments
                     continue
     
    @@ -32,13 +41,15 @@ class MessageExtractor(object):
                     value = node.text.strip()
                     if in_translator_comments:
                         translator_comments.extend(
    -                        self._split_comment(node.lineno, value))
    +                        self._split_comment(node.lineno, value)
    +                    )
                         continue
                     for comment_tag in comment_tags:
                         if value.startswith(comment_tag):
                             in_translator_comments = True
                             translator_comments.extend(
    -                            self._split_comment(node.lineno, value))
    +                            self._split_comment(node.lineno, value)
    +                        )
                     continue
     
                 if isinstance(node, parsetree.DefTag):
    @@ -69,15 +80,18 @@ class MessageExtractor(object):
                     continue
     
                 # Comments don't apply unless they immediately precede the message
    -            if translator_comments and \
    -                    translator_comments[-1][0] < node.lineno - 1:
    +            if (
    +                translator_comments
    +                and translator_comments[-1][0] < node.lineno - 1
    +            ):
                     translator_comments = []
     
                 translator_strings = [
    -                comment[1] for comment in translator_comments]
    +                comment[1] for comment in translator_comments
    +            ]
     
                 if isinstance(code, compat.text_type):
    -                code = code.encode(input_encoding, 'backslashreplace')
    +                code = code.encode(input_encoding, "backslashreplace")
     
                 used_translator_comments = False
                 # We add extra newline to work around a pybabel bug
    @@ -85,10 +99,11 @@ class MessageExtractor(object):
                 # input string of the input is non-ascii)
                 # Also, because we added it, we have to subtract one from
                 # node.lineno
    -            code = compat.byte_buffer(compat.b('\n') + code)
    +            code = compat.byte_buffer(compat.b("\n") + code)
     
                 for message in self.process_python(
    -                    code, node.lineno - 1, translator_strings):
    +                code, node.lineno - 1, translator_strings
    +            ):
                     yield message
                     used_translator_comments = True
     
    @@ -104,5 +119,7 @@ class MessageExtractor(object):
         def _split_comment(lineno, comment):
             """Return the multiline comment at lineno split into a list of
             comment line numbers and the accompanying comment line"""
    -        return [(lineno + index, line) for index, line in
    -                enumerate(comment.splitlines())]
    +        return [
    +            (lineno + index, line)
    +            for index, line in enumerate(comment.splitlines())
    +        ]
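
Note: _split_comment() above pairs each physical line of a multiline translator comment with its template line number. The same comprehension, run standalone with illustrative values:

    lineno, comment = 10, "first line\nsecond line"
    print([(lineno + i, line) for i, line in enumerate(comment.splitlines())])
    # [(10, 'first line'), (11, 'second line')]
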
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/linguaplugin.py b/server/www/packages/packages-windows/x86/mako/ext/linguaplugin.py
    index 46b0d6a..0f6d165 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/linguaplugin.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/linguaplugin.py
    @@ -1,43 +1,57 @@
    +# ext/linguaplugin.py
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
    +#
    +# This module is part of Mako and is released under
    +# the MIT License: http://www.opensource.org/licenses/mit-license.php
    +
     import io
    +
     from lingua.extractors import Extractor
    -from lingua.extractors import Message
     from lingua.extractors import get_extractor
    -from mako.ext.extract import MessageExtractor
    +from lingua.extractors import Message
    +
     from mako import compat
    +from mako.ext.extract import MessageExtractor
     
     
     class LinguaMakoExtractor(Extractor, MessageExtractor):
     
    -    '''Mako templates'''
    -    extensions = ['.mako']
    -    default_config = {
    -        'encoding': 'utf-8',
    -        'comment-tags': '',
    -    }
    +    """Mako templates"""
    +
    +    extensions = [".mako"]
    +    default_config = {"encoding": "utf-8", "comment-tags": ""}
     
         def __call__(self, filename, options, fileobj=None):
             self.options = options
             self.filename = filename
    -        self.python_extractor = get_extractor('x.py')
    +        self.python_extractor = get_extractor("x.py")
             if fileobj is None:
    -            fileobj = open(filename, 'rb')
    +            fileobj = open(filename, "rb")
             return self.process_file(fileobj)
     
         def process_python(self, code, code_lineno, translator_strings):
             source = code.getvalue().strip()
    -        if source.endswith(compat.b(':')):
    -            if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')):
    -                source = compat.b('') # Ignore try/except and else
    -            elif source.startswith(compat.b('elif')):
    -                source = source[2:] # Replace "elif" with "if"
    -            source += compat.b('pass')
    +        if source.endswith(compat.b(":")):
    +            if source in (
    +                compat.b("try:"),
    +                compat.b("else:"),
    +            ) or source.startswith(compat.b("except")):
    +                source = compat.b("")  # Ignore try/except and else
    +            elif source.startswith(compat.b("elif")):
    +                source = source[2:]  # Replace "elif" with "if"
    +            source += compat.b("pass")
             code = io.BytesIO(source)
             for msg in self.python_extractor(
    -                self.filename, self.options, code, code_lineno -1):
    +            self.filename, self.options, code, code_lineno - 1
    +        ):
                 if translator_strings:
    -                msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
    -                              msg.flags,
    -                              compat.u(' ').join(
    -                                  translator_strings + [msg.comment]),
    -                              msg.tcomment, msg.location)
    +                msg = Message(
    +                    msg.msgctxt,
    +                    msg.msgid,
    +                    msg.msgid_plural,
    +                    msg.flags,
    +                    compat.u(" ").join(translator_strings + [msg.comment]),
    +                    msg.tcomment,
    +                    msg.location,
    +                )
                 yield msg
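
Note: the process_python() hunk above pads bare control-line fragments so that lingua's Python extractor can parse them in isolation. A minimal sketch of that padding, stdlib only (the sample fragment is illustrative):

    source = b"elif x > 1:"
    if source.endswith(b":"):
        if source in (b"try:", b"else:") or source.startswith(b"except"):
            source = b""          # try/except/else carry nothing translatable
        elif source.startswith(b"elif"):
            source = source[2:]   # "elif ..." -> "if ...", parses standalone
        source += b"pass"
    print(source)  # b'if x > 1:pass' -- now a complete statement
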
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/preprocessors.py b/server/www/packages/packages-windows/x86/mako/ext/preprocessors.py
    index 9b700d1..9cc0621 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/preprocessors.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/preprocessors.py
    @@ -1,5 +1,5 @@
     # ext/preprocessors.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -17,4 +17,4 @@ def convert_comments(text):
     
         from mako.ext.preprocessors import convert_comments
         t = Template(..., preprocessor=convert_comments)"""
    -    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
    +    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
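
Note: convert_comments() promotes single-# comment lines to Mako's ## comments before lexing, as the docstring in the hunk shows. A quick before/after (template text is made up):

    import re

    def convert_comments(text):
        # same substitution as the hunk above
        return re.sub(r"(?<=\n)\s*#[^#]", "##", text)

    print(repr(convert_comments("line\n# note\n")))  # 'line\n##note\n'
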
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/pygmentplugin.py b/server/www/packages/packages-windows/x86/mako/ext/pygmentplugin.py
    index 4057caa..943a67a 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/pygmentplugin.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/pygmentplugin.py
    @@ -1,45 +1,73 @@
     # ext/pygmentplugin.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
    -from pygments.lexers.web import \
    -    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
    -from pygments.lexers.agile import PythonLexer, Python3Lexer
    -from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
    -    include, using
    -from pygments.token import \
    -    Text, Comment, Operator, Keyword, Name, String, Other
    -from pygments.formatters.html import HtmlFormatter
     from pygments import highlight
    +from pygments.formatters.html import HtmlFormatter
    +from pygments.lexer import bygroups
    +from pygments.lexer import DelegatingLexer
    +from pygments.lexer import include
    +from pygments.lexer import RegexLexer
    +from pygments.lexer import using
    +from pygments.lexers.agile import Python3Lexer
    +from pygments.lexers.agile import PythonLexer
    +from pygments.lexers.web import CssLexer
    +from pygments.lexers.web import HtmlLexer
    +from pygments.lexers.web import JavascriptLexer
    +from pygments.lexers.web import XmlLexer
    +from pygments.token import Comment
    +from pygments.token import Keyword
    +from pygments.token import Name
    +from pygments.token import Operator
    +from pygments.token import Other
    +from pygments.token import String
    +from pygments.token import Text
    +
     from mako import compat
     
     
     class MakoLexer(RegexLexer):
    -    name = 'Mako'
    -    aliases = ['mako']
    -    filenames = ['*.mao']
    +    name = "Mako"
    +    aliases = ["mako"]
    +    filenames = ["*.mao"]
     
         tokens = {
    -        'root': [
    -            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
    -             bygroups(Text, Comment.Preproc, Keyword, Other)),
    -            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
    -             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
    -            (r'(\s*)(##[^\n]*)(\n|\Z)',
    -             bygroups(Text, Comment.Preproc, Other)),
-            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
    -            (r'(<%)([\w\.\:]+)',
    -             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
    -             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
    -            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
    -            (r'(<%(?:!?))(.*?)(%>)(?s)',
    -             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
    -            (r'(\$\{)(.*?)(\})',
    -             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
    -            (r'''(?sx)
    +        "root": [
    +            (
    +                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
    +                bygroups(Text, Comment.Preproc, Keyword, Other),
    +            ),
    +            (
    +                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
    +                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
    +            ),
    +            (
    +                r"(\s*)(##[^\n]*)(\n|\Z)",
    +                bygroups(Text, Comment.Preproc, Other),
    +            ),
    +            (r"""(?s)<%doc>.*?""", Comment.Preproc),
    +            (
    +                r"(<%)([\w\.\:]+)",
    +                bygroups(Comment.Preproc, Name.Builtin),
    +                "tag",
    +            ),
    +            (
    +                r"()",
    +                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
    +            ),
    +            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
    +            (
    +                r"(?s)(<%(?:!?))(.*?)(%>)",
    +                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
    +            ),
    +            (
    +                r"(\$\{)(.*?)(\})",
    +                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
    +            ),
    +            (
    +                r"""(?sx)
                     (.+?)               # anything, followed by:
                     (?:
                      (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
    @@ -52,76 +80,78 @@ class MakoLexer(RegexLexer):
                      (\\\n) |           # an escaped newline
                      \Z                 # end of string
                     )
    -            ''', bygroups(Other, Operator)),
    -            (r'\s+', Text),
    +            """,
    +                bygroups(Other, Operator),
    +            ),
    +            (r"\s+", Text),
             ],
    -        'ondeftags': [
    -            (r'<%', Comment.Preproc),
    -            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
    -            include('tag'),
    +        "ondeftags": [
    +            (r"<%", Comment.Preproc),
    +            (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
    +            include("tag"),
             ],
    -        'tag': [
    -            (r'((?:\w+)\s*=)\s*(".*?")',
    -             bygroups(Name.Attribute, String)),
    -            (r'/?\s*>', Comment.Preproc, '#pop'),
    -            (r'\s+', Text),
    +        "tag": [
    +            (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
    +            (r"/?\s*>", Comment.Preproc, "#pop"),
    +            (r"\s+", Text),
             ],
    -        'attr': [
    -            ('".*?"', String, '#pop'),
    -            ("'.*?'", String, '#pop'),
    -            (r'[^\s>]+', String, '#pop'),
    +        "attr": [
    +            ('".*?"', String, "#pop"),
    +            ("'.*?'", String, "#pop"),
    +            (r"[^\s>]+", String, "#pop"),
             ],
         }
     
     
     class MakoHtmlLexer(DelegatingLexer):
    -    name = 'HTML+Mako'
    -    aliases = ['html+mako']
    +    name = "HTML+Mako"
    +    aliases = ["html+mako"]
     
         def __init__(self, **options):
    -        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
    -                                            **options)
    +        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
     
     
     class MakoXmlLexer(DelegatingLexer):
    -    name = 'XML+Mako'
    -    aliases = ['xml+mako']
    +    name = "XML+Mako"
    +    aliases = ["xml+mako"]
     
         def __init__(self, **options):
    -        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
    -                                           **options)
    +        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
     
     
     class MakoJavascriptLexer(DelegatingLexer):
    -    name = 'JavaScript+Mako'
    -    aliases = ['js+mako', 'javascript+mako']
    +    name = "JavaScript+Mako"
    +    aliases = ["js+mako", "javascript+mako"]
     
         def __init__(self, **options):
    -        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
    -                                                  MakoLexer, **options)
    +        super(MakoJavascriptLexer, self).__init__(
    +            JavascriptLexer, MakoLexer, **options
    +        )
     
     
     class MakoCssLexer(DelegatingLexer):
    -    name = 'CSS+Mako'
    -    aliases = ['css+mako']
    +    name = "CSS+Mako"
    +    aliases = ["css+mako"]
     
         def __init__(self, **options):
    -        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
    -                                           **options)
    +        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
     
     
    -pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
    -                                        linenos=True)
    +pygments_html_formatter = HtmlFormatter(
    +    cssclass="syntax-highlighted", linenos=True
    +)
     
     
    -def syntax_highlight(filename='', language=None):
    +def syntax_highlight(filename="", language=None):
         mako_lexer = MakoLexer()
         if compat.py3k:
             python_lexer = Python3Lexer()
         else:
             python_lexer = PythonLexer()
    -    if filename.startswith('memory:') or language == 'mako':
    -        return lambda string: highlight(string, mako_lexer,
    -                                        pygments_html_formatter)
    -    return lambda string: highlight(string, python_lexer,
    -                                    pygments_html_formatter)
    +    if filename.startswith("memory:") or language == "mako":
    +        return lambda string: highlight(
    +            string, mako_lexer, pygments_html_formatter
    +        )
    +    return lambda string: highlight(
    +        string, python_lexer, pygments_html_formatter
    +    )
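
Note: the reworked lexers plug into the standard pygments pipeline. A usage sketch, assuming pygments is installed (the template snippet is made up):

    from pygments import highlight
    from pygments.formatters.html import HtmlFormatter

    from mako.ext.pygmentplugin import MakoLexer

    src = "% for item in items:\n    ${item | h}\n% endfor\n"
    # Render as an HTML table with line numbers, using the same formatter
    # settings as pygments_html_formatter above.
    print(highlight(src, MakoLexer(),
                    HtmlFormatter(cssclass="syntax-highlighted", linenos=True)))
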
    diff --git a/server/www/packages/packages-windows/x86/mako/ext/turbogears.py b/server/www/packages/packages-windows/x86/mako/ext/turbogears.py
    index eaa2d78..722a6b4 100644
    --- a/server/www/packages/packages-windows/x86/mako/ext/turbogears.py
    +++ b/server/www/packages/packages-windows/x86/mako/ext/turbogears.py
    @@ -1,5 +1,5 @@
     # ext/turbogears.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -13,7 +13,7 @@ class TGPlugin(object):
     
         """TurboGears compatible Template Plugin."""
     
    -    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
    +    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
             self.extra_vars_func = extra_vars_func
             self.extension = extension
             if not options:
    @@ -22,9 +22,9 @@ class TGPlugin(object):
             # Pull the options out and initialize the lookup
             lookup_options = {}
             for k, v in options.items():
    -            if k.startswith('mako.'):
    +            if k.startswith("mako."):
                     lookup_options[k[5:]] = v
    -            elif k in ['directories', 'filesystem_checks', 'module_directory']:
    +            elif k in ["directories", "filesystem_checks", "module_directory"]:
                     lookup_options[k] = v
             self.lookup = TemplateLookup(**lookup_options)
     
    @@ -40,14 +40,17 @@ class TGPlugin(object):
             if template_string is not None:
                 return Template(template_string, **self.tmpl_options)
             # Translate TG dot notation to normal / template path
    -        if '/' not in templatename:
    -            templatename = '/' + templatename.replace('.', '/') + '.' +\
    -                self.extension
    +        if "/" not in templatename:
    +            templatename = (
    +                "/" + templatename.replace(".", "/") + "." + self.extension
    +            )
     
             # Lookup template
             return self.lookup.get_template(templatename)
     
    -    def render(self, info, format="html", fragment=False, template=None):
    +    def render(
    +        self, info, format="html", fragment=False, template=None  # noqa
    +    ):
             if isinstance(template, compat.string_types):
                 template = self.load_template(template)
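
Note: load_template() above translates TurboGears dot notation into a lookup path before consulting the TemplateLookup. The translation on its own (sample name is illustrative):

    templatename, extension = "site.pages.index", "mak"
    if "/" not in templatename:
        templatename = "/" + templatename.replace(".", "/") + "." + extension
    print(templatename)  # /site/pages/index.mak
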
     
    diff --git a/server/www/packages/packages-windows/x86/mako/filters.py b/server/www/packages/packages-windows/x86/mako/filters.py
    index c082690..0ae33ff 100644
    --- a/server/www/packages/packages-windows/x86/mako/filters.py
    +++ b/server/www/packages/packages-windows/x86/mako/filters.py
    @@ -1,24 +1,25 @@
     # mako/filters.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     
    -import re
     import codecs
    -
    -from mako.compat import quote_plus, unquote_plus, codepoint2name, \
    -    name2codepoint
    +import re
     
     from mako import compat
    +from mako.compat import codepoint2name
    +from mako.compat import name2codepoint
    +from mako.compat import quote_plus
    +from mako.compat import unquote_plus
     
     xml_escapes = {
-    '&': '&amp;',
-    '>': '&gt;',
-    '<': '&lt;',
-    '"': '&#34;',   # also &quot; in html-only
-    "'": '&#39;'    # also &apos; in html-only
+    "&": "&amp;",
+    ">": "&gt;",
+    "<": "&lt;",
+    '"': "&#34;",  # also &quot; in html-only
+    "'": "&#39;",  # also &apos; in html-only
     }
     
     # XXX: " is valid in HTML and XML
    @@ -37,6 +38,7 @@ def legacy_html_escape(s):
     
     try:
         import markupsafe
    +
         html_escape = markupsafe.escape
     except ImportError:
         html_escape = legacy_html_escape
    @@ -69,7 +71,6 @@ def trim(string):
     
     
     class Decode(object):
    -
         def __getattr__(self, key):
             def decode(x):
                 if isinstance(x, compat.text_type):
    @@ -78,24 +79,31 @@ class Decode(object):
                     return decode(str(x))
                 else:
                     return compat.text_type(x, encoding=key)
    +
             return decode
    +
    +
     decode = Decode()
     
     
    -_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
    +_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
     
     
     def is_ascii_str(text):
         return isinstance(text, str) and _ASCII_re.match(text)
     
    +
     ################################################################
     
     
     class XMLEntityEscaper(object):
    -
         def __init__(self, codepoint2name, name2codepoint):
    -        self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
    -                                      for c, n in codepoint2name.items()])
    +        self.codepoint2entity = dict(
    +            [
    +                (c, compat.text_type("&%s;" % n))
    +                for c, n in codepoint2name.items()
    +            ]
    +        )
             self.name2codepoint = name2codepoint
     
         def escape_entities(self, text):
    @@ -110,7 +118,7 @@ class XMLEntityEscaper(object):
             try:
                 return self.codepoint2entity[codepoint]
             except (KeyError, IndexError):
    -            return '&#x%X;' % codepoint
    +            return "&#x%X;" % codepoint
     
         __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
     
    @@ -123,19 +131,22 @@ class XMLEntityEscaper(object):
     
             The return value is guaranteed to be ASCII.
             """
    -        return self.__escapable.sub(self.__escape, compat.text_type(text)
    -                                    ).encode('ascii')
    +        return self.__escapable.sub(
    +            self.__escape, compat.text_type(text)
    +        ).encode("ascii")
     
         # XXX: This regexp will not match all valid XML entity names__.
         # (It punts on details involving involving CombiningChars and Extenders.)
         #
         # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
    -    __characterrefs = re.compile(r'''& (?:
    +    __characterrefs = re.compile(
    +        r"""& (?:
                                               \#(\d+)
                                               | \#x([\da-f]+)
                                               | ( (?!\d) [:\w] [-.:\w]+ )
    -                                          ) ;''',
    -                                 re.X | re.UNICODE)
    +                                          ) ;""",
    +        re.X | re.UNICODE,
    +    )
     
         def __unescape(self, m):
             dval, hval, name = m.groups()
    @@ -144,7 +155,7 @@ class XMLEntityEscaper(object):
             elif hval:
                 codepoint = int(hval, 16)
             else:
    -            codepoint = self.name2codepoint.get(name, 0xfffd)
    +            codepoint = self.name2codepoint.get(name, 0xFFFD)
                 # U+FFFD = "REPLACEMENT CHARACTER"
             if codepoint < 128:
                 return chr(codepoint)
    @@ -168,42 +179,41 @@ html_entities_unescape = _html_entities_escaper.unescape
     def htmlentityreplace_errors(ex):
         """An encoding error handler.
     
    -    This python `codecs`_ error handler replaces unencodable
    +    This python codecs error handler replaces unencodable
         characters with HTML entities, or, if no HTML entity exists for
    -    the character, XML character references.
    +    the character, XML character references::
     
    -    >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
-    'The cost was &euro;12.'
    +        >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+        'The cost was &euro;12.'
         """
         if isinstance(ex, UnicodeEncodeError):
             # Handle encoding errors
    -        bad_text = ex.object[ex.start:ex.end]
    +        bad_text = ex.object[ex.start : ex.end]
             text = _html_entities_escaper.escape(bad_text)
             return (compat.text_type(text), ex.end)
         raise ex
     
    -codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
    +
    +codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
     
     
     # TODO: options to make this dynamic per-compilation will be added in a later
     # release
     DEFAULT_ESCAPES = {
    -    'x': 'filters.xml_escape',
    -    'h': 'filters.html_escape',
    -    'u': 'filters.url_escape',
    -    'trim': 'filters.trim',
    -    'entity': 'filters.html_entities_escape',
    -    'unicode': 'unicode',
    -    'decode': 'decode',
    -    'str': 'str',
    -    'n': 'n'
    +    "x": "filters.xml_escape",
    +    "h": "filters.html_escape",
    +    "u": "filters.url_escape",
    +    "trim": "filters.trim",
    +    "entity": "filters.html_entities_escape",
    +    "unicode": "unicode",
    +    "decode": "decode",
    +    "str": "str",
    +    "n": "n",
     }
     
     if compat.py3k:
    -    DEFAULT_ESCAPES.update({
    -        'unicode': 'str'
    -    })
    +    DEFAULT_ESCAPES.update({"unicode": "str"})
     
     NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
    -NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
    -NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
    +NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape"
    +NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape"
    diff --git a/server/www/packages/packages-windows/x86/mako/lexer.py b/server/www/packages/packages-windows/x86/mako/lexer.py
    index cf4187f..6226e26 100644
    --- a/server/www/packages/packages-windows/x86/mako/lexer.py
    +++ b/server/www/packages/packages-windows/x86/mako/lexer.py
    @@ -1,24 +1,31 @@
     # mako/lexer.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     """provides the Lexer class for parsing template strings into parse trees."""
     
    -import re
     import codecs
    -from mako import parsetree, exceptions, compat
    +import re
    +
    +from mako import compat
    +from mako import exceptions
    +from mako import parsetree
     from mako.pygen import adjust_whitespace
     
     _regexp_cache = {}
     
     
     class Lexer(object):
    -
    -    def __init__(self, text, filename=None,
    -                 disable_unicode=False,
    -                 input_encoding=None, preprocessor=None):
    +    def __init__(
    +        self,
    +        text,
    +        filename=None,
    +        disable_unicode=False,
    +        input_encoding=None,
    +        preprocessor=None,
    +    ):
             self.text = text
             self.filename = filename
             self.template = parsetree.TemplateNode(self.filename)
    @@ -34,22 +41,24 @@ class Lexer(object):
     
             if compat.py3k and disable_unicode:
                 raise exceptions.UnsupportedError(
    -                "Mako for Python 3 does not "
    -                "support disabling Unicode")
    +                "Mako for Python 3 does not " "support disabling Unicode"
    +            )
     
             if preprocessor is None:
                 self.preprocessor = []
    -        elif not hasattr(preprocessor, '__iter__'):
    +        elif not hasattr(preprocessor, "__iter__"):
                 self.preprocessor = [preprocessor]
             else:
                 self.preprocessor = preprocessor
     
         @property
         def exception_kwargs(self):
    -        return {'source': self.text,
    -                'lineno': self.matched_lineno,
    -                'pos': self.matched_charpos,
    -                'filename': self.filename}
    +        return {
    +            "source": self.text,
    +            "lineno": self.matched_lineno,
    +            "pos": self.matched_charpos,
    +            "filename": self.filename,
    +        }
     
         def match(self, regexp, flags=None):
             """compile the given regexp, cache the reg, and call match_reg()."""
    @@ -83,9 +92,9 @@ class Lexer(object):
                 else:
                     self.match_position = end
                 self.matched_lineno = self.lineno
    -            lines = re.findall(r"\n", self.text[mp:self.match_position])
    +            lines = re.findall(r"\n", self.text[mp : self.match_position])
                 cp = mp - 1
    -            while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
    +            while cp >= 0 and cp < self.textlength and self.text[cp] != "\n":
                     cp -= 1
                 self.matched_charpos = mp - cp
                 self.lineno += len(lines)
    @@ -97,46 +106,49 @@ class Lexer(object):
     
         def parse_until_text(self, watch_nesting, *text):
             startpos = self.match_position
    -        text_re = r'|'.join(text)
    +        text_re = r"|".join(text)
             brace_level = 0
             paren_level = 0
             bracket_level = 0
             while True:
    -            match = self.match(r'#.*\n')
    +            match = self.match(r"#.*\n")
                 if match:
                     continue
    -            match = self.match(r'(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1',
    -                               re.S)
    +            match = self.match(
    +                r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S
    +            )
                 if match:
                     continue
    -            match = self.match(r'(%s)' % text_re)
    -            if match and not (watch_nesting
    -                              and (brace_level > 0 or paren_level > 0
    -                                   or bracket_level > 0)):
    -                return \
    -                    self.text[startpos:
    -                              self.match_position - len(match.group(1))],\
    -                    match.group(1)
    +            match = self.match(r"(%s)" % text_re)
    +            if match and not (
    +                watch_nesting
    +                and (brace_level > 0 or paren_level > 0 or bracket_level > 0)
    +            ):
    +                return (
    +                    self.text[
    +                        startpos : self.match_position - len(match.group(1))
    +                    ],
    +                    match.group(1),
    +                )
                 elif not match:
                     match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
                 if match:
    -                brace_level += match.group(1).count('{')
    -                brace_level -= match.group(1).count('}')
    -                paren_level += match.group(1).count('(')
    -                paren_level -= match.group(1).count(')')
    -                bracket_level += match.group(1).count('[')
    -                bracket_level -= match.group(1).count(']')
    +                brace_level += match.group(1).count("{")
    +                brace_level -= match.group(1).count("}")
    +                paren_level += match.group(1).count("(")
    +                paren_level -= match.group(1).count(")")
    +                bracket_level += match.group(1).count("[")
    +                bracket_level -= match.group(1).count("]")
                     continue
                 raise exceptions.SyntaxException(
    -                "Expected: %s" %
    -                ','.join(text),
    -                **self.exception_kwargs)
    +                "Expected: %s" % ",".join(text), **self.exception_kwargs
    +            )
     
         def append_node(self, nodecls, *args, **kwargs):
    -        kwargs.setdefault('source', self.text)
    -        kwargs.setdefault('lineno', self.matched_lineno)
    -        kwargs.setdefault('pos', self.matched_charpos)
    -        kwargs['filename'] = self.filename
    +        kwargs.setdefault("source", self.text)
    +        kwargs.setdefault("lineno", self.matched_lineno)
    +        kwargs.setdefault("pos", self.matched_charpos)
    +        kwargs["filename"] = self.filename
             node = nodecls(*args, **kwargs)
             if len(self.tag):
                 self.tag[-1].nodes.append(node)
    @@ -149,8 +161,10 @@ class Lexer(object):
             if self.control_line:
                 control_frame = self.control_line[-1]
                 control_frame.nodes.append(node)
    -            if not (isinstance(node, parsetree.ControlLine) and
    -                    control_frame.is_ternary(node.keyword)):
    +            if not (
    +                isinstance(node, parsetree.ControlLine)
    +                and control_frame.is_ternary(node.keyword)
    +            ):
                     if self.ternary_stack and self.ternary_stack[-1]:
                         self.ternary_stack[-1][-1].nodes.append(node)
             if isinstance(node, parsetree.Tag):
    @@ -164,17 +178,20 @@ class Lexer(object):
                 elif node.is_primary:
                     self.control_line.append(node)
                     self.ternary_stack.append([])
    -            elif self.control_line and \
    -                    self.control_line[-1].is_ternary(node.keyword):
    +            elif self.control_line and self.control_line[-1].is_ternary(
    +                node.keyword
    +            ):
                     self.ternary_stack[-1].append(node)
    -            elif self.control_line and \
    -                    not self.control_line[-1].is_ternary(node.keyword):
    +            elif self.control_line and not self.control_line[-1].is_ternary(
    +                node.keyword
    +            ):
                     raise exceptions.SyntaxException(
    -                    "Keyword '%s' not a legal ternary for keyword '%s'" %
    -                    (node.keyword, self.control_line[-1].keyword),
    -                    **self.exception_kwargs)
    +                    "Keyword '%s' not a legal ternary for keyword '%s'"
    +                    % (node.keyword, self.control_line[-1].keyword),
    +                    **self.exception_kwargs
    +                )
     
    -    _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
    +    _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n")
     
         def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
             """given string/unicode or bytes/string, determine encoding
    @@ -184,44 +201,48 @@ class Lexer(object):
             """
             if isinstance(text, compat.text_type):
                 m = self._coding_re.match(text)
    -            encoding = m and m.group(1) or known_encoding or 'ascii'
    +            encoding = m and m.group(1) or known_encoding or "utf-8"
                 return encoding, text
     
             if text.startswith(codecs.BOM_UTF8):
    -            text = text[len(codecs.BOM_UTF8):]
    -            parsed_encoding = 'utf-8'
    -            m = self._coding_re.match(text.decode('utf-8', 'ignore'))
    -            if m is not None and m.group(1) != 'utf-8':
    +            text = text[len(codecs.BOM_UTF8) :]
    +            parsed_encoding = "utf-8"
    +            m = self._coding_re.match(text.decode("utf-8", "ignore"))
    +            if m is not None and m.group(1) != "utf-8":
                     raise exceptions.CompileException(
                         "Found utf-8 BOM in file, with conflicting "
                         "magic encoding comment of '%s'" % m.group(1),
    -                    text.decode('utf-8', 'ignore'),
    -                    0, 0, filename)
    +                    text.decode("utf-8", "ignore"),
    +                    0,
    +                    0,
    +                    filename,
    +                )
             else:
    -            m = self._coding_re.match(text.decode('utf-8', 'ignore'))
    +            m = self._coding_re.match(text.decode("utf-8", "ignore"))
                 if m:
                     parsed_encoding = m.group(1)
                 else:
    -                parsed_encoding = known_encoding or 'ascii'
    +                parsed_encoding = known_encoding or "utf-8"
     
             if decode_raw:
                 try:
                     text = text.decode(parsed_encoding)
                 except UnicodeDecodeError:
                     raise exceptions.CompileException(
    -                    "Unicode decode operation of encoding '%s' failed" %
    -                    parsed_encoding,
    -                    text.decode('utf-8', 'ignore'),
    -                    0, 0, filename)
    +                    "Unicode decode operation of encoding '%s' failed"
    +                    % parsed_encoding,
    +                    text.decode("utf-8", "ignore"),
    +                    0,
    +                    0,
    +                    filename,
    +                )
     
             return parsed_encoding, text
     
         def parse(self):
             self.encoding, self.text = self.decode_raw_stream(
    -            self.text,
    -            not self.disable_unicode,
    -            self.encoding,
    -            self.filename)
    +            self.text, not self.disable_unicode, self.encoding, self.filename
    +        )
     
             for preproc in self.preprocessor:
                 self.text = preproc(self.text)
    @@ -232,7 +253,7 @@ class Lexer(object):
     
             self.textlength = len(self.text)
     
    -        while (True):
    +        while True:
                 if self.match_position > self.textlength:
                     break
     
    @@ -258,20 +279,24 @@ class Lexer(object):
                 raise exceptions.CompileException("assertion failed")
     
             if len(self.tag):
    -            raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
    -                                             self.tag[-1].keyword,
    -                                             **self.exception_kwargs)
    +            raise exceptions.SyntaxException(
    +                "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
    +                **self.exception_kwargs
    +            )
             if len(self.control_line):
                 raise exceptions.SyntaxException(
    -                "Unterminated control keyword: '%s'" %
    -                self.control_line[-1].keyword,
    +                "Unterminated control keyword: '%s'"
    +                % self.control_line[-1].keyword,
                     self.text,
                     self.control_line[-1].lineno,
    -                self.control_line[-1].pos, self.filename)
    +                self.control_line[-1].pos,
    +                self.filename,
    +            )
             return self.template
     
         def match_tag_start(self):
    -        match = self.match(r'''
    +        match = self.match(
    +            r"""
                 \<%     # opening tag
     
                 ([\w\.\:]+)   # keyword
    @@ -283,9 +308,9 @@ class Lexer(object):
     
                 (/)?>   # closing
     
    -            ''',
    -
    -                           re.I | re.S | re.X)
    +            """,
    +            re.I | re.S | re.X,
    +        )
     
             if match:
                 keyword, attr, isend = match.groups()
    @@ -293,22 +318,23 @@ class Lexer(object):
                 attributes = {}
                 if attr:
                     for att in re.findall(
    -                        r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
    +                    r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
    +                ):
                         key, val1, val2 = att
                         text = val1 or val2
    -                    text = text.replace('\r\n', '\n')
    +                    text = text.replace("\r\n", "\n")
                         attributes[key] = text
                 self.append_node(parsetree.Tag, keyword, attributes)
                 if isend:
                     self.tag.pop()
                 else:
    -                if keyword == 'text':
-                    match = self.match(r'(.*?)(?=\</%text>)', re.S)
    +                if keyword == "text":
    +                    match = self.match(r"(.*?)(?=\)", re.S)
                         if not match:
                             raise exceptions.SyntaxException(
    -                            "Unclosed tag: <%%%s>" %
    -                            self.tag[-1].keyword,
    -                            **self.exception_kwargs)
    +                            "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
    +                            **self.exception_kwargs
    +                        )
                         self.append_node(parsetree.Text, match.group(1))
                         return self.match_tag_end()
                 return True
    @@ -316,25 +342,27 @@ class Lexer(object):
                 return False
     
         def match_tag_end(self):
-        match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
+        match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
             if match:
                 if not len(self.tag):
                     raise exceptions.SyntaxException(
    -                    "Closing tag without opening tag: " %
    -                    match.group(1),
    -                    **self.exception_kwargs)
    +                    "Closing tag without opening tag: "
    +                    % match.group(1),
    +                    **self.exception_kwargs
    +                )
                 elif self.tag[-1].keyword != match.group(1):
                     raise exceptions.SyntaxException(
    -                    "Closing tag  does not match tag: <%%%s>" %
    -                    (match.group(1), self.tag[-1].keyword),
    -                    **self.exception_kwargs)
    +                    "Closing tag  does not match tag: <%%%s>"
    +                    % (match.group(1), self.tag[-1].keyword),
    +                    **self.exception_kwargs
    +                )
                 self.tag.pop()
                 return True
             else:
                 return False
     
         def match_end(self):
    -        match = self.match(r'\Z', re.S)
    +        match = self.match(r"\Z", re.S)
             if match:
                 string = match.group()
                 if string:
    @@ -345,7 +373,8 @@ class Lexer(object):
                 return False
     
         def match_text(self):
    -        match = self.match(r"""
    +        match = self.match(
    +            r"""
                     (.*?)         # anything, followed by:
                     (
                      (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
    @@ -360,7 +389,9 @@ class Lexer(object):
                      (\\\r?\n)    # an escaped newline  - throw away
                      |
                      \Z           # end of string
    -                )""", re.X | re.S)
    +                )""",
    +            re.X | re.S,
    +        )
     
             if match:
                 text = match.group(1)
    @@ -374,14 +405,17 @@ class Lexer(object):
             match = self.match(r"<%(!)?")
             if match:
                 line, pos = self.matched_lineno, self.matched_charpos
    -            text, end = self.parse_until_text(False, r'%>')
    +            text, end = self.parse_until_text(False, r"%>")
                 # the trailing newline helps
                 # compiler.parse() not complain about indentation
                 text = adjust_whitespace(text) + "\n"
                 self.append_node(
                     parsetree.Code,
                     text,
    -                match.group(1) == '!', lineno=line, pos=pos)
    +                match.group(1) == "!",
    +                lineno=line,
    +                pos=pos,
    +            )
                 return True
             else:
                 return False
    @@ -390,16 +424,19 @@ class Lexer(object):
             match = self.match(r"\${")
             if match:
                 line, pos = self.matched_lineno, self.matched_charpos
    -            text, end = self.parse_until_text(True, r'\|', r'}')
    -            if end == '|':
    -                escapes, end = self.parse_until_text(True, r'}')
    +            text, end = self.parse_until_text(True, r"\|", r"}")
    +            if end == "|":
    +                escapes, end = self.parse_until_text(True, r"}")
                 else:
                     escapes = ""
    -            text = text.replace('\r\n', '\n')
    +            text = text.replace("\r\n", "\n")
                 self.append_node(
                     parsetree.Expression,
    -                text, escapes.strip(),
    -                lineno=line, pos=pos)
    +                text,
    +                escapes.strip(),
    +                lineno=line,
    +                pos=pos,
    +            )
                 return True
             else:
                 return False
    @@ -407,31 +444,35 @@ class Lexer(object):
         def match_control_line(self):
             match = self.match(
                 r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
    -            r"(?:\r?\n|\Z)", re.M)
    +            r"(?:\r?\n|\Z)",
    +            re.M,
    +        )
             if match:
                 operator = match.group(1)
                 text = match.group(2)
    -            if operator == '%':
    -                m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
    +            if operator == "%":
    +                m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
                     if not m2:
                         raise exceptions.SyntaxException(
    -                        "Invalid control line: '%s'" %
    -                        text,
    -                        **self.exception_kwargs)
    +                        "Invalid control line: '%s'" % text,
    +                        **self.exception_kwargs
    +                    )
                     isend, keyword = m2.group(1, 2)
    -                isend = (isend is not None)
    +                isend = isend is not None
     
                     if isend:
                         if not len(self.control_line):
                             raise exceptions.SyntaxException(
    -                            "No starting keyword '%s' for '%s'" %
    -                            (keyword, text),
    -                            **self.exception_kwargs)
    +                            "No starting keyword '%s' for '%s'"
    +                            % (keyword, text),
    +                            **self.exception_kwargs
    +                        )
                         elif self.control_line[-1].keyword != keyword:
                             raise exceptions.SyntaxException(
    -                            "Keyword '%s' doesn't match keyword '%s'" %
    -                            (text, self.control_line[-1].keyword),
    -                            **self.exception_kwargs)
    +                            "Keyword '%s' doesn't match keyword '%s'"
    +                            % (text, self.control_line[-1].keyword),
    +                            **self.exception_kwargs
    +                        )
                     self.append_node(parsetree.ControlLine, keyword, isend, text)
                 else:
                     self.append_node(parsetree.Comment, text)
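
Note: besides the black reformat, the hunks above change decode_raw_stream()'s fallback encoding from ascii to utf-8. A parse sketch showing the effect (template string is made up):

    from mako.lexer import Lexer

    # Bytes with no magic-coding comment now decode as utf-8 by default;
    # under the old ascii fallback this input raised a CompileException.
    tree = Lexer("héllo ${name}!".encode("utf-8"), filename="x.mako").parse()
    print(tree)  # TemplateNode with Text and Expression children
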
    diff --git a/server/www/packages/packages-windows/x86/mako/lookup.py b/server/www/packages/packages-windows/x86/mako/lookup.py
    index 0d3f304..476326d 100644
    --- a/server/www/packages/packages-windows/x86/mako/lookup.py
    +++ b/server/www/packages/packages-windows/x86/mako/lookup.py
    @@ -1,14 +1,16 @@
     # mako/lookup.py
-# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2020 the Mako authors and contributors
+# <see AUTHORS file>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     import os
    -import stat
     import posixpath
     import re
    -from mako import exceptions, util
    +import stat
    +
    +from mako import exceptions
    +from mako import util
     from mako.template import Template
     
     try:
    @@ -151,41 +153,41 @@ class TemplateLookup(TemplateCollection):
     
         """
     
    -    def __init__(self,
    -                 directories=None,
    -                 module_directory=None,
    -                 filesystem_checks=True,
    -                 collection_size=-1,
    -                 format_exceptions=False,
    -                 error_handler=None,
    -                 disable_unicode=False,
    -                 bytestring_passthrough=False,
    -                 output_encoding=None,
    -                 encoding_errors='strict',
    +    def __init__(
    +        self,
    +        directories=None,
    +        module_directory=None,
    +        filesystem_checks=True,
    +        collection_size=-1,
    +        format_exceptions=False,
    +        error_handler=None,
    +        disable_unicode=False,
    +        bytestring_passthrough=False,
    +        output_encoding=None,
    +        encoding_errors="strict",
    +        cache_args=None,
    +        cache_impl="beaker",
    +        cache_enabled=True,
    +        cache_type=None,
    +        cache_dir=None,
    +        cache_url=None,
    +        modulename_callable=None,
    +        module_writer=None,
    +        default_filters=None,
    +        buffer_filters=(),
    +        strict_undefined=False,
    +        imports=None,
    +        future_imports=None,
    +        enable_loop=True,
    +        input_encoding=None,
    +        preprocessor=None,
    +        lexer_cls=None,
    +        include_error_handler=None,
    +    ):
     
    -                 cache_args=None,
    -                 cache_impl='beaker',
    -                 cache_enabled=True,
    -                 cache_type=None,
    -                 cache_dir=None,
    -                 cache_url=None,
    -
    -                 modulename_callable=None,
    -                 module_writer=None,
    -                 default_filters=None,
    -                 buffer_filters=(),
    -                 strict_undefined=False,
    -                 imports=None,
    -                 future_imports=None,
    -                 enable_loop=True,
    -                 input_encoding=None,
    -                 preprocessor=None,
    -                 lexer_cls=None,
    -                 include_error_handler=None):
    -
    -        self.directories = [posixpath.normpath(d) for d in
    -                            util.to_list(directories, ())
    -                            ]
    +        self.directories = [
    +            posixpath.normpath(d) for d in util.to_list(directories, ())
    +        ]
             self.module_directory = module_directory
             self.modulename_callable = modulename_callable
             self.filesystem_checks = filesystem_checks
    @@ -195,34 +197,34 @@ class TemplateLookup(TemplateCollection):
                 cache_args = {}
             # transfer deprecated cache_* args
             if cache_dir:
    -            cache_args.setdefault('dir', cache_dir)
    +            cache_args.setdefault("dir", cache_dir)
             if cache_url:
    -            cache_args.setdefault('url', cache_url)
    +            cache_args.setdefault("url", cache_url)
             if cache_type:
    -            cache_args.setdefault('type', cache_type)
    +            cache_args.setdefault("type", cache_type)
     
             self.template_args = {
    -            'format_exceptions': format_exceptions,
    -            'error_handler': error_handler,
    -            'include_error_handler': include_error_handler,
    -            'disable_unicode': disable_unicode,
    -            'bytestring_passthrough': bytestring_passthrough,
    -            'output_encoding': output_encoding,
    -            'cache_impl': cache_impl,
    -            'encoding_errors': encoding_errors,
    -            'input_encoding': input_encoding,
    -            'module_directory': module_directory,
    -            'module_writer': module_writer,
    -            'cache_args': cache_args,
    -            'cache_enabled': cache_enabled,
    -            'default_filters': default_filters,
    -            'buffer_filters': buffer_filters,
    -            'strict_undefined': strict_undefined,
    -            'imports': imports,
    -            'future_imports': future_imports,
    -            'enable_loop': enable_loop,
    -            'preprocessor': preprocessor,
    -            'lexer_cls': lexer_cls
    +            "format_exceptions": format_exceptions,
    +            "error_handler": error_handler,
    +            "include_error_handler": include_error_handler,
    +            "disable_unicode": disable_unicode,
    +            "bytestring_passthrough": bytestring_passthrough,
    +            "output_encoding": output_encoding,
    +            "cache_impl": cache_impl,
    +            "encoding_errors": encoding_errors,
    +            "input_encoding": input_encoding,
    +            "module_directory": module_directory,
    +            "module_writer": module_writer,
    +            "cache_args": cache_args,
    +            "cache_enabled": cache_enabled,
    +            "default_filters": default_filters,
    +            "buffer_filters": buffer_filters,
    +            "strict_undefined": strict_undefined,
    +            "imports": imports,
    +            "future_imports": future_imports,
    +            "enable_loop": enable_loop,
    +            "preprocessor": preprocessor,
    +            "lexer_cls": lexer_cls,
             }
     
             if collection_size == -1:
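
Note: the quote-style change above touches the shim that folds the deprecated cache_dir/cache_url/cache_type keywords into cache_args. A small sketch of the setdefault semantics (values illustrative): explicit cache_args entries win over the legacy keywords.

    cache_args = {"dir": "/tmp/explicit"}  # passed by the caller
    cache_dir, cache_url, cache_type = "/tmp/legacy", None, "file"

    if cache_dir:
        cache_args.setdefault("dir", cache_dir)   # no-op: "dir" already set
    if cache_url:
        cache_args.setdefault("url", cache_url)
    if cache_type:
        cache_args.setdefault("type", cache_type)

    assert cache_args == {"dir": "/tmp/explicit", "type": "file"}
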
    @@ -248,17 +250,18 @@ class TemplateLookup(TemplateCollection):
                 else:
                     return self._collection[uri]
             except KeyError:
    -            u = re.sub(r'^\/+', '', uri)
    -            for dir in self.directories:
    +            u = re.sub(r"^\/+", "", uri)
    +            for dir_ in self.directories:
                     # make sure the path separators are posix - os.altsep is empty
                     # on POSIX and cannot be used.
    -                dir = dir.replace(os.path.sep, posixpath.sep)
    -                srcfile = posixpath.normpath(posixpath.join(dir, u))
    +                dir_ = dir_.replace(os.path.sep, posixpath.sep)
    +                srcfile = posixpath.normpath(posixpath.join(dir_, u))
                     if os.path.isfile(srcfile):
                         return self._load(srcfile, uri)
                 else:
                     raise exceptions.TopLevelLookupException(
    -                    "Can't locate template for uri %r" % uri)
    +                    "Can't locate template for uri %r" % uri
    +                )
     
         def adjust_uri(self, uri, relativeto):
             """Adjust the given ``uri`` based on the given relative URI."""
    @@ -267,12 +270,13 @@ class TemplateLookup(TemplateCollection):
             if key in self._uri_cache:
                 return self._uri_cache[key]
     
    -        if uri[0] != '/':
    +        if uri[0] != "/":
                 if relativeto is not None:
                     v = self._uri_cache[key] = posixpath.join(
    -                    posixpath.dirname(relativeto), uri)
    +                    posixpath.dirname(relativeto), uri
    +                )
                 else:
    -                v = self._uri_cache[key] = '/' + uri
    +                v = self._uri_cache[key] = "/" + uri
             else:
                 v = self._uri_cache[key] = uri
             return v
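
Note: for reference, the three branches above implement the following mapping. A stand-alone sketch without the _uri_cache memoization; inputs are illustrative:

    import posixpath

    def adjust(uri, relativeto):
        # relative uris are joined to the directory of `relativeto`;
        # absolute uris (leading "/") pass through unchanged.
        if uri[0] != "/":
            if relativeto is not None:
                return posixpath.join(posixpath.dirname(relativeto), uri)
            return "/" + uri
        return uri

    assert adjust("footer.html", "/site/index.html") == "/site/footer.html"
    assert adjust("footer.html", None) == "/footer.html"
    assert adjust("/footer.html", "/site/index.html") == "/footer.html"
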
    @@ -295,9 +299,9 @@ class TemplateLookup(TemplateCollection):
             """
     
             filename = posixpath.normpath(filename)
    -        for dir in self.directories:
    -            if filename[0:len(dir)] == dir:
    -                return filename[len(dir):]
    +        for dir_ in self.directories:
    +            if filename[0 : len(dir_)] == dir_:
    +                return filename[len(dir_) :]
             else:
                 return None
     
    @@ -320,7 +324,8 @@ class TemplateLookup(TemplateCollection):
                         filename=posixpath.normpath(filename),
                         lookup=self,
                         module_filename=module_filename,
    -                    **self.template_args)
    +                    **self.template_args
    +                )
                     return template
                 except:
                     # if compilation fails etc, ensure
    @@ -337,8 +342,7 @@ class TemplateLookup(TemplateCollection):
     
             try:
                 template_stat = os.stat(template.filename)
    -            if template.module._modified_time < \
    -                    template_stat[stat.ST_MTIME]:
    +            if template.module._modified_time < template_stat[stat.ST_MTIME]:
                     self._collection.pop(uri, None)
                     return self._load(template.filename, uri)
                 else:
    @@ -346,7 +350,8 @@ class TemplateLookup(TemplateCollection):
             except OSError:
                 self._collection.pop(uri, None)
                 raise exceptions.TemplateLookupException(
    -                "Can't locate template for uri %r" % uri)
    +                "Can't locate template for uri %r" % uri
    +            )
     
         def put_string(self, uri, text):
             """Place a new :class:`.Template` object into this
    @@ -355,10 +360,8 @@ class TemplateLookup(TemplateCollection):
     
             """
             self._collection[uri] = Template(
    -            text,
    -            lookup=self,
    -            uri=uri,
    -            **self.template_args)
    +            text, lookup=self, uri=uri, **self.template_args
    +        )
     
         def put_template(self, uri, template):
             """Place a new :class:`.Template` object into this
    diff --git a/server/www/packages/packages-windows/x86/mako/parsetree.py b/server/www/packages/packages-windows/x86/mako/parsetree.py
    index e129916..801e48a 100644
    --- a/server/www/packages/packages-windows/x86/mako/parsetree.py
    +++ b/server/www/packages/packages-windows/x86/mako/parsetree.py
    @@ -1,14 +1,19 @@
     # mako/parsetree.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
     
     """defines the parse tree components for Mako templates."""
     
    -from mako import exceptions, ast, util, filters, compat
     import re
     
    +from mako import ast
    +from mako import compat
    +from mako import exceptions
    +from mako import filters
    +from mako import util
    +
     
     class Node(object):
     
    @@ -22,8 +27,12 @@ class Node(object):
     
         @property
         def exception_kwargs(self):
    -        return {'source': self.source, 'lineno': self.lineno,
    -                'pos': self.pos, 'filename': self.filename}
    +        return {
    +            "source": self.source,
    +            "lineno": self.lineno,
    +            "pos": self.pos,
    +            "filename": self.filename,
    +        }
     
         def get_children(self):
             return []
    @@ -42,7 +51,7 @@ class TemplateNode(Node):
         """a 'container' node that stores the overall collection of nodes."""
     
         def __init__(self, filename):
    -        super(TemplateNode, self).__init__('', 0, 0, filename)
    +        super(TemplateNode, self).__init__("", 0, 0, filename)
             self.nodes = []
             self.page_attributes = {}
     
    @@ -52,7 +61,8 @@ class TemplateNode(Node):
         def __repr__(self):
             return "TemplateNode(%s, %r)" % (
                 util.sorted_dict_repr(self.page_attributes),
    -            self.nodes)
    +            self.nodes,
    +        )
     
     
     class ControlLine(Node):
    @@ -74,7 +84,7 @@ class ControlLine(Node):
             self.text = text
             self.keyword = keyword
             self.isend = isend
    -        self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
    +        self.is_primary = keyword in ["for", "if", "while", "try", "with"]
             self.nodes = []
             if self.isend:
                 self._declared_identifiers = []
    @@ -98,9 +108,9 @@ class ControlLine(Node):
             for this ControlLine"""
     
             return keyword in {
    -            'if': set(['else', 'elif']),
    -            'try': set(['except', 'finally']),
    -            'for': set(['else'])
    +            "if": set(["else", "elif"]),
    +            "try": set(["except", "finally"]),
    +            "for": set(["else"]),
             }.get(self.keyword, [])
     
         def __repr__(self):
    @@ -108,7 +118,7 @@ class ControlLine(Node):
                 self.keyword,
                 self.text,
                 self.isend,
    -            (self.lineno, self.pos)
    +            (self.lineno, self.pos),
             )
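
Note: the dict rewritten above drives which control keywords may follow an open block; the trailing .get(self.keyword, []) default means unknown keywords accept no followers. A tiny sketch:

    followers = {
        "if": {"else", "elif"},
        "try": {"except", "finally"},
        "for": {"else"},
    }
    assert "elif" in followers.get("if", [])         # %elif may follow %if
    assert "else" not in followers.get("while", [])  # %while takes no followers
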
     
     
    @@ -158,7 +168,7 @@ class Code(Node):
             return "Code(%r, %r, %r)" % (
                 self.text,
                 self.ismodule,
    -            (self.lineno, self.pos)
    +            (self.lineno, self.pos),
             )
     
     
    @@ -208,7 +218,7 @@ class Expression(Node):
             return "Expression(%r, %r, %r)" % (
                 self.text,
                 self.escapes_code.args,
    -            (self.lineno, self.pos)
    +            (self.lineno, self.pos),
             )
     
     
    @@ -219,45 +229,55 @@ class _TagMeta(type):
     
         _classmap = {}
     
    -    def __init__(cls, clsname, bases, dict):
    -        if getattr(cls, '__keyword__', None) is not None:
    +    def __init__(cls, clsname, bases, dict_):
    +        if getattr(cls, "__keyword__", None) is not None:
                 cls._classmap[cls.__keyword__] = cls
    -        super(_TagMeta, cls).__init__(clsname, bases, dict)
    +        super(_TagMeta, cls).__init__(clsname, bases, dict_)
     
         def __call__(cls, keyword, attributes, **kwargs):
             if ":" in keyword:
    -            ns, defname = keyword.split(':')
    -            return type.__call__(CallNamespaceTag, ns, defname,
    -                                 attributes, **kwargs)
    +            ns, defname = keyword.split(":")
    +            return type.__call__(
    +                CallNamespaceTag, ns, defname, attributes, **kwargs
    +            )
     
             try:
                 cls = _TagMeta._classmap[keyword]
             except KeyError:
                 raise exceptions.CompileException(
                     "No such tag: '%s'" % keyword,
    -                source=kwargs['source'],
    -                lineno=kwargs['lineno'],
    -                pos=kwargs['pos'],
    -                filename=kwargs['filename']
    +                source=kwargs["source"],
    +                lineno=kwargs["lineno"],
    +                pos=kwargs["pos"],
    +                filename=kwargs["filename"],
                 )
             return type.__call__(cls, keyword, attributes, **kwargs)
     
     
     class Tag(compat.with_metaclass(_TagMeta, Node)):
    -
         """abstract base class for tags.
     
    -    <%sometag/>
    +    e.g.::
     
    -    <%someothertag>
    -        stuff
    -    </%someothertag>
    +        <%sometag/>
    +
    +        <%someothertag>
    +            stuff
    +        </%someothertag>
     
         """
    +
         __keyword__ = None
     
    -    def __init__(self, keyword, attributes, expressions,
    -                 nonexpressions, required, **kwargs):
    +    def __init__(
    +        self,
    +        keyword,
    +        attributes,
    +        expressions,
    +        nonexpressions,
    +        required,
    +        **kwargs
    +    ):
             r"""construct a new Tag instance.
     
             this constructor is not called directly, and is only called
    @@ -284,9 +304,10 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
             missing = [r for r in required if r not in self.parsed_attributes]
             if len(missing):
                 raise exceptions.CompileException(
    -                "Missing attribute(s): %s" %
    -                ",".join([repr(m) for m in missing]),
    -                **self.exception_kwargs)
    +                "Missing attribute(s): %s"
    +                % ",".join([repr(m) for m in missing]),
    +                **self.exception_kwargs
    +            )
             self.parent = None
             self.nodes = []
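
Note: the _TagMeta dispatch reformatted earlier in this file's diff is a keyword registry: subclasses that set __keyword__ register themselves, and calling the base class routes to the registered subclass. A stand-alone Python 3 sketch of that pattern (not Mako's actual classes):

    class TagMeta(type):
        _classmap = {}

        def __init__(cls, clsname, bases, dict_):
            # subclasses that declare __keyword__ self-register
            if getattr(cls, "__keyword__", None) is not None:
                cls._classmap[cls.__keyword__] = cls
            super(TagMeta, cls).__init__(clsname, bases, dict_)

        def __call__(cls, keyword, **kwargs):
            # type.__call__ bypasses this metaclass, avoiding recursion
            return type.__call__(cls._classmap[keyword], keyword, **kwargs)

    class BaseTag(metaclass=TagMeta):
        __keyword__ = None

        def __init__(self, keyword, **kwargs):
            self.keyword = keyword

    class IncludeLikeTag(BaseTag):
        __keyword__ = "include"

    assert type(BaseTag("include")) is IncludeLikeTag
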
     
    @@ -302,36 +323,40 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
             for key in self.attributes:
                 if key in expressions:
                     expr = []
    -                for x in re.compile(r'(\${.+?})',
    -                                    re.S).split(self.attributes[key]):
    -                    m = re.compile(r'^\${(.+?)}$', re.S).match(x)
    +                for x in re.compile(r"(\${.+?})", re.S).split(
    +                    self.attributes[key]
    +                ):
    +                    m = re.compile(r"^\${(.+?)}$", re.S).match(x)
                         if m:
    -                        code = ast.PythonCode(m.group(1).rstrip(),
    -                                              **self.exception_kwargs)
    +                        code = ast.PythonCode(
    +                            m.group(1).rstrip(), **self.exception_kwargs
    +                        )
                             # we aren't discarding "declared_identifiers" here,
                             # which we do so that list comprehension-declared
                             # variables aren't counted.  As yet we can't find a
                             # condition that requires it here.
    -                        undeclared_identifiers = \
    -                            undeclared_identifiers.union(
    -                                code.undeclared_identifiers)
    -                        expr.append('(%s)' % m.group(1))
    +                        undeclared_identifiers = undeclared_identifiers.union(
    +                            code.undeclared_identifiers
    +                        )
    +                        expr.append("(%s)" % m.group(1))
                         else:
                             if x:
                                 expr.append(repr(x))
    -                self.parsed_attributes[key] = " + ".join(expr) or repr('')
    +                self.parsed_attributes[key] = " + ".join(expr) or repr("")
                 elif key in nonexpressions:
    -                if re.search(r'\${.+?}', self.attributes[key]):
    +                if re.search(r"\${.+?}", self.attributes[key]):
                         raise exceptions.CompileException(
                             "Attibute '%s' in tag '%s' does not allow embedded "
                             "expressions" % (key, self.keyword),
    -                        **self.exception_kwargs)
    +                        **self.exception_kwargs
    +                    )
                     self.parsed_attributes[key] = repr(self.attributes[key])
                 else:
                     raise exceptions.CompileException(
    -                    "Invalid attribute for tag '%s': '%s'" %
    -                    (self.keyword, key),
    -                    **self.exception_kwargs)
    +                    "Invalid attribute for tag '%s': '%s'"
    +                    % (self.keyword, key),
    +                    **self.exception_kwargs
    +                )
             self.expression_undeclared_identifiers = undeclared_identifiers
     
         def declared_identifiers(self):
    @@ -341,56 +366,64 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
             return self.expression_undeclared_identifiers
     
         def __repr__(self):
    -        return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
    -                                       self.keyword,
    -                                       util.sorted_dict_repr(self.attributes),
    -                                       (self.lineno, self.pos),
    -                                       self.nodes
    -                                       )
    +        return "%s(%r, %s, %r, %r)" % (
    +            self.__class__.__name__,
    +            self.keyword,
    +            util.sorted_dict_repr(self.attributes),
    +            (self.lineno, self.pos),
    +            self.nodes,
    +        )
     
     
     class IncludeTag(Tag):
    -    __keyword__ = 'include'
    +    __keyword__ = "include"
     
         def __init__(self, keyword, attributes, **kwargs):
             super(IncludeTag, self).__init__(
                 keyword,
                 attributes,
    -            ('file', 'import', 'args'),
    -            (), ('file',), **kwargs)
    +            ("file", "import", "args"),
    +            (),
    +            ("file",),
    +            **kwargs
    +        )
             self.page_args = ast.PythonCode(
    -            "__DUMMY(%s)" % attributes.get('args', ''),
    -            **self.exception_kwargs)
    +            "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs
    +        )
     
         def declared_identifiers(self):
             return []
     
         def undeclared_identifiers(self):
    -        identifiers = self.page_args.undeclared_identifiers.\
    -            difference(set(["__DUMMY"])).\
    -            difference(self.page_args.declared_identifiers)
    -        return identifiers.union(super(IncludeTag, self).
    -                                 undeclared_identifiers())
    +        identifiers = self.page_args.undeclared_identifiers.difference(
    +            set(["__DUMMY"])
    +        ).difference(self.page_args.declared_identifiers)
    +        return identifiers.union(
    +            super(IncludeTag, self).undeclared_identifiers()
    +        )
     
     
     class NamespaceTag(Tag):
    -    __keyword__ = 'namespace'
    +    __keyword__ = "namespace"
     
         def __init__(self, keyword, attributes, **kwargs):
             super(NamespaceTag, self).__init__(
    -            keyword, attributes,
    -            ('file',),
    -            ('name', 'inheritable',
    -             'import', 'module'),
    -            (), **kwargs)
    +            keyword,
    +            attributes,
    +            ("file",),
    +            ("name", "inheritable", "import", "module"),
    +            (),
    +            **kwargs
    +        )
     
    -        self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
    -        if 'name' not in attributes and 'import' not in attributes:
    +        self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self))))
    +        if "name" not in attributes and "import" not in attributes:
                 raise exceptions.CompileException(
                     "'name' and/or 'import' attributes are required "
                     "for <%namespace>",
    -                **self.exception_kwargs)
    -        if 'file' in attributes and 'module' in attributes:
    +                **self.exception_kwargs
    +            )
    +        if "file" in attributes and "module" in attributes:
                 raise exceptions.CompileException(
                     "<%namespace> may only have one of 'file' or 'module'",
                     **self.exception_kwargs
    @@ -401,51 +434,51 @@ class NamespaceTag(Tag):
     
     
     class TextTag(Tag):
    -    __keyword__ = 'text'
    +    __keyword__ = "text"
     
         def __init__(self, keyword, attributes, **kwargs):
             super(TextTag, self).__init__(
    -            keyword,
    -            attributes, (),
    -            ('filter'), (), **kwargs)
    +            keyword, attributes, (), ("filter"), (), **kwargs
    +        )
             self.filter_args = ast.ArgumentList(
    -            attributes.get('filter', ''),
    -            **self.exception_kwargs)
    +            attributes.get("filter", ""), **self.exception_kwargs
    +        )
     
         def undeclared_identifiers(self):
    -        return self.filter_args.\
    -            undeclared_identifiers.\
    -            difference(filters.DEFAULT_ESCAPES.keys()).union(
    -                self.expression_undeclared_identifiers
    -            )
    +        return self.filter_args.undeclared_identifiers.difference(
    +            filters.DEFAULT_ESCAPES.keys()
    +        ).union(self.expression_undeclared_identifiers)
     
     
     class DefTag(Tag):
    -    __keyword__ = 'def'
    +    __keyword__ = "def"
     
         def __init__(self, keyword, attributes, **kwargs):
    -        expressions = ['buffered', 'cached'] + [
    -            c for c in attributes if c.startswith('cache_')]
    +        expressions = ["buffered", "cached"] + [
    +            c for c in attributes if c.startswith("cache_")
    +        ]
     
             super(DefTag, self).__init__(
                 keyword,
                 attributes,
                 expressions,
    -            ('name', 'filter', 'decorator'),
    -            ('name',),
    -            **kwargs)
    -        name = attributes['name']
    -        if re.match(r'^[\w_]+$', name):
    +            ("name", "filter", "decorator"),
    +            ("name",),
    +            **kwargs
    +        )
    +        name = attributes["name"]
    +        if re.match(r"^[\w_]+$", name):
                 raise exceptions.CompileException(
    -                "Missing parenthesis in %def",
    -                **self.exception_kwargs)
    -        self.function_decl = ast.FunctionDecl("def " + name + ":pass",
    -                                              **self.exception_kwargs)
    +                "Missing parenthesis in %def", **self.exception_kwargs
    +            )
    +        self.function_decl = ast.FunctionDecl(
    +            "def " + name + ":pass", **self.exception_kwargs
    +        )
             self.name = self.function_decl.funcname
    -        self.decorator = attributes.get('decorator', '')
    +        self.decorator = attributes.get("decorator", "")
             self.filter_args = ast.ArgumentList(
    -            attributes.get('filter', ''),
    -            **self.exception_kwargs)
    +            attributes.get("filter", ""), **self.exception_kwargs
    +        )
     
         is_anonymous = False
         is_block = False
    @@ -463,51 +496,58 @@ class DefTag(Tag):
         def undeclared_identifiers(self):
             res = []
             for c in self.function_decl.defaults:
    -            res += list(ast.PythonCode(c, **self.exception_kwargs).
    -                        undeclared_identifiers)
    -        return set(res).union(
    -            self.filter_args.
    -            undeclared_identifiers.
    -            difference(filters.DEFAULT_ESCAPES.keys())
    -        ).union(
    -            self.expression_undeclared_identifiers
    -        ).difference(
    -            self.function_decl.allargnames
    +            res += list(
    +                ast.PythonCode(
    +                    c, **self.exception_kwargs
    +                ).undeclared_identifiers
    +            )
    +        return (
    +            set(res)
    +            .union(
    +                self.filter_args.undeclared_identifiers.difference(
    +                    filters.DEFAULT_ESCAPES.keys()
    +                )
    +            )
    +            .union(self.expression_undeclared_identifiers)
    +            .difference(self.function_decl.allargnames)
             )
     
     
     class BlockTag(Tag):
    -    __keyword__ = 'block'
    +    __keyword__ = "block"
     
         def __init__(self, keyword, attributes, **kwargs):
    -        expressions = ['buffered', 'cached', 'args'] + [
    -            c for c in attributes if c.startswith('cache_')]
    +        expressions = ["buffered", "cached", "args"] + [
    +            c for c in attributes if c.startswith("cache_")
    +        ]
     
             super(BlockTag, self).__init__(
                 keyword,
                 attributes,
                 expressions,
    -            ('name', 'filter', 'decorator'),
    +            ("name", "filter", "decorator"),
                 (),
    -            **kwargs)
    -        name = attributes.get('name')
    -        if name and not re.match(r'^[\w_]+$', name):
    +            **kwargs
    +        )
    +        name = attributes.get("name")
    +        if name and not re.match(r"^[\w_]+$", name):
                 raise exceptions.CompileException(
                     "%block may not specify an argument signature",
    -                **self.exception_kwargs)
    -        if not name and attributes.get('args', None):
    -            raise exceptions.CompileException(
    -                "Only named %blocks may specify args",
                     **self.exception_kwargs
                 )
    -        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
    -                                          **self.exception_kwargs)
    +        if not name and attributes.get("args", None):
    +            raise exceptions.CompileException(
    +                "Only named %blocks may specify args", **self.exception_kwargs
    +            )
    +        self.body_decl = ast.FunctionArgs(
    +            attributes.get("args", ""), **self.exception_kwargs
    +        )
     
             self.name = name
    -        self.decorator = attributes.get('decorator', '')
    +        self.decorator = attributes.get("decorator", "")
             self.filter_args = ast.ArgumentList(
    -            attributes.get('filter', ''),
    -            **self.exception_kwargs)
    +            attributes.get("filter", ""), **self.exception_kwargs
    +        )
     
         is_block = True
     
    @@ -517,7 +557,7 @@ class BlockTag(Tag):
     
         @property
         def funcname(self):
    -        return self.name or "__M_anon_%d" % (self.lineno, )
    +        return self.name or "__M_anon_%d" % (self.lineno,)
     
         def get_argument_expressions(self, **kw):
             return self.body_decl.get_argument_expressions(**kw)
    @@ -526,91 +566,100 @@ class BlockTag(Tag):
             return self.body_decl.allargnames
     
         def undeclared_identifiers(self):
    -        return (self.filter_args.
    -                undeclared_identifiers.
    -                difference(filters.DEFAULT_ESCAPES.keys())
    -                ).union(self.expression_undeclared_identifiers)
    +        return (
    +            self.filter_args.undeclared_identifiers.difference(
    +                filters.DEFAULT_ESCAPES.keys()
    +            )
    +        ).union(self.expression_undeclared_identifiers)
     
     
     class CallTag(Tag):
    -    __keyword__ = 'call'
    +    __keyword__ = "call"
     
         def __init__(self, keyword, attributes, **kwargs):
    -        super(CallTag, self).__init__(keyword, attributes,
    -                                      ('args'), ('expr',), ('expr',), **kwargs)
    -        self.expression = attributes['expr']
    +        super(CallTag, self).__init__(
    +            keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs
    +        )
    +        self.expression = attributes["expr"]
             self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
    -        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
    -                                          **self.exception_kwargs)
    +        self.body_decl = ast.FunctionArgs(
    +            attributes.get("args", ""), **self.exception_kwargs
    +        )
     
         def declared_identifiers(self):
             return self.code.declared_identifiers.union(self.body_decl.allargnames)
     
         def undeclared_identifiers(self):
    -        return self.code.undeclared_identifiers.\
    -            difference(self.code.declared_identifiers)
    +        return self.code.undeclared_identifiers.difference(
    +            self.code.declared_identifiers
    +        )
     
     
     class CallNamespaceTag(Tag):
    -
         def __init__(self, namespace, defname, attributes, **kwargs):
             super(CallNamespaceTag, self).__init__(
                 namespace + ":" + defname,
                 attributes,
    -            tuple(attributes.keys()) + ('args', ),
    +            tuple(attributes.keys()) + ("args",),
                 (),
                 (),
    -            **kwargs)
    +            **kwargs
    +        )
     
             self.expression = "%s.%s(%s)" % (
                 namespace,
                 defname,
    -            ",".join(["%s=%s" % (k, v) for k, v in
    -                      self.parsed_attributes.items()
    -                      if k != 'args'])
    +            ",".join(
    +                [
    +                    "%s=%s" % (k, v)
    +                    for k, v in self.parsed_attributes.items()
    +                    if k != "args"
    +                ]
    +            ),
             )
             self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
             self.body_decl = ast.FunctionArgs(
    -            attributes.get('args', ''),
    -            **self.exception_kwargs)
    +            attributes.get("args", ""), **self.exception_kwargs
    +        )
     
         def declared_identifiers(self):
             return self.code.declared_identifiers.union(self.body_decl.allargnames)
     
         def undeclared_identifiers(self):
    -        return self.code.undeclared_identifiers.\
    -            difference(self.code.declared_identifiers)
    +        return self.code.undeclared_identifiers.difference(
    +            self.code.declared_identifiers
    +        )
     
     
     class InheritTag(Tag):
    -    __keyword__ = 'inherit'
    +    __keyword__ = "inherit"
     
         def __init__(self, keyword, attributes, **kwargs):
             super(InheritTag, self).__init__(
    -            keyword, attributes,
    -            ('file',), (), ('file',), **kwargs)
    +            keyword, attributes, ("file",), (), ("file",), **kwargs
    +        )
     
     
     class PageTag(Tag):
    -    __keyword__ = 'page'
    +    __keyword__ = "page"
     
         def __init__(self, keyword, attributes, **kwargs):
    -        expressions = \
    -            ['cached', 'args', 'expression_filter', 'enable_loop'] + \
    -            [c for c in attributes if c.startswith('cache_')]
    +        expressions = [
    +            "cached",
    +            "args",
    +            "expression_filter",
    +            "enable_loop",
    +        ] + [c for c in attributes if c.startswith("cache_")]
     
             super(PageTag, self).__init__(
    -            keyword,
    -            attributes,
    -            expressions,
    -            (),
    -            (),
    -            **kwargs)
    -        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
    -                                          **self.exception_kwargs)
    +            keyword, attributes, expressions, (), (), **kwargs
    +        )
    +        self.body_decl = ast.FunctionArgs(
    +            attributes.get("args", ""), **self.exception_kwargs
    +        )
             self.filter_args = ast.ArgumentList(
    -            attributes.get('expression_filter', ''),
    -            **self.exception_kwargs)
    +            attributes.get("expression_filter", ""), **self.exception_kwargs
    +        )
     
         def declared_identifiers(self):
             return self.body_decl.allargnames
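
Note: the regex pair reformatted above (in Tag's attribute parsing) turns an attribute value into a Python expression string: ${...} spans become parenthesized expressions, literal chunks become repr() strings. A stand-alone sketch:

    import re

    attr = "prefix_${name}_suffix"
    expr = []
    for x in re.compile(r"(\${.+?})", re.S).split(attr):
        m = re.compile(r"^\${(.+?)}$", re.S).match(x)
        if m:
            expr.append("(%s)" % m.group(1))   # expression span
        elif x:
            expr.append(repr(x))               # literal chunk

    assert " + ".join(expr) == "'prefix_' + (name) + '_suffix'"
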
    diff --git a/server/www/packages/packages-windows/x86/mako/pygen.py b/server/www/packages/packages-windows/x86/mako/pygen.py
    index 8514e02..947721f 100644
    --- a/server/www/packages/packages-windows/x86/mako/pygen.py
    +++ b/server/www/packages/packages-windows/x86/mako/pygen.py
    @@ -1,5 +1,5 @@
     # mako/pygen.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -7,11 +7,11 @@
     """utilities for generating and formatting literal Python code."""
     
     import re
    +
     from mako import exceptions
     
     
     class PythonPrinter(object):
    -
         def __init__(self, stream):
             # indentation counter
             self.indent = 0
    @@ -54,14 +54,16 @@ class PythonPrinter(object):
             self.stream.write("\n" * num)
             self._update_lineno(num)
     
    -    def write_indented_block(self, block):
    +    def write_indented_block(self, block, starting_lineno=None):
             """print a line or lines of python which already contain indentation.
     
             The indentation of the total block of lines will be adjusted to that of
             the current indent level."""
             self.in_indent_lines = False
    -        for l in re.split(r'\r?\n', block):
    +        for i, l in enumerate(re.split(r"\r?\n", block)):
                 self.line_buffer.append(l)
    +            if starting_lineno is not None:
    +                self.start_source(starting_lineno + i)
                 self._update_lineno(1)
     
         def writelines(self, *lines):
    @@ -83,21 +85,18 @@ class PythonPrinter(object):
                 self.in_indent_lines = True
     
             if (
    -            line is None or
    -            re.match(r"^\s*#", line) or
    -            re.match(r"^\s*$", line)
    +            line is None
    +            or re.match(r"^\s*#", line)
    +            or re.match(r"^\s*$", line)
             ):
                 hastext = False
             else:
                 hastext = True
     
    -        is_comment = line and len(line) and line[0] == '#'
    +        is_comment = line and len(line) and line[0] == "#"
     
             # see if this line should decrease the indentation level
    -        if (
    -            not is_comment and
    -            (not hastext or self._is_unindentor(line))
    -        ):
    +        if not is_comment and (not hastext or self._is_unindentor(line)):
     
                 if self.indent > 0:
                     self.indent -= 1
    @@ -106,7 +105,8 @@ class PythonPrinter(object):
                     # module won't compile.
                     if len(self.indent_detail) == 0:
                         raise exceptions.SyntaxException(
    -                        "Too many whitespace closures")
    +                        "Too many whitespace closures"
    +                    )
                     self.indent_detail.pop()
     
             if line is None:
    @@ -136,8 +136,9 @@ class PythonPrinter(object):
                     # it's not a "compound" keyword, but let's also
                     # test for valid Python keywords that might be indenting us,
                     # else assume it's a non-indenting line
    -                m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
    -                              line)
    +                m2 = re.match(
    +                    r"^\s*(def|class|else|elif|except|finally)", line
    +                )
                     if m2:
                         self.indent += 1
                         self.indent_detail.append(indentor)
    @@ -189,14 +190,15 @@ class PythonPrinter(object):
     
             # return False
     
    -    def _indent_line(self, line, stripspace=''):
    +    def _indent_line(self, line, stripspace=""):
             """indent the given line according to the current indent level.
     
             stripspace is a string of space that will be truncated from the
             start of the line before indenting."""
     
    -        return re.sub(r"^%s" % stripspace, self.indentstring
    -                      * self.indent, line)
    +        return re.sub(
    +            r"^%s" % stripspace, self.indentstring * self.indent, line
    +        )
     
         def _reset_multi_line_flags(self):
             """reset the flags which would indicate we are in a backslashed
    @@ -214,7 +216,7 @@ class PythonPrinter(object):
             # a literal multiline string with unfortunately placed
             # whitespace
     
    -        current_state = (self.backslashed or self.triplequoted)
    +        current_state = self.backslashed or self.triplequoted
     
             if re.search(r"\\$", line):
                 self.backslashed = True
    @@ -251,7 +253,7 @@ def adjust_whitespace(text):
         (backslashed, triplequoted) = (0, 1)
     
         def in_multi_line(line):
    -        start_state = (state[backslashed] or state[triplequoted])
    +        start_state = state[backslashed] or state[triplequoted]
     
             if re.search(r"\\$", line):
                 state[backslashed] = True
    @@ -261,7 +263,7 @@ def adjust_whitespace(text):
             def match(reg, t):
                 m = re.match(reg, t)
                 if m:
    -                return m, t[len(m.group(0)):]
    +                return m, t[len(m.group(0)) :]
                 else:
                     return None, t
     
    @@ -273,7 +275,7 @@ def adjust_whitespace(text):
                     else:
                         m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
                 else:
    -                m, line = match(r'#', line)
    +                m, line = match(r"#", line)
                     if m:
                         return start_state
     
    @@ -286,13 +288,13 @@ def adjust_whitespace(text):
     
             return start_state
     
    -    def _indent_line(line, stripspace=''):
    -        return re.sub(r"^%s" % stripspace, '', line)
    +    def _indent_line(line, stripspace=""):
    +        return re.sub(r"^%s" % stripspace, "", line)
     
         lines = []
         stripspace = None
     
    -    for line in re.split(r'\r?\n', text):
    +    for line in re.split(r"\r?\n", text):
             if in_multi_line(line):
                 lines.append(line)
             else:
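
Note: a minimal sketch of driving the PythonPrinter above, assuming only the writeline() behavior visible in this diff: colon-terminated lines raise the indent level, and writeline(None) closes the current block.

    from io import StringIO
    from mako.pygen import PythonPrinter

    buf = StringIO()
    printer = PythonPrinter(buf)
    printer.writeline("def greet(name):")           # opens an indented block
    printer.writeline("return 'hello, %s' % name")  # emitted at indent level 1
    printer.writeline(None)                         # dedents back out
    print(buf.getvalue())
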
    diff --git a/server/www/packages/packages-windows/x86/mako/pyparser.py b/server/www/packages/packages-windows/x86/mako/pyparser.py
    index 15d0da6..b16672d 100644
    --- a/server/www/packages/packages-windows/x86/mako/pyparser.py
    +++ b/server/www/packages/packages-windows/x86/mako/pyparser.py
    @@ -1,5 +1,5 @@
     # mako/pyparser.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -10,46 +10,52 @@ Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
     module is used.
     """
     
    -from mako import exceptions, util, compat
    -from mako.compat import arg_stringname
     import operator
     
    +import _ast
    +
    +from mako import _ast_util
    +from mako import compat
    +from mako import exceptions
    +from mako import util
    +from mako.compat import arg_stringname
    +
     if compat.py3k:
         # words that cannot be assigned to (notably
         # smaller than the total keys in __builtins__)
    -    reserved = set(['True', 'False', 'None', 'print'])
    +    reserved = set(["True", "False", "None", "print"])
     
         # the "id" attribute on a function node
    -    arg_id = operator.attrgetter('arg')
    +    arg_id = operator.attrgetter("arg")
     else:
         # words that cannot be assigned to (notably
         # smaller than the total keys in __builtins__)
    -    reserved = set(['True', 'False', 'None'])
    +    reserved = set(["True", "False", "None"])
     
         # the "id" attribute on a function node
    -    arg_id = operator.attrgetter('id')
    +    arg_id = operator.attrgetter("id")
     
    -import _ast
     util.restore__ast(_ast)
    -from mako import _ast_util
     
     
    -def parse(code, mode='exec', **exception_kwargs):
    +def parse(code, mode="exec", **exception_kwargs):
         """Parse an expression into AST"""
     
         try:
    -        return _ast_util.parse(code, '<unknown>', mode)
    +        return _ast_util.parse(code, "<unknown>", mode)
         except Exception:
             raise exceptions.SyntaxException(
    -            "(%s) %s (%r)" % (
    +            "(%s) %s (%r)"
    +            % (
                     compat.exception_as().__class__.__name__,
                     compat.exception_as(),
    -                code[0:50]
    -            ), **exception_kwargs)
    +                code[0:50],
    +            ),
    +            **exception_kwargs
    +        )
     
     
     class FindIdentifiers(_ast_util.NodeVisitor):
    -
         def __init__(self, listener, **exception_kwargs):
             self.in_function = False
             self.in_assign_targets = False
    @@ -119,9 +125,9 @@ class FindIdentifiers(_ast_util.NodeVisitor):
             self.in_function = True
     
             local_ident_stack = self.local_ident_stack
    -        self.local_ident_stack = local_ident_stack.union([
    -            arg_id(arg) for arg in self._expand_tuples(node.args.args)
    -        ])
    +        self.local_ident_stack = local_ident_stack.union(
    +            [arg_id(arg) for arg in self._expand_tuples(node.args.args)]
    +        )
             if islambda:
                 self.visit(node.body)
             else:
    @@ -146,9 +152,11 @@ class FindIdentifiers(_ast_util.NodeVisitor):
                 # this is equivalent to visit_AssName in
                 # compiler
                 self._add_declared(node.id)
    -        elif node.id not in reserved and node.id \
    -            not in self.listener.declared_identifiers and node.id \
    -                not in self.local_ident_stack:
    +        elif (
    +            node.id not in reserved
    +            and node.id not in self.listener.declared_identifiers
    +            and node.id not in self.local_ident_stack
    +        ):
                 self.listener.undeclared_identifiers.add(node.id)
     
         def visit_Import(self, node):
    @@ -156,24 +164,25 @@ class FindIdentifiers(_ast_util.NodeVisitor):
                 if name.asname is not None:
                     self._add_declared(name.asname)
                 else:
    -                self._add_declared(name.name.split('.')[0])
    +                self._add_declared(name.name.split(".")[0])
     
         def visit_ImportFrom(self, node):
             for name in node.names:
                 if name.asname is not None:
                     self._add_declared(name.asname)
                 else:
    -                if name.name == '*':
    +                if name.name == "*":
                         raise exceptions.CompileException(
                             "'import *' is not supported, since all identifier "
                             "names must be explicitly declared.  Please use the "
                             "form 'from  import , , "
    -                        "...' instead.", **self.exception_kwargs)
    +                        "...' instead.",
    +                        **self.exception_kwargs
    +                    )
                     self._add_declared(name.name)
     
     
     class FindTuple(_ast_util.NodeVisitor):
    -
         def __init__(self, listener, code_factory, **exception_kwargs):
             self.listener = listener
             self.exception_kwargs = exception_kwargs
    @@ -184,16 +193,17 @@ class FindTuple(_ast_util.NodeVisitor):
                 p = self.code_factory(n, **self.exception_kwargs)
                 self.listener.codeargs.append(p)
                 self.listener.args.append(ExpressionGenerator(n).value())
    -            self.listener.declared_identifiers = \
    -                self.listener.declared_identifiers.union(
    -                    p.declared_identifiers)
    -            self.listener.undeclared_identifiers = \
    -                self.listener.undeclared_identifiers.union(
    -                    p.undeclared_identifiers)
    +            ldi = self.listener.declared_identifiers
    +            self.listener.declared_identifiers = ldi.union(
    +                p.declared_identifiers
    +            )
    +            lui = self.listener.undeclared_identifiers
    +            self.listener.undeclared_identifiers = lui.union(
    +                p.undeclared_identifiers
    +            )
     
     
     class ParseFunc(_ast_util.NodeVisitor):
    -
         def __init__(self, listener, **exception_kwargs):
             self.listener = listener
             self.exception_kwargs = exception_kwargs
    @@ -224,10 +234,9 @@ class ParseFunc(_ast_util.NodeVisitor):
     
     
     class ExpressionGenerator(object):
    -
         def __init__(self, astnode):
    -        self.generator = _ast_util.SourceGenerator(' ' * 4)
    +        self.generator = _ast_util.SourceGenerator(" " * 4)
             self.generator.visit(astnode)
     
         def value(self):
    -        return ''.join(self.generator.result)
    +        return "".join(self.generator.result)
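
Note: the parse() wrapper above re-raises any parse failure as a Mako SyntaxException carrying the first 50 characters of the code. The source/lineno/pos/filename keywords in this sketch mirror the exception_kwargs dict built by parsetree.Node; the values here are illustrative.

    from mako import exceptions
    from mako import pyparser

    tree = pyparser.parse("x + y", "exec")   # a regular _ast Module node

    try:
        pyparser.parse(
            "def broken(:", "exec",
            source="def broken(:", lineno=1, pos=1, filename="<memory>",
        )
    except exceptions.SyntaxException as err:
        print(err)   # roughly: (SyntaxError) invalid syntax ('def broken(:')
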
    diff --git a/server/www/packages/packages-windows/x86/mako/runtime.py b/server/www/packages/packages-windows/x86/mako/runtime.py
    index 769541c..465908e 100644
    --- a/server/www/packages/packages-windows/x86/mako/runtime.py
    +++ b/server/www/packages/packages-windows/x86/mako/runtime.py
    @@ -1,5 +1,5 @@
     # mako/runtime.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -7,10 +7,14 @@
     """provides runtime services for templates, including Context,
     Namespace, and various helper functions."""
     
    -from mako import exceptions, util, compat
    -from mako.compat import compat_builtins
    +import functools
     import sys
     
    +from mako import compat
    +from mako import exceptions
    +from mako import util
    +from mako.compat import compat_builtins
    +
     
     class Context(object):
     
    @@ -34,18 +38,19 @@ class Context(object):
     
             # "capture" function which proxies to the
             # generic "capture" function
    -        self._data['capture'] = compat.partial(capture, self)
    +        self._data["capture"] = functools.partial(capture, self)
     
             # "caller" stack used by def calls with content
    -        self.caller_stack = self._data['caller'] = CallerStack()
    +        self.caller_stack = self._data["caller"] = CallerStack()
     
         def _set_with_template(self, t):
             self._with_template = t
             illegal_names = t.reserved_names.intersection(self._data)
             if illegal_names:
                 raise exceptions.NameConflictError(
    -                "Reserved words passed to render(): %s" %
    -                ", ".join(illegal_names))
    +                "Reserved words passed to render(): %s"
    +                % ", ".join(illegal_names)
    +            )
     
         @property
         def lookup(self):
    @@ -177,14 +182,13 @@ class Context(object):
     
             c = self._copy()
             x = c._data
    -        x.pop('self', None)
    -        x.pop('parent', None)
    -        x.pop('next', None)
    +        x.pop("self", None)
    +        x.pop("parent", None)
    +        x.pop("next", None)
             return c
     
     
     class CallerStack(list):
    -
         def __init__(self):
             self.nextcaller = None
     
    @@ -231,6 +235,7 @@ class Undefined(object):
         def __bool__(self):
             return False
     
    +
     UNDEFINED = Undefined()
     STOP_RENDERING = ""
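
Note: for reference, the UNDEFINED singleton above behaves as sketched below. __bool__ is shown in this hunk; __str__ raising NameError comes from the surrounding Mako source, not from this diff.

    from mako.runtime import UNDEFINED

    assert not UNDEFINED        # __bool__ above returns False
    try:
        str(UNDEFINED)
    except NameError as err:
        print(err)              # "Undefined"
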
     
    @@ -342,7 +347,6 @@ class LoopContext(object):
     
     
     class _NSAttr(object):
    -
         def __init__(self, parent):
             self.__parent = parent
     
    @@ -373,9 +377,15 @@ class Namespace(object):
     
           """
     
    -    def __init__(self, name, context,
    -                 callables=None, inherits=None,
    -                 populate_self=True, calling_uri=None):
    +    def __init__(
    +        self,
    +        name,
    +        context,
    +        callables=None,
    +        inherits=None,
    +        populate_self=True,
    +        calling_uri=None,
    +    ):
             self.name = name
             self.context = context
             self.inherits = inherits
    @@ -473,9 +483,12 @@ class Namespace(object):
             if key in self.context.namespaces:
                 return self.context.namespaces[key]
             else:
    -            ns = TemplateNamespace(uri, self.context._copy(),
    -                                   templateuri=uri,
    -                                   calling_uri=self._templateuri)
    +            ns = TemplateNamespace(
    +                uri,
    +                self.context._copy(),
    +                templateuri=uri,
    +                calling_uri=self._templateuri,
    +            )
                 self.context.namespaces[key] = ns
                 return ns
     
    @@ -518,7 +531,7 @@ class Namespace(object):
     
         def _populate(self, d, l):
             for ident in l:
    -            if ident == '*':
    +            if ident == "*":
                     for (k, v) in self._get_star():
                         d[k] = v
                 else:
    @@ -536,8 +549,8 @@ class Namespace(object):
                 val = getattr(self.inherits, key)
             else:
                 raise AttributeError(
    -                "Namespace '%s' has no member '%s'" %
    -                (self.name, key))
    +                "Namespace '%s' has no member '%s'" % (self.name, key)
    +            )
             setattr(self, key, val)
             return val
     
    @@ -546,9 +559,17 @@ class TemplateNamespace(Namespace):
     
         """A :class:`.Namespace` specific to a :class:`.Template` instance."""
     
    -    def __init__(self, name, context, template=None, templateuri=None,
    -                 callables=None, inherits=None,
    -                 populate_self=True, calling_uri=None):
    +    def __init__(
    +        self,
    +        name,
    +        context,
    +        template=None,
    +        templateuri=None,
    +        callables=None,
    +        inherits=None,
    +        populate_self=True,
    +        calling_uri=None,
    +    ):
             self.name = name
             self.context = context
             self.inherits = inherits
    @@ -556,8 +577,7 @@ class TemplateNamespace(Namespace):
                 self.callables = dict([(c.__name__, c) for c in callables])
     
             if templateuri is not None:
    -            self.template = _lookup_template(context, templateuri,
    -                                             calling_uri)
    +            self.template = _lookup_template(context, templateuri, calling_uri)
                 self._templateuri = self.template.module._template_uri
             elif template is not None:
                 self.template = template
    @@ -566,9 +586,9 @@ class TemplateNamespace(Namespace):
                 raise TypeError("'template' argument is required.")
     
             if populate_self:
    -            lclcallable, lclcontext = \
    -                _populate_self_namespace(context, self.template,
    -                                         self_ns=self)
    +            lclcallable, lclcontext = _populate_self_namespace(
    +                context, self.template, self_ns=self
    +            )
     
         @property
         def module(self):
    @@ -606,7 +626,8 @@ class TemplateNamespace(Namespace):
     
             def get(key):
                 callable_ = self.template._get_def_callable(key)
    -            return compat.partial(callable_, self.context)
    +            return functools.partial(callable_, self.context)
    +
             for k in self.template.module._exports:
                 yield (k, get(k))
     
    @@ -615,14 +636,14 @@ class TemplateNamespace(Namespace):
                 val = self.callables[key]
             elif self.template.has_def(key):
                 callable_ = self.template._get_def_callable(key)
    -            val = compat.partial(callable_, self.context)
    +            val = functools.partial(callable_, self.context)
             elif self.inherits:
                 val = getattr(self.inherits, key)
     
             else:
                 raise AttributeError(
    -                "Namespace '%s' has no member '%s'" %
    -                (self.name, key))
    +                "Namespace '%s' has no member '%s'" % (self.name, key)
    +            )
             setattr(self, key, val)
             return val
     
    @@ -631,9 +652,16 @@ class ModuleNamespace(Namespace):
     
         """A :class:`.Namespace` specific to a Python module instance."""
     
    -    def __init__(self, name, context, module,
    -                 callables=None, inherits=None,
    -                 populate_self=True, calling_uri=None):
    +    def __init__(
    +        self,
    +        name,
    +        context,
    +        module,
    +        callables=None,
    +        inherits=None,
    +        populate_self=True,
    +        calling_uri=None,
    +    ):
             self.name = name
             self.context = context
             self.inherits = inherits
    @@ -641,7 +669,7 @@ class ModuleNamespace(Namespace):
                 self.callables = dict([(c.__name__, c) for c in callables])
     
             mod = __import__(module)
    -        for token in module.split('.')[1:]:
    +        for token in module.split(".")[1:]:
                 mod = getattr(mod, token)
             self.module = mod
     
    @@ -657,23 +685,23 @@ class ModuleNamespace(Namespace):
                 for key in self.callables:
                     yield (key, self.callables[key])
             for key in dir(self.module):
    -            if key[0] != '_':
    +            if key[0] != "_":
                     callable_ = getattr(self.module, key)
    -                if compat.callable(callable_):
    -                    yield key, compat.partial(callable_, self.context)
    +                if callable(callable_):
    +                    yield key, functools.partial(callable_, self.context)
     
         def __getattr__(self, key):
             if key in self.callables:
                 val = self.callables[key]
             elif hasattr(self.module, key):
                 callable_ = getattr(self.module, key)
    -            val = compat.partial(callable_, self.context)
    +            val = functools.partial(callable_, self.context)
             elif self.inherits:
                 val = getattr(self.inherits, key)
             else:
                 raise AttributeError(
    -                "Namespace '%s' has no member '%s'" %
    -                (self.name, key))
    +                "Namespace '%s' has no member '%s'" % (self.name, key)
    +            )
             setattr(self, key, val)
             return val
     
    @@ -692,6 +720,7 @@ def supports_caller(func):
                 return func(context, *args, **kwargs)
             finally:
                 context.caller_stack._pop_frame()
    +
         return wrap_stackframe
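
Note: supports_caller above wraps a module-level def so the caller stack stays balanced around the call. A hedged sketch of the documented usage pattern for defs exposed through <%namespace module="..."/>; the names are illustrative:

    from mako.runtime import supports_caller

    @supports_caller
    def layout(context):
        # context["caller"].body() invokes the body supplied by the
        # calling <%call> / <%self:layout> block in the template.
        context.write("<header>")
        context["caller"].body()
        context.write("</header>")
        return ""
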
     
     
    @@ -703,7 +732,7 @@ def capture(context, callable_, *args, **kwargs):
     
         """
     
    -    if not compat.callable(callable_):
    +    if not callable(callable_):
             raise exceptions.RuntimeException(
                 "capture() function expects a callable as "
                 "its argument (i.e. capture(func, *args, **kwargs))"
    @@ -721,13 +750,16 @@ def _decorate_toplevel(fn):
             def go(context, *args, **kw):
                 def y(*args, **kw):
                     return render_fn(context, *args, **kw)
    +
                 try:
                     y.__name__ = render_fn.__name__[7:]
                 except TypeError:
                     # < Python 2.4
                     pass
                 return fn(y)(context, *args, **kw)
    +
             return go
    +
         return decorate_render
     
     
    @@ -737,7 +769,9 @@ def _decorate_inline(context, fn):
     
             def go(*args, **kw):
                 return dec(context, *args, **kw)
    +
             return go
    +
         return decorate_render
     
     
    @@ -747,8 +781,8 @@ def _include_file(context, uri, calling_uri, **kwargs):
     
         template = _lookup_template(context, uri, calling_uri)
         (callable_, ctx) = _populate_self_namespace(
    -        context._clean_inheritance_tokens(),
    -        template)
    +        context._clean_inheritance_tokens(), template
    +    )
         kwargs = _kwargs_for_include(callable_, context._data, **kwargs)
         if template.include_error_handler:
             try:
    @@ -769,23 +803,25 @@ def _inherit_from(context, uri, calling_uri):
         if uri is None:
             return None
         template = _lookup_template(context, uri, calling_uri)
    -    self_ns = context['self']
    +    self_ns = context["self"]
         ih = self_ns
         while ih.inherits is not None:
             ih = ih.inherits
    -    lclcontext = context._locals({'next': ih})
    -    ih.inherits = TemplateNamespace("self:%s" % template.uri,
    -                                    lclcontext,
    -                                    template=template,
    -                                    populate_self=False)
    -    context._data['parent'] = lclcontext._data['local'] = ih.inherits
    -    callable_ = getattr(template.module, '_mako_inherit', None)
    +    lclcontext = context._locals({"next": ih})
    +    ih.inherits = TemplateNamespace(
    +        "self:%s" % template.uri,
    +        lclcontext,
    +        template=template,
    +        populate_self=False,
    +    )
    +    context._data["parent"] = lclcontext._data["local"] = ih.inherits
    +    callable_ = getattr(template.module, "_mako_inherit", None)
         if callable_ is not None:
             ret = callable_(template, lclcontext)
             if ret:
                 return ret
     
    -    gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
    +    gen_ns = getattr(template.module, "_mako_generate_namespaces", None)
         if gen_ns is not None:
             gen_ns(context)
         return (template.callable_, lclcontext)
    @@ -795,8 +831,9 @@ def _lookup_template(context, uri, relativeto):
         lookup = context._with_template.lookup
         if lookup is None:
             raise exceptions.TemplateLookupException(
    -            "Template '%s' has no TemplateLookup associated" %
    -            context._with_template.uri)
    +            "Template '%s' has no TemplateLookup associated"
    +            % context._with_template.uri
    +        )
         uri = lookup.adjust_uri(uri, relativeto)
         try:
             return lookup.get_template(uri)
    @@ -806,11 +843,14 @@ def _lookup_template(context, uri, relativeto):
     
     def _populate_self_namespace(context, template, self_ns=None):
         if self_ns is None:
    -        self_ns = TemplateNamespace('self:%s' % template.uri,
    -                                    context, template=template,
    -                                    populate_self=False)
    -    context._data['self'] = context._data['local'] = self_ns
    -    if hasattr(template.module, '_mako_inherit'):
    +        self_ns = TemplateNamespace(
    +            "self:%s" % template.uri,
    +            context,
    +            template=template,
    +            populate_self=False,
    +        )
    +    context._data["self"] = context._data["local"] = self_ns
    +    if hasattr(template.module, "_mako_inherit"):
             ret = template.module._mako_inherit(template, context)
             if ret:
                 return ret
    @@ -829,18 +869,24 @@ def _render(template, callable_, args, data, as_unicode=False):
             buf = util.FastEncodingBuffer(
                 as_unicode=as_unicode,
                 encoding=template.output_encoding,
    -            errors=template.encoding_errors)
    +            errors=template.encoding_errors,
    +        )
         context = Context(buf, **data)
         context._outputting_as_unicode = as_unicode
         context._set_with_template(template)
     
    -    _render_context(template, callable_, context, *args,
    -                    **_kwargs_for_callable(callable_, data))
    +    _render_context(
    +        template,
    +        callable_,
    +        context,
    +        *args,
    +        **_kwargs_for_callable(callable_, data)
    +    )
         return context._pop_buffer().getvalue()
     
     
     def _kwargs_for_callable(callable_, data):
    -    argspec = compat.inspect_func_args(callable_)
    +    argspec = compat.inspect_getargspec(callable_)
         # for normal pages, **pageargs is usually present
         if argspec[2]:
             return data
    @@ -849,22 +895,23 @@ def _kwargs_for_callable(callable_, data):
         namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
         kwargs = {}
         for arg in namedargs:
    -        if arg != 'context' and arg in data and arg not in kwargs:
    +        if arg != "context" and arg in data and arg not in kwargs:
                 kwargs[arg] = data[arg]
         return kwargs
     
     
     def _kwargs_for_include(callable_, data, **kwargs):
    -    argspec = compat.inspect_func_args(callable_)
    +    argspec = compat.inspect_getargspec(callable_)
         namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
         for arg in namedargs:
    -        if arg != 'context' and arg in data and arg not in kwargs:
    +        if arg != "context" and arg in data and arg not in kwargs:
                 kwargs[arg] = data[arg]
         return kwargs
     
     
     def _render_context(tmpl, callable_, context, *args, **kwargs):
         import mako.template as template
    +
         # create polymorphic 'self' namespace for this
         # template with possibly updated context
         if not isinstance(tmpl, template.DefTemplate):
    @@ -886,8 +933,9 @@ def _exec_template(callable_, context, args=None, kwargs=None):
         be interpreted here.
         """
         template = context._with_template
    -    if template is not None and \
    -            (template.format_exceptions or template.error_handler):
    +    if template is not None and (
    +        template.format_exceptions or template.error_handler
    +    ):
             try:
                 callable_(context, *args, **kwargs)
             except Exception:
    @@ -908,11 +956,15 @@ def _render_error(template, context, error):
             error_template = exceptions.html_error_template()
             if context._outputting_as_unicode:
                 context._buffer_stack[:] = [
    -                util.FastEncodingBuffer(as_unicode=True)]
    +                util.FastEncodingBuffer(as_unicode=True)
    +            ]
             else:
    -            context._buffer_stack[:] = [util.FastEncodingBuffer(
    -                error_template.output_encoding,
    -                error_template.encoding_errors)]
    +            context._buffer_stack[:] = [
    +                util.FastEncodingBuffer(
    +                    error_template.output_encoding,
    +                    error_template.encoding_errors,
    +                )
    +            ]
     
             context._set_with_template(error_template)
             error_template.render_context(context, error=error)
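
For orientation, the runtime.py hunks above swap mako's compat shims for the standard library (builtin callable(), functools.partial) and reflow the code to black style. A minimal sketch of the binding pattern ModuleNamespace relies on, with `greet` and `fake_context` as hypothetical stand-ins rather than mako API:

        import functools

        # Module-level functions are exposed through the namespace with the
        # template context pre-bound as the first positional argument.
        def greet(context, name):
            return "Hello, %s!" % name

        fake_context = object()  # stands in for a mako.runtime.Context
        bound = functools.partial(greet, fake_context)
        print(bound("world"))  # Hello, world!
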
    diff --git a/server/www/packages/packages-windows/x86/mako/template.py b/server/www/packages/packages-windows/x86/mako/template.py
    index 329632c..3fd0871 100644
    --- a/server/www/packages/packages-windows/x86/mako/template.py
    +++ b/server/www/packages/packages-windows/x86/mako/template.py
    @@ -1,5 +1,5 @@
     # mako/template.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    @@ -7,8 +7,7 @@
     """Provides the Template class, a facade for parsing, generating and executing
     template strings, as well as template runtime operations."""
     
    -from mako.lexer import Lexer
    -from mako import runtime, util, exceptions, codegen, cache, compat
    +import json
     import os
     import re
     import shutil
    @@ -18,6 +17,14 @@ import tempfile
     import types
     import weakref
     
    +from mako import cache
    +from mako import codegen
    +from mako import compat
    +from mako import exceptions
    +from mako import runtime
    +from mako import util
    +from mako.lexer import Lexer
    +
     
     class Template(object):
     
    @@ -230,41 +237,43 @@ class Template(object):
     
         lexer_cls = Lexer
     
    -    def __init__(self,
    -                 text=None,
    -                 filename=None,
    -                 uri=None,
    -                 format_exceptions=False,
    -                 error_handler=None,
    -                 lookup=None,
    -                 output_encoding=None,
    -                 encoding_errors='strict',
    -                 module_directory=None,
    -                 cache_args=None,
    -                 cache_impl='beaker',
    -                 cache_enabled=True,
    -                 cache_type=None,
    -                 cache_dir=None,
    -                 cache_url=None,
    -                 module_filename=None,
    -                 input_encoding=None,
    -                 disable_unicode=False,
    -                 module_writer=None,
    -                 bytestring_passthrough=False,
    -                 default_filters=None,
    -                 buffer_filters=(),
    -                 strict_undefined=False,
    -                 imports=None,
    -                 future_imports=None,
    -                 enable_loop=True,
    -                 preprocessor=None,
    -                 lexer_cls=None,
    -                 include_error_handler=None):
    +    def __init__(
    +        self,
    +        text=None,
    +        filename=None,
    +        uri=None,
    +        format_exceptions=False,
    +        error_handler=None,
    +        lookup=None,
    +        output_encoding=None,
    +        encoding_errors="strict",
    +        module_directory=None,
    +        cache_args=None,
    +        cache_impl="beaker",
    +        cache_enabled=True,
    +        cache_type=None,
    +        cache_dir=None,
    +        cache_url=None,
    +        module_filename=None,
    +        input_encoding=None,
    +        disable_unicode=False,
    +        module_writer=None,
    +        bytestring_passthrough=False,
    +        default_filters=None,
    +        buffer_filters=(),
    +        strict_undefined=False,
    +        imports=None,
    +        future_imports=None,
    +        enable_loop=True,
    +        preprocessor=None,
    +        lexer_cls=None,
    +        include_error_handler=None,
    +    ):
             if uri:
    -            self.module_id = re.sub(r'\W', "_", uri)
    +            self.module_id = re.sub(r"\W", "_", uri)
                 self.uri = uri
             elif filename:
    -            self.module_id = re.sub(r'\W', "_", filename)
    +            self.module_id = re.sub(r"\W", "_", filename)
                 drive, path = os.path.splitdrive(filename)
                 path = os.path.normpath(path).replace(os.path.sep, "/")
                 self.uri = path
    @@ -278,9 +287,10 @@ class Template(object):
             u_norm = os.path.normpath(u_norm)
             if u_norm.startswith(".."):
                 raise exceptions.TemplateLookupException(
    -                "Template uri \"%s\" is invalid - "
    +                'Template uri "%s" is invalid - '
                     "it cannot be relative outside "
    -                "of the root path." % self.uri)
    +                "of the root path." % self.uri
    +            )
     
             self.input_encoding = input_encoding
             self.output_encoding = output_encoding
    @@ -293,17 +303,18 @@ class Template(object):
     
             if compat.py3k and disable_unicode:
                 raise exceptions.UnsupportedError(
    -                "Mako for Python 3 does not "
    -                "support disabling Unicode")
    +                "Mako for Python 3 does not " "support disabling Unicode"
    +            )
             elif output_encoding and disable_unicode:
                 raise exceptions.UnsupportedError(
                     "output_encoding must be set to "
    -                "None when disable_unicode is used.")
    +                "None when disable_unicode is used."
    +            )
             if default_filters is None:
                 if compat.py3k or self.disable_unicode:
    -                self.default_filters = ['str']
    +                self.default_filters = ["str"]
                 else:
    -                self.default_filters = ['unicode']
    +                self.default_filters = ["unicode"]
             else:
                 self.default_filters = default_filters
             self.buffer_filters = buffer_filters
    @@ -320,7 +331,7 @@ class Template(object):
                 (code, module) = _compile_text(self, text, filename)
                 self._code = code
                 self._source = text
    -            ModuleInfo(module, None, self, filename, code, text)
    +            ModuleInfo(module, None, self, filename, code, text, uri)
             elif filename is not None:
                 # if template filename and a module directory, load
                 # a filesystem-based module file, generating if needed
    @@ -329,8 +340,7 @@ class Template(object):
                 elif module_directory is not None:
                     path = os.path.abspath(
                         os.path.join(
    -                        os.path.normpath(module_directory),
    -                        u_norm + ".py"
    +                        os.path.normpath(module_directory), u_norm + ".py"
                         )
                     )
                 else:
    @@ -338,7 +348,8 @@ class Template(object):
                 module = self._compile_from_file(path, filename)
             else:
                 raise exceptions.RuntimeException(
    -                "Template requires text or filename")
    +                "Template requires text or filename"
    +            )
     
             self.module = module
             self.filename = filename
    @@ -351,8 +362,12 @@ class Template(object):
             self.module_directory = module_directory
     
             self._setup_cache_args(
    -            cache_impl, cache_enabled, cache_args,
    -            cache_type, cache_dir, cache_url
    +            cache_impl,
    +            cache_enabled,
    +            cache_args,
    +            cache_type,
    +            cache_dir,
    +            cache_url,
             )
     
         @util.memoized_property
    @@ -360,11 +375,17 @@ class Template(object):
             if self.enable_loop:
                 return codegen.RESERVED_NAMES
             else:
    -            return codegen.RESERVED_NAMES.difference(['loop'])
    +            return codegen.RESERVED_NAMES.difference(["loop"])
     
    -    def _setup_cache_args(self,
    -                          cache_impl, cache_enabled, cache_args,
    -                          cache_type, cache_dir, cache_url):
    +    def _setup_cache_args(
    +        self,
    +        cache_impl,
    +        cache_enabled,
    +        cache_args,
    +        cache_type,
    +        cache_dir,
    +        cache_url,
    +    ):
             self.cache_impl = cache_impl
             self.cache_enabled = cache_enabled
             if cache_args:
    @@ -374,49 +395,42 @@ class Template(object):
     
             # transfer deprecated cache_* args
             if cache_type:
    -            self.cache_args['type'] = cache_type
    +            self.cache_args["type"] = cache_type
             if cache_dir:
    -            self.cache_args['dir'] = cache_dir
    +            self.cache_args["dir"] = cache_dir
             if cache_url:
    -            self.cache_args['url'] = cache_url
    +            self.cache_args["url"] = cache_url
     
         def _compile_from_file(self, path, filename):
             if path is not None:
                 util.verify_directory(os.path.dirname(path))
                 filemtime = os.stat(filename)[stat.ST_MTIME]
    -            if not os.path.exists(path) or \
    -                    os.stat(path)[stat.ST_MTIME] < filemtime:
    +            if (
    +                not os.path.exists(path)
    +                or os.stat(path)[stat.ST_MTIME] < filemtime
    +            ):
                     data = util.read_file(filename)
                     _compile_module_file(
    -                    self,
    -                    data,
    -                    filename,
    -                    path,
    -                    self.module_writer)
    +                    self, data, filename, path, self.module_writer
    +                )
                 module = compat.load_module(self.module_id, path)
                 del sys.modules[self.module_id]
                 if module._magic_number != codegen.MAGIC_NUMBER:
                     data = util.read_file(filename)
                     _compile_module_file(
    -                    self,
    -                    data,
    -                    filename,
    -                    path,
    -                    self.module_writer)
    +                    self, data, filename, path, self.module_writer
    +                )
                     module = compat.load_module(self.module_id, path)
                     del sys.modules[self.module_id]
    -            ModuleInfo(module, path, self, filename, None, None)
    +            ModuleInfo(module, path, self, filename, None, None, None)
             else:
                 # template filename and no module directory, compile code
                 # in memory
                 data = util.read_file(filename)
    -            code, module = _compile_text(
    -                self,
    -                data,
    -                filename)
    +            code, module = _compile_text(self, data, filename)
                 self._source = None
                 self._code = code
    -            ModuleInfo(module, None, self, filename, code, None)
    +            ModuleInfo(module, None, self, filename, code, None, None)
             return module
     
         @property
    @@ -437,15 +451,15 @@ class Template(object):
     
         @property
         def cache_dir(self):
    -        return self.cache_args['dir']
    +        return self.cache_args["dir"]
     
         @property
         def cache_url(self):
    -        return self.cache_args['url']
    +        return self.cache_args["url"]
     
         @property
         def cache_type(self):
    -        return self.cache_args['type']
    +        return self.cache_args["type"]
     
         def render(self, *args, **data):
             """Render the output of this template as a string.
    @@ -464,11 +478,9 @@ class Template(object):
         def render_unicode(self, *args, **data):
             """Render the output of this template as a unicode object."""
     
    -        return runtime._render(self,
    -                               self.callable_,
    -                               args,
    -                               data,
    -                               as_unicode=True)
    +        return runtime._render(
    +            self, self.callable_, args, data, as_unicode=True
    +        )
     
         def render_context(self, context, *args, **kwargs):
             """Render this :class:`.Template` with the given context.
    @@ -476,13 +488,9 @@ class Template(object):
             The data is written to the context's buffer.
     
             """
    -        if getattr(context, '_with_template', None) is None:
    +        if getattr(context, "_with_template", None) is None:
                 context._set_with_template(self)
    -        runtime._render_context(self,
    -                                self.callable_,
    -                                context,
    -                                *args,
    -                                **kwargs)
    +        runtime._render_context(self, self.callable_, context, *args, **kwargs)
     
         def has_def(self, name):
             return hasattr(self.module, "render_%s" % name)
    @@ -498,7 +506,7 @@ class Template(object):
             .. versionadded:: 1.0.4
     
             """
    -        return [i[7:] for i in dir(self.module) if i[:7] == 'render_']
    +        return [i[7:] for i in dir(self.module) if i[:7] == "render_"]
     
         def _get_def_callable(self, name):
             return getattr(self.module, "render_%s" % name)
    @@ -512,42 +520,44 @@ class ModuleTemplate(Template):
     
         """A Template which is constructed given an existing Python module.
     
    -        e.g.::
    +       e.g.::
     
    -        t = Template("this is a template")
    -        f = file("mymodule.py", "w")
    -        f.write(t.code)
    -        f.close()
    +            t = Template("this is a template")
    +            f = file("mymodule.py", "w")
    +            f.write(t.code)
    +            f.close()
     
    -        import mymodule
    +            import mymodule
     
    -        t = ModuleTemplate(mymodule)
    -        print t.render()
    +            t = ModuleTemplate(mymodule)
    +            print(t.render())
     
         """
     
    -    def __init__(self, module,
    -                 module_filename=None,
    -                 template=None,
    -                 template_filename=None,
    -                 module_source=None,
    -                 template_source=None,
    -                 output_encoding=None,
    -                 encoding_errors='strict',
    -                 disable_unicode=False,
    -                 bytestring_passthrough=False,
    -                 format_exceptions=False,
    -                 error_handler=None,
    -                 lookup=None,
    -                 cache_args=None,
    -                 cache_impl='beaker',
    -                 cache_enabled=True,
    -                 cache_type=None,
    -                 cache_dir=None,
    -                 cache_url=None,
    -                 include_error_handler=None,
    -                 ):
    -        self.module_id = re.sub(r'\W', "_", module._template_uri)
    +    def __init__(
    +        self,
    +        module,
    +        module_filename=None,
    +        template=None,
    +        template_filename=None,
    +        module_source=None,
    +        template_source=None,
    +        output_encoding=None,
    +        encoding_errors="strict",
    +        disable_unicode=False,
    +        bytestring_passthrough=False,
    +        format_exceptions=False,
    +        error_handler=None,
    +        lookup=None,
    +        cache_args=None,
    +        cache_impl="beaker",
    +        cache_enabled=True,
    +        cache_type=None,
    +        cache_dir=None,
    +        cache_url=None,
    +        include_error_handler=None,
    +    ):
    +        self.module_id = re.sub(r"\W", "_", module._template_uri)
             self.uri = module._template_uri
             self.input_encoding = module._source_encoding
             self.output_encoding = output_encoding
    @@ -558,21 +568,25 @@ class ModuleTemplate(Template):
     
             if compat.py3k and disable_unicode:
                 raise exceptions.UnsupportedError(
    -                "Mako for Python 3 does not "
    -                "support disabling Unicode")
    +                "Mako for Python 3 does not " "support disabling Unicode"
    +            )
             elif output_encoding and disable_unicode:
                 raise exceptions.UnsupportedError(
                     "output_encoding must be set to "
    -                "None when disable_unicode is used.")
    +                "None when disable_unicode is used."
    +            )
     
             self.module = module
             self.filename = template_filename
    -        ModuleInfo(module,
    -                   module_filename,
    -                   self,
    -                   template_filename,
    -                   module_source,
    -                   template_source)
    +        ModuleInfo(
    +            module,
    +            module_filename,
    +            self,
    +            template_filename,
    +            module_source,
    +            template_source,
    +            module._template_uri,
    +        )
     
             self.callable_ = self.module.render_body
             self.format_exceptions = format_exceptions
    @@ -580,8 +594,12 @@ class ModuleTemplate(Template):
             self.include_error_handler = include_error_handler
             self.lookup = lookup
             self._setup_cache_args(
    -            cache_impl, cache_enabled, cache_args,
    -            cache_type, cache_dir, cache_url
    +            cache_impl,
    +            cache_enabled,
    +            cache_args,
    +            cache_type,
    +            cache_dir,
    +            cache_url,
             )
     
     
    @@ -614,20 +632,25 @@ class ModuleInfo(object):
         source code based on a module's identifier.
     
          """
    +
         _modules = weakref.WeakValueDictionary()
     
    -    def __init__(self,
    -                 module,
    -                 module_filename,
    -                 template,
    -                 template_filename,
    -                 module_source,
    -                 template_source):
    +    def __init__(
    +        self,
    +        module,
    +        module_filename,
    +        template,
    +        template_filename,
    +        module_source,
    +        template_source,
    +        template_uri,
    +    ):
             self.module = module
             self.module_filename = module_filename
             self.template_filename = template_filename
             self.module_source = module_source
             self.template_source = template_source
    +        self.template_uri = template_uri
             self._modules[module.__name__] = template._mmarker = self
             if module_filename:
                 self._modules[module_filename] = self
    @@ -635,15 +658,15 @@ class ModuleInfo(object):
         @classmethod
         def get_module_source_metadata(cls, module_source, full_line_map=False):
             source_map = re.search(
    -            r"__M_BEGIN_METADATA(.+?)__M_END_METADATA",
    -            module_source, re.S).group(1)
    -        source_map = compat.json.loads(source_map)
    -        source_map['line_map'] = dict(
    -            (int(k), int(v))
    -            for k, v in source_map['line_map'].items())
    +            r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S
    +        ).group(1)
    +        source_map = json.loads(source_map)
    +        source_map["line_map"] = dict(
    +            (int(k), int(v)) for k, v in source_map["line_map"].items()
    +        )
             if full_line_map:
    -            f_line_map = source_map['full_line_map'] = []
    -            line_map = source_map['line_map']
    +            f_line_map = source_map["full_line_map"] = []
    +            line_map = source_map["line_map"]
     
                 curr_templ_line = 1
                 for mod_line in range(1, max(line_map)):
    @@ -662,10 +685,12 @@ class ModuleInfo(object):
         @property
         def source(self):
             if self.template_source is not None:
    -            if self.module._source_encoding and \
    -                    not isinstance(self.template_source, compat.text_type):
    +            if self.module._source_encoding and not isinstance(
    +                self.template_source, compat.text_type
    +            ):
                     return self.template_source.decode(
    -                    self.module._source_encoding)
    +                    self.module._source_encoding
    +                )
                 else:
                     return self.template_source
             else:
    @@ -677,38 +702,46 @@ class ModuleInfo(object):
     
     
     def _compile(template, text, filename, generate_magic_comment):
    -    lexer = template.lexer_cls(text,
    -                               filename,
    -                               disable_unicode=template.disable_unicode,
    -                               input_encoding=template.input_encoding,
    -                               preprocessor=template.preprocessor)
    +    lexer = template.lexer_cls(
    +        text,
    +        filename,
    +        disable_unicode=template.disable_unicode,
    +        input_encoding=template.input_encoding,
    +        preprocessor=template.preprocessor,
    +    )
         node = lexer.parse()
    -    source = codegen.compile(node,
    -                             template.uri,
    -                             filename,
    -                             default_filters=template.default_filters,
    -                             buffer_filters=template.buffer_filters,
    -                             imports=template.imports,
    -                             future_imports=template.future_imports,
    -                             source_encoding=lexer.encoding,
    -                             generate_magic_comment=generate_magic_comment,
    -                             disable_unicode=template.disable_unicode,
    -                             strict_undefined=template.strict_undefined,
    -                             enable_loop=template.enable_loop,
    -                             reserved_names=template.reserved_names)
    +    source = codegen.compile(
    +        node,
    +        template.uri,
    +        filename,
    +        default_filters=template.default_filters,
    +        buffer_filters=template.buffer_filters,
    +        imports=template.imports,
    +        future_imports=template.future_imports,
    +        source_encoding=lexer.encoding,
    +        generate_magic_comment=generate_magic_comment,
    +        disable_unicode=template.disable_unicode,
    +        strict_undefined=template.strict_undefined,
    +        enable_loop=template.enable_loop,
    +        reserved_names=template.reserved_names,
    +    )
         return source, lexer
     
     
     def _compile_text(template, text, filename):
         identifier = template.module_id
    -    source, lexer = _compile(template, text, filename,
    -                             generate_magic_comment=template.disable_unicode)
    +    source, lexer = _compile(
    +        template,
    +        text,
    +        filename,
    +        generate_magic_comment=template.disable_unicode,
    +    )
     
         cid = identifier
         if not compat.py3k and isinstance(cid, compat.text_type):
             cid = cid.encode()
         module = types.ModuleType(cid)
    -    code = compile(source, cid, 'exec')
    +    code = compile(source, cid, "exec")
     
         # this exec() works for 2.4->3.3.
         exec(code, module.__dict__, module.__dict__)
    @@ -716,11 +749,12 @@ def _compile_text(template, text, filename):
     
     
     def _compile_module_file(template, text, filename, outputpath, module_writer):
    -    source, lexer = _compile(template, text, filename,
    -                             generate_magic_comment=True)
    +    source, lexer = _compile(
    +        template, text, filename, generate_magic_comment=True
    +    )
     
         if isinstance(source, compat.text_type):
    -        source = source.encode(lexer.encoding or 'ascii')
    +        source = source.encode(lexer.encoding or "ascii")
     
         if module_writer:
             module_writer(source, outputpath)
    @@ -737,9 +771,9 @@ def _compile_module_file(template, text, filename, outputpath, module_writer):
     
     def _get_module_info_from_callable(callable_):
         if compat.py3k:
    -        return _get_module_info(callable_.__globals__['__name__'])
    +        return _get_module_info(callable_.__globals__["__name__"])
         else:
    -        return _get_module_info(callable_.func_globals['__name__'])
    +        return _get_module_info(callable_.func_globals["__name__"])
     
     
     def _get_module_info(filename):
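
The template.py changes above thread a new template_uri argument through ModuleInfo and parse module metadata with the stdlib json module instead of compat.json. A small self-contained illustration of the round-trip performed by get_module_source_metadata; the embedded JSON here is a made-up example of what codegen writes, not taken from a real compiled module:

        import json
        import re

        # Hypothetical compiled-template source; real modules embed this block.
        module_source = '''
        """
        __M_BEGIN_METADATA
        {"line_map": {"16": 1, "17": 2}, "uri": "index.html"}
        __M_END_METADATA
        """
        '''
        blob = re.search(
            r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S
        ).group(1)
        meta = json.loads(blob)
        # JSON keys arrive as strings; convert both sides as template.py does.
        line_map = dict((int(k), int(v)) for k, v in meta["line_map"].items())
        print(line_map)  # {16: 1, 17: 2}
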
    diff --git a/server/www/packages/packages-windows/x86/mako/util.py b/server/www/packages/packages-windows/x86/mako/util.py
    index 2f089ff..16e3c72 100644
    --- a/server/www/packages/packages-windows/x86/mako/util.py
    +++ b/server/www/packages/packages-windows/x86/mako/util.py
    @@ -1,15 +1,19 @@
     # mako/util.py
    -# Copyright (C) 2006-2016 the Mako authors and contributors <mako@zzzcomputing.com>
    +# Copyright 2006-2020 the Mako authors and contributors <mako@zzzcomputing.com>
     #
     # This module is part of Mako and is released under
     # the MIT License: http://www.opensource.org/licenses/mit-license.php
    +from __future__ import absolute_import
     
    -import re
    -import collections
    +from ast import parse
     import codecs
    -import os
    -from mako import compat
    +import collections
     import operator
    +import os
    +import re
    +import timeit
    +
    +from mako import compat
     
     
     def update_wrapper(decorated, fn):
    @@ -19,7 +23,6 @@ def update_wrapper(decorated, fn):
     
     
     class PluginLoader(object):
    -
         def __init__(self, group):
             self.group = group
             self.impls = {}
    @@ -29,16 +32,16 @@ class PluginLoader(object):
                 return self.impls[name]()
             else:
                 import pkg_resources
    -            for impl in pkg_resources.iter_entry_points(
    -                    self.group,
    -                    name):
    +
    +            for impl in pkg_resources.iter_entry_points(self.group, name):
                     self.impls[name] = impl.load
                     return impl.load()
                 else:
                     from mako import exceptions
    +
                     raise exceptions.RuntimeException(
    -                    "Can't load plugin %s %s" %
    -                    (self.group, name))
    +                    "Can't load plugin %s %s" % (self.group, name)
    +                )
     
         def register(self, name, modulepath, objname):
             def load():
    @@ -46,18 +49,19 @@ class PluginLoader(object):
                 for token in modulepath.split(".")[1:]:
                     mod = getattr(mod, token)
                 return getattr(mod, objname)
    +
             self.impls[name] = load
     
     
    -def verify_directory(dir):
    +def verify_directory(dir_):
         """create and/or verify a filesystem directory."""
     
         tries = 0
     
    -    while not os.path.exists(dir):
    +    while not os.path.exists(dir_):
             try:
                 tries += 1
    -            os.makedirs(dir, compat.octal("0775"))
    +            os.makedirs(dir_, compat.octal("0775"))
             except:
                 if tries > 5:
                     raise
    @@ -109,11 +113,15 @@ class memoized_instancemethod(object):
     
             def oneshot(*args, **kw):
                 result = self.fget(obj, *args, **kw)
    -            memo = lambda *a, **kw: result
    +
    +            def memo(*a, **kw):
    +                return result
    +
                 memo.__name__ = self.__name__
                 memo.__doc__ = self.__doc__
                 obj.__dict__[self.__name__] = memo
                 return result
    +
             oneshot.__name__ = self.__name__
             oneshot.__doc__ = self.__doc__
             return oneshot
    @@ -137,13 +145,13 @@ class FastEncodingBuffer(object):
         """a very rudimentary buffer that is faster than StringIO,
         but doesn't crash on unicode data like cStringIO."""
     
    -    def __init__(self, encoding=None, errors='strict', as_unicode=False):
    +    def __init__(self, encoding=None, errors="strict", as_unicode=False):
             self.data = collections.deque()
             self.encoding = encoding
             if as_unicode:
    -            self.delim = compat.u('')
    +            self.delim = compat.u("")
             else:
    -            self.delim = ''
    +            self.delim = ""
             self.as_unicode = as_unicode
             self.errors = errors
             self.write = self.data.append
    @@ -154,8 +162,9 @@ class FastEncodingBuffer(object):
     
         def getvalue(self):
             if self.encoding:
    -            return self.delim.join(self.data).encode(self.encoding,
    -                                                     self.errors)
    +            return self.delim.join(self.data).encode(
    +                self.encoding, self.errors
    +            )
             else:
                 return self.delim.join(self.data)
     
    @@ -171,22 +180,21 @@ class LRUCache(dict):
         """
     
         class _Item(object):
    -
             def __init__(self, key, value):
                 self.key = key
                 self.value = value
    -            self.timestamp = compat.time_func()
    +            self.timestamp = timeit.default_timer()
     
             def __repr__(self):
                 return repr(self.value)
     
    -    def __init__(self, capacity, threshold=.5):
    +    def __init__(self, capacity, threshold=0.5):
             self.capacity = capacity
             self.threshold = threshold
     
         def __getitem__(self, key):
             item = dict.__getitem__(self, key)
    -        item.timestamp = compat.time_func()
    +        item.timestamp = timeit.default_timer()
             return item.value
     
         def values(self):
    @@ -210,9 +218,12 @@ class LRUCache(dict):
     
         def _manage_size(self):
             while len(self) > self.capacity + self.capacity * self.threshold:
    -            bytime = sorted(dict.values(self),
    -                            key=operator.attrgetter('timestamp'), reverse=True)
    -            for item in bytime[self.capacity:]:
    +            bytime = sorted(
    +                dict.values(self),
    +                key=operator.attrgetter("timestamp"),
    +                reverse=True,
    +            )
    +            for item in bytime[self.capacity :]:
                     try:
                         del self[item.key]
                     except KeyError:
    @@ -220,10 +231,11 @@ class LRUCache(dict):
                         # broke in on us. loop around and try again
                         break
     
    +
     # Regexp to match python magic encoding line
     _PYTHON_MAGIC_COMMENT_re = re.compile(
    -    r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
    -    re.VERBOSE)
    +    r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE
    +)
     
     
     def parse_encoding(fp):
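
The rewritten _PYTHON_MAGIC_COMMENT_re above is unchanged in behavior, only reformatted. As a quick sanity check, here it is matching a PEP 263 coding cookie (the sample line is invented):

        import re

        _PYTHON_MAGIC_COMMENT_re = re.compile(
            r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE
        )
        m = _PYTHON_MAGIC_COMMENT_re.match("# -*- coding: utf-8 -*-\n")
        print(m.group(1))  # utf-8
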
    @@ -242,13 +254,12 @@ def parse_encoding(fp):
             line1 = fp.readline()
             has_bom = line1.startswith(codecs.BOM_UTF8)
             if has_bom:
    -            line1 = line1[len(codecs.BOM_UTF8):]
    +            line1 = line1[len(codecs.BOM_UTF8) :]
     
    -        m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
    +        m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore"))
             if not m:
                 try:
    -                import parser
    -                parser.suite(line1.decode('ascii', 'ignore'))
    +                parse(line1.decode("ascii", "ignore"))
                 except (ImportError, SyntaxError):
                     # Either it's a real syntax error, in which case the source
                     # is not valid python source, or line2 is a continuation of
    @@ -258,14 +269,16 @@ def parse_encoding(fp):
                 else:
                     line2 = fp.readline()
                     m = _PYTHON_MAGIC_COMMENT_re.match(
    -                    line2.decode('ascii', 'ignore'))
    +                    line2.decode("ascii", "ignore")
    +                )
     
             if has_bom:
                 if m:
                     raise SyntaxError(
                         "python refuses to compile code with both a UTF8"
    -                    " byte-order-mark and a magic encoding comment")
    -            return 'utf_8'
    +                    " byte-order-mark and a magic encoding comment"
    +                )
    +            return "utf_8"
             elif m:
                 return m.group(1)
             else:
    @@ -289,10 +302,11 @@ def restore__ast(_ast):
         """Attempt to restore the required classes to the _ast module if it
         appears to be missing them
         """
    -    if hasattr(_ast, 'AST'):
    +    if hasattr(_ast, "AST"):
             return
         _ast.PyCF_ONLY_AST = 2 << 9
    -    m = compile("""\
    +    m = compile(
    +        """\
     def foo(): pass
     class Bar(object): pass
     if False: pass
    @@ -305,13 +319,17 @@ baz = 'mako'
     baz and 'foo' or 'bar'
     (mako is baz == baz) is not baz != mako
     mako > baz < mako >= baz <= mako
    -mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
    +mako in baz not in mako""",
    +        "",
    +        "exec",
    +        _ast.PyCF_ONLY_AST,
    +    )
         _ast.Module = type(m)
     
         for cls in _ast.Module.__mro__:
    -        if cls.__name__ == 'mod':
    +        if cls.__name__ == "mod":
                 _ast.mod = cls
    -        elif cls.__name__ == 'AST':
    +        elif cls.__name__ == "AST":
                 _ast.AST = cls
     
         _ast.FunctionDef = type(m.body[0])
    @@ -361,7 +379,7 @@ mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST)
         _ast.NotIn = type(m.body[12].value.ops[1])
     
     
    -def read_file(path, mode='rb'):
    +def read_file(path, mode="rb"):
         fp = open(path, mode)
         try:
             data = fp.read()
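
The util.py hunks above also move LRUCache timestamps from the removed compat.time_func to timeit.default_timer. A toy model of the eviction pass in _manage_size, with explicit timestamps so the output is deterministic (`Item` is a stand-in for the real nested _Item class):

        import operator
        import timeit

        class Item(object):
            def __init__(self, key, ts):
                self.key = key
                self.timestamp = ts  # last-access time, as in LRUCache._Item

        now = timeit.default_timer()
        items = [Item("a", now - 2.0), Item("b", now - 1.0), Item("c", now)]
        capacity = 2
        # Sort newest-first and drop everything past `capacity`.
        bytime = sorted(items, key=operator.attrgetter("timestamp"), reverse=True)
        print([i.key for i in bytime[capacity:]])  # ['a'] -- least recently used
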
    diff --git a/server/www/packages/packages-windows/x86/psutil/__init__.py b/server/www/packages/packages-windows/x86/psutil/__init__.py
    index e129965..22bb46f 100644
    --- a/server/www/packages/packages-windows/x86/psutil/__init__.py
    +++ b/server/www/packages/packages-windows/x86/psutil/__init__.py
    @@ -17,7 +17,7 @@ sensors) in Python. Supported platforms:
      - Sun Solaris
      - AIX
     
    -Works with Python versions from 2.6 to 3.X.
    +Works with Python versions from 2.6 to 3.4+.
     """
     
     from __future__ import division
    @@ -25,25 +25,31 @@ from __future__ import division
     import collections
     import contextlib
     import datetime
    -import errno
     import functools
     import os
     import signal
     import subprocess
     import sys
    +import threading
     import time
    -import traceback
     try:
         import pwd
     except ImportError:
         pwd = None
     
     from . import _common
    +from ._common import AccessDenied
     from ._common import deprecated_method
    +from ._common import Error
     from ._common import memoize
     from ._common import memoize_when_activated
    +from ._common import NoSuchProcess
    +from ._common import TimeoutExpired
     from ._common import wrap_numbers as _wrap_numbers
    +from ._common import ZombieProcess
     from ._compat import long
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import PY3 as _PY3
     
     from ._common import STATUS_DEAD
    @@ -87,12 +93,6 @@ from ._common import POSIX  # NOQA
     from ._common import SUNOS
     from ._common import WINDOWS
     
    -from ._exceptions import AccessDenied
    -from ._exceptions import Error
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import TimeoutExpired
    -from ._exceptions import ZombieProcess
    -
     if LINUX:
         # This is public API and it will be retrieved from _pslinux.py
         # via sys.modules.
    @@ -152,6 +152,10 @@ elif WINDOWS:
         from ._psutil_windows import NORMAL_PRIORITY_CLASS  # NOQA
         from ._psutil_windows import REALTIME_PRIORITY_CLASS  # NOQA
         from ._pswindows import CONN_DELETE_TCB  # NOQA
    +    from ._pswindows import IOPRIO_VERYLOW  # NOQA
    +    from ._pswindows import IOPRIO_LOW  # NOQA
    +    from ._pswindows import IOPRIO_NORMAL  # NOQA
    +    from ._pswindows import IOPRIO_HIGH  # NOQA
     
     elif MACOS:
         from . import _psosx as _psplatform
    @@ -212,23 +216,26 @@ __all__ = [
         "pid_exists", "pids", "process_iter", "wait_procs",             # proc
         "virtual_memory", "swap_memory",                                # memory
         "cpu_times", "cpu_percent", "cpu_times_percent", "cpu_count",   # cpu
    -    "cpu_stats",  # "cpu_freq",
    +    "cpu_stats",  # "cpu_freq", "getloadavg"
         "net_io_counters", "net_connections", "net_if_addrs",           # network
         "net_if_stats",
         "disk_io_counters", "disk_partitions", "disk_usage",            # disk
         # "sensors_temperatures", "sensors_battery", "sensors_fans"     # sensors
         "users", "boot_time",                                           # others
     ]
    +
    +
     __all__.extend(_psplatform.__extra__all__)
     __author__ = "Giampaolo Rodola'"
    -__version__ = "5.4.7"
    +__version__ = "5.7.0"
     version_info = tuple([int(num) for num in __version__.split('.')])
    +
    +_timer = getattr(time, 'monotonic', time.time)
     AF_LINK = _psplatform.AF_LINK
     POWER_TIME_UNLIMITED = _common.POWER_TIME_UNLIMITED
     POWER_TIME_UNKNOWN = _common.POWER_TIME_UNKNOWN
     _TOTAL_PHYMEM = None
    -_timer = getattr(time, 'monotonic', time.time)
    -
    +_LOWEST_PID = None
     
     # Sanity check in case the user messed up with psutil installation
     # or did something weird with sys.path. In this case we might end
    @@ -353,7 +360,7 @@ class Process(object):
             self._create_time = None
             self._gone = False
             self._hash = None
    -        self._oneshot_inctx = False
    +        self._lock = threading.RLock()
             # used for caching on Windows only (on POSIX ppid may change)
             self._ppid = None
             # platform-specific modules define an _psplatform.Process
    @@ -457,40 +464,45 @@ class Process(object):
             ...
             >>>
             """
    -        if self._oneshot_inctx:
    -            # NOOP: this covers the use case where the user enters the
    -            # context twice. Since as_dict() internally uses oneshot()
    -            # I expect that the code below will be a pretty common
    -            # "mistake" that the user will make, so let's guard
    -            # against that:
    -            #
    -            # >>> with p.oneshot():
    -            # ...    p.as_dict()
    -            # ...
    -            yield
    -        else:
    -            self._oneshot_inctx = True
    -            try:
    -                # cached in case cpu_percent() is used
    -                self.cpu_times.cache_activate()
    -                # cached in case memory_percent() is used
    -                self.memory_info.cache_activate()
    -                # cached in case parent() is used
    -                self.ppid.cache_activate()
    -                # cached in case username() is used
    -                if POSIX:
    -                    self.uids.cache_activate()
    -                # specific implementation cache
    -                self._proc.oneshot_enter()
    +        with self._lock:
    +            if hasattr(self, "_cache"):
    +                # NOOP: this covers the use case where the user enters the
    +                # context twice:
    +                #
    +                # >>> with p.oneshot():
    +                # ...    with p.oneshot():
    +                # ...
    +                #
    +                # Also, since as_dict() internally uses oneshot()
    +                # I expect that the code below will be a pretty common
    +                # "mistake" that the user will make, so let's guard
    +                # against that:
    +                #
    +                # >>> with p.oneshot():
    +                # ...    p.as_dict()
    +                # ...
                     yield
    -            finally:
    -                self.cpu_times.cache_deactivate()
    -                self.memory_info.cache_deactivate()
    -                self.ppid.cache_deactivate()
    -                if POSIX:
    -                    self.uids.cache_deactivate()
    -                self._proc.oneshot_exit()
    -                self._oneshot_inctx = False
    +            else:
    +                try:
    +                    # cached in case cpu_percent() is used
    +                    self.cpu_times.cache_activate(self)
    +                    # cached in case memory_percent() is used
    +                    self.memory_info.cache_activate(self)
    +                    # cached in case parent() is used
    +                    self.ppid.cache_activate(self)
    +                    # cached in case username() is used
    +                    if POSIX:
    +                        self.uids.cache_activate(self)
    +                    # specific implementation cache
    +                    self._proc.oneshot_enter()
    +                    yield
    +                finally:
    +                    self.cpu_times.cache_deactivate(self)
    +                    self.memory_info.cache_deactivate(self)
    +                    self.ppid.cache_deactivate(self)
    +                    if POSIX:
    +                        self.uids.cache_deactivate(self)
    +                    self._proc.oneshot_exit()
     
         def as_dict(self, attrs=None, ad_value=None):
             """Utility method returning process information as a
    @@ -541,6 +553,9 @@ class Process(object):
             checking whether PID has been reused.
             If no parent is known return None.
             """
    +        lowest_pid = _LOWEST_PID if _LOWEST_PID is not None else pids()[0]
    +        if self.pid == lowest_pid:
    +            return None
             ppid = self.ppid()
             if ppid is not None:
                 ctime = self.create_time()
    @@ -552,6 +567,17 @@ class Process(object):
                 except NoSuchProcess:
                     pass
     
    +    def parents(self):
    +        """Return the parents of this process as a list of Process
    +        instances. If no parents are known return an empty list.
    +        """
    +        parents = []
    +        proc = self.parent()
    +        while proc is not None:
    +            parents.append(proc)
    +            proc = proc.parent()
    +        return parents
    +
         def is_running(self):
             """Return whether this process is running.
             It also checks if PID has been reused by another process in
    @@ -749,7 +775,7 @@ class Process(object):
                 """
                 return self._proc.io_counters()
     
    -    # Linux and Windows >= Vista only
    +    # Linux and Windows
         if hasattr(_psplatform.Process, "ionice_get"):
     
             def ionice(self, ioclass=None, value=None):
    @@ -800,9 +826,6 @@ class Process(object):
                 (and set).
                 (Windows, Linux and BSD only).
                 """
    -            # Automatically remove duplicates both on get and
    -            # set (for get it's not really necessary, it's
    -            # just for extra safety).
                 if cpus is None:
                     return list(set(self._proc.cpu_affinity_get()))
                 else:
    @@ -826,7 +849,7 @@ class Process(object):
                 """
                 return self._proc.cpu_num()
     
    -    # Linux, macOS and Windows only
    +    # Linux, macOS, Windows, Solaris, AIX
         if hasattr(_psplatform.Process, "environ"):
     
             def environ(self):
    @@ -1096,7 +1119,6 @@ class Process(object):
             return (value / float(total_phymem)) * 100
     
         if hasattr(_psplatform.Process, "memory_maps"):
    -        # Available everywhere except OpenBSD and NetBSD.
             def memory_maps(self, grouped=True):
                 """Return process' mapped memory regions as a list of namedtuples
                 whose fields are variable depending on the platform.
    @@ -1168,18 +1190,16 @@ class Process(object):
                         "calling process (os.getpid()) instead of PID 0")
                 try:
                     os.kill(self.pid, sig)
    -            except OSError as err:
    -                if err.errno == errno.ESRCH:
    -                    if OPENBSD and pid_exists(self.pid):
    -                        # We do this because os.kill() lies in case of
    -                        # zombie processes.
    -                        raise ZombieProcess(self.pid, self._name, self._ppid)
    -                    else:
    -                        self._gone = True
    -                        raise NoSuchProcess(self.pid, self._name)
    -                if err.errno in (errno.EPERM, errno.EACCES):
    -                    raise AccessDenied(self.pid, self._name)
    -                raise
    +            except ProcessLookupError:
    +                if OPENBSD and pid_exists(self.pid):
    +                    # We do this because os.kill() lies in case of
    +                    # zombie processes.
    +                    raise ZombieProcess(self.pid, self._name, self._ppid)
    +                else:
    +                    self._gone = True
    +                    raise NoSuchProcess(self.pid, self._name)
    +            except PermissionError:
    +                raise AccessDenied(self.pid, self._name)
     
         @_assert_pid_not_reused
         def send_signal(self, sig):
    @@ -1191,16 +1211,7 @@ class Process(object):
             if POSIX:
                 self._send_signal(sig)
             else:  # pragma: no cover
    -            if sig == signal.SIGTERM:
    -                self._proc.kill()
    -            # py >= 2.7
    -            elif sig in (getattr(signal, "CTRL_C_EVENT", object()),
    -                         getattr(signal, "CTRL_BREAK_EVENT", object())):
    -                self._proc.send_signal(sig)
    -            else:
    -                raise ValueError(
    -                    "only SIGTERM, CTRL_C_EVENT and CTRL_BREAK_EVENT signals "
    -                    "are supported on Windows")
    +            self._proc.send_signal(sig)
     
         @_assert_pid_not_reused
         def suspend(self):
    @@ -1248,6 +1259,8 @@ class Process(object):
         def wait(self, timeout=None):
             """Wait for process to terminate and, if process is a children
             of os.getpid(), also return its exit code, else None.
    +        On Windows there's no such limitation (exit code is always
    +        returned).
     
             If the process is already terminated immediately return None
             instead of raising NoSuchProcess.
    @@ -1299,7 +1312,7 @@ class Popen(Process):
         http://bugs.python.org/issue6973.
     
         For a complete documentation refer to:
    -    http://docs.python.org/library/subprocess.html
    +    http://docs.python.org/3/library/subprocess.html
         """
     
         def __init__(self, *args, **kwargs):
    @@ -1355,7 +1368,7 @@ class Popen(Process):
     _as_dict_attrnames = set(
         [x for x in dir(Process) if not x.startswith('_') and x not in
          ['send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait',
    -      'is_running', 'as_dict', 'parent', 'children', 'rlimit',
    +      'is_running', 'as_dict', 'parent', 'parents', 'children', 'rlimit',
           'memory_info_ex', 'oneshot']])
     
     
    @@ -1366,7 +1379,10 @@ _as_dict_attrnames = set(
     
     def pids():
         """Return a list of current running PIDs."""
    -    return _psplatform.pids()
    +    global _LOWEST_PID
    +    ret = sorted(_psplatform.pids())
    +    _LOWEST_PID = ret[0]
    +    return ret
     
     
     def pid_exists(pid):
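
pids() now returns a sorted list and records the lowest PID in _LOWEST_PID, which parent() above uses to stop at the top of the process tree. A trivial check (output is system-dependent):

        import psutil

        # The first element is the lowest PID (0 or 1 on most systems).
        all_pids = psutil.pids()
        print(all_pids[0], len(all_pids))
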
    @@ -1388,6 +1404,7 @@ def pid_exists(pid):
     
     
     _pmap = {}
    +_lock = threading.Lock()
     
     
     def process_iter(attrs=None, ad_value=None):
    @@ -1415,21 +1432,26 @@ def process_iter(attrs=None, ad_value=None):
             proc = Process(pid)
             if attrs is not None:
                 proc.info = proc.as_dict(attrs=attrs, ad_value=ad_value)
    -        _pmap[proc.pid] = proc
    +        with _lock:
    +            _pmap[proc.pid] = proc
             return proc
     
         def remove(pid):
    -        _pmap.pop(pid, None)
    +        with _lock:
    +            _pmap.pop(pid, None)
     
         a = set(pids())
         b = set(_pmap.keys())
         new_pids = a - b
         gone_pids = b - a
    -
         for pid in gone_pids:
             remove(pid)
    -    for pid, proc in sorted(list(_pmap.items()) +
    -                            list(dict.fromkeys(new_pids).items())):
    +
    +    with _lock:
    +        ls = sorted(list(_pmap.items()) +
    +                    list(dict.fromkeys(new_pids).items()))
    +
    +    for pid, proc in ls:
             try:
                 if proc is None:  # new process
                     yield add(pid)
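The new module-level lock exists because process_iter() mutates the shared _pmap cache and may legitimately be called from several threads at once. A rough equivalent of the guarded add/remove pattern:

    import threading

    _cache = {}
    _cache_lock = threading.Lock()

    def remember(pid, proc):
        with _cache_lock:
            _cache[pid] = proc

    def forget(pid):
        with _cache_lock:
            _cache.pop(pid, None)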
    @@ -1503,6 +1525,7 @@ def wait_procs(procs, timeout=None, callback=None):
                 pass
             else:
                 if returncode is not None or not proc.is_running():
    +                # Set new Process instance attribute.
                     proc.returncode = returncode
                     gone.add(proc)
                     if callback is not None:
    @@ -1609,14 +1632,12 @@ try:
     except Exception:
         # Don't want to crash at import time.
         _last_cpu_times = None
    -    traceback.print_exc()
     
     try:
         _last_per_cpu_times = cpu_times(percpu=True)
     except Exception:
         # Don't want to crash at import time.
         _last_per_cpu_times = None
    -    traceback.print_exc()
     
     
     def _cpu_tot_time(times):
    @@ -1864,18 +1885,41 @@ if hasattr(_psplatform, "cpu_freq"):
                     return ret[0]
                 else:
                     currs, mins, maxs = 0.0, 0.0, 0.0
    +                set_none = False
                     for cpu in ret:
                         currs += cpu.current
    +                    # On Linux if /proc/cpuinfo is used min/max are set
    +                    # to None.
    +                    if LINUX and cpu.min is None:
    +                        set_none = True
    +                        continue
                         mins += cpu.min
                         maxs += cpu.max
    +
                     current = currs / num_cpus
    -                min_ = mins / num_cpus
    -                max_ = maxs / num_cpus
    +
    +                if set_none:
    +                    min_ = max_ = None
    +                else:
    +                    min_ = mins / num_cpus
    +                    max_ = maxs / num_cpus
    +
                     return _common.scpufreq(current, min_, max_)
     
         __all__.append("cpu_freq")
     
     
    +if hasattr(os, "getloadavg") or hasattr(_psplatform, "getloadavg"):
     +    # Perform this hasattr check once at import time to either use the
     +    # platform-based code or proxy straight from the os module.
    +    if hasattr(os, "getloadavg"):
    +        getloadavg = os.getloadavg
    +    else:
    +        getloadavg = _psplatform.getloadavg
    +
    +    __all__.append("getloadavg")
    +
    +
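With the block above, psutil.getloadavg() proxies os.getloadavg() where the stdlib provides it (POSIX) and otherwise falls back to the platform module, which emulates the load average on Windows. Usage:

    import psutil

    load1, load5, load15 = psutil.getloadavg()
    print("1-minute load average: %.2f" % load1)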
     # =====================================================================
     # --- system memory related functions
     # =====================================================================
    @@ -1901,7 +1945,7 @@ def virtual_memory():
          - used:
             memory used, calculated differently depending on the platform and
             designed for informational purposes only:
    -        macOS: active + inactive + wired
    +        macOS: active + wired
             BSD: active + wired + cached
             Linux: total - free
     
    @@ -2297,19 +2341,16 @@ if WINDOWS:
     
     
     def test():  # pragma: no cover
    -    """List info of all currently running processes emulating ps aux
    -    output.
    -    """
    +    from ._common import bytes2human
    +    from ._compat import get_terminal_size
    +
         today_day = datetime.date.today()
    -    templ = "%-10s %5s %4s %7s %7s %-13s %5s %7s  %s"
    -    attrs = ['pid', 'memory_percent', 'name', 'cpu_times', 'create_time',
    -             'memory_info']
    -    if POSIX:
    -        attrs.append('uids')
    -        attrs.append('terminal')
    -    print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "TTY", "START", "TIME",
    -                   "COMMAND"))
    -    for p in process_iter(attrs=attrs, ad_value=''):
    +    templ = "%-10s %5s %5s %7s %7s %5s %6s %6s %6s  %s"
    +    attrs = ['pid', 'memory_percent', 'name', 'cmdline', 'cpu_times',
    +             'create_time', 'memory_info', 'status', 'nice', 'username']
    +    print(templ % ("USER", "PID", "%MEM", "VSZ", "RSS", "NICE",
    +                   "STATUS", "START", "TIME", "CMDLINE"))
    +    for p in process_iter(attrs, ad_value=None):
             if p.info['create_time']:
                 ctime = datetime.datetime.fromtimestamp(p.info['create_time'])
                 if ctime.date() == today_day:
    @@ -2318,30 +2359,46 @@ def test():  # pragma: no cover
                     ctime = ctime.strftime("%b%d")
             else:
                 ctime = ''
    -        cputime = time.strftime("%M:%S",
    -                                time.localtime(sum(p.info['cpu_times'])))
    -        try:
    -            user = p.username()
    -        except Error:
    -            user = ''
    -        if WINDOWS and '\\' in user:
    +        if p.info['cpu_times']:
    +            cputime = time.strftime("%M:%S",
    +                                    time.localtime(sum(p.info['cpu_times'])))
    +        else:
    +            cputime = ''
    +
    +        user = p.info['username'] or ''
    +        if not user and POSIX:
    +            try:
    +                user = p.uids()[0]
    +            except Error:
    +                pass
    +        if user and WINDOWS and '\\' in user:
                 user = user.split('\\')[1]
    -        vms = p.info['memory_info'] and \
    -            int(p.info['memory_info'].vms / 1024) or '?'
    -        rss = p.info['memory_info'] and \
    -            int(p.info['memory_info'].rss / 1024) or '?'
    -        memp = p.info['memory_percent'] and \
    -            round(p.info['memory_percent'], 1) or '?'
    -        print(templ % (
    +        user = user[:9]
    +        vms = bytes2human(p.info['memory_info'].vms) if \
    +            p.info['memory_info'] is not None else ''
    +        rss = bytes2human(p.info['memory_info'].rss) if \
    +            p.info['memory_info'] is not None else ''
    +        memp = round(p.info['memory_percent'], 1) if \
    +            p.info['memory_percent'] is not None else ''
    +        nice = int(p.info['nice']) if p.info['nice'] else ''
    +        if p.info['cmdline']:
    +            cmdline = ' '.join(p.info['cmdline'])
    +        else:
    +            cmdline = p.info['name']
    +        status = p.info['status'][:5] if p.info['status'] else ''
    +
    +        line = templ % (
                 user[:10],
                 p.info['pid'],
                 memp,
                 vms,
                 rss,
    -            p.info.get('terminal', '') or '?',
    +            nice,
    +            status,
                 ctime,
                 cputime,
    -            p.info['name'].strip() or '?'))
    +            cmdline)
    +        print(line[:get_terminal_size()[0]])
     
     
     del memoize, memoize_when_activated, division, deprecated_method
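The rewritten test() leans on the attrs/ad_value fast path of process_iter(); the same pattern is the cheap way to build a ps-like listing in user code:

    import psutil

    for p in psutil.process_iter(['pid', 'name', 'memory_percent'], ad_value=None):
        info = p.info  # dict; ad_value substitutes fields that raised AccessDenied
        print(info['pid'], info['name'], info['memory_percent'])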
    diff --git a/server/www/packages/packages-windows/x86/psutil/_common.py b/server/www/packages/packages-windows/x86/psutil/_common.py
    index 2cc3939..728d9c6 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_common.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_common.py
    @@ -7,7 +7,7 @@
     # Note: this module is imported by setup.py so it should not import
     # psutil or third-party modules.
     
    -from __future__ import division
    +from __future__ import division, print_function
     
     import contextlib
     import errno
    @@ -23,6 +23,7 @@ from collections import namedtuple
     from socket import AF_INET
     from socket import SOCK_DGRAM
     from socket import SOCK_STREAM
    +
     try:
         from socket import AF_INET6
     except ImportError:
    @@ -37,14 +38,14 @@ if sys.version_info >= (3, 4):
     else:
         enum = None
     
    +
     # can't take it from _common.py as this script is imported by setup.py
     PY3 = sys.version_info[0] == 3
     
     __all__ = [
    -    # constants
    +    # OS constants
         'FREEBSD', 'BSD', 'LINUX', 'NETBSD', 'OPENBSD', 'MACOS', 'OSX', 'POSIX',
         'SUNOS', 'WINDOWS',
    -    'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
         # connection constants
         'CONN_CLOSE', 'CONN_CLOSE_WAIT', 'CONN_CLOSING', 'CONN_ESTABLISHED',
         'CONN_FIN_WAIT1', 'CONN_FIN_WAIT2', 'CONN_LAST_ACK', 'CONN_LISTEN',
    @@ -56,6 +57,8 @@ __all__ = [
         'STATUS_RUNNING', 'STATUS_SLEEPING', 'STATUS_STOPPED', 'STATUS_SUSPENDED',
         'STATUS_TRACING_STOP', 'STATUS_WAITING', 'STATUS_WAKE_KILL',
         'STATUS_WAKING', 'STATUS_ZOMBIE', 'STATUS_PARKED',
    +    # other constants
    +    'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
         # named tuples
         'pconn', 'pcputimes', 'pctxsw', 'pgids', 'pio', 'pionice', 'popenfile',
         'pthread', 'puids', 'sconn', 'scpustats', 'sdiskio', 'sdiskpart',
    @@ -64,6 +67,9 @@ __all__ = [
         'conn_tmap', 'deprecated_method', 'isfile_strict', 'memoize',
         'parse_environ_block', 'path_exists_strict', 'usage_percent',
         'supports_ipv6', 'sockfam_to_enum', 'socktype_to_enum', "wrap_numbers",
    +    'bytes2human', 'conn_to_ntuple', 'debug',
    +    # shell utils
    +    'hilite', 'term_supports_colors', 'print_color',
     ]
     
     
    @@ -256,7 +262,109 @@ if AF_UNIX is not None:
             "unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
         })
     
    -del AF_INET, AF_UNIX, SOCK_STREAM, SOCK_DGRAM
    +
    +# =====================================================================
    +# --- Exceptions
    +# =====================================================================
    +
    +
    +class Error(Exception):
    +    """Base exception class. All other psutil exceptions inherit
    +    from this one.
    +    """
    +    __module__ = 'psutil'
    +
    +    def __init__(self, msg=""):
    +        Exception.__init__(self, msg)
    +        self.msg = msg
    +
    +    def __repr__(self):
    +        ret = "psutil.%s %s" % (self.__class__.__name__, self.msg)
    +        return ret.strip()
    +
    +    __str__ = __repr__
    +
    +
    +class NoSuchProcess(Error):
     +    """Exception raised when a process with a certain PID doesn't
     +    exist or no longer exists.
    +    """
    +    __module__ = 'psutil'
    +
    +    def __init__(self, pid, name=None, msg=None):
    +        Error.__init__(self, msg)
    +        self.pid = pid
    +        self.name = name
    +        self.msg = msg
    +        if msg is None:
    +            if name:
    +                details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
    +            else:
    +                details = "(pid=%s)" % self.pid
    +            self.msg = "process no longer exists " + details
    +
    +
    +class ZombieProcess(NoSuchProcess):
    +    """Exception raised when querying a zombie process. This is
    +    raised on macOS, BSD and Solaris only, and not always: depending
    +    on the query the OS may be able to succeed anyway.
     +    On Linux all zombie processes are queryable (hence this is never
    +    raised). Windows doesn't have zombie processes.
    +    """
    +    __module__ = 'psutil'
    +
    +    def __init__(self, pid, name=None, ppid=None, msg=None):
    +        NoSuchProcess.__init__(self, msg)
    +        self.pid = pid
    +        self.ppid = ppid
    +        self.name = name
    +        self.msg = msg
    +        if msg is None:
    +            args = ["pid=%s" % pid]
    +            if name:
    +                args.append("name=%s" % repr(self.name))
    +            if ppid:
    +                args.append("ppid=%s" % self.ppid)
    +            details = "(%s)" % ", ".join(args)
    +            self.msg = "process still exists but it's a zombie " + details
    +
    +
    +class AccessDenied(Error):
    +    """Exception raised when permission to perform an action is denied."""
    +    __module__ = 'psutil'
    +
    +    def __init__(self, pid=None, name=None, msg=None):
    +        Error.__init__(self, msg)
    +        self.pid = pid
    +        self.name = name
    +        self.msg = msg
    +        if msg is None:
    +            if (pid is not None) and (name is not None):
    +                self.msg = "(pid=%s, name=%s)" % (pid, repr(name))
    +            elif (pid is not None):
    +                self.msg = "(pid=%s)" % self.pid
    +            else:
    +                self.msg = ""
    +
    +
    +class TimeoutExpired(Error):
    +    """Raised on Process.wait(timeout) if timeout expires and process
    +    is still alive.
    +    """
    +    __module__ = 'psutil'
    +
    +    def __init__(self, seconds, pid=None, name=None):
    +        Error.__init__(self, "timeout after %s seconds" % seconds)
    +        self.seconds = seconds
    +        self.pid = pid
    +        self.name = name
    +        if (pid is not None) and (name is not None):
    +            self.msg += " (pid=%s, name=%s)" % (pid, repr(name))
    +        elif (pid is not None):
    +            self.msg += " (pid=%s)" % self.pid
     
     
     # ===================================================================
    @@ -267,12 +375,12 @@ del AF_INET, AF_UNIX, SOCK_STREAM, SOCK_DGRAM
     def usage_percent(used, total, round_=None):
         """Calculate percentage usage of 'used' against 'total'."""
         try:
    -        ret = (used / total) * 100
    +        ret = (float(used) / total) * 100
         except ZeroDivisionError:
    -        ret = 0.0 if isinstance(used, float) or isinstance(total, float) else 0
    -    if round_ is not None:
    -        return round(ret, round_)
    +        return 0.0
         else:
    +        if round_ is not None:
    +            ret = round(ret, round_)
             return ret
     
     
    @@ -327,7 +435,7 @@ def memoize_when_activated(fun):
         1
         >>>
         >>> # activated
    -    >>> foo.cache_activate()
    +    >>> foo.cache_activate(self)
         >>> foo()
         1
         >>> foo()
    @@ -336,26 +444,30 @@ def memoize_when_activated(fun):
         """
         @functools.wraps(fun)
         def wrapper(self):
    -        if not wrapper.cache_activated:
    +        try:
    +            # case 1: we previously entered oneshot() ctx
    +            ret = self._cache[fun]
    +        except AttributeError:
    +            # case 2: we never entered oneshot() ctx
                 return fun(self)
    -        else:
    -            try:
    -                ret = cache[fun]
    -            except KeyError:
    -                ret = cache[fun] = fun(self)
    -            return ret
    +        except KeyError:
    +            # case 3: we entered oneshot() ctx but there's no cache
    +            # for this entry yet
    +            ret = self._cache[fun] = fun(self)
    +        return ret
     
    -    def cache_activate():
    -        """Activate cache."""
    -        wrapper.cache_activated = True
    +    def cache_activate(proc):
    +        """Activate cache. Expects a Process instance. Cache will be
    +        stored as a "_cache" instance attribute."""
    +        proc._cache = {}
     
    -    def cache_deactivate():
    +    def cache_deactivate(proc):
             """Deactivate and clear cache."""
    -        wrapper.cache_activated = False
    -        cache.clear()
    +        try:
    +            del proc._cache
    +        except AttributeError:
    +            pass
     
    -    cache = {}
    -    wrapper.cache_activated = False
         wrapper.cache_activate = cache_activate
         wrapper.cache_deactivate = cache_deactivate
         return wrapper
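The cache moves from the decorated function (previously shared by every Process instance) to a per-instance _cache dict, which is what makes oneshot() safe when multiple Process objects are alive concurrently:

    import psutil

    p = psutil.Process()
    with p.oneshot():   # oneshot_enter() -> cache_activate(p) creates p._cache
        p.name()        # first call computes and stores the raw data
        p.cpu_times()   # later calls reuse the cached snapshot
    # exiting calls cache_deactivate(p), which deletes p._cache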
    @@ -442,7 +554,7 @@ def sockfam_to_enum(num):
         else:  # pragma: no cover
             try:
                 return socket.AddressFamily(num)
    -        except (ValueError, AttributeError):
    +        except ValueError:
                 return num
     
     
    @@ -454,11 +566,30 @@ def socktype_to_enum(num):
             return num
         else:  # pragma: no cover
             try:
    -            return socket.AddressType(num)
    -        except (ValueError, AttributeError):
    +            return socket.SocketKind(num)
    +        except ValueError:
                 return num
     
     
    +def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map, pid=None):
    +    """Convert a raw connection tuple to a proper ntuple."""
    +    if fam in (socket.AF_INET, AF_INET6):
    +        if laddr:
    +            laddr = addr(*laddr)
    +        if raddr:
    +            raddr = addr(*raddr)
    +    if type_ == socket.SOCK_STREAM and fam in (AF_INET, AF_INET6):
    +        status = status_map.get(status, CONN_NONE)
    +    else:
    +        status = CONN_NONE  # ignore whatever C returned to us
    +    fam = sockfam_to_enum(fam)
    +    type_ = socktype_to_enum(type_)
    +    if pid is None:
    +        return pconn(fd, fam, type_, laddr, raddr, status)
    +    else:
    +        return sconn(fd, fam, type_, laddr, raddr, status, pid)
    +
    +
     def deprecated_method(replacement):
         """A decorator which can be used to mark a method as deprecated
         'replcement' is the method name which will be called instead.
    @@ -471,7 +602,7 @@ def deprecated_method(replacement):
     
             @functools.wraps(fun)
             def inner(self, *args, **kwargs):
    -            warnings.warn(msg, category=FutureWarning, stacklevel=2)
    +            warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
                 return getattr(self, replacement)(*args, **kwargs)
             return inner
         return outer
    @@ -576,3 +707,140 @@ def wrap_numbers(input_dict, name):
     _wn = _WrapNumbers()
     wrap_numbers.cache_clear = _wn.cache_clear
     wrap_numbers.cache_info = _wn.cache_info
    +
    +
    +def open_binary(fname, **kwargs):
    +    return open(fname, "rb", **kwargs)
    +
    +
    +def open_text(fname, **kwargs):
     +    """On Python 3 opens a file in text mode using the filesystem
     +    encoding and a suitable en/decoding errors handler.
     +    On Python 2 this is just an alias for open(fname, 'rt').
    +    """
    +    if PY3:
    +        # See:
    +        # https://github.com/giampaolo/psutil/issues/675
    +        # https://github.com/giampaolo/psutil/pull/733
    +        kwargs.setdefault('encoding', ENCODING)
    +        kwargs.setdefault('errors', ENCODING_ERRS)
    +    return open(fname, "rt", **kwargs)
    +
    +
    +def bytes2human(n, format="%(value).1f%(symbol)s"):
    +    """Used by various scripts. See:
    +    http://goo.gl/zeJZl
    +
    +    >>> bytes2human(10000)
    +    '9.8K'
    +    >>> bytes2human(100001221)
    +    '95.4M'
    +    """
    +    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    +    prefix = {}
    +    for i, s in enumerate(symbols[1:]):
    +        prefix[s] = 1 << (i + 1) * 10
    +    for symbol in reversed(symbols[1:]):
    +        if n >= prefix[symbol]:
    +            value = float(n) / prefix[symbol]
    +            return format % locals()
    +    return format % dict(symbol=symbols[0], value=n)
    +
    +
    +def get_procfs_path():
    +    """Return updated psutil.PROCFS_PATH constant."""
    +    return sys.modules['psutil'].PROCFS_PATH
    +
    +
    +if PY3:
    +    def decode(s):
    +        return s.decode(encoding=ENCODING, errors=ENCODING_ERRS)
    +else:
    +    def decode(s):
    +        return s
    +
    +
    +# =====================================================================
    +# --- shell utils
    +# =====================================================================
    +
    +
    +@memoize
    +def term_supports_colors(file=sys.stdout):
    +    if os.name == 'nt':
    +        return True
    +    try:
    +        import curses
    +        assert file.isatty()
    +        curses.setupterm()
    +        assert curses.tigetnum("colors") > 0
    +    except Exception:
    +        return False
    +    else:
    +        return True
    +
    +
    +def hilite(s, color="green", bold=False):
     +    """Return a highlighted version of 's'."""
    +    if not term_supports_colors():
    +        return s
    +    attr = []
    +    colors = dict(green='32', red='91', brown='33')
    +    colors[None] = '29'
    +    try:
    +        color = colors[color]
    +    except KeyError:
     +        raise ValueError("invalid color %r; choose between %r" % (
     +            color, list(colors.keys())))
    +    attr.append(color)
    +    if bold:
    +        attr.append('1')
    +    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
    +
    +
    +def print_color(s, color="green", bold=False, file=sys.stdout):
    +    """Print a colorized version of string."""
    +    if not term_supports_colors():
    +        print(s, file=file)
    +    elif POSIX:
    +        print(hilite(s, color, bold), file=file)
    +    else:
    +        import ctypes
    +
    +        DEFAULT_COLOR = 7
    +        GetStdHandle = ctypes.windll.Kernel32.GetStdHandle
    +        SetConsoleTextAttribute = \
    +            ctypes.windll.Kernel32.SetConsoleTextAttribute
    +
    +        colors = dict(green=2, red=4, brown=6)
    +        colors[None] = DEFAULT_COLOR
    +        try:
    +            color = colors[color]
    +        except KeyError:
    +            raise ValueError("invalid color %r; choose between %r" % (
    +                color, list(colors.keys())))
    +        if bold and color <= 7:
    +            color += 8
    +
    +        handle_id = -12 if file is sys.stderr else -11
    +        GetStdHandle.restype = ctypes.c_ulong
    +        handle = GetStdHandle(handle_id)
    +        SetConsoleTextAttribute(handle, color)
    +        try:
    +            print(s, file=file)
    +        finally:
    +            SetConsoleTextAttribute(handle, DEFAULT_COLOR)
    +
    +
    +if bool(os.getenv('PSUTIL_DEBUG', 0)):
    +    import inspect
    +
    +    def debug(msg):
    +        """If PSUTIL_DEBUG env var is set, print a debug message to stderr."""
    +        fname, lineno, func_name, lines, index = inspect.getframeinfo(
    +            inspect.currentframe().f_back)
    +        print("psutil-debug [%s:%s]> %s" % (fname, lineno, msg),
    +              file=sys.stderr)
    +else:
    +    def debug(msg):
    +        pass
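The dispatch above happens once at import time, so PSUTIL_DEBUG must be set in the environment before psutil is first imported:

    import os
    os.environ['PSUTIL_DEBUG'] = '1'  # must precede the first psutil import
    import psutil                     # internal debug() calls now print to stderr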
    diff --git a/server/www/packages/packages-windows/x86/psutil/_compat.py b/server/www/packages/packages-windows/x86/psutil/_compat.py
    index 08aefe4..a937138 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_compat.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_compat.py
    @@ -5,12 +5,15 @@
     """Module which provides compatibility with older Python versions."""
     
     import collections
    +import errno
     import functools
     import os
     import sys
     
     __all__ = ["PY3", "long", "xrange", "unicode", "basestring", "u", "b",
    -           "lru_cache", "which"]
    +           "lru_cache", "which", "get_terminal_size",
    +           "FileNotFoundError", "PermissionError", "ProcessLookupError",
    +           "InterruptedError", "ChildProcessError", "FileExistsError"]
     
     PY3 = sys.version_info[0] == 3
     
    @@ -38,6 +41,86 @@ else:
             return s
     
     
    +# --- exceptions
    +
    +
    +if PY3:
    +    FileNotFoundError = FileNotFoundError  # NOQA
    +    PermissionError = PermissionError  # NOQA
    +    ProcessLookupError = ProcessLookupError  # NOQA
    +    InterruptedError = InterruptedError  # NOQA
    +    ChildProcessError = ChildProcessError  # NOQA
    +    FileExistsError = FileExistsError  # NOQA
    +else:
    +    # https://github.com/PythonCharmers/python-future/blob/exceptions/
    +    #     src/future/types/exceptions/pep3151.py
    +    import platform
    +
    +    _singleton = object()
    +
    +    def instance_checking_exception(base_exception=Exception):
    +        def wrapped(instance_checker):
    +            class TemporaryClass(base_exception):
    +
    +                def __init__(self, *args, **kwargs):
    +                    if len(args) == 1 and isinstance(args[0], TemporaryClass):
    +                        unwrap_me = args[0]
    +                        for attr in dir(unwrap_me):
    +                            if not attr.startswith('__'):
    +                                setattr(self, attr, getattr(unwrap_me, attr))
    +                    else:
    +                        super(TemporaryClass, self).__init__(*args, **kwargs)
    +
    +                class __metaclass__(type):
    +                    def __instancecheck__(cls, inst):
    +                        return instance_checker(inst)
    +
    +                    def __subclasscheck__(cls, classinfo):
    +                        value = sys.exc_info()[1]
    +                        return isinstance(value, cls)
    +
    +            TemporaryClass.__name__ = instance_checker.__name__
    +            TemporaryClass.__doc__ = instance_checker.__doc__
    +            return TemporaryClass
    +
    +        return wrapped
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def FileNotFoundError(inst):
    +        return getattr(inst, 'errno', _singleton) == errno.ENOENT
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def ProcessLookupError(inst):
    +        return getattr(inst, 'errno', _singleton) == errno.ESRCH
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def PermissionError(inst):
    +        return getattr(inst, 'errno', _singleton) in (
    +            errno.EACCES, errno.EPERM)
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def InterruptedError(inst):
    +        return getattr(inst, 'errno', _singleton) == errno.EINTR
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def ChildProcessError(inst):
    +        return getattr(inst, 'errno', _singleton) == errno.ECHILD
    +
    +    @instance_checking_exception(EnvironmentError)
    +    def FileExistsError(inst):
    +        return getattr(inst, 'errno', _singleton) == errno.EEXIST
    +
    +    if platform.python_implementation() != "CPython":
    +        try:
    +            raise OSError(errno.EEXIST, "perm")
    +        except FileExistsError:
    +            pass
    +        except OSError:
    +            raise RuntimeError(
    +                "broken / incompatible Python implementation, see: "
    +                "https://github.com/giampaolo/psutil/issues/1659")
    +
    +
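On Python 2 the backported classes match by errno through __instancecheck__, so a plain OSError can be caught as its PEP 3151 counterpart. The same snippet also passes on Python 3, where the OSError constructor maps errno natively:

    import errno
    from psutil._compat import ProcessLookupError  # alias of the builtin on Python 3

    try:
        raise OSError(errno.ESRCH, "no such process")
    except ProcessLookupError:  # matched because inst.errno == errno.ESRCH
        pass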
     # --- stdlib additions
     
     
    @@ -239,3 +322,24 @@ except ImportError:
                         if _access_check(name, mode):
                             return name
             return None
    +
    +
    +# python 3.3
    +try:
    +    from shutil import get_terminal_size
    +except ImportError:
    +    def get_terminal_size(fallback=(80, 24)):
    +        try:
    +            import fcntl
    +            import termios
    +            import struct
    +        except ImportError:
    +            return fallback
    +        else:
    +            try:
    +                # This should work on Linux.
    +                res = struct.unpack(
    +                    'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '1234'))
    +                return (res[1], res[0])
    +            except Exception:
    +                return fallback
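This internal helper is what the rewritten test() uses to truncate each output line to the terminal width; on Python < 3.3 it falls back to a TIOCGWINSZ ioctl, and to 80x24 if even that is unavailable:

    from psutil._compat import get_terminal_size  # internal helper

    cols, rows = get_terminal_size()
    print(("x" * 500)[:cols])  # never wraps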
    diff --git a/server/www/packages/packages-windows/x86/psutil/_exceptions.py b/server/www/packages/packages-windows/x86/psutil/_exceptions.py
    deleted file mode 100644
    index 6dbbd28..0000000
    --- a/server/www/packages/packages-windows/x86/psutil/_exceptions.py
    +++ /dev/null
    @@ -1,94 +0,0 @@
    -# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
    -# Use of this source code is governed by a BSD-style license that can be
    -# found in the LICENSE file.
    -
    -
    -class Error(Exception):
    -    """Base exception class. All other psutil exceptions inherit
    -    from this one.
    -    """
    -
    -    def __init__(self, msg=""):
    -        Exception.__init__(self, msg)
    -        self.msg = msg
    -
    -    def __repr__(self):
    -        ret = "psutil.%s %s" % (self.__class__.__name__, self.msg)
    -        return ret.strip()
    -
    -    __str__ = __repr__
    -
    -
    -class NoSuchProcess(Error):
    -    """Exception raised when a process with a certain PID doesn't
    -    or no longer exists.
    -    """
    -
    -    def __init__(self, pid, name=None, msg=None):
    -        Error.__init__(self, msg)
    -        self.pid = pid
    -        self.name = name
    -        self.msg = msg
    -        if msg is None:
    -            if name:
    -                details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
    -            else:
    -                details = "(pid=%s)" % self.pid
    -            self.msg = "process no longer exists " + details
    -
    -
    -class ZombieProcess(NoSuchProcess):
    -    """Exception raised when querying a zombie process. This is
    -    raised on macOS, BSD and Solaris only, and not always: depending
    -    on the query the OS may be able to succeed anyway.
    -    On Linux all zombie processes are querable (hence this is never
    -    raised). Windows doesn't have zombie processes.
    -    """
    -
    -    def __init__(self, pid, name=None, ppid=None, msg=None):
    -        NoSuchProcess.__init__(self, msg)
    -        self.pid = pid
    -        self.ppid = ppid
    -        self.name = name
    -        self.msg = msg
    -        if msg is None:
    -            args = ["pid=%s" % pid]
    -            if name:
    -                args.append("name=%s" % repr(self.name))
    -            if ppid:
    -                args.append("ppid=%s" % self.ppid)
    -            details = "(%s)" % ", ".join(args)
    -            self.msg = "process still exists but it's a zombie " + details
    -
    -
    -class AccessDenied(Error):
    -    """Exception raised when permission to perform an action is denied."""
    -
    -    def __init__(self, pid=None, name=None, msg=None):
    -        Error.__init__(self, msg)
    -        self.pid = pid
    -        self.name = name
    -        self.msg = msg
    -        if msg is None:
    -            if (pid is not None) and (name is not None):
    -                self.msg = "(pid=%s, name=%s)" % (pid, repr(name))
    -            elif (pid is not None):
    -                self.msg = "(pid=%s)" % self.pid
    -            else:
    -                self.msg = ""
    -
    -
    -class TimeoutExpired(Error):
    -    """Raised on Process.wait(timeout) if timeout expires and process
    -    is still alive.
    -    """
    -
    -    def __init__(self, seconds, pid=None, name=None):
    -        Error.__init__(self, "timeout after %s seconds" % seconds)
    -        self.seconds = seconds
    -        self.pid = pid
    -        self.name = name
    -        if (pid is not None) and (name is not None):
    -            self.msg += " (pid=%s, name=%s)" % (pid, repr(name))
    -        elif (pid is not None):
    -            self.msg += " (pid=%s)" % self.pid
    diff --git a/server/www/packages/packages-windows/x86/psutil/_psaix.py b/server/www/packages/packages-windows/x86/psutil/_psaix.py
    index 7ba212d..994366a 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_psaix.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_psaix.py
    @@ -6,31 +6,32 @@
     
     """AIX platform implementation."""
     
    -import errno
    +import functools
     import glob
     import os
     import re
     import subprocess
     import sys
     from collections import namedtuple
    -from socket import AF_INET
     
     from . import _common
     from . import _psposix
     from . import _psutil_aix as cext
     from . import _psutil_posix as cext_posix
    -from ._common import AF_INET6
    +from ._common import AccessDenied
    +from ._common import conn_to_ntuple
    +from ._common import get_procfs_path
     from ._common import memoize_when_activated
     from ._common import NIC_DUPLEX_FULL
     from ._common import NIC_DUPLEX_HALF
     from ._common import NIC_DUPLEX_UNKNOWN
    -from ._common import sockfam_to_enum
    -from ._common import socktype_to_enum
    +from ._common import NoSuchProcess
     from ._common import usage_percent
    +from ._common import ZombieProcess
    +from ._compat import FileNotFoundError
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import PY3
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import ZombieProcess
     
     
     __extra__all__ = ["PROCFS_PATH"]
    @@ -42,6 +43,8 @@ __extra__all__ = ["PROCFS_PATH"]
     
     
     HAS_THREADS = hasattr(cext, "proc_threads")
    +HAS_NET_IO_COUNTERS = hasattr(cext, "net_io_counters")
    +HAS_PROC_IO_COUNTERS = hasattr(cext, "proc_io_counters")
     
     PAGE_SIZE = os.sysconf('SC_PAGE_SIZE')
     AF_LINK = cext_posix.AF_LINK
    @@ -93,21 +96,6 @@ pfullmem = pmem
     scputimes = namedtuple('scputimes', ['user', 'system', 'idle', 'iowait'])
     # psutil.virtual_memory()
     svmem = namedtuple('svmem', ['total', 'available', 'percent', 'used', 'free'])
    -# psutil.Process.memory_maps(grouped=True)
    -pmmap_grouped = namedtuple('pmmap_grouped', ['path', 'rss', 'anon', 'locked'])
    -# psutil.Process.memory_maps(grouped=False)
    -pmmap_ext = namedtuple(
    -    'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
    -
    -
    -# =====================================================================
    -# --- utils
    -# =====================================================================
    -
    -
    -def get_procfs_path():
    -    """Return updated psutil.PROCFS_PATH constant."""
    -    return sys.modules['psutil'].PROCFS_PATH
     
     
     # =====================================================================
    @@ -212,7 +200,9 @@ def disk_partitions(all=False):
     
     
     net_if_addrs = cext_posix.net_if_addrs
    -net_io_counters = cext.net_io_counters
    +
    +if HAS_NET_IO_COUNTERS:
    +    net_io_counters = cext.net_io_counters
     
     
     def net_connections(kind, _pid=-1):
    @@ -225,27 +215,17 @@ def net_connections(kind, _pid=-1):
                              % (kind, ', '.join([repr(x) for x in cmap])))
         families, types = _common.conn_tmap[kind]
         rawlist = cext.net_connections(_pid)
    -    ret = set()
    +    ret = []
         for item in rawlist:
             fd, fam, type_, laddr, raddr, status, pid = item
             if fam not in families:
                 continue
             if type_ not in types:
                 continue
    -        status = TCP_STATUSES[status]
    -        if fam in (AF_INET, AF_INET6):
    -            if laddr:
    -                laddr = _common.addr(*laddr)
    -            if raddr:
    -                raddr = _common.addr(*raddr)
    -        fam = sockfam_to_enum(fam)
    -        type_ = socktype_to_enum(type_)
    -        if _pid == -1:
    -            nt = _common.sconn(fd, fam, type_, laddr, raddr, status, pid)
    -        else:
    -            nt = _common.pconn(fd, fam, type_, laddr, raddr, status)
    -        ret.add(nt)
    -    return list(ret)
    +        nt = conn_to_ntuple(fd, fam, type_, laddr, raddr, status,
    +                            TCP_STATUSES, pid=pid if _pid == -1 else None)
    +        ret.append(nt)
    +    return ret
     
     
     def net_if_stats():
    @@ -328,33 +308,27 @@ def wrap_exceptions(fun):
         """Call callable into a try/except clause and translate ENOENT,
         EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
         """
    -
    +    @functools.wraps(fun)
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
    -        except EnvironmentError as err:
    -            # support for private module import
    -            if (NoSuchProcess is None or AccessDenied is None or
    -                    ZombieProcess is None):
    -                raise
    +        except (FileNotFoundError, ProcessLookupError):
                 # ENOENT (no such file or directory) gets raised on open().
                 # ESRCH (no such process) can get raised on read() if
                 # process is gone in meantime.
    -            if err.errno in (errno.ENOENT, errno.ESRCH):
    -                if not pid_exists(self.pid):
    -                    raise NoSuchProcess(self.pid, self._name)
    -                else:
    -                    raise ZombieProcess(self.pid, self._name, self._ppid)
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
    -            raise
    +            if not pid_exists(self.pid):
    +                raise NoSuchProcess(self.pid, self._name)
    +            else:
    +                raise ZombieProcess(self.pid, self._name, self._ppid)
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
         return wrapper
     
     
     class Process(object):
         """Wrapper class around underlying C implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid", "_procfs_path"]
    +    __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
    @@ -363,23 +337,19 @@ class Process(object):
             self._procfs_path = get_procfs_path()
     
         def oneshot_enter(self):
    -        self._proc_name_and_args.cache_activate()
    -        self._proc_basic_info.cache_activate()
    -        self._proc_cred.cache_activate()
    +        self._proc_basic_info.cache_activate(self)
    +        self._proc_cred.cache_activate(self)
     
         def oneshot_exit(self):
    -        self._proc_name_and_args.cache_deactivate()
    -        self._proc_basic_info.cache_deactivate()
    -        self._proc_cred.cache_deactivate()
    -
    -    @memoize_when_activated
    -    def _proc_name_and_args(self):
    -        return cext.proc_name_and_args(self.pid, self._procfs_path)
    +        self._proc_basic_info.cache_deactivate(self)
    +        self._proc_cred.cache_deactivate(self)
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _proc_basic_info(self):
             return cext.proc_basic_info(self.pid, self._procfs_path)
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _proc_cred(self):
             return cext.proc_cred(self.pid, self._procfs_path)
    @@ -388,22 +358,25 @@ class Process(object):
         def name(self):
             if self.pid == 0:
                 return "swapper"
    -        # note: this is limited to 15 characters
    -        return self._proc_name_and_args()[0].rstrip("\x00")
    +        # note: max 16 characters
    +        return cext.proc_name(self.pid, self._procfs_path).rstrip("\x00")
     
         @wrap_exceptions
         def exe(self):
             # there is no way to get executable path in AIX other than to guess,
             # and guessing is more complex than what's in the wrapping class
    -        exe = self.cmdline()[0]
    +        cmdline = self.cmdline()
    +        if not cmdline:
    +            return ''
    +        exe = cmdline[0]
             if os.path.sep in exe:
                 # relative or absolute path
                 if not os.path.isabs(exe):
                     # if cwd has changed, we're out of luck - this may be wrong!
                     exe = os.path.abspath(os.path.join(self.cwd(), exe))
                 if (os.path.isabs(exe) and
    -               os.path.isfile(exe) and
    -               os.access(exe, os.X_OK)):
    +                    os.path.isfile(exe) and
    +                    os.access(exe, os.X_OK)):
                     return exe
                 # not found, move to search in PATH using basename only
                 exe = os.path.basename(exe)
    @@ -411,13 +384,17 @@ class Process(object):
             for path in os.environ["PATH"].split(":"):
                 possible_exe = os.path.abspath(os.path.join(path, exe))
                 if (os.path.isfile(possible_exe) and
    -               os.access(possible_exe, os.X_OK)):
    +                    os.access(possible_exe, os.X_OK)):
                     return possible_exe
             return ''
     
         @wrap_exceptions
         def cmdline(self):
    -        return self._proc_name_and_args()[1].split(' ')
    +        return cext.proc_args(self.pid)
    +
    +    @wrap_exceptions
    +    def environ(self):
    +        return cext.proc_environ(self.pid)
     
         @wrap_exceptions
         def create_time(self):
    @@ -503,11 +480,9 @@ class Process(object):
             try:
                 result = os.readlink("%s/%s/cwd" % (procfs_path, self.pid))
                 return result.rstrip('/')
    -        except OSError as err:
    -            if err.errno == errno.ENOENT:
    -                os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
    -                return None
    -            raise
    +        except FileNotFoundError:
    +            os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
    +            return None
     
         @wrap_exceptions
         def memory_info(self):
    @@ -561,14 +536,15 @@ class Process(object):
         def wait(self, timeout=None):
             return _psposix.wait_pid(self.pid, timeout, self._name)
     
    -    @wrap_exceptions
    -    def io_counters(self):
    -        try:
    -            rc, wc, rb, wb = cext.proc_io_counters(self.pid)
    -        except OSError:
    -            # if process is terminated, proc_io_counters returns OSError
    -            # instead of NSP
    -            if not pid_exists(self.pid):
    -                raise NoSuchProcess(self.pid, self._name)
    -            raise
    -        return _common.pio(rc, wc, rb, wb)
    +    if HAS_PROC_IO_COUNTERS:
    +        @wrap_exceptions
    +        def io_counters(self):
    +            try:
    +                rc, wc, rb, wb = cext.proc_io_counters(self.pid)
    +            except OSError:
    +                # if process is terminated, proc_io_counters returns OSError
    +                # instead of NSP
    +                if not pid_exists(self.pid):
    +                    raise NoSuchProcess(self.pid, self._name)
    +                raise
    +            return _common.pio(rc, wc, rb, wb)
    diff --git a/server/www/packages/packages-windows/x86/psutil/_psbsd.py b/server/www/packages/packages-windows/x86/psutil/_psbsd.py
    index 7f4bcb6..49ad1e9 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_psbsd.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_psbsd.py
    @@ -10,26 +10,28 @@ import functools
     import os
     import xml.etree.ElementTree as ET
     from collections import namedtuple
    -from socket import AF_INET
    +from collections import defaultdict
     
     from . import _common
     from . import _psposix
     from . import _psutil_bsd as cext
     from . import _psutil_posix as cext_posix
    -from ._common import AF_INET6
    +from ._common import AccessDenied
     from ._common import conn_tmap
    +from ._common import conn_to_ntuple
     from ._common import FREEBSD
     from ._common import memoize
     from ._common import memoize_when_activated
     from ._common import NETBSD
    +from ._common import NoSuchProcess
     from ._common import OPENBSD
    -from ._common import sockfam_to_enum
    -from ._common import socktype_to_enum
     from ._common import usage_percent
    +from ._common import ZombieProcess
    +from ._compat import FileNotFoundError
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import which
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import ZombieProcess
    +
     
     __extra__all__ = []
     
    @@ -103,6 +105,11 @@ else:
         PAGESIZE = os.sysconf("SC_PAGE_SIZE")
     AF_LINK = cext_posix.AF_LINK
     
    +HAS_PER_CPU_TIMES = hasattr(cext, "per_cpu_times")
    +HAS_PROC_NUM_THREADS = hasattr(cext, "proc_num_threads")
    +HAS_PROC_OPEN_FILES = hasattr(cext, 'proc_open_files')
    +HAS_PROC_NUM_FDS = hasattr(cext, 'proc_num_fds')
    +
     kinfo_proc_map = dict(
         ppid=0,
         status=1,
    @@ -211,7 +218,7 @@ def cpu_times():
         return scputimes(user, nice, system, idle, irq)
     
     
    -if hasattr(cext, "per_cpu_times"):
    +if HAS_PER_CPU_TIMES:
         def per_cpu_times():
             """Return system CPU times as a namedtuple"""
             ret = []
    @@ -389,22 +396,8 @@ def net_connections(kind):
             fd, fam, type, laddr, raddr, status, pid = item
             # TODO: apply filter at C level
             if fam in families and type in types:
    -            try:
    -                status = TCP_STATUSES[status]
    -            except KeyError:
    -                # XXX: Not sure why this happens. I saw this occurring
    -                # with IPv6 sockets opened by 'vim'. Those sockets
    -                # have a very short lifetime so maybe the kernel
    -                # can't initialize their status?
    -                status = TCP_STATUSES[cext.PSUTIL_CONN_NONE]
    -            if fam in (AF_INET, AF_INET6):
    -                if laddr:
    -                    laddr = _common.addr(*laddr)
    -                if raddr:
    -                    raddr = _common.addr(*raddr)
    -            fam = sockfam_to_enum(fam)
    -            type = socktype_to_enum(type)
    -            nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid)
    +            nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status,
    +                                TCP_STATUSES, pid)
                 ret.add(nt)
         return list(ret)
     
    @@ -432,6 +425,47 @@ if FREEBSD:
                 secsleft = minsleft * 60
             return _common.sbattery(percent, secsleft, power_plugged)
     
    +    def sensors_temperatures():
     +        "Return CPU core temperatures if available, else an empty dict."
    +        ret = defaultdict(list)
    +        num_cpus = cpu_count_logical()
    +        for cpu in range(num_cpus):
    +            try:
    +                current, high = cext.sensors_cpu_temperature(cpu)
    +                if high <= 0:
    +                    high = None
    +                name = "Core %s" % cpu
    +                ret["coretemp"].append(
    +                    _common.shwtemp(name, current, high, high))
    +            except NotImplementedError:
    +                pass
    +
    +        return ret
    +
    +    def cpu_freq():
    +        """Return frequency metrics for CPUs. As of Dec 2018 only
    +        CPU 0 appears to be supported by FreeBSD and all other cores
    +        match the frequency of CPU 0.
    +        """
    +        ret = []
    +        num_cpus = cpu_count_logical()
    +        for cpu in range(num_cpus):
    +            try:
    +                current, available_freq = cext.cpu_frequency(cpu)
    +            except NotImplementedError:
    +                continue
     +            min_freq = max_freq = None  # stay None when no levels reported
     +            if available_freq:
     +                try:
     +                    min_freq = int(available_freq.split(" ")[-1].split("/")[0])
     +                except (IndexError, ValueError):
     +                    min_freq = None
     +                try:
     +                    max_freq = int(available_freq.split(" ")[0].split("/")[0])
     +                except (IndexError, ValueError):
     +                    max_freq = None
     +            ret.append(_common.scpufreq(current, min_freq, max_freq))
    +        return ret
    +
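Worked example of the min/max extraction in cpu_freq() above, assuming the available-frequency string is a space-separated list of "MHz/milliwatts" pairs ordered fastest-first (as FreeBSD's dev.cpu.N.freq_levels sysctl reports):

    available_freq = "2400/25000 2200/22000 800/8000"
    min_freq = int(available_freq.split(" ")[-1].split("/")[0])  # 800 (last pair)
    max_freq = int(available_freq.split(" ")[0].split("/")[0])   # 2400 (first pair)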
     
     # =====================================================================
     #  --- other system functions
    @@ -500,6 +534,14 @@ else:
         pid_exists = _psposix.pid_exists
     
     
    +def is_zombie(pid):
    +    try:
    +        st = cext.proc_oneshot_info(pid)[kinfo_proc_map['status']]
    +        return st == cext.SZOMB
    +    except Exception:
    +        return False
    +
    +
     def wrap_exceptions(fun):
         """Decorator which translates bare OSError exceptions into
         NoSuchProcess and AccessDenied.
    @@ -508,19 +550,19 @@ def wrap_exceptions(fun):
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
    -        except OSError as err:
    +        except ProcessLookupError:
    +            if not pid_exists(self.pid):
    +                raise NoSuchProcess(self.pid, self._name)
    +            else:
    +                raise ZombieProcess(self.pid, self._name, self._ppid)
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
    +        except OSError:
                 if self.pid == 0:
                     if 0 in pids():
                         raise AccessDenied(self.pid, self._name)
                     else:
                         raise
    -            if err.errno == errno.ESRCH:
    -                if not pid_exists(self.pid):
    -                    raise NoSuchProcess(self.pid, self._name)
    -                else:
    -                    raise ZombieProcess(self.pid, self._name, self._ppid)
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
                 raise
         return wrapper
     
    @@ -530,30 +572,35 @@ def wrap_exceptions_procfs(inst):
         """Same as above, for routines relying on reading /proc fs."""
         try:
             yield
    -    except EnvironmentError as err:
    +    except (ProcessLookupError, FileNotFoundError):
             # ENOENT (no such file or directory) gets raised on open().
             # ESRCH (no such process) can get raised on read() if
             # process is gone in meantime.
    -        if err.errno in (errno.ENOENT, errno.ESRCH):
    -            if not pid_exists(inst.pid):
    -                raise NoSuchProcess(inst.pid, inst._name)
    -            else:
    -                raise ZombieProcess(inst.pid, inst._name, inst._ppid)
    -        if err.errno in (errno.EPERM, errno.EACCES):
    -            raise AccessDenied(inst.pid, inst._name)
    -        raise
    +        if not pid_exists(inst.pid):
    +            raise NoSuchProcess(inst.pid, inst._name)
    +        else:
    +            raise ZombieProcess(inst.pid, inst._name, inst._ppid)
    +    except PermissionError:
    +        raise AccessDenied(inst.pid, inst._name)
     
     
     class Process(object):
         """Wrapper class around underlying C implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid"]
    +    __slots__ = ["pid", "_name", "_ppid", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
             self._name = None
             self._ppid = None
     
    +    def _assert_alive(self):
    +        """Raise NSP if the process disappeared on us."""
     +        # For those C functions which do not raise NSP and may instead
     +        # return an incorrect or incomplete result.
    +        cext.proc_name(self.pid)
    +
    +    @wrap_exceptions
         @memoize_when_activated
         def oneshot(self):
             """Retrieves multiple process info in one shot as a raw tuple."""
    @@ -562,10 +609,10 @@ class Process(object):
             return ret
     
         def oneshot_enter(self):
    -        self.oneshot.cache_activate()
    +        self.oneshot.cache_activate(self)
     
         def oneshot_exit(self):
    -        self.oneshot.cache_deactivate()
    +        self.oneshot.cache_deactivate(self)
     
         @wrap_exceptions
         def name(self):
    @@ -575,6 +622,8 @@ class Process(object):
         @wrap_exceptions
         def exe(self):
             if FREEBSD:
    +            if self.pid == 0:
    +                return ''  # else NSP
                 return cext.proc_exe(self.pid)
             elif NETBSD:
                 if self.pid == 0:
    @@ -590,7 +639,7 @@ class Process(object):
                 # cmdline arg (may return None).
                 cmdline = self.cmdline()
                 if cmdline:
    -                return which(cmdline[0])
    +                return which(cmdline[0]) or ""
                 else:
                     return ""
     
    @@ -607,10 +656,14 @@ class Process(object):
                     return cext.proc_cmdline(self.pid)
                 except OSError as err:
                     if err.errno == errno.EINVAL:
    -                    if not pid_exists(self.pid):
    -                        raise NoSuchProcess(self.pid, self._name)
    -                    else:
    +                    if is_zombie(self.pid):
                             raise ZombieProcess(self.pid, self._name, self._ppid)
    +                    elif not pid_exists(self.pid):
    +                        raise NoSuchProcess(self.pid, self._name, self._ppid)
    +                    else:
    +                        # XXX: this happens with unicode tests. It means the C
    +                        # routine is unable to decode invalid unicode chars.
    +                        return []
                     else:
                         raise
             else:
    @@ -678,7 +731,7 @@ class Process(object):
     
         @wrap_exceptions
         def num_threads(self):
    -        if hasattr(cext, "proc_num_threads"):
    +        if HAS_PROC_NUM_THREADS:
                 # FreeBSD
                 return cext.proc_num_threads(self.pid)
             else:
    @@ -700,10 +753,7 @@ class Process(object):
                 ntuple = _common.pthread(thread_id, utime, stime)
                 retlist.append(ntuple)
             if OPENBSD:
    -            # On OpenBSD the underlying C function does not raise NSP
    -            # in case the process is gone (and the returned list may
    -            # incomplete).
    -            self.name()  # raise NSP if the process disappeared on us
    +            self._assert_alive()
             return retlist
     
         @wrap_exceptions
    @@ -714,29 +764,16 @@ class Process(object):
     
             if NETBSD:
                 families, types = conn_tmap[kind]
    -            ret = set()
    +            ret = []
                 rawlist = cext.net_connections(self.pid)
                 for item in rawlist:
                     fd, fam, type, laddr, raddr, status, pid = item
                     assert pid == self.pid
                     if fam in families and type in types:
    -                    try:
    -                        status = TCP_STATUSES[status]
    -                    except KeyError:
    -                        status = TCP_STATUSES[cext.PSUTIL_CONN_NONE]
    -                    if fam in (AF_INET, AF_INET6):
    -                        if laddr:
    -                            laddr = _common.addr(*laddr)
    -                        if raddr:
    -                            raddr = _common.addr(*raddr)
    -                    fam = sockfam_to_enum(fam)
    -                    type = socktype_to_enum(type)
    -                    nt = _common.pconn(fd, fam, type, laddr, raddr, status)
    -                    ret.add(nt)
    -            # On NetBSD the underlying C function does not raise NSP
    -            # in case the process is gone (and the returned list may
    -            # incomplete).
    -            self.name()  # raise NSP if the process disappeared on us
    +                    nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status,
    +                                        TCP_STATUSES)
    +                    ret.append(nt)
    +            self._assert_alive()
                 return list(ret)
     
             families, types = conn_tmap[kind]
    @@ -744,21 +781,13 @@ class Process(object):
             ret = []
             for item in rawlist:
                 fd, fam, type, laddr, raddr, status = item
    -            if fam in (AF_INET, AF_INET6):
    -                if laddr:
    -                    laddr = _common.addr(*laddr)
    -                if raddr:
    -                    raddr = _common.addr(*raddr)
    -            fam = sockfam_to_enum(fam)
    -            type = socktype_to_enum(type)
    -            status = TCP_STATUSES[status]
    -            nt = _common.pconn(fd, fam, type, laddr, raddr, status)
    +            nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status,
    +                                TCP_STATUSES)
                 ret.append(nt)
    +
             if OPENBSD:
    -            # On OpenBSD the underlying C function does not raise NSP
    -            # in case the process is gone (and the returned list may
    -            # incomplete).
    -            self.name()  # raise NSP if the process disappeared on us
    +            self._assert_alive()
    +
             return ret
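Both the NetBSD branch above and the generic branch here now delegate tuple construction to a shared helper in _common. A hedged reconstruction of what conn_to_ntuple consolidates, inferred from the code it replaces (the upstream implementation may differ in detail):

    import socket
    from psutil import _common

    def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map):
        # Normalize raw C-level connection data into a _common.pconn tuple.
        if fam in (socket.AF_INET, socket.AF_INET6):
            if laddr:
                laddr = _common.addr(*laddr)
            if raddr:
                raddr = _common.addr(*raddr)
            # Unknown status codes degrade to CONN_NONE instead of KeyError.
            status = status_map.get(status, _common.CONN_NONE)
        fam = _common.sockfam_to_enum(fam)
        type_ = _common.socktype_to_enum(type_)
        return _common.pconn(fd, fam, type_, laddr, raddr, status)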
     
         @wrap_exceptions
    @@ -795,10 +824,7 @@ class Process(object):
             # it into None
             if OPENBSD and self.pid == 0:
                 return None  # ...else it would raise EINVAL
    -        elif NETBSD:
    -            with wrap_exceptions_procfs(self):
    -                return os.readlink("/proc/%s/cwd" % self.pid)
    -        elif hasattr(cext, 'proc_open_files'):
    +        elif NETBSD or HAS_PROC_OPEN_FILES:
                 # FreeBSD < 8 does not support functions based on
                 # kinfo_getfile() and kinfo_getvmmap()
                 return cext.proc_cwd(self.pid) or None
    @@ -817,7 +843,7 @@ class Process(object):
     
         # FreeBSD < 8 does not support functions based on kinfo_getfile()
         # and kinfo_getvmmap()
    -    if hasattr(cext, 'proc_open_files'):
    +    if HAS_PROC_OPEN_FILES:
             @wrap_exceptions
             def open_files(self):
                 """Return files opened by process as a list of namedtuples."""
    @@ -828,15 +854,13 @@ class Process(object):
     
         # FreeBSD < 8 does not support functions based on kinfo_getfile()
         # and kinfo_getvmmap()
    -    if hasattr(cext, 'proc_num_fds'):
    +    if HAS_PROC_NUM_FDS:
             @wrap_exceptions
             def num_fds(self):
                 """Return the number of file descriptors opened by this process."""
                 ret = cext.proc_num_fds(self.pid)
                 if NETBSD:
    -                # On NetBSD the underlying C function does not raise NSP
    -                # in case the process is gone.
    -                self.name()  # raise NSP if the process disappeared on us
    +                self._assert_alive()
                 return ret
         else:
             num_fds = _not_implemented
    diff --git a/server/www/packages/packages-windows/x86/psutil/_pslinux.py b/server/www/packages/packages-windows/x86/psutil/_pslinux.py
    index df624de..9e32f25 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_pslinux.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_pslinux.py
    @@ -25,25 +25,30 @@ from . import _common
     from . import _psposix
     from . import _psutil_linux as cext
     from . import _psutil_posix as cext_posix
    -from ._common import ENCODING
    -from ._common import ENCODING_ERRS
    +from ._common import AccessDenied
    +from ._common import debug
    +from ._common import decode
    +from ._common import get_procfs_path
     from ._common import isfile_strict
     from ._common import memoize
     from ._common import memoize_when_activated
     from ._common import NIC_DUPLEX_FULL
     from ._common import NIC_DUPLEX_HALF
     from ._common import NIC_DUPLEX_UNKNOWN
    +from ._common import NoSuchProcess
    +from ._common import open_binary
    +from ._common import open_text
     from ._common import parse_environ_block
     from ._common import path_exists_strict
     from ._common import supports_ipv6
     from ._common import usage_percent
    +from ._common import ZombieProcess
     from ._compat import b
     from ._compat import basestring
    -from ._compat import long
    +from ._compat import FileNotFoundError
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import PY3
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import ZombieProcess
     
     if sys.version_info >= (3, 4):
         import enum
    @@ -71,6 +76,8 @@ __extra__all__ = [
     POWER_SUPPLY_PATH = "/sys/class/power_supply"
     HAS_SMAPS = os.path.exists('/proc/%s/smaps' % os.getpid())
     HAS_PRLIMIT = hasattr(cext, "linux_prlimit")
    +HAS_PROC_IO_PRIORITY = hasattr(cext, "proc_ioprio_get")
    +HAS_CPU_AFFINITY = hasattr(cext, "proc_cpu_affinity_get")
     _DEFAULT = object()
     
     # RLIMIT_* constants, not guaranteed to be present on all kernels
    @@ -194,6 +201,10 @@ pmmap_ext = namedtuple(
     pio = namedtuple('pio', ['read_count', 'write_count',
                              'read_bytes', 'write_bytes',
                              'read_chars', 'write_chars'])
    +# psutil.Process.cpu_times()
    +pcputimes = namedtuple('pcputimes',
    +                       ['user', 'system', 'children_user', 'children_system',
    +                        'iowait'])
     
     
     # =====================================================================
    @@ -201,37 +212,6 @@ pio = namedtuple('pio', ['read_count', 'write_count',
     # =====================================================================
     
     
    -def open_binary(fname, **kwargs):
    -    return open(fname, "rb", **kwargs)
    -
    -
    -def open_text(fname, **kwargs):
    -    """On Python 3 opens a file in text mode by using fs encoding and
    -    a proper en/decoding errors handler.
    -    On Python 2 this is just an alias for open(name, 'rt').
    -    """
    -    if PY3:
    -        # See:
    -        # https://github.com/giampaolo/psutil/issues/675
    -        # https://github.com/giampaolo/psutil/pull/733
    -        kwargs.setdefault('encoding', ENCODING)
    -        kwargs.setdefault('errors', ENCODING_ERRS)
    -    return open(fname, "rt", **kwargs)
    -
    -
    -if PY3:
    -    def decode(s):
    -        return s.decode(encoding=ENCODING, errors=ENCODING_ERRS)
    -else:
    -    def decode(s):
    -        return s
    -
    -
    -def get_procfs_path():
    -    """Return updated psutil.PROCFS_PATH constant."""
    -    return sys.modules['psutil'].PROCFS_PATH
    -
    -
     def readlink(path):
         """Wrapper around os.readlink()."""
         assert isinstance(path, basestring), path
    @@ -638,6 +618,17 @@ def cpu_count_logical():
     
     def cpu_count_physical():
         """Return the number of physical cores in the system."""
    +    # Method #1
    +    core_ids = set()
    +    for path in glob.glob(
    +            "/sys/devices/system/cpu/cpu[0-9]*/topology/core_id"):
    +        with open_binary(path) as f:
    +            core_ids.add(int(f.read()))
    +    result = len(core_ids)
    +    if result != 0:
    +        return result
    +
    +    # Method #2
         mapping = {}
         current_info = {}
         with open_binary('%s/cpuinfo' % get_procfs_path()) as f:
    @@ -657,8 +648,8 @@ def cpu_count_physical():
                         key, value = line.split(b'\t:', 1)
                         current_info[key] = int(value)
     
    -    # mimic os.cpu_count()
    -    return sum(mapping.values()) or None
    +    result = sum(mapping.values())
    +    return result or None  # mimic os.cpu_count()
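cpu_count_physical() now prefers sysfs and keeps the /proc/cpuinfo parser only as a fallback. Method #1 in isolation (a sketch; sysfs may be absent, e.g. in some containers, in which case the set stays empty and the old parser takes over). Sibling hyper-threads report the same core_id, so the set collapses each physical core to one entry:

    import glob

    core_ids = set()
    for path in glob.glob(
            "/sys/devices/system/cpu/cpu[0-9]*/topology/core_id"):
        with open(path, "rb") as f:
            core_ids.add(int(f.read()))
    physical_cores = len(core_ids) or None  # None -> fall back to /proc/cpuinfo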
     
     
     def cpu_stats():
    @@ -682,30 +673,26 @@ def cpu_stats():
             ctx_switches, interrupts, soft_interrupts, syscalls)
     
     
    -if os.path.exists("/sys/devices/system/cpu/cpufreq") or \
    +if os.path.exists("/sys/devices/system/cpu/cpufreq/policy0") or \
             os.path.exists("/sys/devices/system/cpu/cpu0/cpufreq"):
         def cpu_freq():
             """Return frequency metrics for all CPUs.
         Contrary to other OSes, Linux updates these values in
             real-time.
             """
    -        # scaling_* files seem preferable to cpuinfo_*, see:
    -        # http://unix.stackexchange.com/a/87537/168884
    -        ret = []
    -        ls = glob.glob("/sys/devices/system/cpu/cpufreq/policy*")
    -        if ls:
    -            # Sort the list so that '10' comes after '2'. This should
    -            # ensure the CPU order is consistent with other CPU functions
    -            # having a 'percpu' argument and returning results for multiple
    -            # CPUs (cpu_times(), cpu_percent(), cpu_times_percent()).
    -            ls.sort(key=lambda x: int(os.path.basename(x)[6:]))
    -        else:
    -            # https://github.com/giampaolo/psutil/issues/981
    -            ls = glob.glob("/sys/devices/system/cpu/cpu[0-9]*/cpufreq")
    -            ls.sort(key=lambda x: int(re.search('[0-9]+', x).group(0)))
    +        def get_path(num):
    +            for p in ("/sys/devices/system/cpu/cpufreq/policy%s" % num,
    +                      "/sys/devices/system/cpu/cpu%s/cpufreq" % num):
    +                if os.path.exists(p):
    +                    return p
     
    -        pjoin = os.path.join
    -        for path in ls:
    +        ret = []
    +        for n in range(cpu_count_logical()):
    +            path = get_path(n)
    +            if not path:
    +                continue
    +
    +            pjoin = os.path.join
                 curr = cat(pjoin(path, "scaling_cur_freq"), fallback=None)
                 if curr is None:
                     # Likely an old RedHat, see:
    @@ -720,6 +707,25 @@ if os.path.exists("/sys/devices/system/cpu/cpufreq") or \
                 ret.append(_common.scpufreq(curr, min_, max_))
             return ret
     
    +elif os.path.exists("/proc/cpuinfo"):
    +    def cpu_freq():
    +        """Alternate implementation using /proc/cpuinfo.
+        min and max frequencies are not available and are set to 0.0.
    +        """
    +        ret = []
    +        with open_binary('%s/cpuinfo' % get_procfs_path()) as f:
    +            for line in f:
    +                if line.lower().startswith(b'cpu mhz'):
    +                    key, value = line.split(b'\t:', 1)
    +                    ret.append(_common.scpufreq(float(value), 0., 0.))
    +        return ret
    +
    +else:
    +    def cpu_freq():
    +        """Dummy implementation when none of the above files are present.
    +        """
    +        return []
    +
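The net effect is a three-way fallback: per-CPU sysfs files first, then /proc/cpuinfo (current frequency only), then an empty list. An illustrative call through the public API (values made up; on the /proc/cpuinfo path min and max come back as 0.0):

    >>> import psutil
    >>> psutil.cpu_freq(percpu=True)
    [scpufreq(current=2394.67, min=800.0, max=3500.0), ...]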
     
     # =====================================================================
     # --- network
    @@ -746,6 +752,8 @@ class Connections:
         """
     
         def __init__(self):
    +        # The string represents the basename of the corresponding
    +        # /proc/net/{proto_name} file.
             tcp4 = ("tcp", socket.AF_INET, socket.SOCK_STREAM)
             tcp6 = ("tcp6", socket.AF_INET6, socket.SOCK_STREAM)
             udp4 = ("udp", socket.AF_INET, socket.SOCK_DGRAM)
    @@ -771,17 +779,16 @@ class Connections:
             for fd in os.listdir("%s/%s/fd" % (self._procfs_path, pid)):
                 try:
                     inode = readlink("%s/%s/fd/%s" % (self._procfs_path, pid, fd))
    -            except OSError as err:
    +            except (FileNotFoundError, ProcessLookupError):
                     # ENOENT == file which is gone in the meantime;
                     # os.stat('/proc/%s' % self.pid) will be done later
                     # to force NSP (if it's the case)
    -                if err.errno in (errno.ENOENT, errno.ESRCH):
    -                    continue
    -                elif err.errno == errno.EINVAL:
    +                continue
    +            except OSError as err:
    +                if err.errno == errno.EINVAL:
                         # not a link
                         continue
    -                else:
    -                    raise
    +                raise
                 else:
                     if inode.startswith('socket:['):
                         # the process is using a socket
    @@ -794,7 +801,7 @@ class Connections:
             for pid in pids():
                 try:
                     inodes.update(self.get_proc_inodes(pid))
    -            except OSError as err:
    +            except (FileNotFoundError, ProcessLookupError, PermissionError):
                     # os.listdir() is gonna raise a lot of access denied
                     # exceptions in case of unprivileged user; that's fine
                     # as we'll just end up returning a connection with PID
    @@ -802,9 +809,7 @@ class Connections:
                     # Both netstat -an and lsof does the same so it's
                     # unlikely we can do any better.
                     # ENOENT just means a PID disappeared on us.
    -                if err.errno not in (
    -                        errno.ENOENT, errno.ESRCH, errno.EPERM, errno.EACCES):
    -                    raise
    +                continue
             return inodes
     
         @staticmethod
    @@ -932,7 +937,7 @@ class Connections:
                                 path = tokens[-1]
                             else:
                                 path = ""
    -                        type_ = int(type_)
    +                        type_ = _common.socktype_to_enum(int(type_))
                             # XXX: determining the remote endpoint of a
                             # UNIX socket on Linux is not possible, see:
                             # https://serverfault.com/questions/252723/
    @@ -953,15 +958,14 @@ class Connections:
             else:
                 inodes = self.get_all_inodes()
             ret = set()
    -        for f, family, type_ in self.tmap[kind]:
    +        for proto_name, family, type_ in self.tmap[kind]:
    +            path = "%s/net/%s" % (self._procfs_path, proto_name)
                 if family in (socket.AF_INET, socket.AF_INET6):
                     ls = self.process_inet(
    -                    "%s/net/%s" % (self._procfs_path, f),
    -                    family, type_, inodes, filter_pid=pid)
    +                    path, family, type_, inodes, filter_pid=pid)
                 else:
                     ls = self.process_unix(
    -                    "%s/net/%s" % (self._procfs_path, f),
    -                    family, inodes, filter_pid=pid)
    +                    path, family, inodes, filter_pid=pid)
                 for fd, family, type_, laddr, raddr, status, bound_pid in ls:
                     if pid:
                         conn = _common.pconn(fd, family, type_, laddr, raddr,
    @@ -1062,6 +1066,9 @@ def disk_io_counters(perdisk=False):
             # ...unless (Linux 2.6) the line refers to a partition instead
             # of a disk, in which case the line has less fields (7):
             # "3    1   hda1 8 8 8 8"
    +        # 4.18+ has 4 fields added:
    +        # "3    0   hda 8 8 8 8 8 8 8 8 8 8 8 0 0 0 0"
    +        # 5.5 has 2 more fields.
             # See:
             # https://www.kernel.org/doc/Documentation/iostats.txt
             # https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
    @@ -1076,7 +1083,7 @@ def disk_io_counters(perdisk=False):
                     reads = int(fields[2])
                     (reads_merged, rbytes, rtime, writes, writes_merged,
                         wbytes, wtime, _, busy_time, _) = map(int, fields[4:14])
    -            elif flen == 14:
    +            elif flen == 14 or flen >= 18:
                     # Linux 2.6+, line referring to a disk
                     name = fields[2]
                     (reads, reads_merged, rbytes, rtime, writes, writes_merged,
    @@ -1100,7 +1107,7 @@ def disk_io_counters(perdisk=False):
                         fields = f.read().strip().split()
                     name = os.path.basename(root)
                     (reads, reads_merged, rbytes, rtime, writes, writes_merged,
    -                    wbytes, wtime, _, busy_time, _) = map(int, fields)
    +                    wbytes, wtime, _, busy_time) = map(int, fields[:10])
                     yield (name, reads, writes, rbytes, wbytes, rtime,
                            wtime, reads_merged, writes_merged, busy_time)
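The parser keys off the per-line field count in /proc/diskstats. An illustration of the shapes now accepted (values made up; the four discard fields added in 4.18+ are simply ignored):

    # 7 fields  -> partition (Linux 2.6):  "3 1 hda1 8 8 8 8"
    # 14 fields -> disk (Linux 2.6+):      "3 0 hda 8 8 8 8 8 8 8 8 8 8 8"
    # 18 fields -> disk (Linux 4.18+):     14 fields + 4 discard fields
    line = "3 0 sda 100 5 2000 30 50 2 800 40 0 60 70 0 0 0 0"
    fields = line.split()
    assert len(fields) >= 18      # handled by the new "flen >= 18" branch
    name, reads = fields[2], int(fields[3])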
     
    @@ -1142,7 +1149,8 @@ def disk_io_counters(perdisk=False):
     def disk_partitions(all=False):
         """Return mounted disk partitions as a list of namedtuples."""
         fstypes = set()
    -    with open_text("%s/filesystems" % get_procfs_path()) as f:
    +    procfs_path = get_procfs_path()
    +    with open_text("%s/filesystems" % procfs_path) as f:
             for line in f:
                 line = line.strip()
                 if not line.startswith("nodev"):
    @@ -1153,8 +1161,14 @@ def disk_partitions(all=False):
                     if fstype == "zfs":
                         fstypes.add("zfs")
     
    +    # See: https://github.com/giampaolo/psutil/issues/1307
    +    if procfs_path == "/proc" and os.path.isfile('/etc/mtab'):
    +        mounts_path = os.path.realpath("/etc/mtab")
    +    else:
    +        mounts_path = os.path.realpath("%s/self/mounts" % procfs_path)
    +
         retlist = []
    -    partitions = cext.disk_partitions()
    +    partitions = cext.disk_partitions(mounts_path)
         for partition in partitions:
             device, mountpoint, fstype, opts = partition
             if device == 'none':
    @@ -1164,6 +1178,7 @@ def disk_partitions(all=False):
                     continue
             ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
             retlist.append(ntuple)
    +
         return retlist
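Reading the mount table via /etc/mtab works around psutil issue 1307 (wrong mount points reported inside containers); realpath() is applied because on most modern systems /etc/mtab is itself a symlink into /proc. A quick, system-dependent illustration:

    >>> import os
    >>> os.path.realpath("/etc/mtab")   # often a symlink to ../proc/self/mounts
    '/proc/1234/mounts'                 # hypothetical; 'self' resolves to our PID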
     
     
    @@ -1191,6 +1206,8 @@ def sensors_temperatures():
         # https://github.com/giampaolo/psutil/issues/971
         # https://github.com/nicolargo/glances/issues/1060
         basenames.extend(glob.glob('/sys/class/hwmon/hwmon*/device/temp*_*'))
    +    basenames.extend(glob.glob(
    +        '/sys/devices/platform/coretemp.*/hwmon/hwmon*/temp*_*'))
         basenames = sorted(set([x.split('_')[0] for x in basenames]))
     
         for base in basenames:
    @@ -1199,7 +1216,7 @@ def sensors_temperatures():
                 current = float(cat(path)) / 1000.0
                 path = os.path.join(os.path.dirname(base), 'name')
                 unit_name = cat(path, binary=False)
    -        except (IOError, OSError, ValueError) as err:
    +        except (IOError, OSError, ValueError):
                 # A lot of things can go wrong here, so let's just skip the
                 # whole entry. Sure thing is Linux's /sys/class/hwmon really
                 # is a stinky broken mess.
    @@ -1208,8 +1225,6 @@ def sensors_temperatures():
                 # https://github.com/giampaolo/psutil/issues/1129
                 # https://github.com/giampaolo/psutil/issues/1245
                 # https://github.com/giampaolo/psutil/issues/1323
    -            warnings.warn("ignoring %r for file %r" % (err, path),
    -                          RuntimeWarning)
                 continue
     
             high = cat(base + '_max', fallback=None)
    @@ -1229,7 +1244,50 @@ def sensors_temperatures():
     
             ret[unit_name].append((label, current, high, critical))
     
    -    return ret
    +    # Indication that no sensors were detected in /sys/class/hwmon/
    +    if not basenames:
    +        basenames = glob.glob('/sys/class/thermal/thermal_zone*')
    +        basenames = sorted(set(basenames))
    +
    +        for base in basenames:
    +            try:
    +                path = os.path.join(base, 'temp')
    +                current = float(cat(path)) / 1000.0
    +                path = os.path.join(base, 'type')
    +                unit_name = cat(path, binary=False)
    +            except (IOError, OSError, ValueError) as err:
    +                debug("ignoring %r for file %r" % (err, path))
    +                continue
    +
    +            trip_paths = glob.glob(base + '/trip_point*')
    +            trip_points = set(['_'.join(
    +                os.path.basename(p).split('_')[0:3]) for p in trip_paths])
    +            critical = None
    +            high = None
    +            for trip_point in trip_points:
    +                path = os.path.join(base, trip_point + "_type")
    +                trip_type = cat(path, fallback='', binary=False)
    +                if trip_type == 'critical':
    +                    critical = cat(os.path.join(base, trip_point + "_temp"),
    +                                   fallback=None)
    +                elif trip_type == 'high':
    +                    high = cat(os.path.join(base, trip_point + "_temp"),
    +                               fallback=None)
    +
    +                if high is not None:
    +                    try:
    +                        high = float(high) / 1000.0
    +                    except ValueError:
    +                        high = None
    +                if critical is not None:
    +                    try:
    +                        critical = float(critical) / 1000.0
    +                    except ValueError:
    +                        critical = None
    +
    +            ret[unit_name].append(('', current, high, critical))
    +
    +    return dict(ret)
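When /sys/class/hwmon yields nothing, the new fallback walks /sys/class/thermal/thermal_zone* and derives high/critical from each zone's trip points; those entries carry no per-sensor label, hence the empty string. Illustrative result shape (sensor names and values made up):

    >>> psutil.sensors_temperatures()
    {'coretemp': [shwtemp(label='Core 0', current=45.0, high=82.0,
                          critical=100.0)],
     'acpitz': [shwtemp(label='', current=47.5, high=None, critical=None)]}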
     
     
     def sensors_fans():
    @@ -1436,11 +1494,10 @@ def ppid_map():
             try:
                 with open_binary("%s/%s/stat" % (procfs_path, pid)) as f:
                     data = f.read()
    -        except EnvironmentError as err:
    +        except (FileNotFoundError, ProcessLookupError):
                 # Note: we should be able to access /stat for all processes
                 # aka it's unlikely we'll bump into EPERM, which is good.
    -            if err.errno not in (errno.ENOENT, errno.ESRCH):
    -                raise
    +            pass
             else:
                 rpar = data.rfind(b')')
                 dset = data[rpar + 2:].split()
    @@ -1457,16 +1514,12 @@ def wrap_exceptions(fun):
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
    -        except EnvironmentError as err:
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
    -            # ESRCH (no such process) can be raised on read() if
    -            # process is gone in the meantime.
    -            if err.errno == errno.ESRCH:
    -                raise NoSuchProcess(self.pid, self._name)
    -            # ENOENT (no such file or directory) can be raised on open().
    -            if err.errno == errno.ENOENT and not os.path.exists("%s/%s" % (
    -                    self._procfs_path, self.pid)):
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
    +        except ProcessLookupError:
    +            raise NoSuchProcess(self.pid, self._name)
    +        except FileNotFoundError:
    +            if not os.path.exists("%s/%s" % (self._procfs_path, self.pid)):
                     raise NoSuchProcess(self.pid, self._name)
                 # Note: zombies will keep existing under /proc until they're
                 # gone so there's no way to distinguish them in here.
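Throughout this file the errno arithmetic is replaced by exception-class dispatch: on Python 3 these are the builtin OSError subclasses, and psutil's _compat module supplies equivalents for Python 2. The equivalence being relied on, illustrated with a hypothetical non-existent PID:

    import errno

    try:
        open("/proc/2147483647/stat")
    except FileNotFoundError as err:
        assert err.errno == errno.ENOENT   # the condition the old code tested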
    @@ -1477,7 +1530,7 @@ def wrap_exceptions(fun):
     class Process(object):
         """Linux process implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid", "_procfs_path"]
    +    __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
    @@ -1485,13 +1538,20 @@ class Process(object):
             self._ppid = None
             self._procfs_path = get_procfs_path()
     
    +    def _assert_alive(self):
    +        """Raise NSP if the process disappeared on us."""
+        # For those C functions that do not raise NSP, possibly returning
+        # an incorrect or incomplete result.
    +        os.stat('%s/%s' % (self._procfs_path, self.pid))
    +
    +    @wrap_exceptions
         @memoize_when_activated
         def _parse_stat_file(self):
    -        """Parse /proc/{pid}/stat file. Return a list of fields where
    -        process name is in position 0.
    +        """Parse /proc/{pid}/stat file and return a dict with various
    +        process info.
             Using "man proc" as a reference: where "man proc" refers to
    -        position N, always substract 2 (e.g starttime pos 22 in
    -        'man proc' == pos 20 in the list returned here).
+        position N, always subtract 3 (e.g. ppid is position 4 in
+        'man proc' == position 1 here).
             The return value is cached in case oneshot() ctx manager is
             in use.
             """
    @@ -1502,9 +1562,24 @@ class Process(object):
         # the first occurrence of "(" and the last occurrence of ")".
             rpar = data.rfind(b')')
             name = data[data.find(b'(') + 1:rpar]
    -        others = data[rpar + 2:].split()
    -        return [name] + others
    +        fields = data[rpar + 2:].split()
     
    +        ret = {}
    +        ret['name'] = name
    +        ret['status'] = fields[0]
    +        ret['ppid'] = fields[1]
    +        ret['ttynr'] = fields[4]
    +        ret['utime'] = fields[11]
    +        ret['stime'] = fields[12]
    +        ret['children_utime'] = fields[13]
    +        ret['children_stime'] = fields[14]
    +        ret['create_time'] = fields[19]
    +        ret['cpu_num'] = fields[36]
    +        ret['blkio_ticks'] = fields[39]  # aka 'delayacct_blkio_ticks'
    +
    +        return ret
    +
    +    @wrap_exceptions
         @memoize_when_activated
         def _read_status_file(self):
             """Read /proc/{pid}/stat file and return its content.
    @@ -1514,6 +1589,7 @@ class Process(object):
             with open_binary("%s/%s/status" % (self._procfs_path, self.pid)) as f:
                 return f.read()
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _read_smaps_file(self):
             with open_binary("%s/%s/smaps" % (self._procfs_path, self.pid),
    @@ -1521,18 +1597,18 @@ class Process(object):
                 return f.read().strip()
     
         def oneshot_enter(self):
    -        self._parse_stat_file.cache_activate()
    -        self._read_status_file.cache_activate()
    -        self._read_smaps_file.cache_activate()
    +        self._parse_stat_file.cache_activate(self)
    +        self._read_status_file.cache_activate(self)
    +        self._read_smaps_file.cache_activate(self)
     
         def oneshot_exit(self):
    -        self._parse_stat_file.cache_deactivate()
    -        self._read_status_file.cache_deactivate()
    -        self._read_smaps_file.cache_deactivate()
    +        self._parse_stat_file.cache_deactivate(self)
    +        self._read_status_file.cache_deactivate(self)
    +        self._read_smaps_file.cache_deactivate(self)
     
         @wrap_exceptions
         def name(self):
    -        name = self._parse_stat_file()[0]
    +        name = self._parse_stat_file()['name']
             if PY3:
                 name = decode(name)
             # XXX - gets changed later and probably needs refactoring
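cache_activate()/cache_deactivate() now receive the instance because the cache moved from the function object onto the process object itself (note the new "_cache" slot in __slots__ above). A minimal sketch of such a per-instance, switchable memoizer, assuming this is the idea rather than psutil's exact code:

    import functools

    def memoize_when_activated(fun):
        @functools.wraps(fun)
        def wrapper(self):
            try:
                return self._cache[fun]
            except AttributeError:  # oneshot() not active: compute, don't cache
                return fun(self)
            except KeyError:        # active but not computed yet
                ret = self._cache[fun] = fun(self)
                return ret

        def cache_activate(proc):
            proc._cache = {}

        def cache_deactivate(proc):
            try:
                del proc._cache
            except AttributeError:
                pass

        wrapper.cache_activate = cache_activate
        wrapper.cache_deactivate = cache_deactivate
        return wrapper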
    @@ -1541,21 +1617,19 @@ class Process(object):
         def exe(self):
             try:
                 return readlink("%s/%s/exe" % (self._procfs_path, self.pid))
    -        except OSError as err:
    -            if err.errno in (errno.ENOENT, errno.ESRCH):
    -                # no such file error; might be raised also if the
    -                # path actually exists for system processes with
    -                # low pids (about 0-20)
    -                if os.path.lexists("%s/%s" % (self._procfs_path, self.pid)):
    -                    return ""
    +        except (FileNotFoundError, ProcessLookupError):
+            # no such file error; might also be raised if the
    +            # path actually exists for system processes with
    +            # low pids (about 0-20)
    +            if os.path.lexists("%s/%s" % (self._procfs_path, self.pid)):
    +                return ""
    +            else:
    +                if not pid_exists(self.pid):
    +                    raise NoSuchProcess(self.pid, self._name)
                     else:
    -                    if not pid_exists(self.pid):
    -                        raise NoSuchProcess(self.pid, self._name)
    -                    else:
    -                        raise ZombieProcess(self.pid, self._name, self._ppid)
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
    -            raise
    +                    raise ZombieProcess(self.pid, self._name, self._ppid)
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
     
         @wrap_exceptions
         def cmdline(self):
    @@ -1574,7 +1648,13 @@ class Process(object):
             sep = '\x00' if data.endswith('\x00') else ' '
             if data.endswith(sep):
                 data = data[:-1]
    -        return [x for x in data.split(sep)]
    +        cmdline = data.split(sep)
+        # Sometimes the last char is a null byte '\0' but the args are
    +        # separated by spaces, see: https://github.com/giampaolo/psutil/
    +        # issues/1179#issuecomment-552984549
    +        if sep == '\x00' and len(cmdline) == 1 and ' ' in data:
    +            cmdline = data.split(' ')
    +        return cmdline
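The case being handled, illustrated with hypothetical /proc/<pid>/cmdline content:

    data = "arg1 arg2 arg3\x00"  # single trailing NUL, space-separated args
    # before this fix: ["arg1 arg2 arg3"]  (one bogus argument)
    # after this fix:  ["arg1", "arg2", "arg3"]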
     
         @wrap_exceptions
         def environ(self):
    @@ -1584,13 +1664,14 @@ class Process(object):
     
         @wrap_exceptions
         def terminal(self):
    -        tty_nr = int(self._parse_stat_file()[5])
    +        tty_nr = int(self._parse_stat_file()['ttynr'])
             tmap = _psposix.get_terminal_map()
             try:
                 return tmap[tty_nr]
             except KeyError:
                 return None
     
    +    # May not be available on old kernels.
         if os.path.exists('/proc/%s/io' % os.getpid()):
             @wrap_exceptions
             def io_counters(self):
    @@ -1601,36 +1682,42 @@ class Process(object):
                         # https://github.com/giampaolo/psutil/issues/1004
                         line = line.strip()
                         if line:
    -                        name, value = line.split(b': ')
    -                        fields[name] = int(value)
    +                        try:
    +                            name, value = line.split(b': ')
    +                        except ValueError:
    +                            # https://github.com/giampaolo/psutil/issues/1004
    +                            continue
    +                        else:
    +                            fields[name] = int(value)
                 if not fields:
                     raise RuntimeError("%s file was empty" % fname)
    -            return pio(
    -                fields[b'syscr'],  # read syscalls
    -                fields[b'syscw'],  # write syscalls
    -                fields[b'read_bytes'],  # read bytes
    -                fields[b'write_bytes'],  # write bytes
    -                fields[b'rchar'],  # read chars
    -                fields[b'wchar'],  # write chars
    -            )
    -    else:
    -        def io_counters(self):
    -            raise NotImplementedError("couldn't find /proc/%s/io (kernel "
    -                                      "too old?)" % self.pid)
    +            try:
    +                return pio(
    +                    fields[b'syscr'],  # read syscalls
    +                    fields[b'syscw'],  # write syscalls
    +                    fields[b'read_bytes'],  # read bytes
    +                    fields[b'write_bytes'],  # write bytes
    +                    fields[b'rchar'],  # read chars
    +                    fields[b'wchar'],  # write chars
    +                )
    +            except KeyError as err:
    +                raise ValueError("%r field was not found in %s; found fields "
    +                                 "are %r" % (err[0], fname, fields))
     
         @wrap_exceptions
         def cpu_times(self):
             values = self._parse_stat_file()
    -        utime = float(values[12]) / CLOCK_TICKS
    -        stime = float(values[13]) / CLOCK_TICKS
    -        children_utime = float(values[14]) / CLOCK_TICKS
    -        children_stime = float(values[15]) / CLOCK_TICKS
    -        return _common.pcputimes(utime, stime, children_utime, children_stime)
    +        utime = float(values['utime']) / CLOCK_TICKS
    +        stime = float(values['stime']) / CLOCK_TICKS
    +        children_utime = float(values['children_utime']) / CLOCK_TICKS
    +        children_stime = float(values['children_stime']) / CLOCK_TICKS
    +        iowait = float(values['blkio_ticks']) / CLOCK_TICKS
    +        return pcputimes(utime, stime, children_utime, children_stime, iowait)
     
         @wrap_exceptions
         def cpu_num(self):
             """What CPU the process is on."""
    -        return int(self._parse_stat_file()[37])
    +        return int(self._parse_stat_file()['cpu_num'])
     
         @wrap_exceptions
         def wait(self, timeout=None):
    @@ -1638,14 +1725,14 @@ class Process(object):
     
         @wrap_exceptions
         def create_time(self):
    -        values = self._parse_stat_file()
    +        ctime = float(self._parse_stat_file()['create_time'])
             # According to documentation, starttime is in field 21 and the
             # unit is jiffies (clock ticks).
         # We first divide it by clock ticks and then add uptime, returning
             # seconds since the epoch, in UTC.
             # Also use cached value if available.
             bt = BOOT_TIME or boot_time()
    -        return (float(values[20]) / CLOCK_TICKS) + bt
    +        return (ctime / CLOCK_TICKS) + bt
     
         @wrap_exceptions
         def memory_info(self):
    @@ -1707,6 +1794,9 @@ class Process(object):
                 """Return process's mapped memory regions as a list of named
                 tuples. Fields are explained in 'man proc'; here is an updated
                 (Apr 2012) version: http://goo.gl/fmebo
    +
    +            /proc/{PID}/smaps does not exist on kernels < 2.6.14 or if
    +            CONFIG_MMU kernel configuration option is not enabled.
                 """
                 def get_blocks(lines, current_block):
                     data = {}
    @@ -1754,7 +1844,7 @@ class Process(object):
                             path = path[:-10]
                     ls.append((
                         decode(addr), decode(perms), path,
    -                    data[b'Rss:'],
    +                    data.get(b'Rss:', 0),
                         data.get(b'Size:', 0),
                         data.get(b'Pss:', 0),
                         data.get(b'Shared_Clean:', 0),
    @@ -1767,25 +1857,16 @@ class Process(object):
                     ))
                 return ls
     
    -    else:  # pragma: no cover
    -        def memory_maps(self):
    -            raise NotImplementedError(
    -                "/proc/%s/smaps does not exist on kernels < 2.6.14 or "
    -                "if CONFIG_MMU kernel configuration option is not "
    -                "enabled." % self.pid)
    -
         @wrap_exceptions
         def cwd(self):
             try:
                 return readlink("%s/%s/cwd" % (self._procfs_path, self.pid))
    -        except OSError as err:
    +        except (FileNotFoundError, ProcessLookupError):
                 # https://github.com/giampaolo/psutil/issues/986
    -            if err.errno in (errno.ENOENT, errno.ESRCH):
    -                if not pid_exists(self.pid):
    -                    raise NoSuchProcess(self.pid, self._name)
    -                else:
    -                    raise ZombieProcess(self.pid, self._name, self._ppid)
    -            raise
    +            if not pid_exists(self.pid):
    +                raise NoSuchProcess(self.pid, self._name)
    +            else:
    +                raise ZombieProcess(self.pid, self._name, self._ppid)
     
         @wrap_exceptions
         def num_ctx_switches(self,
    @@ -1821,13 +1902,11 @@ class Process(object):
                 try:
                     with open_binary(fname) as f:
                         st = f.read().strip()
    -            except IOError as err:
    -                if err.errno == errno.ENOENT:
    -                    # no such file or directory; it means thread
    -                    # disappeared on us
    -                    hit_enoent = True
    -                    continue
    -                raise
    +            except FileNotFoundError:
    +                # no such file or directory; it means thread
    +                # disappeared on us
    +                hit_enoent = True
    +                continue
                 # ignore the first two values ("pid (exe)")
                 st = st[st.find(b')') + 2:]
                 values = st.split(b' ')
    @@ -1836,8 +1915,7 @@ class Process(object):
                 ntuple = _common.pthread(int(thread_id), utime, stime)
                 retlist.append(ntuple)
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (self._procfs_path, self.pid))
    +            self._assert_alive()
             return retlist
     
         @wrap_exceptions
    @@ -1853,41 +1931,44 @@ class Process(object):
         def nice_set(self, value):
             return cext_posix.setpriority(self.pid, value)
     
    -    @wrap_exceptions
    -    def cpu_affinity_get(self):
    -        return cext.proc_cpu_affinity_get(self.pid)
    +    # starting from CentOS 6.
    +    if HAS_CPU_AFFINITY:
     
    -    def _get_eligible_cpus(
    -            self, _re=re.compile(br"Cpus_allowed_list:\t(\d+)-(\d+)")):
    -        # See: https://github.com/giampaolo/psutil/issues/956
    -        data = self._read_status_file()
    -        match = _re.findall(data)
    -        if match:
    -            return list(range(int(match[0][0]), int(match[0][1]) + 1))
    -        else:
    -            return list(range(len(per_cpu_times())))
    +        @wrap_exceptions
    +        def cpu_affinity_get(self):
    +            return cext.proc_cpu_affinity_get(self.pid)
     
    -    @wrap_exceptions
    -    def cpu_affinity_set(self, cpus):
    -        try:
    -            cext.proc_cpu_affinity_set(self.pid, cpus)
    -        except (OSError, ValueError) as err:
    -            if isinstance(err, ValueError) or err.errno == errno.EINVAL:
    -                eligible_cpus = self._get_eligible_cpus()
    -                all_cpus = tuple(range(len(per_cpu_times())))
    -                for cpu in cpus:
    -                    if cpu not in all_cpus:
    -                        raise ValueError(
    -                            "invalid CPU number %r; choose between %s" % (
    -                                cpu, eligible_cpus))
    -                    if cpu not in eligible_cpus:
    -                        raise ValueError(
    -                            "CPU number %r is not eligible; choose "
    -                            "between %s" % (cpu, eligible_cpus))
    -            raise
    +        def _get_eligible_cpus(
    +                self, _re=re.compile(br"Cpus_allowed_list:\t(\d+)-(\d+)")):
    +            # See: https://github.com/giampaolo/psutil/issues/956
    +            data = self._read_status_file()
    +            match = _re.findall(data)
    +            if match:
    +                return list(range(int(match[0][0]), int(match[0][1]) + 1))
    +            else:
    +                return list(range(len(per_cpu_times())))
    +
    +        @wrap_exceptions
    +        def cpu_affinity_set(self, cpus):
    +            try:
    +                cext.proc_cpu_affinity_set(self.pid, cpus)
    +            except (OSError, ValueError) as err:
    +                if isinstance(err, ValueError) or err.errno == errno.EINVAL:
    +                    eligible_cpus = self._get_eligible_cpus()
    +                    all_cpus = tuple(range(len(per_cpu_times())))
    +                    for cpu in cpus:
    +                        if cpu not in all_cpus:
    +                            raise ValueError(
    +                                "invalid CPU number %r; choose between %s" % (
    +                                    cpu, eligible_cpus))
    +                        if cpu not in eligible_cpus:
    +                            raise ValueError(
    +                                "CPU number %r is not eligible; choose "
    +                                "between %s" % (cpu, eligible_cpus))
    +                raise
     
         # only starting from kernel 2.6.13
    -    if hasattr(cext, "proc_ioprio_get"):
    +    if HAS_PROC_IO_PRIORITY:
     
             @wrap_exceptions
             def ionice_get(self):
    @@ -1898,38 +1979,16 @@ class Process(object):
     
             @wrap_exceptions
             def ionice_set(self, ioclass, value):
    -            if value is not None:
    -                if not PY3 and not isinstance(value, (int, long)):
    -                    msg = "value argument is not an integer (gor %r)" % value
    -                    raise TypeError(msg)
    -                if not 0 <= value <= 7:
    -                    raise ValueError(
    -                        "value argument range expected is between 0 and 7")
    -
    -            if ioclass in (IOPRIO_CLASS_NONE, None):
    -                if value:
    -                    msg = "can't specify value with IOPRIO_CLASS_NONE " \
    -                          "(got %r)" % value
    -                    raise ValueError(msg)
    -                ioclass = IOPRIO_CLASS_NONE
    +            if value is None:
                     value = 0
    -            elif ioclass == IOPRIO_CLASS_IDLE:
    -                if value:
    -                    msg = "can't specify value with IOPRIO_CLASS_IDLE " \
    -                          "(got %r)" % value
    -                    raise ValueError(msg)
    -                value = 0
    -            elif ioclass in (IOPRIO_CLASS_RT, IOPRIO_CLASS_BE):
    -                if value is None:
    -                    # TODO: add comment explaining why this is 4 (?)
    -                    value = 4
    -            else:
    -                # otherwise we would get OSError(EVINAL)
    -                raise ValueError("invalid ioclass argument %r" % ioclass)
    -
    +            if value and ioclass in (IOPRIO_CLASS_IDLE, IOPRIO_CLASS_NONE):
    +                raise ValueError("%r ioclass accepts no value" % ioclass)
    +            if value < 0 or value > 7:
    +                raise ValueError("value not in 0-7 range")
                 return cext.proc_ioprio_set(self.pid, ioclass, value)
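The rewritten validation preserves the public API with far fewer branches. Typical calls through the public wrapper (psutil.Process.ionice(ioclass=None, value=None)):

    import psutil

    p = psutil.Process()
    p.ionice(psutil.IOPRIO_CLASS_BE, value=4)  # RT/BE take a 0-7 level
    p.ionice(psutil.IOPRIO_CLASS_IDLE)         # IDLE/NONE accept no value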
     
         if HAS_PRLIMIT:
    +
             @wrap_exceptions
             def rlimit(self, resource, limits=None):
                 # If pid is 0 prlimit() applies to the calling process and
    @@ -1959,7 +2018,7 @@ class Process(object):
     
         @wrap_exceptions
         def status(self):
    -        letter = self._parse_stat_file()[1]
    +        letter = self._parse_stat_file()['status']
             if PY3:
                 letter = letter.decode()
             # XXX is '?' legit? (we're not supposed to return it anyway)
    @@ -1974,16 +2033,15 @@ class Process(object):
                 file = "%s/%s/fd/%s" % (self._procfs_path, self.pid, fd)
                 try:
                     path = readlink(file)
    -            except OSError as err:
    +            except (FileNotFoundError, ProcessLookupError):
                     # ENOENT == file which is gone in the meantime
    -                if err.errno in (errno.ENOENT, errno.ESRCH):
    -                    hit_enoent = True
    -                    continue
    -                elif err.errno == errno.EINVAL:
    +                hit_enoent = True
    +                continue
    +            except OSError as err:
    +                if err.errno == errno.EINVAL:
                         # not a link
                         continue
    -                else:
    -                    raise
    +                raise
                 else:
                     # If path is not an absolute there's no way to tell
                     # whether it's a regular file or not, so we skip it.
    @@ -1997,29 +2055,23 @@ class Process(object):
                             with open_binary(file) as f:
                                 pos = int(f.readline().split()[1])
                                 flags = int(f.readline().split()[1], 8)
    -                    except IOError as err:
    -                        if err.errno == errno.ENOENT:
    -                            # fd gone in the meantime; does not
    -                            # necessarily mean the process disappeared
    -                            # on us.
    -                            hit_enoent = True
    -                        else:
    -                            raise
    +                    except FileNotFoundError:
    +                        # fd gone in the meantime; process may
    +                        # still be alive
    +                        hit_enoent = True
                         else:
                             mode = file_flags_to_mode(flags)
                             ntuple = popenfile(
                                 path, int(fd), int(pos), mode, flags)
                             retlist.append(ntuple)
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (self._procfs_path, self.pid))
    +            self._assert_alive()
             return retlist
     
         @wrap_exceptions
         def connections(self, kind='inet'):
             ret = _connections.retrieve(kind, self.pid)
    -        # raise NSP if the process disappeared on us
    -        os.stat('%s/%s' % (self._procfs_path, self.pid))
    +        self._assert_alive()
             return ret
     
         @wrap_exceptions
    @@ -2028,7 +2080,7 @@ class Process(object):
     
         @wrap_exceptions
         def ppid(self):
    -        return int(self._parse_stat_file()[2])
    +        return int(self._parse_stat_file()['ppid'])
     
         @wrap_exceptions
         def uids(self, _uids_re=re.compile(br'Uid:\t(\d+)\t(\d+)\t(\d+)')):
    diff --git a/server/www/packages/packages-windows/x86/psutil/_psosx.py b/server/www/packages/packages-windows/x86/psutil/_psosx.py
    index fbfedf3..e429649 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_psosx.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_psosx.py
    @@ -8,24 +8,23 @@ import contextlib
     import errno
     import functools
     import os
    -from socket import AF_INET
     from collections import namedtuple
     
     from . import _common
     from . import _psposix
     from . import _psutil_osx as cext
     from . import _psutil_posix as cext_posix
    -from ._common import AF_INET6
    +from ._common import AccessDenied
     from ._common import conn_tmap
    +from ._common import conn_to_ntuple
     from ._common import isfile_strict
     from ._common import memoize_when_activated
    +from ._common import NoSuchProcess
     from ._common import parse_environ_block
    -from ._common import sockfam_to_enum
    -from ._common import socktype_to_enum
     from ._common import usage_percent
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import ZombieProcess
    +from ._common import ZombieProcess
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     
     
     __extra__all__ = []
    @@ -103,13 +102,6 @@ svmem = namedtuple(
     pmem = namedtuple('pmem', ['rss', 'vms', 'pfaults', 'pageins'])
     # psutil.Process.memory_full_info()
     pfullmem = namedtuple('pfullmem', pmem._fields + ('uss', ))
    -# psutil.Process.memory_maps(grouped=True)
    -pmmap_grouped = namedtuple(
    -    'pmmap_grouped',
    -    'path rss private swapped dirtied ref_count shadow_depth')
    -# psutil.Process.memory_maps(grouped=False)
    -pmmap_ext = namedtuple(
    -    'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
     
     
     # =====================================================================
    @@ -119,9 +111,16 @@ pmmap_ext = namedtuple(
     
     def virtual_memory():
         """System virtual memory as a namedtuple."""
    -    total, active, inactive, wired, free = cext.virtual_mem()
    +    total, active, inactive, wired, free, speculative = cext.virtual_mem()
+    # This is how Zabbix calculates avail and used mem:
    +    # https://github.com/zabbix/zabbix/blob/trunk/src/libs/zbxsysinfo/
    +    #     osx/memory.c
    +    # Also see: https://github.com/giampaolo/psutil/issues/1277
         avail = inactive + free
    -    used = active + inactive + wired
    +    used = active + wired
+    # This is NOT how Zabbix calculates free mem but it matches the
+    # "free" cmdline utility.
    +    free -= speculative
         percent = usage_percent((total - avail), total, round_=1)
         return svmem(total, avail, percent, used, free,
                      active, inactive, wired)
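A worked example with made-up byte counts, showing how the new formulas differ from the old ones:

    total, active, inactive, wired, free, speculative = (
        8000, 3000, 2000, 1000, 2000, 500)
    avail = inactive + free  # 4000 (unchanged)
    used = active + wired    # 4000 (the old formula also added inactive)
    free -= speculative      # 1500, matching what "free"-style tools print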
    @@ -333,12 +332,10 @@ def wrap_exceptions(fun):
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
    -        except OSError as err:
    -            if err.errno == errno.ESRCH:
    -                raise NoSuchProcess(self.pid, self._name)
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
    -            raise
    +        except ProcessLookupError:
    +            raise NoSuchProcess(self.pid, self._name)
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
             except cext.ZombieProcessError:
                 raise ZombieProcess(self.pid, self._name, self._ppid)
         return wrapper
    @@ -373,13 +370,14 @@ def catch_zombie(proc):
     class Process(object):
         """Wrapper class around underlying C implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid"]
    +    __slots__ = ["pid", "_name", "_ppid", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
             self._name = None
             self._ppid = None
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _get_kinfo_proc(self):
             # Note: should work with all PIDs without permission issues.
    @@ -387,6 +385,7 @@ class Process(object):
             assert len(ret) == len(kinfo_proc_map)
             return ret
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _get_pidtaskinfo(self):
             # Note: should work for PIDs owned by user only.
    @@ -396,12 +395,12 @@ class Process(object):
             return ret
     
         def oneshot_enter(self):
    -        self._get_kinfo_proc.cache_activate()
    -        self._get_pidtaskinfo.cache_activate()
    +        self._get_kinfo_proc.cache_activate(self)
    +        self._get_pidtaskinfo.cache_activate(self)
     
         def oneshot_exit(self):
    -        self._get_kinfo_proc.cache_deactivate()
    -        self._get_pidtaskinfo.cache_deactivate()
    +        self._get_kinfo_proc.cache_deactivate(self)
    +        self._get_pidtaskinfo.cache_deactivate(self)
     
         @wrap_exceptions
         def name(self):
    @@ -523,15 +522,8 @@ class Process(object):
             ret = []
             for item in rawlist:
                 fd, fam, type, laddr, raddr, status = item
    -            status = TCP_STATUSES[status]
    -            fam = sockfam_to_enum(fam)
    -            type = socktype_to_enum(type)
    -            if fam in (AF_INET, AF_INET6):
    -                if laddr:
    -                    laddr = _common.addr(*laddr)
    -                if raddr:
    -                    raddr = _common.addr(*raddr)
    -            nt = _common.pconn(fd, fam, type, laddr, raddr, status)
    +            nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status,
    +                                TCP_STATUSES)
                 ret.append(nt)
             return ret
     
    @@ -570,7 +562,3 @@ class Process(object):
                 ntuple = _common.pthread(thread_id, utime, stime)
                 retlist.append(ntuple)
             return retlist
    -
    -    @wrap_exceptions
    -    def memory_maps(self):
    -        return cext.proc_memory_maps(self.pid)
    diff --git a/server/www/packages/packages-windows/x86/psutil/_psposix.py b/server/www/packages/packages-windows/x86/psutil/_psposix.py
    index 9c3fac2..88213ef 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_psposix.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_psposix.py
    @@ -4,7 +4,6 @@
     
     """Routines common to all posix systems."""
     
    -import errno
     import glob
     import os
     import sys
    @@ -12,10 +11,15 @@ import time
     
     from ._common import memoize
     from ._common import sdiskusage
    +from ._common import TimeoutExpired
     from ._common import usage_percent
    +from ._compat import ChildProcessError
    +from ._compat import FileNotFoundError
    +from ._compat import InterruptedError
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import PY3
     from ._compat import unicode
    -from ._exceptions import TimeoutExpired
     
     
     __all__ = ['pid_exists', 'wait_pid', 'disk_usage', 'get_terminal_map']
    @@ -32,19 +36,13 @@ def pid_exists(pid):
             return True
         try:
             os.kill(pid, 0)
    -    except OSError as err:
    -        if err.errno == errno.ESRCH:
    -            # ESRCH == No such process
    -            return False
    -        elif err.errno == errno.EPERM:
    -            # EPERM clearly means there's a process to deny access to
    -            return True
    -        else:
    -            # According to "man 2 kill" possible error values are
    -            # (EINVAL, EPERM, ESRCH) therefore we should never get
    -            # here. If we do let's be explicit in considering this
    -            # an error.
    -            raise err
    +    except ProcessLookupError:
    +        return False
    +    except PermissionError:
    +        # EPERM clearly means there's a process to deny access to
    +        return True
    +    # According to "man 2 kill" possible error values are
    +    # (EINVAL, EPERM, ESRCH)
         else:
             return True
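Behavior of the rewritten probe, assuming an unprivileged caller on a system where PID 1 is running and PID 999999 is not:

    pid_exists(1)            # True: os.kill(1, 0) raises PermissionError (EPERM)
    pid_exists(999999)       # False: os.kill raises ProcessLookupError (ESRCH)
    pid_exists(os.getpid())  # True: the signal-0 probe simply succeeds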
     
    @@ -80,24 +78,20 @@ def wait_pid(pid, timeout=None, proc_name=None):
         while True:
             try:
                 retpid, status = waitcall()
    -        except OSError as err:
    -            if err.errno == errno.EINTR:
    -                delay = check_timeout(delay)
    -                continue
    -            elif err.errno == errno.ECHILD:
    -                # This has two meanings:
    -                # - pid is not a child of os.getpid() in which case
    -                #   we keep polling until it's gone
    -                # - pid never existed in the first place
    -                # In both cases we'll eventually return None as we
    -                # can't determine its exit status code.
    -                while True:
    -                    if pid_exists(pid):
    -                        delay = check_timeout(delay)
    -                    else:
    -                        return
    -            else:
    -                raise
    +        except InterruptedError:
    +            delay = check_timeout(delay)
    +        except ChildProcessError:
    +            # This has two meanings:
    +            # - pid is not a child of os.getpid() in which case
    +            #   we keep polling until it's gone
    +            # - pid never existed in the first place
    +            # In both cases we'll eventually return None as we
    +            # can't determine its exit status code.
    +            while True:
    +                if pid_exists(pid):
    +                    delay = check_timeout(delay)
    +                else:
    +                    return
             else:
                 if retpid == 0:
                     # WNOHANG was used, pid is still running
    @@ -176,7 +170,6 @@ def get_terminal_map():
             assert name not in ret, name
             try:
                 ret[os.stat(name).st_rdev] = name
    -        except OSError as err:
    -            if err.errno != errno.ENOENT:
    -                raise
    +        except FileNotFoundError:
    +            pass
         return ret
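
Note on the _psposix.py hunks above: they replace explicit errno checks with the exception classes Python 3 raises natively (psutil's _compat module backports them to Python 2). A minimal sketch of the resulting probe-with-signal-0 idiom, assuming Python 3 builtins and POSIX semantics:

    import os

    def pid_exists(pid):
        # Signal 0 probes a process without actually delivering a signal.
        if pid == 0:
            # On POSIX, kill(0, ...) targets the caller's own process
            # group, so treat PID 0 as "exists" instead of probing it.
            return True
        try:
            os.kill(pid, 0)
        except ProcessLookupError:
            return False        # ESRCH: no such process
        except PermissionError:
            return True         # EPERM: it exists, we just can't signal it
        else:
            return True
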
    diff --git a/server/www/packages/packages-windows/x86/psutil/_pssunos.py b/server/www/packages/packages-windows/x86/psutil/_pssunos.py
    index e2f33a3..62362b8 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_pssunos.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_pssunos.py
    @@ -5,6 +5,7 @@
     """Sun OS Solaris platform implementation."""
     
     import errno
    +import functools
     import os
     import socket
     import subprocess
    @@ -16,17 +17,22 @@ from . import _common
     from . import _psposix
     from . import _psutil_posix as cext_posix
     from . import _psutil_sunos as cext
    +from ._common import AccessDenied
     from ._common import AF_INET6
    +from ._common import debug
    +from ._common import get_procfs_path
     from ._common import isfile_strict
     from ._common import memoize_when_activated
    +from ._common import NoSuchProcess
     from ._common import sockfam_to_enum
     from ._common import socktype_to_enum
     from ._common import usage_percent
    +from ._common import ZombieProcess
     from ._compat import b
    +from ._compat import FileNotFoundError
    +from ._compat import PermissionError
    +from ._compat import ProcessLookupError
     from ._compat import PY3
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import ZombieProcess
     
     
     __extra__all__ = ["CONN_IDLE", "CONN_BOUND", "PROCFS_PATH"]
    @@ -109,16 +115,6 @@ pmmap_ext = namedtuple(
         'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
     
     
    -# =====================================================================
    -# --- utils
    -# =====================================================================
    -
    -
    -def get_procfs_path():
    -    """Return updated psutil.PROCFS_PATH constant."""
    -    return sys.modules['psutil'].PROCFS_PATH
    -
    -
     # =====================================================================
     # --- memory
     # =====================================================================
    @@ -230,7 +226,12 @@ def disk_partitions(all=False):
                 # Differently from, say, Linux, we don't have a list of
                 # common fs types so the best we can do, AFAIK, is to
                 # filter by filesystem having a total size > 0.
    -            if not disk_usage(mountpoint).total:
    +            try:
    +                if not disk_usage(mountpoint).total:
    +                    continue
    +            except OSError as err:
    +                # https://github.com/giampaolo/psutil/issues/1674
    +                debug("skipping %r: %r" % (mountpoint, err))
                     continue
             ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
             retlist.append(ntuple)
    @@ -266,6 +267,7 @@ def net_connections(kind, _pid=-1):
                 continue
             if type_ not in types:
                 continue
    +        # TODO: refactor and use _common.conn_to_ntuple.
             if fam in (AF_INET, AF_INET6):
                 if laddr:
                     laddr = _common.addr(*laddr)
    @@ -341,26 +343,26 @@ def wrap_exceptions(fun):
         """Call callable into a try/except clause and translate ENOENT,
         EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
         """
    -
    +    @functools.wraps(fun)
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
    -        except EnvironmentError as err:
    +        except (FileNotFoundError, ProcessLookupError):
    +            # ENOENT (no such file or directory) gets raised on open().
    +            # ESRCH (no such process) can get raised on read() if
    +            # process is gone in meantime.
    +            if not pid_exists(self.pid):
    +                raise NoSuchProcess(self.pid, self._name)
    +            else:
    +                raise ZombieProcess(self.pid, self._name, self._ppid)
    +        except PermissionError:
    +            raise AccessDenied(self.pid, self._name)
    +        except OSError:
                 if self.pid == 0:
                     if 0 in pids():
                         raise AccessDenied(self.pid, self._name)
                     else:
                         raise
    -            # ENOENT (no such file or directory) gets raised on open().
    -            # ESRCH (no such process) can get raised on read() if
    -            # process is gone in meantime.
    -            if err.errno in (errno.ENOENT, errno.ESRCH):
    -                if not pid_exists(self.pid):
    -                    raise NoSuchProcess(self.pid, self._name)
    -                else:
    -                    raise ZombieProcess(self.pid, self._name, self._ppid)
    -            if err.errno in (errno.EPERM, errno.EACCES):
    -                raise AccessDenied(self.pid, self._name)
                 raise
         return wrapper
     
    @@ -368,7 +370,7 @@ def wrap_exceptions(fun):
     class Process(object):
         """Wrapper class around underlying C implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid", "_procfs_path"]
    +    __slots__ = ["pid", "_name", "_ppid", "_procfs_path", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
    @@ -376,32 +378,41 @@ class Process(object):
             self._ppid = None
             self._procfs_path = get_procfs_path()
     
    +    def _assert_alive(self):
    +        """Raise NSP if the process disappeared on us."""
+        # For those C functions that do not raise NSP, possibly returning
+        # an incorrect or incomplete result.
    +        os.stat('%s/%s' % (self._procfs_path, self.pid))
    +
         def oneshot_enter(self):
    -        self._proc_name_and_args.cache_activate()
    -        self._proc_basic_info.cache_activate()
    -        self._proc_cred.cache_activate()
    +        self._proc_name_and_args.cache_activate(self)
    +        self._proc_basic_info.cache_activate(self)
    +        self._proc_cred.cache_activate(self)
     
         def oneshot_exit(self):
    -        self._proc_name_and_args.cache_deactivate()
    -        self._proc_basic_info.cache_deactivate()
    -        self._proc_cred.cache_deactivate()
    +        self._proc_name_and_args.cache_deactivate(self)
    +        self._proc_basic_info.cache_deactivate(self)
    +        self._proc_cred.cache_deactivate(self)
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _proc_name_and_args(self):
             return cext.proc_name_and_args(self.pid, self._procfs_path)
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _proc_basic_info(self):
    +        if self.pid == 0 and not \
    +                os.path.exists('%s/%s/psinfo' % (self._procfs_path, self.pid)):
    +            raise AccessDenied(self.pid)
             ret = cext.proc_basic_info(self.pid, self._procfs_path)
             assert len(ret) == len(proc_info_map)
             return ret
     
    +    @wrap_exceptions
         @memoize_when_activated
         def _proc_cred(self):
    -        @wrap_exceptions
    -        def proc_cred(self):
    -            return cext.proc_cred(self.pid, self._procfs_path)
    -        return proc_cred(self)
    +        return cext.proc_cred(self.pid, self._procfs_path)
     
         @wrap_exceptions
         def name(self):
    @@ -512,14 +523,11 @@ class Process(object):
                     try:
                         return os.readlink(
                             '%s/%d/path/%d' % (procfs_path, self.pid, x))
    -                except OSError as err:
    -                    if err.errno == errno.ENOENT:
    -                        hit_enoent = True
    -                        continue
    -                    raise
    +                except FileNotFoundError:
    +                    hit_enoent = True
    +                    continue
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (procfs_path, self.pid))
    +            self._assert_alive()
     
         @wrap_exceptions
         def cwd(self):
    @@ -530,11 +538,9 @@ class Process(object):
             procfs_path = self._procfs_path
             try:
                 return os.readlink("%s/%s/path/cwd" % (procfs_path, self.pid))
    -        except OSError as err:
    -            if err.errno == errno.ENOENT:
    -                os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
    -                return None
    -            raise
    +        except FileNotFoundError:
    +            os.stat("%s/%s" % (procfs_path, self.pid))  # raise NSP or AD
    +            return None
     
         @wrap_exceptions
         def memory_info(self):
    @@ -581,8 +587,7 @@ class Process(object):
                     nt = _common.pthread(tid, utime, stime)
                     ret.append(nt)
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (procfs_path, self.pid))
    +            self._assert_alive()
             return ret
     
         @wrap_exceptions
    @@ -596,18 +601,14 @@ class Process(object):
                 if os.path.islink(path):
                     try:
                         file = os.readlink(path)
    -                except OSError as err:
    -                    # ENOENT == file which is gone in the meantime
    -                    if err.errno == errno.ENOENT:
    -                        hit_enoent = True
    -                        continue
    -                    raise
    +                except FileNotFoundError:
    +                    hit_enoent = True
    +                    continue
                     else:
                         if isfile_strict(file):
                             retlist.append(_common.popenfile(file, int(fd)))
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (procfs_path, self.pid))
    +            self._assert_alive()
             return retlist
     
         def _get_unix_sockets(self, pid):
    @@ -707,8 +708,7 @@ class Process(object):
                             raise
                 retlist.append((addr, perm, name, rss, anon, locked))
             if hit_enoent:
    -            # raise NSP if the process disappeared on us
    -            os.stat('%s/%s' % (procfs_path, self.pid))
    +            self._assert_alive()
             return retlist
     
         @wrap_exceptions
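
Worth noting in the _pssunos.py hunks above: wrap_exceptions now catches concrete exception classes instead of inspecting err.errno, and the added functools.wraps keeps the wrapped function's identity intact for the oneshot cache. A stripped-down sketch of the translation pattern (the exception classes here are simplified stand-ins; the real decorator also distinguishes zombies via pid_exists()):

    import functools

    class NoSuchProcess(Exception):
        pass

    class AccessDenied(Exception):
        pass

    def wrap_exceptions(fun):
        # Translate raw OS errors into psutil-style exceptions; other
        # OSErrors propagate unchanged.
        @functools.wraps(fun)
        def wrapper(self, *args, **kwargs):
            try:
                return fun(self, *args, **kwargs)
            except (FileNotFoundError, ProcessLookupError):
                raise NoSuchProcess(self.pid)
            except PermissionError:
                raise AccessDenied(self.pid)
        return wrapper
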
    diff --git a/server/www/packages/packages-windows/x86/psutil/_psutil_windows.cp37-win32.pyd b/server/www/packages/packages-windows/x86/psutil/_psutil_windows.cp37-win32.pyd
    index feb16eb..7fdabf9 100644
    Binary files a/server/www/packages/packages-windows/x86/psutil/_psutil_windows.cp37-win32.pyd and b/server/www/packages/packages-windows/x86/psutil/_psutil_windows.cp37-win32.pyd differ
    diff --git a/server/www/packages/packages-windows/x86/psutil/_pswindows.py b/server/www/packages/packages-windows/x86/psutil/_pswindows.py
    index 18651d6..99d5d71 100644
    --- a/server/www/packages/packages-windows/x86/psutil/_pswindows.py
    +++ b/server/www/packages/packages-windows/x86/psutil/_pswindows.py
    @@ -8,11 +8,37 @@ import contextlib
     import errno
     import functools
     import os
    +import signal
     import sys
     import time
     from collections import namedtuple
     
     from . import _common
    +from ._common import AccessDenied
    +from ._common import conn_tmap
    +from ._common import conn_to_ntuple
    +from ._common import debug
    +from ._common import ENCODING
    +from ._common import ENCODING_ERRS
    +from ._common import isfile_strict
    +from ._common import memoize
    +from ._common import memoize_when_activated
    +from ._common import NoSuchProcess
    +from ._common import parse_environ_block
    +from ._common import TimeoutExpired
    +from ._common import usage_percent
    +from ._compat import long
    +from ._compat import lru_cache
    +from ._compat import PY3
    +from ._compat import unicode
    +from ._compat import xrange
    +from ._psutil_windows import ABOVE_NORMAL_PRIORITY_CLASS
    +from ._psutil_windows import BELOW_NORMAL_PRIORITY_CLASS
    +from ._psutil_windows import HIGH_PRIORITY_CLASS
    +from ._psutil_windows import IDLE_PRIORITY_CLASS
    +from ._psutil_windows import NORMAL_PRIORITY_CLASS
    +from ._psutil_windows import REALTIME_PRIORITY_CLASS
    +
     try:
         from . import _psutil_windows as cext
     except ImportError as err:
    @@ -22,41 +48,13 @@ except ImportError as err:
             # 1) we are on an old Windows version
             # 2) psutil was installed via pip + wheel
             # See: https://github.com/giampaolo/psutil/issues/811
    -        # It must be noted that psutil can still (kind of) work
    -        # on outdated systems if compiled / installed from sources,
    -        # but if we get here it means this this was a wheel (or exe).
             msg = "this Windows version is too old (< Windows Vista); "
             msg += "psutil 3.4.2 is the latest version which supports Windows "
    -        msg += "2000, XP and 2003 server; it may be possible that psutil "
    -        msg += "will work if compiled from sources though"
    +        msg += "2000, XP and 2003 server"
             raise RuntimeError(msg)
         else:
             raise
     
    -from ._common import conn_tmap
    -from ._common import ENCODING
    -from ._common import ENCODING_ERRS
    -from ._common import isfile_strict
    -from ._common import memoize_when_activated
    -from ._common import parse_environ_block
    -from ._common import sockfam_to_enum
    -from ._common import socktype_to_enum
    -from ._common import usage_percent
    -from ._compat import long
    -from ._compat import lru_cache
    -from ._compat import PY3
    -from ._compat import unicode
    -from ._compat import xrange
    -from ._exceptions import AccessDenied
    -from ._exceptions import NoSuchProcess
    -from ._exceptions import TimeoutExpired
    -from ._psutil_windows import ABOVE_NORMAL_PRIORITY_CLASS
    -from ._psutil_windows import BELOW_NORMAL_PRIORITY_CLASS
    -from ._psutil_windows import HIGH_PRIORITY_CLASS
    -from ._psutil_windows import IDLE_PRIORITY_CLASS
    -from ._psutil_windows import NORMAL_PRIORITY_CLASS
    -from ._psutil_windows import REALTIME_PRIORITY_CLASS
    -
     if sys.version_info >= (3, 4):
         import enum
     else:
    @@ -66,11 +64,14 @@ else:
     # http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx
     __extra__all__ = [
         "win_service_iter", "win_service_get",
    +    # Process priority
         "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
    -    "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
    -    "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
    -    "CONN_DELETE_TCB",
    -    "AF_LINK",
    +    "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", "NORMAL_PRIORITY_CLASS",
    +    "REALTIME_PRIORITY_CLASS",
    +    # IO priority
    +    "IOPRIO_VERYLOW", "IOPRIO_LOW", "IOPRIO_NORMAL", "IOPRIO_HIGH",
    +    # others
    +    "CONN_DELETE_TCB", "AF_LINK",
     ]
     
     
    @@ -79,11 +80,8 @@ __extra__all__ = [
     # =====================================================================
     
     CONN_DELETE_TCB = "DELETE_TCB"
    -ACCESS_DENIED_ERRSET = frozenset([errno.EPERM, errno.EACCES,
    -                                  cext.ERROR_ACCESS_DENIED])
    -NO_SUCH_SERVICE_ERRSET = frozenset([cext.ERROR_INVALID_NAME,
    -                                    cext.ERROR_SERVICE_DOES_NOT_EXIST])
    -
    +ERROR_PARTIAL_COPY = 299
    +PYPY = '__pypy__' in sys.builtin_module_names
     
     if enum is None:
         AF_LINK = -1
    @@ -118,6 +116,19 @@ if enum is not None:
     
         globals().update(Priority.__members__)
     
    +if enum is None:
    +    IOPRIO_VERYLOW = 0
    +    IOPRIO_LOW = 1
    +    IOPRIO_NORMAL = 2
    +    IOPRIO_HIGH = 3
    +else:
    +    class IOPriority(enum.IntEnum):
    +        IOPRIO_VERYLOW = 0
    +        IOPRIO_LOW = 1
    +        IOPRIO_NORMAL = 2
    +        IOPRIO_HIGH = 3
    +    globals().update(IOPriority.__members__)
    +
     pinfo_map = dict(
         num_handles=0,
         ctx_switches=1,
    @@ -187,7 +198,8 @@ def convert_dos_path(s):
         """
         rawdrive = '\\'.join(s.split('\\')[:3])
         driveletter = cext.win32_QueryDosDevice(rawdrive)
    -    return os.path.join(driveletter, s[len(rawdrive):])
    +    remainder = s[len(rawdrive):]
    +    return os.path.join(driveletter, remainder)
     
     
     def py2_strencode(s):
    @@ -203,6 +215,11 @@ def py2_strencode(s):
                 return s.encode(ENCODING, ENCODING_ERRS)
     
     
    +@memoize
    +def getpagesize():
    +    return cext.getpagesize()
    +
    +
     # =====================================================================
     # --- memory
     # =====================================================================
    @@ -309,6 +326,23 @@ def cpu_freq():
         return [_common.scpufreq(float(curr), min_, float(max_))]
     
     
+_loadavg_initialized = False
+
+
+def getloadavg():
+    """Return the number of processes in the system run queue averaged
+    over the last 1, 5, and 15 minutes respectively as a tuple."""
+    global _loadavg_initialized
+
+    if not _loadavg_initialized:
+        cext.init_loadavg_counter()
+        _loadavg_initialized = True
    +
    +    # Drop to 2 decimal points which is what Linux does
    +    raw_loads = cext.getloadavg()
    +    return tuple([round(load, 2) for load in raw_loads])
    +
    +
     # =====================================================================
     # --- network
     # =====================================================================
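
The getloadavg() added above initializes a native performance counter once per process, then rounds to two decimals to match Linux. A sketch of the same init-once shape with hypothetical stand-ins for the C-extension calls (_init_counter/_read_counter are placeholders; the lock is an addition, as the vendored code relies on the GIL):

    import threading

    def _init_counter():
        # hypothetical stand-in for cext.init_loadavg_counter()
        pass

    def _read_counter():
        # hypothetical stand-in for cext.getloadavg(): raw averages
        return (0.1234, 0.5678, 0.9)

    _lock = threading.Lock()
    _initialized = False

    def getloadavg():
        # One-time counter setup, then round to 2 decimals as Linux does.
        global _initialized
        with _lock:
            if not _initialized:
                _init_counter()
                _initialized = True
        return tuple(round(load, 2) for load in _read_counter())

    print(getloadavg())  # (0.12, 0.57, 0.9)
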
    @@ -326,17 +360,8 @@ def net_connections(kind, _pid=-1):
         ret = set()
         for item in rawlist:
             fd, fam, type, laddr, raddr, status, pid = item
    -        if laddr:
    -            laddr = _common.addr(*laddr)
    -        if raddr:
    -            raddr = _common.addr(*raddr)
    -        status = TCP_STATUSES[status]
    -        fam = sockfam_to_enum(fam)
    -        type = socktype_to_enum(type)
    -        if _pid == -1:
    -            nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid)
    -        else:
    -            nt = _common.pconn(fd, fam, type, laddr, raddr, status)
    +        nt = conn_to_ntuple(fd, fam, type, laddr, raddr, status, TCP_STATUSES,
    +                            pid=pid if _pid == -1 else None)
             ret.add(nt)
         return list(ret)
     
    @@ -501,14 +526,14 @@ class WindowsService(object):
             """
             try:
                 yield
    -        except WindowsError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    +        except OSError as err:
    +            if is_permission_err(err):
                     raise AccessDenied(
                         pid=None, name=self._name,
                         msg="service %r is not querable (not enough privileges)" %
                             self._name)
    -            elif err.errno in NO_SUCH_SERVICE_ERRSET or \
    -                    err.winerror in NO_SUCH_SERVICE_ERRSET:
    +            elif err.winerror in (cext.ERROR_INVALID_NAME,
    +                                  cext.ERROR_SERVICE_DOES_NOT_EXIST):
                     raise NoSuchProcess(
                         pid=None, name=self._name,
                         msg="service %r does not exist)" % self._name)
    @@ -625,27 +650,68 @@ pid_exists = cext.pid_exists
     ppid_map = cext.ppid_map  # used internally by Process.children()
     
     
    +def is_permission_err(exc):
    +    """Return True if this is a permission error."""
    +    assert isinstance(exc, OSError), exc
    +    # On Python 2 OSError doesn't always have 'winerror'. Sometimes
    +    # it does, in which case the original exception was WindowsError
    +    # (which is a subclass of OSError).
    +    return exc.errno in (errno.EPERM, errno.EACCES) or \
    +        getattr(exc, "winerror", -1) in (cext.ERROR_ACCESS_DENIED,
    +                                         cext.ERROR_PRIVILEGE_NOT_HELD)
    +
    +
    +def convert_oserror(exc, pid=None, name=None):
    +    """Convert OSError into NoSuchProcess or AccessDenied."""
    +    assert isinstance(exc, OSError), exc
    +    if is_permission_err(exc):
    +        return AccessDenied(pid=pid, name=name)
    +    if exc.errno == errno.ESRCH:
    +        return NoSuchProcess(pid=pid, name=name)
    +    raise exc
    +
    +
     def wrap_exceptions(fun):
    -    """Decorator which translates bare OSError and WindowsError
    -    exceptions into NoSuchProcess and AccessDenied.
    -    """
    +    """Decorator which converts OSError into NoSuchProcess or AccessDenied."""
         @functools.wraps(fun)
         def wrapper(self, *args, **kwargs):
             try:
                 return fun(self, *args, **kwargs)
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                raise AccessDenied(self.pid, self._name)
    -            if err.errno == errno.ESRCH:
    -                raise NoSuchProcess(self.pid, self._name)
    -            raise
    +            raise convert_oserror(err, pid=self.pid, name=self._name)
    +    return wrapper
    +
    +
    +def retry_error_partial_copy(fun):
    +    """Workaround for https://github.com/giampaolo/psutil/issues/875.
    +    See: https://stackoverflow.com/questions/4457745#4457745
    +    """
    +    @functools.wraps(fun)
    +    def wrapper(self, *args, **kwargs):
    +        delay = 0.0001
    +        times = 33
    +        for x in range(times):  # retries for roughly 1 second
    +            try:
    +                return fun(self, *args, **kwargs)
    +            except WindowsError as _:
    +                err = _
    +                if err.winerror == ERROR_PARTIAL_COPY:
    +                    time.sleep(delay)
    +                    delay = min(delay * 2, 0.04)
    +                    continue
    +                else:
    +                    raise
    +        else:
    +            msg = "%s retried %s times, converted to AccessDenied as it's " \
    +                "still returning %r" % (fun, times, err)
    +            raise AccessDenied(pid=self.pid, name=self._name, msg=msg)
         return wrapper
     
     
     class Process(object):
         """Wrapper class around underlying C implementation."""
     
    -    __slots__ = ["pid", "_name", "_ppid"]
    +    __slots__ = ["pid", "_name", "_ppid", "_cache"]
     
         def __init__(self, pid):
             self.pid = pid
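
The retry_error_partial_copy decorator above backs off exponentially: the delay starts at 100 µs and doubles up to a 40 ms cap, so 33 attempts spend roughly one second before giving up. A generic sketch of that backoff shape (retry_on_winerror and its defaults are illustrative, not psutil API; the vendored code converts the final failure to AccessDenied):

    import functools
    import time

    def retry_on_winerror(code, retries=33, first_delay=0.0001, max_delay=0.04):
        # Retry `fun` while it fails with the given Windows error code,
        # sleeping 0.1 ms, 0.2 ms, ... capped at 40 ms (~1 s in total).
        def decorator(fun):
            @functools.wraps(fun)
            def wrapper(*args, **kwargs):
                delay = first_delay
                for _ in range(retries):
                    try:
                        return fun(*args, **kwargs)
                    except OSError as err:
                        if getattr(err, "winerror", None) != code:
                            raise
                        time.sleep(delay)
                        delay = min(delay * 2, max_delay)
                raise PermissionError("still failing after %d retries" % retries)
            return wrapper
        return decorator
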
    @@ -655,13 +721,15 @@ class Process(object):
         # --- oneshot() stuff
     
         def oneshot_enter(self):
    -        self.oneshot_info.cache_activate()
    +        self._proc_info.cache_activate(self)
    +        self.exe.cache_activate(self)
     
         def oneshot_exit(self):
    -        self.oneshot_info.cache_deactivate()
    +        self._proc_info.cache_deactivate(self)
    +        self.exe.cache_deactivate(self)
     
         @memoize_when_activated
    -    def oneshot_info(self):
    +    def _proc_info(self):
             """Return multiple information about this process as a
             raw tuple.
             """
    @@ -669,7 +737,6 @@ class Process(object):
             assert len(ret) == len(pinfo_map)
             return ret
     
    -    @wrap_exceptions
         def name(self):
             """Return process name, which on Windows is always the final
             part of the executable.
    @@ -678,37 +745,53 @@ class Process(object):
             # and process-hacker.
             if self.pid == 0:
                 return "System Idle Process"
    -        elif self.pid == 4:
    +        if self.pid == 4:
                 return "System"
    -        else:
    -            try:
    -                # Note: this will fail with AD for most PIDs owned
    -                # by another user but it's faster.
    -                return py2_strencode(os.path.basename(self.exe()))
    -            except AccessDenied:
    -                return py2_strencode(cext.proc_name(self.pid))
    +        return os.path.basename(self.exe())
     
         @wrap_exceptions
    +    @memoize_when_activated
         def exe(self):
    -        # Note: os.path.exists(path) may return False even if the file
    -        # is there, see:
    -        # http://stackoverflow.com/questions/3112546/os-path-exists-lies
    -
    -        # see https://github.com/giampaolo/psutil/issues/414
    -        # see https://github.com/giampaolo/psutil/issues/528
    -        if self.pid in (0, 4):
    -            raise AccessDenied(self.pid, self._name)
    -        return py2_strencode(convert_dos_path(cext.proc_exe(self.pid)))
    +        if PYPY:
    +            try:
    +                exe = cext.proc_exe(self.pid)
    +            except WindowsError as err:
    +                # 24 = ERROR_TOO_MANY_OPEN_FILES. Not sure why this happens
    +                # (perhaps PyPy's JIT delaying garbage collection of files?).
    +                if err.errno == 24:
    +                    debug("%r forced into AccessDenied" % err)
    +                    raise AccessDenied(self.pid, self._name)
    +                raise
    +        else:
    +            exe = cext.proc_exe(self.pid)
    +        if not PY3:
    +            exe = py2_strencode(exe)
    +        if exe.startswith('\\'):
    +            return convert_dos_path(exe)
    +        return exe  # May be "Registry", "MemCompression", ...
     
         @wrap_exceptions
    +    @retry_error_partial_copy
         def cmdline(self):
    -        ret = cext.proc_cmdline(self.pid)
    +        if cext.WINVER >= cext.WINDOWS_8_1:
    +            # PEB method detects cmdline changes but requires more
    +            # privileges: https://github.com/giampaolo/psutil/pull/1398
    +            try:
    +                ret = cext.proc_cmdline(self.pid, use_peb=True)
    +            except OSError as err:
    +                if is_permission_err(err):
    +                    ret = cext.proc_cmdline(self.pid, use_peb=False)
    +                else:
    +                    raise
    +        else:
    +            ret = cext.proc_cmdline(self.pid, use_peb=True)
             if PY3:
                 return ret
             else:
                 return [py2_strencode(s) for s in ret]
     
         @wrap_exceptions
    +    @retry_error_partial_copy
         def environ(self):
             ustr = cext.proc_environ(self.pid)
             if ustr and not PY3:
    @@ -725,10 +808,10 @@ class Process(object):
             try:
                 return cext.proc_memory_info(self.pid)
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    +            if is_permission_err(err):
                     # TODO: the C ext can probably be refactored in order
                     # to get this from cext.proc_info()
    -                info = self.oneshot_info()
    +                info = self._proc_info()
                     return (
                         info[pinfo_map['num_page_faults']],
                         info[pinfo_map['peak_wset']],
    @@ -757,6 +840,7 @@ class Process(object):
         def memory_full_info(self):
             basic_mem = self.memory_info()
             uss = cext.proc_memory_uss(self.pid)
    +        uss *= getpagesize()
             return pfullmem(*basic_mem + (uss, ))
     
         def memory_maps(self):
    @@ -765,16 +849,11 @@ class Process(object):
             except OSError as err:
                 # XXX - can't use wrap_exceptions decorator as we're
                 # returning a generator; probably needs refactoring.
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                raise AccessDenied(self.pid, self._name)
    -            if err.errno == errno.ESRCH:
    -                raise NoSuchProcess(self.pid, self._name)
    -            raise
    +            raise convert_oserror(err, self.pid, self._name)
             else:
                 for addr, perm, path, rss in raw:
                     path = convert_dos_path(path)
                     if not PY3:
    -                    assert isinstance(path, unicode), type(path)
                         path = py2_strencode(path)
                     addr = hex(addr)
                     yield (addr, perm, path, rss)
    @@ -785,7 +864,16 @@ class Process(object):
     
         @wrap_exceptions
         def send_signal(self, sig):
    -        os.kill(self.pid, sig)
    +        if sig == signal.SIGTERM:
    +            cext.proc_kill(self.pid)
    +        # py >= 2.7
    +        elif sig in (getattr(signal, "CTRL_C_EVENT", object()),
    +                     getattr(signal, "CTRL_BREAK_EVENT", object())):
    +            os.kill(self.pid, sig)
    +        else:
    +            raise ValueError(
    +                "only SIGTERM, CTRL_C_EVENT and CTRL_BREAK_EVENT signals "
    +                "are supported on Windows")
     
         @wrap_exceptions
         def wait(self, timeout=None):
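
The getattr(signal, "CTRL_C_EVENT", object()) comparison in send_signal above degrades gracefully on platforms missing the constant: a fresh object() never equals sig, so the branch is simply dead there. A standalone illustration of the sentinel idiom (not psutil code):

    import signal

    def classify(sig):
        # getattr with a fresh object() sentinel: if the platform lacks
        # the constant, the test is False instead of an AttributeError.
        if sig in (getattr(signal, "CTRL_C_EVENT", object()),
                   getattr(signal, "CTRL_BREAK_EVENT", object())):
            return "console event"
        return "regular signal"

    print(classify(signal.SIGTERM))  # "regular signal" on any platform
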
    @@ -839,19 +927,19 @@ class Process(object):
     
         @wrap_exceptions
         def create_time(self):
    -        # special case for kernel process PIDs; return system boot time
    -        if self.pid in (0, 4):
    -            return boot_time()
    +        # Note: proc_times() not put under oneshot() 'cause create_time()
    +        # is already cached by the main Process class.
             try:
    -            return cext.proc_create_time(self.pid)
    +            user, system, created = cext.proc_times(self.pid)
    +            return created
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                return self.oneshot_info()[pinfo_map['create_time']]
    +            if is_permission_err(err):
    +                return self._proc_info()[pinfo_map['create_time']]
                 raise
     
         @wrap_exceptions
         def num_threads(self):
    -        return self.oneshot_info()[pinfo_map['num_threads']]
    +        return self._proc_info()[pinfo_map['num_threads']]
     
         @wrap_exceptions
         def threads(self):
    @@ -865,26 +953,26 @@ class Process(object):
         @wrap_exceptions
         def cpu_times(self):
             try:
    -            user, system = cext.proc_cpu_times(self.pid)
    +            user, system, created = cext.proc_times(self.pid)
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                info = self.oneshot_info()
    -                user = info[pinfo_map['user_time']]
    -                system = info[pinfo_map['kernel_time']]
    -            else:
    +            if not is_permission_err(err):
                     raise
    +            info = self._proc_info()
    +            user = info[pinfo_map['user_time']]
    +            system = info[pinfo_map['kernel_time']]
             # Children user/system times are not retrievable (set to 0).
             return _common.pcputimes(user, system, 0.0, 0.0)
     
         @wrap_exceptions
         def suspend(self):
    -        return cext.proc_suspend(self.pid)
    +        cext.proc_suspend_or_resume(self.pid, True)
     
         @wrap_exceptions
         def resume(self):
    -        return cext.proc_resume(self.pid)
    +        cext.proc_suspend_or_resume(self.pid, False)
     
         @wrap_exceptions
    +    @retry_error_partial_copy
         def cwd(self):
             if self.pid in (0, 4):
                 raise AccessDenied(self.pid, self._name)
    @@ -927,39 +1015,38 @@ class Process(object):
         def nice_set(self, value):
             return cext.proc_priority_set(self.pid, value)
     
    -    # available on Windows >= Vista
    -    if hasattr(cext, "proc_io_priority_get"):
    -        @wrap_exceptions
    -        def ionice_get(self):
    -            return cext.proc_io_priority_get(self.pid)
    +    @wrap_exceptions
    +    def ionice_get(self):
    +        ret = cext.proc_io_priority_get(self.pid)
    +        if enum is not None:
    +            ret = IOPriority(ret)
    +        return ret
     
    -        @wrap_exceptions
    -        def ionice_set(self, value, _):
    -            if _:
    -                raise TypeError("set_proc_ionice() on Windows takes only "
    -                                "1 argument (2 given)")
    -            if value not in (2, 1, 0):
    -                raise ValueError("value must be 2 (normal), 1 (low) or 0 "
    -                                 "(very low); got %r" % value)
    -            return cext.proc_io_priority_set(self.pid, value)
    +    @wrap_exceptions
    +    def ionice_set(self, ioclass, value):
    +        if value:
    +            raise TypeError("value argument not accepted on Windows")
    +        if ioclass not in (IOPRIO_VERYLOW, IOPRIO_LOW, IOPRIO_NORMAL,
    +                           IOPRIO_HIGH):
    +            raise ValueError("%s is not a valid priority" % ioclass)
    +        cext.proc_io_priority_set(self.pid, ioclass)
     
         @wrap_exceptions
         def io_counters(self):
             try:
                 ret = cext.proc_io_counters(self.pid)
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                info = self.oneshot_info()
    -                ret = (
    -                    info[pinfo_map['io_rcount']],
    -                    info[pinfo_map['io_wcount']],
    -                    info[pinfo_map['io_rbytes']],
    -                    info[pinfo_map['io_wbytes']],
    -                    info[pinfo_map['io_count_others']],
    -                    info[pinfo_map['io_bytes_others']],
    -                )
    -            else:
    +            if not is_permission_err(err):
                     raise
    +            info = self._proc_info()
    +            ret = (
    +                info[pinfo_map['io_rcount']],
    +                info[pinfo_map['io_wcount']],
    +                info[pinfo_map['io_rbytes']],
    +                info[pinfo_map['io_wbytes']],
    +                info[pinfo_map['io_count_others']],
    +                info[pinfo_map['io_bytes_others']],
    +            )
             return pio(*ret)
     
         @wrap_exceptions
    @@ -1007,12 +1094,12 @@ class Process(object):
             try:
                 return cext.proc_num_handles(self.pid)
             except OSError as err:
    -            if err.errno in ACCESS_DENIED_ERRSET:
    -                return self.oneshot_info()[pinfo_map['num_handles']]
    +            if is_permission_err(err):
    +                return self._proc_info()[pinfo_map['num_handles']]
                 raise
     
         @wrap_exceptions
         def num_ctx_switches(self):
    -        ctx_switches = self.oneshot_info()[pinfo_map['ctx_switches']]
    +        ctx_switches = self._proc_info()[pinfo_map['ctx_switches']]
             # only voluntary ctx switches are supported
             return _common.pctxsw(ctx_switches, 0)
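
Across _pswindows.py the oneshot cache moves to a per-instance _cache slot, which is why cache_activate()/cache_deactivate() now receive the Process instance and __slots__ grows a "_cache" entry. A hedged sketch of a memoize_when_activated along those lines (psutil's real helper lives in _common and also guards a threading race; details may differ):

    import functools

    def memoize_when_activated(fun):
        # Cache fun(self) on the instance, but only between cache_activate()
        # and cache_deactivate() -- i.e. inside psutil's oneshot() context.
        @functools.wraps(fun)
        def wrapper(self):
            try:
                return self._cache[fun]
            except AttributeError:   # cache not active: compute, don't store
                return fun(self)
            except KeyError:         # active but not computed yet
                ret = self._cache[fun] = fun(self)
                return ret

        def cache_activate(proc):
            proc._cache = {}         # requires "_cache" in __slots__

        def cache_deactivate(proc):
            try:
                del proc._cache
            except AttributeError:
                pass

        wrapper.cache_activate = cache_activate
        wrapper.cache_deactivate = cache_deactivate
        return wrapper
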
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/__init__.py b/server/www/packages/packages-windows/x86/pyasn1/__init__.py
    index e2e4c5c..5a56a70 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/__init__.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/__init__.py
    @@ -1,7 +1,7 @@
     import sys
     
     # https://www.python.org/dev/peps/pep-0396/
    -__version__ = '0.4.4'
    +__version__ = '0.4.8'
     
     if sys.version_info[:2] < (2, 4):
         raise RuntimeError('PyASN1 requires Python 2.4 or later')
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/decoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/decoder.py
    index a27b3e0..5ff485f 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/decoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/decoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof 
    +# Copyright (c) 2005-2019, Ilya Etingof 
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import debug
    @@ -18,6 +18,8 @@ from pyasn1.type import useful
     
     __all__ = ['decode']
     
    +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
    +
     noValue = base.noValue
     
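
The `if LOG:` guards threaded through the decoder hunks below skip not just the call but the %-formatting of the message: the object bound to LOG is falsy while the DEBUG_DECODER flag is off, so the fast path pays only a truth test. A self-contained sketch of the idiom (NullLogger/register_loggee are illustrative names, not pyasn1 internals):

    class NullLogger(object):
        """Falsy logger: `if LOG:` skips formatting entirely when off."""
        def __bool__(self):
            return False
        __nonzero__ = __bool__      # Python 2 spelling

        def __call__(self, msg):
            pass

    def register_loggee(name, enabled=False):
        if not enabled:
            return NullLogger()
        return lambda msg: print('%s: %s' % (name, msg))

    LOG = register_loggee(__name__, enabled=False)

    def decode(substrate):
        if LOG:  # the format expression below only runs when enabled
            LOG('decoding %d octets' % len(substrate))
        return substrate
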
     
    @@ -70,6 +72,10 @@ class ExplicitTagDecoder(AbstractSimpleDecoder):
     
             value, _ = decodeFun(head, asn1Spec, tagSet, length, **options)
     
    +        if LOG:
    +            LOG('explicit tag container carries %d octets of trailing payload '
    +                '(will be lost!): %s' % (len(_), debug.hexdump(_)))
    +
             return value, tail
     
         def indefLenValueDecoder(self, substrate, asn1Spec,
    @@ -120,7 +126,8 @@ class BooleanDecoder(IntegerDecoder):
         protoComponent = univ.Boolean(0)
     
         def _createComponent(self, asn1Spec, tagSet, value, **options):
    -        return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0, **options)
    +        return IntegerDecoder._createComponent(
    +            self, asn1Spec, tagSet, value and 1 or 0, **options)
     
     
     class BitStringDecoder(AbstractSimpleDecoder):
    @@ -134,8 +141,8 @@ class BitStringDecoder(AbstractSimpleDecoder):
             head, tail = substrate[:length], substrate[length:]
     
             if substrateFun:
    -            return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
    -                                substrate, length)
    +            return substrateFun(self._createComponent(
    +                asn1Spec, tagSet, noValue, **options), substrate, length)
     
             if not head:
                 raise error.PyAsn1Error('Empty BIT STRING substrate')
    @@ -148,12 +155,17 @@ class BitStringDecoder(AbstractSimpleDecoder):
                         'Trailing bits overflow %s' % trailingBits
                     )
     
    -            value = self.protoComponent.fromOctetString(head[1:], internalFormat=True, padding=trailingBits)
    +            value = self.protoComponent.fromOctetString(
    +                head[1:], internalFormat=True, padding=trailingBits)
     
                 return self._createComponent(asn1Spec, tagSet, value, **options), tail
     
             if not self.supportConstructedForm:
    -            raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__)
    +            raise error.PyAsn1Error('Constructed encoding form prohibited '
    +                                    'at %s' % self.__class__.__name__)
    +
    +        if LOG:
    +            LOG('assembling constructed serialization')
     
             # All inner fragments are of the same type, treat them as octet string
             substrateFun = self.substrateCollector
    @@ -234,6 +246,9 @@ class OctetStringDecoder(AbstractSimpleDecoder):
             if not self.supportConstructedForm:
                 raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__)
     
    +        if LOG:
    +            LOG('assembling constructed serialization')
    +
             # All inner fragments are of the same type, treat them as octet string
             substrateFun = self.substrateCollector
     
    @@ -267,7 +282,9 @@ class OctetStringDecoder(AbstractSimpleDecoder):
                                                  allowEoo=True, **options)
                 if component is eoo.endOfOctets:
                     break
    +
                 header += component
    +
             else:
                 raise error.SubstrateUnderrunError(
                     'No EOO seen before substrate ends'
    @@ -374,59 +391,90 @@ class RealDecoder(AbstractSimpleDecoder):
             if fo & 0x80:  # binary encoding
                 if not head:
                     raise error.PyAsn1Error("Incomplete floating-point value")
    +
    +            if LOG:
    +                LOG('decoding binary encoded REAL')
    +
                 n = (fo & 0x03) + 1
    +
                 if n == 4:
                     n = oct2int(head[0])
                     head = head[1:]
    +
                 eo, head = head[:n], head[n:]
    +
                 if not eo or not head:
                     raise error.PyAsn1Error('Real exponent screwed')
    +
                 e = oct2int(eo[0]) & 0x80 and -1 or 0
    +
                 while eo:  # exponent
                     e <<= 8
                     e |= oct2int(eo[0])
                     eo = eo[1:]
    +
                 b = fo >> 4 & 0x03  # base bits
    +
                 if b > 2:
                     raise error.PyAsn1Error('Illegal Real base')
    +
                 if b == 1:  # encbase = 8
                     e *= 3
    +
                 elif b == 2:  # encbase = 16
                     e *= 4
                 p = 0
    +
                 while head:  # value
                     p <<= 8
                     p |= oct2int(head[0])
                     head = head[1:]
    +
                 if fo & 0x40:  # sign bit
                     p = -p
    +
                 sf = fo >> 2 & 0x03  # scale bits
                 p *= 2 ** sf
                 value = (p, 2, e)
    +
             elif fo & 0x40:  # infinite value
    +            if LOG:
    +                LOG('decoding infinite REAL')
    +
                 value = fo & 0x01 and '-inf' or 'inf'
    +
             elif fo & 0xc0 == 0:  # character encoding
                 if not head:
                     raise error.PyAsn1Error("Incomplete floating-point value")
    +
    +            if LOG:
    +                LOG('decoding character encoded REAL')
    +
                 try:
                     if fo & 0x3 == 0x1:  # NR1
                         value = (int(head), 10, 0)
    +
                     elif fo & 0x3 == 0x2:  # NR2
                         value = float(head)
    +
                     elif fo & 0x3 == 0x3:  # NR3
                         value = float(head)
    +
                     else:
                         raise error.SubstrateUnderrunError(
                             'Unknown NR (tag %s)' % fo
                         )
    +
                 except ValueError:
                     raise error.SubstrateUnderrunError(
                         'Bad character Real syntax'
                     )
    +
             else:
                 raise error.SubstrateUnderrunError(
                     'Unknown encoding (tag %s)' % fo
                 )
    +
             return self._createComponent(asn1Spec, tagSet, value, **options), tail
     
     
    @@ -447,10 +495,12 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
         def _decodeComponents(self, substrate, tagSet=None, decodeFun=None, **options):
             components = []
             componentTypes = set()
    +
             while substrate:
                 component, substrate = decodeFun(substrate, **options)
                 if component is eoo.endOfOctets:
                     break
    +
                 components.append(component)
                 componentTypes.add(component.tagSet)
     
    @@ -460,6 +510,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
             # * otherwise -> likely SEQUENCE OF/SET OF
             if len(componentTypes) > 1:
                 protoComponent = self.protoRecordComponent
    +
             else:
                 protoComponent = self.protoSequenceComponent
     
    @@ -469,6 +520,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                 tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags)
             )
     
    +        if LOG:
    +            LOG('guessed %r container type (pass `asn1Spec` to guide the '
    +                'decoder)' % asn1Object)
    +
             for idx, component in enumerate(components):
                 asn1Object.setComponentByPosition(
                     idx, component,
    @@ -490,8 +545,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
             if substrateFun is not None:
                 if asn1Spec is not None:
                     asn1Object = asn1Spec.clone()
    +
                 elif self.protoComponent is not None:
                     asn1Object = self.protoComponent.clone(tagSet=tagSet)
    +
                 else:
                     asn1Object = self.protoRecordComponent, self.protoSequenceComponent
     
    @@ -501,11 +558,16 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                 asn1Object, trailing = self._decodeComponents(
                     head, tagSet=tagSet, decodeFun=decodeFun, **options
                 )
    +
                 if trailing:
    -                raise error.PyAsn1Error('Unused trailing %d octets encountered' % len(trailing))
    +                if LOG:
    +                    LOG('Unused trailing %d octets encountered: %s' % (
    +                        len(trailing), debug.hexdump(trailing)))
    +
                 return asn1Object, tail
     
             asn1Object = asn1Spec.clone()
    +        asn1Object.clear()
     
             if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
     
    @@ -514,21 +576,31 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                 isSetType = asn1Spec.typeId == univ.Set.typeId
                 isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
     
    +            if LOG:
    +                LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
    +                    not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
    +                    asn1Spec))
    +
                 seenIndices = set()
                 idx = 0
                 while head:
                     if not namedTypes:
                         componentType = None
    +
                     elif isSetType:
                         componentType = namedTypes.tagMapUnique
    +
                     else:
                         try:
                             if isDeterministic:
                                 componentType = namedTypes[idx].asn1Object
    +
                             elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                                 componentType = namedTypes.getTagMapNearPosition(idx)
    +
                             else:
                                 componentType = namedTypes[idx].asn1Object
    +
                         except IndexError:
                             raise error.PyAsn1Error(
                                 'Excessive components decoded at %r' % (asn1Spec,)
    @@ -539,6 +611,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                     if not isDeterministic and namedTypes:
                         if isSetType:
                             idx = namedTypes.getPositionByType(component.effectiveTagSet)
    +
                         elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                             idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
     
    @@ -551,14 +624,25 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                     seenIndices.add(idx)
                     idx += 1
     
    +            if LOG:
    +                LOG('seen component indices %s' % seenIndices)
    +
                 if namedTypes:
                     if not namedTypes.requiredComponents.issubset(seenIndices):
    -                    raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__)
    +                    raise error.PyAsn1Error(
    +                        'ASN.1 object %s has uninitialized '
    +                        'components' % asn1Object.__class__.__name__)
     
                     if  namedTypes.hasOpenTypes:
     
                         openTypes = options.get('openTypes', {})
     
    +                    if LOG:
    +                        LOG('user-specified open types map:')
    +
    +                        for k, v in openTypes.items():
    +                            LOG('%s -> %r' % (k, v))
    +
                         if openTypes or options.get('decodeOpenTypes', False):
     
                             for idx, namedType in enumerate(namedTypes.namedTypes):
    @@ -577,27 +661,67 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
     
                                 except KeyError:
     
    +                                if LOG:
    +                                    LOG('default open types map of component '
    +                                        '"%s.%s" governed by component "%s.%s"'
    +                                        ':' % (asn1Object.__class__.__name__,
    +                                               namedType.name,
    +                                               asn1Object.__class__.__name__,
    +                                               namedType.openType.name))
    +
    +                                    for k, v in namedType.openType.items():
    +                                        LOG('%s -> %r' % (k, v))
    +
                                     try:
                                         openType = namedType.openType[governingValue]
     
                                     except KeyError:
    +                                    if LOG:
    +                                        LOG('failed to resolve open type by governing '
    +                                            'value %r' % (governingValue,))
                                         continue
     
    -                            component, rest = decodeFun(
    -                                asn1Object.getComponentByPosition(idx).asOctets(),
    -                                asn1Spec=openType
    -                            )
    +                            if LOG:
    +                                LOG('resolved open type %r by governing '
    +                                    'value %r' % (openType, governingValue))
     
    -                            asn1Object.setComponentByPosition(idx, component)
    +                            containerValue = asn1Object.getComponentByPosition(idx)
    +
    +                            if containerValue.typeId in (
    +                                    univ.SetOf.typeId, univ.SequenceOf.typeId):
    +
    +                                for pos, containerElement in enumerate(
    +                                        containerValue):
    +
    +                                    component, rest = decodeFun(
    +                                        containerValue[pos].asOctets(),
    +                                        asn1Spec=openType, **options
    +                                    )
    +
    +                                    containerValue[pos] = component
    +
    +                            else:
    +                                component, rest = decodeFun(
    +                                    asn1Object.getComponentByPosition(idx).asOctets(),
    +                                    asn1Spec=openType, **options
    +                                )
    +
    +                                asn1Object.setComponentByPosition(idx, component)
     
                 else:
    -                asn1Object.verifySizeSpec()
    +                inconsistency = asn1Object.isInconsistent
    +                if inconsistency:
    +                    raise inconsistency
     
             else:
                 asn1Object = asn1Spec.clone()
    +            asn1Object.clear()
     
                 componentType = asn1Spec.componentType
     
    +            if LOG:
    +                LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
    +
                 idx = 0
     
                 while head:
    @@ -607,6 +731,7 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                         verifyConstraints=False,
                         matchTags=False, matchConstraints=False
                     )
    +
                     idx += 1
     
             return asn1Object, tail
    @@ -621,8 +746,10 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
             if substrateFun is not None:
                 if asn1Spec is not None:
                     asn1Object = asn1Spec.clone()
    +
                 elif self.protoComponent is not None:
                     asn1Object = self.protoComponent.clone(tagSet=tagSet)
    +
                 else:
                     asn1Object = self.protoRecordComponent, self.protoSequenceComponent
     
    @@ -630,10 +757,12 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
     
             if asn1Spec is None:
                 return self._decodeComponents(
    -                substrate, tagSet=tagSet, decodeFun=decodeFun, allowEoo=True, **options
    +                substrate, tagSet=tagSet, decodeFun=decodeFun,
    +                **dict(options, allowEoo=True)
                 )
     
             asn1Object = asn1Spec.clone()
    +        asn1Object.clear()
     
             if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
     
    @@ -642,21 +771,31 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                 isSetType = asn1Object.typeId == univ.Set.typeId
                 isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
     
    +            if LOG:
    +                LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
    +                    not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
    +                    asn1Spec))
    +
                 seenIndices = set()
                 idx = 0
                 while substrate:
                     if len(namedTypes) <= idx:
                         asn1Spec = None
    +
                     elif isSetType:
                         asn1Spec = namedTypes.tagMapUnique
    +
                     else:
                         try:
                             if isDeterministic:
                                 asn1Spec = namedTypes[idx].asn1Object
    +
                             elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                                 asn1Spec = namedTypes.getTagMapNearPosition(idx)
    +
                             else:
                                 asn1Spec = namedTypes[idx].asn1Object
    +
                         except IndexError:
                             raise error.PyAsn1Error(
                                 'Excessive components decoded at %r' % (asn1Object,)
    @@ -686,13 +825,22 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                         'No EOO seen before substrate ends'
                     )
     
    +            if LOG:
    +                LOG('seen component indices %s' % seenIndices)
    +
                 if namedTypes:
                     if not namedTypes.requiredComponents.issubset(seenIndices):
                         raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__)
     
    -                if  namedTypes.hasOpenTypes:
    +                if namedTypes.hasOpenTypes:
     
    -                    openTypes = options.get('openTypes', None)
    +                    openTypes = options.get('openTypes', {})
    +
    +                    if LOG:
    +                        LOG('user-specified open types map:')
    +
    +                        for k, v in openTypes.items():
    +                            LOG('%s -> %r' % (k, v))
     
                         if openTypes or options.get('decodeOpenTypes', False):
     
    @@ -712,28 +860,68 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
     
                                 except KeyError:
     
    +                                if LOG:
    +                                    LOG('default open types map of component '
    +                                        '"%s.%s" governed by component "%s.%s"'
    +                                        ':' % (asn1Object.__class__.__name__,
    +                                               namedType.name,
    +                                               asn1Object.__class__.__name__,
    +                                               namedType.openType.name))
    +
    +                                    for k, v in namedType.openType.items():
    +                                        LOG('%s -> %r' % (k, v))
    +
                                     try:
                                         openType = namedType.openType[governingValue]
     
                                     except KeyError:
    +                                    if LOG:
    +                                        LOG('failed to resolve open type by governing '
    +                                            'value %r' % (governingValue,))
                                         continue
     
    -                            component, rest = decodeFun(
    -                                asn1Object.getComponentByPosition(idx).asOctets(),
    -                                asn1Spec=openType, allowEoo=True
    -                            )
    +                            if LOG:
    +                                LOG('resolved open type %r by governing '
    +                                    'value %r' % (openType, governingValue))
     
    -                            if component is not eoo.endOfOctets:
    -                                asn1Object.setComponentByPosition(idx, component)
    +                            containerValue = asn1Object.getComponentByPosition(idx)
    +
    +                            if containerValue.typeId in (
    +                                    univ.SetOf.typeId, univ.SequenceOf.typeId):
    +
    +                                for pos, containerElement in enumerate(
    +                                        containerValue):
    +
    +                                    component, rest = decodeFun(
    +                                        containerValue[pos].asOctets(),
    +                                        asn1Spec=openType, **dict(options, allowEoo=True)
    +                                    )
    +
    +                                    containerValue[pos] = component
    +
    +                            else:
    +                                component, rest = decodeFun(
    +                                    asn1Object.getComponentByPosition(idx).asOctets(),
    +                                    asn1Spec=openType, **dict(options, allowEoo=True)
    +                                )
    +
    +                                if component is not eoo.endOfOctets:
    +                                    asn1Object.setComponentByPosition(idx, component)
     
                     else:
    -                    asn1Object.verifySizeSpec()
    +                    inconsistency = asn1Object.isInconsistent
    +                    if inconsistency:
    +                        raise inconsistency
     
             else:
                 asn1Object = asn1Spec.clone()
    +            asn1Object.clear()
     
                 componentType = asn1Spec.componentType
     
    +            if LOG:
    +                LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
    +
                 idx = 0
     
                 while substrate:
    @@ -747,7 +935,9 @@ class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
                         verifyConstraints=False,
                         matchTags=False, matchConstraints=False
                     )
    +
                     idx += 1
    +
                 else:
                     raise error.SubstrateUnderrunError(
                         'No EOO seen before substrate ends'
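
The open-type resolution above is driven from the public API via the
decodeOpenTypes option. A short sketch modeled on the pyasn1 documentation
(Record and the id-to-type map are illustrative):

    from pyasn1.type import namedtype, opentype, univ
    from pyasn1.codec.ber import decoder, encoder

    openType = opentype.OpenType('id', {1: univ.Integer(),
                                        2: univ.OctetString()})

    class Record(univ.Sequence):                   # illustrative container
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', univ.Integer()),
            namedtype.NamedType('value', univ.Any(), openType=openType)
        )

    record = Record()
    record['id'] = 1
    record['value'] = encoder.encode(univ.Integer(12))

    decoded, _ = decoder.decode(encoder.encode(record),
                                asn1Spec=Record(), decodeOpenTypes=True)
    # decoded['value'] should now be univ.Integer(12), not an opaque ANY blob
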
    @@ -794,18 +984,25 @@ class ChoiceDecoder(AbstractConstructedDecoder):
     
             if asn1Spec is None:
                 asn1Object = self.protoComponent.clone(tagSet=tagSet)
    +
             else:
                 asn1Object = asn1Spec.clone()
     
             if substrateFun:
                 return substrateFun(asn1Object, substrate, length)
     
    -        if asn1Object.tagSet == tagSet:  # explicitly tagged Choice
    +        if asn1Object.tagSet == tagSet:
    +            if LOG:
    +                LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
    +
                 component, head = decodeFun(
                     head, asn1Object.componentTagMap, **options
                 )
     
             else:
    +            if LOG:
    +                LOG('decoding %s as untagged CHOICE' % (tagSet,))
    +
                 component, head = decodeFun(
                     head, asn1Object.componentTagMap,
                     tagSet, length, state, **options
    @@ -813,6 +1010,9 @@ class ChoiceDecoder(AbstractConstructedDecoder):
     
             effectiveTagSet = component.effectiveTagSet
     
    +        if LOG:
    +            LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
    +
             asn1Object.setComponentByType(
                 effectiveTagSet, component,
                 verifyConstraints=False,
    @@ -834,18 +1034,26 @@ class ChoiceDecoder(AbstractConstructedDecoder):
             if substrateFun:
                 return substrateFun(asn1Object, substrate, length)
     
    -        if asn1Object.tagSet == tagSet:  # explicitly tagged Choice
    +        if asn1Object.tagSet == tagSet:
    +            if LOG:
    +                LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
    +
                 component, substrate = decodeFun(
                     substrate, asn1Object.componentType.tagMapUnique, **options
                 )
    +
                 # eat up EOO marker
                 eooMarker, substrate = decodeFun(
                     substrate, allowEoo=True, **options
                 )
    +
                 if eooMarker is not eoo.endOfOctets:
                     raise error.PyAsn1Error('No EOO seen before substrate ends')
     
             else:
    +            if LOG:
    +                LOG('decoding %s as untagged CHOICE' % (tagSet,))
    +
                 component, substrate = decodeFun(
                     substrate, asn1Object.componentType.tagMapUnique,
                     tagSet, length, state, **options
    @@ -853,6 +1061,9 @@ class ChoiceDecoder(AbstractConstructedDecoder):
     
             effectiveTagSet = component.effectiveTagSet
     
    +        if LOG:
    +            LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
    +
             asn1Object.setComponentByType(
                 effectiveTagSet, component,
                 verifyConstraints=False,
    @@ -870,13 +1081,25 @@ class AnyDecoder(AbstractSimpleDecoder):
                          tagSet=None, length=None, state=None,
                          decodeFun=None, substrateFun=None,
                          **options):
    -        if asn1Spec is None or asn1Spec is not None and tagSet != asn1Spec.tagSet:
    +        if asn1Spec is None:
    +            isUntagged = True
    +
    +        elif asn1Spec.__class__ is tagmap.TagMap:
    +            isUntagged = tagSet not in asn1Spec.tagMap
    +
    +        else:
    +            isUntagged = tagSet != asn1Spec.tagSet
    +
    +        if isUntagged:
                 fullSubstrate = options['fullSubstrate']
     
                 # untagged Any container, recover inner header substrate
                 length += len(fullSubstrate) - len(substrate)
                 substrate = fullSubstrate
     
    +            if LOG:
    +                LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate))
    +
             if substrateFun:
                 return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
                                     substrate, length)
    @@ -889,15 +1112,31 @@ class AnyDecoder(AbstractSimpleDecoder):
                                  tagSet=None, length=None, state=None,
                                  decodeFun=None, substrateFun=None,
                                  **options):
    -        if asn1Spec is not None and tagSet == asn1Spec.tagSet:
    +        if asn1Spec is None:
    +            isTagged = False
    +
    +        elif asn1Spec.__class__ is tagmap.TagMap:
    +            isTagged = tagSet in asn1Spec.tagMap
    +
    +        else:
    +            isTagged = tagSet == asn1Spec.tagSet
    +
    +        if isTagged:
                 # tagged Any type -- consume header substrate
                 header = null
    +
    +            if LOG:
    +                LOG('decoding as tagged ANY')
    +
             else:
                 fullSubstrate = options['fullSubstrate']
     
                 # untagged Any, recover header substrate
                 header = fullSubstrate[:-len(substrate)]
     
    +            if LOG:
    +                LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header))
    +
             # Any components do not inherit initial tag
             asn1Spec = self.protoComponent
     
    @@ -905,6 +1144,9 @@ class AnyDecoder(AbstractSimpleDecoder):
                 asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
                 return substrateFun(asn1Object, header + substrate, length + len(header))
     
    +        if LOG:
    +            LOG('assembling constructed serialization')
    +
             # All inner fragments are of the same type, treat them as octet string
             substrateFun = self.substrateCollector
     
    @@ -914,13 +1156,17 @@ class AnyDecoder(AbstractSimpleDecoder):
                                                  allowEoo=True, **options)
                 if component is eoo.endOfOctets:
                     break
    +
                 header += component
    +
             else:
                 raise error.SubstrateUnderrunError(
                     'No EOO seen before substrate ends'
                 )
    +
             if substrateFun:
                 return header, substrate
    +
             else:
                 return self._createComponent(asn1Spec, tagSet, header, **options), substrate
     
    @@ -1045,7 +1291,7 @@ for typeDecoder in tagMap.values():
     
     class Decoder(object):
         defaultErrorState = stErrorCondition
    -    #    defaultErrorState = stDumpRawValue
    +    #defaultErrorState = stDumpRawValue
         defaultRawDecoder = AnyDecoder()
         supportIndefLength = True
     
    @@ -1063,21 +1309,16 @@ class Decoder(object):
                      decodeFun=None, substrateFun=None,
                      **options):
     
    -        if debug.logger & debug.flagDecoder:
    -            logger = debug.logger
    -        else:
    -            logger = None
    -
    -        if logger:
    -            logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
    +        if LOG:
    +            LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
     
             allowEoo = options.pop('allowEoo', False)
     
             # Look for end-of-octets sentinel
             if allowEoo and self.supportIndefLength:
                 if substrate[:2] == self.__eooSentinel:
    -                if logger:
    -                    logger('end-of-octets sentinel found')
    +                if LOG:
    +                    LOG('end-of-octets sentinel found')
                     return eoo.endOfOctets, substrate[2:]
     
             value = noValue
    @@ -1090,26 +1331,32 @@ class Decoder(object):
             fullSubstrate = substrate
     
             while state is not stStop:
    +
                 if state is stDecodeTag:
                     if not substrate:
                         raise error.SubstrateUnderrunError(
                             'Short octet stream on tag decoding'
                         )
    +
                     # Decode tag
                     isShortTag = True
                     firstOctet = substrate[0]
                     substrate = substrate[1:]
    +
                     try:
                         lastTag = tagCache[firstOctet]
    +
                     except KeyError:
                         integerTag = oct2int(firstOctet)
                         tagClass = integerTag & 0xC0
                         tagFormat = integerTag & 0x20
                         tagId = integerTag & 0x1F
    +
                         if tagId == 0x1F:
                             isShortTag = False
                             lengthOctetIdx = 0
                             tagId = 0
    +
                             try:
                                 while True:
                                     integerTag = oct2int(substrate[lengthOctetIdx])
    @@ -1118,42 +1365,55 @@ class Decoder(object):
                                     tagId |= (integerTag & 0x7F)
                                     if not integerTag & 0x80:
                                         break
    +
                                 substrate = substrate[lengthOctetIdx:]
    +
                             except IndexError:
                                 raise error.SubstrateUnderrunError(
                                     'Short octet stream on long tag decoding'
                                 )
    +
                         lastTag = tag.Tag(
                             tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
                         )
    +
                         if isShortTag:
                             # cache short tags
                             tagCache[firstOctet] = lastTag
    +
                     if tagSet is None:
                         if isShortTag:
                             try:
                                 tagSet = tagSetCache[firstOctet]
    +
                             except KeyError:
                                 # base tag not recovered
                                 tagSet = tag.TagSet((), lastTag)
                                 tagSetCache[firstOctet] = tagSet
                         else:
                             tagSet = tag.TagSet((), lastTag)
    +
                     else:
                         tagSet = lastTag + tagSet
    +
                     state = stDecodeLength
    -                if logger:
    -                    logger('tag decoded into %s, decoding length' % tagSet)
    +
    +                if LOG:
    +                    LOG('tag decoded into %s, decoding length' % tagSet)
    +
                 if state is stDecodeLength:
                     # Decode length
                     if not substrate:
                         raise error.SubstrateUnderrunError(
                             'Short octet stream on length decoding'
                         )
    +
                     firstOctet = oct2int(substrate[0])
    +
                     if firstOctet < 128:
                         size = 1
                         length = firstOctet
    +
                     elif firstOctet > 128:
                         size = firstOctet & 0x7F
                         # encoded in size bytes
    @@ -1164,28 +1424,36 @@ class Decoder(object):
                             raise error.SubstrateUnderrunError(
                                 '%s<%s at %s' % (size, len(encodedLength), tagSet)
                             )
    +
                         length = 0
                         for lengthOctet in encodedLength:
                             length <<= 8
                             length |= lengthOctet
                         size += 1
    +
                     else:
                         size = 1
                         length = -1
     
                     substrate = substrate[size:]
    +
                     if length == -1:
                         if not self.supportIndefLength:
                             raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
    +
                     else:
                         if len(substrate) < length:
                             raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate)))
    +
                     state = stGetValueDecoder
    -                if logger:
    -                    logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
    +
    +                if LOG:
    +                    LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
    +
                 if state is stGetValueDecoder:
                     if asn1Spec is None:
                         state = stGetValueDecoderByTag
    +
                     else:
                         state = stGetValueDecoderByAsn1Spec
                 #
    @@ -1207,41 +1475,55 @@ class Decoder(object):
                 if state is stGetValueDecoderByTag:
                     try:
                         concreteDecoder = tagMap[tagSet]
    +
                     except KeyError:
                         concreteDecoder = None
    +
                     if concreteDecoder:
                         state = stDecodeValue
    +
                     else:
                         try:
                             concreteDecoder = tagMap[tagSet[:1]]
    +
                         except KeyError:
                             concreteDecoder = None
    +
                         if concreteDecoder:
                             state = stDecodeValue
                         else:
                             state = stTryAsExplicitTag
    -                if logger:
    -                    logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag'))
    +
    +                if LOG:
    +                    LOG('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag'))
                         debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
    +
                 if state is stGetValueDecoderByAsn1Spec:
    +
                     if asn1Spec.__class__ is tagmap.TagMap:
                         try:
                             chosenSpec = asn1Spec[tagSet]
    +
                         except KeyError:
                             chosenSpec = None
    -                    if logger:
    -                        logger('candidate ASN.1 spec is a map of:')
    +
    +                    if LOG:
    +                        LOG('candidate ASN.1 spec is a map of:')
    +
                             for firstOctet, v in asn1Spec.presentTypes.items():
    -                            logger('  %s -> %s' % (firstOctet, v.__class__.__name__))
    +                            LOG('  %s -> %s' % (firstOctet, v.__class__.__name__))
    +
                             if asn1Spec.skipTypes:
    -                            logger('but neither of: ')
    +                            LOG('but neither of: ')
                                 for firstOctet, v in asn1Spec.skipTypes.items():
    -                                logger('  %s -> %s' % (firstOctet, v.__class__.__name__))
    -                        logger('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet))
    +                                LOG('  %s -> %s' % (firstOctet, v.__class__.__name__))
    +                        LOG('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet))
    +
                     elif tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap:
                         chosenSpec = asn1Spec
    -                    if logger:
    -                        logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
    +                    if LOG:
    +                        LOG('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
    +
                     else:
                         chosenSpec = None
     
    @@ -1249,29 +1531,38 @@ class Decoder(object):
                         try:
                             # ambiguous type or just faster codec lookup
                             concreteDecoder = typeMap[chosenSpec.typeId]
    -                        if logger:
    -                            logger('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,))
    +
    +                        if LOG:
    +                            LOG('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,))
    +
                         except KeyError:
                             # use base type for codec lookup to recover untagged types
                             baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag,  chosenSpec.tagSet.baseTag)
                             try:
                                 # base type or tagged subtype
                                 concreteDecoder = tagMap[baseTagSet]
    -                            if logger:
    -                                logger('value decoder chosen by base %s' % (baseTagSet,))
    +
    +                            if LOG:
    +                                LOG('value decoder chosen by base %s' % (baseTagSet,))
    +
                             except KeyError:
                                 concreteDecoder = None
    +
                         if concreteDecoder:
                             asn1Spec = chosenSpec
                             state = stDecodeValue
    +
                         else:
                             state = stTryAsExplicitTag
    +
                     else:
                         concreteDecoder = None
                         state = stTryAsExplicitTag
    -                if logger:
    -                    logger('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag'))
    +
    +                if LOG:
    +                    LOG('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as explicit tag'))
                         debug.scope.push(chosenSpec is None and '?' or chosenSpec.__class__.__name__)
    +
                 if state is stDecodeValue:
                     if not options.get('recursiveFlag', True) and not substrateFun:  # deprecate this
                         substrateFun = lambda a, b, c: (a, b[:c])
    @@ -1285,6 +1576,7 @@ class Decoder(object):
                             self, substrateFun,
                             **options
                         )
    +
                     else:
                         value, substrate = concreteDecoder.valueDecoder(
                             substrate, asn1Spec,
    @@ -1293,33 +1585,44 @@ class Decoder(object):
                             **options
                         )
     
    -                if logger:
    -                    logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or ''))
    +                if LOG:
    +                    LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or ''))
     
                     state = stStop
                     break
    +
                 if state is stTryAsExplicitTag:
    -                if tagSet and tagSet[0].tagFormat == tag.tagFormatConstructed and tagSet[0].tagClass != tag.tagClassUniversal:
    +                if (tagSet and
    +                        tagSet[0].tagFormat == tag.tagFormatConstructed and
    +                        tagSet[0].tagClass != tag.tagClassUniversal):
                         # Assume explicit tagging
                         concreteDecoder = explicitTagDecoder
                         state = stDecodeValue
    +
                     else:
                         concreteDecoder = None
                         state = self.defaultErrorState
    -                if logger:
    -                    logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as failure'))
    +
    +                if LOG:
    +                    LOG('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state is stDecodeValue and 'value' or 'as failure'))
    +
                 if state is stDumpRawValue:
                     concreteDecoder = self.defaultRawDecoder
    -                if logger:
    -                    logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
    +
    +                if LOG:
    +                    LOG('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
    +
                     state = stDecodeValue
    +
                 if state is stErrorCondition:
                     raise error.PyAsn1Error(
                         '%s not in asn1Spec: %r' % (tagSet, asn1Spec)
                     )
    -        if logger:
    +
    +        if LOG:
                 debug.scope.pop()
    -            logger('decoder left scope %s, call completed' % debug.scope)
    +            LOG('decoder left scope %s, call completed' % debug.scope)
    +
             return value, substrate
     
     
    @@ -1349,7 +1652,7 @@ class Decoder(object):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
     #:     On decoding errors
     #:
     #: Examples
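
With SubstrateUnderrunError now documented alongside PyAsn1Error, callers can
tell truncated input apart from other failures. A small usage sketch (note
SubstrateUnderrunError subclasses PyAsn1Error, so it must be caught first):

    from pyasn1 import error
    from pyasn1.codec.ber import decoder

    try:
        value, rest = decoder.decode(b'\x02\x01\x0c')   # BER INTEGER 12
    except error.SubstrateUnderrunError:
        pass   # substrate ended mid-TLV
    except error.PyAsn1Error:
        pass   # any other decoding failure
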
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/encoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/encoder.py
    index 0094b22..778aa86 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/encoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/encoder.py
    @@ -1,9 +1,11 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
    +import sys
    +
     from pyasn1 import debug
     from pyasn1 import error
     from pyasn1.codec.ber import eoo
    @@ -17,6 +19,8 @@ from pyasn1.type import useful
     
     __all__ = ['encode']
     
    +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
    +
     
     class AbstractItemEncoder(object):
         supportIndefLenMode = True
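
The module-level LOG gate registered above replaces the per-call debug.logger
checks. A sketch of flipping it at run time; the flag spelling 'all' follows
the pyasn1 debug module and may vary between releases:

    from pyasn1 import debug
    from pyasn1.codec.ber import encoder
    from pyasn1.type import univ

    debug.setLogger(debug.Debug('all'))    # arm every registered loggee
    encoder.encode(univ.Integer(12))       # now traced through LOG(...) calls
    debug.setLogger(0)                     # disarm again
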
    @@ -31,29 +35,39 @@ class AbstractItemEncoder(object):
             encodedTag = tagClass | tagFormat
             if isConstructed:
                 encodedTag |= tag.tagFormatConstructed
    +
             if tagId < 31:
                 return encodedTag | tagId,
    +
             else:
                 substrate = tagId & 0x7f,
    +
                 tagId >>= 7
    +
                 while tagId:
                     substrate = (0x80 | (tagId & 0x7f),) + substrate
                     tagId >>= 7
    +
                 return (encodedTag | 0x1F,) + substrate
     
         def encodeLength(self, length, defMode):
             if not defMode and self.supportIndefLenMode:
                 return (0x80,)
    +
             if length < 0x80:
                 return length,
    +
             else:
                 substrate = ()
                 while length:
                     substrate = (length & 0xff,) + substrate
                     length >>= 8
    +
                 substrateLen = len(substrate)
    +
                 if substrateLen > 126:
                     raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen)
    +
                 return (0x80 | substrateLen,) + substrate
     
         def encodeValue(self, value, asn1Spec, encodeFun, **options):
    @@ -75,26 +89,51 @@ class AbstractItemEncoder(object):
     
             defMode = options.get('defMode', True)
     
    +        substrate = null
    +
             for idx, singleTag in enumerate(tagSet.superTags):
     
                 defModeOverride = defMode
     
                 # base tag?
                 if not idx:
    -                substrate, isConstructed, isOctets = self.encodeValue(
    -                    value, asn1Spec, encodeFun, **options
    -                )
    +                try:
    +                    substrate, isConstructed, isOctets = self.encodeValue(
    +                        value, asn1Spec, encodeFun, **options
    +                    )
    +
    +                except error.PyAsn1Error:
    +                    exc = sys.exc_info()
    +                    raise error.PyAsn1Error(
    +                        'Error encoding %r: %s' % (value, exc[1]))
    +
    +                if LOG:
    +                    LOG('encoded %svalue %s into %s' % (
    +                        isConstructed and 'constructed ' or '', value, substrate
    +                    ))
     
                     if not substrate and isConstructed and options.get('ifNotEmpty', False):
                         return substrate
     
    -                # primitive form implies definite mode
                     if not isConstructed:
                         defModeOverride = True
     
    +                    if LOG:
     +                        LOG('overridden encoding mode into definite for primitive type')
    +
                 header = self.encodeTag(singleTag, isConstructed)
    +
    +            if LOG:
    +                LOG('encoded %stag %s into %s' % (
    +                    isConstructed and 'constructed ' or '',
    +                    singleTag, debug.hexdump(ints2octs(header))))
    +
                 header += self.encodeLength(len(substrate), defModeOverride)
     
    +            if LOG:
    +                LOG('encoded %s octets (tag + payload) into %s' % (
    +                    len(substrate), debug.hexdump(ints2octs(header))))
    +
                 if isOctets:
                     substrate = ints2octs(header) + substrate
     
    @@ -131,6 +170,11 @@ class IntegerEncoder(AbstractItemEncoder):
     
         def encodeValue(self, value, asn1Spec, encodeFun, **options):
             if value == 0:
    +            if LOG:
    +                LOG('encoding %spayload for zero INTEGER' % (
    +                    self.supportCompactZero and 'no ' or ''
    +                ))
    +
                 # de-facto way to encode zero
                 if self.supportCompactZero:
                     return (), False, False
    @@ -157,11 +201,15 @@ class BitStringEncoder(AbstractItemEncoder):
                 substrate = alignedValue.asOctets()
                 return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True
     
    +        if LOG:
    +            LOG('encoding into up to %s-octet chunks' % maxChunkSize)
    +
             baseTag = value.tagSet.baseTag
     
             # strip off explicit tags
             if baseTag:
                 tagSet = tag.TagSet(baseTag, baseTag)
    +
             else:
                 tagSet = tag.TagSet()
     
    @@ -195,44 +243,47 @@ class OctetStringEncoder(AbstractItemEncoder):
             if not maxChunkSize or len(substrate) <= maxChunkSize:
                 return substrate, False, True
     
    -        else:
    +        if LOG:
    +            LOG('encoding into up to %s-octet chunks' % maxChunkSize)
     
    -            # strip off explicit tags for inner chunks
    +        # strip off explicit tags for inner chunks
     
    -            if asn1Spec is None:
    -                baseTag = value.tagSet.baseTag
    +        if asn1Spec is None:
    +            baseTag = value.tagSet.baseTag
     
    -                # strip off explicit tags
    -                if baseTag:
    -                    tagSet = tag.TagSet(baseTag, baseTag)
    -                else:
    -                    tagSet = tag.TagSet()
    +            # strip off explicit tags
    +            if baseTag:
    +                tagSet = tag.TagSet(baseTag, baseTag)
     
    -                asn1Spec = value.clone(tagSet=tagSet)
    +            else:
    +                tagSet = tag.TagSet()
     
    -            elif not isOctetsType(value):
    -                baseTag = asn1Spec.tagSet.baseTag
    +            asn1Spec = value.clone(tagSet=tagSet)
     
    -                # strip off explicit tags
    -                if baseTag:
    -                    tagSet = tag.TagSet(baseTag, baseTag)
    -                else:
    -                    tagSet = tag.TagSet()
    +        elif not isOctetsType(value):
    +            baseTag = asn1Spec.tagSet.baseTag
     
    -                asn1Spec = asn1Spec.clone(tagSet=tagSet)
    +            # strip off explicit tags
    +            if baseTag:
    +                tagSet = tag.TagSet(baseTag, baseTag)
     
    -            pos = 0
    -            substrate = null
    +            else:
    +                tagSet = tag.TagSet()
     
    -            while True:
    -                chunk = value[pos:pos + maxChunkSize]
    -                if not chunk:
    -                    break
    +            asn1Spec = asn1Spec.clone(tagSet=tagSet)
     
    -                substrate += encodeFun(chunk, asn1Spec, **options)
    -                pos += maxChunkSize
    +        pos = 0
    +        substrate = null
     
    -            return substrate, True, True
    +        while True:
    +            chunk = value[pos:pos + maxChunkSize]
    +            if not chunk:
    +                break
    +
    +            substrate += encodeFun(chunk, asn1Spec, **options)
    +            pos += maxChunkSize
    +
    +        return substrate, True, True
     
     
     class NullEncoder(AbstractItemEncoder):
    @@ -268,8 +319,10 @@ class ObjectIdentifierEncoder(AbstractItemEncoder):
                     oid = (second + 80,) + oid[2:]
                 else:
                     raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
    +
             elif first == 2:
                 oid = (second + 80,) + oid[2:]
    +
             else:
                 raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
     
    @@ -280,15 +333,19 @@ class ObjectIdentifierEncoder(AbstractItemEncoder):
                 if 0 <= subOid <= 127:
                     # Optimize for the common case
                     octets += (subOid,)
    +
                 elif subOid > 127:
                     # Pack large Sub-Object IDs
                     res = (subOid & 0x7f,)
                     subOid >>= 7
    +
                     while subOid:
                         res = (0x80 | (subOid & 0x7f),) + res
                         subOid >>= 7
    +
                     # Add packed Sub-Object ID to resulted Object ID
                     octets += res
    +
                 else:
                     raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value))
     
    @@ -304,12 +361,16 @@ class RealEncoder(AbstractItemEncoder):
             ms, es = 1, 1
             if m < 0:
                 ms = -1  # mantissa sign
    +
             if e < 0:
    -            es = -1  # exponenta sign 
    +            es = -1  # exponent sign
    +
             m *= ms
    +
             if encbase == 8:
                 m *= 2 ** (abs(e) % 3 * es)
                 e = abs(e) // 3 * es
    +
             elif encbase == 16:
                 m *= 2 ** (abs(e) % 4 * es)
                 e = abs(e) // 4 * es
    @@ -320,6 +381,7 @@ class RealEncoder(AbstractItemEncoder):
                     e -= 1
                     continue
                 break
    +
             return ms, int(m), encbase, e
     
         def _chooseEncBase(self, value):
    @@ -327,23 +389,32 @@ class RealEncoder(AbstractItemEncoder):
             encBase = [2, 8, 16]
             if value.binEncBase in encBase:
                 return self._dropFloatingPoint(m, value.binEncBase, e)
    +
             elif self.binEncBase in encBase:
                 return self._dropFloatingPoint(m, self.binEncBase, e)
    -        # auto choosing base 2/8/16 
    +
    +        # auto choosing base 2/8/16
             mantissa = [m, m, m]
    -        exponenta = [e, e, e]
    +        exponent = [e, e, e]
             sign = 1
             encbase = 2
             e = float('inf')
    +
             for i in range(3):
                 (sign,
                  mantissa[i],
                  encBase[i],
    -             exponenta[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponenta[i])
    -            if abs(exponenta[i]) < abs(e) or (abs(exponenta[i]) == abs(e) and mantissa[i] < m):
    -                e = exponenta[i]
    +             exponent[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponent[i])
    +
    +            if abs(exponent[i]) < abs(e) or (abs(exponent[i]) == abs(e) and mantissa[i] < m):
    +                e = exponent[i]
                     m = int(mantissa[i])
                     encbase = encBase[i]
    +
    +        if LOG:
    +            LOG('automatically chosen REAL encoding base %s, sign %s, mantissa %s, '
    +                'exponent %s' % (encbase, sign, m, e))
    +
             return sign, m, encbase, e
     
         def encodeValue(self, value, asn1Spec, encodeFun, **options):
    @@ -352,69 +423,98 @@ class RealEncoder(AbstractItemEncoder):
     
             if value.isPlusInf:
                 return (0x40,), False, False
    +
             if value.isMinusInf:
                 return (0x41,), False, False
    +
             m, b, e = value
    +
             if not m:
                 return null, False, True
    +
             if b == 10:
    +            if LOG:
    +                LOG('encoding REAL into character form')
    +
                 return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True
    +
             elif b == 2:
                 fo = 0x80  # binary encoding
                 ms, m, encbase, e = self._chooseEncBase(value)
    +
                 if ms < 0:  # mantissa sign
                     fo |= 0x40  # sign bit
    -            # exponenta & mantissa normalization
    +
    +            # exponent & mantissa normalization
                 if encbase == 2:
                     while m & 0x1 == 0:
                         m >>= 1
                         e += 1
    +
                 elif encbase == 8:
                     while m & 0x7 == 0:
                         m >>= 3
                         e += 1
                     fo |= 0x10
    +
                 else:  # encbase = 16
                     while m & 0xf == 0:
                         m >>= 4
                         e += 1
                     fo |= 0x20
    +
                 sf = 0  # scale factor
    +
                 while m & 0x1 == 0:
                     m >>= 1
                     sf += 1
    +
                 if sf > 3:
                     raise error.PyAsn1Error('Scale factor overflow')  # bug if raised
    +
                 fo |= sf << 2
                 eo = null
                 if e == 0 or e == -1:
                     eo = int2oct(e & 0xff)
    +
                 else:
                     while e not in (0, -1):
                         eo = int2oct(e & 0xff) + eo
                         e >>= 8
    +
                     if e == 0 and eo and oct2int(eo[0]) & 0x80:
                         eo = int2oct(0) + eo
    +
                     if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
                         eo = int2oct(0xff) + eo
    +
                 n = len(eo)
                 if n > 0xff:
                     raise error.PyAsn1Error('Real exponent overflow')
    +
                 if n == 1:
                     pass
    +
                 elif n == 2:
                     fo |= 1
    +
                 elif n == 3:
                     fo |= 2
    +
                 else:
                     fo |= 3
                     eo = int2oct(n & 0xff) + eo
    +
                 po = null
    +
                 while m:
                     po = int2oct(m & 0xff) + po
                     m >>= 8
    +
                 substrate = int2oct(fo) + eo + po
    +
                 return substrate, False, True
    +
             else:
                 raise error.PyAsn1Error('Prohibited Real base %s' % b)
     
    @@ -428,9 +528,18 @@ class SequenceEncoder(AbstractItemEncoder):
     
             substrate = null
     
    +        omitEmptyOptionals = options.get(
    +            'omitEmptyOptionals', self.omitEmptyOptionals)
    +
    +        if LOG:
    +            LOG('%sencoding empty OPTIONAL components' % (
    +                    omitEmptyOptionals and 'not ' or ''))
    +
             if asn1Spec is None:
                 # instance of ASN.1 schema
    -            value.verifySizeSpec()
    +            inconsistency = value.isInconsistent
    +            if inconsistency:
    +                raise inconsistency
     
                 namedTypes = value.componentType
     
    @@ -439,23 +548,44 @@ class SequenceEncoder(AbstractItemEncoder):
                         namedType = namedTypes[idx]
     
                         if namedType.isOptional and not component.isValue:
    -                            continue
    +                        if LOG:
    +                            LOG('not encoding OPTIONAL component %r' % (namedType,))
    +                        continue
     
                         if namedType.isDefaulted and component == namedType.asn1Object:
    -                            continue
    +                        if LOG:
    +                            LOG('not encoding DEFAULT component %r' % (namedType,))
    +                        continue
     
    -                    if self.omitEmptyOptionals:
    +                    if omitEmptyOptionals:
                             options.update(ifNotEmpty=namedType.isOptional)
     
    -                chunk = encodeFun(component, asn1Spec, **options)
    -
                     # wrap open type blob if needed
                     if namedTypes and namedType.openType:
    -                    wrapType = namedType.asn1Object
    -                    if wrapType.tagSet and not wrapType.isSameTypeWith(component):
    -                        chunk = encodeFun(chunk, wrapType, **options)
     
    -                substrate += chunk
    +                    wrapType = namedType.asn1Object
    +
    +                    if wrapType.typeId in (
    +                            univ.SetOf.typeId, univ.SequenceOf.typeId):
    +
    +                        substrate += encodeFun(
    +                                component, asn1Spec,
    +                                **dict(options, wrapType=wrapType.componentType))
    +
    +                    else:
    +                        chunk = encodeFun(component, asn1Spec, **options)
    +
    +                        if wrapType.isSameTypeWith(component):
    +                            substrate += chunk
    +
    +                        else:
    +                            substrate += encodeFun(chunk, wrapType, **options)
    +
    +                            if LOG:
    +                                LOG('wrapped with wrap type %r' % (wrapType,))
    +
    +                else:
    +                    substrate += encodeFun(component, asn1Spec, **options)
     
             else:
                 # bare Python value + ASN.1 schema
    @@ -465,43 +595,87 @@ class SequenceEncoder(AbstractItemEncoder):
                         component = value[namedType.name]
     
                     except KeyError:
    -                    raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value))
    +                    raise error.PyAsn1Error('Component name "%s" not found in %r' % (
    +                        namedType.name, value))
     
                     if namedType.isOptional and namedType.name not in value:
    +                    if LOG:
    +                        LOG('not encoding OPTIONAL component %r' % (namedType,))
                         continue
     
                     if namedType.isDefaulted and component == namedType.asn1Object:
    +                    if LOG:
    +                        LOG('not encoding DEFAULT component %r' % (namedType,))
                         continue
     
    -                if self.omitEmptyOptionals:
    +                if omitEmptyOptionals:
                         options.update(ifNotEmpty=namedType.isOptional)
     
    -                chunk = encodeFun(component, asn1Spec[idx], **options)
    +                componentSpec = namedType.asn1Object
     
                     # wrap open type blob if needed
                     if namedType.openType:
    -                    wrapType = namedType.asn1Object
    -                    if wrapType.tagSet and not wrapType.isSameTypeWith(component):
    -                        chunk = encodeFun(chunk, wrapType, **options)
     
    -                substrate += chunk
    +                    if componentSpec.typeId in (
    +                            univ.SetOf.typeId, univ.SequenceOf.typeId):
    +
    +                        substrate += encodeFun(
    +                                component, componentSpec,
    +                                **dict(options, wrapType=componentSpec.componentType))
    +
    +                    else:
    +                        chunk = encodeFun(component, componentSpec, **options)
    +
    +                        if componentSpec.isSameTypeWith(component):
    +                            substrate += chunk
    +
    +                        else:
    +                            substrate += encodeFun(chunk, componentSpec, **options)
    +
    +                            if LOG:
    +                                LOG('wrapped with wrap type %r' % (componentSpec,))
    +
    +                else:
    +                    substrate += encodeFun(component, componentSpec, **options)
     
             return substrate, True, True
     
     
     class SequenceOfEncoder(AbstractItemEncoder):
    -    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    +    def _encodeComponents(self, value, asn1Spec, encodeFun, **options):
    +
             if asn1Spec is None:
    -            value.verifySizeSpec()
    +            inconsistency = value.isInconsistent
    +            if inconsistency:
    +                raise inconsistency
    +
             else:
                 asn1Spec = asn1Spec.componentType
     
    -        substrate = null
    +        chunks = []
    +
    +        wrapType = options.pop('wrapType', None)
     
             for idx, component in enumerate(value):
    -            substrate += encodeFun(value[idx], asn1Spec, **options)
    +            chunk = encodeFun(component, asn1Spec, **options)
     
    -        return substrate, True, True
    +            if (wrapType is not None and
    +                    not wrapType.isSameTypeWith(component)):
    +                # wrap encoded value with wrapper container (e.g. ANY)
    +                chunk = encodeFun(chunk, wrapType, **options)
    +
    +                if LOG:
    +                    LOG('wrapped with wrap type %r' % (wrapType,))
    +
    +            chunks.append(chunk)
    +
    +        return chunks
    +
    +    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    +        chunks = self._encodeComponents(
    +            value, asn1Spec, encodeFun, **options)
    +
    +        return null.join(chunks), True, True
     
     
     class ChoiceEncoder(AbstractItemEncoder):
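
The wrapType plumbing above targets open-typed fields that are themselves
SET OF / SEQUENCE OF containers. A sketch of such a layout (Record and the
governing map are hypothetical):

    from pyasn1.type import namedtype, opentype, univ
    from pyasn1.codec.ber import encoder

    algo = opentype.OpenType('id', {1: univ.Integer()})   # hypothetical map

    class Record(univ.Sequence):                          # hypothetical type
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', univ.Integer()),
            namedtype.NamedType('values',
                                univ.SequenceOf(componentType=univ.Any()),
                                openType=algo)
        )

    record = Record()
    record['id'] = 1
    record['values'].append(encoder.encode(univ.Integer(42)))

    # SequenceEncoder forwards wrapType=univ.Any() to SequenceOfEncoder,
    # which wraps any element that is not already of the wrap type
    substrate = encoder.encode(record)
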
    @@ -620,13 +794,8 @@ class Encoder(object):
                 raise error.PyAsn1Error('Value %r is not ASN.1 type instance '
                                         'and "asn1Spec" not given' % (value,))
     
    -        if debug.logger & debug.flagEncoder:
    -            logger = debug.logger
    -        else:
    -            logger = None
    -
    -        if logger:
    -            logger('encoder called in %sdef mode, chunk size %s for '
    +        if LOG:
    +            LOG('encoder called in %sdef mode, chunk size %s for '
                        'type %s, value:\n%s' % (not options.get('defMode', True) and 'in' or '', options.get('maxChunkSize', 0), asn1Spec is None and value.prettyPrintType() or asn1Spec.prettyPrintType(), value))
     
             if self.fixedDefLengthMode is not None:
    @@ -639,8 +808,8 @@ class Encoder(object):
             try:
                 concreteEncoder = self.__typeMap[typeId]
     
    -            if logger:
    -                logger('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId))
    +            if LOG:
    +                LOG('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId))
     
             except KeyError:
                 if asn1Spec is None:
    @@ -657,13 +826,13 @@ class Encoder(object):
                 except KeyError:
                     raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet))
     
    -            if logger:
    -                logger('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet))
    +            if LOG:
    +                LOG('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet))
     
             substrate = concreteEncoder.encode(value, asn1Spec, self, **options)
     
    -        if logger:
    -            logger('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate)))
    +        if LOG:
    +            LOG('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate)))
     
             return substrate
     
    @@ -684,7 +853,7 @@ class Encoder(object):
     #:     Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
     #:
     #: defMode: :py:class:`bool`
    -#:     If `False`, produces indefinite length encoding
    +#:     If :obj:`False`, produces indefinite length encoding
     #:
     #: maxChunkSize: :py:class:`int`
     #:     Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
    @@ -696,7 +865,7 @@ class Encoder(object):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error
     #:     On encoding errors
     #:
     #: Examples
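
The defMode and maxChunkSize options documented above combine as follows;
a short sketch:

    from pyasn1.codec.ber import encoder
    from pyasn1.type import univ

    # definite length, single chunk (the defaults)
    encoder.encode(univ.OctetString('abcdefgh'))

    # indefinite length, fragmented into 4-octet inner chunks
    encoder.encode(univ.OctetString('abcdefgh'),
                   defMode=False, maxChunkSize=4)
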
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/eoo.py b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/eoo.py
    index d4cd827..48eb859 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/ber/eoo.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/ber/eoo.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1.type import base
    @@ -10,7 +10,7 @@ from pyasn1.type import tag
     __all__ = ['endOfOctets']
     
     
    -class EndOfOctets(base.AbstractSimpleAsn1Item):
    +class EndOfOctets(base.SimpleAsn1Type):
         defaultValue = 0
         tagSet = tag.initTagSet(
             tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/cer/decoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/cer/decoder.py
    index 66572ec..3e86fd0 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/cer/decoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/cer/decoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import error
    @@ -87,7 +87,7 @@ class Decoder(decoder.Decoder):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
     #:     On decoding errors
     #:
     #: Examples
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/cer/encoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/cer/encoder.py
    index 768d3c1..935b696 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/cer/encoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/cer/encoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import error
    @@ -31,17 +31,20 @@ class RealEncoder(encoder.RealEncoder):
     # specialized GeneralStringEncoder here
     
     class TimeEncoderMixIn(object):
    -    zchar, = str2octs('Z')
    -    pluschar, = str2octs('+')
    -    minuschar, = str2octs('-')
    -    commachar, = str2octs(',')
    -    minLength = 12
    -    maxLength = 19
    +    Z_CHAR = ord('Z')
    +    PLUS_CHAR = ord('+')
    +    MINUS_CHAR = ord('-')
    +    COMMA_CHAR = ord(',')
    +    DOT_CHAR = ord('.')
    +    ZERO_CHAR = ord('0')
    +
    +    MIN_LENGTH = 12
    +    MAX_LENGTH = 19
     
         def encodeValue(self, value, asn1Spec, encodeFun, **options):
    -        # Encoding constraints:
    +        # CER encoding constraints:
             # - minutes are mandatory, seconds are optional
    -        # - subseconds must NOT be zero
    +        # - sub-seconds must NOT be zero / no meaningless zeros
             # - no hanging fraction dot
             # - time in UTC (Z)
             # - only dot is allowed for fractions
    @@ -49,20 +52,46 @@ class TimeEncoderMixIn(object):
             if asn1Spec is not None:
                 value = asn1Spec.clone(value)
     
    -        octets = value.asOctets()
    +        numbers = value.asNumbers()
     
    -        if not self.minLength < len(octets) < self.maxLength:
    -            raise error.PyAsn1Error('Length constraint violated: %r' % value)
    +        if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers:
    +            raise error.PyAsn1Error('Must be UTC time: %r' % value)
     
    -        if self.pluschar in octets or self.minuschar in octets:
    -            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
    +        if numbers[-1] != self.Z_CHAR:
    +            raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value)
     
    -        if octets[-1] != self.zchar:
    -            raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % octets)
    -
    -        if self.commachar in octets:
    +        if self.COMMA_CHAR in numbers:
                 raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value)
     
    +        if self.DOT_CHAR in numbers:
    +
    +            isModified = False
    +
    +            numbers = list(numbers)
    +
    +            searchIndex = min(numbers.index(self.DOT_CHAR) + 4, len(numbers) - 1)
    +
    +            while numbers[searchIndex] != self.DOT_CHAR:
    +                if numbers[searchIndex] == self.ZERO_CHAR:
    +                    del numbers[searchIndex]
    +                    isModified = True
    +
    +                searchIndex -= 1
    +
    +            searchIndex += 1
    +
    +            if searchIndex < len(numbers):
    +                if numbers[searchIndex] == self.Z_CHAR:
    +                    # drop hanging fraction dot
    +                    del numbers[searchIndex - 1]
    +                    isModified = True
    +
    +            if isModified:
    +                value = value.clone(numbers)
    +
    +        if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH:
    +            raise error.PyAsn1Error('Length constraint violated: %r' % value)
    +
             options.update(maxChunkSize=1000)
     
             return encoder.OctetStringEncoder.encodeValue(
    @@ -71,13 +100,44 @@ class TimeEncoderMixIn(object):
     
     
     class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
    -    minLength = 12
    -    maxLength = 19
    +    MIN_LENGTH = 12
    +    MAX_LENGTH = 20
     
     
     class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
    -    minLength = 10
    -    maxLength = 14
    +    MIN_LENGTH = 10
    +    MAX_LENGTH = 14
    +
    +
    +class SetOfEncoder(encoder.SequenceOfEncoder):
    +    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    +        chunks = self._encodeComponents(
    +            value, asn1Spec, encodeFun, **options)
    +
    +        # sort by serialised and padded components
    +        if len(chunks) > 1:
    +            zero = str2octs('\x00')
    +            maxLen = max(map(len, chunks))
    +            paddedChunks = [
    +                (x.ljust(maxLen, zero), x) for x in chunks
    +            ]
    +            paddedChunks.sort(key=lambda x: x[0])
    +
    +            chunks = [x[1] for x in paddedChunks]
    +
    +        return null.join(chunks), True, True
    +
    +
    +class SequenceOfEncoder(encoder.SequenceOfEncoder):
    +    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    +
    +        if options.get('ifNotEmpty', False) and not len(value):
    +            return null, True, True
    +
    +        chunks = self._encodeComponents(
    +            value, asn1Spec, encodeFun, **options)
    +
    +        return null.join(chunks), True, True
     
     
     class SetEncoder(encoder.SequenceEncoder):
    @@ -109,7 +169,9 @@ class SetEncoder(encoder.SequenceEncoder):
     
             if asn1Spec is None:
                 # instance of ASN.1 schema
    -            value.verifySizeSpec()
    +            inconsistency = value.isInconsistent
    +            if inconsistency:
    +                raise inconsistency
     
                 namedTypes = value.componentType
     
    @@ -168,55 +230,10 @@ class SetEncoder(encoder.SequenceEncoder):
             return substrate, True, True
     
     
    -class SetOfEncoder(encoder.SequenceOfEncoder):
    -    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    -        if asn1Spec is None:
    -            value.verifySizeSpec()
    -        else:
    -            asn1Spec = asn1Spec.componentType
    -
    -        components = [encodeFun(x, asn1Spec, **options)
    -                      for x in value]
    -
    -        # sort by serialised and padded components
    -        if len(components) > 1:
    -            zero = str2octs('\x00')
    -            maxLen = max(map(len, components))
    -            paddedComponents = [
    -                (x.ljust(maxLen, zero), x) for x in components
    -                ]
    -            paddedComponents.sort(key=lambda x: x[0])
    -
    -            components = [x[1] for x in paddedComponents]
    -
    -        substrate = null.join(components)
    -
    -        return substrate, True, True
    -
    -
     class SequenceEncoder(encoder.SequenceEncoder):
         omitEmptyOptionals = True
     
     
    -class SequenceOfEncoder(encoder.SequenceOfEncoder):
    -    def encodeValue(self, value, asn1Spec, encodeFun, **options):
    -
    -        if options.get('ifNotEmpty', False) and not len(value):
    -            return null, True, True
    -
    -        if asn1Spec is None:
    -            value.verifySizeSpec()
    -        else:
    -            asn1Spec = asn1Spec.componentType
    -
    -        substrate = null
    -
    -        for idx, component in enumerate(value):
    -            substrate += encodeFun(value[idx], asn1Spec, **options)
    -
    -        return substrate, True, True
    -
    -
     tagMap = encoder.tagMap.copy()
     tagMap.update({
         univ.Boolean.tagSet: BooleanEncoder(),
    @@ -269,7 +286,7 @@ class Encoder(encoder.Encoder):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error
     #:     On encoding errors
     #:
     #: Examples
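
    The reworked TimeEncoderMixIn above enforces the CER rules at character
    level. A minimal sketch of the observable behaviour (assuming a pyasn1
    build carrying these changes; the exact octets printed are illustrative):

        from pyasn1.codec.cer import encoder
        from pyasn1.type import useful
        from pyasn1 import error

        # Trailing zeros in the fraction are trimmed: '.450' becomes '.45'
        print(encoder.encode(useful.GeneralizedTime('20191026120000.450Z')))

        # A local UTC offset violates the UTC-only ("Z") constraint
        try:
            encoder.encode(useful.GeneralizedTime('20191026120000+0200'))
        except error.PyAsn1Error as exc:
            print(exc)  # Must be UTC time: ...
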
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/der/decoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/der/decoder.py
    index f67d025..1a13fdb 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/der/decoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/der/decoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1.codec.cer import decoder
    @@ -67,7 +67,7 @@ class Decoder(decoder.Decoder):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
     #:     On decoding errors
     #:
     #: Examples
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/der/encoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/der/encoder.py
    index 756d9fe..90e982d 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/der/encoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/der/encoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import error
    @@ -82,7 +82,7 @@ class Encoder(encoder.Encoder):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error
     #:     On encoding errors
     #:
     #: Examples
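
    DER inherits the CER SetOfEncoder shown earlier, which orders SET OF
    components by their padded serialisations. A sketch of the effect
    (assuming a pyasn1 build carrying these changes; `Ints` is a made-up
    type for illustration):

        from pyasn1.codec.der import encoder
        from pyasn1.type import univ

        class Ints(univ.SetOf):
            componentType = univ.Integer()

        a = Ints()
        a.extend([3, 1, 2])

        b = Ints()
        b.extend([2, 3, 1])

        # components are canonically ordered, so insertion order is irrelevant
        assert encoder.encode(a) == encoder.encode(b)
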
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/native/decoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/native/decoder.py
    index 78fcda6..104b92e 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/native/decoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/native/decoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import debug
    @@ -14,6 +14,8 @@ from pyasn1.type import useful
     
     __all__ = ['decode']
     
    +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
    +
     
     class AbstractScalarDecoder(object):
         def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
    @@ -136,13 +138,10 @@ class Decoder(object):
             self.__typeMap = typeMap
     
         def __call__(self, pyObject, asn1Spec, **options):
    -        if debug.logger & debug.flagDecoder:
    -            logger = debug.logger
    -        else:
    -            logger = None
    -        if logger:
    +
    +        if LOG:
                 debug.scope.push(type(pyObject).__name__)
    -            logger('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__))
    +            LOG('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__))
     
             if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item):
                 raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__)
    @@ -159,13 +158,13 @@ class Decoder(object):
                 except KeyError:
                     raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet)
     
    -        if logger:
    -            logger('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject)))
    +        if LOG:
    +            LOG('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject)))
     
             value = valueDecoder(pyObject, asn1Spec, self, **options)
     
    -        if logger:
    -            logger('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value)))
    +        if LOG:
    +            LOG('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value)))
                 debug.scope.pop()
     
             return value
    @@ -196,7 +195,7 @@ class Decoder(object):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error
     #:     On decoding errors
     #:
     #: Examples
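
    The LOG plumbing above instruments this call path; using the native
    decoder itself stays a one-liner (a sketch, assuming a pyasn1 build
    carrying these changes):

        from pyasn1.codec.native import decoder
        from pyasn1.type import univ

        # builds an ASN.1 value object from a plain Python object
        value = decoder.decode(42, asn1Spec=univ.Integer())
        assert value == 42 and value.isValue
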
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/codec/native/encoder.py b/server/www/packages/packages-windows/x86/pyasn1/codec/native/encoder.py
    index 0956191..4318abd 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/codec/native/encoder.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/codec/native/encoder.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     try:
    @@ -20,6 +20,8 @@ from pyasn1.type import useful
     
     __all__ = ['encode']
     
    +LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
    +
     
     class AbstractItemEncoder(object):
         def encode(self, value, encodeFun, **options):
    @@ -70,7 +72,9 @@ class SetEncoder(AbstractItemEncoder):
         protoDict = dict
     
         def encode(self, value, encodeFun, **options):
    -        value.verifySizeSpec()
    +        inconsistency = value.isInconsistent
    +        if inconsistency:
    +            raise inconsistency
     
             namedTypes = value.componentType
             substrate = self.protoDict()
    @@ -88,7 +92,9 @@ class SequenceEncoder(SetEncoder):
     
     class SequenceOfEncoder(AbstractItemEncoder):
         def encode(self, value, encodeFun, **options):
    -        value.verifySizeSpec()
    +        inconsistency = value.isInconsistent
    +        if inconsistency:
    +            raise inconsistency
             return [encodeFun(x, **options) for x in value]
     
     
    @@ -180,14 +186,9 @@ class Encoder(object):
             if not isinstance(value, base.Asn1Item):
                 raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)')
     
    -        if debug.logger & debug.flagEncoder:
    -            logger = debug.logger
    -        else:
    -            logger = None
    -
    -        if logger:
    +        if LOG:
                 debug.scope.push(type(value).__name__)
    -            logger('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint()))
    +            LOG('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint()))
     
             tagSet = value.tagSet
     
    @@ -204,13 +205,13 @@ class Encoder(object):
                 except KeyError:
                     raise error.PyAsn1Error('No encoder for %s' % (value,))
     
    -        if logger:
    -            logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
    +        if LOG:
    +            LOG('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
     
             pyObject = concreteEncoder.encode(value, self, **options)
     
    -        if logger:
    -            logger('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject)))
    +        if LOG:
    +            LOG('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject)))
                 debug.scope.pop()
     
             return pyObject
    @@ -238,7 +239,7 @@ class Encoder(object):
     #:
     #: Raises
     #: ------
    -#: :py:class:`~pyasn1.error.PyAsn1Error`
    +#: ~pyasn1.error.PyAsn1Error
     #:     On encoding errors
     #:
     #: Examples
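
    The `isInconsistent` property now drives the consistency check that
    `verifySizeSpec()` used to perform. A sketch (assuming a pyasn1 build
    carrying these changes; `Pair` is a made-up type for illustration):

        from pyasn1.codec.native import encoder
        from pyasn1.type import namedtype, univ

        class Pair(univ.Sequence):
            componentType = namedtype.NamedTypes(
                namedtype.NamedType('x', univ.Integer()),
                namedtype.NamedType('y', univ.Integer()))

        pair = Pair()
        pair['x'] = 1
        pair['y'] = 2

        # SequenceEncoder raises value.isInconsistent (if truthy) first
        assert encoder.encode(pair) == {'x': 1, 'y': 2}
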
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/binary.py b/server/www/packages/packages-windows/x86/pyasn1/compat/binary.py
    index c38a650..addbdc9 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/binary.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/binary.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from sys import version_info
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/calling.py b/server/www/packages/packages-windows/x86/pyasn1/compat/calling.py
    index c60b50d..778a3d1 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/calling.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/calling.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from sys import version_info
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/dateandtime.py b/server/www/packages/packages-windows/x86/pyasn1/compat/dateandtime.py
    index 27526ad..5e471bf 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/dateandtime.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/dateandtime.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import time
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/integer.py b/server/www/packages/packages-windows/x86/pyasn1/compat/integer.py
    index bb3d099..4b31791 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/integer.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/integer.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import sys
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/octets.py b/server/www/packages/packages-windows/x86/pyasn1/compat/octets.py
    index a06db5d..99d23bb 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/octets.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/octets.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from sys import version_info
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/compat/string.py b/server/www/packages/packages-windows/x86/pyasn1/compat/string.py
    index 4d8a045..b9bc8c3 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/compat/string.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/compat/string.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from sys import version_info
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/debug.py b/server/www/packages/packages-windows/x86/pyasn1/debug.py
    index ab72fa8..8707aa8 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/debug.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/debug.py
    @@ -1,10 +1,11 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import logging
    +import sys
     
     from pyasn1 import __version__
     from pyasn1 import error
    @@ -12,18 +13,20 @@ from pyasn1.compat.octets import octs2ints
     
     __all__ = ['Debug', 'setLogger', 'hexdump']
     
    -flagNone = 0x0000
    -flagEncoder = 0x0001
    -flagDecoder = 0x0002
    -flagAll = 0xffff
    +DEBUG_NONE = 0x0000
    +DEBUG_ENCODER = 0x0001
    +DEBUG_DECODER = 0x0002
    +DEBUG_ALL = 0xffff
     
    -flagMap = {
    -    'none': flagNone,
    -    'encoder': flagEncoder,
    -    'decoder': flagDecoder,
    -    'all': flagAll
    +FLAG_MAP = {
    +    'none': DEBUG_NONE,
    +    'encoder': DEBUG_ENCODER,
    +    'decoder': DEBUG_DECODER,
    +    'all': DEBUG_ALL
     }
     
    +LOGGEE_MAP = {}
    +
     
     class Printer(object):
         # noinspection PyShadowingNames
    @@ -66,7 +69,7 @@ class Debug(object):
         defaultPrinter = Printer()
     
         def __init__(self, *flags, **options):
    -        self._flags = flagNone
    +        self._flags = DEBUG_NONE
     
             if 'loggerName' in options:
                 # route our logs to parent logger
    @@ -89,9 +92,9 @@ class Debug(object):
                     flag = flag[1:]
                 try:
                     if inverse:
    -                    self._flags &= ~flagMap[flag]
    +                    self._flags &= ~FLAG_MAP[flag]
                     else:
    -                    self._flags |= flagMap[flag]
    +                    self._flags |= FLAG_MAP[flag]
                 except KeyError:
                     raise error.PyAsn1Error('bad debug flag %s' % flag)
     
    @@ -109,17 +112,26 @@ class Debug(object):
         def __rand__(self, flag):
             return flag & self._flags
     
    -
    -logger = 0
    +_LOG = DEBUG_NONE
     
     
     def setLogger(userLogger):
    -    global logger
    +    global _LOG
     
         if userLogger:
    -        logger = userLogger
    +        _LOG = userLogger
         else:
    -        logger = 0
    +        _LOG = DEBUG_NONE
    +
    +    # Update registered logging clients
    +    for module, (name, flags) in LOGGEE_MAP.items():
    +        setattr(module, name, _LOG & flags and _LOG or DEBUG_NONE)
    +
    +
    +def registerLoggee(module, name='LOG', flags=DEBUG_NONE):
    +    LOGGEE_MAP[sys.modules[module]] = name, flags
    +    setLogger(_LOG)
    +    return _LOG
     
     
     def hexdump(octets):
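
    With the renamed flags, applications enable tracing like this (a sketch,
    assuming a pyasn1 build carrying these changes; the 'all'/'encoder'/
    'decoder' keywords map to the DEBUG_* constants through FLAG_MAP):

        from pyasn1 import debug

        # routes traces through stdlib logging; modules registered via
        # registerLoggee() (e.g. the native codecs above) pick it up
        debug.setLogger(debug.Debug('all'))

        # ... exercise some codec ...

        debug.setLogger(None)  # registered loggees fall back to DEBUG_NONE
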
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/error.py b/server/www/packages/packages-windows/x86/pyasn1/error.py
    index c05e65c..4f48db2 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/error.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/error.py
    @@ -1,29 +1,75 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     
     
     class PyAsn1Error(Exception):
    -    """Create pyasn1 exception object
    +    """Base pyasn1 exception
     
    -    The `PyAsn1Error` exception represents generic, usually fatal, error.
    +    `PyAsn1Error` is the base exception class (based on
    +    :class:`Exception`) that represents all possible ASN.1 related
    +    errors.
         """
     
     
     class ValueConstraintError(PyAsn1Error):
    -    """Create pyasn1 exception object
    +    """ASN.1 type constraints violation exception
     
         The `ValueConstraintError` exception indicates an ASN.1 value
         constraint violation.
    +
    +    It might happen on value object instantiation (for scalar types) or on
    +    serialization (for constructed types).
         """
     
     
     class SubstrateUnderrunError(PyAsn1Error):
    -    """Create pyasn1 exception object
    +    """ASN.1 data structure deserialization error
     
         The `SubstrateUnderrunError` exception indicates insufficient serialised
    -    data on input of a deserialisation routine.
    +    data on input of a de-serialization codec.
         """
    +
    +
    +class PyAsn1UnicodeError(PyAsn1Error, UnicodeError):
    +    """Unicode text processing error
    +
    +    The `PyAsn1UnicodeError` exception is a base class for errors relating to
    +    unicode text de/serialization.
    +
    +    Apart from inheriting from :class:`PyAsn1Error`, it also inherits from
    +    :class:`UnicodeError` to help the caller catching unicode-related errors.
    +    """
    +    def __init__(self, message, unicode_error=None):
    +        if isinstance(unicode_error, UnicodeError):
    +            UnicodeError.__init__(self, *unicode_error.args)
    +        PyAsn1Error.__init__(self, message)
    +
    +
    +class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError):
    +    """Unicode text decoding error
    +
    +    The `PyAsn1UnicodeDecodeError` exception represents a failure to
    +    deserialize unicode text.
    +
    +    Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
    +    from :class:`UnicodeDecodeError` to help the caller catching unicode-related
    +    errors.
    +    """
    +
    +
    +class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError):
    +    """Unicode text encoding error
    +
    +    The `PyAsn1UnicodeEncodeError` exception represents a failure to
    +    serialize unicode text.
    +
    +    Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
    +    from :class:`UnicodeEncodeError` to help the caller catching
    +    unicode-related errors.
    +    """
    +
    +
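
    Since everything still derives from PyAsn1Error, a single except clause
    keeps catching all pyasn1 failures (a minimal sketch, assuming a pyasn1
    build carrying these changes):

        from pyasn1 import error

        try:
            raise error.SubstrateUnderrunError('short read')
        except error.PyAsn1Error as exc:
            print(type(exc).__name__, exc)  # SubstrateUnderrunError short read
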
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/base.py b/server/www/packages/packages-windows/x86/pyasn1/type/base.py
    index adaab22..994f1c9 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/base.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/base.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import sys
    @@ -12,7 +12,8 @@ from pyasn1.type import constraint
     from pyasn1.type import tag
     from pyasn1.type import tagmap
     
    -__all__ = ['Asn1Item', 'Asn1ItemBase', 'AbstractSimpleAsn1Item', 'AbstractConstructedAsn1Item']
    +__all__ = ['Asn1Item', 'Asn1Type', 'SimpleAsn1Type',
    +           'ConstructedAsn1Type']
     
     
     class Asn1Item(object):
    @@ -25,7 +26,17 @@ class Asn1Item(object):
             return Asn1Item._typeCounter
     
     
    -class Asn1ItemBase(Asn1Item):
    +class Asn1Type(Asn1Item):
    +    """Base class for all classes representing ASN.1 types.
    +
    +    In the user code, |ASN.1| class is normally used only for telling
    +    ASN.1 objects from others.
    +
    +    Note
    +    ----
    +    As far as ASN.1 is concerned, the way to compare ASN.1 types
    +    is to use the :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    +    """
         #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing
         #: ASN.1 tag(s) associated with |ASN.1| type.
         tagSet = tag.TagSet()
    @@ -91,8 +102,8 @@ class Asn1ItemBase(Asn1Item):
             Returns
             -------
             : :class:`bool`
    -            :class:`True` if *other* is |ASN.1| type,
    -            :class:`False` otherwise.
    +            :obj:`True` if *other* is |ASN.1| type,
    +            :obj:`False` otherwise.
             """
             return (self is other or
                     (not matchTags or self.tagSet == other.tagSet) and
    @@ -115,8 +126,8 @@ class Asn1ItemBase(Asn1Item):
             Returns
             -------
                 : :class:`bool`
    -                :class:`True` if *other* is a subtype of |ASN.1| type,
    -                :class:`False` otherwise.
    +                :obj:`True` if *other* is a subtype of |ASN.1| type,
    +                :obj:`False` otherwise.
             """
             return (not matchTags or
                     (self.tagSet.isSuperTagSetOf(other.tagSet)) and
    @@ -146,9 +157,13 @@ class Asn1ItemBase(Asn1Item):
         def getSubtypeSpec(self):
             return self.subtypeSpec
     
    +    # backward compatibility
         def hasValue(self):
             return self.isValue
     
    +# Backward compatibility
    +Asn1ItemBase = Asn1Type
    +
     
     class NoValue(object):
         """Create a singleton instance of NoValue class.
    @@ -221,19 +236,31 @@ class NoValue(object):
             raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % attr)
     
         def __repr__(self):
    -        return '<%s object at 0x%x>' % (self.__class__.__name__, id(self))
    +        return '<%s object>' % self.__class__.__name__
     
     
     noValue = NoValue()
     
     
    -# Base class for "simple" ASN.1 objects. These are immutable.
    -class AbstractSimpleAsn1Item(Asn1ItemBase):
    +class SimpleAsn1Type(Asn1Type):
    +    """Base class for all simple classes representing ASN.1 types.
    +
    +    ASN.1 distinguishes types by their ability to hold other objects.
    +    Scalar types are known as *simple* in ASN.1.
    +
    +    In the user code, |ASN.1| class is normally used only for telling
    +    ASN.1 objects from others.
    +
    +    Note
    +    ----
    +    As far as ASN.1 is concerned, the way to compare ASN.1 types
    +    is to use the :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    +    """
         #: Default payload value
         defaultValue = noValue
     
         def __init__(self, value=noValue, **kwargs):
    -        Asn1ItemBase.__init__(self, **kwargs)
    +        Asn1Type.__init__(self, **kwargs)
             if value is noValue:
                 value = self.defaultValue
             else:
    @@ -248,19 +275,18 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
             self._value = value
     
         def __repr__(self):
    -        representation = '%s %s object at 0x%x' % (
    -            self.__class__.__name__, self.isValue and 'value' or 'schema', id(self)
    -        )
    +        representation = '%s %s object' % (
    +            self.__class__.__name__, self.isValue and 'value' or 'schema')
     
             for attr, value in self.readOnly.items():
                 if value:
    -                representation += ' %s %s' % (attr, value)
    +                representation += ', %s %s' % (attr, value)
     
             if self.isValue:
                 value = self.prettyPrint()
                 if len(value) > 32:
                     value = value[:16] + '...' + value[-16:]
    -            representation += ' payload [%s]' % value
    +            representation += ', payload [%s]' % value
     
             return '<%s>' % representation
     
    @@ -296,17 +322,18 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
         def isValue(self):
             """Indicate that |ASN.1| object represents ASN.1 value.
     
    -        If *isValue* is `False` then this object represents just ASN.1 schema.
    +        If *isValue* is :obj:`False` then this object represents just
    +        ASN.1 schema.
     
    -        If *isValue* is `True` then, in addition to its ASN.1 schema features,
    -        this object can also be used like a Python built-in object (e.g. `int`,
    -        `str`, `dict` etc.).
    +        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema
    +        features, this object can also be used like a Python built-in object
    +        (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).
     
             Returns
             -------
             : :class:`bool`
    -            :class:`False` if object represents just ASN.1 schema.
    -            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
    +            :obj:`False` if object represents just ASN.1 schema.
    +            :obj:`True` if object represents ASN.1 schema and can be used as a normal value.
     
             Note
             ----
    @@ -343,10 +370,10 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
     
                 value = self._value
     
    -        initilaizers = self.readOnly.copy()
    -        initilaizers.update(kwargs)
    +        initializers = self.readOnly.copy()
    +        initializers.update(kwargs)
     
    -        return self.__class__(value, **initilaizers)
    +        return self.__class__(value, **initializers)
     
         def subtype(self, value=noValue, **kwargs):
             """Create a specialization of |ASN.1| schema or value object.
    @@ -425,10 +452,12 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
         def prettyPrint(self, scope=0):
             return self.prettyOut(self._value)
     
    -    # noinspection PyUnusedLocal
         def prettyPrintType(self, scope=0):
             return '%s -> %s' % (self.tagSet, self.__class__.__name__)
     
    +# Backward compatibility
    +AbstractSimpleAsn1Item = SimpleAsn1Type
    +
     #
     # Constructed types:
     # * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
    @@ -449,67 +478,102 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
     #
     
     
    -class AbstractConstructedAsn1Item(Asn1ItemBase):
    +class ConstructedAsn1Type(Asn1Type):
    +    """Base class for all constructed classes representing ASN.1 types.
     
    -    #: If `True`, requires exact component type matching,
    +    ASN.1 distinguishes types by their ability to hold other objects.
    +    Those "nesting" types are known as *constructed* in ASN.1.
    +
    +    In the user code, |ASN.1| class is normally used only for telling
    +    ASN.1 objects from others.
    +
    +    Note
    +    ----
    +    As far as ASN.1 is concerned, the way to compare ASN.1 types
    +    is to use the :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    +    """
    +
    +    #: If :obj:`True`, requires exact component type matching,
         #: otherwise subtype relation is only enforced
         strictConstraints = False
     
         componentType = None
    -    sizeSpec = None
    +
    +    # backward compatibility, unused
    +    sizeSpec = constraint.ConstraintsIntersection()
     
         def __init__(self, **kwargs):
             readOnly = {
                 'componentType': self.componentType,
    +            # backward compatibility, unused
                 'sizeSpec': self.sizeSpec
             }
    +
    +        # backward compatibility: preserve legacy sizeSpec support
    +        kwargs = self._moveSizeSpec(**kwargs)
    +
             readOnly.update(kwargs)
     
    -        Asn1ItemBase.__init__(self, **readOnly)
    +        Asn1Type.__init__(self, **readOnly)
     
    -        self._componentValues = []
    +    def _moveSizeSpec(self, **kwargs):
    +        # backward compatibility, unused
    +        sizeSpec = kwargs.pop('sizeSpec', self.sizeSpec)
    +        if sizeSpec:
    +            subtypeSpec = kwargs.pop('subtypeSpec', self.subtypeSpec)
    +            if subtypeSpec:
    +                subtypeSpec += sizeSpec
    +
    +            else:
    +                subtypeSpec = sizeSpec
    +
    +            kwargs['subtypeSpec'] = subtypeSpec
    +
    +        return kwargs
     
         def __repr__(self):
    -        representation = '%s %s object at 0x%x' % (
    -            self.__class__.__name__, self.isValue and 'value' or 'schema', id(self)
    +        representation = '%s %s object' % (
    +            self.__class__.__name__, self.isValue and 'value' or 'schema'
             )
     
             for attr, value in self.readOnly.items():
                 if value is not noValue:
    -                representation += ' %s=%r' % (attr, value)
    +                representation += ', %s=%r' % (attr, value)
     
    -        if self.isValue and self._componentValues:
    -            representation += ' payload [%s]' % ', '.join([repr(x) for x in self._componentValues])
    +        if self.isValue and self.components:
    +            representation += ', payload [%s]' % ', '.join(
    +                [repr(x) for x in self.components])
     
             return '<%s>' % representation
     
         def __eq__(self, other):
    -        return self is other and True or self._componentValues == other
    +        return self is other or self.components == other
     
         def __ne__(self, other):
    -        return self._componentValues != other
    +        return self.components != other
     
         def __lt__(self, other):
    -        return self._componentValues < other
    +        return self.components < other
     
         def __le__(self, other):
    -        return self._componentValues <= other
    +        return self.components <= other
     
         def __gt__(self, other):
    -        return self._componentValues > other
    +        return self.components > other
     
         def __ge__(self, other):
    -        return self._componentValues >= other
    +        return self.components >= other
     
         if sys.version_info[0] <= 2:
             def __nonzero__(self):
    -            return self._componentValues and True or False
    +            return bool(self.components)
         else:
             def __bool__(self):
    -            return self._componentValues and True or False
    +            return bool(self.components)
     
    -    def __len__(self):
    -        return len(self._componentValues)
    +    @property
    +    def components(self):
    +        raise error.PyAsn1Error('Method not implemented')
     
         def _cloneComponentValues(self, myClone, cloneValueFlag):
             pass
    @@ -535,15 +599,14 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
             Note
             ----
             Due to the mutable nature of the |ASN.1| object, even if no arguments
    -        are supplied, new |ASN.1| object will always be created as a shallow
    -        copy of `self`.
    +        are supplied, a new |ASN.1| object will be created and returned.
             """
             cloneValueFlag = kwargs.pop('cloneValueFlag', False)
     
    -        initilaizers = self.readOnly.copy()
    -        initilaizers.update(kwargs)
    +        initializers = self.readOnly.copy()
    +        initializers.update(kwargs)
     
    -        clone = self.__class__(**initilaizers)
    +        clone = self.__class__(**initializers)
     
             if cloneValueFlag:
                 self._cloneComponentValues(clone, cloneValueFlag)
    @@ -588,9 +651,8 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
     
             Note
             ----
    -        Due to the immutable nature of the |ASN.1| object, if no arguments
    -        are supplied, no new |ASN.1| object will be created and `self` will
    -        be returned instead.
    +        Due to the mutable nature of the |ASN.1| object, even if no arguments
    +        are supplied, a new |ASN.1| object will be created and returned.
             """
     
             initializers = self.readOnly.copy()
    @@ -615,9 +677,6 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
     
             return clone
     
    -    def verifySizeSpec(self):
    -        self.sizeSpec(self)
    -
         def getComponentByPosition(self, idx):
             raise error.PyAsn1Error('Method not implemented')
     
    @@ -631,9 +690,6 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
                 self[k] = kwargs[k]
             return self
     
    -    def clear(self):
    -        self._componentValues = []
    -
         # backward compatibility
     
         def setDefaultComponents(self):
    @@ -641,3 +697,11 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
     
         def getComponentType(self):
             return self.componentType
    +
    +    # backward compatibility, unused
    +    def verifySizeSpec(self):
    +        self.subtypeSpec(self)
    +
    +
    +# Backward compatibility
    +AbstractConstructedAsn1Item = ConstructedAsn1Type
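
    The old class names survive as aliases, so downstream isinstance()
    checks keep working across the rename (a sketch, assuming a pyasn1
    build carrying these changes):

        from pyasn1.type import base, univ

        assert base.AbstractSimpleAsn1Item is base.SimpleAsn1Type
        assert base.AbstractConstructedAsn1Item is base.ConstructedAsn1Type

        schema = univ.Integer()   # no payload yet: a schema object
        assert not schema.isValue

        value = schema.clone(42)  # clone() always returns a new object
        assert value.isValue and value == 42
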
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/char.py b/server/www/packages/packages-windows/x86/pyasn1/type/char.py
    index 493badb..06074da 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/char.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/char.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import sys
    @@ -21,21 +21,27 @@ noValue = univ.noValue
     class AbstractCharacterString(univ.OctetString):
         """Creates |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`.
    -    When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
    +    its objects are immutable and duck-type Python 2 :class:`str` or Python 3
    +    :class:`bytes`. When used in octet-stream context, |ASN.1| type assumes
    +    "|encoding|" encoding.
     
         Keyword Args
         ------------
         value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
    -        unicode object (Python 2) or string (Python 3), alternatively string
    -        (Python 2) or bytes (Python 3) representing octet-stream of serialised
    -        unicode string (note `encoding` parameter) or |ASN.1| class instance.
    +        :class:`unicode` object (Python 2) or :class:`str` (Python 3),
    +        alternatively :class:`str` (Python 2) or :class:`bytes` (Python 3)
    +        representing octet-stream of serialised unicode string
    +        (note `encoding` parameter) or |ASN.1| class instance.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         encoding: :py:class:`str`
             Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
    @@ -44,7 +50,7 @@ class AbstractCharacterString(univ.OctetString):
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
         """
     
    @@ -55,8 +61,10 @@ class AbstractCharacterString(univ.OctetString):
                     return self._value.encode(self.encoding)
     
                 except UnicodeEncodeError:
    -                raise error.PyAsn1Error(
    -                    "Can't encode string '%s' with codec %s" % (self._value, self.encoding)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeEncodeError(
    +                    "Can't encode string '%s' with codec "
    +                    "%s" % (self._value, self.encoding), exc
                     )
     
             def __unicode__(self):
    @@ -76,8 +84,10 @@ class AbstractCharacterString(univ.OctetString):
                         return unicode(value)
     
                 except (UnicodeDecodeError, LookupError):
    -                raise error.PyAsn1Error(
    -                    "Can't decode string '%s' with codec %s" % (value, self.encoding)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeDecodeError(
    +                    "Can't decode string '%s' with codec "
    +                    "%s" % (value, self.encoding), exc
                     )
     
             def asOctets(self, padding=True):
    @@ -95,8 +105,10 @@ class AbstractCharacterString(univ.OctetString):
                 try:
                     return self._value.encode(self.encoding)
                 except UnicodeEncodeError:
    -                raise error.PyAsn1Error(
    -                    "Can't encode string '%s' with codec %s" % (self._value, self.encoding)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeEncodeError(
    +                    "Can't encode string '%s' with codec "
    +                    "%s" % (self._value, self.encoding), exc
                     )
     
             def prettyIn(self, value):
    @@ -113,8 +125,10 @@ class AbstractCharacterString(univ.OctetString):
                         return str(value)
     
                 except (UnicodeDecodeError, LookupError):
    -                raise error.PyAsn1Error(
    -                    "Can't decode string '%s' with codec %s" % (value, self.encoding)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeDecodeError(
    +                    "Can't decode string '%s' with codec "
    +                    "%s" % (value, self.encoding), exc
                     )
     
             def asOctets(self, padding=True):
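
    The net effect of the char.py changes: a decoding failure is now
    catchable both as a UnicodeError and as a PyAsn1Error (a sketch,
    assuming a pyasn1 build carrying these changes):

        from pyasn1.type import char
        from pyasn1 import error

        try:
            char.UTF8String(b'\xff\xfe')   # not valid UTF-8
        except UnicodeDecodeError as exc:  # also matches error.PyAsn1Error
            assert isinstance(exc, error.PyAsn1UnicodeDecodeError)
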
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/constraint.py b/server/www/packages/packages-windows/x86/pyasn1/type/constraint.py
    index a704331..8f152e9 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/constraint.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/constraint.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
    -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
    +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     # Original concept and code by Mike C. Fletcher.
    @@ -37,10 +37,11 @@ class AbstractConstraint(object):
                 )
     
         def __repr__(self):
    -        representation = '%s object at 0x%x' % (self.__class__.__name__, id(self))
    +        representation = '%s object' % (self.__class__.__name__)
     
             if self._values:
    -            representation += ' consts %s' % ', '.join([repr(x) for x in self._values])
    +            representation += ', consts %s' % ', '.join(
    +                [repr(x) for x in self._values])
     
             return '<%s>' % representation
     
    @@ -102,12 +103,17 @@ class SingleValueConstraint(AbstractConstraint):
         The SingleValueConstraint satisfies any value that
         is present in the set of permitted values.
     
    +    Objects of this type are iterable (emitting constraint values) and
    +    can act as operands for some arithmetic operations e.g. addition
    +    and subtraction. The latter can be used for combining multiple
    +    SingleValueConstraint objects into one.
    +
         The SingleValueConstraint object can be applied to
         any ASN.1 type.
     
         Parameters
         ----------
    -    \*values: :class:`int`
    +    *values: :class:`int`
             Full set of values permitted by this constraint object.
     
         Examples
    @@ -136,6 +142,23 @@ class SingleValueConstraint(AbstractConstraint):
             if value not in self._set:
                 raise error.ValueConstraintError(value)
     
    +    # Constraints can be merged or reduced
    +
    +    def __contains__(self, item):
    +        return item in self._set
    +
    +    def __iter__(self):
    +        return iter(self._set)
    +
    +    def __sub__(self, constraint):
    +        return self.__class__(*(self._set.difference(constraint)))
    +
    +    def __add__(self, constraint):
    +        return self.__class__(*(self._set.union(constraint)))
    +
     
     class ContainedSubtypeConstraint(AbstractConstraint):
         """Create a ContainedSubtypeConstraint object.
    @@ -149,7 +172,7 @@ class ContainedSubtypeConstraint(AbstractConstraint):
     
         Parameters
         ----------
    -    \*values:
    +    *values:
             Full set of values and constraint objects permitted
             by this constraint object.
     
    @@ -304,17 +327,21 @@ class PermittedAlphabetConstraint(SingleValueConstraint):
         string for as long as all its characters are present in
         the set of permitted characters.
     
    +    Objects of this type are iterable (emitting constraint values) and
    +    can act as operands for some arithmetic operations e.g. addition
    +    and subtraction.
    +
         The PermittedAlphabetConstraint object can only be applied
         to the :ref:`character ASN.1 types <type.char>` such as
         :class:`~pyasn1.type.char.IA5String`.
     
         Parameters
         ----------
    -    \*alphabet: :class:`str`
    +    *alphabet: :class:`str`
             Full set of characters permitted by this constraint object.
     
    -    Examples
    -    --------
    +    Example
    +    -------
         .. code-block:: python
     
             class BooleanValue(IA5String):
    @@ -331,6 +358,42 @@ class PermittedAlphabetConstraint(SingleValueConstraint):
     
             # this will raise ValueConstraintError
             garbage = BooleanValue('TAF')
    +
    +    ASN.1 `FROM ... EXCEPT ...` clause can be modelled by combining multiple
    +    PermittedAlphabetConstraint objects into one:
    +
    +    Example
    +    -------
    +    .. code-block:: python
    +
    +        class Lipogramme(IA5String):
    +            '''
    +            ASN.1 specification:
    +
    +            Lipogramme ::=
    +                IA5String (FROM (ALL EXCEPT ("e"|"E")))
    +            '''
    +            subtypeSpec = (
    +                PermittedAlphabetConstraint(*string.printable) -
    +                PermittedAlphabetConstraint('e', 'E')
    +            )
    +
    +        # this will succeed
    +        lipogramme = Lipogramme('A work of fiction?')
    +
    +        # this will raise ValueConstraintError
    +        lipogramme = Lipogramme('Eel')
    +
    +    Note
    +    ----
    +    Although `ConstraintsExclusion` object could seemingly be used for this
    +    purpose, practically, for it to work, it needs to represent its operand
    +    constraints as sets and intersect one with the other. That would require
    +    the insight into the constraint values (and their types) that are otherwise
    +    hidden inside the constraint object.
    +
    +    Therefore it's more practical to model `EXCEPT` clause at
    +    `PermittedAlphabetConstraint` level instead.
         """
         def _setValues(self, values):
             self._values = values
    @@ -341,6 +404,151 @@ class PermittedAlphabetConstraint(SingleValueConstraint):
                 raise error.ValueConstraintError(value)
     
     
    +class ComponentPresentConstraint(AbstractConstraint):
    +    """Create a ComponentPresentConstraint object.
    +
    +    The ComponentPresentConstraint is only satisfied when the value
    +    is not `None`.
    +
    +    The ComponentPresentConstraint object is typically used with
    +    `WithComponentsConstraint`.
    +
    +    Examples
    +    --------
    +    .. code-block:: python
    +
    +        present = ComponentPresentConstraint()
    +
    +        # this will succeed
    +        present('whatever')
    +
    +        # this will raise ValueConstraintError
    +        present(None)
    +    """
    +    def _setValues(self, values):
    +        self._values = ('',)
    +
    +        if values:
    +            raise error.PyAsn1Error('No arguments expected')
    +
    +    def _testValue(self, value, idx):
    +        if value is None:
    +            raise error.ValueConstraintError(
    +                'Component is not present')
    +
    +
    +class ComponentAbsentConstraint(AbstractConstraint):
    +    """Create a ComponentAbsentConstraint object.
    +
    +    The ComponentAbsentConstraint is only satisfied when the value
    +    is `None`.
    +
    +    The ComponentAbsentConstraint object is typically used with
    +    `WithComponentsConstraint`.
    +
    +    Examples
    +    --------
    +    .. code-block:: python
    +
    +        absent = ComponentAbsentConstraint()
    +
    +        # this will succeed
    +        absent(None)
    +
    +        # this will raise ValueConstraintError
    +        absent('whatever')
    +    """
    +    def _setValues(self, values):
    +        self._values = ('',)
    +
    +        if values:
    +            raise error.PyAsn1Error('No arguments expected')
    +
    +    def _testValue(self, value, idx):
    +        if value is not None:
    +            raise error.ValueConstraintError(
    +                'Component is not absent: %r' % value)
    +
    +
    +class WithComponentsConstraint(AbstractConstraint):
    +    """Create a WithComponentsConstraint object.
    +
    +    The `WithComponentsConstraint` satisfies any mapping object that has
    +    constrained fields present or absent, as indicated by
    +    `ComponentPresentConstraint` and `ComponentAbsentConstraint`
    +    objects respectively.
    +
    +    The `WithComponentsConstraint` object is typically applied
    +    to :class:`~pyasn1.type.univ.Set` or
    +    :class:`~pyasn1.type.univ.Sequence` types.
    +
    +    Parameters
    +    ----------
    +    *fields: :class:`tuple`
    +        Zero or more tuples of (`field`, `constraint`) indicating constrained
    +        fields.
    +
    +    Notes
    +    -----
    +    On top of the primary use of `WithComponentsConstraint` (ensuring presence
    +    or absence of particular components of a :class:`~pyasn1.type.univ.Set` or
    +    :class:`~pyasn1.type.univ.Sequence`), it is also possible to pass any other
    +    constraint objects or their combinations. In case of scalar fields, these
    +    constraints will be verified in addition to the constraints belonging to
    +    scalar components themselves. However, formally, these additional
    +    constraints do not change the type of these ASN.1 objects.
    +
    +    Examples
    +    --------
    +
    +    .. code-block:: python
    +
    +        class Item(Sequence):  #  Set is similar
    +            '''
    +            ASN.1 specification:
    +
    +            Item ::= SEQUENCE {
    +                id    INTEGER OPTIONAL,
    +                name  OCTET STRING OPTIONAL
    +            } WITH COMPONENTS id PRESENT, name ABSENT | id ABSENT, name PRESENT
    +            '''
    +            componentType = NamedTypes(
    +                OptionalNamedType('id', Integer()),
    +                OptionalNamedType('name', OctetString())
    +            )
    +            withComponents = ConstraintsUnion(
    +                WithComponentsConstraint(
    +                    ('id', ComponentPresentConstraint()),
    +                    ('name', ComponentAbsentConstraint())
    +                ),
    +                WithComponentsConstraint(
    +                    ('id', ComponentAbsentConstraint()),
    +                    ('name', ComponentPresentConstraint())
    +                )
    +            )
    +
    +        item = Item()
    +
    +        # This will succeed
    +        item['id'] = 1
    +
    +        # This will succeed
    +        item.reset()
    +        item['name'] = 'John'
    +
    +        # This will fail (on encoding)
    +        item.reset()
    +        item['id'] = 1
    +        item['name'] = 'John'
    +    """
    +    def _testValue(self, value, idx):
    +        for field, constraint in self._values:
    +            constraint(value.get(field))
    +
    +    def _setValues(self, values):
    +        AbstractConstraint._setValues(self, values)
    +
    +
     # This is a bit kludgy, meaning two op modes within a single constraint
     class InnerTypeConstraint(AbstractConstraint):
         """Value must satisfy the type and presence constraints"""
    @@ -352,7 +560,7 @@ class InnerTypeConstraint(AbstractConstraint):
                 if idx not in self.__multipleTypeConstraint:
                     raise error.ValueConstraintError(value)
                 constraint, status = self.__multipleTypeConstraint[idx]
    -            if status == 'ABSENT':  # XXX presense is not checked!
    +            if status == 'ABSENT':  # XXX presence is not checked!
                     raise error.ValueConstraintError(value)
                 constraint(value)
     
    @@ -380,49 +588,41 @@ class ConstraintsExclusion(AbstractConstraint):
     
         Parameters
         ----------
    -    constraint:
    -        Constraint or logic operator object.
    +    *constraints:
    +        Constraint or logic operator objects.
     
         Examples
         --------
         .. code-block:: python
     
    -        class Lipogramme(IA5STRING):
    -            '''
    -            ASN.1 specification:
    -
    -            Lipogramme ::=
    -                IA5String (FROM (ALL EXCEPT ("e"|"E")))
    -            '''
    +        class LuckyNumber(Integer):
                 subtypeSpec = ConstraintsExclusion(
    -                PermittedAlphabetConstraint('e', 'E')
    +                SingleValueConstraint(13)
                 )
     
             # this will succeed
    -        lipogramme = Lipogramme('A work of fiction?')
    +        luckyNumber = LuckyNumber(12)
     
             # this will raise ValueConstraintError
    -        lipogramme = Lipogramme('Eel')
    +        luckyNumber = LuckyNumber(13)
     
    -    Warning
    -    -------
    -    The above example involving PermittedAlphabetConstraint might
    -    not work due to the way how PermittedAlphabetConstraint works.
    -    The other constraints might work with ConstraintsExclusion
    -    though.
    +    Note
    +    ----
    +    The `FROM ... EXCEPT ...` ASN.1 clause should be modeled by combining
    +    constraint objects into one. See `PermittedAlphabetConstraint` for more
    +    information.
         """
         def _testValue(self, value, idx):
    -        try:
    -            self._values[0](value, idx)
    -        except error.ValueConstraintError:
    -            return
    -        else:
    +        for constraint in self._values:
    +            try:
    +                constraint(value, idx)
    +
    +            except error.ValueConstraintError:
    +                continue
    +
                 raise error.ValueConstraintError(value)
     
         def _setValues(self, values):
    -        if len(values) != 1:
    -            raise error.PyAsn1Error('Single constraint expected')
    -
             AbstractConstraint._setValues(self, values)
     
     
    @@ -467,7 +667,7 @@ class ConstraintsIntersection(AbstractConstraintSet):
     
         Parameters
         ----------
    -    \*constraints:
    +    *constraints:
             Constraint or logic operator objects.
     
         Examples
    @@ -500,8 +700,8 @@ class ConstraintsIntersection(AbstractConstraintSet):
     class ConstraintsUnion(AbstractConstraintSet):
         """Create a ConstraintsUnion logic operator object.
     
    -    The ConstraintsUnion logic operator only succeeds if
    -    *at least a single* operand succeeds.
     +    The ConstraintsUnion logic operator succeeds if
     +    *at least one* operand succeeds.
     
         The ConstraintsUnion object can be applied to
         any constraint and logic operator objects.
    @@ -511,7 +711,7 @@ class ConstraintsUnion(AbstractConstraintSet):
     
         Parameters
         ----------
    -    \*constraints:
    +    *constraints:
             Constraint or logic operator objects.
     
         Examples
    @@ -525,7 +725,7 @@ class ConstraintsUnion(AbstractConstraintSet):
                 CapitalOrSmall ::=
                     IA5String (FROM ("A".."Z") | FROM ("a".."z"))
                 '''
    -            subtypeSpec = ConstraintsIntersection(
    +            subtypeSpec = ConstraintsUnion(
                     PermittedAlphabetConstraint('A', 'Z'),
                     PermittedAlphabetConstraint('a', 'z')
                 )
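
The two operator changes above are easiest to see side by side. A hedged sketch (the class names come from the docstrings; the `string`-based alphabets are illustrative):

```python
import string

from pyasn1.error import PyAsn1Error
from pyasn1.type import char, constraint, univ


class LuckyNumber(univ.Integer):
    # ALL EXCEPT 13 -- ConstraintsExclusion now takes any number of operands
    subtypeSpec = constraint.ConstraintsExclusion(
        constraint.SingleValueConstraint(13))


class CapitalOrSmall(char.IA5String):
    # FROM ("A".."Z") | FROM ("a".."z") -- a union, not an intersection
    subtypeSpec = constraint.ConstraintsUnion(
        constraint.PermittedAlphabetConstraint(*string.ascii_uppercase),
        constraint.PermittedAlphabetConstraint(*string.ascii_lowercase))


print(LuckyNumber(12))          # succeeds
print(CapitalOrSmall('hello'))  # all-lowercase satisfies one operand

try:
    LuckyNumber(13)
except PyAsn1Error as exc:
    print('rejected:', exc)
```
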
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/error.py b/server/www/packages/packages-windows/x86/pyasn1/type/error.py
    index b2056bd..80fcf3b 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/error.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/error.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1.error import PyAsn1Error
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/namedtype.py b/server/www/packages/packages-windows/x86/pyasn1/type/namedtype.py
    index f162d19..cbc1429 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/namedtype.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/namedtype.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import sys
    @@ -49,9 +49,10 @@ class NamedType(object):
             representation = '%s=%r' % (self.name, self.asn1Object)
     
             if self.openType:
    -            representation += ' openType: %r' % self.openType
    +            representation += ', open type %r' % self.openType
     
    -        return '<%s object at 0x%x type %s>' % (self.__class__.__name__, id(self), representation)
    +        return '<%s object, type %s>' % (
    +            self.__class__.__name__, representation)
     
         def __eq__(self, other):
             return self.__nameAndType == other
    @@ -173,7 +174,8 @@ class NamedTypes(object):
     
         def __repr__(self):
             representation = ', '.join(['%r' % x for x in self.__namedTypes])
    -        return '<%s object at 0x%x types %s>' % (self.__class__.__name__, id(self), representation)
    +        return '<%s object, types %s>' % (
    +            self.__class__.__name__, representation)
     
         def __eq__(self, other):
             return self.__namedTypes == other
    @@ -265,18 +267,18 @@ class NamedTypes(object):
             return nameToPosMap
     
         def __computeAmbiguousTypes(self):
    -        ambigiousTypes = {}
    -        partialAmbigiousTypes = ()
    +        ambiguousTypes = {}
    +        partialAmbiguousTypes = ()
             for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))):
                 if namedType.isOptional or namedType.isDefaulted:
    -                partialAmbigiousTypes = (namedType,) + partialAmbigiousTypes
    +                partialAmbiguousTypes = (namedType,) + partialAmbiguousTypes
                 else:
    -                partialAmbigiousTypes = (namedType,)
    -            if len(partialAmbigiousTypes) == len(self.__namedTypes):
    -                ambigiousTypes[idx] = self
    +                partialAmbiguousTypes = (namedType,)
    +            if len(partialAmbiguousTypes) == len(self.__namedTypes):
    +                ambiguousTypes[idx] = self
                 else:
    -                ambigiousTypes[idx] = NamedTypes(*partialAmbigiousTypes, **dict(terminal=True))
    -        return ambigiousTypes
    +                ambiguousTypes[idx] = NamedTypes(*partialAmbiguousTypes, **dict(terminal=True))
    +        return ambiguousTypes
     
         def getTypeByPosition(self, idx):
             """Return ASN.1 type object by its position in fields set.
    @@ -293,7 +295,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If given position is out of fields range
             """
             try:
    @@ -317,7 +319,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
             """
             try:
    @@ -341,7 +343,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If given field name is not present in callee *NamedTypes*
             """
             try:
    @@ -365,7 +367,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If *name* is not present or not unique within callee *NamedTypes*
             """
             try:
    @@ -394,7 +396,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If given position is out of fields range
             """
             try:
    @@ -426,7 +428,7 @@ class NamedTypes(object):
     
             Raises
             ------
    -        : :class:`~pyasn1.error.PyAsn1Error`
    +        ~pyasn1.error.PyAsn1Error
                 If *tagSet* is not present or not unique within callee *NamedTypes*
                 or *idx* is out of fields range
             """
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/namedval.py b/server/www/packages/packages-windows/x86/pyasn1/type/namedval.py
    index 59257e4..4247597 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/namedval.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/namedval.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     # ASN.1 named integers
    @@ -23,7 +23,7 @@ class NamedValues(object):
     
         Parameters
         ----------
    -    \*args: variable number of two-element :py:class:`tuple`
    +    *args: variable number of two-element :py:class:`tuple`
     
             name: :py:class:`str`
                 Value label
    @@ -109,7 +109,8 @@ class NamedValues(object):
             if len(representation) > 64:
                 representation = representation[:32] + '...' + representation[-32:]
     
    -        return '<%s object 0x%x enums %s>' % (self.__class__.__name__, id(self), representation)
    +        return '<%s object, enums %s>' % (
    +            self.__class__.__name__, representation)
     
         def __eq__(self, other):
             return dict(self) == other
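
For context, `NamedValues` in action; the `ErrorStatus` class below is made up for illustration:

```python
from pyasn1.type import namedval, univ


class ErrorStatus(univ.Integer):
    namedValues = namedval.NamedValues(('noError', 0), ('tooBig', 1))


status = ErrorStatus('tooBig')  # initialize by symbolic label
print(int(status))              # -> 1
print(status.prettyPrint())     # -> 'tooBig'
```
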
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/opentype.py b/server/www/packages/packages-windows/x86/pyasn1/type/opentype.py
    index d14ab34..29645f0 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/opentype.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/opentype.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     
    @@ -11,11 +11,22 @@ __all__ = ['OpenType']
     class OpenType(object):
         """Create ASN.1 type map indexed by a value
     
    -    The *DefinedBy* object models the ASN.1 *DEFINED BY* clause which maps
    -    values to ASN.1 types in the context of the ASN.1 SEQUENCE/SET type.
     +    The *OpenType* object models an untyped field of a constructed ASN.1
     +    type. In ASN.1 syntax it is usually represented by the
     +    `ANY DEFINED BY` clause for scalars, or by the `SET OF ANY DEFINED BY`
     +    and `SEQUENCE OF ANY DEFINED BY` clauses for container types. Typically
     +    used together with the :class:`~pyasn1.type.univ.Any` object.
     
    -    OpenType objects are duck-type a read-only Python :class:`dict` objects,
    -    however the passed `typeMap` is stored by reference.
     +    OpenType objects duck-type read-only Python :class:`dict` objects;
     +    however, the passed `typeMap` is not copied, but stored by reference.
     +    That means the user can manipulate `typeMap` at run time and have
     +    the changes reflected in *OpenType* object behavior.
     
         Parameters
         ----------
    @@ -28,12 +39,14 @@ class OpenType(object):
     
         Examples
         --------
    +
    +    For untyped scalars:
    +
         .. code-block:: python
     
             openType = OpenType(
    -            'id',
    -            {1: Integer(),
    -             2: OctetString()}
    +            'id', {1: Integer(),
    +                   2: OctetString()}
             )
             Sequence(
                 componentType=NamedTypes(
    @@ -41,6 +54,22 @@ class OpenType(object):
                     NamedType('blob', Any(), openType=openType)
                 )
             )
    +
    +    For untyped `SET OF` or `SEQUENCE OF` vectors:
    +
    +    .. code-block:: python
    +
    +        openType = OpenType(
    +            'id', {1: Integer(),
    +                   2: OctetString()}
    +        )
    +        Sequence(
    +            componentType=NamedTypes(
    +                NamedType('id', Integer()),
    +                NamedType('blob', SetOf(componentType=Any()),
    +                          openType=openType)
    +            )
    +        )
         """
     
         def __init__(self, name, typeMap=None):
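
A runnable sketch of the scalar `ANY DEFINED BY` flavor documented above; the `decodeOpenTypes` flag assumes a pyasn1 release (>= 0.4.6) whose codecs honor open types, and `Record` is an illustrative name:

```python
from pyasn1.codec.ber import decoder, encoder
from pyasn1.type import namedtype, opentype, univ

openType = opentype.OpenType('id', {1: univ.Integer(),
                                    2: univ.OctetString()})


class Record(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('id', univ.Integer()),
        namedtype.NamedType('blob', univ.Any(), openType=openType))


record = Record()
record['id'] = 1
record['blob'] = encoder.encode(univ.Integer(42))  # payload picked by 'id'

substrate = encoder.encode(record)

# with decodeOpenTypes=True the decoder consults the id -> type map
decoded, _ = decoder.decode(substrate, asn1Spec=Record(),
                            decodeOpenTypes=True)
print(decoded['blob'])  # -> 42, already decoded as an Integer
```
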
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/tag.py b/server/www/packages/packages-windows/x86/pyasn1/type/tag.py
    index 95c226f..b88a734 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/tag.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/tag.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import error
    @@ -64,8 +64,10 @@ class Tag(object):
             self.__hash = hash(self.__tagClassId)
     
         def __repr__(self):
    -        representation = '[%s:%s:%s]' % (self.__tagClass, self.__tagFormat, self.__tagId)
    -        return '<%s object at 0x%x tag %s>' % (self.__class__.__name__, id(self), representation)
    +        representation = '[%s:%s:%s]' % (
    +            self.__tagClass, self.__tagFormat, self.__tagId)
    +        return '<%s object, tag %s>' % (
    +            self.__class__.__name__, representation)
     
         def __eq__(self, other):
             return self.__tagClassId == other
    @@ -199,7 +201,7 @@ class TagSet(object):
             else:
                 representation = 'untagged'
     
    -        return '<%s object at 0x%x %s>' % (self.__class__.__name__, id(self), representation)
    +        return '<%s object, %s>' % (self.__class__.__name__, representation)
     
         def __add__(self, superTag):
             return self.__class__(self.__baseTag, *self.__superTags + (superTag,))
    @@ -318,7 +320,7 @@ class TagSet(object):
             Returns
             -------
             : :py:class:`bool`
    -            `True` if callee is a supertype of *tagSet*
    +            :obj:`True` if callee is a supertype of *tagSet*
             """
             if len(tagSet) < self.__lenOfSuperTags:
                 return False
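
The repr changes above are cosmetic; tagging semantics are unchanged. A quick sketch (output shapes are indicative only, and `TaggedInteger` is hypothetical):

```python
from pyasn1.type import tag, univ

t = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)
print(repr(t))  # e.g. <Tag object, tag [128:0:0]> in the new style


class TaggedInteger(univ.Integer):
    # [0] IMPLICIT INTEGER -- replaces the base tag outright
    tagSet = univ.Integer.tagSet.tagImplicitly(t)


explicit = univ.Integer.tagSet.tagExplicitly(
    tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))

# explicit tagging appends a tag, so the base tag set stays a supertype
print(univ.Integer.tagSet.isSuperTagSetOf(explicit))              # -> True
print(univ.Integer.tagSet.isSuperTagSetOf(TaggedInteger.tagSet))  # -> False
```
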
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/tagmap.py b/server/www/packages/packages-windows/x86/pyasn1/type/tagmap.py
    index a9d237f..6f5163b 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/tagmap.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/tagmap.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     from pyasn1 import error
    @@ -56,16 +56,16 @@ class TagMap(object):
             return iter(self.__presentTypes)
     
         def __repr__(self):
    -        representation = '%s object at 0x%x' % (self.__class__.__name__, id(self))
    +        representation = '%s object' % self.__class__.__name__
     
             if self.__presentTypes:
    -            representation += ' present %s' % repr(self.__presentTypes)
    +            representation += ', present %s' % repr(self.__presentTypes)
     
             if self.__skipTypes:
    -            representation += ' skip %s' % repr(self.__skipTypes)
    +            representation += ', skip %s' % repr(self.__skipTypes)
     
             if self.__defaultType is not None:
    -            representation += ' default %s' % repr(self.__defaultType)
    +            representation += ', default %s' % repr(self.__defaultType)
     
             return '<%s>' % representation
     
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/univ.py b/server/www/packages/packages-windows/x86/pyasn1/type/univ.py
    index 898cf25..aa688b2 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/univ.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/univ.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import math
    @@ -31,28 +31,32 @@ __all__ = ['Integer', 'Boolean', 'BitString', 'OctetString', 'Null',
     # "Simple" ASN.1 types (yet incomplete)
     
     
    -class Integer(base.AbstractSimpleAsn1Item):
    -    """Create |ASN.1| type or object.
    +class Integer(base.SimpleAsn1Type):
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`int` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`int` objects.
     
         Keyword Args
         ------------
         value: :class:`int`, :class:`str` or |ASN.1| object
    -        Python integer or string literal or |ASN.1| class instance.
    +        Python :class:`int` or :class:`str` literal or |ASN.1| class
    +        instance. If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
             Object representing non-default symbolic aliases for numbers
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -94,13 +98,13 @@ class Integer(base.AbstractSimpleAsn1Item):
         namedValues = namedval.NamedValues()
     
         # Optimization for faster codec lookup
    -    typeId = base.AbstractSimpleAsn1Item.getTypeId()
    +    typeId = base.SimpleAsn1Type.getTypeId()
     
         def __init__(self, value=noValue, **kwargs):
             if 'namedValues' not in kwargs:
                 kwargs['namedValues'] = self.namedValues
     
    -        base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs)
    +        base.SimpleAsn1Type.__init__(self, value, **kwargs)
     
         def __and__(self, value):
             return self.clone(self._value & value)
    @@ -187,7 +191,7 @@ class Integer(base.AbstractSimpleAsn1Item):
             def __rdivmod__(self, value):
                 return self.clone(divmod(value, self._value))
     
    -        __hash__ = base.AbstractSimpleAsn1Item.__hash__
    +        __hash__ = base.SimpleAsn1Type.__hash__
     
         def __int__(self):
             return int(self._value)
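
The reworded docstring stresses two points: constraints on simple types fire at instantiation, and a valueless call produces a schema object. A sketch (`Percent` is a hypothetical subtype):

```python
from pyasn1.error import PyAsn1Error
from pyasn1.type import constraint, univ


class Percent(univ.Integer):
    subtypeSpec = constraint.ValueRangeConstraint(0, 100)


schema = Percent()     # no value given -> schema object
print(schema.isValue)  # -> False

value = Percent(42)    # constraints are verified right here
print(value + 1)       # duck-types int -> 43

try:
    Percent(200)
except PyAsn1Error as exc:
    print('constraint violated:', exc)
```
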
    @@ -276,27 +280,31 @@ class Integer(base.AbstractSimpleAsn1Item):
     
     
     class Boolean(Integer):
    -    """Create |ASN.1| type or object.
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`int` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`int` objects.
     
         Keyword Args
         ------------
         value: :class:`int`, :class:`str` or |ASN.1| object
    -        Python integer or boolean or string literal or |ASN.1| class instance.
    +        Python :class:`int` or :class:`str` literal or |ASN.1| class
    +        instance. If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
     +        Object representing non-default ASN.1 subtype constraint(s). Constraints
     +        verification for |ASN.1| type occurs automatically on object
     +        instantiation.
     
         namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
             Object representing non-default symbolic aliases for numbers
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -355,23 +363,27 @@ class SizedInteger(SizedIntegerBase):
             return self.bitLength
     
     
    -class BitString(base.AbstractSimpleAsn1Item):
    +class BitString(base.SimpleAsn1Type):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type both Python :class:`tuple` (as a tuple
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type both Python :class:`tuple` (as a tuple
         of bits) and :class:`int` objects.
     
         Keyword Args
         ------------
         value: :class:`int`, :class:`str` or |ASN.1| object
    -        Python integer or string literal representing binary or hexadecimal
    -        number or sequence of integer bits or |ASN.1| object.
    +        Python :class:`int` or :class:`str` literal representing binary
    +        or hexadecimal number or sequence of integer bits or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
             Object representing non-default symbolic aliases for numbers
    @@ -386,7 +398,7 @@ class BitString(base.AbstractSimpleAsn1Item):
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -432,7 +444,7 @@ class BitString(base.AbstractSimpleAsn1Item):
         namedValues = namedval.NamedValues()
     
         # Optimization for faster codec lookup
    -    typeId = base.AbstractSimpleAsn1Item.getTypeId()
    +    typeId = base.SimpleAsn1Type.getTypeId()
     
         defaultBinValue = defaultHexValue = noValue
     
    @@ -461,7 +473,7 @@ class BitString(base.AbstractSimpleAsn1Item):
             if 'namedValues' not in kwargs:
                 kwargs['namedValues'] = self.namedValues
     
    -        base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs)
    +        base.SimpleAsn1Type.__init__(self, value, **kwargs)
     
         def __str__(self):
             return self.asBinary()
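
The `__str__` shown above returns the binary string; together with the tuple-of-bits and int duck-typing this gives, for instance:

```python
from pyasn1.type import univ

bits = univ.BitString(binValue='101100')

print(str(bits))         # -> '101100', via asBinary()
print(bits[0], bits[1])  # tuple-of-bits duck-typing -> 1 0
print(int(bits))         # int duck-typing -> 44
```
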
    @@ -720,24 +732,30 @@ except NameError:  # Python 2.4
             return True
     
     
    -class OctetString(base.AbstractSimpleAsn1Item):
    +class OctetString(base.SimpleAsn1Type):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`.
    -    When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python 2 :class:`str` or
    +    Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type
    +    assumes "|encoding|" serialisation.
     
         Keyword Args
         ------------
    -    value: :class:`str`, :class:`bytes` or |ASN.1| object
    -        string (Python 2) or bytes (Python 3), alternatively unicode object
    -        (Python 2) or string (Python 3) representing character string to be
    -        serialised into octets (note `encoding` parameter) or |ASN.1| object.
    +    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
     +        :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
     +        :class:`unicode` object (Python 2) or :class:`str` (Python 3)
    +        representing character string to be serialised into octets
    +        (note `encoding` parameter) or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         encoding: :py:class:`str`
             Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
    @@ -754,7 +772,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -786,7 +804,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
         subtypeSpec = constraint.ConstraintsIntersection()
     
         # Optimization for faster codec lookup
    -    typeId = base.AbstractSimpleAsn1Item.getTypeId()
    +    typeId = base.SimpleAsn1Type.getTypeId()
     
         defaultBinValue = defaultHexValue = noValue
         encoding = 'iso-8859-1'
    @@ -816,26 +834,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
             if 'encoding' not in kwargs:
                 kwargs['encoding'] = self.encoding
     
    -        base.AbstractSimpleAsn1Item.__init__(self, value, **kwargs)
    +        base.SimpleAsn1Type.__init__(self, value, **kwargs)
     
         if sys.version_info[0] <= 2:
             def prettyIn(self, value):
                 if isinstance(value, str):
                     return value
    +
                 elif isinstance(value, unicode):
                     try:
                         return value.encode(self.encoding)
    +
                     except (LookupError, UnicodeEncodeError):
    -                    raise error.PyAsn1Error(
    -                        "Can't encode string '%s' with codec %s" % (value, self.encoding)
    +                    exc = sys.exc_info()[1]
    +                    raise error.PyAsn1UnicodeEncodeError(
    +                        "Can't encode string '%s' with codec "
    +                        "%s" % (value, self.encoding), exc
                         )
    +
                 elif isinstance(value, (tuple, list)):
                     try:
                         return ''.join([chr(x) for x in value])
    +
                     except ValueError:
                         raise error.PyAsn1Error(
                             "Bad %s initializer '%s'" % (self.__class__.__name__, value)
                         )
    +
                 else:
                     return str(value)
     
    @@ -847,8 +872,10 @@ class OctetString(base.AbstractSimpleAsn1Item):
                     return self._value.decode(self.encoding)
     
                 except UnicodeDecodeError:
    -                raise error.PyAsn1Error(
    -                    "Can't decode string '%s' with codec %s" % (self._value, self.encoding)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeDecodeError(
    +                    "Can't decode string '%s' with codec "
    +                    "%s" % (self._value, self.encoding), exc
                     )
     
             def asOctets(self):
    @@ -861,19 +888,26 @@ class OctetString(base.AbstractSimpleAsn1Item):
             def prettyIn(self, value):
                 if isinstance(value, bytes):
                     return value
    +
                 elif isinstance(value, str):
                     try:
                         return value.encode(self.encoding)
    +
                     except UnicodeEncodeError:
    -                    raise error.PyAsn1Error(
    -                        "Can't encode string '%s' with '%s' codec" % (value, self.encoding)
    +                    exc = sys.exc_info()[1]
    +                    raise error.PyAsn1UnicodeEncodeError(
    +                        "Can't encode string '%s' with '%s' "
    +                        "codec" % (value, self.encoding), exc
                         )
                 elif isinstance(value, OctetString):  # a shortcut, bytes() would work the same way
                     return value.asOctets()
    -            elif isinstance(value, base.AbstractSimpleAsn1Item):  # this mostly targets Integer objects
    +
    +            elif isinstance(value, base.SimpleAsn1Type):  # this mostly targets Integer objects
                     return self.prettyIn(str(value))
    +
                 elif isinstance(value, (tuple, list)):
                     return self.prettyIn(bytes(value))
    +
                 else:
                     return bytes(value)
     
    @@ -882,8 +916,11 @@ class OctetString(base.AbstractSimpleAsn1Item):
                     return self._value.decode(self.encoding)
     
                 except UnicodeDecodeError:
    -                raise error.PyAsn1Error(
    -                    "Can't decode string '%s' with '%s' codec at '%s'" % (self._value, self.encoding, self.__class__.__name__)
    +                exc = sys.exc_info()[1]
    +                raise error.PyAsn1UnicodeDecodeError(
    +                    "Can't decode string '%s' with '%s' codec at "
    +                    "'%s'" % (self._value, self.encoding,
    +                              self.__class__.__name__), exc
                     )
     
             def __bytes__(self):
    @@ -1028,19 +1065,22 @@ class OctetString(base.AbstractSimpleAsn1Item):
     class Null(OctetString):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`str` objects (always empty).
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`str` objects
    +    (always empty).
     
         Keyword Args
         ------------
    -    value: :class:`str` or :py:class:`~pyasn1.type.univ.Null` object
    -        Python empty string literal or any object that evaluates to `False`
    +    value: :class:`str` or |ASN.1| object
     +        Python empty :class:`str` literal or any object that evaluates to :obj:`False`.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -1081,25 +1121,30 @@ else:
     numericTypes = intTypes + (float,)
     
     
    -class ObjectIdentifier(base.AbstractSimpleAsn1Item):
    +class ObjectIdentifier(base.SimpleAsn1Type):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`tuple` objects (tuple of non-negative integers).
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`tuple` objects
    +    (tuple of non-negative integers).
     
         Keyword Args
         ------------
         value: :class:`tuple`, :class:`str` or |ASN.1| object
    -        Python sequence of :class:`int` or string literal or |ASN.1| object.
    +        Python sequence of :class:`int` or :class:`str` literal or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -1131,7 +1176,7 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
         subtypeSpec = constraint.ConstraintsIntersection()
     
         # Optimization for faster codec lookup
    -    typeId = base.AbstractSimpleAsn1Item.getTypeId()
    +    typeId = base.SimpleAsn1Type.getTypeId()
     
         def __add__(self, other):
             return self.clone(self._value + other)
    @@ -1173,8 +1218,8 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
             Returns
             -------
             : :class:`bool`
    -            :class:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object
    -            or :class:`False` otherwise.
    +            :obj:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object
    +            or :obj:`False` otherwise.
             """
             l = len(self)
             if l <= len(other):
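
For reference, the `isPrefixOf` predicate documented above in use:

```python
from pyasn1.type import univ

mib = univ.ObjectIdentifier('1.3.6.1.2.1')
sysDescr = univ.ObjectIdentifier('1.3.6.1.2.1.1.1.0')

print(mib.isPrefixOf(sysDescr))  # -> True
print(sysDescr.isPrefixOf(mib))  # -> False
print(mib + (1,))                # OIDs concatenate like tuples
```
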
    @@ -1214,10 +1259,11 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
             return '.'.join([str(x) for x in value])
     
     
    -class Real(base.AbstractSimpleAsn1Item):
    +class Real(base.SimpleAsn1Type):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`float` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`float` objects.
         Additionally, |ASN.1| objects behave like a :class:`tuple` in which case its
         elements are mantissa, base and exponent.
     
    @@ -1225,17 +1271,20 @@ class Real(base.AbstractSimpleAsn1Item):
         ------------
         value: :class:`tuple`, :class:`float` or |ASN.1| object
             Python sequence of :class:`int` (representing mantissa, base and
    -        exponent) or float instance or *Real* class instance.
    +        exponent) or :class:`float` instance or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -1278,7 +1327,7 @@ class Real(base.AbstractSimpleAsn1Item):
         subtypeSpec = constraint.ConstraintsIntersection()
     
         # Optimization for faster codec lookup
    -    typeId = base.AbstractSimpleAsn1Item.getTypeId()
    +    typeId = base.SimpleAsn1Type.getTypeId()
     
         @staticmethod
         def __normalizeBase10(value):
    @@ -1342,8 +1391,8 @@ class Real(base.AbstractSimpleAsn1Item):
             Returns
             -------
             : :class:`bool`
    -            :class:`True` if calling object represents plus infinity
    -            or :class:`False` otherwise.
    +            :obj:`True` if calling object represents plus infinity
    +            or :obj:`False` otherwise.
     
             """
             return self._value == self._plusInf
    @@ -1355,8 +1404,8 @@ class Real(base.AbstractSimpleAsn1Item):
             Returns
             -------
             : :class:`bool`
    -            :class:`True` if calling object represents minus infinity
    -            or :class:`False` otherwise.
    +            :obj:`True` if calling object represents minus infinity
    +            or :obj:`False` otherwise.
             """
             return self._value == self._minusInf
     
    @@ -1479,7 +1528,7 @@ class Real(base.AbstractSimpleAsn1Item):
             def __bool__(self):
                 return bool(float(self))
     
    -        __hash__ = base.AbstractSimpleAsn1Item.__hash__
    +        __hash__ = base.SimpleAsn1Type.__hash__
     
         def __getitem__(self, idx):
             if self._value in self._inf:
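
Real's dual float/tuple duck-typing, and the `isPlusInf` property touched above, in a nutshell:

```python
from pyasn1.type import univ

r = univ.Real((123, 10, 2))  # (mantissa, base, exponent) = 123 * 10**2
print(float(r))              # duck-types float -> 12300.0
print(r[0], r[1], r[2])      # duck-types tuple -> 123 10 2

inf = univ.Real(float('inf'))
print(inf.isPlusInf)         # -> True
```
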
    @@ -1500,27 +1549,31 @@ class Real(base.AbstractSimpleAsn1Item):
     
     
     class Enumerated(Integer):
    -    """Create |ASN.1| type or object.
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python :class:`int` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
    +    objects are immutable and duck-type Python :class:`int` objects.
     
         Keyword Args
         ------------
         value: :class:`int`, :class:`str` or |ASN.1| object
    -        Python integer or string literal or |ASN.1| class instance.
    +        Python :class:`int` or :class:`str` literal or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
    +        verification for |ASN.1| type occurs automatically on object
    +        instantiation.
     
         namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
             Object representing non-default symbolic aliases for numbers
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    @@ -1566,10 +1619,11 @@ class Enumerated(Integer):
     
     # "Structured" ASN.1 types
     
    -class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
    -    """Create |ASN.1| type.
    +class SequenceOfAndSetOfBase(base.ConstructedAsn1Type):
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are mutable and duck-type Python :class:`list` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
    +    its objects are mutable and duck-type Python :class:`list` objects.
     
         Keyword Args
         ------------
    @@ -1580,10 +1634,9 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    -
    -    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing collection size constraint
    +        Object representing non-default ASN.1 subtype constraint(s). Constraints
     +        verification for |ASN.1| type only occurs on an explicit
     +        `.isInconsistent` call.
     
         Examples
         --------
    @@ -1605,12 +1658,14 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             # support positional params for backward compatibility
             if args:
                 for key, value in zip(('componentType', 'tagSet',
    -                                   'subtypeSpec', 'sizeSpec'), args):
    +                                   'subtypeSpec'), args):
                     if key in kwargs:
                         raise error.PyAsn1Error('Conflicting positional and keyword params!')
                     kwargs['componentType'] = value
     
    -        base.AbstractConstructedAsn1Item.__init__(self, **kwargs)
    +        self._componentValues = noValue
    +
    +        base.ConstructedAsn1Type.__init__(self, **kwargs)
     
         # Python list protocol
     
    @@ -1628,24 +1683,36 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             except error.PyAsn1Error:
                 raise IndexError(sys.exc_info()[1])
     
    -    def clear(self):
    -        self._componentValues = []
    -
         def append(self, value):
    -        self[len(self)] = value
    +        if self._componentValues is noValue:
    +            pos = 0
    +
    +        else:
    +            pos = len(self._componentValues)
    +
    +        self[pos] = value
     
         def count(self, value):
    -        return self._componentValues.count(value)
    +        return list(self._componentValues.values()).count(value)
     
         def extend(self, values):
             for value in values:
                 self.append(value)
     
    +        if self._componentValues is noValue:
    +            self._componentValues = {}
    +
         def index(self, value, start=0, stop=None):
             if stop is None:
                 stop = len(self)
    +
    +        indices, values = zip(*self._componentValues.items())
    +
    +        # TODO: remove when Py2.5 support is gone
    +        values = list(values)
    +
             try:
    -            return self._componentValues.index(value, start, stop)
    +            return indices[values.index(value, start, stop)]
     
             except error.PyAsn1Error:
                 raise ValueError(sys.exc_info()[1])
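
Although storage moved from a list to an index-keyed dict, the Python list protocol reworked above is preserved; for instance:

```python
from pyasn1.type import univ

seq = univ.SequenceOf(componentType=univ.Integer())
seq.append(2)
seq.extend([3, 1])

print(len(seq))      # -> 3
print(seq.index(3))  # -> 1
print(seq.count(1))  # -> 1

seq.sort()
print([int(x) for x in seq])  # -> [1, 2, 3]
```
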
    @@ -1654,15 +1721,24 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             self._componentValues.reverse()
     
         def sort(self, key=None, reverse=False):
    -        self._componentValues.sort(key=key, reverse=reverse)
    +        self._componentValues = dict(
    +            enumerate(sorted(self._componentValues.values(),
    +                             key=key, reverse=reverse)))
    +
    +    def __len__(self):
    +        if self._componentValues is noValue or not self._componentValues:
    +            return 0
    +
    +        return max(self._componentValues) + 1
     
         def __iter__(self):
    -        return iter(self._componentValues)
    +        for idx in range(0, len(self)):
    +            yield self.getComponentByPosition(idx)
     
         def _cloneComponentValues(self, myClone, cloneValueFlag):
    -        for idx, componentValue in enumerate(self._componentValues):
    +        for idx, componentValue in self._componentValues.items():
                 if componentValue is not noValue:
    -                if isinstance(componentValue, base.AbstractConstructedAsn1Item):
    +                if isinstance(componentValue, base.ConstructedAsn1Type):
                         myClone.setComponentByPosition(
                             idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
                         )
    @@ -1689,8 +1765,8 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
                 object instead of the requested component.
     
             instantiate: :class:`bool`
    -            If `True` (default), inner component will be automatically instantiated.
    -            If 'False' either existing component or the `noValue` object will be
    +            If :obj:`True` (default), inner component will be automatically instantiated.
     +            If :obj:`False`, either the existing component or the :class:`NoValue` object will be
                 returned.
     
             Returns
    @@ -1735,10 +1811,21 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
                 # returns noValue
                 s.getComponentByPosition(0, instantiate=False)
             """
    +        if isinstance(idx, slice):
    +            indices = tuple(range(len(self)))
    +            return [self.getComponentByPosition(subidx, default, instantiate)
    +                    for subidx in indices[idx]]
    +
    +        if idx < 0:
    +            idx = len(self) + idx
    +            if idx < 0:
    +                raise error.PyAsn1Error(
    +                    'SequenceOf/SetOf index is out of range')
    +
             try:
                 componentValue = self._componentValues[idx]
     
    -        except IndexError:
    +        except (KeyError, error.PyAsn1Error):
                 if not instantiate:
                     return default
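
The hunk above (and its `setComponentByPosition` counterpart below) adds slice and negative-index support; illustratively:

```python
from pyasn1.type import univ

seq = univ.SequenceOf(componentType=univ.Integer())
seq.extend([10, 20, 30, 40])

print(int(seq[-1]))                # -> 40
print([int(x) for x in seq[1:3]])  # -> [20, 30]

seq[0:2] = [11, 21]                # slice assignment goes component-wise
print([int(x) for x in seq])       # -> [11, 21, 30, 40]
```
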
     
    @@ -1773,15 +1860,16 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
                 A Python value to initialize |ASN.1| component with (if *componentType* is set)
                 or ASN.1 value object to assign to |ASN.1| component.
    +            If `value` is not given, schema object will be set as a component.
     
             verifyConstraints: :class:`bool`
    -             If `False`, skip constraints validation
    +             If :obj:`False`, skip constraints validation
     
             matchTags: :class:`bool`
    -             If `False`, skip component tags matching
    +             If :obj:`False`, skip component tags matching
     
             matchConstraints: :class:`bool`
    -             If `False`, skip component constraints matching
    +             If :obj:`False`, skip component constraints matching
     
             Returns
             -------
    @@ -1789,51 +1877,75 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
     
             Raises
             ------
    -        IndexError:
    +        ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
    +            On constraint violation or bad initializer
    +        IndexError
                 When idx > len(self)
             """
    +        if isinstance(idx, slice):
    +            indices = tuple(range(len(self)))
    +            startIdx = indices and indices[idx][0] or 0
    +            for subIdx, subValue in enumerate(value):
    +                self.setComponentByPosition(
    +                    startIdx + subIdx, subValue, verifyConstraints,
    +                    matchTags, matchConstraints)
    +            return self
    +
    +        if idx < 0:
    +            idx = len(self) + idx
    +            if idx < 0:
    +                raise error.PyAsn1Error(
    +                    'SequenceOf/SetOf index is out of range')
    +
             componentType = self.componentType
     
    -        try:
    -            currentValue = self._componentValues[idx]
    -        except IndexError:
    -            currentValue = noValue
    +        if self._componentValues is noValue:
    +            componentValues = {}
     
    -            if len(self._componentValues) < idx:
    -                raise error.PyAsn1Error('Component index out of range')
    +        else:
    +            componentValues = self._componentValues
    +
    +        currentValue = componentValues.get(idx, noValue)
     
             if value is noValue:
                 if componentType is not None:
                     value = componentType.clone()
    +
                 elif currentValue is noValue:
                     raise error.PyAsn1Error('Component type not defined')
    +
             elif not isinstance(value, base.Asn1Item):
    -            if componentType is not None and isinstance(componentType, base.AbstractSimpleAsn1Item):
    +            if (componentType is not None and
    +                    isinstance(componentType, base.SimpleAsn1Type)):
                     value = componentType.clone(value=value)
    -            elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
    +
    +            elif (currentValue is not noValue and
    +                    isinstance(currentValue, base.SimpleAsn1Type)):
                     value = currentValue.clone(value=value)
    +
                 else:
    -                raise error.PyAsn1Error('Non-ASN.1 value %r and undefined component type at %r' % (value, self))
    -        elif componentType is not None:
    -            if self.strictConstraints:
    -                if not componentType.isSameTypeWith(value, matchTags, matchConstraints):
    -                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
    -            else:
    -                if not componentType.isSuperTypeOf(value, matchTags, matchConstraints):
    -                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
    +                raise error.PyAsn1Error(
    +                    'Non-ASN.1 value %r and undefined component'
    +                    ' type at %r' % (value, self))
     
    -        if verifyConstraints and value.isValue:
    -            try:
    -                self.subtypeSpec(value, idx)
    +        elif componentType is not None and (matchTags or matchConstraints):
    +            subtypeChecker = (
    +                    self.strictConstraints and
    +                    componentType.isSameTypeWith or
    +                    componentType.isSuperTypeOf)
     
    -            except error.PyAsn1Error:
    -                exType, exValue, exTb = sys.exc_info()
    -                raise exType('%s at %s' % (exValue, self.__class__.__name__))
    +            if not subtypeChecker(value, verifyConstraints and matchTags,
    +                                  verifyConstraints and matchConstraints):
    +                # TODO: we should wrap componentType with UnnamedType to carry
    +                # additional properties associated with componentType
    +                if componentType.typeId != Any.typeId:
    +                    raise error.PyAsn1Error(
    +                        'Component value is tag-incompatible: %r vs '
    +                        '%r' % (value, componentType))
     
    -        if currentValue is noValue:
    -            self._componentValues.append(value)
    -        else:
    -            self._componentValues[idx] = value
    +        componentValues[idx] = value
    +
    +        self._componentValues = componentValues
     
             return self
     
    @@ -1842,16 +1954,44 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             if self.componentType is not None:
                 return self.componentType.tagMap
     
    +    @property
    +    def components(self):
    +        return [self._componentValues[idx]
    +                for idx in sorted(self._componentValues)]
    +
    +    def clear(self):
    +        """Remove all components and become an empty |ASN.1| value object.
    +
    +        Has the same effect on |ASN.1| object as it does on :class:`list`
    +        built-in.
    +        """
    +        self._componentValues = {}
    +        return self
    +
    +    def reset(self):
    +        """Remove all components and become a |ASN.1| schema object.
    +
    +        See :meth:`isValue` property for more information on the
    +        distinction between value and schema objects.
    +        """
    +        self._componentValues = noValue
    +        return self
    +
         def prettyPrint(self, scope=0):
             scope += 1
             representation = self.__class__.__name__ + ':\n'
    -        for idx, componentValue in enumerate(self._componentValues):
    +
    +        if not self.isValue:
    +            return representation
    +
    +        for idx, componentValue in enumerate(self):
                 representation += ' ' * scope
                 if (componentValue is noValue and
                         self.componentType is not None):
                     representation += ''
                 else:
                     representation += componentValue.prettyPrint(scope)
    +
             return representation
     
         def prettyPrintType(self, scope=0):
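
`clear()` and `reset()` differ in what the object becomes afterwards; a short sketch of the distinction:

```python
from pyasn1.type import univ

seq = univ.SequenceOf(componentType=univ.Integer())
seq.extend([1, 2, 3])

seq.clear()         # still a value object, just empty
print(seq.isValue)  # -> True
print(len(seq))     # -> 0

seq.reset()         # back to a mere schema
print(seq.isValue)  # -> False
```
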
    @@ -1867,17 +2007,17 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
         def isValue(self):
             """Indicate that |ASN.1| object represents ASN.1 value.
     
    -        If *isValue* is `False` then this object represents just ASN.1 schema.
    +        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
     
    -        If *isValue* is `True` then, in addition to its ASN.1 schema features,
    -        this object can also be used like a Python built-in object (e.g. `int`,
    -        `str`, `dict` etc.).
    +        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
    +        this object can also be used like a Python built-in object
    +        (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).
     
             Returns
             -------
             : :class:`bool`
    -            :class:`False` if object represents just ASN.1 schema.
    -            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
    +            :obj:`False` if object represents just ASN.1 schema.
    +            :obj:`True` if object represents ASN.1 schema and can be used as a normal value.
     
             Note
             ----
    @@ -1890,12 +2030,53 @@ class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
             The PyASN1 value objects can **additionally** participate in many operations
             involving regular Python objects (e.g. arithmetic, comprehension etc).
             """
    -        for componentValue in self._componentValues:
    +        if self._componentValues is noValue:
    +            return False
    +
    +        if len(self._componentValues) != len(self):
    +            return False
    +
    +        for componentValue in self._componentValues.values():
                 if componentValue is noValue or not componentValue.isValue:
                     return False
     
             return True
     
    +    @property
    +    def isInconsistent(self):
    +        """Run necessary checks to ensure |ASN.1| object consistency.
    +
    +        Default action is to verify |ASN.1| object against constraints imposed
    +        by `subtypeSpec`.
    +
    +        Raises
    +        ------
     +        :py:class:`~pyasn1.error.PyAsn1Error` on any inconsistencies found
    +        """
    +        if self.componentType is noValue or not self.subtypeSpec:
    +            return False
    +
    +        if self._componentValues is noValue:
    +            return True
    +
    +        mapping = {}
    +
    +        for idx, value in self._componentValues.items():
    +            # Absent fields are not in the mapping
    +            if value is noValue:
    +                continue
    +
    +            mapping[idx] = value
    +
    +        try:
    +            # Represent SequenceOf/SetOf as a bare dict to constraints chain
    +            self.subtypeSpec(mapping)
    +
    +        except error.PyAsn1Error:
    +            exc = sys.exc_info()[1]
    +            return exc
    +
    +        return False
     
     class SequenceOf(SequenceOfAndSetOfBase):
         __doc__ = SequenceOfAndSetOfBase.__doc__
    @@ -1916,10 +2097,6 @@ class SequenceOf(SequenceOfAndSetOfBase):
         #: imposing constraints on |ASN.1| type initialization values.
         subtypeSpec = constraint.ConstraintsIntersection()
     
    -    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -    #: object imposing size constraint on |ASN.1| objects
    -    sizeSpec = constraint.ConstraintsIntersection()
    -
         # Disambiguation ASN.1 types identification
         typeId = SequenceOfAndSetOfBase.getTypeId()
     
    @@ -1943,18 +2120,15 @@ class SetOf(SequenceOfAndSetOfBase):
         #: imposing constraints on |ASN.1| type initialization values.
         subtypeSpec = constraint.ConstraintsIntersection()
     
    -    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -    #: object imposing size constraint on |ASN.1| objects
    -    sizeSpec = constraint.ConstraintsIntersection()
    -
         # Disambiguation ASN.1 types identification
         typeId = SequenceOfAndSetOfBase.getTypeId()
     
     
    -class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
    -    """Create |ASN.1| type.
    +class SequenceAndSetBase(base.ConstructedAsn1Type):
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are mutable and duck-type Python :class:`dict` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
    +    its objects are mutable and duck-type Python :class:`dict` objects.
     
         Keyword Args
         ------------
    @@ -1965,10 +2139,9 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    -
    -    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing collection size constraint
     +        Object representing non-default ASN.1 subtype constraint(s). Constraint
     +        verification for |ASN.1| type can only occur on an explicit
     +        `.isInconsistent` call.
     
         Examples
         --------
    @@ -2042,8 +2215,12 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
     
     
         def __init__(self, **kwargs):
    -        base.AbstractConstructedAsn1Item.__init__(self, **kwargs)
    +        base.ConstructedAsn1Type.__init__(self, **kwargs)
             self._componentTypeLen = len(self.componentType)
    +        if self._componentTypeLen:
    +            self._componentValues = []
    +        else:
    +            self._componentValues = noValue
             self._dynamicNames = self._componentTypeLen or self.DynamicNames()
     
         def __getitem__(self, idx):
    @@ -2086,6 +2263,9 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             else:
                 return key in self._dynamicNames
     
    +    def __len__(self):
    +        return len(self._componentValues)
    +
         def __iter__(self):
             return iter(self.componentType or self._dynamicNames)
     
    @@ -2112,13 +2292,36 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
                 self[k] = mappingValue[k]
     
         def clear(self):
    +        """Remove all components and become an empty |ASN.1| value object.
    +
     +        Has the same effect on the |ASN.1| object as it does on the
     +        :class:`dict` built-in.
    +        """
             self._componentValues = []
             self._dynamicNames = self.DynamicNames()
    +        return self
    +
    +    def reset(self):
    +        """Remove all components and become a |ASN.1| schema object.
    +
    +        See :meth:`isValue` property for more information on the
    +        distinction between value and schema objects.
    +        """
    +        self._componentValues = noValue
    +        self._dynamicNames = self.DynamicNames()
    +        return self
    +
    +    @property
    +    def components(self):
    +        return self._componentValues
     
         def _cloneComponentValues(self, myClone, cloneValueFlag):
    +        if self._componentValues is noValue:
    +            return
    +
             for idx, componentValue in enumerate(self._componentValues):
                 if componentValue is not noValue:
    -                if isinstance(componentValue, base.AbstractConstructedAsn1Item):
    +                if isinstance(componentValue, base.ConstructedAsn1Type):
                         myClone.setComponentByPosition(
                             idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
                         )
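
A sketch of the clear()/reset() split documented above, assuming the pyasn1
0.4.x API (SequenceOf carries the same pair of methods as Sequence):

    from pyasn1.type import univ

    seq = univ.SequenceOf(componentType=univ.Integer())
    assert not seq.isValue   # fresh object is a schema
    seq.append(1)
    assert seq.isValue       # now a value object
    seq.clear()
    assert seq.isValue       # still a value object, just an empty one
    seq.reset()
    assert not seq.isValue   # back to a bare schema
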
    @@ -2142,14 +2345,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
                 object instead of the requested component.
     
             instantiate: :class:`bool`
    -            If `True` (default), inner component will be automatically instantiated.
    -            If 'False' either existing component or the `noValue` object will be
    -            returned.
    +            If :obj:`True` (default), inner component will be automatically
    +            instantiated.
    +            If :obj:`False` either existing component or the :class:`NoValue`
    +            object will be returned.
     
             Returns
             -------
             : :py:class:`~pyasn1.type.base.PyAsn1Item`
    -            Instantiate |ASN.1| component type or return existing component value
    +            Instantiate |ASN.1| component type or return existing
    +            component value
             """
             if self._componentTypeLen:
                 idx = self.componentType.getPositionByName(name)
    @@ -2180,15 +2385,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
                 A Python value to initialize |ASN.1| component with (if *componentType* is set)
                 or ASN.1 value object to assign to |ASN.1| component.
    +            If `value` is not given, schema object will be set as a component.
     
             verifyConstraints: :class:`bool`
    -             If `False`, skip constraints validation
    +             If :obj:`False`, skip constraints validation
     
             matchTags: :class:`bool`
    -             If `False`, skip component tags matching
    +             If :obj:`False`, skip component tags matching
     
             matchConstraints: :class:`bool`
    -             If `False`, skip component constraints matching
    +             If :obj:`False`, skip component constraints matching
     
             Returns
             -------
    @@ -2226,9 +2432,10 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
                 object instead of the requested component.
     
             instantiate: :class:`bool`
    -            If `True` (default), inner component will be automatically instantiated.
    -            If 'False' either existing component or the `noValue` object will be
    -            returned.
    +            If :obj:`True` (default), inner component will be automatically
    +            instantiated.
    +            If :obj:`False` either existing component or the :class:`NoValue`
    +            object will be returned.
     
             Returns
             -------
    @@ -2275,7 +2482,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
                 s.getComponentByPosition(0, instantiate=False)
             """
             try:
    -            componentValue = self._componentValues[idx]
    +            if self._componentValues is noValue:
    +                componentValue = noValue
    +
    +            else:
    +                componentValue = self._componentValues[idx]
     
             except IndexError:
                 componentValue = noValue
    @@ -2317,15 +2528,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
                 A Python value to initialize |ASN.1| component with (if *componentType* is set)
                 or ASN.1 value object to assign to |ASN.1| component.
    +            If `value` is not given, schema object will be set as a component.
     
             verifyConstraints : :class:`bool`
    -             If `False`, skip constraints validation
    +             If :obj:`False`, skip constraints validation
     
             matchTags: :class:`bool`
    -             If `False`, skip component tags matching
    +             If :obj:`False`, skip component tags matching
     
             matchConstraints: :class:`bool`
    -             If `False`, skip component constraints matching
    +             If :obj:`False`, skip component constraints matching
     
             Returns
             -------
    @@ -2334,8 +2546,14 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             componentType = self.componentType
             componentTypeLen = self._componentTypeLen
     
    +        if self._componentValues is noValue:
    +            componentValues = []
    +
    +        else:
    +            componentValues = self._componentValues
    +
             try:
    -            currentValue = self._componentValues[idx]
    +            currentValue = componentValues[idx]
     
             except IndexError:
                 currentValue = noValue
    @@ -2343,11 +2561,13 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
                     if componentTypeLen < idx:
                         raise error.PyAsn1Error('component index out of range')
     
    -                self._componentValues = [noValue] * componentTypeLen
    +                componentValues = [noValue] * componentTypeLen
     
             if value is noValue:
                 if componentTypeLen:
    -                value = componentType.getTypeByPosition(idx).clone()
    +                value = componentType.getTypeByPosition(idx)
    +                if isinstance(value, base.ConstructedAsn1Type):
    +                    value = value.clone(cloneValueFlag=componentType[idx].isDefaulted)
     
                 elif currentValue is noValue:
                     raise error.PyAsn1Error('Component type not defined')
    @@ -2355,64 +2575,61 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             elif not isinstance(value, base.Asn1Item):
                 if componentTypeLen:
                     subComponentType = componentType.getTypeByPosition(idx)
    -                if isinstance(subComponentType, base.AbstractSimpleAsn1Item):
    +                if isinstance(subComponentType, base.SimpleAsn1Type):
                         value = subComponentType.clone(value=value)
     
                     else:
                         raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__)
     
    -            elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
    +            elif currentValue is not noValue and isinstance(currentValue, base.SimpleAsn1Type):
                     value = currentValue.clone(value=value)
     
                 else:
                     raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__)
     
    -        elif (matchTags or matchConstraints) and componentTypeLen:
    +        elif ((verifyConstraints or matchTags or matchConstraints) and
    +              componentTypeLen):
                 subComponentType = componentType.getTypeByPosition(idx)
                 if subComponentType is not noValue:
                     subtypeChecker = (self.strictConstraints and
                                       subComponentType.isSameTypeWith or
                                       subComponentType.isSuperTypeOf)
     
    -                if not subtypeChecker(value, matchTags, matchConstraints):
    +                if not subtypeChecker(value, verifyConstraints and matchTags,
    +                                      verifyConstraints and matchConstraints):
                         if not componentType[idx].openType:
                             raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
     
    -        if verifyConstraints and value.isValue:
    -            try:
    -                self.subtypeSpec(value, idx)
    -
    -            except error.PyAsn1Error:
    -                exType, exValue, exTb = sys.exc_info()
    -                raise exType('%s at %s' % (exValue, self.__class__.__name__))
    -
             if componentTypeLen or idx in self._dynamicNames:
    -            self._componentValues[idx] = value
    +            componentValues[idx] = value
     
    -        elif len(self._componentValues) == idx:
    -            self._componentValues.append(value)
    +        elif len(componentValues) == idx:
    +            componentValues.append(value)
                 self._dynamicNames.addField(idx)
     
             else:
                 raise error.PyAsn1Error('Component index out of range')
     
    +        self._componentValues = componentValues
    +
             return self
     
         @property
         def isValue(self):
             """Indicate that |ASN.1| object represents ASN.1 value.
     
    -        If *isValue* is `False` then this object represents just ASN.1 schema.
    +        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
     
    -        If *isValue* is `True` then, in addition to its ASN.1 schema features,
    -        this object can also be used like a Python built-in object (e.g. `int`,
    -        `str`, `dict` etc.).
    +        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
    +        this object can also be used like a Python built-in object (e.g.
    +        :class:`int`, :class:`str`, :class:`dict` etc.).
     
             Returns
             -------
             : :class:`bool`
    -            :class:`False` if object represents just ASN.1 schema.
    -            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
    +            :obj:`False` if object represents just ASN.1 schema.
    +            :obj:`True` if object represents ASN.1 schema and can be used as a
    +            normal value.
     
             Note
             ----
    @@ -2424,7 +2641,16 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
     
             The PyASN1 value objects can **additionally** participate in many operations
             involving regular Python objects (e.g. arithmetic, comprehension etc).
    +
     +        It is sufficient for |ASN.1| objects to have all non-optional and
     +        non-defaulted components be value objects for the whole object to be
     +        considered a value object. In other words, even with one or more
     +        optional components not turned into value objects, the |ASN.1| object
     +        is still considered a value object. Defaulted components are normally
     +        value objects by default.
             """
    +        if self._componentValues is noValue:
    +            return False
    +
             componentType = self.componentType
     
             if componentType:
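
Illustrating the note above (a hedged sketch against the pyasn1 0.4.x API):
only non-optional, non-defaulted components must be set before the whole
object counts as a value:

    from pyasn1.type import univ, namedtype

    rec = univ.Sequence(componentType=namedtype.NamedTypes(
        namedtype.NamedType('id', univ.Integer()),
        namedtype.OptionalNamedType('note', univ.OctetString())))
    assert not rec.isValue   # required 'id' is still unset
    rec['id'] = 1
    assert rec.isValue       # optional 'note' may remain a schema
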
    @@ -2446,6 +2672,44 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
     
             return True
     
    +    @property
    +    def isInconsistent(self):
    +        """Run necessary checks to ensure |ASN.1| object consistency.
    +
    +        Default action is to verify |ASN.1| object against constraints imposed
    +        by `subtypeSpec`.
    +
     +        Returns
     +        -------
     +        :obj:`False` if no inconsistencies found, otherwise a
     +        :py:class:`~pyasn1.error.PyAsn1Error` instance describing them
    +        """
    +        if self.componentType is noValue or not self.subtypeSpec:
    +            return False
    +
    +        if self._componentValues is noValue:
    +            return True
    +
    +        mapping = {}
    +
    +        for idx, value in enumerate(self._componentValues):
    +            # Absent fields are not in the mapping
    +            if value is noValue:
    +                continue
    +
    +            name = self.componentType.getNameByPosition(idx)
    +
    +            mapping[name] = value
    +
    +        try:
    +            # Represent Sequence/Set as a bare dict to constraints chain
    +            self.subtypeSpec(mapping)
    +
    +        except error.PyAsn1Error:
    +            exc = sys.exc_info()[1]
    +            return exc
    +
    +        return False
    +
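
Because isInconsistent hands the constraints chain a plain dict of the
components that are present, presence constraints compose naturally. A sketch,
assuming the WithComponentsConstraint/ComponentPresentConstraint helpers that
ship with this pyasn1 release:

    from pyasn1.type import univ, namedtype, constraint

    msg = univ.Sequence(
        componentType=namedtype.NamedTypes(
            namedtype.OptionalNamedType('a', univ.Integer()),
            namedtype.OptionalNamedType('b', univ.Integer())),
        subtypeSpec=constraint.WithComponentsConstraint(
            ('a', constraint.ComponentPresentConstraint())))
    msg['b'] = 2
    assert msg.isInconsistent   # 'a' absent -> truthy exception object
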
         def prettyPrint(self, scope=0):
             """Return an object representation string.
     
    @@ -2495,7 +2759,6 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
             if self._componentTypeLen:
                 return self.componentType[idx].name
     
    -
     class Sequence(SequenceAndSetBase):
         __doc__ = SequenceAndSetBase.__doc__
     
    @@ -2511,10 +2774,6 @@ class Sequence(SequenceAndSetBase):
         #: imposing constraints on |ASN.1| type initialization values.
         subtypeSpec = constraint.ConstraintsIntersection()
     
    -    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -    #: object imposing constraints on |ASN.1| objects
    -    sizeSpec = constraint.ConstraintsIntersection()
    -
         #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`)
         #: object imposing size constraint on |ASN.1| objects
         componentType = namedtype.NamedTypes()
    @@ -2554,10 +2813,6 @@ class Set(SequenceAndSetBase):
         #: imposing constraints on |ASN.1| type initialization values.
         subtypeSpec = constraint.ConstraintsIntersection()
     
    -    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -    #: object imposing constraints on |ASN.1| objects
    -    sizeSpec = constraint.ConstraintsIntersection()
    -
         # Disambiguation ASN.1 types identification
         typeId = SequenceAndSetBase.getTypeId()
     
    @@ -2581,9 +2836,10 @@ class Set(SequenceAndSetBase):
                 object instead of the requested component.
     
             instantiate: :class:`bool`
    -            If `True` (default), inner component will be automatically instantiated.
    -            If 'False' either existing component or the `noValue` object will be
    -            returned.
    +            If :obj:`True` (default), inner component will be automatically
    +            instantiated.
     +            If :obj:`False` either existing component or the :class:`NoValue`
     +            object will be returned.
     
             Returns
             -------
    @@ -2619,18 +2875,19 @@ class Set(SequenceAndSetBase):
             value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
                 A Python value to initialize |ASN.1| component with (if *componentType* is set)
                 or ASN.1 value object to assign to |ASN.1| component.
    +            If `value` is not given, schema object will be set as a component.
     
             verifyConstraints : :class:`bool`
    -            If `False`, skip constraints validation
    +            If :obj:`False`, skip constraints validation
     
             matchTags: :class:`bool`
    -            If `False`, skip component tags matching
    +            If :obj:`False`, skip component tags matching
     
             matchConstraints: :class:`bool`
    -            If `False`, skip component constraints matching
    +            If :obj:`False`, skip component constraints matching
     
             innerFlag: :class:`bool`
    -            If `True`, search for matching *tagSet* recursively.
    +            If :obj:`True`, search for matching *tagSet* recursively.
     
             Returns
             -------
    @@ -2662,9 +2919,10 @@ class Set(SequenceAndSetBase):
     
     
     class Choice(Set):
    -    """Create |ASN.1| type.
    +    """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are mutable and duck-type Python :class:`dict` objects.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
    +    its objects are mutable and duck-type Python :class:`list` objects.
     
         Keyword Args
         ------------
    @@ -2675,10 +2933,9 @@ class Choice(Set):
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
    -
    -    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing collection size constraint
     +        Object representing non-default ASN.1 subtype constraint(s). Constraint
     +        verification for |ASN.1| type can only occur on an explicit
     +        `.isInconsistent` call.
     
         Examples
         --------
    @@ -2718,11 +2975,7 @@ class Choice(Set):
         #: Set (on class, not on instance) or return a
         #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
         #: imposing constraints on |ASN.1| type initialization values.
    -    subtypeSpec = constraint.ConstraintsIntersection()
    -
    -    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -    #: object imposing size constraint on |ASN.1| objects
    -    sizeSpec = constraint.ConstraintsIntersection(
    +    subtypeSpec = constraint.ConstraintsIntersection(
             constraint.ValueSizeConstraint(1, 1)
         )
     
    @@ -2795,7 +3048,7 @@ class Choice(Set):
             if self._currentIdx is not None:
                 yield self.componentType[self._currentIdx].getName(), self[self._currentIdx]
     
    -    def verifySizeSpec(self):
    +    def checkConsistency(self):
             if self._currentIdx is None:
                 raise error.PyAsn1Error('Component not chosen')
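
A sketch of the renamed hook (pyasn1 0.4.x API): codecs call checkConsistency()
to assert that one alternative of the Choice has actually been chosen:

    from pyasn1.type import univ, namedtype
    from pyasn1 import error

    ch = univ.Choice(componentType=namedtype.NamedTypes(
        namedtype.NamedType('num', univ.Integer()),
        namedtype.NamedType('txt', univ.OctetString())))
    try:
        ch.checkConsistency()    # nothing chosen yet
    except error.PyAsn1Error:
        pass
    ch['num'] = 42
    ch.checkConsistency()        # passes once an alternative is set
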
     
    @@ -2809,7 +3062,7 @@ class Choice(Set):
                     tagSet = component.effectiveTagSet
                 else:
                     tagSet = component.tagSet
    -            if isinstance(component, base.AbstractConstructedAsn1Item):
    +            if isinstance(component, base.ConstructedAsn1Type):
                     myClone.setComponentByType(
                         tagSet, component.clone(cloneValueFlag=cloneValueFlag)
                     )
    @@ -2847,15 +3100,16 @@ class Choice(Set):
                 A Python value to initialize |ASN.1| component with (if *componentType* is set)
                 or ASN.1 value object to assign to |ASN.1| component. Once a new value is
                 set to *idx* component, previous value is dropped.
    +            If `value` is not given, schema object will be set as a component.
     
             verifyConstraints : :class:`bool`
    -            If `False`, skip constraints validation
    +            If :obj:`False`, skip constraints validation
     
             matchTags: :class:`bool`
    -            If `False`, skip component tags matching
    +            If :obj:`False`, skip component tags matching
     
             matchConstraints: :class:`bool`
    -            If `False`, skip component constraints matching
    +            If :obj:`False`, skip component constraints matching
     
             Returns
             -------
    @@ -2925,17 +3179,18 @@ class Choice(Set):
         def isValue(self):
             """Indicate that |ASN.1| object represents ASN.1 value.
     
    -        If *isValue* is `False` then this object represents just ASN.1 schema.
    +        If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
     
    -        If *isValue* is `True` then, in addition to its ASN.1 schema features,
    -        this object can also be used like a Python built-in object (e.g. `int`,
    -        `str`, `dict` etc.).
    +        If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
    +        this object can also be used like a Python built-in object (e.g.
    +        :class:`int`, :class:`str`, :class:`dict` etc.).
     
             Returns
             -------
             : :class:`bool`
    -            :class:`False` if object represents just ASN.1 schema.
    -            :class:`True` if object represents ASN.1 schema and can be used as a normal value.
    +            :obj:`False` if object represents just ASN.1 schema.
    +            :obj:`True` if object represents ASN.1 schema and can be used as a normal
    +            value.
     
             Note
             ----
    @@ -2957,7 +3212,7 @@ class Choice(Set):
     
         def clear(self):
             self._currentIdx = None
    -        Set.clear(self)
    +        return Set.clear(self)
     
         # compatibility stubs
     
    @@ -2968,22 +3223,27 @@ class Choice(Set):
     class Any(OctetString):
         """Create |ASN.1| schema or value object.
     
    -    |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3
    -    :class:`bytes`. When used in Unicode context, |ASN.1| type assumes "|encoding|"
    -    serialisation.
    +    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
    +    its objects are immutable and duck-type Python 2 :class:`str` or Python 3
    +    :class:`bytes`. When used in Unicode context, |ASN.1| type assumes
    +    "|encoding|" serialisation.
     
         Keyword Args
         ------------
    -    value: :class:`str`, :class:`bytes` or |ASN.1| object
    -        string (Python 2) or bytes (Python 3), alternatively unicode object
    -        (Python 2) or string (Python 3) representing character string to be
    -        serialised into octets (note `encoding` parameter) or |ASN.1| object.
    +    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
    +        :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
    +        :class:`unicode` object (Python 2) or :class:`str` (Python 3)
    +        representing character string to be serialised into octets (note
    +        `encoding` parameter) or |ASN.1| object.
    +        If `value` is not given, schema object will be created.
     
         tagSet: :py:class:`~pyasn1.type.tag.TagSet`
             Object representing non-default ASN.1 tag(s)
     
         subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    -        Object representing non-default ASN.1 subtype constraint(s)
     +        Object representing non-default ASN.1 subtype constraint(s). Constraint
     +        verification for |ASN.1| type occurs automatically on object
     +        instantiation.
     
         encoding: :py:class:`str`
             Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
    @@ -3000,7 +3260,7 @@ class Any(OctetString):
     
         Raises
         ------
    -    :py:class:`~pyasn1.error.PyAsn1Error`
    +    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
             On constraint violation or bad initializer.
     
         Examples
    diff --git a/server/www/packages/packages-windows/x86/pyasn1/type/useful.py b/server/www/packages/packages-windows/x86/pyasn1/type/useful.py
    index 146916d..7536b95 100644
    --- a/server/www/packages/packages-windows/x86/pyasn1/type/useful.py
    +++ b/server/www/packages/packages-windows/x86/pyasn1/type/useful.py
    @@ -1,7 +1,7 @@
     #
     # This file is part of pyasn1 software.
     #
     -# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
     +# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
     # License: http://snmplabs.com/pyasn1/license.html
     #
     import datetime
    diff --git a/server/www/packages/packages-windows/x86/pymysql/__init__.py b/server/www/packages/packages-windows/x86/pymysql/__init__.py
    index b79b4b8..0cb5006 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/__init__.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/__init__.py
    @@ -35,7 +35,7 @@ from .times import (
         DateFromTicks, TimeFromTicks, TimestampFromTicks)
     
     
    -VERSION = (0, 9, 2, None)
    +VERSION = (0, 9, 3, None)
     if VERSION[3] is not None:
         VERSION_STRING = "%d.%d.%d_%s" % VERSION
     else:
    diff --git a/server/www/packages/packages-windows/x86/pymysql/_auth.py b/server/www/packages/packages-windows/x86/pymysql/_auth.py
    index bbb742d..199f36c 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/_auth.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/_auth.py
    @@ -4,14 +4,22 @@ Implements auth methods
     from ._compat import text_type, PY2
     from .constants import CLIENT
     from .err import OperationalError
    +from .util import byte2int, int2byte
     
    -from cryptography.hazmat.backends import default_backend
    -from cryptography.hazmat.primitives import serialization, hashes
    -from cryptography.hazmat.primitives.asymmetric import padding
    +
    +try:
    +    from cryptography.hazmat.backends import default_backend
    +    from cryptography.hazmat.primitives import serialization, hashes
    +    from cryptography.hazmat.primitives.asymmetric import padding
    +    _have_cryptography = True
    +except ImportError:
    +    _have_cryptography = False
     
     from functools import partial
     import hashlib
    +import io
     import struct
    +import warnings
     
     
     DEBUG = False
    @@ -69,6 +77,8 @@ class RandStruct_323(object):
     
     def scramble_old_password(password, message):
         """Scramble for old_password"""
     +    warnings.warn("old password (for MySQL <4.1) is used. Upgrade your password with a newer auth method.\n"
     +                  "old password support will be removed in a future PyMySQL version")
         hash_pass = _hash_password_323(password)
         hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323])
         hash_pass_n = struct.unpack(">LL", hash_pass)
    @@ -128,6 +138,8 @@ def sha2_rsa_encrypt(password, salt, public_key):
     
         Used for sha256_password and caching_sha2_password.
         """
    +    if not _have_cryptography:
    +        raise RuntimeError("cryptography is required for sha256_password or caching_sha2_password")
         message = _xor_password(password + b'\0', salt)
         rsa_key = serialization.load_pem_public_key(public_key, default_backend())
         return rsa_key.encrypt(
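
With this guard a missing `cryptography` package surfaces as a clear
RuntimeError at authentication time rather than an ImportError at import time.
A hedged sketch of the caller-side effect (host and credentials are
hypothetical, and the server must actually request sha256/caching_sha2 auth):

    import pymysql

    try:
        conn = pymysql.connect(host='127.0.0.1', user='app', password='secret')
    except RuntimeError as e:
        print(e)  # cryptography is required for sha256_password or caching_sha2_password
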
    diff --git a/server/www/packages/packages-windows/x86/pymysql/charset.py b/server/www/packages/packages-windows/x86/pymysql/charset.py
    index 968376c..07d8063 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/charset.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/charset.py
    @@ -18,7 +18,7 @@ class Charset(object):
         @property
         def encoding(self):
             name = self.name
    -        if name == 'utf8mb4':
    +        if name in ('utf8mb4', 'utf8mb3'):
                 return 'utf8'
             return name
     
    @@ -30,18 +30,18 @@ class Charset(object):
     class Charsets:
         def __init__(self):
             self._by_id = {}
    +        self._by_name = {}
     
         def add(self, c):
             self._by_id[c.id] = c
    +        if c.is_default:
    +            self._by_name[c.name] = c
     
         def by_id(self, id):
             return self._by_id[id]
     
         def by_name(self, name):
    -        name = name.lower()
    -        for c in self._by_id.values():
    -            if c.name == name and c.is_default:
    -                return c
    +        return self._by_name.get(name.lower())
     
     _charsets = Charsets()
     """
    @@ -89,7 +89,6 @@ _charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', ''))
     _charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes'))
     _charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes'))
     _charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', ''))
    -_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes'))
     _charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes'))
     _charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes'))
     _charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes'))
    @@ -108,13 +107,9 @@ _charsets.add(Charset(50, 'cp1251', 'cp1251_bin', ''))
     _charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes'))
     _charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', ''))
     _charsets.add(Charset(53, 'macroman', 'macroman_bin', ''))
    -_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes'))
    -_charsets.add(Charset(55, 'utf16', 'utf16_bin', ''))
     _charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes'))
     _charsets.add(Charset(58, 'cp1257', 'cp1257_bin', ''))
     _charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes'))
    -_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes'))
    -_charsets.add(Charset(61, 'utf32', 'utf32_bin', ''))
     _charsets.add(Charset(63, 'binary', 'binary', 'Yes'))
     _charsets.add(Charset(64, 'armscii8', 'armscii8_bin', ''))
     _charsets.add(Charset(65, 'ascii', 'ascii_bin', ''))
    @@ -128,6 +123,7 @@ _charsets.add(Charset(72, 'hp8', 'hp8_bin', ''))
     _charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', ''))
     _charsets.add(Charset(74, 'koi8r', 'koi8r_bin', ''))
     _charsets.add(Charset(75, 'koi8u', 'koi8u_bin', ''))
    +_charsets.add(Charset(76, 'utf8', 'utf8_tolower_ci', ''))
     _charsets.add(Charset(77, 'latin2', 'latin2_bin', ''))
     _charsets.add(Charset(78, 'latin5', 'latin5_bin', ''))
     _charsets.add(Charset(79, 'latin7', 'latin7_bin', ''))
    @@ -141,7 +137,6 @@ _charsets.add(Charset(86, 'gb2312', 'gb2312_bin', ''))
     _charsets.add(Charset(87, 'gbk', 'gbk_bin', ''))
     _charsets.add(Charset(88, 'sjis', 'sjis_bin', ''))
     _charsets.add(Charset(89, 'tis620', 'tis620_bin', ''))
    -_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', ''))
     _charsets.add(Charset(91, 'ujis', 'ujis_bin', ''))
     _charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes'))
     _charsets.add(Charset(93, 'geostd8', 'geostd8_bin', ''))
    @@ -151,67 +146,6 @@ _charsets.add(Charset(96, 'cp932', 'cp932_bin', ''))
     _charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes'))
     _charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', ''))
     _charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', ''))
    -_charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', ''))
    -_charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', ''))
    -_charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', ''))
    -_charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', ''))
    -_charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', ''))
    -_charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', ''))
    -_charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', ''))
    -_charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', ''))
    -_charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', ''))
    -_charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', ''))
    -_charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', ''))
    -_charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', ''))
    -_charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', ''))
    -_charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', ''))
    -_charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', ''))
    -_charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', ''))
    -_charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', ''))
    -_charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', ''))
    -_charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', ''))
    -_charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', ''))
    -_charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', ''))
    -_charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', ''))
    -_charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', ''))
    -_charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', ''))
    -_charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', ''))
    -_charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', ''))
    -_charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', ''))
    -_charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', ''))
    -_charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', ''))
    -_charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', ''))
    -_charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', ''))
    -_charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', ''))
    -_charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', ''))
    -_charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', ''))
    -_charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', ''))
    -_charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', ''))
    -_charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', ''))
    -_charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', ''))
    -_charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', ''))
    -_charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', ''))
    -_charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', ''))
    -_charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', ''))
    -_charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', ''))
    -_charsets.add(Charset(162, 'utf32', 'utf32_latvian_ci', ''))
    -_charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', ''))
    -_charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', ''))
    -_charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', ''))
    -_charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', ''))
    -_charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', ''))
    -_charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', ''))
    -_charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', ''))
    -_charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', ''))
    -_charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', ''))
    -_charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', ''))
    -_charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', ''))
    -_charsets.add(Charset(174, 'utf32', 'utf32_spanish2_ci', ''))
    -_charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', ''))
    -_charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', ''))
    -_charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', ''))
    -_charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', ''))
    -_charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', ''))
     _charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', ''))
     _charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', ''))
     _charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', ''))
    @@ -232,6 +166,10 @@ _charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', ''))
     _charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', ''))
     _charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', ''))
     _charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', ''))
    +_charsets.add(Charset(212, 'utf8', 'utf8_german2_ci', ''))
    +_charsets.add(Charset(213, 'utf8', 'utf8_croatian_ci', ''))
    +_charsets.add(Charset(214, 'utf8', 'utf8_unicode_520_ci', ''))
    +_charsets.add(Charset(215, 'utf8', 'utf8_vietnamese_ci', ''))
     _charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', ''))
     _charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', ''))
     _charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', ''))
    @@ -257,14 +195,18 @@ _charsets.add(Charset(244, 'utf8mb4', 'utf8mb4_german2_ci', ''))
     _charsets.add(Charset(245, 'utf8mb4', 'utf8mb4_croatian_ci', ''))
     _charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', ''))
     _charsets.add(Charset(247, 'utf8mb4', 'utf8mb4_vietnamese_ci', ''))
    -
    +_charsets.add(Charset(248, 'gb18030', 'gb18030_chinese_ci', 'Yes'))
    +_charsets.add(Charset(249, 'gb18030', 'gb18030_bin', ''))
    +_charsets.add(Charset(250, 'gb18030', 'gb18030_unicode_520_ci', ''))
    +_charsets.add(Charset(255, 'utf8mb4', 'utf8mb4_0900_ai_ci', ''))
     
     charset_by_name = _charsets.by_name
     charset_by_id = _charsets.by_id
     
     
    +#TODO: remove this
     def charset_to_encoding(name):
         """Convert MySQL's charset name to Python's codec name"""
    -    if name == 'utf8mb4':
    +    if name in ('utf8mb4', 'utf8mb3'):
             return 'utf8'
         return name
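
by_name is now an O(1) dictionary lookup over default collations instead of a
linear scan, and unknown names yield None rather than falling through. A quick
sketch:

    from pymysql.charset import charset_by_name

    cs = charset_by_name('utf8mb4')   # default collation utf8mb4_general_ci
    assert cs.encoding == 'utf8'      # mapped to the Python codec name
    assert charset_by_name('no-such-charset') is None
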
    diff --git a/server/www/packages/packages-windows/x86/pymysql/connections.py b/server/www/packages/packages-windows/x86/pymysql/connections.py
    index 1e580d2..2e4122b 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/connections.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/connections.py
    @@ -88,7 +88,7 @@ TEXT_TYPES = {
     }
     
     
    -DEFAULT_CHARSET = 'utf8mb4'  # TODO: change to utf8mb4
    +DEFAULT_CHARSET = 'utf8mb4'
     
     MAX_PACKET_LEN = 2**24-1
     
    @@ -152,7 +152,6 @@ class Connection(object):
             (default: 10, min: 1, max: 31536000)
         :param ssl:
             A dict of arguments similar to mysql_ssl_set()'s parameters.
    -        For now the capath and cipher arguments are not supported.
         :param read_default_group: Group to read from in the configuration file.
         :param compress: Not supported
         :param named_pipe: Not supported
    @@ -295,15 +294,15 @@ class Connection(object):
             self._affected_rows = 0
             self.host_info = "Not connected"
     
    -        #: specified autocommit mode. None means use server default.
    +        # specified autocommit mode. None means use server default.
             self.autocommit_mode = autocommit
     
             if conv is None:
                 conv = converters.conversions
     
             # Need for MySQLdb compatibility.
    -        self.encoders = dict([(k, v) for (k, v) in conv.items() if type(k) is not int])
    -        self.decoders = dict([(k, v) for (k, v) in conv.items() if type(k) is int])
    +        self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
    +        self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
             self.sql_mode = sql_mode
             self.init_command = init_command
             self.max_allowed_packet = max_allowed_packet
    @@ -316,10 +315,9 @@ class Connection(object):
                 '_pid': str(os.getpid()),
                 '_client_version': VERSION_STRING,
             }
    +
             if program_name:
                 self._connect_attrs["program_name"] = program_name
    -        elif sys.argv:
    -            self._connect_attrs["program_name"] = sys.argv[0]
     
             if defer_connect:
                 self._sock = None
    @@ -494,6 +492,9 @@ class Connection(object):
     
         def __enter__(self):
             """Context manager that returns a Cursor"""
    +        warnings.warn(
    +            "Context manager API of Connection object is deprecated; Use conn.begin()",
    +            DeprecationWarning)
             return self.cursor()
     
         def __exit__(self, exc, value, traceback):
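
Since `with conn:` now emits a DeprecationWarning, the non-deprecated shape is
an explicit transaction around a cursor context manager. A sketch (connection
parameters are hypothetical):

    import pymysql

    conn = pymysql.connect(host='127.0.0.1', user='app', db='test')
    conn.begin()
    with conn.cursor() as cur:   # Cursor still supports the `with` protocol
        cur.execute("SELECT 1")
    conn.commit()
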
    @@ -696,6 +697,10 @@ class Connection(object):
                     raise err.OperationalError(
                         CR.CR_SERVER_LOST,
                         "Lost connection to MySQL server during query (%s)" % (e,))
    +            except BaseException:
    +                # Don't convert unknown exception to MySQLError.
    +                self._force_close()
    +                raise
             if len(data) < num_bytes:
                 self._force_close()
                 raise err.OperationalError(
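
The new BaseException branch means an interrupt in mid-read force-closes the
socket and propagates unchanged instead of being wrapped as OperationalError.
A hedged sketch of the caller-side consequence (DSN is hypothetical):

    import pymysql

    conn = pymysql.connect(host='127.0.0.1', user='app')
    cur = conn.cursor()
    try:
        cur.execute("SELECT SLEEP(60)")   # suppose Ctrl-C arrives here
    except KeyboardInterrupt:
        assert not conn.open              # connection was force-closed
        conn.ping(reconnect=True)         # reconnect explicitly before reuse
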
    @@ -804,7 +809,11 @@ class Connection(object):
             authresp = b''
             plugin_name = None
     
    -        if self._auth_plugin_name in ('', 'mysql_native_password'):
    +        if self._auth_plugin_name == '':
    +            plugin_name = b''
    +            authresp = _auth.scramble_native_password(self.password, self.salt)
    +        elif self._auth_plugin_name == 'mysql_native_password':
    +            plugin_name = b'mysql_native_password'
                 authresp = _auth.scramble_native_password(self.password, self.salt)
             elif self._auth_plugin_name == 'caching_sha2_password':
                 plugin_name = b'caching_sha2_password'
    @@ -842,9 +851,9 @@ class Connection(object):
             if self.server_capabilities & CLIENT.CONNECT_ATTRS:
                 connect_attrs = b''
                 for k, v in self._connect_attrs.items():
    -                k = k.encode('utf8')
    +                k = k.encode('utf-8')
                     connect_attrs += struct.pack('B', len(k)) + k
    -                v = v.encode('utf8')
    +                v = v.encode('utf-8')
                     connect_attrs += struct.pack('B', len(v)) + v
                 data += struct.pack('B', len(connect_attrs)) + connect_attrs
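
With the sys.argv fallback removed, the server only sees a program name when
the caller supplies one. Sketch (the value is hypothetical):

    conn = pymysql.connect(host='127.0.0.1', user='app',
                           program_name='teleport-web')
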
     
    diff --git a/server/www/packages/packages-windows/x86/pymysql/converters.py b/server/www/packages/packages-windows/x86/pymysql/converters.py
    index bf1db9d..ce2be06 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/converters.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/converters.py
    @@ -354,21 +354,6 @@ def through(x):
     convert_bit = through
     
     
    -def convert_characters(connection, field, data):
    -    field_charset = charset_by_id(field.charsetnr).name
    -    encoding = charset_to_encoding(field_charset)
    -    if field.flags & FLAG.SET:
    -        return convert_set(data.decode(encoding))
    -    if field.flags & FLAG.BINARY:
    -        return data
    -
    -    if connection.use_unicode:
    -        data = data.decode(encoding)
    -    elif connection.charset != field_charset:
    -        data = data.decode(encoding)
    -        data = data.encode(connection.encoding)
    -    return data
    -
     encoders = {
         bool: escape_bool,
         int: escape_int,
    diff --git a/server/www/packages/packages-windows/x86/pymysql/cursors.py b/server/www/packages/packages-windows/x86/pymysql/cursors.py
    index cc16998..a6d645d 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/cursors.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/cursors.py
    @@ -122,9 +122,9 @@ class Cursor(object):
                 return tuple(conn.literal(arg) for arg in args)
             elif isinstance(args, dict):
                 if PY2:
    -                args = dict((ensure_bytes(key), ensure_bytes(val)) for
    -                            (key, val) in args.items())
    -            return dict((key, conn.literal(val)) for (key, val) in args.items())
    +                args = {ensure_bytes(key): ensure_bytes(val) for
    +                        (key, val) in args.items()}
    +            return {key: conn.literal(val) for (key, val) in args.items()}
             else:
                 # If it's not a dictionary let's try escaping it anyways.
                 # Worst case it will throw a Value error
    diff --git a/server/www/packages/packages-windows/x86/pymysql/util.py b/server/www/packages/packages-windows/x86/pymysql/util.py
    index 3e82ac7..04683f8 100644
    --- a/server/www/packages/packages-windows/x86/pymysql/util.py
    +++ b/server/www/packages/packages-windows/x86/pymysql/util.py
    @@ -11,12 +11,3 @@ def byte2int(b):
     def int2byte(i):
         return struct.pack("!B", i)
     
    -
    -def join_bytes(bs):
    -    if len(bs) == 0:
    -        return ""
    -    else:
    -        rv = bs[0]
    -        for b in bs[1:]:
    -            rv += b
    -        return rv
    diff --git a/server/www/packages/packages-windows/x86/qrcode/main.py b/server/www/packages/packages-windows/x86/qrcode/main.py
    index 1e164f1..e46a9b9 100644
    --- a/server/www/packages/packages-windows/x86/qrcode/main.py
    +++ b/server/www/packages/packages-windows/x86/qrcode/main.py
    @@ -33,6 +33,7 @@ def _check_mask_pattern(mask_pattern):
             raise ValueError(
                 "Mask pattern should be in range(8) (got %s)" % mask_pattern)
     
    +
     class QRCode:
     
         def __init__(self, version=None,
    diff --git a/server/www/packages/packages-windows/x86/qrcode/release.py b/server/www/packages/packages-windows/x86/qrcode/release.py
    index abbabb4..4cc4c19 100644
    --- a/server/www/packages/packages-windows/x86/qrcode/release.py
    +++ b/server/www/packages/packages-windows/x86/qrcode/release.py
    @@ -12,7 +12,6 @@ def update_manpage(data):
         Update the version in the manpage document.
         """
         if data['name'] != 'qrcode':
    -        print('no qrcode')
             return
     
         base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    diff --git a/server/www/packages/packages-windows/x86/qrcode/util.py b/server/www/packages/packages-windows/x86/qrcode/util.py
    index a9652f7..231b85e 100644
    --- a/server/www/packages/packages-windows/x86/qrcode/util.py
    +++ b/server/www/packages/packages-windows/x86/qrcode/util.py
    @@ -33,7 +33,7 @@ MODE_SIZE_LARGE = {
     }
     
     ALPHA_NUM = six.b('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:')
    -RE_ALPHA_NUM = re.compile(six.b('^[') + re.escape(ALPHA_NUM) + six.b(']*\Z'))
    +RE_ALPHA_NUM = re.compile(six.b('^[') + re.escape(ALPHA_NUM) + six.b(r']*\Z'))
     
     # The number of bits for numeric delimited data lengths.
     NUMBER_LENGTH = {3: 10, 2: 7, 1: 4}
    @@ -344,12 +344,17 @@ def optimal_data_chunks(data, minimum=4):
         :param minimum: The minimum number of bytes in a row to split as a chunk.
         """
         data = to_bytestring(data)
    -    re_repeat = (
    -        six.b('{') + six.text_type(minimum).encode('ascii') + six.b(',}'))
    -    num_pattern = re.compile(six.b('\d') + re_repeat)
    +    num_pattern = six.b(r'\d')
    +    alpha_pattern = six.b('[') + re.escape(ALPHA_NUM) + six.b(']')
    +    if len(data) <= minimum:
    +        num_pattern = re.compile(six.b('^') + num_pattern + six.b('+$'))
    +        alpha_pattern = re.compile(six.b('^') + alpha_pattern + six.b('+$'))
    +    else:
    +        re_repeat = (
    +            six.b('{') + six.text_type(minimum).encode('ascii') + six.b(',}'))
    +        num_pattern = re.compile(num_pattern + re_repeat)
    +        alpha_pattern = re.compile(alpha_pattern + re_repeat)
         num_bits = _optimal_split(data, num_pattern)
    -    alpha_pattern = re.compile(
    -        six.b('[') + re.escape(ALPHA_NUM) + six.b(']') + re_repeat)
         for is_num, chunk in num_bits:
             if is_num:
                 yield QRData(chunk, mode=MODE_NUMBER, check_data=False)
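
The anchored patterns matter for inputs shorter than `minimum`: previously a
three-digit payload could not satisfy the `{4,}` repetition and fell back to
byte mode. A sketch of the fixed behaviour:

    from qrcode.util import optimal_data_chunks, MODE_NUMBER

    chunks = list(optimal_data_chunks(b'123'))   # shorter than minimum=4
    assert chunks[0].mode == MODE_NUMBER         # still encodes in numeric mode
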
    diff --git a/server/www/packages/packages-windows/x86/six.py b/server/www/packages/packages-windows/x86/six.py
    index 6bf4fd3..89b2188 100644
    --- a/server/www/packages/packages-windows/x86/six.py
    +++ b/server/www/packages/packages-windows/x86/six.py
    @@ -1,4 +1,4 @@
    -# Copyright (c) 2010-2017 Benjamin Peterson
    +# Copyright (c) 2010-2018 Benjamin Peterson
     #
     # Permission is hereby granted, free of charge, to any person obtaining a copy
     # of this software and associated documentation files (the "Software"), to deal
    @@ -29,7 +29,7 @@ import sys
     import types
     
      __author__ = "Benjamin Peterson <benjamin@python.org>"
    -__version__ = "1.11.0"
    +__version__ = "1.12.0"
     
     
     # Useful for very coarse version differentiation.
    @@ -844,10 +844,71 @@ def add_metaclass(metaclass):
                     orig_vars.pop(slots_var)
             orig_vars.pop('__dict__', None)
             orig_vars.pop('__weakref__', None)
    +        if hasattr(cls, '__qualname__'):
    +            orig_vars['__qualname__'] = cls.__qualname__
             return metaclass(cls.__name__, cls.__bases__, orig_vars)
         return wrapper
     
     
    +def ensure_binary(s, encoding='utf-8', errors='strict'):
    +    """Coerce **s** to six.binary_type.
    +
    +    For Python 2:
    +      - `unicode` -> encoded to `str`
    +      - `str` -> `str`
    +
    +    For Python 3:
    +      - `str` -> encoded to `bytes`
    +      - `bytes` -> `bytes`
    +    """
    +    if isinstance(s, text_type):
    +        return s.encode(encoding, errors)
    +    elif isinstance(s, binary_type):
    +        return s
    +    else:
    +        raise TypeError("not expecting type '%s'" % type(s))
    +
    +
    +def ensure_str(s, encoding='utf-8', errors='strict'):
    +    """Coerce *s* to `str`.
    +
    +    For Python 2:
    +      - `unicode` -> encoded to `str`
    +      - `str` -> `str`
    +
    +    For Python 3:
    +      - `str` -> `str`
    +      - `bytes` -> decoded to `str`
    +    """
    +    if not isinstance(s, (text_type, binary_type)):
    +        raise TypeError("not expecting type '%s'" % type(s))
    +    if PY2 and isinstance(s, text_type):
    +        s = s.encode(encoding, errors)
    +    elif PY3 and isinstance(s, binary_type):
    +        s = s.decode(encoding, errors)
    +    return s
    +
    +
    +def ensure_text(s, encoding='utf-8', errors='strict'):
    +    """Coerce *s* to six.text_type.
    +
    +    For Python 2:
    +      - `unicode` -> `unicode`
    +      - `str` -> `unicode`
    +
    +    For Python 3:
    +      - `str` -> `str`
    +      - `bytes` -> decoded to `str`
    +    """
    +    if isinstance(s, binary_type):
    +        return s.decode(encoding, errors)
    +    elif isinstance(s, text_type):
    +        return s
    +    else:
    +        raise TypeError("not expecting type '%s'" % type(s))
    +
    +
    +
     def python_2_unicode_compatible(klass):
         """
         A decorator that defines __unicode__ and __str__ methods under Python 2.
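
The new ensure_* trio gives one spelling for str/bytes coercion on both major
Python versions. A quick sketch:

    import six

    assert six.ensure_binary(u'h\u00e9llo') == b'h\xc3\xa9llo'
    assert six.ensure_text(b'h\xc3\xa9llo') == u'h\u00e9llo'
    assert six.ensure_str(b'abc') == 'abc'   # a native str on both 2 and 3
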
    diff --git a/server/www/packages/packages-windows/x86/tornado/__init__.py b/server/www/packages/packages-windows/x86/tornado/__init__.py
    index b269cf7..8fb846f 100644
    --- a/server/www/packages/packages-windows/x86/tornado/__init__.py
    +++ b/server/www/packages/packages-windows/x86/tornado/__init__.py
    @@ -15,8 +15,6 @@
     
     """The Tornado web server and tools."""
     
    -from __future__ import absolute_import, division, print_function
    -
     # version is a human-readable version number.
     
     # version_info is a four-tuple for programmatic comparison. The first
    @@ -24,5 +22,5 @@ from __future__ import absolute_import, division, print_function
     # is zero for an official release, positive for a development branch,
     # or negative for a release candidate or beta (after the base version
     # number has been incremented)
    -version = "5.1.1"
    -version_info = (5, 1, 1, 0)
    +version = "6.0.4"
    +version_info = (6, 0, 4, 0)
    diff --git a/server/www/packages/packages-windows/x86/tornado/_locale_data.py b/server/www/packages/packages-windows/x86/tornado/_locale_data.py
    index a2c5039..91416d9 100644
    --- a/server/www/packages/packages-windows/x86/tornado/_locale_data.py
    +++ b/server/www/packages/packages-windows/x86/tornado/_locale_data.py
    @@ -16,8 +16,6 @@
     
     """Data used by the tornado.locale module."""
     
    -from __future__ import absolute_import, division, print_function
    -
     LOCALE_NAMES = {
         "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
         "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"},
    diff --git a/server/www/packages/packages-windows/x86/tornado/auth.py b/server/www/packages/packages-windows/x86/tornado/auth.py
    index b79ad14..db6d290 100644
    --- a/server/www/packages/packages-windows/x86/tornado/auth.py
    +++ b/server/www/packages/packages-windows/x86/tornado/auth.py
    @@ -54,93 +54,29 @@ Example usage for Google OAuth:
     .. testoutput::
        :hide:
     
    -
    -.. versionchanged:: 4.0
    -   All of the callback interfaces in this module are now guaranteed
    -   to run their callback with an argument of ``None`` on error.
    -   Previously some functions would do this while others would simply
    -   terminate the request on their own.  This change also ensures that
    -   errors are more consistently reported through the ``Future`` interfaces.
     """
     
    -from __future__ import absolute_import, division, print_function
    -
     import base64
     import binascii
    -import functools
     import hashlib
     import hmac
     import time
    +import urllib.parse
     import uuid
    -import warnings
     
    -from tornado.concurrent import (Future, _non_deprecated_return_future,
    -                                future_set_exc_info, chain_future,
    -                                future_set_result_unless_cancelled)
    -from tornado import gen
     from tornado import httpclient
     from tornado import escape
     from tornado.httputil import url_concat
    -from tornado.log import gen_log
    -from tornado.stack_context import ExceptionStackContext, wrap
    -from tornado.util import unicode_type, ArgReplacer, PY3
    +from tornado.util import unicode_type
    +from tornado.web import RequestHandler
     
    -if PY3:
    -    import urllib.parse as urlparse
    -    import urllib.parse as urllib_parse
    -    long = int
    -else:
    -    import urlparse
    -    import urllib as urllib_parse
    +from typing import List, Any, Dict, cast, Iterable, Union, Optional
     
     
     class AuthError(Exception):
         pass
     
     
    -def _auth_future_to_callback(callback, future):
    -    try:
    -        result = future.result()
    -    except AuthError as e:
    -        gen_log.warning(str(e))
    -        result = None
    -    callback(result)
    -
    -
    -def _auth_return_future(f):
    -    """Similar to tornado.concurrent.return_future, but uses the auth
    -    module's legacy callback interface.
    -
    -    Note that when using this decorator the ``callback`` parameter
    -    inside the function will actually be a future.
    -
    -    .. deprecated:: 5.1
    -       Will be removed in 6.0.
    -    """
    -    replacer = ArgReplacer(f, 'callback')
    -
    -    @functools.wraps(f)
    -    def wrapper(*args, **kwargs):
    -        future = Future()
    -        callback, args, kwargs = replacer.replace(future, args, kwargs)
    -        if callback is not None:
    -            warnings.warn("callback arguments are deprecated, use the returned Future instead",
    -                          DeprecationWarning)
    -            future.add_done_callback(
    -                wrap(functools.partial(_auth_future_to_callback, callback)))
    -
    -        def handle_exception(typ, value, tb):
    -            if future.done():
    -                return False
    -            else:
    -                future_set_exc_info(future, (typ, value, tb))
    -                return True
    -        with ExceptionStackContext(handle_exception, delay_warning=True):
    -            f(*args, **kwargs)
    -        return future
    -    return wrapper
    -
    -
     class OpenIdMixin(object):
         """Abstract implementation of OpenID and Attribute Exchange.
     
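
The hunk above removes the `_auth_return_future`/`_auth_future_to_callback` bridge outright, so handler code that passed a ``callback`` (or yielded the wrapped `Future`) has to await the mixin's native coroutines instead. A hedged before/after sketch (the handler, provider endpoint, and cookie name are hypothetical; `set_secure_cookie` assumes a configured ``cookie_secret``):

    import tornado.escape
    import tornado.web
    from tornado.auth import OpenIdMixin

    # Tornado 5.x allowed `user = yield self.get_authenticated_user()` or a
    # callback argument; under 6.x the method is a native coroutine.
    class LoginHandler(tornado.web.RequestHandler, OpenIdMixin):
        _OPENID_ENDPOINT = "https://example.com/openid"  # hypothetical provider

        async def get(self):
            if self.get_argument("openid.mode", None):
                user = await self.get_authenticated_user()
                self.set_secure_cookie("user", tornado.escape.json_encode(user))
                self.redirect("/")
            else:
                self.authenticate_redirect()  # now an ordinary synchronous call
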
    @@ -148,10 +84,12 @@ class OpenIdMixin(object):
     
         * ``_OPENID_ENDPOINT``: the identity provider's URI.
         """
    -    @_non_deprecated_return_future
    -    def authenticate_redirect(self, callback_uri=None,
    -                              ax_attrs=["name", "email", "language", "username"],
    -                              callback=None):
    +
    +    def authenticate_redirect(
    +        self,
    +        callback_uri: str = None,
    +        ax_attrs: List[str] = ["name", "email", "language", "username"],
    +    ) -> None:
             """Redirects to the authentication URL for this service.
     
             After authentication, the service will redirect back to the given
    @@ -162,24 +100,22 @@ class OpenIdMixin(object):
             all those attributes for your app, you can request fewer with
             the ax_attrs keyword argument.
     
    -        .. versionchanged:: 3.1
    -           Returns a `.Future` and takes an optional callback.  These are
    -           not strictly necessary as this method is synchronous,
    -           but they are supplied for consistency with
    -           `OAuthMixin.authorize_redirect`.
    +        .. versionchanged:: 6.0
     
    -        .. deprecated:: 5.1
    -
    -           The ``callback`` argument and returned awaitable will be removed
    -           in Tornado 6.0; this will be an ordinary synchronous function.
    +            The ``callback`` argument was removed and this method no
    +            longer returns an awaitable object. It is now an ordinary
    +            synchronous function.
             """
    -        callback_uri = callback_uri or self.request.uri
    +        handler = cast(RequestHandler, self)
    +        callback_uri = callback_uri or handler.request.uri
    +        assert callback_uri is not None
             args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
    -        self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
    -        callback()
    +        endpoint = self._OPENID_ENDPOINT  # type: ignore
    +        handler.redirect(endpoint + "?" + urllib.parse.urlencode(args))
     
    -    @_auth_return_future
    -    def get_authenticated_user(self, callback, http_client=None):
    +    async def get_authenticated_user(
    +        self, http_client: httpclient.AsyncHTTPClient = None
    +    ) -> Dict[str, Any]:
             """Fetches the authenticated user data upon redirect.
     
             This method should be called by the handler that receives the
    @@ -190,51 +126,57 @@ class OpenIdMixin(object):
     
             The result of this method will generally be used to set a cookie.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +            The ``callback`` argument was removed. Use the returned
    +            awaitable object instead.
             """
    +        handler = cast(RequestHandler, self)
             # Verify the OpenID response via direct request to the OP
    -        args = dict((k, v[-1]) for k, v in self.request.arguments.items())
    +        args = dict(
    +            (k, v[-1]) for k, v in handler.request.arguments.items()
    +        )  # type: Dict[str, Union[str, bytes]]
             args["openid.mode"] = u"check_authentication"
    -        url = self._OPENID_ENDPOINT
    +        url = self._OPENID_ENDPOINT  # type: ignore
             if http_client is None:
                 http_client = self.get_auth_http_client()
    -        fut = http_client.fetch(url, method="POST", body=urllib_parse.urlencode(args))
    -        fut.add_done_callback(wrap(functools.partial(
    -            self._on_authentication_verified, callback)))
    +        resp = await http_client.fetch(
    +            url, method="POST", body=urllib.parse.urlencode(args)
    +        )
    +        return self._on_authentication_verified(resp)
     
    -    def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
    -        url = urlparse.urljoin(self.request.full_url(), callback_uri)
    +    def _openid_args(
    +        self, callback_uri: str, ax_attrs: Iterable[str] = [], oauth_scope: str = None
    +    ) -> Dict[str, str]:
    +        handler = cast(RequestHandler, self)
    +        url = urllib.parse.urljoin(handler.request.full_url(), callback_uri)
             args = {
                 "openid.ns": "http://specs.openid.net/auth/2.0",
    -            "openid.claimed_id":
    -            "http://specs.openid.net/auth/2.0/identifier_select",
    -            "openid.identity":
    -            "http://specs.openid.net/auth/2.0/identifier_select",
    +            "openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select",
    +            "openid.identity": "http://specs.openid.net/auth/2.0/identifier_select",
                 "openid.return_to": url,
    -            "openid.realm": urlparse.urljoin(url, '/'),
    +            "openid.realm": urllib.parse.urljoin(url, "/"),
                 "openid.mode": "checkid_setup",
             }
             if ax_attrs:
    -            args.update({
    -                "openid.ns.ax": "http://openid.net/srv/ax/1.0",
    -                "openid.ax.mode": "fetch_request",
    -            })
    +            args.update(
    +                {
    +                    "openid.ns.ax": "http://openid.net/srv/ax/1.0",
    +                    "openid.ax.mode": "fetch_request",
    +                }
    +            )
                 ax_attrs = set(ax_attrs)
    -            required = []
    +            required = []  # type: List[str]
                 if "name" in ax_attrs:
                     ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
                     required += ["firstname", "fullname", "lastname"]
    -                args.update({
    -                    "openid.ax.type.firstname":
    -                    "http://axschema.org/namePerson/first",
    -                    "openid.ax.type.fullname":
    -                    "http://axschema.org/namePerson",
    -                    "openid.ax.type.lastname":
    -                    "http://axschema.org/namePerson/last",
    -                })
    +                args.update(
    +                    {
    +                        "openid.ax.type.firstname": "http://axschema.org/namePerson/first",
    +                        "openid.ax.type.fullname": "http://axschema.org/namePerson",
    +                        "openid.ax.type.lastname": "http://axschema.org/namePerson/last",
    +                    }
    +                )
                 known_attrs = {
                     "email": "http://axschema.org/contact/email",
                     "language": "http://axschema.org/pref/language",
    @@ -245,47 +187,45 @@ class OpenIdMixin(object):
                     required.append(name)
                 args["openid.ax.required"] = ",".join(required)
             if oauth_scope:
    -            args.update({
    -                "openid.ns.oauth":
    -                "http://specs.openid.net/extensions/oauth/1.0",
    -                "openid.oauth.consumer": self.request.host.split(":")[0],
    -                "openid.oauth.scope": oauth_scope,
    -            })
    +            args.update(
    +                {
    +                    "openid.ns.oauth": "http://specs.openid.net/extensions/oauth/1.0",
    +                    "openid.oauth.consumer": handler.request.host.split(":")[0],
    +                    "openid.oauth.scope": oauth_scope,
    +                }
    +            )
             return args
     
    -    def _on_authentication_verified(self, future, response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            future.set_exception(AuthError(
    -                "Error response %s" % e))
    -            return
    +    def _on_authentication_verified(
    +        self, response: httpclient.HTTPResponse
    +    ) -> Dict[str, Any]:
    +        handler = cast(RequestHandler, self)
             if b"is_valid:true" not in response.body:
    -            future.set_exception(AuthError(
    -                "Invalid OpenID response: %s" % response.body))
    -            return
    +            raise AuthError("Invalid OpenID response: %s" % response.body)
     
             # Make sure we got back at least an email from attribute exchange
             ax_ns = None
    -        for name in self.request.arguments:
    -            if name.startswith("openid.ns.") and \
    -                    self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
    -                ax_ns = name[10:]
    +        for key in handler.request.arguments:
    +            if (
    +                key.startswith("openid.ns.")
    +                and handler.get_argument(key) == u"http://openid.net/srv/ax/1.0"
    +            ):
    +                ax_ns = key[10:]
                     break
     
    -        def get_ax_arg(uri):
    +        def get_ax_arg(uri: str) -> str:
                 if not ax_ns:
                     return u""
                 prefix = "openid." + ax_ns + ".type."
                 ax_name = None
    -            for name in self.request.arguments.keys():
    -                if self.get_argument(name) == uri and name.startswith(prefix):
    -                    part = name[len(prefix):]
    +            for name in handler.request.arguments.keys():
    +                if handler.get_argument(name) == uri and name.startswith(prefix):
    +                    part = name[len(prefix) :]
                         ax_name = "openid." + ax_ns + ".value." + part
                         break
                 if not ax_name:
                     return u""
    -            return self.get_argument(ax_name, u"")
    +            return handler.get_argument(ax_name, u"")
     
             email = get_ax_arg("http://axschema.org/contact/email")
             name = get_ax_arg("http://axschema.org/namePerson")
    @@ -313,12 +253,12 @@ class OpenIdMixin(object):
                 user["locale"] = locale
             if username:
                 user["username"] = username
    -        claimed_id = self.get_argument("openid.claimed_id", None)
    +        claimed_id = handler.get_argument("openid.claimed_id", None)
             if claimed_id:
                 user["claimed_id"] = claimed_id
    -        future_set_result_unless_cancelled(future, user)
    +        return user
     
    -    def get_auth_http_client(self):
    +    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
             """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
     
             May be overridden by subclasses to use an HTTP client other than
    @@ -343,9 +283,13 @@ class OAuthMixin(object):
         Subclasses must also override the `_oauth_get_user_future` and
         `_oauth_consumer_token` methods.
         """
    -    @_non_deprecated_return_future
    -    def authorize_redirect(self, callback_uri=None, extra_params=None,
    -                           http_client=None, callback=None):
    +
    +    async def authorize_redirect(
    +        self,
    +        callback_uri: str = None,
    +        extra_params: Dict[str, Any] = None,
    +        http_client: httpclient.AsyncHTTPClient = None,
    +    ) -> None:
             """Redirects the user to obtain OAuth authorization for this service.
     
             The ``callback_uri`` may be omitted if you have previously
    @@ -367,35 +311,31 @@ class OAuthMixin(object):
                Now returns a `.Future` and takes an optional callback, for
                compatibility with `.gen.coroutine`.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned
    +           awaitable object instead.
     
             """
             if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
                 raise Exception("This service does not support oauth_callback")
             if http_client is None:
                 http_client = self.get_auth_http_client()
    +        assert http_client is not None
             if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
    -            fut = http_client.fetch(
    -                self._oauth_request_token_url(callback_uri=callback_uri,
    -                                              extra_params=extra_params))
    -            fut.add_done_callback(wrap(functools.partial(
    -                self._on_request_token,
    -                self._OAUTH_AUTHORIZE_URL,
    -                callback_uri,
    -                callback)))
    +            response = await http_client.fetch(
    +                self._oauth_request_token_url(
    +                    callback_uri=callback_uri, extra_params=extra_params
    +                )
    +            )
             else:
    -            fut = http_client.fetch(self._oauth_request_token_url())
    -            fut.add_done_callback(
    -                wrap(functools.partial(
    -                    self._on_request_token, self._OAUTH_AUTHORIZE_URL,
    -                    callback_uri,
    -                    callback)))
    +            response = await http_client.fetch(self._oauth_request_token_url())
    +        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
    +        self._on_request_token(url, callback_uri, response)
     
    -    @_auth_return_future
    -    def get_authenticated_user(self, callback, http_client=None):
    +    async def get_authenticated_user(
    +        self, http_client: httpclient.AsyncHTTPClient = None
    +    ) -> Dict[str, Any]:
             """Gets the OAuth authorized user and access token.
     
             This method should be called from the handler for your
    @@ -406,37 +346,45 @@ class OAuthMixin(object):
             also contain other fields such as ``name``, depending on the service
             used.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned
    +           awaitable object instead.
             """
    -        future = callback
    -        request_key = escape.utf8(self.get_argument("oauth_token"))
    -        oauth_verifier = self.get_argument("oauth_verifier", None)
    -        request_cookie = self.get_cookie("_oauth_request_token")
    +        handler = cast(RequestHandler, self)
    +        request_key = escape.utf8(handler.get_argument("oauth_token"))
    +        oauth_verifier = handler.get_argument("oauth_verifier", None)
    +        request_cookie = handler.get_cookie("_oauth_request_token")
             if not request_cookie:
    -            future.set_exception(AuthError(
    -                "Missing OAuth request token cookie"))
    -            return
    -        self.clear_cookie("_oauth_request_token")
    +            raise AuthError("Missing OAuth request token cookie")
    +        handler.clear_cookie("_oauth_request_token")
             cookie_key, cookie_secret = [
    -            base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
    +            base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")
    +        ]
             if cookie_key != request_key:
    -            future.set_exception(AuthError(
    -                "Request token does not match cookie"))
    -            return
    -        token = dict(key=cookie_key, secret=cookie_secret)
    +            raise AuthError("Request token does not match cookie")
    +        token = dict(
    +            key=cookie_key, secret=cookie_secret
    +        )  # type: Dict[str, Union[str, bytes]]
             if oauth_verifier:
                 token["verifier"] = oauth_verifier
             if http_client is None:
                 http_client = self.get_auth_http_client()
    -        fut = http_client.fetch(self._oauth_access_token_url(token))
    -        fut.add_done_callback(wrap(functools.partial(self._on_access_token, callback)))
    +        assert http_client is not None
    +        response = await http_client.fetch(self._oauth_access_token_url(token))
    +        access_token = _oauth_parse_response(response.body)
    +        user = await self._oauth_get_user_future(access_token)
    +        if not user:
    +            raise AuthError("Error getting user")
    +        user["access_token"] = access_token
    +        return user
     
    -    def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
    +    def _oauth_request_token_url(
    +        self, callback_uri: str = None, extra_params: Dict[str, Any] = None
    +    ) -> str:
    +        handler = cast(RequestHandler, self)
             consumer_token = self._oauth_consumer_token()
    -        url = self._OAUTH_REQUEST_TOKEN_URL
    +        url = self._OAUTH_REQUEST_TOKEN_URL  # type: ignore
             args = dict(
                 oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
                 oauth_signature_method="HMAC-SHA1",
    @@ -448,8 +396,9 @@ class OAuthMixin(object):
                 if callback_uri == "oob":
                     args["oauth_callback"] = "oob"
                 elif callback_uri:
    -                args["oauth_callback"] = urlparse.urljoin(
    -                    self.request.full_url(), callback_uri)
    +                args["oauth_callback"] = urllib.parse.urljoin(
    +                    handler.request.full_url(), callback_uri
    +                )
                 if extra_params:
                     args.update(extra_params)
                 signature = _oauth10a_signature(consumer_token, "GET", url, args)
    @@ -457,32 +406,35 @@ class OAuthMixin(object):
                 signature = _oauth_signature(consumer_token, "GET", url, args)
     
             args["oauth_signature"] = signature
    -        return url + "?" + urllib_parse.urlencode(args)
    +        return url + "?" + urllib.parse.urlencode(args)
     
    -    def _on_request_token(self, authorize_url, callback_uri, callback,
    -                          response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            raise Exception("Could not get request token: %s" % e)
    +    def _on_request_token(
    +        self,
    +        authorize_url: str,
    +        callback_uri: Optional[str],
    +        response: httpclient.HTTPResponse,
    +    ) -> None:
    +        handler = cast(RequestHandler, self)
             request_token = _oauth_parse_response(response.body)
    -        data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" +
    -                base64.b64encode(escape.utf8(request_token["secret"])))
    -        self.set_cookie("_oauth_request_token", data)
    +        data = (
    +            base64.b64encode(escape.utf8(request_token["key"]))
    +            + b"|"
    +            + base64.b64encode(escape.utf8(request_token["secret"]))
    +        )
    +        handler.set_cookie("_oauth_request_token", data)
             args = dict(oauth_token=request_token["key"])
             if callback_uri == "oob":
    -            self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
    -            callback()
    +            handler.finish(authorize_url + "?" + urllib.parse.urlencode(args))
                 return
             elif callback_uri:
    -            args["oauth_callback"] = urlparse.urljoin(
    -                self.request.full_url(), callback_uri)
    -        self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
    -        callback()
    +            args["oauth_callback"] = urllib.parse.urljoin(
    +                handler.request.full_url(), callback_uri
    +            )
    +        handler.redirect(authorize_url + "?" + urllib.parse.urlencode(args))
     
    -    def _oauth_access_token_url(self, request_token):
    +    def _oauth_access_token_url(self, request_token: Dict[str, Any]) -> str:
             consumer_token = self._oauth_consumer_token()
    -        url = self._OAUTH_ACCESS_TOKEN_URL
    +        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
             args = dict(
                 oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
                 oauth_token=escape.to_basestring(request_token["key"]),
    @@ -495,41 +447,31 @@ class OAuthMixin(object):
                 args["oauth_verifier"] = request_token["verifier"]
     
             if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
    -            signature = _oauth10a_signature(consumer_token, "GET", url, args,
    -                                            request_token)
    +            signature = _oauth10a_signature(
    +                consumer_token, "GET", url, args, request_token
    +            )
             else:
    -            signature = _oauth_signature(consumer_token, "GET", url, args,
    -                                         request_token)
    +            signature = _oauth_signature(
    +                consumer_token, "GET", url, args, request_token
    +            )
     
             args["oauth_signature"] = signature
    -        return url + "?" + urllib_parse.urlencode(args)
    +        return url + "?" + urllib.parse.urlencode(args)
     
    -    def _on_access_token(self, future, response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception:
    -            future.set_exception(AuthError("Could not fetch access token"))
    -            return
    -
    -        access_token = _oauth_parse_response(response.body)
    -        fut = self._oauth_get_user_future(access_token)
    -        fut = gen.convert_yielded(fut)
    -        fut.add_done_callback(
    -            wrap(functools.partial(self._on_oauth_get_user, access_token, future)))
    -
    -    def _oauth_consumer_token(self):
    +    def _oauth_consumer_token(self) -> Dict[str, Any]:
             """Subclasses must override this to return their OAuth consumer keys.
     
             The return value should be a `dict` with keys ``key`` and ``secret``.
             """
             raise NotImplementedError()
     
    -    @_non_deprecated_return_future
    -    def _oauth_get_user_future(self, access_token, callback):
    +    async def _oauth_get_user_future(
    +        self, access_token: Dict[str, Any]
    +    ) -> Dict[str, Any]:
             """Subclasses must override this to get basic information about the
             user.
     
    -        Should return a `.Future` whose result is a dictionary
    +        Should be a coroutine whose result is a dictionary
             containing information about the user, which may have been
             retrieved by using ``access_token`` to make a request to the
             service.
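
Under the 6.0 contract described above, the coroutine override is the only supported hook. A sketch of a conforming subclass (endpoint URLs and credentials are placeholders, and the user dict is canned to keep the sketch self-contained):

    from tornado.auth import OAuthMixin

    class ExampleServiceMixin(OAuthMixin):
        _OAUTH_REQUEST_TOKEN_URL = "https://example.com/oauth/request_token"
        _OAUTH_ACCESS_TOKEN_URL = "https://example.com/oauth/access_token"
        _OAUTH_AUTHORIZE_URL = "https://example.com/oauth/authorize"

        def _oauth_consumer_token(self):
            return dict(key="consumer-key", secret="consumer-secret")

        async def _oauth_get_user_future(self, access_token):
            # a real service would fetch a profile endpoint here, signing the
            # request with _oauth_request_parameters()
            return {"username": "example-user"}
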
    @@ -537,40 +479,23 @@ class OAuthMixin(object):
             The access token will be added to the returned dictionary to make
             the result of `get_authenticated_user`.
     
    -        For backwards compatibility, the callback-based ``_oauth_get_user``
    -        method is also supported.
    -
             .. versionchanged:: 5.1
     
                Subclasses may also define this method with ``async def``.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``_oauth_get_user`` fallback is deprecated and support for it
    -           will be removed in 6.0.
    +           A synchronous fallback to ``_oauth_get_user`` was removed.
             """
    -        warnings.warn("_oauth_get_user is deprecated, override _oauth_get_user_future instead",
    -                      DeprecationWarning)
    -        # By default, call the old-style _oauth_get_user, but new code
    -        # should override this method instead.
    -        self._oauth_get_user(access_token, callback)
    -
    -    def _oauth_get_user(self, access_token, callback):
             raise NotImplementedError()
     
    -    def _on_oauth_get_user(self, access_token, future, user_future):
    -        if user_future.exception() is not None:
    -            future.set_exception(user_future.exception())
    -            return
    -        user = user_future.result()
    -        if not user:
    -            future.set_exception(AuthError("Error getting user"))
    -            return
    -        user["access_token"] = access_token
    -        future_set_result_unless_cancelled(future, user)
    -
    -    def _oauth_request_parameters(self, url, access_token, parameters={},
    -                                  method="GET"):
    +    def _oauth_request_parameters(
    +        self,
    +        url: str,
    +        access_token: Dict[str, Any],
    +        parameters: Dict[str, Any] = {},
    +        method: str = "GET",
    +    ) -> Dict[str, Any]:
             """Returns the OAuth parameters as a dict for the given request.
     
             parameters should include all POST arguments and query string arguments
    @@ -589,15 +514,17 @@ class OAuthMixin(object):
             args.update(base_args)
             args.update(parameters)
             if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
    -            signature = _oauth10a_signature(consumer_token, method, url, args,
    -                                            access_token)
    +            signature = _oauth10a_signature(
    +                consumer_token, method, url, args, access_token
    +            )
             else:
    -            signature = _oauth_signature(consumer_token, method, url, args,
    -                                         access_token)
    +            signature = _oauth_signature(
    +                consumer_token, method, url, args, access_token
    +            )
             base_args["oauth_signature"] = escape.to_basestring(signature)
             return base_args
     
    -    def get_auth_http_client(self):
    +    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
             """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
     
             May be overridden by subclasses to use an HTTP client other than
    @@ -617,10 +544,16 @@ class OAuth2Mixin(object):
         * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url.
         * ``_OAUTH_ACCESS_TOKEN_URL``:  The service's access token url.
         """
    -    @_non_deprecated_return_future
    -    def authorize_redirect(self, redirect_uri=None, client_id=None,
    -                           client_secret=None, extra_params=None,
    -                           callback=None, scope=None, response_type="code"):
    +
    +    def authorize_redirect(
    +        self,
    +        redirect_uri: str = None,
    +        client_id: str = None,
    +        client_secret: str = None,
    +        extra_params: Dict[str, Any] = None,
    +        scope: str = None,
    +        response_type: str = "code",
    +    ) -> None:
             """Redirects the user to obtain OAuth authorization for this service.
     
             Some providers require that you register a redirect URL with
    @@ -629,47 +562,53 @@ class OAuth2Mixin(object):
             ``get_authenticated_user`` in the handler for your
             redirect URL to complete the authorization process.
     
    -        .. versionchanged:: 3.1
    -           Returns a `.Future` and takes an optional callback.  These are
    -           not strictly necessary as this method is synchronous,
    -           but they are supplied for consistency with
    -           `OAuthMixin.authorize_redirect`.
    +        .. versionchanged:: 6.0
     
    -        .. deprecated:: 5.1
    -
    -           The ``callback`` argument and returned awaitable will be removed
    -           in Tornado 6.0; this will be an ordinary synchronous function.
    +           The ``callback`` argument and returned awaitable were removed;
    +           this is now an ordinary synchronous function.
             """
    -        args = {
    -            "redirect_uri": redirect_uri,
    -            "client_id": client_id,
    -            "response_type": response_type
    -        }
    +        handler = cast(RequestHandler, self)
    +        args = {"response_type": response_type}
    +        if redirect_uri is not None:
    +            args["redirect_uri"] = redirect_uri
    +        if client_id is not None:
    +            args["client_id"] = client_id
             if extra_params:
                 args.update(extra_params)
             if scope:
    -            args['scope'] = ' '.join(scope)
    -        self.redirect(
    -            url_concat(self._OAUTH_AUTHORIZE_URL, args))
    -        callback()
    +            args["scope"] = " ".join(scope)
    +        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
    +        handler.redirect(url_concat(url, args))
     
    -    def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
    -                                 client_secret=None, code=None,
    -                                 extra_params=None):
    -        url = self._OAUTH_ACCESS_TOKEN_URL
    -        args = dict(
    -            redirect_uri=redirect_uri,
    -            code=code,
    -            client_id=client_id,
    -            client_secret=client_secret,
    -        )
    +    def _oauth_request_token_url(
    +        self,
    +        redirect_uri: str = None,
    +        client_id: str = None,
    +        client_secret: str = None,
    +        code: str = None,
    +        extra_params: Dict[str, Any] = None,
    +    ) -> str:
    +        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
    +        args = {}  # type: Dict[str, str]
    +        if redirect_uri is not None:
    +            args["redirect_uri"] = redirect_uri
    +        if code is not None:
    +            args["code"] = code
    +        if client_id is not None:
    +            args["client_id"] = client_id
    +        if client_secret is not None:
    +            args["client_secret"] = client_secret
             if extra_params:
                 args.update(extra_params)
             return url_concat(url, args)
     
    -    @_auth_return_future
    -    def oauth2_request(self, url, callback, access_token=None,
    -                       post_args=None, **args):
    +    async def oauth2_request(
    +        self,
    +        url: str,
    +        access_token: str = None,
    +        post_args: Dict[str, Any] = None,
    +        **args: Any
    +    ) -> Any:
             """Fetches the given URL auth an OAuth2 access token.
     
             If the request is a POST, ``post_args`` should be provided. Query
    @@ -699,10 +638,9 @@ class OAuth2Mixin(object):
     
             .. versionadded:: 4.3
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned awaitable object instead.
             """
             all_args = {}
             if access_token:
    @@ -710,25 +648,17 @@ class OAuth2Mixin(object):
                 all_args.update(args)
     
             if all_args:
    -            url += "?" + urllib_parse.urlencode(all_args)
    -        callback = wrap(functools.partial(self._on_oauth2_request, callback))
    +            url += "?" + urllib.parse.urlencode(all_args)
             http = self.get_auth_http_client()
             if post_args is not None:
    -            fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args))
    +            response = await http.fetch(
    +                url, method="POST", body=urllib.parse.urlencode(post_args)
    +            )
             else:
    -            fut = http.fetch(url)
    -        fut.add_done_callback(callback)
    +            response = await http.fetch(url)
    +        return escape.json_decode(response.body)
     
    -    def _on_oauth2_request(self, future, response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            future.set_exception(AuthError("Error response %s" % e))
    -            return
    -
    -        future_set_result_unless_cancelled(future, escape.json_decode(response.body))
    -
    -    def get_auth_http_client(self):
    +    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
             """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
     
             May be overridden by subclasses to use an HTTP client other than
    @@ -771,6 +701,7 @@ class TwitterMixin(OAuthMixin):
         and all of the custom Twitter user attributes described at
         https://dev.twitter.com/docs/api/1.1/get/users/show
         """
    +
         _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
         _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
         _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize"
    @@ -778,8 +709,7 @@ class TwitterMixin(OAuthMixin):
         _OAUTH_NO_CALLBACKS = False
         _TWITTER_BASE_URL = "https://api.twitter.com/1.1"
     
    -    @_non_deprecated_return_future
    -    def authenticate_redirect(self, callback_uri=None, callback=None):
    +    async def authenticate_redirect(self, callback_uri: str = None) -> None:
             """Just like `~OAuthMixin.authorize_redirect`, but
             auto-redirects if authorized.
     
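
A sketch of the Twitter login flow against the signatures above: both `authenticate_redirect` and `get_authenticated_user` are awaited, and the application settings are assumed to contain ``twitter_consumer_key`` and ``twitter_consumer_secret``:

    import tornado.web
    from tornado.auth import TwitterMixin

    class TwitterLoginHandler(tornado.web.RequestHandler, TwitterMixin):
        async def get(self):
            if self.get_argument("oauth_token", None):
                user = await self.get_authenticated_user()
                self.finish("hello, " + user["username"])
            else:
                await self.authenticate_redirect()
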
    @@ -790,20 +720,24 @@ class TwitterMixin(OAuthMixin):
                Now returns a `.Future` and takes an optional callback, for
                compatibility with `.gen.coroutine`.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned
    +           awaitable object instead.
             """
             http = self.get_auth_http_client()
    -        fut = http.fetch(self._oauth_request_token_url(callback_uri=callback_uri))
    -        fut.add_done_callback(wrap(functools.partial(
    -            self._on_request_token, self._OAUTH_AUTHENTICATE_URL,
    -            None, callback)))
    +        response = await http.fetch(
    +            self._oauth_request_token_url(callback_uri=callback_uri)
    +        )
    +        self._on_request_token(self._OAUTH_AUTHENTICATE_URL, None, response)
     
    -    @_auth_return_future
    -    def twitter_request(self, path, callback=None, access_token=None,
    -                        post_args=None, **args):
    +    async def twitter_request(
    +        self,
    +        path: str,
    +        access_token: Dict[str, Any],
    +        post_args: Dict[str, Any] = None,
    +        **args: Any
    +    ) -> Any:
             """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``
     
             The path should not include the format or API version number.
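
Once authenticated, API calls go through the awaitable `twitter_request`; a sketch along the lines of the docstring's own example (it assumes the login handler stored ``access_token`` on the current user):

    import tornado.web
    from tornado.auth import TwitterMixin

    class UpdateHandler(tornado.web.RequestHandler, TwitterMixin):
        @tornado.web.authenticated
        async def get(self):
            new_entry = await self.twitter_request(
                "/statuses/update",
                post_args={"status": "Testing Tornado Web Server"},
                access_token=self.current_user["access_token"],
            )
            if not new_entry:
                # the call failed; perhaps a missing permission?
                await self.authorize_redirect()
                return
            self.finish("Posted a message!")
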
    @@ -840,12 +774,12 @@ class TwitterMixin(OAuthMixin):
             .. testoutput::
                :hide:
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned
    +           awaitable object instead.
             """
    -        if path.startswith('http:') or path.startswith('https:'):
    +        if path.startswith("http:") or path.startswith("https:"):
                 # Raw urls are useful for e.g. search which doesn't follow the
                 # usual pattern: http://search.twitter.com/search.json
                 url = path
    @@ -858,42 +792,38 @@ class TwitterMixin(OAuthMixin):
                 all_args.update(post_args or {})
                 method = "POST" if post_args is not None else "GET"
                 oauth = self._oauth_request_parameters(
    -                url, access_token, all_args, method=method)
    +                url, access_token, all_args, method=method
    +            )
                 args.update(oauth)
             if args:
    -            url += "?" + urllib_parse.urlencode(args)
    +            url += "?" + urllib.parse.urlencode(args)
             http = self.get_auth_http_client()
    -        http_callback = wrap(functools.partial(self._on_twitter_request, callback, url))
             if post_args is not None:
    -            fut = http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args))
    +            response = await http.fetch(
    +                url, method="POST", body=urllib.parse.urlencode(post_args)
    +            )
             else:
    -            fut = http.fetch(url)
    -        fut.add_done_callback(http_callback)
    +            response = await http.fetch(url)
    +        return escape.json_decode(response.body)
     
    -    def _on_twitter_request(self, future, url, response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            future.set_exception(AuthError(
    -                "Error response %s fetching %s" % (e, url)))
    -            return
    -        future_set_result_unless_cancelled(future, escape.json_decode(response.body))
    -
    -    def _oauth_consumer_token(self):
    -        self.require_setting("twitter_consumer_key", "Twitter OAuth")
    -        self.require_setting("twitter_consumer_secret", "Twitter OAuth")
    +    def _oauth_consumer_token(self) -> Dict[str, Any]:
    +        handler = cast(RequestHandler, self)
    +        handler.require_setting("twitter_consumer_key", "Twitter OAuth")
    +        handler.require_setting("twitter_consumer_secret", "Twitter OAuth")
             return dict(
    -            key=self.settings["twitter_consumer_key"],
    -            secret=self.settings["twitter_consumer_secret"])
    +            key=handler.settings["twitter_consumer_key"],
    +            secret=handler.settings["twitter_consumer_secret"],
    +        )
     
    -    @gen.coroutine
    -    def _oauth_get_user_future(self, access_token):
    -        user = yield self.twitter_request(
    -            "/account/verify_credentials",
    -            access_token=access_token)
    +    async def _oauth_get_user_future(
    +        self, access_token: Dict[str, Any]
    +    ) -> Dict[str, Any]:
    +        user = await self.twitter_request(
    +            "/account/verify_credentials", access_token=access_token
    +        )
             if user:
                 user["username"] = user["screen_name"]
    -        raise gen.Return(user)
    +        return user
     
     
     class GoogleOAuth2Mixin(OAuth2Mixin):
    @@ -910,18 +840,20 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
         * In the OAuth section of the page, select Create New Client ID.
         * Set the Redirect URI to point to your auth handler
         * Copy the "Client secret" and "Client ID" to the application settings as
    -      {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}}
    +      ``{"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}}``
     
         .. versionadded:: 3.2
         """
    +
         _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/v2/auth"
         _OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token"
         _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
         _OAUTH_NO_CALLBACKS = False
    -    _OAUTH_SETTINGS_KEY = 'google_oauth'
    +    _OAUTH_SETTINGS_KEY = "google_oauth"
     
    -    @_auth_return_future
    -    def get_authenticated_user(self, redirect_uri, code, callback):
    +    async def get_authenticated_user(
    +        self, redirect_uri: str, code: str
    +    ) -> Dict[str, Any]:
             """Handles the login for the Google user, returning an access token.
     
             The result is a dictionary containing an ``access_token`` field
    @@ -959,48 +891,47 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
             .. testoutput::
                :hide:
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned awaitable object instead.
             """  # noqa: E501
    +        handler = cast(RequestHandler, self)
             http = self.get_auth_http_client()
    -        body = urllib_parse.urlencode({
    -            "redirect_uri": redirect_uri,
    -            "code": code,
    -            "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'],
    -            "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'],
    -            "grant_type": "authorization_code",
    -        })
    +        body = urllib.parse.urlencode(
    +            {
    +                "redirect_uri": redirect_uri,
    +                "code": code,
    +                "client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"],
    +                "client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"],
    +                "grant_type": "authorization_code",
    +            }
    +        )
     
    -        fut = http.fetch(self._OAUTH_ACCESS_TOKEN_URL,
    -                         method="POST",
    -                         headers={'Content-Type': 'application/x-www-form-urlencoded'},
    -                         body=body)
    -        fut.add_done_callback(wrap(functools.partial(self._on_access_token, callback)))
    -
    -    def _on_access_token(self, future, response_fut):
    -        """Callback function for the exchange to the access token."""
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            future.set_exception(AuthError('Google auth error: %s' % str(e)))
    -            return
    -
    -        args = escape.json_decode(response.body)
    -        future_set_result_unless_cancelled(future, args)
    +        response = await http.fetch(
    +            self._OAUTH_ACCESS_TOKEN_URL,
    +            method="POST",
    +            headers={"Content-Type": "application/x-www-form-urlencoded"},
    +            body=body,
    +        )
    +        return escape.json_decode(response.body)
     
     
     class FacebookGraphMixin(OAuth2Mixin):
         """Facebook authentication using the new Graph API and OAuth2."""
    +
         _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
         _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?"
         _OAUTH_NO_CALLBACKS = False
         _FACEBOOK_BASE_URL = "https://graph.facebook.com"
     
    -    @_auth_return_future
    -    def get_authenticated_user(self, redirect_uri, client_id, client_secret,
    -                               code, callback, extra_fields=None):
    +    async def get_authenticated_user(
    +        self,
    +        redirect_uri: str,
    +        client_id: str,
    +        client_secret: str,
    +        code: str,
    +        extra_fields: Dict[str, Any] = None,
    +    ) -> Optional[Dict[str, Any]]:
             """Handles the login for the Facebook user, returning a user object.
     
             Example usage:
    @@ -1042,10 +973,9 @@ class FacebookGraphMixin(OAuth2Mixin):
                The ``session_expires`` field was updated to support changes made to the
                Facebook API in March 2017.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned awaitable object instead.
             """
             http = self.get_auth_http_client()
             args = {
    @@ -1055,42 +985,35 @@ class FacebookGraphMixin(OAuth2Mixin):
                 "client_secret": client_secret,
             }
     
    -        fields = set(['id', 'name', 'first_name', 'last_name',
    -                      'locale', 'picture', 'link'])
    +        fields = set(
    +            ["id", "name", "first_name", "last_name", "locale", "picture", "link"]
    +        )
             if extra_fields:
                 fields.update(extra_fields)
     
    -        fut = http.fetch(self._oauth_request_token_url(**args))
    -        fut.add_done_callback(wrap(functools.partial(self._on_access_token, redirect_uri, client_id,
    -                                                     client_secret, callback, fields)))
    -
    -    @gen.coroutine
    -    def _on_access_token(self, redirect_uri, client_id, client_secret,
    -                         future, fields, response_fut):
    -        try:
    -            response = response_fut.result()
    -        except Exception as e:
    -            future.set_exception(AuthError('Facebook auth error: %s' % str(e)))
    -            return
    -
    +        response = await http.fetch(
    +            self._oauth_request_token_url(**args)  # type: ignore
    +        )
             args = escape.json_decode(response.body)
             session = {
                 "access_token": args.get("access_token"),
    -            "expires_in": args.get("expires_in")
    +            "expires_in": args.get("expires_in"),
             }
    +        assert session["access_token"] is not None
     
    -        user = yield self.facebook_request(
    +        user = await self.facebook_request(
                 path="/me",
                 access_token=session["access_token"],
    -            appsecret_proof=hmac.new(key=client_secret.encode('utf8'),
    -                                     msg=session["access_token"].encode('utf8'),
    -                                     digestmod=hashlib.sha256).hexdigest(),
    -            fields=",".join(fields)
    +            appsecret_proof=hmac.new(
    +                key=client_secret.encode("utf8"),
    +                msg=session["access_token"].encode("utf8"),
    +                digestmod=hashlib.sha256,
    +            ).hexdigest(),
    +            fields=",".join(fields),
             )
     
             if user is None:
    -            future_set_result_unless_cancelled(future, None)
    -            return
    +            return None
     
             fieldmap = {}
             for field in fields:
    @@ -1100,13 +1023,21 @@ class FacebookGraphMixin(OAuth2Mixin):
             # older versions in which the server used url-encoding and
             # this code simply returned the string verbatim.
             # This should change in Tornado 5.0.
    -        fieldmap.update({"access_token": session["access_token"],
    -                         "session_expires": str(session.get("expires_in"))})
    -        future_set_result_unless_cancelled(future, fieldmap)
    +        fieldmap.update(
    +            {
    +                "access_token": session["access_token"],
    +                "session_expires": str(session.get("expires_in")),
    +            }
    +        )
    +        return fieldmap
     
    -    @_auth_return_future
    -    def facebook_request(self, path, callback, access_token=None,
    -                         post_args=None, **args):
    +    async def facebook_request(
    +        self,
    +        path: str,
    +        access_token: str = None,
    +        post_args: Dict[str, Any] = None,
    +        **args: Any
    +    ) -> Any:
             """Fetches the given relative API path, e.g., "/btaylor/picture"
     
             If the request is a POST, ``post_args`` should be provided. Query
    @@ -1153,35 +1084,39 @@ class FacebookGraphMixin(OAuth2Mixin):
             .. versionchanged:: 3.1
                Added the ability to override ``self._FACEBOOK_BASE_URL``.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in 6.0.
    -           Use the returned awaitable object instead.
    +           The ``callback`` argument was removed. Use the returned awaitable object instead.
             """
             url = self._FACEBOOK_BASE_URL + path
    -        # Thanks to the _auth_return_future decorator, our "callback"
    -        # argument is a Future, which we cannot pass as a callback to
    -        # oauth2_request. Instead, have oauth2_request return a
    -        # future and chain them together.
    -        oauth_future = self.oauth2_request(url, access_token=access_token,
    -                                           post_args=post_args, **args)
    -        chain_future(oauth_future, callback)
    +        return await self.oauth2_request(
    +            url, access_token=access_token, post_args=post_args, **args
    +        )
     
     
    -def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
    +def _oauth_signature(
    +    consumer_token: Dict[str, Any],
    +    method: str,
    +    url: str,
    +    parameters: Dict[str, Any] = {},
    +    token: Dict[str, Any] = None,
    +) -> bytes:
         """Calculates the HMAC-SHA1 OAuth signature for the given request.
     
         See http://oauth.net/core/1.0/#signing_process
         """
    -    parts = urlparse.urlparse(url)
    +    parts = urllib.parse.urlparse(url)
         scheme, netloc, path = parts[:3]
         normalized_url = scheme.lower() + "://" + netloc.lower() + path
     
         base_elems = []
         base_elems.append(method.upper())
         base_elems.append(normalized_url)
    -    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
    -                               for k, v in sorted(parameters.items())))
    +    base_elems.append(
    +        "&".join(
    +            "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items())
    +        )
    +    )
         base_string = "&".join(_oauth_escape(e) for e in base_elems)
     
         key_elems = [escape.utf8(consumer_token["secret"])]
    @@ -1192,42 +1127,53 @@ def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
         return binascii.b2a_base64(hash.digest())[:-1]
     
     
    -def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
    +def _oauth10a_signature(
    +    consumer_token: Dict[str, Any],
    +    method: str,
    +    url: str,
    +    parameters: Dict[str, Any] = {},
    +    token: Dict[str, Any] = None,
    +) -> bytes:
         """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
     
         See http://oauth.net/core/1.0a/#signing_process
         """
    -    parts = urlparse.urlparse(url)
    +    parts = urllib.parse.urlparse(url)
         scheme, netloc, path = parts[:3]
         normalized_url = scheme.lower() + "://" + netloc.lower() + path
     
         base_elems = []
         base_elems.append(method.upper())
         base_elems.append(normalized_url)
    -    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
    -                               for k, v in sorted(parameters.items())))
    +    base_elems.append(
    +        "&".join(
    +            "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items())
    +        )
    +    )
     
         base_string = "&".join(_oauth_escape(e) for e in base_elems)
    -    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
    -    key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
    +    key_elems = [escape.utf8(urllib.parse.quote(consumer_token["secret"], safe="~"))]
    +    key_elems.append(
    +        escape.utf8(urllib.parse.quote(token["secret"], safe="~") if token else "")
    +    )
         key = b"&".join(key_elems)
     
         hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
         return binascii.b2a_base64(hash.digest())[:-1]
     
     
    -def _oauth_escape(val):
    +def _oauth_escape(val: Union[str, bytes]) -> str:
         if isinstance(val, unicode_type):
             val = val.encode("utf-8")
    -    return urllib_parse.quote(val, safe="~")
    +    return urllib.parse.quote(val, safe="~")
     
     
    -def _oauth_parse_response(body):
    +def _oauth_parse_response(body: bytes) -> Dict[str, Any]:
         # I can't find an officially-defined encoding for oauth responses and
         # have never seen anyone use non-ascii.  Leave the response in a byte
         # string for python 2, and use utf8 on python 3.
    -    body = escape.native_str(body)
    -    p = urlparse.parse_qs(body, keep_blank_values=False)
    +    body_str = escape.native_str(body)
    +    p = urllib.parse.parse_qs(body_str, keep_blank_values=False)
         token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])
     
         # Add the extra parameters the Provider included to the token
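
The tail of `_oauth_parse_response` is cut off by the hunk, but the visible portion plus its trailing comment are enough for a small illustration of the result shape (sample values, not from any real provider):

    import urllib.parse

    body = b"oauth_token=abc123&oauth_token_secret=s3cr3t&screen_name=example"
    p = urllib.parse.parse_qs(body.decode("utf-8"), keep_blank_values=False)
    token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])
    # per the trailing comment above, extra provider parameters
    # (here screen_name) ride along in the returned dict
    token.update(
        (k, p[k][0]) for k in p if k not in ("oauth_token", "oauth_token_secret")
    )
    assert token == {"key": "abc123", "secret": "s3cr3t", "screen_name": "example"}
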
    diff --git a/server/www/packages/packages-windows/x86/tornado/autoreload.py b/server/www/packages/packages-windows/x86/tornado/autoreload.py
    index 7d69474..1c47aae 100644
    --- a/server/www/packages/packages-windows/x86/tornado/autoreload.py
    +++ b/server/www/packages/packages-windows/x86/tornado/autoreload.py
    @@ -33,9 +33,8 @@ This combination is encouraged as the wrapper catches syntax errors and
     other import-time failures, while debug mode catches changes once
     the server has started.
     
    -This module depends on `.IOLoop`, so it will not work in WSGI applications
    -and Google App Engine.  It also will not work correctly when `.HTTPServer`'s
    -multi-process mode is used.
    +This module will not work correctly when `.HTTPServer`'s multi-process
    +mode is used.
     
     Reloading loses any Python interpreter command-line arguments (e.g. ``-u``)
     because it re-executes Python using ``sys.executable`` and ``sys.argv``.
    @@ -44,8 +43,6 @@ incorrectly.
     
     """
     
    -from __future__ import absolute_import, division, print_function
    -
     import os
     import sys
     
    @@ -96,23 +93,29 @@ from tornado.util import exec_in
     try:
         import signal
     except ImportError:
    -    signal = None
    +    signal = None  # type: ignore
    +
    +import typing
    +from typing import Callable, Dict
    +
    +if typing.TYPE_CHECKING:
    +    from typing import List, Optional, Union  # noqa: F401
     
     # os.execv is broken on Windows and can't properly parse command line
     # arguments and executable name if they contain whitespaces. subprocess
     # fixes that behavior.
    -_has_execv = sys.platform != 'win32'
    +_has_execv = sys.platform != "win32"
     
     _watched_files = set()
     _reload_hooks = []
     _reload_attempted = False
     _io_loops = weakref.WeakKeyDictionary()  # type: ignore
     _autoreload_is_main = False
    -_original_argv = None
    +_original_argv = None  # type: Optional[List[str]]
     _original_spec = None
     
     
    -def start(check_time=500):
    +def start(check_time: int = 500) -> None:
         """Begins watching source files for changes.
     
         .. versionchanged:: 5.0
    @@ -124,13 +127,13 @@ def start(check_time=500):
         _io_loops[io_loop] = True
         if len(_io_loops) > 1:
             gen_log.warning("tornado.autoreload started more than once in the same process")
    -    modify_times = {}
    +    modify_times = {}  # type: Dict[str, float]
         callback = functools.partial(_reload_on_update, modify_times)
         scheduler = ioloop.PeriodicCallback(callback, check_time)
         scheduler.start()
     
     
    -def wait():
    +def wait() -> None:
         """Wait for a watched file to change, then restart the process.
     
         Intended to be used at the end of scripts like unit test runners,
    @@ -142,7 +145,7 @@ def wait():
         io_loop.start()
     
     
    -def watch(filename):
    +def watch(filename: str) -> None:
         """Add a file to the watch list.
     
         All imported modules are watched by default.
    @@ -150,7 +153,7 @@ def watch(filename):
         _watched_files.add(filename)
     
     
    -def add_reload_hook(fn):
    +def add_reload_hook(fn: Callable[[], None]) -> None:
         """Add a function to be called before reloading the process.
     
         Note that for open file and socket handles it is generally
    @@ -161,7 +164,7 @@ def add_reload_hook(fn):
         _reload_hooks.append(fn)
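
Taken together, start(), watch(), and add_reload_hook() above form the module's documented in-process API. A hedged usage sketch, where the watched file and the hook body are placeholders:

    import tornado.autoreload
    import tornado.ioloop

    tornado.autoreload.watch("settings.yaml")            # hypothetical extra file
    tornado.autoreload.add_reload_hook(lambda: print("about to reload"))
    tornado.autoreload.start()                           # polls every 500 ms by default
    tornado.ioloop.IOLoop.current().start()
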
     
     
    -def _reload_on_update(modify_times):
    +def _reload_on_update(modify_times: Dict[str, float]) -> None:
         if _reload_attempted:
             # We already tried to reload and it didn't work, so don't try again.
             return
    @@ -187,7 +190,7 @@ def _reload_on_update(modify_times):
             _check_file(modify_times, path)
     
     
    -def _check_file(modify_times, path):
    +def _check_file(modify_times: Dict[str, float], path: str) -> None:
         try:
             modified = os.stat(path).st_mtime
         except Exception:
    @@ -200,7 +203,7 @@ def _check_file(modify_times, path):
             _reload()
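
_check_file drives the whole reload decision: remember each path's st_mtime and treat any later change as a reason to restart. A self-contained sketch of that check with illustrative names (the real function calls _reload() instead of returning a flag):

    import os
    from typing import Dict

    def file_changed(modify_times: Dict[str, float], path: str) -> bool:
        try:
            modified = os.stat(path).st_mtime
        except Exception:
            return False                     # unreadable files are skipped
        if path not in modify_times:
            modify_times[path] = modified    # first sighting: record, don't reload
            return False
        return modify_times[path] != modified

    times = {}  # type: Dict[str, float]
    print(file_changed(times, __file__))     # False: just recorded
    print(file_changed(times, __file__))     # False until the file changes
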
     
     
    -def _reload():
    +def _reload() -> None:
         global _reload_attempted
         _reload_attempted = True
         for fn in _reload_hooks:
    @@ -218,19 +221,20 @@ def _reload():
         # sys.path[0] is an empty string and add the current directory to
         # $PYTHONPATH.
         if _autoreload_is_main:
    +        assert _original_argv is not None
             spec = _original_spec
             argv = _original_argv
         else:
    -        spec = getattr(sys.modules['__main__'], '__spec__', None)
    +        spec = getattr(sys.modules["__main__"], "__spec__", None)
             argv = sys.argv
         if spec:
    -        argv = ['-m', spec.name] + argv[1:]
    +        argv = ["-m", spec.name] + argv[1:]
         else:
    -        path_prefix = '.' + os.pathsep
    -        if (sys.path[0] == '' and
    -                not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
    -            os.environ["PYTHONPATH"] = (path_prefix +
    -                                        os.environ.get("PYTHONPATH", ""))
    +        path_prefix = "." + os.pathsep
    +        if sys.path[0] == "" and not os.environ.get("PYTHONPATH", "").startswith(
    +            path_prefix
    +        ):
    +            os.environ["PYTHONPATH"] = path_prefix + os.environ.get("PYTHONPATH", "")
         if not _has_execv:
             subprocess.Popen([sys.executable] + argv)
             os._exit(0)
    @@ -249,7 +253,9 @@ def _reload():
                 # Unfortunately the errno returned in this case does not
                 # appear to be consistent, so we can't easily check for
                 # this error specifically.
    -            os.spawnv(os.P_NOWAIT, sys.executable, [sys.executable] + argv)
    +            os.spawnv(  # type: ignore
    +                os.P_NOWAIT, sys.executable, [sys.executable] + argv
    +            )
                 # At this point the IOLoop has been closed and finally
                 # blocks will experience errors if we allow the stack to
                 # unwind, so just exit uncleanly.
    @@ -263,7 +269,7 @@ Usage:
     """
     
     
    -def main():
    +def main() -> None:
         """Command-line wrapper to re-run a script whenever its source changes.
     
         Scripts may be specified by filename or module name::
    @@ -280,12 +286,13 @@ def main():
         # The main module can be tricky; set the variables both in our globals
         # (which may be __main__) and the real importable version.
         import tornado.autoreload
    +
         global _autoreload_is_main
         global _original_argv, _original_spec
         tornado.autoreload._autoreload_is_main = _autoreload_is_main = True
         original_argv = sys.argv
         tornado.autoreload._original_argv = _original_argv = original_argv
    -    original_spec = getattr(sys.modules['__main__'], '__spec__', None)
    +    original_spec = getattr(sys.modules["__main__"], "__spec__", None)
         tornado.autoreload._original_spec = _original_spec = original_spec
         sys.argv = sys.argv[:]
         if len(sys.argv) >= 3 and sys.argv[1] == "-m":
    @@ -303,6 +310,7 @@ def main():
         try:
             if mode == "module":
                 import runpy
    +
                 runpy.run_module(module, run_name="__main__", alter_sys=True)
             elif mode == "script":
                 with open(script) as f:
    @@ -340,12 +348,12 @@ def main():
         # restore sys.argv so subsequent executions will include autoreload
         sys.argv = original_argv
     
    -    if mode == 'module':
    +    if mode == "module":
             # runpy did a fake import of the module as __main__, but now it's
             # no longer in sys.modules.  Figure out where it is and watch it.
             loader = pkgutil.get_loader(module)
             if loader is not None:
    -            watch(loader.get_filename())
    +            watch(loader.get_filename())  # type: ignore
     
         wait()
     
    diff --git a/server/www/packages/packages-windows/x86/tornado/concurrent.py b/server/www/packages/packages-windows/x86/tornado/concurrent.py
    index f7e6bcc..3a49940 100644
    --- a/server/www/packages/packages-windows/x86/tornado/concurrent.py
    +++ b/server/www/packages/packages-windows/x86/tornado/concurrent.py
    @@ -14,389 +14,64 @@
     # under the License.
     """Utilities for working with ``Future`` objects.
     
    -``Futures`` are a pattern for concurrent programming introduced in
    -Python 3.2 in the `concurrent.futures` package, and also adopted (in a
    -slightly different form) in Python 3.4's `asyncio` package. This
    -package defines a ``Future`` class that is an alias for `asyncio.Future`
    -when available, and a compatible implementation for older versions of
    -Python. It also includes some utility functions for interacting with
    -``Future`` objects.
    +Tornado previously provided its own ``Future`` class, but now uses
    +`asyncio.Future`. This module contains utility functions for working
    +with `asyncio.Future` in a way that is backwards-compatible with
    +Tornado's old ``Future`` implementation.
     
    -While this package is an important part of Tornado's internal
    +While this module is an important part of Tornado's internal
     implementation, applications rarely need to interact with it
     directly.
    -"""
    -from __future__ import absolute_import, division, print_function
     
    +"""
    +
    +import asyncio
    +from concurrent import futures
     import functools
    -import platform
    -import textwrap
    -import traceback
     import sys
    -import warnings
    +import types
     
     from tornado.log import app_log
    -from tornado.stack_context import ExceptionStackContext, wrap
    -from tornado.util import raise_exc_info, ArgReplacer, is_finalizing
     
    -try:
    -    from concurrent import futures
    -except ImportError:
    -    futures = None
    +import typing
    +from typing import Any, Callable, Optional, Tuple, Union
     
    -try:
    -    import asyncio
    -except ImportError:
    -    asyncio = None
    -
    -try:
    -    import typing
    -except ImportError:
    -    typing = None
    -
    -
    -# Can the garbage collector handle cycles that include __del__ methods?
    -# This is true in cpython beginning with version 3.4 (PEP 442).
    -_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
    -                        sys.version_info >= (3, 4))
    +_T = typing.TypeVar("_T")
     
     
     class ReturnValueIgnoredError(Exception):
    +    # No longer used; was previously used by @return_future
         pass
     
    -# This class and associated code in the future object is derived
    -# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
    +
    +Future = asyncio.Future
    +
    +FUTURES = (futures.Future, Future)
     
     
    -class _TracebackLogger(object):
    -    """Helper to log a traceback upon destruction if not cleared.
    -
    -    This solves a nasty problem with Futures and Tasks that have an
    -    exception set: if nobody asks for the exception, the exception is
    -    never logged.  This violates the Zen of Python: 'Errors should
    -    never pass silently.  Unless explicitly silenced.'
    -
    -    However, we don't want to log the exception as soon as
    -    set_exception() is called: if the calling code is written
    -    properly, it will get the exception and handle it properly.  But
    -    we *do* want to log it if result() or exception() was never called
    -    -- otherwise developers waste a lot of time wondering why their
    -    buggy code fails silently.
    -
    -    An earlier attempt added a __del__() method to the Future class
    -    itself, but this backfired because the presence of __del__()
    -    prevents garbage collection from breaking cycles.  A way out of
    -    this catch-22 is to avoid having a __del__() method on the Future
    -    class itself, but instead to have a reference to a helper object
    -    with a __del__() method that logs the traceback, where we ensure
    -    that the helper object doesn't participate in cycles, and only the
    -    Future has a reference to it.
    -
    -    The helper object is added when set_exception() is called.  When
    -    the Future is collected, and the helper is present, the helper
    -    object is also collected, and its __del__() method will log the
    -    traceback.  When the Future's result() or exception() method is
-    called (and a helper object is present), it removes the helper
-    called (and a helper object is present), it removes the helper
    -    object, after calling its clear() method to prevent it from
    -    logging.
    -
    -    One downside is that we do a fair amount of work to extract the
    -    traceback from the exception, even when it is never logged.  It
    -    would seem cheaper to just store the exception object, but that
    -    references the traceback, which references stack frames, which may
    -    reference the Future, which references the _TracebackLogger, and
    -    then the _TracebackLogger would be included in a cycle, which is
    -    what we're trying to avoid!  As an optimization, we don't
    -    immediately format the exception; we only do the work when
    -    activate() is called, which call is delayed until after all the
    -    Future's callbacks have run.  Since usually a Future has at least
    -    one callback (typically set by 'yield From') and usually that
    -    callback extracts the callback, thereby removing the need to
    -    format the exception.
    -
    -    PS. I don't claim credit for this solution.  I first heard of it
    -    in a discussion about closing files when they are collected.
    -    """
    -
    -    __slots__ = ('exc_info', 'formatted_tb')
    -
    -    def __init__(self, exc_info):
    -        self.exc_info = exc_info
    -        self.formatted_tb = None
    -
    -    def activate(self):
    -        exc_info = self.exc_info
    -        if exc_info is not None:
    -            self.exc_info = None
    -            self.formatted_tb = traceback.format_exception(*exc_info)
    -
    -    def clear(self):
    -        self.exc_info = None
    -        self.formatted_tb = None
    -
    -    def __del__(self, is_finalizing=is_finalizing):
    -        if not is_finalizing() and self.formatted_tb:
    -            app_log.error('Future exception was never retrieved: %s',
    -                          ''.join(self.formatted_tb).rstrip())
    -
    -
    -class Future(object):
    -    """Placeholder for an asynchronous result.
    -
    -    A ``Future`` encapsulates the result of an asynchronous
    -    operation.  In synchronous applications ``Futures`` are used
    -    to wait for the result from a thread or process pool; in
    -    Tornado they are normally used with `.IOLoop.add_future` or by
    -    yielding them in a `.gen.coroutine`.
    -
    -    `tornado.concurrent.Future` is an alias for `asyncio.Future` when
    -    that package is available (Python 3.4+). Unlike
    -    `concurrent.futures.Future`, the ``Futures`` used by Tornado and
    -    `asyncio` are not thread-safe (and therefore faster for use with
    -    single-threaded event loops).
    -
    -    In addition to ``exception`` and ``set_exception``, Tornado's
    -    ``Future`` implementation supports storing an ``exc_info`` triple
    -    to support better tracebacks on Python 2. To set an ``exc_info``
    -    triple, use `future_set_exc_info`, and to retrieve one, call
    -    `result()` (which will raise it).
    -
    -    .. versionchanged:: 4.0
    -       `tornado.concurrent.Future` is always a thread-unsafe ``Future``
    -       with support for the ``exc_info`` methods.  Previously it would
    -       be an alias for the thread-safe `concurrent.futures.Future`
    -       if that package was available and fall back to the thread-unsafe
    -       implementation if it was not.
    -
    -    .. versionchanged:: 4.1
    -       If a `.Future` contains an error but that error is never observed
    -       (by calling ``result()``, ``exception()``, or ``exc_info()``),
    -       a stack trace will be logged when the `.Future` is garbage collected.
    -       This normally indicates an error in the application, but in cases
    -       where it results in undesired logging it may be necessary to
    -       suppress the logging by ensuring that the exception is observed:
    -       ``f.add_done_callback(lambda f: f.exception())``.
    -
    -    .. versionchanged:: 5.0
    -
-       This class was previously available under the name
    -       ``TracebackFuture``. This name, which was deprecated since
    -       version 4.0, has been removed. When `asyncio` is available
    -       ``tornado.concurrent.Future`` is now an alias for
    -       `asyncio.Future`. Like `asyncio.Future`, callbacks are now
    -       always scheduled on the `.IOLoop` and are never run
    -       synchronously.
    -
    -    """
    -    def __init__(self):
    -        self._done = False
    -        self._result = None
    -        self._exc_info = None
    -
    -        self._log_traceback = False   # Used for Python >= 3.4
    -        self._tb_logger = None        # Used for Python <= 3.3
    -
    -        self._callbacks = []
    -
    -    # Implement the Python 3.5 Awaitable protocol if possible
    -    # (we can't use return and yield together until py33).
    -    if sys.version_info >= (3, 3):
    -        exec(textwrap.dedent("""
    -        def __await__(self):
    -            return (yield self)
    -        """))
    -    else:
    -        # Py2-compatible version for use with cython.
    -        def __await__(self):
    -            result = yield self
    -            # StopIteration doesn't take args before py33,
    -            # but Cython recognizes the args tuple.
    -            e = StopIteration()
    -            e.args = (result,)
    -            raise e
    -
    -    def cancel(self):
    -        """Cancel the operation, if possible.
    -
    -        Tornado ``Futures`` do not support cancellation, so this method always
    -        returns False.
    -        """
    -        return False
    -
    -    def cancelled(self):
    -        """Returns True if the operation has been cancelled.
    -
    -        Tornado ``Futures`` do not support cancellation, so this method
    -        always returns False.
    -        """
    -        return False
    -
    -    def running(self):
    -        """Returns True if this operation is currently running."""
    -        return not self._done
    -
    -    def done(self):
    -        """Returns True if the future has finished running."""
    -        return self._done
    -
    -    def _clear_tb_log(self):
    -        self._log_traceback = False
    -        if self._tb_logger is not None:
    -            self._tb_logger.clear()
    -            self._tb_logger = None
    -
    -    def result(self, timeout=None):
    -        """If the operation succeeded, return its result.  If it failed,
    -        re-raise its exception.
    -
    -        This method takes a ``timeout`` argument for compatibility with
    -        `concurrent.futures.Future` but it is an error to call it
    -        before the `Future` is done, so the ``timeout`` is never used.
    -        """
    -        self._clear_tb_log()
    -        if self._result is not None:
    -            return self._result
    -        if self._exc_info is not None:
    -            try:
    -                raise_exc_info(self._exc_info)
    -            finally:
    -                self = None
    -        self._check_done()
    -        return self._result
    -
    -    def exception(self, timeout=None):
    -        """If the operation raised an exception, return the `Exception`
    -        object.  Otherwise returns None.
    -
    -        This method takes a ``timeout`` argument for compatibility with
    -        `concurrent.futures.Future` but it is an error to call it
    -        before the `Future` is done, so the ``timeout`` is never used.
    -        """
    -        self._clear_tb_log()
    -        if self._exc_info is not None:
    -            return self._exc_info[1]
    -        else:
    -            self._check_done()
    -            return None
    -
    -    def add_done_callback(self, fn):
    -        """Attaches the given callback to the `Future`.
    -
    -        It will be invoked with the `Future` as its argument when the Future
    -        has finished running and its result is available.  In Tornado
    -        consider using `.IOLoop.add_future` instead of calling
    -        `add_done_callback` directly.
    -        """
    -        if self._done:
    -            from tornado.ioloop import IOLoop
    -            IOLoop.current().add_callback(fn, self)
    -        else:
    -            self._callbacks.append(fn)
    -
    -    def set_result(self, result):
    -        """Sets the result of a ``Future``.
    -
    -        It is undefined to call any of the ``set`` methods more than once
    -        on the same object.
    -        """
    -        self._result = result
    -        self._set_done()
    -
    -    def set_exception(self, exception):
    -        """Sets the exception of a ``Future.``"""
    -        self.set_exc_info(
    -            (exception.__class__,
    -             exception,
    -             getattr(exception, '__traceback__', None)))
    -
    -    def exc_info(self):
    -        """Returns a tuple in the same format as `sys.exc_info` or None.
    -
    -        .. versionadded:: 4.0
    -        """
    -        self._clear_tb_log()
    -        return self._exc_info
    -
    -    def set_exc_info(self, exc_info):
    -        """Sets the exception information of a ``Future.``
    -
    -        Preserves tracebacks on Python 2.
    -
    -        .. versionadded:: 4.0
    -        """
    -        self._exc_info = exc_info
    -        self._log_traceback = True
    -        if not _GC_CYCLE_FINALIZERS:
    -            self._tb_logger = _TracebackLogger(exc_info)
    -
    -        try:
    -            self._set_done()
    -        finally:
    -            # Activate the logger after all callbacks have had a
    -            # chance to call result() or exception().
    -            if self._log_traceback and self._tb_logger is not None:
    -                self._tb_logger.activate()
    -        self._exc_info = exc_info
    -
    -    def _check_done(self):
    -        if not self._done:
    -            raise Exception("DummyFuture does not support blocking for results")
    -
    -    def _set_done(self):
    -        self._done = True
    -        if self._callbacks:
    -            from tornado.ioloop import IOLoop
    -            loop = IOLoop.current()
    -            for cb in self._callbacks:
    -                loop.add_callback(cb, self)
    -            self._callbacks = None
    -
    -    # On Python 3.3 or older, objects with a destructor part of a reference
    -    # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
    -    # the PEP 442.
    -    if _GC_CYCLE_FINALIZERS:
    -        def __del__(self, is_finalizing=is_finalizing):
    -            if is_finalizing() or not self._log_traceback:
    -                # set_exception() was not called, or result() or exception()
    -                # has consumed the exception
    -                return
    -
    -            tb = traceback.format_exception(*self._exc_info)
    -
    -            app_log.error('Future %r exception was never retrieved: %s',
    -                          self, ''.join(tb).rstrip())
    -
    -
    -if asyncio is not None:
    -    Future = asyncio.Future  # noqa
    -
    -if futures is None:
    -    FUTURES = Future  # type: typing.Union[type, typing.Tuple[type, ...]]
    -else:
    -    FUTURES = (futures.Future, Future)
    -
    -
    -def is_future(x):
    +def is_future(x: Any) -> bool:
         return isinstance(x, FUTURES)
     
     
    -class DummyExecutor(object):
    -    def submit(self, fn, *args, **kwargs):
    -        future = Future()
    +class DummyExecutor(futures.Executor):
    +    def submit(
    +        self, fn: Callable[..., _T], *args: Any, **kwargs: Any
    +    ) -> "futures.Future[_T]":
    +        future = futures.Future()  # type: futures.Future[_T]
             try:
                 future_set_result_unless_cancelled(future, fn(*args, **kwargs))
             except Exception:
                 future_set_exc_info(future, sys.exc_info())
             return future
     
    -    def shutdown(self, wait=True):
    +    def shutdown(self, wait: bool = True) -> None:
             pass
     
     
     dummy_executor = DummyExecutor()
     
     
    -def run_on_executor(*args, **kwargs):
    +def run_on_executor(*args: Any, **kwargs: Any) -> Callable:
         """Decorator to run a synchronous method asynchronously on an executor.
     
         The decorated method may be called with a ``callback`` keyword
    @@ -432,24 +107,25 @@ def run_on_executor(*args, **kwargs):
            The ``callback`` argument is deprecated and will be removed in
            6.0. The decorator itself is discouraged in new code but will
            not be removed in 6.0.
    +
    +    .. versionchanged:: 6.0
    +
    +       The ``callback`` argument was removed.
         """
    -    def run_on_executor_decorator(fn):
    +    # Fully type-checking decorators is tricky, and this one is
    +    # discouraged anyway so it doesn't have all the generic magic.
    +    def run_on_executor_decorator(fn: Callable) -> Callable[..., Future]:
             executor = kwargs.get("executor", "executor")
     
             @functools.wraps(fn)
    -        def wrapper(self, *args, **kwargs):
    -            callback = kwargs.pop("callback", None)
    -            async_future = Future()
    +        def wrapper(self: Any, *args: Any, **kwargs: Any) -> Future:
    +            async_future = Future()  # type: Future
                 conc_future = getattr(self, executor).submit(fn, self, *args, **kwargs)
                 chain_future(conc_future, async_future)
    -            if callback:
    -                warnings.warn("callback arguments are deprecated, use the returned Future instead",
    -                              DeprecationWarning)
    -                from tornado.ioloop import IOLoop
    -                IOLoop.current().add_future(
    -                    async_future, lambda future: callback(future.result()))
                 return async_future
    +
             return wrapper
    +
         if args and kwargs:
             raise ValueError("cannot combine positional and keyword args")
         if len(args) == 1:
    @@ -462,129 +138,7 @@ def run_on_executor(*args, **kwargs):
     _NO_RESULT = object()
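
The decorator above looks for an executor attribute on the instance (named ``executor`` by default) and chains the `concurrent.futures.Future` it produces onto an awaitable one. A sketch of the documented usage pattern; the class and method names are made up:

    import time
    from concurrent.futures import ThreadPoolExecutor

    from tornado.concurrent import run_on_executor
    from tornado.ioloop import IOLoop

    class Worker:
        def __init__(self) -> None:
            self.executor = ThreadPoolExecutor(max_workers=4)

        @run_on_executor
        def slow_add(self, a: int, b: int) -> int:
            time.sleep(0.1)            # stands in for blocking work
            return a + b

    async def main() -> None:
        print(await Worker().slow_add(1, 2))   # -> 3

    IOLoop.current().run_sync(main)
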
     
     
    -def return_future(f):
    -    """Decorator to make a function that returns via callback return a
    -    `Future`.
    -
    -    This decorator was provided to ease the transition from
    -    callback-oriented code to coroutines. It is not recommended for
    -    new code.
    -
    -    The wrapped function should take a ``callback`` keyword argument
    -    and invoke it with one argument when it has finished.  To signal failure,
    -    the function can simply raise an exception (which will be
    -    captured by the `.StackContext` and passed along to the ``Future``).
    -
    -    From the caller's perspective, the callback argument is optional.
    -    If one is given, it will be invoked when the function is complete
    -    with ``Future.result()`` as an argument.  If the function fails, the
    -    callback will not be run and an exception will be raised into the
    -    surrounding `.StackContext`.
    -
    -    If no callback is given, the caller should use the ``Future`` to
    -    wait for the function to complete (perhaps by yielding it in a
    -    coroutine, or passing it to `.IOLoop.add_future`).
    -
    -    Usage:
    -
    -    .. testcode::
    -
    -        @return_future
    -        def future_func(arg1, arg2, callback):
    -            # Do stuff (possibly asynchronous)
    -            callback(result)
    -
    -        async def caller():
    -            await future_func(arg1, arg2)
    -
    -    ..
    -
    -    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
    -    same function, provided ``@return_future`` appears first.  However,
    -    consider using ``@gen.coroutine`` instead of this combination.
    -
    -    .. versionchanged:: 5.1
    -
    -       Now raises a `.DeprecationWarning` if a callback argument is passed to
    -       the decorated function and deprecation warnings are enabled.
    -
    -    .. deprecated:: 5.1
    -
    -       This decorator will be removed in Tornado 6.0. New code should
    -       use coroutines directly instead of wrapping callback-based code
    -       with this decorator. Interactions with non-Tornado
    -       callback-based code should be managed explicitly to avoid
    -       relying on the `.ExceptionStackContext` built into this
    -       decorator.
    -    """
    -    warnings.warn("@return_future is deprecated, use coroutines instead",
    -                  DeprecationWarning)
    -    return _non_deprecated_return_future(f, warn=True)
    -
    -
    -def _non_deprecated_return_future(f, warn=False):
    -    # Allow auth.py to use this decorator without triggering
    -    # deprecation warnings. This will go away once auth.py has removed
    -    # its legacy interfaces in 6.0.
    -    replacer = ArgReplacer(f, 'callback')
    -
    -    @functools.wraps(f)
    -    def wrapper(*args, **kwargs):
    -        future = Future()
    -        callback, args, kwargs = replacer.replace(
    -            lambda value=_NO_RESULT: future_set_result_unless_cancelled(future, value),
    -            args, kwargs)
    -
    -        def handle_error(typ, value, tb):
    -            future_set_exc_info(future, (typ, value, tb))
    -            return True
    -        exc_info = None
    -        esc = ExceptionStackContext(handle_error, delay_warning=True)
    -        with esc:
    -            if not warn:
    -                # HACK: In non-deprecated mode (only used in auth.py),
    -                # suppress the warning entirely. Since this is added
    -                # in a 5.1 patch release and already removed in 6.0
-                # I'm prioritizing a minimal change instead of a
    -                # clean solution.
    -                esc.delay_warning = False
    -            try:
    -                result = f(*args, **kwargs)
    -                if result is not None:
    -                    raise ReturnValueIgnoredError(
    -                        "@return_future should not be used with functions "
    -                        "that return values")
    -            except:
    -                exc_info = sys.exc_info()
    -                raise
    -        if exc_info is not None:
    -            # If the initial synchronous part of f() raised an exception,
    -            # go ahead and raise it to the caller directly without waiting
    -            # for them to inspect the Future.
    -            future.result()
    -
    -        # If the caller passed in a callback, schedule it to be called
    -        # when the future resolves.  It is important that this happens
    -        # just before we return the future, or else we risk confusing
    -        # stack contexts with multiple exceptions (one here with the
    -        # immediate exception, and again when the future resolves and
    -        # the callback triggers its exception by calling future.result()).
    -        if callback is not None:
    -            warnings.warn("callback arguments are deprecated, use the returned Future instead",
    -                          DeprecationWarning)
    -
    -            def run_callback(future):
    -                result = future.result()
    -                if result is _NO_RESULT:
    -                    callback()
    -                else:
    -                    callback(future.result())
    -            future_add_done_callback(future, wrap(run_callback))
    -        return future
    -    return wrapper
    -
    -
    -def chain_future(a, b):
    +def chain_future(a: "Future[_T]", b: "Future[_T]") -> None:
         """Chain two futures together so that when one completes, so does the other.
     
         The result (success or failure) of ``a`` will be copied to ``b``, unless
    @@ -596,29 +150,33 @@ def chain_future(a, b):
            `concurrent.futures.Future`.
     
         """
    -    def copy(future):
    +
    +    def copy(future: "Future[_T]") -> None:
             assert future is a
             if b.done():
                 return
    -        if (hasattr(a, 'exc_info') and
    -                a.exc_info() is not None):
    -            future_set_exc_info(b, a.exc_info())
    +        if hasattr(a, "exc_info") and a.exc_info() is not None:  # type: ignore
    +            future_set_exc_info(b, a.exc_info())  # type: ignore
             elif a.exception() is not None:
                 b.set_exception(a.exception())
             else:
                 b.set_result(a.result())
    +
         if isinstance(a, Future):
             future_add_done_callback(a, copy)
         else:
             # concurrent.futures.Future
             from tornado.ioloop import IOLoop
    +
             IOLoop.current().add_future(a, copy)
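
chain_future is what lets a thread-pool result surface in coroutine code. A small sketch, assuming a running IOLoop (here provided by ``run_sync``):

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    from tornado.concurrent import chain_future
    from tornado.ioloop import IOLoop

    async def main() -> None:
        conc = ThreadPoolExecutor(max_workers=1).submit(pow, 2, 10)
        async_fut = asyncio.Future()   # type: asyncio.Future
        chain_future(conc, async_fut)  # copy result/failure when conc completes
        print(await async_fut)         # -> 1024

    IOLoop.current().run_sync(main)
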
     
     
    -def future_set_result_unless_cancelled(future, value):
    +def future_set_result_unless_cancelled(
    +    future: "Union[futures.Future[_T], Future[_T]]", value: _T
    +) -> None:
         """Set the given ``value`` as the `Future`'s result, if not cancelled.
     
    -    Avoids asyncio.InvalidStateError when calling set_result() on
    +    Avoids ``asyncio.InvalidStateError`` when calling ``set_result()`` on
         a cancelled `asyncio.Future`.
     
         .. versionadded:: 5.0
    @@ -627,23 +185,69 @@ def future_set_result_unless_cancelled(future, value):
             future.set_result(value)
     
     
    -def future_set_exc_info(future, exc_info):
    +def future_set_exception_unless_cancelled(
    +    future: "Union[futures.Future[_T], Future[_T]]", exc: BaseException
    +) -> None:
    +    """Set the given ``exc`` as the `Future`'s exception.
    +
    +    If the Future is already canceled, logs the exception instead. If
    +    this logging is not desired, the caller should explicitly check
    +    the state of the Future and call ``Future.set_exception`` instead of
    +    this wrapper.
    +
    +    Avoids ``asyncio.InvalidStateError`` when calling ``set_exception()`` on
    +    a cancelled `asyncio.Future`.
    +
    +    .. versionadded:: 6.0
    +
    +    """
    +    if not future.cancelled():
    +        future.set_exception(exc)
    +    else:
    +        app_log.error("Exception after Future was cancelled", exc_info=exc)
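
The guard matters because a plain ``set_result``/``set_exception`` on a cancelled `asyncio.Future` raises ``asyncio.InvalidStateError``. A quick sketch of the behavior these helpers provide:

    import asyncio
    from tornado.concurrent import future_set_result_unless_cancelled

    async def main() -> None:
        fut = asyncio.Future()  # type: asyncio.Future
        fut.cancel()
        future_set_result_unless_cancelled(fut, 42)  # silently skipped
        print(fut.cancelled())                       # True

    asyncio.run(main())
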
    +
    +
    +def future_set_exc_info(
    +    future: "Union[futures.Future[_T], Future[_T]]",
    +    exc_info: Tuple[
    +        Optional[type], Optional[BaseException], Optional[types.TracebackType]
    +    ],
    +) -> None:
         """Set the given ``exc_info`` as the `Future`'s exception.
     
    -    Understands both `asyncio.Future` and Tornado's extensions to
    -    enable better tracebacks on Python 2.
    +    Understands both `asyncio.Future` and the extensions in older
    +    versions of Tornado to enable better tracebacks on Python 2.
     
         .. versionadded:: 5.0
    +
    +    .. versionchanged:: 6.0
    +
    +       If the future is already cancelled, this function is a no-op.
    +       (previously ``asyncio.InvalidStateError`` would be raised)
    +
         """
    -    if hasattr(future, 'set_exc_info'):
    -        # Tornado's Future
    -        future.set_exc_info(exc_info)
    -    else:
    -        # asyncio.Future
    -        future.set_exception(exc_info[1])
    +    if exc_info[1] is None:
    +        raise Exception("future_set_exc_info called with no exception")
    +    future_set_exception_unless_cancelled(future, exc_info[1])
     
     
    -def future_add_done_callback(future, callback):
    +@typing.overload
    +def future_add_done_callback(
    +    future: "futures.Future[_T]", callback: Callable[["futures.Future[_T]"], None]
    +) -> None:
    +    pass
    +
    +
    +@typing.overload  # noqa: F811
    +def future_add_done_callback(
    +    future: "Future[_T]", callback: Callable[["Future[_T]"], None]
    +) -> None:
    +    pass
    +
    +
    +def future_add_done_callback(  # noqa: F811
    +    future: "Union[futures.Future[_T], Future[_T]]", callback: Callable[..., None]
    +) -> None:
         """Arrange to call ``callback`` when ``future`` is complete.
     
         ``callback`` is invoked with one argument, the ``future``.
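
The two ``@typing.overload`` declarations exist so callers get precise types for whichever ``Future`` flavor they pass; at runtime there is a single implementation. A usage sketch with a `concurrent.futures.Future`:

    from concurrent.futures import Future
    from tornado.concurrent import future_add_done_callback

    f = Future()  # type: Future
    future_add_done_callback(f, lambda fut: print("done:", fut.result()))
    f.set_result("ok")   # the callback fires here, printing "done: ok"
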
    diff --git a/server/www/packages/packages-windows/x86/tornado/curl_httpclient.py b/server/www/packages/packages-windows/x86/tornado/curl_httpclient.py
    index 7f5cb10..4119585 100644
    --- a/server/www/packages/packages-windows/x86/tornado/curl_httpclient.py
    +++ b/server/www/packages/packages-windows/x86/tornado/curl_httpclient.py
    @@ -15,8 +15,6 @@
     
     """Non-blocking HTTP client implementation using pycurl."""
     
    -from __future__ import absolute_import, division, print_function
    -
     import collections
     import functools
     import logging
    @@ -27,32 +25,49 @@ from io import BytesIO
     
     from tornado import httputil
     from tornado import ioloop
    -from tornado import stack_context
     
     from tornado.escape import utf8, native_str
    -from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main
    +from tornado.httpclient import (
    +    HTTPRequest,
    +    HTTPResponse,
    +    HTTPError,
    +    AsyncHTTPClient,
    +    main,
    +)
    +from tornado.log import app_log
     
    -curl_log = logging.getLogger('tornado.curl_httpclient')
    +from typing import Dict, Any, Callable, Union
    +import typing
    +
    +if typing.TYPE_CHECKING:
    +    from typing import Deque, Tuple, Optional  # noqa: F401
    +
    +curl_log = logging.getLogger("tornado.curl_httpclient")
     
     
     class CurlAsyncHTTPClient(AsyncHTTPClient):
    -    def initialize(self, max_clients=10, defaults=None):
    +    def initialize(  # type: ignore
    +        self, max_clients: int = 10, defaults: Dict[str, Any] = None
    +    ) -> None:
             super(CurlAsyncHTTPClient, self).initialize(defaults=defaults)
             self._multi = pycurl.CurlMulti()
             self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
             self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
             self._curls = [self._curl_create() for i in range(max_clients)]
             self._free_list = self._curls[:]
    -        self._requests = collections.deque()
    -        self._fds = {}
    -        self._timeout = None
    +        self._requests = (
    +            collections.deque()
    +        )  # type: Deque[Tuple[HTTPRequest, Callable[[HTTPResponse], None], float]]
    +        self._fds = {}  # type: Dict[int, int]
    +        self._timeout = None  # type: Optional[object]
     
             # libcurl has bugs that sometimes cause it to not report all
             # relevant file descriptors and timeouts to TIMERFUNCTION/
             # SOCKETFUNCTION.  Mitigate the effects of such bugs by
             # forcing a periodic scan of all active requests.
             self._force_timeout_callback = ioloop.PeriodicCallback(
    -            self._handle_force_timeout, 1000)
    +            self._handle_force_timeout, 1000
    +        )
             self._force_timeout_callback.start()
     
             # Work around a bug in libcurl 7.29.0: Some fields in the curl
    @@ -64,7 +79,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             self._multi.add_handle(dummy_curl_handle)
             self._multi.remove_handle(dummy_curl_handle)
     
    -    def close(self):
    +    def close(self) -> None:
             self._force_timeout_callback.stop()
             if self._timeout is not None:
                 self.io_loop.remove_timeout(self._timeout)
    @@ -76,15 +91,17 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
        # Set the properties below to None to reduce the reference count of the
        # current instance, because those properties hold some methods of the
        # current instance that will cause a circular reference.
    -        self._force_timeout_callback = None
    +        self._force_timeout_callback = None  # type: ignore
             self._multi = None
     
    -    def fetch_impl(self, request, callback):
    +    def fetch_impl(
    +        self, request: HTTPRequest, callback: Callable[[HTTPResponse], None]
    +    ) -> None:
             self._requests.append((request, callback, self.io_loop.time()))
             self._process_queue()
             self._set_timeout(0)
     
    -    def _handle_socket(self, event, fd, multi, data):
    +    def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None:
             """Called by libcurl when it wants to change the file descriptors
             it cares about.
             """
    @@ -92,7 +109,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 pycurl.POLL_NONE: ioloop.IOLoop.NONE,
                 pycurl.POLL_IN: ioloop.IOLoop.READ,
                 pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
    -            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE
    +            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE,
             }
             if event == pycurl.POLL_REMOVE:
                 if fd in self._fds:
    @@ -110,18 +127,18 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 # instead of update.
                 if fd in self._fds:
                     self.io_loop.remove_handler(fd)
    -            self.io_loop.add_handler(fd, self._handle_events,
    -                                     ioloop_event)
    +            self.io_loop.add_handler(fd, self._handle_events, ioloop_event)
                 self._fds[fd] = ioloop_event
     
    -    def _set_timeout(self, msecs):
    +    def _set_timeout(self, msecs: int) -> None:
             """Called by libcurl to schedule a timeout."""
             if self._timeout is not None:
                 self.io_loop.remove_timeout(self._timeout)
             self._timeout = self.io_loop.add_timeout(
    -            self.io_loop.time() + msecs / 1000.0, self._handle_timeout)
    +            self.io_loop.time() + msecs / 1000.0, self._handle_timeout
    +        )
     
    -    def _handle_events(self, fd, events):
    +    def _handle_events(self, fd: int, events: int) -> None:
             """Called by IOLoop when there is activity on one of our
             file descriptors.
             """
    @@ -139,19 +156,17 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                     break
             self._finish_pending_requests()
     
    -    def _handle_timeout(self):
    +    def _handle_timeout(self) -> None:
             """Called by IOLoop when the requested timeout has passed."""
    -        with stack_context.NullContext():
    -            self._timeout = None
    -            while True:
    -                try:
    -                    ret, num_handles = self._multi.socket_action(
    -                        pycurl.SOCKET_TIMEOUT, 0)
    -                except pycurl.error as e:
    -                    ret = e.args[0]
    -                if ret != pycurl.E_CALL_MULTI_PERFORM:
    -                    break
    -            self._finish_pending_requests()
    +        self._timeout = None
    +        while True:
    +            try:
    +                ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0)
    +            except pycurl.error as e:
    +                ret = e.args[0]
    +            if ret != pycurl.E_CALL_MULTI_PERFORM:
    +                break
    +        self._finish_pending_requests()
     
             # In theory, we shouldn't have to do this because curl will
             # call _set_timeout whenever the timeout changes.  However,
    @@ -170,21 +185,20 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             if new_timeout >= 0:
                 self._set_timeout(new_timeout)
     
    -    def _handle_force_timeout(self):
    +    def _handle_force_timeout(self) -> None:
             """Called by IOLoop periodically to ask libcurl to process any
             events it may have forgotten about.
             """
    -        with stack_context.NullContext():
    -            while True:
    -                try:
    -                    ret, num_handles = self._multi.socket_all()
    -                except pycurl.error as e:
    -                    ret = e.args[0]
    -                if ret != pycurl.E_CALL_MULTI_PERFORM:
    -                    break
    -            self._finish_pending_requests()
    +        while True:
    +            try:
    +                ret, num_handles = self._multi.socket_all()
    +            except pycurl.error as e:
    +                ret = e.args[0]
    +            if ret != pycurl.E_CALL_MULTI_PERFORM:
    +                break
    +        self._finish_pending_requests()
     
    -    def _finish_pending_requests(self):
    +    def _finish_pending_requests(self) -> None:
             """Process any requests that were completed by the last
             call to multi.socket_action.
             """
    @@ -198,55 +212,55 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                     break
             self._process_queue()
     
    -    def _process_queue(self):
    -        with stack_context.NullContext():
    -            while True:
    -                started = 0
    -                while self._free_list and self._requests:
    -                    started += 1
    -                    curl = self._free_list.pop()
    -                    (request, callback, queue_start_time) = self._requests.popleft()
    -                    curl.info = {
    -                        "headers": httputil.HTTPHeaders(),
    -                        "buffer": BytesIO(),
    -                        "request": request,
    -                        "callback": callback,
    -                        "queue_start_time": queue_start_time,
    -                        "curl_start_time": time.time(),
    -                        "curl_start_ioloop_time": self.io_loop.current().time(),
    -                    }
    -                    try:
    -                        self._curl_setup_request(
    -                            curl, request, curl.info["buffer"],
    -                            curl.info["headers"])
    -                    except Exception as e:
    -                        # If there was an error in setup, pass it on
    -                        # to the callback. Note that allowing the
    -                        # error to escape here will appear to work
    -                        # most of the time since we are still in the
    -                        # caller's original stack frame, but when
    -                        # _process_queue() is called from
    -                        # _finish_pending_requests the exceptions have
    -                        # nowhere to go.
    -                        self._free_list.append(curl)
    -                        callback(HTTPResponse(
    -                            request=request,
    -                            code=599,
    -                            error=e))
    -                    else:
    -                        self._multi.add_handle(curl)
    +    def _process_queue(self) -> None:
    +        while True:
    +            started = 0
    +            while self._free_list and self._requests:
    +                started += 1
    +                curl = self._free_list.pop()
    +                (request, callback, queue_start_time) = self._requests.popleft()
    +                curl.info = {
    +                    "headers": httputil.HTTPHeaders(),
    +                    "buffer": BytesIO(),
    +                    "request": request,
    +                    "callback": callback,
    +                    "queue_start_time": queue_start_time,
    +                    "curl_start_time": time.time(),
    +                    "curl_start_ioloop_time": self.io_loop.current().time(),
    +                }
    +                try:
    +                    self._curl_setup_request(
    +                        curl, request, curl.info["buffer"], curl.info["headers"]
    +                    )
    +                except Exception as e:
    +                    # If there was an error in setup, pass it on
    +                    # to the callback. Note that allowing the
    +                    # error to escape here will appear to work
    +                    # most of the time since we are still in the
    +                    # caller's original stack frame, but when
    +                    # _process_queue() is called from
    +                    # _finish_pending_requests the exceptions have
    +                    # nowhere to go.
    +                    self._free_list.append(curl)
    +                    callback(HTTPResponse(request=request, code=599, error=e))
    +                else:
    +                    self._multi.add_handle(curl)
     
    -                if not started:
    -                    break
    +            if not started:
    +                break
     
    -    def _finish(self, curl, curl_error=None, curl_message=None):
    +    def _finish(
    +        self, curl: pycurl.Curl, curl_error: int = None, curl_message: str = None
    +    ) -> None:
             info = curl.info
             curl.info = None
             self._multi.remove_handle(curl)
             self._free_list.append(curl)
             buffer = info["buffer"]
             if curl_error:
    -            error = CurlError(curl_error, curl_message)
    +            assert curl_message is not None
    +            error = CurlError(curl_error, curl_message)  # type: Optional[CurlError]
    +            assert error is not None
                 code = error.code
                 effective_url = None
                 buffer.close()
    @@ -269,30 +283,45 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 redirect=curl.getinfo(pycurl.REDIRECT_TIME),
             )
             try:
    -            info["callback"](HTTPResponse(
    -                request=info["request"], code=code, headers=info["headers"],
    -                buffer=buffer, effective_url=effective_url, error=error,
    -                reason=info['headers'].get("X-Http-Reason", None),
    -                request_time=self.io_loop.time() - info["curl_start_ioloop_time"],
    -                start_time=info["curl_start_time"],
    -                time_info=time_info))
    +            info["callback"](
    +                HTTPResponse(
    +                    request=info["request"],
    +                    code=code,
    +                    headers=info["headers"],
    +                    buffer=buffer,
    +                    effective_url=effective_url,
    +                    error=error,
    +                    reason=info["headers"].get("X-Http-Reason", None),
    +                    request_time=self.io_loop.time() - info["curl_start_ioloop_time"],
    +                    start_time=info["curl_start_time"],
    +                    time_info=time_info,
    +                )
    +            )
             except Exception:
                 self.handle_callback_exception(info["callback"])
     
    -    def handle_callback_exception(self, callback):
    -        self.io_loop.handle_callback_exception(callback)
    +    def handle_callback_exception(self, callback: Any) -> None:
    +        app_log.error("Exception in callback %r", callback, exc_info=True)
     
    -    def _curl_create(self):
    +    def _curl_create(self) -> pycurl.Curl:
             curl = pycurl.Curl()
             if curl_log.isEnabledFor(logging.DEBUG):
                 curl.setopt(pycurl.VERBOSE, 1)
                 curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
    -        if hasattr(pycurl, 'PROTOCOLS'):  # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
    +        if hasattr(
    +            pycurl, "PROTOCOLS"
    +        ):  # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
                 curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
                 curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
             return curl
     
    -    def _curl_setup_request(self, curl, request, buffer, headers):
    +    def _curl_setup_request(
    +        self,
    +        curl: pycurl.Curl,
    +        request: HTTPRequest,
    +        buffer: BytesIO,
    +        headers: httputil.HTTPHeaders,
    +    ) -> None:
             curl.setopt(pycurl.URL, native_str(request.url))
     
             # libcurl's magic "Expect: 100-continue" behavior causes delays
    @@ -310,22 +339,35 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             if "Pragma" not in request.headers:
                 request.headers["Pragma"] = ""
     
    -        curl.setopt(pycurl.HTTPHEADER,
    -                    ["%s: %s" % (native_str(k), native_str(v))
    -                     for k, v in request.headers.get_all()])
    +        curl.setopt(
    +            pycurl.HTTPHEADER,
    +            [
    +                "%s: %s" % (native_str(k), native_str(v))
    +                for k, v in request.headers.get_all()
    +            ],
    +        )
     
    -        curl.setopt(pycurl.HEADERFUNCTION,
    -                    functools.partial(self._curl_header_callback,
    -                                      headers, request.header_callback))
    +        curl.setopt(
    +            pycurl.HEADERFUNCTION,
    +            functools.partial(
    +                self._curl_header_callback, headers, request.header_callback
    +            ),
    +        )
             if request.streaming_callback:
    -            def write_function(chunk):
    -                self.io_loop.add_callback(request.streaming_callback, chunk)
    +
    +            def write_function(b: Union[bytes, bytearray]) -> int:
    +                assert request.streaming_callback is not None
    +                self.io_loop.add_callback(request.streaming_callback, b)
    +                return len(b)
    +
             else:
                 write_function = buffer.write
             curl.setopt(pycurl.WRITEFUNCTION, write_function)
             curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
             curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
    +        assert request.connect_timeout is not None
             curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
    +        assert request.request_timeout is not None
             curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
             if request.user_agent:
                 curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
    @@ -341,20 +383,22 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 curl.setopt(pycurl.PROXY, request.proxy_host)
                 curl.setopt(pycurl.PROXYPORT, request.proxy_port)
                 if request.proxy_username:
    -                credentials = httputil.encode_username_password(request.proxy_username,
    -                                                                request.proxy_password)
    +                assert request.proxy_password is not None
    +                credentials = httputil.encode_username_password(
    +                    request.proxy_username, request.proxy_password
    +                )
                     curl.setopt(pycurl.PROXYUSERPWD, credentials)
     
    -            if (request.proxy_auth_mode is None or
    -                    request.proxy_auth_mode == "basic"):
    +            if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic":
                     curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
                 elif request.proxy_auth_mode == "digest":
                     curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
                 else:
                     raise ValueError(
    -                    "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode)
    +                    "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode
    +                )
             else:
    -            curl.setopt(pycurl.PROXY, '')
    +            curl.setopt(pycurl.PROXY, "")
                 curl.unsetopt(pycurl.PROXYUSERPWD)
             if request.validate_cert:
                 curl.setopt(pycurl.SSL_VERIFYPEER, 1)
    @@ -397,7 +441,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             elif request.allow_nonstandard_methods or request.method in custom_methods:
                 curl.setopt(pycurl.CUSTOMREQUEST, request.method)
             else:
    -            raise KeyError('unknown method ' + request.method)
    +            raise KeyError("unknown method " + request.method)
     
             body_expected = request.method in ("POST", "PATCH", "PUT")
             body_present = request.body is not None
    @@ -405,12 +449,14 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 # Some HTTP methods nearly always have bodies while others
                 # almost never do. Fail in this case unless the user has
                 # opted out of sanity checks with allow_nonstandard_methods.
    -            if ((body_expected and not body_present) or
    -                    (body_present and not body_expected)):
    +            if (body_expected and not body_present) or (
    +                body_present and not body_expected
    +            ):
                     raise ValueError(
    -                    'Body must %sbe None for method %s (unless '
    -                    'allow_nonstandard_methods is true)' %
    -                    ('not ' if body_expected else '', request.method))
    +                    "Body must %sbe None for method %s (unless "
    +                    "allow_nonstandard_methods is true)"
    +                    % ("not " if body_expected else "", request.method)
    +                )
     
             if body_expected or body_present:
                 if request.method == "GET":
    @@ -419,21 +465,23 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                     # unless we use CUSTOMREQUEST). While the spec doesn't
                     # forbid clients from sending a body, it arguably
                     # disallows the server from doing anything with them.
    -                raise ValueError('Body must be None for GET request')
    -            request_buffer = BytesIO(utf8(request.body or ''))
    +                raise ValueError("Body must be None for GET request")
    +            request_buffer = BytesIO(utf8(request.body or ""))
     
    -            def ioctl(cmd):
    +            def ioctl(cmd: int) -> None:
                     if cmd == curl.IOCMD_RESTARTREAD:
                         request_buffer.seek(0)
    +
                 curl.setopt(pycurl.READFUNCTION, request_buffer.read)
                 curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
                 if request.method == "POST":
    -                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ''))
    +                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ""))
                 else:
                     curl.setopt(pycurl.UPLOAD, True)
    -                curl.setopt(pycurl.INFILESIZE, len(request.body or ''))
    +                curl.setopt(pycurl.INFILESIZE, len(request.body or ""))
     
             if request.auth_username is not None:
    +            assert request.auth_password is not None
                 if request.auth_mode is None or request.auth_mode == "basic":
                     curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
                 elif request.auth_mode == "digest":
    @@ -441,11 +489,16 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 else:
                     raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
     
    -            userpwd = httputil.encode_username_password(request.auth_username,
    -                                                        request.auth_password)
    +            userpwd = httputil.encode_username_password(
    +                request.auth_username, request.auth_password
    +            )
                 curl.setopt(pycurl.USERPWD, userpwd)
    -            curl_log.debug("%s %s (username: %r)", request.method, request.url,
    -                           request.auth_username)
    +            curl_log.debug(
    +                "%s %s (username: %r)",
    +                request.method,
    +                request.url,
    +                request.auth_username,
    +            )
             else:
                 curl.unsetopt(pycurl.USERPWD)
                 curl_log.debug("%s %s", request.method, request.url)
    @@ -459,7 +512,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             if request.ssl_options is not None:
                 raise ValueError("ssl_options not supported in curl_httpclient")
     
    -        if threading.activeCount() > 1:
    +        if threading.active_count() > 1:
                 # libcurl/pycurl is not thread-safe by default.  When multiple threads
                 # are used, signals should be disabled.  This has the side effect
                 # of disabling DNS timeouts in some environments (when libcurl is
    @@ -472,8 +525,13 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             if request.prepare_curl_callback is not None:
                 request.prepare_curl_callback(curl)
     
    -    def _curl_header_callback(self, headers, header_callback, header_line):
    -        header_line = native_str(header_line.decode('latin1'))
    +    def _curl_header_callback(
    +        self,
    +        headers: httputil.HTTPHeaders,
    +        header_callback: Callable[[str], None],
    +        header_line_bytes: bytes,
    +    ) -> None:
    +        header_line = native_str(header_line_bytes.decode("latin1"))
             if header_callback is not None:
                 self.io_loop.add_callback(header_callback, header_line)
             # header_line as returned by curl includes the end-of-line characters.
    @@ -490,21 +548,21 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 return
             headers.parse_line(header_line)
     
    -    def _curl_debug(self, debug_type, debug_msg):
    -        debug_types = ('I', '<', '>', '<', '>')
    +    def _curl_debug(self, debug_type: int, debug_msg: str) -> None:
    +        debug_types = ("I", "<", ">", "<", ">")
             if debug_type == 0:
                 debug_msg = native_str(debug_msg)
    -            curl_log.debug('%s', debug_msg.strip())
    +            curl_log.debug("%s", debug_msg.strip())
             elif debug_type in (1, 2):
                 debug_msg = native_str(debug_msg)
                 for line in debug_msg.splitlines():
    -                curl_log.debug('%s %s', debug_types[debug_type], line)
    +                curl_log.debug("%s %s", debug_types[debug_type], line)
             elif debug_type == 4:
    -            curl_log.debug('%s %r', debug_types[debug_type], debug_msg)
    +            curl_log.debug("%s %r", debug_types[debug_type], debug_msg)
     
     
     class CurlError(HTTPError):
    -    def __init__(self, errno, message):
    +    def __init__(self, errno: int, message: str) -> None:
             HTTPError.__init__(self, 599, message)
             self.errno = errno
     
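For reference, the typed write_function in the hunk above now returns len(b), which is how libcurl's WRITEFUNCTION protocol acknowledges that a chunk was fully consumed. A minimal usage sketch (the URL is a placeholder; configure() and streaming_callback are standard Tornado API, and pycurl must be installed):

import tornado.ioloop
from tornado.httpclient import AsyncHTTPClient

# Opt in to the curl-based client instead of the default simple client.
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

async def fetch_streaming() -> None:
    chunks = []
    # Each chunk passes through the write_function wrapper above,
    # which returns len(chunk) so libcurl knows the data was consumed.
    resp = await AsyncHTTPClient().fetch(
        "https://example.com/", streaming_callback=chunks.append
    )
    print(resp.code, sum(len(c) for c in chunks), "bytes streamed")

tornado.ioloop.IOLoop.current().run_sync(fetch_streaming)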
    diff --git a/server/www/packages/packages-windows/x86/tornado/escape.py b/server/www/packages/packages-windows/x86/tornado/escape.py
    index a79ece6..b0ec332 100644
    --- a/server/www/packages/packages-windows/x86/tornado/escape.py
    +++ b/server/www/packages/packages-windows/x86/tornado/escape.py
    @@ -19,35 +19,28 @@ Also includes a few other miscellaneous string manipulation functions that
     have crept in over time.
     """
     
    -from __future__ import absolute_import, division, print_function
    -
    +import html.entities
     import json
     import re
    +import urllib.parse
     
    -from tornado.util import PY3, unicode_type, basestring_type
    +from tornado.util import unicode_type
     
    -if PY3:
    -    from urllib.parse import parse_qs as _parse_qs
    -    import html.entities as htmlentitydefs
    -    import urllib.parse as urllib_parse
    -    unichr = chr
    -else:
    -    from urlparse import parse_qs as _parse_qs
    -    import htmlentitydefs
    -    import urllib as urllib_parse
    -
    -try:
    -    import typing  # noqa
    -except ImportError:
    -    pass
    +import typing
    +from typing import Union, Any, Optional, Dict, List, Callable
     
     
    -_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
     -_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;',
     -                      '\'': '&#39;'}
    +_XHTML_ESCAPE_RE = re.compile("[&<>\"']")
    +_XHTML_ESCAPE_DICT = {
    +    "&": "&",
    +    "<": "<",
    +    ">": ">",
    +    '"': """,
    +    "'": "'",
    +}
     
     
    -def xhtml_escape(value):
    +def xhtml_escape(value: Union[str, bytes]) -> str:
         """Escapes a string so it is valid within HTML or XML.
     
         Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
    @@ -58,11 +51,12 @@ def xhtml_escape(value):
     
            Added the single quote to the list of escaped characters.
         """
    -    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
    -                                to_basestring(value))
    +    return _XHTML_ESCAPE_RE.sub(
    +        lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value)
    +    )
     
     
    -def xhtml_unescape(value):
    +def xhtml_unescape(value: Union[str, bytes]) -> str:
         """Un-escapes an XML-escaped string."""
         return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value))
     
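As a quick check of the escape table above, escaping and unescaping round-trip through the public tornado.escape API (a sketch; the sample string is illustrative):

from tornado import escape

s = '<a href="x">&\'</a>'
escaped = escape.xhtml_escape(s)
# escaped == '&lt;a href=&quot;x&quot;&gt;&amp;&#39;&lt;/a&gt;'
assert escape.xhtml_unescape(escaped) == s  # round-trips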
    @@ -70,7 +64,7 @@ def xhtml_unescape(value):
     # The fact that json_encode wraps json.dumps is an implementation detail.
     # Please see https://github.com/tornadoweb/tornado/pull/706
     # before sending a pull request that adds **kwargs to this function.
    -def json_encode(value):
    +def json_encode(value: Any) -> str:
         """JSON-encodes the given Python object."""
         # JSON permits but does not require forward slashes to be escaped.
          # This is useful when json data is emitted in a <script> tag
          # in HTML, as it prevents </script> tags from prematurely terminating
          # the JavaScript. Some json libraries do this escaping by default,
          # although python's standard library does not, so we do it here.
     -        return ''.join('<script src="' + escape.xhtml_escape(p) +
     -                       '" type="text/javascript"></script>'
     -                       for p in paths)
     +        return "".join(
     +            '<script src="'
     +            + escape.xhtml_escape(p)
     +            + '" type="text/javascript"></script>'
     +            for p in paths
     +        )
     
    -    def render_embed_js(self, js_embed):
    +    def render_embed_js(self, js_embed: Iterable[bytes]) -> bytes:
             """Default method used to render the final embedded js for the
             rendered webpage.
     
             Override this method in a sub-classed controller to change the output.
             """
     -        return b'<script type="text/javascript">\n//<![CDATA[\n' + \
     -            b'\n'.join(js_embed) + b'\n//]]>\n</script>'
     +        return (
     +            b'<script type="text/javascript">\n//<![CDATA[\n'
     +            + b"\n".join(js_embed)
     +            + b"\n//]]>\n</script>"
     +        )
     
    -    def render_linked_css(self, css_files):
    +    def render_linked_css(self, css_files: Iterable[str]) -> str:
             """Default method used to render the final css links for the
             rendered webpage.
     
             Override this method in a sub-classed controller to change the output.
             """
             paths = []
    -        unique_paths = set()
    +        unique_paths = set()  # type: Set[str]
     
             for path in css_files:
                 if not is_absolute(path):
    @@ -868,20 +964,21 @@ class RequestHandler(object):
                     paths.append(path)
                     unique_paths.add(path)
     
     -        return ''.join('<link href="' + escape.xhtml_escape(p) + '" '
     -                       'type="text/css" rel="stylesheet"/>'
     -                       for p in paths)
     +        return "".join(
     +            '<link href="' + escape.xhtml_escape(p) + '" '
     +            'type="text/css" rel="stylesheet"/>'
     +            for p in paths
     +        )
     
    -    def render_embed_css(self, css_embed):
    +    def render_embed_css(self, css_embed: Iterable[bytes]) -> bytes:
             """Default method used to render the final embedded css for the
             rendered webpage.
     
             Override this method in a sub-classed controller to change the output.
             """
     -        return b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
     -            b'\n</style>'
     +        return b'<style type="text/css">\n' + b"\n".join(css_embed) + b"\n</style>"
     
    -    def render_string(self, template_name, **kwargs):
    +    def render_string(self, template_name: str, **kwargs: Any) -> bytes:
             """Generate the given template with the given arguments.
     
             We return the generated byte string (in utf8). To generate and
    @@ -894,6 +991,7 @@ class RequestHandler(object):
                 web_file = frame.f_code.co_filename
                 while frame.f_code.co_filename == web_file:
                     frame = frame.f_back
    +            assert frame.f_code.co_filename is not None
                 template_path = os.path.dirname(frame.f_code.co_filename)
             with RequestHandler._template_loader_lock:
                 if template_path not in RequestHandler._template_loaders:
    @@ -906,7 +1004,7 @@ class RequestHandler(object):
             namespace.update(kwargs)
             return t.generate(**namespace)
     
    -    def get_template_namespace(self):
    +    def get_template_namespace(self) -> Dict[str, Any]:
             """Returns a dictionary to be used as the default template namespace.
     
             May be overridden by subclasses to add or modify values.
    @@ -924,12 +1022,12 @@ class RequestHandler(object):
                 pgettext=self.locale.pgettext,
                 static_url=self.static_url,
                 xsrf_form_html=self.xsrf_form_html,
    -            reverse_url=self.reverse_url
    +            reverse_url=self.reverse_url,
             )
             namespace.update(self.ui)
             return namespace
     
    -    def create_template_loader(self, template_path):
    +    def create_template_loader(self, template_path: str) -> template.BaseLoader:
             """Returns a new template loader for the given path.
     
             May be overridden by subclasses.  By default returns a
    @@ -950,7 +1048,7 @@ class RequestHandler(object):
                 kwargs["whitespace"] = settings["template_whitespace"]
             return template.Loader(template_path, **kwargs)
     
    -    def flush(self, include_footers=False, callback=None):
    +    def flush(self, include_footers: bool = False) -> "Future[None]":
             """Flushes the current output buffer to the network.
     
             The ``callback`` argument, if given, can be used for flow control:
    @@ -962,23 +1060,27 @@ class RequestHandler(object):
             .. versionchanged:: 4.0
                Now returns a `.Future` if no callback is given.
     
    -        .. deprecated:: 5.1
    +        .. versionchanged:: 6.0
     
    -           The ``callback`` argument is deprecated and will be removed in
    -           Tornado 6.0.
    +           The ``callback`` argument was removed.
             """
    +        assert self.request.connection is not None
             chunk = b"".join(self._write_buffer)
             self._write_buffer = []
             if not self._headers_written:
                 self._headers_written = True
                 for transform in self._transforms:
    -                self._status_code, self._headers, chunk = \
    -                    transform.transform_first_chunk(
    -                        self._status_code, self._headers,
    -                        chunk, include_footers)
    +                assert chunk is not None
    +                (
    +                    self._status_code,
    +                    self._headers,
    +                    chunk,
    +                ) = transform.transform_first_chunk(
    +                    self._status_code, self._headers, chunk, include_footers
    +                )
                 # Ignore the chunk and only write the headers for HEAD requests
                 if self.request.method == "HEAD":
    -                chunk = None
    +                chunk = b""
     
                 # Finalize the cookie headers (which have been stored in a side
                 # object so an outgoing cookie could be overwritten before it
    @@ -987,23 +1089,22 @@ class RequestHandler(object):
                     for cookie in self._new_cookie.values():
                         self.add_header("Set-Cookie", cookie.OutputString(None))
     
    -            start_line = httputil.ResponseStartLine('',
    -                                                    self._status_code,
    -                                                    self._reason)
    +            start_line = httputil.ResponseStartLine("", self._status_code, self._reason)
                 return self.request.connection.write_headers(
    -                start_line, self._headers, chunk, callback=callback)
    +                start_line, self._headers, chunk
    +            )
             else:
                 for transform in self._transforms:
                     chunk = transform.transform_chunk(chunk, include_footers)
                 # Ignore the chunk and only write the headers for HEAD requests
                 if self.request.method != "HEAD":
    -                return self.request.connection.write(chunk, callback=callback)
    +                return self.request.connection.write(chunk)
                 else:
    -                future = Future()
    +                future = Future()  # type: Future[None]
                     future.set_result(None)
                     return future
     
    -    def finish(self, chunk=None):
    +    def finish(self, chunk: Union[str, bytes, dict] = None) -> "Future[None]":
             """Finishes this response, ending the HTTP request.
     
             Passing a ``chunk`` to ``finish()`` is equivalent to passing that
    @@ -1027,27 +1128,32 @@ class RequestHandler(object):
             # Automatically support ETags and add the Content-Length header if
             # we have not flushed any content yet.
             if not self._headers_written:
    -            if (self._status_code == 200 and
    -                self.request.method in ("GET", "HEAD") and
    -                    "Etag" not in self._headers):
    +            if (
    +                self._status_code == 200
    +                and self.request.method in ("GET", "HEAD")
    +                and "Etag" not in self._headers
    +            ):
                     self.set_etag_header()
                     if self.check_etag_header():
                         self._write_buffer = []
                         self.set_status(304)
    -            if (self._status_code in (204, 304) or
    -                    (self._status_code >= 100 and self._status_code < 200)):
    -                assert not self._write_buffer, "Cannot send body with %s" % self._status_code
    +            if self._status_code in (204, 304) or (
    +                self._status_code >= 100 and self._status_code < 200
    +            ):
    +                assert not self._write_buffer, (
    +                    "Cannot send body with %s" % self._status_code
    +                )
                     self._clear_headers_for_304()
                 elif "Content-Length" not in self._headers:
                     content_length = sum(len(part) for part in self._write_buffer)
                     self.set_header("Content-Length", content_length)
     
    -        if hasattr(self.request, "connection"):
    -            # Now that the request is finished, clear the callback we
    -            # set on the HTTPConnection (which would otherwise prevent the
    -            # garbage collection of the RequestHandler when there
    -            # are keepalive connections)
    -            self.request.connection.set_close_callback(None)
    +        assert self.request.connection is not None
    +        # Now that the request is finished, clear the callback we
    +        # set on the HTTPConnection (which would otherwise prevent the
    +        # garbage collection of the RequestHandler when there
    +        # are keepalive connections)
    +        self.request.connection.set_close_callback(None)  # type: ignore
     
             future = self.flush(include_footers=True)
             self.request.connection.finish()
    @@ -1057,7 +1163,7 @@ class RequestHandler(object):
             self._break_cycles()
             return future
     
    -    def detach(self):
    +    def detach(self) -> iostream.IOStream:
             """Take control of the underlying stream.
     
             Returns the underlying `.IOStream` object and stops all
    @@ -1069,14 +1175,15 @@ class RequestHandler(object):
             .. versionadded:: 5.1
             """
             self._finished = True
    -        return self.request.connection.detach()
    +        # TODO: add detach to HTTPConnection?
    +        return self.request.connection.detach()  # type: ignore
     
    -    def _break_cycles(self):
    +    def _break_cycles(self) -> None:
             # Break up a reference cycle between this handler and the
             # _ui_module closures to allow for faster GC on CPython.
    -        self.ui = None
    +        self.ui = None  # type: ignore
     
    -    def send_error(self, status_code=500, **kwargs):
    +    def send_error(self, status_code: int = 500, **kwargs: Any) -> None:
             """Sends the given HTTP error code to the browser.
     
             If `flush()` has already been called, it is not possible to send
    @@ -1097,14 +1204,13 @@ class RequestHandler(object):
                     try:
                         self.finish()
                     except Exception:
    -                    gen_log.error("Failed to flush partial response",
    -                                  exc_info=True)
    +                    gen_log.error("Failed to flush partial response", exc_info=True)
                 return
             self.clear()
     
    -        reason = kwargs.get('reason')
    -        if 'exc_info' in kwargs:
    -            exception = kwargs['exc_info'][1]
    +        reason = kwargs.get("reason")
    +        if "exc_info" in kwargs:
    +            exception = kwargs["exc_info"][1]
                 if isinstance(exception, HTTPError) and exception.reason:
                     reason = exception.reason
             self.set_status(status_code, reason=reason)
    @@ -1115,7 +1221,7 @@ class RequestHandler(object):
             if not self._finished:
                 self.finish()
     
    -    def write_error(self, status_code, **kwargs):
    +    def write_error(self, status_code: int, **kwargs: Any) -> None:
             """Override to implement custom error pages.
     
             ``write_error`` may call `write`, `render`, `set_header`, etc
    @@ -1129,19 +1235,19 @@ class RequestHandler(object):
             """
             if self.settings.get("serve_traceback") and "exc_info" in kwargs:
                 # in debug mode, try to send a traceback
    -            self.set_header('Content-Type', 'text/plain')
    +            self.set_header("Content-Type", "text/plain")
                 for line in traceback.format_exception(*kwargs["exc_info"]):
                     self.write(line)
                 self.finish()
             else:
     -            self.finish("<html><title>%(code)d: %(message)s</title>"
     -                        "<body>%(code)d: %(message)s</body></html>" % {
     -                            "code": status_code,
     -                            "message": self._reason,
     -                        })
     +            self.finish(
     +                "<html><title>%(code)d: %(message)s</title>"
     +                "<body>%(code)d: %(message)s</body></html>"
     +                % {"code": status_code, "message": self._reason}
     +            )
     
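For reference, write_error is the intended override point for custom error pages; a minimal sketch (handler name and body text are illustrative):

import tornado.web

class MyHandler(tornado.web.RequestHandler):
    def write_error(self, status_code: int, **kwargs) -> None:
        # render/write/set_header are all legal here; kwargs may carry
        # exc_info when an exception triggered the error.
        self.set_header("Content-Type", "text/plain")
        self.finish("%d: %s (custom error page)" % (status_code, self._reason))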
         @property
    -    def locale(self):
    +    def locale(self) -> tornado.locale.Locale:
             """The locale for the current session.
     
             Determined by either `get_user_locale`, which you can override to
    @@ -1153,17 +1259,19 @@ class RequestHandler(object):
                Added a property setter.
             """
             if not hasattr(self, "_locale"):
    -            self._locale = self.get_user_locale()
    -            if not self._locale:
    +            loc = self.get_user_locale()
    +            if loc is not None:
    +                self._locale = loc
    +            else:
                     self._locale = self.get_browser_locale()
                     assert self._locale
             return self._locale
     
         @locale.setter
    -    def locale(self, value):
    +    def locale(self, value: tornado.locale.Locale) -> None:
             self._locale = value
     
    -    def get_user_locale(self):
    +    def get_user_locale(self) -> Optional[tornado.locale.Locale]:
             """Override to determine the locale from the authenticated user.
     
             If None is returned, we fall back to `get_browser_locale()`.
    @@ -1173,7 +1281,7 @@ class RequestHandler(object):
             """
             return None
     
    -    def get_browser_locale(self, default="en_US"):
    +    def get_browser_locale(self, default: str = "en_US") -> tornado.locale.Locale:
             """Determines the user's locale from ``Accept-Language`` header.
     
             See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    @@ -1198,7 +1306,7 @@ class RequestHandler(object):
             return locale.get(default)
     
         @property
    -    def current_user(self):
    +    def current_user(self) -> Any:
             """The authenticated user for this request.
     
             This is set in one of two ways:
    @@ -1234,17 +1342,17 @@ class RequestHandler(object):
             return self._current_user
     
         @current_user.setter
    -    def current_user(self, value):
    +    def current_user(self, value: Any) -> None:
             self._current_user = value
     
    -    def get_current_user(self):
    +    def get_current_user(self) -> Any:
             """Override to determine the current user from, e.g., a cookie.
     
             This method may not be a coroutine.
             """
             return None
     
    -    def get_login_url(self):
    +    def get_login_url(self) -> str:
             """Override to customize the login URL based on the request.
     
             By default, we use the ``login_url`` application setting.
    @@ -1252,7 +1360,7 @@ class RequestHandler(object):
             self.require_setting("login_url", "@tornado.web.authenticated")
             return self.application.settings["login_url"]
     
    -    def get_template_path(self):
    +    def get_template_path(self) -> Optional[str]:
             """Override to customize template path for each handler.
     
             By default, we use the ``template_path`` application setting.
    @@ -1261,7 +1369,7 @@ class RequestHandler(object):
             return self.application.settings.get("template_path")
     
         @property
    -    def xsrf_token(self):
    +    def xsrf_token(self) -> bytes:
             """The XSRF-prevention token for the current user/session.
     
             To prevent cross-site request forgery, we set an '_xsrf' cookie
    @@ -1301,22 +1409,23 @@ class RequestHandler(object):
                     self._xsrf_token = binascii.b2a_hex(token)
                 elif output_version == 2:
                     mask = os.urandom(4)
    -                self._xsrf_token = b"|".join([
    -                    b"2",
    -                    binascii.b2a_hex(mask),
    -                    binascii.b2a_hex(_websocket_mask(mask, token)),
    -                    utf8(str(int(timestamp)))])
    +                self._xsrf_token = b"|".join(
    +                    [
    +                        b"2",
    +                        binascii.b2a_hex(mask),
    +                        binascii.b2a_hex(_websocket_mask(mask, token)),
    +                        utf8(str(int(timestamp))),
    +                    ]
    +                )
                 else:
    -                raise ValueError("unknown xsrf cookie version %d",
    -                                 output_version)
    +                raise ValueError("unknown xsrf cookie version %d", output_version)
                 if version is None:
    -                expires_days = 30 if self.current_user else None
    -                self.set_cookie("_xsrf", self._xsrf_token,
    -                                expires_days=expires_days,
    -                                **cookie_kwargs)
    +                if self.current_user and "expires_days" not in cookie_kwargs:
    +                    cookie_kwargs["expires_days"] = 30
    +                self.set_cookie("_xsrf", self._xsrf_token, **cookie_kwargs)
             return self._xsrf_token
     
    -    def _get_raw_xsrf_token(self):
    +    def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
             """Read or generate the xsrf token in its raw form.
     
             The raw_xsrf_token is a tuple containing:
    @@ -1327,7 +1436,7 @@ class RequestHandler(object):
             * timestamp: the time this token was generated (will not be accurate
               for version 1 cookies)
             """
    -        if not hasattr(self, '_raw_xsrf_token'):
    +        if not hasattr(self, "_raw_xsrf_token"):
                 cookie = self.get_cookie("_xsrf")
                 if cookie:
                     version, token, timestamp = self._decode_xsrf_token(cookie)
    @@ -1337,10 +1446,14 @@ class RequestHandler(object):
                     version = None
                     token = os.urandom(16)
                     timestamp = time.time()
    +            assert token is not None
    +            assert timestamp is not None
                 self._raw_xsrf_token = (version, token, timestamp)
             return self._raw_xsrf_token
     
    -    def _decode_xsrf_token(self, cookie):
    +    def _decode_xsrf_token(
    +        self, cookie: str
    +    ) -> Tuple[Optional[int], Optional[bytes], Optional[float]]:
             """Convert a cookie string into a the tuple form returned by
             _get_raw_xsrf_token.
             """
    @@ -1351,12 +1464,11 @@ class RequestHandler(object):
                 if m:
                     version = int(m.group(1))
                     if version == 2:
    -                    _, mask, masked_token, timestamp = cookie.split("|")
    +                    _, mask_str, masked_token, timestamp_str = cookie.split("|")
     
    -                    mask = binascii.a2b_hex(utf8(mask))
    -                    token = _websocket_mask(
    -                        mask, binascii.a2b_hex(utf8(masked_token)))
    -                    timestamp = int(timestamp)
    +                    mask = binascii.a2b_hex(utf8(mask_str))
    +                    token = _websocket_mask(mask, binascii.a2b_hex(utf8(masked_token)))
    +                    timestamp = int(timestamp_str)
                         return version, token, timestamp
                     else:
                         # Treat unknown versions as not present instead of failing.
    @@ -1372,11 +1484,10 @@ class RequestHandler(object):
                     return (version, token, timestamp)
             except Exception:
                 # Catch exceptions and return nothing instead of failing.
    -            gen_log.debug("Uncaught exception in _decode_xsrf_token",
    -                          exc_info=True)
    +            gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
                 return None, None, None
     
    -    def check_xsrf_cookie(self):
    +    def check_xsrf_cookie(self) -> None:
             """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
     
             To prevent cross-site request forgery, we set an ``_xsrf``
    @@ -1390,30 +1501,31 @@ class RequestHandler(object):
     
             See http://en.wikipedia.org/wiki/Cross-site_request_forgery
     
    -        Prior to release 1.1.1, this check was ignored if the HTTP header
    -        ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    -        has been shown to be insecure and has been removed.  For more
    -        information please see
    -        http://www.djangoproject.com/weblog/2011/feb/08/security/
    -        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    -
             .. versionchanged:: 3.2.2
                Added support for cookie version 2.  Both versions 1 and 2 are
                supported.
             """
    -        token = (self.get_argument("_xsrf", None) or
    -                 self.request.headers.get("X-Xsrftoken") or
    -                 self.request.headers.get("X-Csrftoken"))
    +        # Prior to release 1.1.1, this check was ignored if the HTTP header
    +        # ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    +        # has been shown to be insecure and has been removed.  For more
    +        # information please see
    +        # http://www.djangoproject.com/weblog/2011/feb/08/security/
    +        # http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    +        token = (
    +            self.get_argument("_xsrf", None)
    +            or self.request.headers.get("X-Xsrftoken")
    +            or self.request.headers.get("X-Csrftoken")
    +        )
             if not token:
                 raise HTTPError(403, "'_xsrf' argument missing from POST")
             _, token, _ = self._decode_xsrf_token(token)
             _, expected_token, _ = self._get_raw_xsrf_token()
             if not token:
                 raise HTTPError(403, "'_xsrf' argument has invalid format")
    -        if not _time_independent_equals(utf8(token), utf8(expected_token)):
    +        if not hmac.compare_digest(utf8(token), utf8(expected_token)):
                 raise HTTPError(403, "XSRF cookie does not match POST argument")
     
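The hunk above swaps Tornado's _time_independent_equals for the stdlib hmac.compare_digest; both aim at constant-time comparison, which a plain == on bytes does not give (it can return early at the first mismatching byte, leaking how much of the secret matched). A short illustration:

import hmac

# compare_digest's running time depends only on the inputs' lengths,
# not on where the first difference occurs.
expected = b"a1b2c3d4"
assert hmac.compare_digest(b"a1b2c3d4", expected)
assert not hmac.compare_digest(b"a1b2c3XX", expected)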
    -    def xsrf_form_html(self):
    +    def xsrf_form_html(self) -> str:
             """An HTML ```` element to be included with all POST forms.
     
             It defines the ``_xsrf`` input value, which we check on all POST
    @@ -1426,10 +1538,13 @@ class RequestHandler(object):
     
             See `check_xsrf_cookie()` above for more information.
             """
     -        return '<input type="hidden" name="_xsrf" value="' + \
     -            escape.xhtml_escape(self.xsrf_token) + '"/>'
     +        return (
     +            '<input type="hidden" name="_xsrf" value="'
     +            + escape.xhtml_escape(self.xsrf_token)
     +            + '"/>'
     +        )
     
    -    def static_url(self, path, include_host=None, **kwargs):
    +    def static_url(self, path: str, include_host: bool = None, **kwargs: Any) -> str:
             """Returns a static URL for the given relative static file path.
     
             This method requires you set the ``static_path`` setting in your
    @@ -1451,8 +1566,9 @@ class RequestHandler(object):
     
             """
             self.require_setting("static_path", "static_url")
    -        get_url = self.settings.get("static_handler_class",
    -                                    StaticFileHandler).make_static_url
    +        get_url = self.settings.get(
    +            "static_handler_class", StaticFileHandler
    +        ).make_static_url
     
             if include_host is None:
                 include_host = getattr(self, "include_host", False)
    @@ -1464,17 +1580,19 @@ class RequestHandler(object):
     
             return base + get_url(self.settings, path, **kwargs)
     
    -    def require_setting(self, name, feature="this feature"):
    +    def require_setting(self, name: str, feature: str = "this feature") -> None:
             """Raises an exception if the given app setting is not defined."""
             if not self.application.settings.get(name):
    -            raise Exception("You must define the '%s' setting in your "
    -                            "application to use %s" % (name, feature))
    +            raise Exception(
    +                "You must define the '%s' setting in your "
    +                "application to use %s" % (name, feature)
    +            )
     
    -    def reverse_url(self, name, *args):
    +    def reverse_url(self, name: str, *args: Any) -> str:
             """Alias for `Application.reverse_url`."""
             return self.application.reverse_url(name, *args)
     
    -    def compute_etag(self):
    +    def compute_etag(self) -> Optional[str]:
             """Computes the etag header to be used for this request.
     
             By default uses a hash of the content written so far.
    @@ -1487,7 +1605,7 @@ class RequestHandler(object):
                 hasher.update(part)
             return '"%s"' % hasher.hexdigest()
     
    -    def set_etag_header(self):
    +    def set_etag_header(self) -> None:
             """Sets the response's Etag header using ``self.compute_etag()``.
     
             Note: no header will be set if ``compute_etag()`` returns ``None``.
    @@ -1498,7 +1616,7 @@ class RequestHandler(object):
             if etag is not None:
                 self.set_header("Etag", etag)
     
    -    def check_etag_header(self):
    +    def check_etag_header(self) -> bool:
             """Checks the ``Etag`` header against requests's ``If-None-Match``.
     
             Returns ``True`` if the request's Etag matches and a 304 should be
    @@ -1519,19 +1637,18 @@ class RequestHandler(object):
             # Find all weak and strong etag values from If-None-Match header
             # because RFC 7232 allows multiple etag values in a single header.
             etags = re.findall(
    -            br'\*|(?:W/)?"[^"]*"',
    -            utf8(self.request.headers.get("If-None-Match", ""))
    +            br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
             )
             if not computed_etag or not etags:
                 return False
     
             match = False
    -        if etags[0] == b'*':
    +        if etags[0] == b"*":
                 match = True
             else:
                 # Use a weak comparison when comparing entity-tags.
    -            def val(x):
    -                return x[2:] if x.startswith(b'W/') else x
    +            def val(x: bytes) -> bytes:
    +                return x[2:] if x.startswith(b"W/") else x
     
                 for etag in etags:
                     if val(etag) == val(computed_etag):
    @@ -1539,36 +1656,30 @@ class RequestHandler(object):
                         break
             return match
     
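The typed val() helper above implements RFC 7232's weak comparison: a W/ prefix marks a weak validator and is stripped before comparing, so a weak and a strong etag with the same opaque tag match. A sketch of that rule in isolation:

def val(x: bytes) -> bytes:
    return x[2:] if x.startswith(b"W/") else x

assert val(b'W/"abc"') == val(b'"abc"')  # weak matches strong
assert val(b'"abc"') != val(b'"xyz"')    # different tags still differ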
    -    def _stack_context_handle_exception(self, type, value, traceback):
    -        try:
    -            # For historical reasons _handle_request_exception only takes
    -            # the exception value instead of the full triple,
    -            # so re-raise the exception to ensure that it's in
    -            # sys.exc_info()
    -            raise_exc_info((type, value, traceback))
    -        except Exception:
    -            self._handle_request_exception(value)
    -        return True
    -
    -    @gen.coroutine
    -    def _execute(self, transforms, *args, **kwargs):
    +    async def _execute(
    +        self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes
    +    ) -> None:
             """Executes this request with the given output transforms."""
             self._transforms = transforms
             try:
                 if self.request.method not in self.SUPPORTED_METHODS:
                     raise HTTPError(405)
                 self.path_args = [self.decode_argument(arg) for arg in args]
    -            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
    -                                    for (k, v) in kwargs.items())
    +            self.path_kwargs = dict(
    +                (k, self.decode_argument(v, name=k)) for (k, v) in kwargs.items()
    +            )
                 # If XSRF cookies are turned on, reject form submissions without
                 # the proper cookie
    -            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
    -                    self.application.settings.get("xsrf_cookies"):
    +            if self.request.method not in (
    +                "GET",
    +                "HEAD",
    +                "OPTIONS",
    +            ) and self.application.settings.get("xsrf_cookies"):
                     self.check_xsrf_cookie()
     
                 result = self.prepare()
                 if result is not None:
    -                result = yield result
    +                result = await result
                 if self._prepared_future is not None:
                     # Tell the Application we've finished with prepare()
                     # and are ready for the body to arrive.
    @@ -1582,14 +1693,14 @@ class RequestHandler(object):
                     # result; the data has been passed to self.data_received
                     # instead.
                     try:
    -                    yield self.request.body
    +                    await self.request._body_future
                     except iostream.StreamClosedError:
                         return
     
                 method = getattr(self, self.request.method.lower())
                 result = method(*self.path_args, **self.path_kwargs)
                 if result is not None:
    -                result = yield result
    +                result = await result
                 if self._auto_finish and not self._finished:
                     self.finish()
             except Exception as e:
    @@ -1600,21 +1711,22 @@ class RequestHandler(object):
                 finally:
                     # Unset result to avoid circular references
                     result = None
    -            if (self._prepared_future is not None and
    -                    not self._prepared_future.done()):
    +            if self._prepared_future is not None and not self._prepared_future.done():
                     # In case we failed before setting _prepared_future, do it
                     # now (to unblock the HTTP server).  Note that this is not
                     # in a finally block to avoid GC issues prior to Python 3.4.
                     self._prepared_future.set_result(None)
     
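With _execute converted to a native coroutine, verb methods can be plain async defs whose awaitables are awaited directly (the `result = await result` above). A minimal handler sketch (the URL and handler name are placeholders):

import tornado.web
from tornado.httpclient import AsyncHTTPClient

class ProxyHandler(tornado.web.RequestHandler):
    async def get(self) -> None:
        # _execute awaits this coroutine; no @gen.coroutine needed.
        resp = await AsyncHTTPClient().fetch("https://example.com/")
        self.write(resp.body)  # auto-finished when get() returns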
    -    def data_received(self, chunk):
    +    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
             """Implement this method to handle streamed request data.
     
             Requires the `.stream_request_body` decorator.
    +
    +        May be a coroutine for flow control.
             """
             raise NotImplementedError()
     
    -    def _log(self):
    +    def _log(self) -> None:
             """Logs the current request.
     
             Sort of deprecated since this functionality was moved to the
    @@ -1623,11 +1735,14 @@ class RequestHandler(object):
             """
             self.application.log_request(self)
     
    -    def _request_summary(self):
    -        return "%s %s (%s)" % (self.request.method, self.request.uri,
    -                               self.request.remote_ip)
    +    def _request_summary(self) -> str:
    +        return "%s %s (%s)" % (
    +            self.request.method,
    +            self.request.uri,
    +            self.request.remote_ip,
    +        )
     
    -    def _handle_request_exception(self, e):
    +    def _handle_request_exception(self, e: BaseException) -> None:
             if isinstance(e, Finish):
                 # Not an error; just finish the request without logging.
                 if not self._finished:
    @@ -1649,7 +1764,12 @@ class RequestHandler(object):
             else:
                 self.send_error(500, exc_info=sys.exc_info())
     
    -    def log_exception(self, typ, value, tb):
    +    def log_exception(
    +        self,
    +        typ: "Optional[Type[BaseException]]",
    +        value: Optional[BaseException],
    +        tb: Optional[TracebackType],
    +    ) -> None:
             """Override to customize logging of uncaught exceptions.
     
             By default logs instances of `HTTPError` as warnings without
    @@ -1662,123 +1782,50 @@ class RequestHandler(object):
             if isinstance(value, HTTPError):
                 if value.log_message:
                     format = "%d %s: " + value.log_message
    -                args = ([value.status_code, self._request_summary()] +
    -                        list(value.args))
    +                args = [value.status_code, self._request_summary()] + list(value.args)
                     gen_log.warning(format, *args)
             else:
    -            app_log.error("Uncaught exception %s\n%r", self._request_summary(),
    -                          self.request, exc_info=(typ, value, tb))
    +            app_log.error(  # type: ignore
    +                "Uncaught exception %s\n%r",
    +                self._request_summary(),
    +                self.request,
    +                exc_info=(typ, value, tb),
    +            )
     
    -    def _ui_module(self, name, module):
    -        def render(*args, **kwargs):
    +    def _ui_module(self, name: str, module: Type["UIModule"]) -> Callable[..., str]:
    +        def render(*args, **kwargs) -> str:  # type: ignore
                 if not hasattr(self, "_active_modules"):
    -                self._active_modules = {}
    +                self._active_modules = {}  # type: Dict[str, UIModule]
                 if name not in self._active_modules:
                     self._active_modules[name] = module(self)
                 rendered = self._active_modules[name].render(*args, **kwargs)
                 return rendered
    +
             return render
     
    -    def _ui_method(self, method):
    +    def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
             return lambda *args, **kwargs: method(self, *args, **kwargs)
     
    -    def _clear_headers_for_304(self):
    +    def _clear_headers_for_304(self) -> None:
             # 304 responses should not contain entity headers (defined in
             # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
             # not explicitly allowed by
             # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
    -        headers = ["Allow", "Content-Encoding", "Content-Language",
    -                   "Content-Length", "Content-MD5", "Content-Range",
    -                   "Content-Type", "Last-Modified"]
    +        headers = [
    +            "Allow",
    +            "Content-Encoding",
    +            "Content-Language",
    +            "Content-Length",
    +            "Content-MD5",
    +            "Content-Range",
    +            "Content-Type",
    +            "Last-Modified",
    +        ]
             for h in headers:
                 self.clear_header(h)
     
     
    -def asynchronous(method):
    -    """Wrap request handler methods with this if they are asynchronous.
    -
    -    This decorator is for callback-style asynchronous methods; for
    -    coroutines, use the ``@gen.coroutine`` decorator without
    -    ``@asynchronous``. (It is legal for legacy reasons to use the two
    -    decorators together provided ``@asynchronous`` is first, but
    -    ``@asynchronous`` will be ignored in this case)
    -
    -    This decorator should only be applied to the :ref:`HTTP verb
     -    methods <verbs>`; its behavior is undefined for any other method.
    -    This decorator does not *make* a method asynchronous; it tells
    -    the framework that the method *is* asynchronous.  For this decorator
    -    to be useful the method must (at least sometimes) do something
    -    asynchronous.
    -
    -    If this decorator is given, the response is not finished when the
    -    method returns. It is up to the request handler to call
     -    `self.finish() <RequestHandler.finish>` to finish the HTTP
    -    request. Without this decorator, the request is automatically
    -    finished when the ``get()`` or ``post()`` method returns. Example:
    -
    -    .. testcode::
    -
    -       class MyRequestHandler(RequestHandler):
    -           @asynchronous
    -           def get(self):
    -              http = httpclient.AsyncHTTPClient()
    -              http.fetch("http://friendfeed.com/", self._on_download)
    -
    -           def _on_download(self, response):
    -              self.write("Downloaded!")
    -              self.finish()
    -
    -    .. testoutput::
    -       :hide:
    -
    -    .. versionchanged:: 3.1
    -       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    -
    -    .. versionchanged:: 4.3 Returning anything but ``None`` or a
    -       yieldable object from a method decorated with ``@asynchronous``
    -       is an error. Such return values were previously ignored silently.
    -
    -    .. deprecated:: 5.1
    -
    -       This decorator is deprecated and will be removed in Tornado 6.0.
    -       Use coroutines instead.
    -    """
    -    warnings.warn("@asynchronous is deprecated, use coroutines instead",
    -                  DeprecationWarning)
    -    # Delay the IOLoop import because it's not available on app engine.
    -    from tornado.ioloop import IOLoop
    -
    -    @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    -        self._auto_finish = False
    -        with stack_context.ExceptionStackContext(
    -                self._stack_context_handle_exception, delay_warning=True):
    -            result = method(self, *args, **kwargs)
    -            if result is not None:
    -                result = gen.convert_yielded(result)
    -
    -                # If @asynchronous is used with @gen.coroutine, (but
    -                # not @gen.engine), we can automatically finish the
    -                # request when the future resolves.  Additionally,
    -                # the Future will swallow any exceptions so we need
    -                # to throw them back out to the stack context to finish
    -                # the request.
    -                def future_complete(f):
    -                    f.result()
    -                    if not self._finished:
    -                        self.finish()
    -                IOLoop.current().add_future(result, future_complete)
    -                # Once we have done this, hide the Future from our
    -                # caller (i.e. RequestHandler._when_complete), which
    -                # would otherwise set up its own callback and
    -                # exception handler (resulting in exceptions being
    -                # logged twice).
    -                return None
    -            return result
    -    return wrapper
    -
    -
    -def stream_request_body(cls):
    +def stream_request_body(cls: Type[RequestHandler]) -> Type[RequestHandler]:
         """Apply to `RequestHandler` subclasses to enable streaming body support.
     
         This decorator implies the following changes:
    @@ -1805,21 +1852,26 @@ def stream_request_body(cls):
         return cls
     
     
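A sketch of the decorator in use, per the docstring above (handler and attribute names are illustrative):

import tornado.web

@tornado.web.stream_request_body
class UploadHandler(tornado.web.RequestHandler):
    def prepare(self) -> None:
        self.received = 0

    def data_received(self, chunk: bytes) -> None:
        self.received += len(chunk)  # called as body chunks arrive

    def put(self) -> None:
        self.write("got %d bytes" % self.received)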
    -def _has_stream_request_body(cls):
    +def _has_stream_request_body(cls: Type[RequestHandler]) -> bool:
         if not issubclass(cls, RequestHandler):
             raise TypeError("expected subclass of RequestHandler, got %r", cls)
    -    return getattr(cls, '_stream_request_body', False)
    +    return cls._stream_request_body
     
     
    -def removeslash(method):
    +def removeslash(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
         """Use this decorator to remove trailing slashes from the request path.
     
         For example, a request to ``/foo/`` would redirect to ``/foo`` with this
         decorator. Your request handler mapping should use a regular expression
         like ``r'/foo/*'`` in conjunction with using the decorator.
         """
    +
         @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
             if self.request.path.endswith("/"):
                 if self.request.method in ("GET", "HEAD"):
                     uri = self.request.path.rstrip("/")
    @@ -1827,31 +1879,38 @@ def removeslash(method):
                         if self.request.query:
                             uri += "?" + self.request.query
                         self.redirect(uri, permanent=True)
    -                    return
    +                    return None
                 else:
                     raise HTTPError(404)
             return method(self, *args, **kwargs)
    +
         return wrapper
     
     
    -def addslash(method):
    +def addslash(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
         """Use this decorator to add a missing trailing slash to the request path.
     
         For example, a request to ``/foo`` would redirect to ``/foo/`` with this
         decorator. Your request handler mapping should use a regular expression
         like ``r'/foo/?'`` in conjunction with using the decorator.
         """
    +
         @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
             if not self.request.path.endswith("/"):
                 if self.request.method in ("GET", "HEAD"):
                     uri = self.request.path + "/"
                     if self.request.query:
                         uri += "?" + self.request.query
                     self.redirect(uri, permanent=True)
    -                return
    +                return None
                 raise HTTPError(404)
             return method(self, *args, **kwargs)
    +
         return wrapper
     
     
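The docstrings above pair each decorator with a route pattern; a sketch for removeslash (handler name is illustrative):

import tornado.web
from tornado.web import removeslash

class FooHandler(tornado.web.RequestHandler):
    @removeslash
    def get(self) -> None:
        self.write("no trailing slash here")

# r'/foo/*' matches /foo, /foo/, /foo//, ...; GETs to /foo/ are
# permanently redirected to /foo before get() runs.
app = tornado.web.Application([(r"/foo/*", FooHandler)])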
    @@ -1866,28 +1925,36 @@ class _ApplicationRouter(ReversibleRuleRouter):
             `_ApplicationRouter` instance.
         """
     
    -    def __init__(self, application, rules=None):
    +    def __init__(self, application: "Application", rules: _RuleList = None) -> None:
             assert isinstance(application, Application)
             self.application = application
             super(_ApplicationRouter, self).__init__(rules)
     
    -    def process_rule(self, rule):
    +    def process_rule(self, rule: Rule) -> Rule:
             rule = super(_ApplicationRouter, self).process_rule(rule)
     
             if isinstance(rule.target, (list, tuple)):
    -            rule.target = _ApplicationRouter(self.application, rule.target)
    +            rule.target = _ApplicationRouter(  # type: ignore
    +                self.application, rule.target
    +            )
     
             return rule
     
    -    def get_target_delegate(self, target, request, **target_params):
    +    def get_target_delegate(
    +        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
    +    ) -> Optional[httputil.HTTPMessageDelegate]:
             if isclass(target) and issubclass(target, RequestHandler):
    -            return self.application.get_handler_delegate(request, target, **target_params)
    +            return self.application.get_handler_delegate(
    +                request, target, **target_params
    +            )
     
    -        return super(_ApplicationRouter, self).get_target_delegate(target, request, **target_params)
    +        return super(_ApplicationRouter, self).get_target_delegate(
    +            target, request, **target_params
    +        )
     
     
     class Application(ReversibleRouter):
    -    """A collection of request handlers that make up a web application.
    +    r"""A collection of request handlers that make up a web application.
     
         Instances of this class are callable and can be passed directly to
         HTTPServer to serve the application::
    @@ -1951,7 +2018,7 @@ class Application(ReversibleRouter):
     
            Applications that do not use TLS may be vulnerable to :ref:`DNS
            rebinding <dnsrebinding>` attacks. This attack is especially
    -       relevant to applications that only listen on ``127.0.0.1` or
    +       relevant to applications that only listen on ``127.0.0.1`` or
            other private networks. Appropriate host patterns must be used
            (instead of the default of ``r'.*'``) to prevent this risk. The
            ``default_host`` argument must not be used in applications that
    @@ -1969,54 +2036,64 @@ class Application(ReversibleRouter):
            Integration with the new `tornado.routing` module.
     
         """
    -    def __init__(self, handlers=None, default_host=None, transforms=None,
    -                 **settings):
    +
    +    def __init__(
    +        self,
    +        handlers: _RuleList = None,
    +        default_host: str = None,
    +        transforms: List[Type["OutputTransform"]] = None,
    +        **settings: Any
    +    ) -> None:
             if transforms is None:
    -            self.transforms = []
    +            self.transforms = []  # type: List[Type[OutputTransform]]
                 if settings.get("compress_response") or settings.get("gzip"):
                     self.transforms.append(GZipContentEncoding)
             else:
                 self.transforms = transforms
             self.default_host = default_host
             self.settings = settings
    -        self.ui_modules = {'linkify': _linkify,
    -                           'xsrf_form_html': _xsrf_form_html,
    -                           'Template': TemplateModule,
    -                           }
    -        self.ui_methods = {}
    +        self.ui_modules = {
    +            "linkify": _linkify,
    +            "xsrf_form_html": _xsrf_form_html,
    +            "Template": TemplateModule,
    +        }
    +        self.ui_methods = {}  # type: Dict[str, Callable[..., str]]
             self._load_ui_modules(settings.get("ui_modules", {}))
             self._load_ui_methods(settings.get("ui_methods", {}))
             if self.settings.get("static_path"):
                 path = self.settings["static_path"]
                 handlers = list(handlers or [])
    -            static_url_prefix = settings.get("static_url_prefix",
    -                                             "/static/")
    -            static_handler_class = settings.get("static_handler_class",
    -                                                StaticFileHandler)
    +            static_url_prefix = settings.get("static_url_prefix", "/static/")
    +            static_handler_class = settings.get(
    +                "static_handler_class", StaticFileHandler
    +            )
                 static_handler_args = settings.get("static_handler_args", {})
    -            static_handler_args['path'] = path
    -            for pattern in [re.escape(static_url_prefix) + r"(.*)",
    -                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
    -                handlers.insert(0, (pattern, static_handler_class,
    -                                    static_handler_args))
    +            static_handler_args["path"] = path
    +            for pattern in [
    +                re.escape(static_url_prefix) + r"(.*)",
    +                r"/(favicon\.ico)",
    +                r"/(robots\.txt)",
    +            ]:
    +                handlers.insert(0, (pattern, static_handler_class, static_handler_args))
     
    -        if self.settings.get('debug'):
    -            self.settings.setdefault('autoreload', True)
    -            self.settings.setdefault('compiled_template_cache', False)
    -            self.settings.setdefault('static_hash_cache', False)
    -            self.settings.setdefault('serve_traceback', True)
    +        if self.settings.get("debug"):
    +            self.settings.setdefault("autoreload", True)
    +            self.settings.setdefault("compiled_template_cache", False)
    +            self.settings.setdefault("static_hash_cache", False)
    +            self.settings.setdefault("serve_traceback", True)
     
             self.wildcard_router = _ApplicationRouter(self, handlers)
    -        self.default_router = _ApplicationRouter(self, [
    -            Rule(AnyMatches(), self.wildcard_router)
    -        ])
    +        self.default_router = _ApplicationRouter(
    +            self, [Rule(AnyMatches(), self.wildcard_router)]
    +        )
     
             # Automatically reload modified modules
    -        if self.settings.get('autoreload'):
    +        if self.settings.get("autoreload"):
                 from tornado import autoreload
    +
                 autoreload.start()
     
    -    def listen(self, port, address="", **kwargs):
    +    def listen(self, port: int, address: str = "", **kwargs: Any) -> HTTPServer:
             """Starts an HTTP server for this application on the given port.
     
             This is a convenience alias for creating an `.HTTPServer`
    @@ -2035,14 +2112,11 @@ class Application(ReversibleRouter):
             .. versionchanged:: 4.3
                Now returns the `.HTTPServer` object.
             """
    -        # import is here rather than top level because HTTPServer
    -        # is not importable on appengine
    -        from tornado.httpserver import HTTPServer
             server = HTTPServer(self, **kwargs)
             server.listen(port, address)
             return server
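
Since the docstring describes ``listen`` as a convenience wrapper around `.HTTPServer`, a minimal end-to-end sketch (the port number is arbitrary)::

    import tornado.ioloop
    import tornado.web

    class MainHandler(tornado.web.RequestHandler):
        def get(self) -> None:
            self.write("Hello, world")

    app = tornado.web.Application([(r"/", MainHandler)])
    server = app.listen(8888)  # the HTTPServer constructed above
    tornado.ioloop.IOLoop.current().start()
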
     
    -    def add_handlers(self, host_pattern, host_handlers):
    +    def add_handlers(self, host_pattern: str, host_handlers: _RuleList) -> None:
             """Appends the given handlers to our handler list.
     
             Host patterns are processed sequentially in the order they were
    @@ -2054,31 +2128,31 @@ class Application(ReversibleRouter):
             self.default_router.rules.insert(-1, rule)
     
             if self.default_host is not None:
    -            self.wildcard_router.add_rules([(
    -                DefaultHostMatches(self, host_matcher.host_pattern),
    -                host_handlers
    -            )])
    +            self.wildcard_router.add_rules(
    +                [(DefaultHostMatches(self, host_matcher.host_pattern), host_handlers)]
    +            )
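
A sketch of the virtual-host dispatch that ``add_handlers`` provides; the host pattern and ``StatusHandler`` are illustrative::

    app = tornado.web.Application([(r"/", MainHandler)])
    # Rules added here match only requests whose Host header fits the
    # pattern, and are consulted before the catch-all rules that were
    # passed to the constructor.
    app.add_handlers(r"api\.example\.com", [(r"/status", StatusHandler)])
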
     
    -    def add_transform(self, transform_class):
    +    def add_transform(self, transform_class: Type["OutputTransform"]) -> None:
             self.transforms.append(transform_class)
     
    -    def _load_ui_methods(self, methods):
    +    def _load_ui_methods(self, methods: Any) -> None:
             if isinstance(methods, types.ModuleType):
    -            self._load_ui_methods(dict((n, getattr(methods, n))
    -                                       for n in dir(methods)))
    +            self._load_ui_methods(dict((n, getattr(methods, n)) for n in dir(methods)))
             elif isinstance(methods, list):
                 for m in methods:
                     self._load_ui_methods(m)
             else:
                 for name, fn in methods.items():
    -                if not name.startswith("_") and hasattr(fn, "__call__") \
    -                        and name[0].lower() == name[0]:
    +                if (
    +                    not name.startswith("_")
    +                    and hasattr(fn, "__call__")
    +                    and name[0].lower() == name[0]
    +                ):
                         self.ui_methods[name] = fn
     
    -    def _load_ui_modules(self, modules):
    +    def _load_ui_modules(self, modules: Any) -> None:
             if isinstance(modules, types.ModuleType):
    -            self._load_ui_modules(dict((n, getattr(modules, n))
    -                                       for n in dir(modules)))
    +            self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules)))
             elif isinstance(modules, list):
                 for m in modules:
                     self._load_ui_modules(m)
    @@ -2091,27 +2165,37 @@ class Application(ReversibleRouter):
                     except TypeError:
                         pass
     
    -    def __call__(self, request):
    +    def __call__(
    +        self, request: httputil.HTTPServerRequest
    +    ) -> Optional[Awaitable[None]]:
             # Legacy HTTPServer interface
             dispatcher = self.find_handler(request)
             return dispatcher.execute()
     
    -    def find_handler(self, request, **kwargs):
    +    def find_handler(
    +        self, request: httputil.HTTPServerRequest, **kwargs: Any
    +    ) -> "_HandlerDelegate":
             route = self.default_router.find_handler(request)
             if route is not None:
    -            return route
    +            return cast("_HandlerDelegate", route)
     
    -        if self.settings.get('default_handler_class'):
    +        if self.settings.get("default_handler_class"):
                 return self.get_handler_delegate(
                     request,
    -                self.settings['default_handler_class'],
    -                self.settings.get('default_handler_args', {}))
    +                self.settings["default_handler_class"],
    +                self.settings.get("default_handler_args", {}),
    +            )
     
    -        return self.get_handler_delegate(
    -            request, ErrorHandler, {'status_code': 404})
    +        return self.get_handler_delegate(request, ErrorHandler, {"status_code": 404})
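
The 404 fallback above can be replaced through the ``default_handler_class`` setting checked just before it; a sketch with an illustrative handler::

    class My404Handler(tornado.web.RequestHandler):
        def prepare(self) -> None:
            # runs for every unmatched request, regardless of HTTP method
            self.set_status(404)
            self.finish("custom not-found page")

    app = tornado.web.Application(
        [(r"/", MainHandler)],
        default_handler_class=My404Handler,
    )
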
     
    -    def get_handler_delegate(self, request, target_class, target_kwargs=None,
    -                             path_args=None, path_kwargs=None):
    +    def get_handler_delegate(
    +        self,
    +        request: httputil.HTTPServerRequest,
    +        target_class: Type[RequestHandler],
    +        target_kwargs: Dict[str, Any] = None,
    +        path_args: List[bytes] = None,
    +        path_kwargs: Dict[str, bytes] = None,
     +    ) -> "_HandlerDelegate":
              """Returns a `~.httputil.HTTPMessageDelegate` that can serve a request
              for the application and a `RequestHandler` subclass.
     
    @@ -2123,9 +2207,10 @@ class Application(ReversibleRouter):
             :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method.
             """
             return _HandlerDelegate(
    -            self, request, target_class, target_kwargs, path_args, path_kwargs)
    +            self, request, target_class, target_kwargs, path_args, path_kwargs
    +        )
     
    -    def reverse_url(self, name, *args):
    +    def reverse_url(self, name: str, *args: Any) -> str:
             """Returns a URL path for handler named ``name``
     
             The handler must be added to the application as a named `URLSpec`.
    @@ -2140,7 +2225,7 @@ class Application(ReversibleRouter):
     
             raise KeyError("%s not found in named urls" % name)
     
    -    def log_request(self, handler):
    +    def log_request(self, handler: RequestHandler) -> None:
             """Writes a completed HTTP request to the logs.
     
             By default writes to the python root logger.  To change
    @@ -2158,13 +2243,24 @@ class Application(ReversibleRouter):
             else:
                 log_method = access_log.error
             request_time = 1000.0 * handler.request.request_time()
    -        log_method("%d %s %.2fms", handler.get_status(),
    -                   handler._request_summary(), request_time)
    +        log_method(
    +            "%d %s %.2fms",
    +            handler.get_status(),
    +            handler._request_summary(),
    +            request_time,
    +        )
     
     
     class _HandlerDelegate(httputil.HTTPMessageDelegate):
    -    def __init__(self, application, request, handler_class, handler_kwargs,
    -                 path_args, path_kwargs):
    +    def __init__(
    +        self,
    +        application: Application,
    +        request: httputil.HTTPServerRequest,
    +        handler_class: Type[RequestHandler],
    +        handler_kwargs: Optional[Dict[str, Any]],
    +        path_args: Optional[List[bytes]],
    +        path_kwargs: Optional[Dict[str, bytes]],
    +    ) -> None:
             self.application = application
             self.connection = request.connection
             self.request = request
    @@ -2172,35 +2268,41 @@ class _HandlerDelegate(httputil.HTTPMessageDelegate):
             self.handler_kwargs = handler_kwargs or {}
             self.path_args = path_args or []
             self.path_kwargs = path_kwargs or {}
    -        self.chunks = []
    +        self.chunks = []  # type: List[bytes]
             self.stream_request_body = _has_stream_request_body(self.handler_class)
     
    -    def headers_received(self, start_line, headers):
    +    def headers_received(
    +        self,
    +        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
    +        headers: httputil.HTTPHeaders,
    +    ) -> Optional[Awaitable[None]]:
             if self.stream_request_body:
    -            self.request.body = Future()
    +            self.request._body_future = Future()
                 return self.execute()
    +        return None
     
    -    def data_received(self, data):
    +    def data_received(self, data: bytes) -> Optional[Awaitable[None]]:
             if self.stream_request_body:
                 return self.handler.data_received(data)
             else:
                 self.chunks.append(data)
    +            return None
     
    -    def finish(self):
    +    def finish(self) -> None:
             if self.stream_request_body:
    -            future_set_result_unless_cancelled(self.request.body, None)
    +            future_set_result_unless_cancelled(self.request._body_future, None)
             else:
    -            self.request.body = b''.join(self.chunks)
    +            self.request.body = b"".join(self.chunks)
                 self.request._parse_body()
                 self.execute()
     
    -    def on_connection_close(self):
    +    def on_connection_close(self) -> None:
             if self.stream_request_body:
                 self.handler.on_connection_close()
             else:
    -            self.chunks = None
    +            self.chunks = None  # type: ignore
     
    -    def execute(self):
    +    def execute(self) -> Optional[Awaitable[None]]:
             # If template cache is disabled (usually in the debug mode),
             # re-compile templates and reload static files on every
             # request so you don't need to restart to see changes
    @@ -2208,11 +2310,12 @@ class _HandlerDelegate(httputil.HTTPMessageDelegate):
                 with RequestHandler._template_loader_lock:
                     for loader in RequestHandler._template_loaders.values():
                         loader.reset()
    -        if not self.application.settings.get('static_hash_cache', True):
    +        if not self.application.settings.get("static_hash_cache", True):
                 StaticFileHandler.reset()
     
    -        self.handler = self.handler_class(self.application, self.request,
    -                                          **self.handler_kwargs)
    +        self.handler = self.handler_class(
    +            self.application, self.request, **self.handler_kwargs
    +        )
             transforms = [t(self.request) for t in self.application.transforms]
     
             if self.stream_request_body:
    @@ -2224,8 +2327,10 @@ class _HandlerDelegate(httputil.HTTPMessageDelegate):
             # except handler, and we cannot easily access the IOLoop here to
             # call add_future (because of the requirement to remain compatible
             # with WSGI)
    -        self.handler._execute(transforms, *self.path_args,
    -                              **self.path_kwargs)
    +        fut = gen.convert_yielded(
    +            self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
    +        )
    +        fut.add_done_callback(lambda f: f.result())
             # If we are streaming the request body, then execute() is finished
             # when the handler has prepared to receive the body.  If not,
             # it doesn't matter when execute() finishes (so we return None)
    @@ -2254,18 +2359,22 @@ class HTTPError(Exception):
              determined automatically from ``status_code``, but can be supplied
             to use a non-standard numeric code.
         """
    -    def __init__(self, status_code=500, log_message=None, *args, **kwargs):
    +
    +    def __init__(
    +        self, status_code: int = 500, log_message: str = None, *args: Any, **kwargs: Any
    +    ) -> None:
             self.status_code = status_code
             self.log_message = log_message
             self.args = args
    -        self.reason = kwargs.get('reason', None)
    +        self.reason = kwargs.get("reason", None)
             if log_message and not args:
    -            self.log_message = log_message.replace('%', '%%')
    +            self.log_message = log_message.replace("%", "%%")
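
A sketch of the constructor arguments in use; ``load_item`` is a hypothetical lookup helper::

    class ItemHandler(tornado.web.RequestHandler):
        def get(self, item_id: str) -> None:
            item = load_item(item_id)  # hypothetical
            if item is None:
                # log_message is %-formatted with the extra args for the
                # server log; reason replaces the standard status phrase.
                raise tornado.web.HTTPError(
                    404, "no item %r", item_id, reason="Item Missing"
                )
            self.write(item)
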
     
    -    def __str__(self):
    +    def __str__(self) -> str:
             message = "HTTP %d: %s" % (
                 self.status_code,
    -            self.reason or httputil.responses.get(self.status_code, 'Unknown'))
    +            self.reason or httputil.responses.get(self.status_code, "Unknown"),
    +        )
             if self.log_message:
                 return message + " (" + (self.log_message % self.args) + ")"
             else:
    @@ -2296,6 +2405,7 @@ class Finish(Exception):
            Arguments passed to ``Finish()`` will be passed on to
            `RequestHandler.finish`.
         """
    +
         pass
     
     
    @@ -2307,21 +2417,24 @@ class MissingArgumentError(HTTPError):
     
         .. versionadded:: 3.1
         """
    -    def __init__(self, arg_name):
    +
    +    def __init__(self, arg_name: str) -> None:
             super(MissingArgumentError, self).__init__(
    -            400, 'Missing argument %s' % arg_name)
    +            400, "Missing argument %s" % arg_name
    +        )
             self.arg_name = arg_name
     
     
     class ErrorHandler(RequestHandler):
         """Generates an error response with ``status_code`` for all requests."""
    -    def initialize(self, status_code):
    +
    +    def initialize(self, status_code: int) -> None:
             self.set_status(status_code)
     
    -    def prepare(self):
    +    def prepare(self) -> None:
             raise HTTPError(self._status_code)
     
    -    def check_xsrf_cookie(self):
    +    def check_xsrf_cookie(self) -> None:
             # POSTs to an ErrorHandler don't actually have side effects,
             # so we don't need to check the xsrf token.  This allows POSTs
             # to the wrong url to return a 404 instead of 403.
    @@ -2360,15 +2473,19 @@ class RedirectHandler(RequestHandler):
            If any query arguments are present, they will be copied to the
            destination URL.
         """
    -    def initialize(self, url, permanent=True):
    +
    +    def initialize(self, url: str, permanent: bool = True) -> None:
             self._url = url
             self._permanent = permanent
     
    -    def get(self, *args):
    +    def get(self, *args: Any) -> None:
             to_url = self._url.format(*args)
             if self.request.query_arguments:
    +            # TODO: figure out typing for the next line.
                 to_url = httputil.url_concat(
    -                to_url, list(httputil.qs_to_qsl(self.request.query_arguments)))
    +                to_url,
    +                list(httputil.qs_to_qsl(self.request.query_arguments)),  # type: ignore
    +            )
             self.redirect(to_url, permanent=self._permanent)
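
Because ``get`` above expands the destination with ``self._url.format(*args)``, capture groups from the matched pattern can be spliced into the target URL; a sketch::

    app = tornado.web.Application([
        (r"/old/(\w+)", tornado.web.RedirectHandler,
         dict(url="/new/{0}", permanent=False)),
    ])
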
     
     
    @@ -2438,31 +2555,30 @@ class StaticFileHandler(RequestHandler):
         .. versionchanged:: 3.1
            Many of the methods for subclasses were added in Tornado 3.1.
         """
    +
         CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
     
    -    _static_hashes = {}  # type: typing.Dict
    +    _static_hashes = {}  # type: Dict[str, Optional[str]]
         _lock = threading.Lock()  # protects _static_hashes
     
    -    def initialize(self, path, default_filename=None):
    +    def initialize(self, path: str, default_filename: str = None) -> None:
             self.root = path
             self.default_filename = default_filename
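
For orientation, the explicit way to mount this handler (the ``static_path`` application setting shown earlier generates an equivalent rule automatically); the filesystem path is illustrative::

    app = tornado.web.Application([
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": "/var/www/static", "default_filename": "index.html"}),
    ])
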
     
         @classmethod
    -    def reset(cls):
    +    def reset(cls) -> None:
             with cls._lock:
                 cls._static_hashes = {}
     
    -    def head(self, path):
    +    def head(self, path: str) -> Awaitable[None]:
             return self.get(path, include_body=False)
     
    -    @gen.coroutine
    -    def get(self, path, include_body=True):
    +    async def get(self, path: str, include_body: bool = True) -> None:
             # Set up our path instance variables.
             self.path = self.parse_url_path(path)
             del path  # make sure we don't refer to path instead of self.path again
             absolute_path = self.get_absolute_path(self.root, self.path)
    -        self.absolute_path = self.validate_absolute_path(
    -            self.root, absolute_path)
    +        self.absolute_path = self.validate_absolute_path(self.root, absolute_path)
             if self.absolute_path is None:
                 return
     
    @@ -2483,16 +2599,24 @@ class StaticFileHandler(RequestHandler):
             size = self.get_content_size()
             if request_range:
                 start, end = request_range
    -            if (start is not None and start >= size) or end == 0:
    -                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
    -                # the first requested byte is equal to or greater than the
    -                # content, or when a suffix with length 0 is specified
    -                self.set_status(416)  # Range Not Satisfiable
    -                self.set_header("Content-Type", "text/plain")
    -                self.set_header("Content-Range", "bytes */%s" % (size, ))
    -                return
                 if start is not None and start < 0:
                     start += size
    +                if start < 0:
    +                    start = 0
    +            if (
    +                start is not None
    +                and (start >= size or (end is not None and start >= end))
    +            ) or end == 0:
    +                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
    +                # the first requested byte is equal to or greater than the
    +                # content, or when a suffix with length 0 is specified.
    +                # https://tools.ietf.org/html/rfc7233#section-2.1
    +                # A byte-range-spec is invalid if the last-byte-pos value is present
    +                # and less than the first-byte-pos.
    +                self.set_status(416)  # Range Not Satisfiable
    +                self.set_header("Content-Type", "text/plain")
    +                self.set_header("Content-Range", "bytes */%s" % (size,))
    +                return
                 if end is not None and end > size:
                     # Clients sometimes blindly use a large range to limit their
                     # download size; cap the endpoint at the actual file size.
    @@ -2503,8 +2627,9 @@ class StaticFileHandler(RequestHandler):
                 # ``Range: bytes=0-``.
                 if size != (end or size) - (start or 0):
                     self.set_status(206)  # Partial Content
    -                self.set_header("Content-Range",
    -                                httputil._get_content_range(start, end, size))
    +                self.set_header(
    +                    "Content-Range", httputil._get_content_range(start, end, size)
    +                )
             else:
                 start = end = None
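
A worked example of the arithmetic above for a 1000-byte file: ``Range: bytes=0-499`` parses to ``(start, end) = (0, 500)`` and is answered with 206 and ``Content-Range: bytes 0-499/1000``; the suffix form ``bytes=-200`` arrives as ``start = -200`` and is rebased to ``-200 + 1000 = 800``; and ``bytes=1500-`` fails the ``start >= size`` check and yields 416 with ``Content-Range: bytes */1000``.
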
     
    @@ -2525,13 +2650,13 @@ class StaticFileHandler(RequestHandler):
                 for chunk in content:
                     try:
                         self.write(chunk)
    -                    yield self.flush()
    +                    await self.flush()
                     except iostream.StreamClosedError:
                         return
             else:
                 assert self.request.method == "HEAD"
     
    -    def compute_etag(self):
    +    def compute_etag(self) -> Optional[str]:
             """Sets the ``Etag`` header based on static url version.
     
             This allows efficient ``If-None-Match`` checks against cached
    @@ -2540,12 +2665,13 @@ class StaticFileHandler(RequestHandler):
     
             .. versionadded:: 3.1
             """
    +        assert self.absolute_path is not None
             version_hash = self._get_cached_version(self.absolute_path)
             if not version_hash:
                 return None
    -        return '"%s"' % (version_hash, )
    +        return '"%s"' % (version_hash,)
     
    -    def set_headers(self):
    +    def set_headers(self) -> None:
             """Sets the content and caching headers on the response.
     
             .. versionadded:: 3.1
    @@ -2560,22 +2686,23 @@ class StaticFileHandler(RequestHandler):
             if content_type:
                 self.set_header("Content-Type", content_type)
     
    -        cache_time = self.get_cache_time(self.path, self.modified,
    -                                         content_type)
    +        cache_time = self.get_cache_time(self.path, self.modified, content_type)
             if cache_time > 0:
    -            self.set_header("Expires", datetime.datetime.utcnow() +
    -                            datetime.timedelta(seconds=cache_time))
    +            self.set_header(
    +                "Expires",
    +                datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_time),
    +            )
                 self.set_header("Cache-Control", "max-age=" + str(cache_time))
     
             self.set_extra_headers(self.path)
     
    -    def should_return_304(self):
    +    def should_return_304(self) -> bool:
             """Returns True if the headers indicate that we should return 304.
     
             .. versionadded:: 3.1
             """
             # If client sent If-None-Match, use it, ignore If-Modified-Since
    -        if self.request.headers.get('If-None-Match'):
    +        if self.request.headers.get("If-None-Match"):
                 return self.check_etag_header()
     
             # Check the If-Modified-Since, and don't send the result if the
    @@ -2585,13 +2712,14 @@ class StaticFileHandler(RequestHandler):
                 date_tuple = email.utils.parsedate(ims_value)
                 if date_tuple is not None:
                     if_since = datetime.datetime(*date_tuple[:6])
    +                assert self.modified is not None
                     if if_since >= self.modified:
                         return True
     
             return False
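
Concretely: a client that cached a response with ``Etag: "abc"`` revalidates with ``If-None-Match: "abc"``, which ``check_etag_header`` matches, and the handler replies 304 with no body; absent an ETag, ``If-Modified-Since`` is parsed and compared against ``self.modified`` at whole-second precision (see ``get_modified_time`` below).
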
     
         @classmethod
    -    def get_absolute_path(cls, root, path):
    +    def get_absolute_path(cls, root: str, path: str) -> str:
             """Returns the absolute location of ``path`` relative to ``root``.
     
             ``root`` is the path configured for this `StaticFileHandler`
    @@ -2607,7 +2735,7 @@ class StaticFileHandler(RequestHandler):
             abspath = os.path.abspath(os.path.join(root, path))
             return abspath
     
    -    def validate_absolute_path(self, root, absolute_path):
    +    def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
             """Validate and return the absolute path.
     
             ``root`` is the configured path for the `StaticFileHandler`,
    @@ -2642,16 +2770,14 @@ class StaticFileHandler(RequestHandler):
             # The trailing slash also needs to be temporarily added back
             # the requested path so a request to root/ will match.
             if not (absolute_path + os.path.sep).startswith(root):
    -            raise HTTPError(403, "%s is not in root static directory",
    -                            self.path)
    -        if (os.path.isdir(absolute_path) and
    -                self.default_filename is not None):
    +            raise HTTPError(403, "%s is not in root static directory", self.path)
    +        if os.path.isdir(absolute_path) and self.default_filename is not None:
                 # need to look at the request.path here for when path is empty
                 # but there is some prefix to the path that was already
                 # trimmed by the routing
                 if not self.request.path.endswith("/"):
                     self.redirect(self.request.path + "/", permanent=True)
    -                return
    +                return None
                 absolute_path = os.path.join(absolute_path, self.default_filename)
             if not os.path.exists(absolute_path):
                 raise HTTPError(404)
    @@ -2660,7 +2786,9 @@ class StaticFileHandler(RequestHandler):
             return absolute_path
     
         @classmethod
    -    def get_content(cls, abspath, start=None, end=None):
    +    def get_content(
    +        cls, abspath: str, start: int = None, end: int = None
    +    ) -> Generator[bytes, None, None]:
             """Retrieve the content of the requested resource which is located
             at the given absolute path.
     
    @@ -2679,7 +2807,7 @@ class StaticFileHandler(RequestHandler):
                 if start is not None:
                     file.seek(start)
                 if end is not None:
    -                remaining = end - (start or 0)
    +                remaining = end - (start or 0)  # type: Optional[int]
                 else:
                     remaining = None
                 while True:
    @@ -2697,7 +2825,7 @@ class StaticFileHandler(RequestHandler):
                         return
     
         @classmethod
    -    def get_content_version(cls, abspath):
    +    def get_content_version(cls, abspath: str) -> str:
             """Returns a version string for the resource at the given path.
     
             This class method may be overridden by subclasses.  The
    @@ -2714,12 +2842,13 @@ class StaticFileHandler(RequestHandler):
                     hasher.update(chunk)
             return hasher.hexdigest()
     
    -    def _stat(self):
    -        if not hasattr(self, '_stat_result'):
    +    def _stat(self) -> os.stat_result:
    +        assert self.absolute_path is not None
    +        if not hasattr(self, "_stat_result"):
                 self._stat_result = os.stat(self.absolute_path)
             return self._stat_result
     
    -    def get_content_size(self):
    +    def get_content_size(self) -> int:
             """Retrieve the total size of the resource at the given path.
     
             This method may be overridden by subclasses.
    @@ -2731,9 +2860,9 @@ class StaticFileHandler(RequestHandler):
                partial results are requested.
             """
             stat_result = self._stat()
    -        return stat_result[stat.ST_SIZE]
    +        return stat_result.st_size
     
    -    def get_modified_time(self):
    +    def get_modified_time(self) -> Optional[datetime.datetime]:
             """Returns the time that ``self.absolute_path`` was last modified.
     
             May be overridden in subclasses.  Should return a `~datetime.datetime`
    @@ -2742,15 +2871,23 @@ class StaticFileHandler(RequestHandler):
             .. versionadded:: 3.1
             """
             stat_result = self._stat()
    -        modified = datetime.datetime.utcfromtimestamp(
    -            stat_result[stat.ST_MTIME])
    +        # NOTE: Historically, this used stat_result[stat.ST_MTIME],
    +        # which truncates the fractional portion of the timestamp. It
    +        # was changed from that form to stat_result.st_mtime to
    +        # satisfy mypy (which disallows the bracket operator), but the
    +        # latter form returns a float instead of an int. For
    +        # consistency with the past (and because we have a unit test
    +        # that relies on this), we truncate the float here, although
    +        # I'm not sure that's the right thing to do.
    +        modified = datetime.datetime.utcfromtimestamp(int(stat_result.st_mtime))
             return modified
     
    -    def get_content_type(self):
    +    def get_content_type(self) -> str:
             """Returns the ``Content-Type`` header to be used for this request.
     
             .. versionadded:: 3.1
             """
    +        assert self.absolute_path is not None
             mime_type, encoding = mimetypes.guess_type(self.absolute_path)
             # per RFC 6713, use the appropriate type for a gzip compressed file
             if encoding == "gzip":
    @@ -2766,11 +2903,13 @@ class StaticFileHandler(RequestHandler):
             else:
                 return "application/octet-stream"
     
    -    def set_extra_headers(self, path):
    +    def set_extra_headers(self, path: str) -> None:
             """For subclass to add extra headers to the response"""
             pass
     
    -    def get_cache_time(self, path, modified, mime_type):
    +    def get_cache_time(
    +        self, path: str, modified: Optional[datetime.datetime], mime_type: str
    +    ) -> int:
             """Override to customize cache control behavior.
     
             Return a positive number of seconds to make the result
    @@ -2784,7 +2923,9 @@ class StaticFileHandler(RequestHandler):
             return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
     
         @classmethod
    -    def make_static_url(cls, settings, path, include_version=True):
    +    def make_static_url(
    +        cls, settings: Dict[str, Any], path: str, include_version: bool = True
    +    ) -> str:
             """Constructs a versioned url for the given path.
     
             This method may be overridden in subclasses (but note that it
    @@ -2803,7 +2944,7 @@ class StaticFileHandler(RequestHandler):
             file corresponding to the given ``path``.
     
             """
    -        url = settings.get('static_url_prefix', '/static/') + path
    +        url = settings.get("static_url_prefix", "/static/") + path
             if not include_version:
                 return url
     
    @@ -2811,9 +2952,9 @@ class StaticFileHandler(RequestHandler):
             if not version_hash:
                 return url
     
    -        return '%s?v=%s' % (url, version_hash)
    +        return "%s?v=%s" % (url, version_hash)
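
Within a handler or template one normally calls ``static_url(path)``, which delegates here; a direct sketch, assuming ``static_path`` is configured in ``app.settings``::

    url = tornado.web.StaticFileHandler.make_static_url(app.settings, "app.css")
    # e.g. "/static/app.css?v=<md5 hex>" -- the hash comes from
    # get_content_version() and is cached in _static_hashes.
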
     
    -    def parse_url_path(self, url_path):
    +    def parse_url_path(self, url_path: str) -> str:
             """Converts a static URL path into a filesystem path.
     
             ``url_path`` is the path component of the URL with
    @@ -2827,7 +2968,7 @@ class StaticFileHandler(RequestHandler):
             return url_path
     
         @classmethod
    -    def get_version(cls, settings, path):
    +    def get_version(cls, settings: Dict[str, Any], path: str) -> Optional[str]:
             """Generate the version string to be used in static URLs.
     
             ``settings`` is the `Application.settings` dictionary and ``path``
    @@ -2840,11 +2981,11 @@ class StaticFileHandler(RequestHandler):
                `get_content_version` is now preferred as it allows the base
                class to handle caching of the result.
             """
    -        abs_path = cls.get_absolute_path(settings['static_path'], path)
    +        abs_path = cls.get_absolute_path(settings["static_path"], path)
             return cls._get_cached_version(abs_path)
     
         @classmethod
    -    def _get_cached_version(cls, abs_path):
    +    def _get_cached_version(cls, abs_path: str) -> Optional[str]:
             with cls._lock:
                 hashes = cls._static_hashes
                 if abs_path not in hashes:
    @@ -2875,10 +3016,13 @@ class FallbackHandler(RequestHandler):
                  (r".*", FallbackHandler, dict(fallback=wsgi_app)),
             ])
         """
    -    def initialize(self, fallback):
    +
    +    def initialize(
    +        self, fallback: Callable[[httputil.HTTPServerRequest], None]
    +    ) -> None:
             self.fallback = fallback
     
    -    def prepare(self):
    +    def prepare(self) -> None:
             self.fallback(self.request)
             self._finished = True
             self.on_finish()
    @@ -2891,14 +3035,20 @@ class OutputTransform(object):
         or interact with them directly; the framework chooses which transforms
         (if any) to apply.
         """
    -    def __init__(self, request):
    +
    +    def __init__(self, request: httputil.HTTPServerRequest) -> None:
             pass
     
    -    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    -        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] # noqa: E501
    +    def transform_first_chunk(
    +        self,
    +        status_code: int,
    +        headers: httputil.HTTPHeaders,
    +        chunk: bytes,
    +        finishing: bool,
    +    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
             return status_code, headers, chunk
     
    -    def transform_chunk(self, chunk, finishing):
    +    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
             return chunk
     
     
    @@ -2912,12 +3062,20 @@ class GZipContentEncoding(OutputTransform):
             of just a whitelist. (the whitelist is still used for certain
             non-text mime types).
         """
    +
         # Whitelist of compressible mime types (in addition to any types
         # beginning with "text/").
    -    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
    -                         "application/xml", "application/atom+xml",
    -                         "application/json", "application/xhtml+xml",
    -                         "image/svg+xml"])
    +    CONTENT_TYPES = set(
    +        [
    +            "application/javascript",
    +            "application/x-javascript",
    +            "application/xml",
    +            "application/atom+xml",
    +            "application/json",
    +            "application/xhtml+xml",
    +            "image/svg+xml",
    +        ]
    +    )
         # Python's GzipFile defaults to level 9, while most other gzip
         # tools (including gzip itself) default to 6, which is probably a
         # better CPU/size tradeoff.
    @@ -2929,29 +3087,37 @@ class GZipContentEncoding(OutputTransform):
         # regardless of size.
         MIN_LENGTH = 1024
     
    -    def __init__(self, request):
    +    def __init__(self, request: httputil.HTTPServerRequest) -> None:
             self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
     
    -    def _compressible_type(self, ctype):
    -        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES
    +    def _compressible_type(self, ctype: str) -> bool:
    +        return ctype.startswith("text/") or ctype in self.CONTENT_TYPES
     
    -    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    -        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] # noqa: E501
    +    def transform_first_chunk(
    +        self,
    +        status_code: int,
    +        headers: httputil.HTTPHeaders,
    +        chunk: bytes,
    +        finishing: bool,
    +    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
             # TODO: can/should this type be inherited from the superclass?
    -        if 'Vary' in headers:
    -            headers['Vary'] += ', Accept-Encoding'
    +        if "Vary" in headers:
    +            headers["Vary"] += ", Accept-Encoding"
             else:
    -            headers['Vary'] = 'Accept-Encoding'
    +            headers["Vary"] = "Accept-Encoding"
             if self._gzipping:
                 ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
    -            self._gzipping = self._compressible_type(ctype) and \
    -                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
    -                ("Content-Encoding" not in headers)
    +            self._gzipping = (
    +                self._compressible_type(ctype)
    +                and (not finishing or len(chunk) >= self.MIN_LENGTH)
    +                and ("Content-Encoding" not in headers)
    +            )
             if self._gzipping:
                 headers["Content-Encoding"] = "gzip"
                 self._gzip_value = BytesIO()
    -            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value,
    -                                            compresslevel=self.GZIP_LEVEL)
    +            self._gzip_file = gzip.GzipFile(
    +                mode="w", fileobj=self._gzip_value, compresslevel=self.GZIP_LEVEL
    +            )
                 chunk = self.transform_chunk(chunk, finishing)
                 if "Content-Length" in headers:
                     # The original content length is no longer correct.
    @@ -2964,7 +3130,7 @@ class GZipContentEncoding(OutputTransform):
                         del headers["Content-Length"]
             return status_code, headers, chunk
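
This transform is not instantiated by hand; it is enabled through the ``compress_response`` setting, which ``Application.__init__`` above appends to ``self.transforms``::

    app = tornado.web.Application(
        [(r"/", MainHandler)],
        compress_response=True,
    )
    # Responses with a compressible Content-Type and at least MIN_LENGTH
    # (1024) bytes are then gzipped for clients sending
    # Accept-Encoding: gzip.
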
     
    -    def transform_chunk(self, chunk, finishing):
    +    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
             if self._gzipping:
                 self._gzip_file.write(chunk)
                 if finishing:
    @@ -2977,7 +3143,9 @@ class GZipContentEncoding(OutputTransform):
             return chunk
     
     
    -def authenticated(method):
    +def authenticated(
    +    method: Callable[..., Optional[Awaitable[None]]]
    +) -> Callable[..., Optional[Awaitable[None]]]:
         """Decorate methods with this to require that the user be logged in.
     
         If the user is not logged in, they will be redirected to the configured
    @@ -2988,22 +3156,27 @@ def authenticated(method):
         will add a `next` parameter so the login page knows where to send
         you once you're logged in.
         """
    +
         @functools.wraps(method)
    -    def wrapper(self, *args, **kwargs):
    +    def wrapper(  # type: ignore
    +        self: RequestHandler, *args, **kwargs
    +    ) -> Optional[Awaitable[None]]:
             if not self.current_user:
                 if self.request.method in ("GET", "HEAD"):
                     url = self.get_login_url()
                     if "?" not in url:
    -                    if urlparse.urlsplit(url).scheme:
    +                    if urllib.parse.urlsplit(url).scheme:
                             # if login url is absolute, make next absolute too
                             next_url = self.request.full_url()
                         else:
    +                        assert self.request.uri is not None
                             next_url = self.request.uri
                         url += "?" + urlencode(dict(next=next_url))
                     self.redirect(url)
    -                return
    +                return None
                 raise HTTPError(403)
             return method(self, *args, **kwargs)
    +
         return wrapper
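
A sketch of the decorator in use; ``LoginHandler`` and the cookie name are illustrative::

    class ProfileHandler(tornado.web.RequestHandler):
        def get_current_user(self):
            return self.get_secure_cookie("user")

        @tornado.web.authenticated
        def get(self) -> None:
            # reached only when get_current_user() returned a value
            self.write(b"hello, " + self.current_user)

    app = tornado.web.Application(
        [(r"/profile", ProfileHandler), (r"/login", LoginHandler)],
        login_url="/login",
        cookie_secret="change-me",
    )
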
     
     
    @@ -3016,26 +3189,27 @@ class UIModule(object):
     
         Subclasses of UIModule must override the `render` method.
         """
    -    def __init__(self, handler):
    +
    +    def __init__(self, handler: RequestHandler) -> None:
             self.handler = handler
             self.request = handler.request
             self.ui = handler.ui
             self.locale = handler.locale
     
         @property
    -    def current_user(self):
    +    def current_user(self) -> Any:
             return self.handler.current_user
     
    -    def render(self, *args, **kwargs):
    +    def render(self, *args: Any, **kwargs: Any) -> str:
             """Override in subclasses to return this module's output."""
             raise NotImplementedError()
     
    -    def embedded_javascript(self):
    +    def embedded_javascript(self) -> Optional[str]:
             """Override to return a JavaScript string
             to be embedded in the page."""
             return None
     
    -    def javascript_files(self):
    +    def javascript_files(self) -> Optional[Iterable[str]]:
             """Override to return a list of JavaScript files needed by this module.
     
             If the return values are relative paths, they will be passed to
    @@ -3043,12 +3217,12 @@ class UIModule(object):
             """
             return None
     
    -    def embedded_css(self):
    +    def embedded_css(self) -> Optional[str]:
             """Override to return a CSS string
             that will be embedded in the page."""
             return None
     
    -    def css_files(self):
     +    def css_files(self) -> Optional[Iterable[str]]:
              """Override to return a list of CSS files required by this module.
     
             If the return values are relative paths, they will be passed to
    @@ -3056,30 +3230,30 @@ class UIModule(object):
             """
             return None
     
    -    def html_head(self):
     +    def html_head(self) -> Optional[str]:
              """Override to return an HTML string that will be put in the <head/>
              element.
             """
             return None
     
    -    def html_body(self):
    +    def html_body(self) -> Optional[str]:
             """Override to return an HTML string that will be put at the end of
              the <body/> element.
             """
             return None
     
    -    def render_string(self, path, **kwargs):
    +    def render_string(self, path: str, **kwargs: Any) -> bytes:
             """Renders a template and returns it as a string."""
             return self.handler.render_string(path, **kwargs)
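
A sketch of a concrete module built on these hooks, registered through the ``ui_modules`` setting handled in ``Application.__init__`` and invoked from a template as ``{% module Entry(post) %}``; the names and template path are illustrative::

    class Entry(tornado.web.UIModule):
        def render(self, post):
            return self.render_string("modules/entry.html", post=post)

        def embedded_css(self):
            return ".entry { margin: 1em 0; }"

    app = tornado.web.Application(
        [(r"/", MainHandler)],
        ui_modules={"Entry": Entry},
    )
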
     
     
     class _linkify(UIModule):
    -    def render(self, text, **kwargs):
    +    def render(self, text: str, **kwargs: Any) -> str:  # type: ignore
             return escape.linkify(text, **kwargs)
     
     
     class _xsrf_form_html(UIModule):
    -    def render(self):
    +    def render(self) -> str:  # type: ignore
             return self.handler.xsrf_form_html()
     
     
    @@ -3098,32 +3272,35 @@ class TemplateModule(UIModule):
         per instantiation of the template, so they must not depend on
         any arguments to the template.
         """
    -    def __init__(self, handler):
    +
    +    def __init__(self, handler: RequestHandler) -> None:
             super(TemplateModule, self).__init__(handler)
             # keep resources in both a list and a dict to preserve order
    -        self._resource_list = []
    -        self._resource_dict = {}
    +        self._resource_list = []  # type: List[Dict[str, Any]]
    +        self._resource_dict = {}  # type: Dict[str, Dict[str, Any]]
     
    -    def render(self, path, **kwargs):
    -        def set_resources(**kwargs):
    +    def render(self, path: str, **kwargs: Any) -> bytes:  # type: ignore
    +        def set_resources(**kwargs) -> str:  # type: ignore
                 if path not in self._resource_dict:
                     self._resource_list.append(kwargs)
                     self._resource_dict[path] = kwargs
                 else:
                     if self._resource_dict[path] != kwargs:
    -                    raise ValueError("set_resources called with different "
    -                                     "resources for the same template")
    +                    raise ValueError(
    +                        "set_resources called with different "
    +                        "resources for the same template"
    +                    )
                 return ""
    -        return self.render_string(path, set_resources=set_resources,
    -                                  **kwargs)
     
    -    def _get_resources(self, key):
    +        return self.render_string(path, set_resources=set_resources, **kwargs)
    +
    +    def _get_resources(self, key: str) -> Iterable[str]:
             return (r[key] for r in self._resource_list if key in r)
     
    -    def embedded_javascript(self):
    +    def embedded_javascript(self) -> str:
             return "\n".join(self._get_resources("embedded_javascript"))
     
    -    def javascript_files(self):
    +    def javascript_files(self) -> Iterable[str]:
             result = []
             for f in self._get_resources("javascript_files"):
                 if isinstance(f, (unicode_type, bytes)):
    @@ -3132,10 +3309,10 @@ class TemplateModule(UIModule):
                     result.extend(f)
             return result
     
    -    def embedded_css(self):
    +    def embedded_css(self) -> str:
             return "\n".join(self._get_resources("embedded_css"))
     
    -    def css_files(self):
    +    def css_files(self) -> Iterable[str]:
             result = []
             for f in self._get_resources("css_files"):
                 if isinstance(f, (unicode_type, bytes)):
    @@ -3144,47 +3321,40 @@ class TemplateModule(UIModule):
                     result.extend(f)
             return result
     
    -    def html_head(self):
    +    def html_head(self) -> str:
             return "".join(self._get_resources("html_head"))
     
    -    def html_body(self):
    +    def html_body(self) -> str:
             return "".join(self._get_resources("html_body"))
     
     
     class _UIModuleNamespace(object):
         """Lazy namespace which creates UIModule proxies bound to a handler."""
    -    def __init__(self, handler, ui_modules):
    +
    +    def __init__(
    +        self, handler: RequestHandler, ui_modules: Dict[str, Type[UIModule]]
    +    ) -> None:
             self.handler = handler
             self.ui_modules = ui_modules
     
    -    def __getitem__(self, key):
    +    def __getitem__(self, key: str) -> Callable[..., str]:
             return self.handler._ui_module(key, self.ui_modules[key])
     
    -    def __getattr__(self, key):
    +    def __getattr__(self, key: str) -> Callable[..., str]:
             try:
                 return self[key]
             except KeyError as e:
                 raise AttributeError(str(e))
     
     
    -if hasattr(hmac, 'compare_digest'):  # python 3.3
    -    _time_independent_equals = hmac.compare_digest
    -else:
    -    def _time_independent_equals(a, b):
    -        if len(a) != len(b):
    -            return False
    -        result = 0
    -        if isinstance(a[0], int):  # python3 byte strings
    -            for x, y in zip(a, b):
    -                result |= x ^ y
    -        else:  # python2
    -            for x, y in zip(a, b):
    -                result |= ord(x) ^ ord(y)
    -        return result == 0
    -
    -
    -def create_signed_value(secret, name, value, version=None, clock=None,
    -                        key_version=None):
    +def create_signed_value(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: Union[str, bytes],
    +    version: int = None,
    +    clock: Callable[[], float] = None,
    +    key_version: int = None,
    +) -> bytes:
         if version is None:
             version = DEFAULT_SIGNED_VALUE_VERSION
         if clock is None:
    @@ -3193,6 +3363,7 @@ def create_signed_value(secret, name, value, version=None, clock=None,
         timestamp = utf8(str(int(clock())))
         value = base64.b64encode(utf8(value))
         if version == 1:
    +        assert not isinstance(secret, dict)
             signature = _create_signature_v1(secret, name, value, timestamp)
             value = b"|".join([value, timestamp, signature])
             return value
    @@ -3211,19 +3382,25 @@ def create_signed_value(secret, name, value, version=None, clock=None,
             # - name (not encoded; assumed to be ~alphanumeric)
             # - value (base64-encoded)
             # - signature (hex-encoded; no length prefix)
    -        def format_field(s):
    +        def format_field(s: Union[str, bytes]) -> bytes:
                 return utf8("%d:" % len(s)) + utf8(s)
    -        to_sign = b"|".join([
    -            b"2",
    -            format_field(str(key_version or 0)),
    -            format_field(timestamp),
    -            format_field(name),
    -            format_field(value),
    -            b''])
    +
    +        to_sign = b"|".join(
    +            [
    +                b"2",
    +                format_field(str(key_version or 0)),
    +                format_field(timestamp),
    +                format_field(name),
    +                format_field(value),
    +                b"",
    +            ]
    +        )
     
             if isinstance(secret, dict):
    -            assert key_version is not None, 'Key version must be set when sign key dict is used'
    -            assert version >= 2, 'Version must be at least 2 for key version support'
    +            assert (
    +                key_version is not None
    +            ), "Key version must be set when sign key dict is used"
    +            assert version >= 2, "Version must be at least 2 for key version support"
                 secret = secret[key_version]
     
             signature = _create_signature_v2(secret, to_sign)
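
These module-level helpers are what ``set_secure_cookie`` and ``get_secure_cookie`` build on; a round-trip sketch (timestamp and signature vary per run)::

    secret = "change-me"
    signed = tornado.web.create_signed_value(secret, "user", "alice")
    # shape: b'2|1:0|10:<timestamp>|4:user|8:YWxpY2U=|<sha256 hex>'
    assert tornado.web.decode_signed_value(secret, "user", signed) == b"alice"
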
    @@ -3237,7 +3414,7 @@ def create_signed_value(secret, name, value, version=None, clock=None,
     _signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
     
     
    -def _get_version(value):
    +def _get_version(value: bytes) -> int:
         # Figures out what version value is.  Version 1 did not include an
         # explicit version field and started with arbitrary base64 data,
         # which makes this tricky.
    @@ -3260,8 +3437,14 @@ def _get_version(value):
         return version
     
     
    -def decode_signed_value(secret, name, value, max_age_days=31,
    -                        clock=None, min_version=None):
    +def decode_signed_value(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: Union[None, str, bytes],
    +    max_age_days: int = 31,
    +    clock: Callable[[], float] = None,
    +    min_version: int = None,
    +) -> Optional[bytes]:
         if clock is None:
             clock = time.time
         if min_version is None:
    @@ -3277,21 +3460,26 @@ def decode_signed_value(secret, name, value, max_age_days=31,
         if version < min_version:
             return None
         if version == 1:
    -        return _decode_signed_value_v1(secret, name, value,
    -                                       max_age_days, clock)
    +        assert not isinstance(secret, dict)
    +        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
         elif version == 2:
    -        return _decode_signed_value_v2(secret, name, value,
    -                                       max_age_days, clock)
    +        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
         else:
             return None
     
     
    -def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    +def _decode_signed_value_v1(
    +    secret: Union[str, bytes],
    +    name: str,
    +    value: bytes,
    +    max_age_days: int,
    +    clock: Callable[[], float],
    +) -> Optional[bytes]:
         parts = utf8(value).split(b"|")
         if len(parts) != 3:
             return None
         signature = _create_signature_v1(secret, name, parts[0], parts[1])
    -    if not _time_independent_equals(parts[2], signature):
    +    if not hmac.compare_digest(parts[2], signature):
             gen_log.warning("Invalid cookie signature %r", value)
             return None
         timestamp = int(parts[1])
    @@ -3304,8 +3492,7 @@ def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
             # digits from the payload to the timestamp without altering the
             # signature.  For backwards compatibility, sanity-check timestamp
             # here instead of modifying _cookie_signature.
    -        gen_log.warning("Cookie timestamp in future; possible tampering %r",
    -                        value)
    +        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
             return None
         if parts[1].startswith(b"0"):
             gen_log.warning("Tampered cookie %r", value)
    @@ -3316,16 +3503,16 @@ def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
             return None
     
     
    -def _decode_fields_v2(value):
    -    def _consume_field(s):
    -        length, _, rest = s.partition(b':')
    +def _decode_fields_v2(value: bytes) -> Tuple[int, bytes, bytes, bytes, bytes]:
    +    def _consume_field(s: bytes) -> Tuple[bytes, bytes]:
    +        length, _, rest = s.partition(b":")
             n = int(length)
             field_value = rest[:n]
             # In python 3, indexing bytes returns small integers; we must
             # use a slice to get a byte string as in python 2.
    -        if rest[n:n + 1] != b'|':
    +        if rest[n : n + 1] != b"|":
                 raise ValueError("malformed v2 signed value field")
    -        rest = rest[n + 1:]
    +        rest = rest[n + 1 :]
             return field_value, rest
     
         rest = value[2:]  # remove version number
    @@ -3336,12 +3523,24 @@ def _decode_fields_v2(value):
         return int(key_version), timestamp, name_field, value_field, passed_sig
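
Each field in a v2 signed value is length-prefixed as `<length>:<payload>|`, which is what `_consume_field` above unpacks; a self-contained sketch of the framing (the helper names and sample data are illustrative, not part of the patch):

    def format_field(payload: bytes) -> bytes:
        # Length-prefix one field, "<len>:<payload>|".
        return b"%d:%s|" % (len(payload), payload)

    def consume_field(s: bytes):
        length, _, rest = s.partition(b":")
        n = int(length)
        if rest[n : n + 1] != b"|":
            raise ValueError("malformed v2 signed value field")
        return rest[:n], rest[n + 1 :]

    field, rest = consume_field(format_field(b"user") + b"extra")
    assert field == b"user" and rest == b"extra"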
     
     
    -def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    +def _decode_signed_value_v2(
    +    secret: _CookieSecretTypes,
    +    name: str,
    +    value: bytes,
    +    max_age_days: int,
    +    clock: Callable[[], float],
    +) -> Optional[bytes]:
         try:
    -        key_version, timestamp, name_field, value_field, passed_sig = _decode_fields_v2(value)
    +        (
    +            key_version,
    +            timestamp_bytes,
    +            name_field,
    +            value_field,
    +            passed_sig,
    +        ) = _decode_fields_v2(value)
         except ValueError:
             return None
    -    signed_string = value[:-len(passed_sig)]
    +    signed_string = value[: -len(passed_sig)]
     
         if isinstance(secret, dict):
             try:
    @@ -3350,11 +3549,11 @@ def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
                 return None
     
         expected_sig = _create_signature_v2(secret, signed_string)
    -    if not _time_independent_equals(passed_sig, expected_sig):
    +    if not hmac.compare_digest(passed_sig, expected_sig):
             return None
         if name_field != utf8(name):
             return None
    -    timestamp = int(timestamp)
    +    timestamp = int(timestamp_bytes)
         if timestamp < clock() - max_age_days * 86400:
             # The signature has expired.
             return None
    @@ -3364,7 +3563,7 @@ def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
             return None
     
     
    -def get_signature_key_version(value):
    +def get_signature_key_version(value: Union[str, bytes]) -> Optional[int]:
         value = utf8(value)
         version = _get_version(value)
         if version < 2:
    @@ -3377,18 +3576,18 @@ def get_signature_key_version(value):
         return key_version
     
     
    -def _create_signature_v1(secret, *parts):
    +def _create_signature_v1(secret: Union[str, bytes], *parts: Union[str, bytes]) -> bytes:
         hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
         for part in parts:
             hash.update(utf8(part))
         return utf8(hash.hexdigest())
     
     
    -def _create_signature_v2(secret, s):
    +def _create_signature_v2(secret: Union[str, bytes], s: bytes) -> bytes:
         hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
         hash.update(utf8(s))
         return utf8(hash.hexdigest())
     
     
    -def is_absolute(path):
    +def is_absolute(path: str) -> bool:
         return any(path.startswith(x) for x in ["/", "http:", "https:"])
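
One thread running through the cookie changes above: the removed `_time_independent_equals` helper is replaced by the standard library's `hmac.compare_digest`, which serves the same purpose. A tiny sketch with made-up digests:

    import hmac

    # Illustrative 40-char values (the size of a sha1 hexdigest).
    expected_sig = b"a" * 40
    passed_sig = b"b" * 40

    # A plain "==" may return at the first differing byte, leaking timing
    # information; compare_digest always scans the full length.
    ok = hmac.compare_digest(passed_sig, expected_sig)
    assert ok is False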
    diff --git a/server/www/packages/packages-windows/x86/tornado/websocket.py b/server/www/packages/packages-windows/x86/tornado/websocket.py
    index 0b994fc..d991fee 100644
    --- a/server/www/packages/packages-windows/x86/tornado/websocket.py
    +++ b/server/www/packages/packages-windows/x86/tornado/websocket.py
    @@ -16,8 +16,8 @@ the protocol (known as "draft 76") and are not compatible with this module.
        Removed support for the draft 76 protocol version.
     """
     
    -from __future__ import absolute_import, division, print_function
    -
    +import abc
    +import asyncio
     import base64
     import hashlib
     import os
    @@ -25,24 +25,79 @@ import sys
     import struct
     import tornado.escape
     import tornado.web
    +from urllib.parse import urlparse
     import zlib
     
     from tornado.concurrent import Future, future_set_result_unless_cancelled
     from tornado.escape import utf8, native_str, to_unicode
     from tornado import gen, httpclient, httputil
     from tornado.ioloop import IOLoop, PeriodicCallback
    -from tornado.iostream import StreamClosedError
    -from tornado.log import gen_log
    +from tornado.iostream import StreamClosedError, IOStream
    +from tornado.log import gen_log, app_log
     from tornado import simple_httpclient
     from tornado.queues import Queue
     from tornado.tcpclient import TCPClient
    -from tornado.util import _websocket_mask, PY3
    +from tornado.util import _websocket_mask
    +
    +from typing import (
    +    TYPE_CHECKING,
    +    cast,
    +    Any,
    +    Optional,
    +    Dict,
    +    Union,
    +    List,
    +    Awaitable,
    +    Callable,
    +    Tuple,
    +    Type,
    +)
    +from types import TracebackType
    +
    +if TYPE_CHECKING:
    +    from typing_extensions import Protocol
    +
    +    # The zlib compressor types aren't actually exposed anywhere
    +    # publicly, so declare protocols for the portions we use.
    +    class _Compressor(Protocol):
    +        def compress(self, data: bytes) -> bytes:
    +            pass
    +
    +        def flush(self, mode: int) -> bytes:
    +            pass
    +
    +    class _Decompressor(Protocol):
    +        unconsumed_tail = b""  # type: bytes
    +
    +        def decompress(self, data: bytes, max_length: int) -> bytes:
    +            pass
    +
    +    class _WebSocketDelegate(Protocol):
    +        # The common base interface implemented by WebSocketHandler on
    +        # the server side and WebSocketClientConnection on the client
    +        # side.
    +        def on_ws_connection_close(
    +            self, close_code: int = None, close_reason: str = None
    +        ) -> None:
    +            pass
    +
    +        def on_message(self, message: Union[str, bytes]) -> Optional["Awaitable[None]"]:
    +            pass
    +
    +        def on_ping(self, data: bytes) -> None:
    +            pass
    +
    +        def on_pong(self, data: bytes) -> None:
    +            pass
    +
    +        def log_exception(
    +            self,
    +            typ: Optional[Type[BaseException]],
    +            value: Optional[BaseException],
    +            tb: Optional[TracebackType],
    +        ) -> None:
    +            pass
     
    -if PY3:
    -    from urllib.parse import urlparse  # py2
    -    xrange = range
    -else:
    -    from urlparse import urlparse  # py3
     
     _default_max_message_size = 10 * 1024 * 1024
     
    @@ -56,6 +111,7 @@ class WebSocketClosedError(WebSocketError):
     
         .. versionadded:: 3.2
         """
    +
         pass
     
     
    @@ -63,6 +119,20 @@ class _DecompressTooLargeError(Exception):
         pass
     
     
    +class _WebSocketParams(object):
    +    def __init__(
    +        self,
    +        ping_interval: float = None,
    +        ping_timeout: float = None,
    +        max_message_size: int = _default_max_message_size,
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
    +        self.ping_interval = ping_interval
    +        self.ping_timeout = ping_timeout
    +        self.max_message_size = max_message_size
    +        self.compression_options = compression_options
    +
    +
     class WebSocketHandler(tornado.web.RequestHandler):
         """Subclass this class to create a basic WebSocket handler.
     
    @@ -144,22 +214,28 @@ class WebSocketHandler(tornado.web.RequestHandler):
            Added ``websocket_ping_interval``, ``websocket_ping_timeout``, and
            ``websocket_max_message_size``.
         """
    -    def __init__(self, application, request, **kwargs):
    +
    +    def __init__(
    +        self,
    +        application: tornado.web.Application,
    +        request: httputil.HTTPServerRequest,
    +        **kwargs: Any
    +    ) -> None:
             super(WebSocketHandler, self).__init__(application, request, **kwargs)
    -        self.ws_connection = None
    -        self.close_code = None
    -        self.close_reason = None
    -        self.stream = None
    +        self.ws_connection = None  # type: Optional[WebSocketProtocol]
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
    +        self.stream = None  # type: Optional[IOStream]
             self._on_close_called = False
     
    -    def get(self, *args, **kwargs):
    +    async def get(self, *args: Any, **kwargs: Any) -> None:
             self.open_args = args
             self.open_kwargs = kwargs
     
             # Upgrade header should be present and should be equal to WebSocket
    -        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
    +        if self.request.headers.get("Upgrade", "").lower() != "websocket":
                 self.set_status(400)
    -            log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
    +            log_msg = 'Can "Upgrade" only to "WebSocket".'
                 self.finish(log_msg)
                 gen_log.debug(log_msg)
                 return
    @@ -168,11 +244,12 @@ class WebSocketHandler(tornado.web.RequestHandler):
             # Some proxy servers/load balancers
             # might mess with it.
             headers = self.request.headers
    -        connection = map(lambda s: s.strip().lower(),
    -                         headers.get("Connection", "").split(","))
    -        if 'upgrade' not in connection:
    +        connection = map(
    +            lambda s: s.strip().lower(), headers.get("Connection", "").split(",")
    +        )
    +        if "upgrade" not in connection:
                 self.set_status(400)
    -            log_msg = "\"Connection\" must be \"Upgrade\"."
    +            log_msg = '"Connection" must be "Upgrade".'
                 self.finish(log_msg)
                 gen_log.debug(log_msg)
                 return
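
For reference, a header set that satisfies both of the checks above (the values are illustrative; the key is the sample key from RFC 6455):

    # "Connection" is a comma-separated token list, so "upgrade" only
    # has to appear somewhere in it; "Upgrade" is compared case-insensitively.
    headers = {
        "Upgrade": "websocket",
        "Connection": "keep-alive, Upgrade",
        "Sec-WebSocket-Key": "dGhlIHNhbXBsZSBub25jZQ==",
        "Sec-WebSocket-Version": "13",
    }
    connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
    assert headers.get("Upgrade", "").lower() == "websocket"
    assert "upgrade" in connection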
    @@ -198,32 +275,31 @@ class WebSocketHandler(tornado.web.RequestHandler):
     
             self.ws_connection = self.get_websocket_protocol()
             if self.ws_connection:
    -            self.ws_connection.accept_connection()
    +            await self.ws_connection.accept_connection(self)
             else:
                 self.set_status(426, "Upgrade Required")
                 self.set_header("Sec-WebSocket-Version", "7, 8, 13")
    -            self.finish()
     
         stream = None
     
         @property
    -    def ping_interval(self):
    +    def ping_interval(self) -> Optional[float]:
             """The interval for websocket keep-alive pings.
     
             Set websocket_ping_interval = 0 to disable pings.
             """
    -        return self.settings.get('websocket_ping_interval', None)
    +        return self.settings.get("websocket_ping_interval", None)
     
         @property
    -    def ping_timeout(self):
    +    def ping_timeout(self) -> Optional[float]:
             """If no ping is received in this many seconds,
             close the websocket connection (VPNs, etc. can fail to cleanly close ws connections).
             Default is max of 3 pings or 30 seconds.
             """
    -        return self.settings.get('websocket_ping_timeout', None)
    +        return self.settings.get("websocket_ping_timeout", None)
     
         @property
    -    def max_message_size(self):
    +    def max_message_size(self) -> int:
             """Maximum allowed message size.
     
             If the remote peer sends a message larger than this, the connection
    @@ -231,9 +307,13 @@ class WebSocketHandler(tornado.web.RequestHandler):
     
             Default is 10MiB.
             """
    -        return self.settings.get('websocket_max_message_size', _default_max_message_size)
    +        return self.settings.get(
    +            "websocket_max_message_size", _default_max_message_size
    +        )
     
    -    def write_message(self, message, binary=False):
    +    def write_message(
    +        self, message: Union[bytes, str, Dict[str, Any]], binary: bool = False
    +    ) -> "Future[None]":
             """Sends the given message to the client of this Web Socket.
     
             The message may be either a string or a dict (which will be
    @@ -255,13 +335,13 @@ class WebSocketHandler(tornado.web.RequestHandler):
                Consistently raises `WebSocketClosedError`. Previously could
                sometimes raise `.StreamClosedError`.
             """
    -        if self.ws_connection is None:
    +        if self.ws_connection is None or self.ws_connection.is_closing():
                 raise WebSocketClosedError()
             if isinstance(message, dict):
                 message = tornado.escape.json_encode(message)
             return self.ws_connection.write_message(message, binary=binary)
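
With the new `is_closing()` guard above, `write_message` raises `WebSocketClosedError` on half-closed connections as well, so callers should be prepared to catch it; a minimal handler sketch (the class name is hypothetical):

    import tornado.websocket

    class EchoHandler(tornado.websocket.WebSocketHandler):
        # Echo each message back, tolerating the race where the peer
        # closes between receive and send.
        async def on_message(self, message):
            try:
                await self.write_message(message)
            except tornado.websocket.WebSocketClosedError:
                pass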
     
    -    def select_subprotocol(self, subprotocols):
    +    def select_subprotocol(self, subprotocols: List[str]) -> Optional[str]:
             """Override to implement subprotocol negotiation.
     
             ``subprotocols`` is a list of strings identifying the
    @@ -287,14 +367,15 @@ class WebSocketHandler(tornado.web.RequestHandler):
             return None
     
         @property
    -    def selected_subprotocol(self):
    +    def selected_subprotocol(self) -> Optional[str]:
             """The subprotocol returned by `select_subprotocol`.
     
             .. versionadded:: 5.1
             """
    +        assert self.ws_connection is not None
             return self.ws_connection.selected_subprotocol
     
    -    def get_compression_options(self):
    +    def get_compression_options(self) -> Optional[Dict[str, Any]]:
             """Override to return compression options for the connection.
     
             If this method returns None (the default), compression will
    @@ -318,7 +399,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             # TODO: Add wbits option.
             return None
     
    -    def open(self, *args, **kwargs):
    +    def open(self, *args: str, **kwargs: str) -> Optional[Awaitable[None]]:
             """Invoked when a new WebSocket is opened.
     
             The arguments to `open` are extracted from the `tornado.web.URLSpec`
    @@ -334,7 +415,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             """
             pass
     
    -    def on_message(self, message):
    +    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
             """Handle incoming messages on the WebSocket
     
             This method must be overridden.
    @@ -345,7 +426,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             """
             raise NotImplementedError
     
    -    def ping(self, data=b''):
    +    def ping(self, data: Union[str, bytes] = b"") -> None:
             """Send ping frame to the remote end.
     
             The data argument allows a small amount of data (up to 125
    @@ -362,19 +443,19 @@ class WebSocketHandler(tornado.web.RequestHandler):
     
             """
             data = utf8(data)
    -        if self.ws_connection is None:
    +        if self.ws_connection is None or self.ws_connection.is_closing():
                 raise WebSocketClosedError()
             self.ws_connection.write_ping(data)
     
    -    def on_pong(self, data):
    +    def on_pong(self, data: bytes) -> None:
             """Invoked when the response to a ping frame is received."""
             pass
     
    -    def on_ping(self, data):
    +    def on_ping(self, data: bytes) -> None:
             """Invoked when the a ping frame is received."""
             pass
     
    -    def on_close(self):
    +    def on_close(self) -> None:
             """Invoked when the WebSocket is closed.
     
             If the connection was closed cleanly and a status code or reason
    @@ -387,7 +468,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             """
             pass
     
    -    def close(self, code=None, reason=None):
    +    def close(self, code: int = None, reason: str = None) -> None:
             """Closes this Web Socket.
     
             Once the close handshake is successful the socket will be closed.
    @@ -407,7 +488,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
                 self.ws_connection.close(code, reason)
                 self.ws_connection = None
     
    -    def check_origin(self, origin):
    +    def check_origin(self, origin: str) -> bool:
             """Override to enable support for allowing alternate origins.
     
             The ``origin`` argument is the value of the ``Origin`` HTTP
    @@ -417,9 +498,9 @@ class WebSocketHandler(tornado.web.RequestHandler):
             implement WebSockets support this header, and non-browser
             clients do not have the same cross-site security concerns).
     
    -        Should return True to accept the request or False to reject it.
    -        By default, rejects all requests with an origin on a host other
    -        than this one.
    +        Should return ``True`` to accept the request or ``False`` to
    +        reject it. By default, rejects all requests with an origin on
    +        a host other than this one.
     
             This is a security protection against cross site scripting attacks on
             browsers, since WebSockets are allowed to bypass the usual same-origin
    @@ -439,7 +520,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
                for more.
     
             To accept all cross-origin traffic (which was the default prior to
    -        Tornado 4.0), simply override this method to always return true::
    +        Tornado 4.0), simply override this method to always return ``True``::
     
                 def check_origin(self, origin):
                     return True
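
Between the two extremes in the docstring above (same-host only vs. always ``True``), an allowlist-based override is a common middle ground; a sketch with placeholder host names:

    from urllib.parse import urlparse

    import tornado.websocket

    class MyHandler(tornado.websocket.WebSocketHandler):
        def check_origin(self, origin: str) -> bool:
            # Accept only origins whose host is explicitly allowed
            # (hostname is lowercased and has any port stripped).
            host = urlparse(origin).hostname
            return host in {"example.com", "app.example.com"}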
    @@ -463,7 +544,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             # Check to see that origin matches host directly, including ports
             return origin == host
     
    -    def set_nodelay(self, value):
    +    def set_nodelay(self, value: bool) -> None:
             """Set the no-delay flag for this stream.
     
             By default, small messages may be delayed and/or combined to minimize
    @@ -477,9 +558,10 @@ class WebSocketHandler(tornado.web.RequestHandler):
     
             .. versionadded:: 3.1
             """
    -        self.stream.set_nodelay(value)
    +        assert self.ws_connection is not None
    +        self.ws_connection.set_nodelay(value)
     
    -    def on_connection_close(self):
    +    def on_connection_close(self) -> None:
             if self.ws_connection:
                 self.ws_connection.on_connection_close()
                 self.ws_connection = None
    @@ -488,7 +570,14 @@ class WebSocketHandler(tornado.web.RequestHandler):
                 self.on_close()
                 self._break_cycles()
     
    -    def _break_cycles(self):
    +    def on_ws_connection_close(
    +        self, close_code: int = None, close_reason: str = None
    +    ) -> None:
    +        self.close_code = close_code
    +        self.close_reason = close_reason
    +        self.on_connection_close()
    +
    +    def _break_cycles(self) -> None:
             # WebSocketHandlers call finish() early, but we don't want to
             # break up reference cycles (which makes it impossible to call
             # self.render_string) until after we've really closed the
    @@ -497,7 +586,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
             if self.get_status() != 101 or self._on_close_called:
                 super(WebSocketHandler, self)._break_cycles()
     
    -    def send_error(self, *args, **kwargs):
    +    def send_error(self, *args: Any, **kwargs: Any) -> None:
             if self.stream is None:
                 super(WebSocketHandler, self).send_error(*args, **kwargs)
             else:
    @@ -507,36 +596,50 @@ class WebSocketHandler(tornado.web.RequestHandler):
                 # we can close the connection more gracefully.
                 self.stream.close()
     
    -    def get_websocket_protocol(self):
    +    def get_websocket_protocol(self) -> Optional["WebSocketProtocol"]:
             websocket_version = self.request.headers.get("Sec-WebSocket-Version")
             if websocket_version in ("7", "8", "13"):
    -            return WebSocketProtocol13(
    -                self, compression_options=self.get_compression_options())
    +            params = _WebSocketParams(
    +                ping_interval=self.ping_interval,
    +                ping_timeout=self.ping_timeout,
    +                max_message_size=self.max_message_size,
    +                compression_options=self.get_compression_options(),
    +            )
    +            return WebSocketProtocol13(self, False, params)
    +        return None
     
    -    def _attach_stream(self):
    -        self.stream = self.detach()
    -        self.stream.set_close_callback(self.on_connection_close)
    +    def _detach_stream(self) -> IOStream:
             # disable non-WS methods
    -        for method in ["write", "redirect", "set_header", "set_cookie",
    -                       "set_status", "flush", "finish"]:
    +        for method in [
    +            "write",
    +            "redirect",
    +            "set_header",
    +            "set_cookie",
    +            "set_status",
    +            "flush",
    +            "finish",
    +        ]:
                 setattr(self, method, _raise_not_supported_for_websockets)
    +        return self.detach()
     
     
    -def _raise_not_supported_for_websockets(*args, **kwargs):
    +def _raise_not_supported_for_websockets(*args: Any, **kwargs: Any) -> None:
         raise RuntimeError("Method not supported for Web Sockets")
     
     
    -class WebSocketProtocol(object):
    +class WebSocketProtocol(abc.ABC):
         """Base class for WebSocket protocol versions.
         """
    -    def __init__(self, handler):
    +
    +    def __init__(self, handler: "_WebSocketDelegate") -> None:
             self.handler = handler
    -        self.request = handler.request
    -        self.stream = handler.stream
    +        self.stream = None  # type: Optional[IOStream]
             self.client_terminated = False
             self.server_terminated = False
     
    -    def _run_callback(self, callback, *args, **kwargs):
    +    def _run_callback(
    +        self, callback: Callable, *args: Any, **kwargs: Any
    +    ) -> "Optional[Future[Any]]":
             """Runs the given callback with exception handling.
     
             If the callback is a coroutine, returns its Future. On error, aborts the
    @@ -547,80 +650,156 @@ class WebSocketProtocol(object):
             except Exception:
                 self.handler.log_exception(*sys.exc_info())
                 self._abort()
    +            return None
             else:
                 if result is not None:
                     result = gen.convert_yielded(result)
    +                assert self.stream is not None
                     self.stream.io_loop.add_future(result, lambda f: f.result())
                 return result
     
    -    def on_connection_close(self):
    +    def on_connection_close(self) -> None:
             self._abort()
     
    -    def _abort(self):
    +    def _abort(self) -> None:
             """Instantly aborts the WebSocket connection by closing the socket"""
             self.client_terminated = True
             self.server_terminated = True
    -        self.stream.close()  # forcibly tear down the connection
    +        if self.stream is not None:
    +            self.stream.close()  # forcibly tear down the connection
             self.close()  # let the subclass cleanup
     
    +    @abc.abstractmethod
    +    def close(self, code: int = None, reason: str = None) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def is_closing(self) -> bool:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    async def accept_connection(self, handler: WebSocketHandler) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
    +        raise NotImplementedError()
    +
    +    @property
    +    @abc.abstractmethod
    +    def selected_subprotocol(self) -> Optional[str]:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def write_ping(self, data: bytes) -> None:
    +        raise NotImplementedError()
    +
    +    # The entry points below are used by WebSocketClientConnection,
     +    # which was introduced back when only a single version of
     +    # WebSocketProtocol was supported. The WebSocketProtocol/WebSocketProtocol13
    +    # boundary is currently pretty ad-hoc.
    +    @abc.abstractmethod
    +    def _process_server_headers(
    +        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
    +    ) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def start_pinging(self) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    async def _receive_frame_loop(self) -> None:
    +        raise NotImplementedError()
    +
    +    @abc.abstractmethod
    +    def set_nodelay(self, x: bool) -> None:
    +        raise NotImplementedError()
    +
     
     class _PerMessageDeflateCompressor(object):
    -    def __init__(self, persistent, max_wbits, compression_options=None):
    +    def __init__(
    +        self,
    +        persistent: bool,
    +        max_wbits: Optional[int],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
             if max_wbits is None:
                 max_wbits = zlib.MAX_WBITS
             # There is no symbolic constant for the minimum wbits value.
             if not (8 <= max_wbits <= zlib.MAX_WBITS):
    -            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    -                             max_wbits, zlib.MAX_WBITS)
    +            raise ValueError(
    +                "Invalid max_wbits value %r; allowed range 8-%d",
    +                max_wbits,
    +                zlib.MAX_WBITS,
    +            )
             self._max_wbits = max_wbits
     
    -        if compression_options is None or 'compression_level' not in compression_options:
    +        if (
    +            compression_options is None
    +            or "compression_level" not in compression_options
    +        ):
                 self._compression_level = tornado.web.GZipContentEncoding.GZIP_LEVEL
             else:
    -            self._compression_level = compression_options['compression_level']
    +            self._compression_level = compression_options["compression_level"]
     
    -        if compression_options is None or 'mem_level' not in compression_options:
    +        if compression_options is None or "mem_level" not in compression_options:
                 self._mem_level = 8
             else:
    -            self._mem_level = compression_options['mem_level']
    +            self._mem_level = compression_options["mem_level"]
     
             if persistent:
    -            self._compressor = self._create_compressor()
    +            self._compressor = self._create_compressor()  # type: Optional[_Compressor]
             else:
                 self._compressor = None
     
    -    def _create_compressor(self):
    -        return zlib.compressobj(self._compression_level,
    -                                zlib.DEFLATED, -self._max_wbits, self._mem_level)
    +    def _create_compressor(self) -> "_Compressor":
    +        return zlib.compressobj(
    +            self._compression_level, zlib.DEFLATED, -self._max_wbits, self._mem_level
    +        )
     
    -    def compress(self, data):
    +    def compress(self, data: bytes) -> bytes:
             compressor = self._compressor or self._create_compressor()
    -        data = (compressor.compress(data) +
    -                compressor.flush(zlib.Z_SYNC_FLUSH))
    -        assert data.endswith(b'\x00\x00\xff\xff')
    +        data = compressor.compress(data) + compressor.flush(zlib.Z_SYNC_FLUSH)
    +        assert data.endswith(b"\x00\x00\xff\xff")
             return data[:-4]
     
     
     class _PerMessageDeflateDecompressor(object):
    -    def __init__(self, persistent, max_wbits, max_message_size, compression_options=None):
    +    def __init__(
    +        self,
    +        persistent: bool,
    +        max_wbits: Optional[int],
    +        max_message_size: int,
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
             self._max_message_size = max_message_size
             if max_wbits is None:
                 max_wbits = zlib.MAX_WBITS
             if not (8 <= max_wbits <= zlib.MAX_WBITS):
    -            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    -                             max_wbits, zlib.MAX_WBITS)
    +            raise ValueError(
    +                "Invalid max_wbits value %r; allowed range 8-%d",
    +                max_wbits,
    +                zlib.MAX_WBITS,
    +            )
             self._max_wbits = max_wbits
             if persistent:
    -            self._decompressor = self._create_decompressor()
    +            self._decompressor = (
    +                self._create_decompressor()
    +            )  # type: Optional[_Decompressor]
             else:
                 self._decompressor = None
     
    -    def _create_decompressor(self):
    +    def _create_decompressor(self) -> "_Decompressor":
             return zlib.decompressobj(-self._max_wbits)
     
    -    def decompress(self, data):
    +    def decompress(self, data: bytes) -> bytes:
             decompressor = self._decompressor or self._create_decompressor()
    -        result = decompressor.decompress(data + b'\x00\x00\xff\xff', self._max_message_size)
    +        result = decompressor.decompress(
    +            data + b"\x00\x00\xff\xff", self._max_message_size
    +        )
             if decompressor.unconsumed_tail:
                 raise _DecompressTooLargeError()
             return result
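
The four bytes the compressor strips and the decompressor re-appends above are the `\x00\x00\xff\xff` trailer that `Z_SYNC_FLUSH` always emits (the per-message-deflate trailer trick); a self-contained round trip:

    import zlib

    raw = b"hello websocket"
    c = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
    data = c.compress(raw) + c.flush(zlib.Z_SYNC_FLUSH)
    assert data.endswith(b"\x00\x00\xff\xff")
    wire = data[:-4]  # what actually goes on the wire

    d = zlib.decompressobj(-zlib.MAX_WBITS)
    assert d.decompress(wire + b"\x00\x00\xff\xff") == raw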
    @@ -632,30 +811,38 @@ class WebSocketProtocol13(WebSocketProtocol):
         This class supports versions 7 and 8 of the protocol in addition to the
         final version 13.
         """
    +
         # Bit masks for the first byte of a frame.
         FIN = 0x80
         RSV1 = 0x40
         RSV2 = 0x20
         RSV3 = 0x10
         RSV_MASK = RSV1 | RSV2 | RSV3
    -    OPCODE_MASK = 0x0f
    +    OPCODE_MASK = 0x0F
     
    -    def __init__(self, handler, mask_outgoing=False,
    -                 compression_options=None):
    +    stream = None  # type: IOStream
    +
    +    def __init__(
    +        self,
    +        handler: "_WebSocketDelegate",
    +        mask_outgoing: bool,
    +        params: _WebSocketParams,
    +    ) -> None:
             WebSocketProtocol.__init__(self, handler)
             self.mask_outgoing = mask_outgoing
    +        self.params = params
             self._final_frame = False
             self._frame_opcode = None
             self._masked_frame = None
    -        self._frame_mask = None
    +        self._frame_mask = None  # type: Optional[bytes]
             self._frame_length = None
    -        self._fragmented_message_buffer = None
    +        self._fragmented_message_buffer = None  # type: Optional[bytes]
             self._fragmented_message_opcode = None
    -        self._waiting = None
    -        self._compression_options = compression_options
    -        self._decompressor = None
    -        self._compressor = None
    -        self._frame_compressed = None
    +        self._waiting = None  # type: object
    +        self._compression_options = params.compression_options
    +        self._decompressor = None  # type: Optional[_PerMessageDeflateDecompressor]
    +        self._compressor = None  # type: Optional[_PerMessageDeflateCompressor]
    +        self._frame_compressed = None  # type: Optional[bool]
             # The total uncompressed size of all messages received or sent.
             # Unicode messages are encoded to utf8.
             # Only for testing; subject to change.
    @@ -665,40 +852,53 @@ class WebSocketProtocol13(WebSocketProtocol):
             # the effect of compression, frame overhead, and control frames.
             self._wire_bytes_in = 0
             self._wire_bytes_out = 0
    -        self.ping_callback = None
    -        self.last_ping = 0
    -        self.last_pong = 0
    +        self.ping_callback = None  # type: Optional[PeriodicCallback]
    +        self.last_ping = 0.0
    +        self.last_pong = 0.0
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
     
    -    def accept_connection(self):
    +    # Use a property for this to satisfy the abc.
    +    @property
    +    def selected_subprotocol(self) -> Optional[str]:
    +        return self._selected_subprotocol
    +
    +    @selected_subprotocol.setter
    +    def selected_subprotocol(self, value: Optional[str]) -> None:
    +        self._selected_subprotocol = value
    +
    +    async def accept_connection(self, handler: WebSocketHandler) -> None:
             try:
    -            self._handle_websocket_headers()
    +            self._handle_websocket_headers(handler)
             except ValueError:
    -            self.handler.set_status(400)
    +            handler.set_status(400)
                 log_msg = "Missing/Invalid WebSocket headers"
    -            self.handler.finish(log_msg)
    +            handler.finish(log_msg)
                 gen_log.debug(log_msg)
                 return
     
             try:
    -            self._accept_connection()
    +            await self._accept_connection(handler)
    +        except asyncio.CancelledError:
    +            self._abort()
    +            return
             except ValueError:
    -            gen_log.debug("Malformed WebSocket request received",
    -                          exc_info=True)
    +            gen_log.debug("Malformed WebSocket request received", exc_info=True)
                 self._abort()
                 return
     
    -    def _handle_websocket_headers(self):
    +    def _handle_websocket_headers(self, handler: WebSocketHandler) -> None:
             """Verifies all invariant- and required headers
     
             If a header is missing or have an incorrect value ValueError will be
             raised
             """
             fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
    -        if not all(map(lambda f: self.request.headers.get(f), fields)):
    +        if not all(map(lambda f: handler.request.headers.get(f), fields)):
                 raise ValueError("Missing/Invalid WebSocket headers")
     
         @staticmethod
    -    def compute_accept_value(key):
    +    def compute_accept_value(key: Union[str, bytes]) -> str:
             """Computes the value for the Sec-WebSocket-Accept header,
             given the value for Sec-WebSocket-Key.
             """
    @@ -707,114 +907,143 @@ class WebSocketProtocol13(WebSocketProtocol):
             sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
             return native_str(base64.b64encode(sha1.digest()))
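
`compute_accept_value` above implements the fixed RFC 6455 handshake; spelled out with the RFC's own sample key, it reduces to:

    import base64
    import hashlib

    key = "dGhlIHNhbXBsZSBub25jZQ=="  # sample key from RFC 6455
    magic = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
    accept = base64.b64encode(hashlib.sha1((key + magic).encode()).digest())
    assert accept == b"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="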
     
    -    def _challenge_response(self):
    +    def _challenge_response(self, handler: WebSocketHandler) -> str:
             return WebSocketProtocol13.compute_accept_value(
    -            self.request.headers.get("Sec-Websocket-Key"))
    +            cast(str, handler.request.headers.get("Sec-Websocket-Key"))
    +        )
     
    -    @gen.coroutine
    -    def _accept_connection(self):
    -        subprotocol_header = self.request.headers.get("Sec-WebSocket-Protocol")
    +    async def _accept_connection(self, handler: WebSocketHandler) -> None:
    +        subprotocol_header = handler.request.headers.get("Sec-WebSocket-Protocol")
             if subprotocol_header:
    -            subprotocols = [s.strip() for s in subprotocol_header.split(',')]
    +            subprotocols = [s.strip() for s in subprotocol_header.split(",")]
             else:
                 subprotocols = []
    -        self.selected_subprotocol = self.handler.select_subprotocol(subprotocols)
    +        self.selected_subprotocol = handler.select_subprotocol(subprotocols)
             if self.selected_subprotocol:
                 assert self.selected_subprotocol in subprotocols
    -            self.handler.set_header("Sec-WebSocket-Protocol", self.selected_subprotocol)
    +            handler.set_header("Sec-WebSocket-Protocol", self.selected_subprotocol)
     
    -        extensions = self._parse_extensions_header(self.request.headers)
    +        extensions = self._parse_extensions_header(handler.request.headers)
             for ext in extensions:
    -            if (ext[0] == 'permessage-deflate' and
    -                    self._compression_options is not None):
    +            if ext[0] == "permessage-deflate" and self._compression_options is not None:
                     # TODO: negotiate parameters if compression_options
                     # specifies limits.
    -                self._create_compressors('server', ext[1], self._compression_options)
    -                if ('client_max_window_bits' in ext[1] and
    -                        ext[1]['client_max_window_bits'] is None):
    +                self._create_compressors("server", ext[1], self._compression_options)
    +                if (
    +                    "client_max_window_bits" in ext[1]
    +                    and ext[1]["client_max_window_bits"] is None
    +                ):
                         # Don't echo an offered client_max_window_bits
                         # parameter with no value.
    -                    del ext[1]['client_max_window_bits']
    -                self.handler.set_header("Sec-WebSocket-Extensions",
    -                                        httputil._encode_header(
    -                                            'permessage-deflate', ext[1]))
    +                    del ext[1]["client_max_window_bits"]
    +                handler.set_header(
    +                    "Sec-WebSocket-Extensions",
    +                    httputil._encode_header("permessage-deflate", ext[1]),
    +                )
                     break
     
    -        self.handler.clear_header("Content-Type")
    -        self.handler.set_status(101)
    -        self.handler.set_header("Upgrade", "websocket")
    -        self.handler.set_header("Connection", "Upgrade")
    -        self.handler.set_header("Sec-WebSocket-Accept", self._challenge_response())
    -        self.handler.finish()
    +        handler.clear_header("Content-Type")
    +        handler.set_status(101)
    +        handler.set_header("Upgrade", "websocket")
    +        handler.set_header("Connection", "Upgrade")
    +        handler.set_header("Sec-WebSocket-Accept", self._challenge_response(handler))
    +        handler.finish()
     
    -        self.handler._attach_stream()
    -        self.stream = self.handler.stream
    +        self.stream = handler._detach_stream()
     
             self.start_pinging()
    -        open_result = self._run_callback(self.handler.open, *self.handler.open_args,
    -                                         **self.handler.open_kwargs)
    -        if open_result is not None:
    -            yield open_result
    -        yield self._receive_frame_loop()
    +        try:
    +            open_result = handler.open(*handler.open_args, **handler.open_kwargs)
    +            if open_result is not None:
    +                await open_result
    +        except Exception:
    +            handler.log_exception(*sys.exc_info())
    +            self._abort()
    +            return
     
    -    def _parse_extensions_header(self, headers):
    -        extensions = headers.get("Sec-WebSocket-Extensions", '')
    +        await self._receive_frame_loop()
    +
    +    def _parse_extensions_header(
    +        self, headers: httputil.HTTPHeaders
    +    ) -> List[Tuple[str, Dict[str, str]]]:
    +        extensions = headers.get("Sec-WebSocket-Extensions", "")
             if extensions:
    -            return [httputil._parse_header(e.strip())
    -                    for e in extensions.split(',')]
    +            return [httputil._parse_header(e.strip()) for e in extensions.split(",")]
             return []
     
    -    def _process_server_headers(self, key, headers):
    +    def _process_server_headers(
    +        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
    +    ) -> None:
             """Process the headers sent by the server to this client connection.
     
             'key' is the websocket handshake challenge/response key.
             """
    -        assert headers['Upgrade'].lower() == 'websocket'
    -        assert headers['Connection'].lower() == 'upgrade'
    +        assert headers["Upgrade"].lower() == "websocket"
    +        assert headers["Connection"].lower() == "upgrade"
             accept = self.compute_accept_value(key)
    -        assert headers['Sec-Websocket-Accept'] == accept
    +        assert headers["Sec-Websocket-Accept"] == accept
     
             extensions = self._parse_extensions_header(headers)
             for ext in extensions:
    -            if (ext[0] == 'permessage-deflate' and
    -                    self._compression_options is not None):
    -                self._create_compressors('client', ext[1])
    +            if ext[0] == "permessage-deflate" and self._compression_options is not None:
    +                self._create_compressors("client", ext[1])
                 else:
                     raise ValueError("unsupported extension %r", ext)
     
    -        self.selected_subprotocol = headers.get('Sec-WebSocket-Protocol', None)
    +        self.selected_subprotocol = headers.get("Sec-WebSocket-Protocol", None)
     
    -    def _get_compressor_options(self, side, agreed_parameters, compression_options=None):
    +    def _get_compressor_options(
    +        self,
    +        side: str,
    +        agreed_parameters: Dict[str, Any],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> Dict[str, Any]:
             """Converts a websocket agreed_parameters set to keyword arguments
             for our compressor objects.
             """
             options = dict(
    -            persistent=(side + '_no_context_takeover') not in agreed_parameters)
    -        wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
    +            persistent=(side + "_no_context_takeover") not in agreed_parameters
    +        )  # type: Dict[str, Any]
    +        wbits_header = agreed_parameters.get(side + "_max_window_bits", None)
             if wbits_header is None:
    -            options['max_wbits'] = zlib.MAX_WBITS
    +            options["max_wbits"] = zlib.MAX_WBITS
             else:
    -            options['max_wbits'] = int(wbits_header)
    -        options['compression_options'] = compression_options
    +            options["max_wbits"] = int(wbits_header)
    +        options["compression_options"] = compression_options
             return options
     
    -    def _create_compressors(self, side, agreed_parameters, compression_options=None):
    +    def _create_compressors(
    +        self,
    +        side: str,
    +        agreed_parameters: Dict[str, Any],
    +        compression_options: Dict[str, Any] = None,
    +    ) -> None:
             # TODO: handle invalid parameters gracefully
    -        allowed_keys = set(['server_no_context_takeover',
    -                            'client_no_context_takeover',
    -                            'server_max_window_bits',
    -                            'client_max_window_bits'])
    +        allowed_keys = set(
    +            [
    +                "server_no_context_takeover",
    +                "client_no_context_takeover",
    +                "server_max_window_bits",
    +                "client_max_window_bits",
    +            ]
    +        )
             for key in agreed_parameters:
                 if key not in allowed_keys:
                     raise ValueError("unsupported compression parameter %r" % key)
    -        other_side = 'client' if (side == 'server') else 'server'
    +        other_side = "client" if (side == "server") else "server"
             self._compressor = _PerMessageDeflateCompressor(
    -            **self._get_compressor_options(side, agreed_parameters, compression_options))
    +            **self._get_compressor_options(side, agreed_parameters, compression_options)
    +        )
             self._decompressor = _PerMessageDeflateDecompressor(
    -            max_message_size=self.handler.max_message_size,
    -            **self._get_compressor_options(other_side, agreed_parameters, compression_options))
    +            max_message_size=self.params.max_message_size,
    +            **self._get_compressor_options(
    +                other_side, agreed_parameters, compression_options
    +            )
    +        )
     
    -    def _write_frame(self, fin, opcode, data, flags=0):
    +    def _write_frame(
    +        self, fin: bool, opcode: int, data: bytes, flags: int = 0
    +    ) -> "Future[None]":
             data_len = len(data)
             if opcode & 0x8:
                 # All control frames MUST have a payload length of 125
    @@ -845,7 +1074,9 @@ class WebSocketProtocol13(WebSocketProtocol):
             self._wire_bytes_out += len(frame)
             return self.stream.write(frame)
     
    -    def write_message(self, message, binary=False):
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
             """Sends the given message to the client of this Web Socket."""
             if binary:
                 opcode = 0x2
    @@ -868,35 +1099,35 @@ class WebSocketProtocol13(WebSocketProtocol):
             except StreamClosedError:
                 raise WebSocketClosedError()
     
    -        @gen.coroutine
    -        def wrapper():
    +        async def wrapper() -> None:
                 try:
    -                yield fut
    +                await fut
                 except StreamClosedError:
                     raise WebSocketClosedError()
    -        return wrapper()
     
    -    def write_ping(self, data):
    +        return asyncio.ensure_future(wrapper())
    +
    +    def write_ping(self, data: bytes) -> None:
             """Send ping frame."""
             assert isinstance(data, bytes)
             self._write_frame(True, 0x9, data)
     
    -    @gen.coroutine
    -    def _receive_frame_loop(self):
    +    async def _receive_frame_loop(self) -> None:
             try:
                 while not self.client_terminated:
    -                yield self._receive_frame()
    +                await self._receive_frame()
             except StreamClosedError:
                 self._abort()
    +        self.handler.on_ws_connection_close(self.close_code, self.close_reason)
     
    -    def _read_bytes(self, n):
    +    async def _read_bytes(self, n: int) -> bytes:
    +        data = await self.stream.read_bytes(n)
             self._wire_bytes_in += n
    -        return self.stream.read_bytes(n)
    +        return data
     
    -    @gen.coroutine
    -    def _receive_frame(self):
    +    async def _receive_frame(self) -> None:
             # Read the frame header.
    -        data = yield self._read_bytes(2)
    +        data = await self._read_bytes(2)
             header, mask_payloadlen = struct.unpack("BB", data)
             is_final_frame = header & self.FIN
             reserved_bits = header & self.RSV_MASK
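
Every frame starts with the two bytes unpacked above; decoding a sample header by hand (the byte values are illustrative):

    import struct

    # 0x81 = FIN bit | opcode 0x1 (text); 0x85 = mask bit | payload length 5.
    header, mask_payloadlen = struct.unpack("BB", b"\x81\x85")
    assert header & 0x80                  # FIN: final fragment
    assert (header & 0x0F) == 0x1         # opcode: text
    assert mask_payloadlen & 0x80         # mask bit: client frames are masked
    assert (mask_payloadlen & 0x7F) == 5  # 7-bit payload length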
    @@ -913,7 +1144,7 @@ class WebSocketProtocol13(WebSocketProtocol):
                 self._abort()
                 return
             is_masked = bool(mask_payloadlen & 0x80)
    -        payloadlen = mask_payloadlen & 0x7f
    +        payloadlen = mask_payloadlen & 0x7F
     
             # Parse and validate the length.
             if opcode_is_control and payloadlen >= 126:
    @@ -923,24 +1154,25 @@ class WebSocketProtocol13(WebSocketProtocol):
             if payloadlen < 126:
                 self._frame_length = payloadlen
             elif payloadlen == 126:
    -            data = yield self._read_bytes(2)
    +            data = await self._read_bytes(2)
                 payloadlen = struct.unpack("!H", data)[0]
             elif payloadlen == 127:
    -            data = yield self._read_bytes(8)
    +            data = await self._read_bytes(8)
                 payloadlen = struct.unpack("!Q", data)[0]
             new_len = payloadlen
             if self._fragmented_message_buffer is not None:
                 new_len += len(self._fragmented_message_buffer)
    -        if new_len > self.handler.max_message_size:
    +        if new_len > self.params.max_message_size:
                 self.close(1009, "message too big")
                 self._abort()
                 return
     
             # Read the payload, unmasking if necessary.
             if is_masked:
    -            self._frame_mask = yield self._read_bytes(4)
    -        data = yield self._read_bytes(payloadlen)
    +            self._frame_mask = await self._read_bytes(4)
    +        data = await self._read_bytes(payloadlen)
             if is_masked:
    +            assert self._frame_mask is not None
                 data = _websocket_mask(self._frame_mask, data)
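
`_websocket_mask` above (imported from `tornado.util`) applies the RFC 6455 client-to-server mask; a pure-Python equivalent, for reference:

    def websocket_mask_py(mask: bytes, data: bytes) -> bytes:
        # XOR each payload byte with the repeating 4-byte mask.
        return bytes(b ^ mask[i % 4] for i, b in enumerate(data))

    mask = b"\x37\xfa\x21\x3d"
    masked = websocket_mask_py(mask, b"Hello")
    assert websocket_mask_py(mask, masked) == b"Hello"  # XOR is self-inverse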
     
             # Decide what to do with this frame.
    @@ -974,20 +1206,21 @@ class WebSocketProtocol13(WebSocketProtocol):
             if is_final_frame:
                 handled_future = self._handle_message(opcode, data)
                 if handled_future is not None:
    -                yield handled_future
    +                await handled_future
     
    -    def _handle_message(self, opcode, data):
    +    def _handle_message(self, opcode: int, data: bytes) -> "Optional[Future[None]]":
             """Execute on_message, returning its Future if it is a coroutine."""
             if self.client_terminated:
    -            return
    +            return None
     
             if self._frame_compressed:
    +            assert self._decompressor is not None
                 try:
                     data = self._decompressor.decompress(data)
                 except _DecompressTooLargeError:
                     self.close(1009, "message too big after decompression")
                     self._abort()
    -                return
    +                return None
     
             if opcode == 0x1:
                 # UTF-8 data
    @@ -996,7 +1229,7 @@ class WebSocketProtocol13(WebSocketProtocol):
                     decoded = data.decode("utf-8")
                 except UnicodeDecodeError:
                     self._abort()
    -                return
    +                return None
                 return self._run_callback(self.handler.on_message, decoded)
             elif opcode == 0x2:
                 # Binary data
    @@ -1006,11 +1239,11 @@ class WebSocketProtocol13(WebSocketProtocol):
                 # Close
                 self.client_terminated = True
                 if len(data) >= 2:
    -                self.handler.close_code = struct.unpack('>H', data[:2])[0]
    +                self.close_code = struct.unpack(">H", data[:2])[0]
                 if len(data) > 2:
    -                self.handler.close_reason = to_unicode(data[2:])
    +                self.close_reason = to_unicode(data[2:])
                 # Echo the received close code, if any (RFC 6455 section 5.5.1).
    -            self.close(self.handler.close_code)
    +            self.close(self.close_code)
             elif opcode == 0x9:
                 # Ping
                 try:
    @@ -1024,17 +1257,18 @@ class WebSocketProtocol13(WebSocketProtocol):
                 return self._run_callback(self.handler.on_pong, data)
             else:
                 self._abort()
    +        return None
     
    -    def close(self, code=None, reason=None):
    +    def close(self, code: int = None, reason: str = None) -> None:
             """Closes the WebSocket connection."""
             if not self.server_terminated:
                 if not self.stream.closed():
                     if code is None and reason is not None:
                         code = 1000  # "normal closure" status code
                     if code is None:
    -                    close_data = b''
    +                    close_data = b""
                     else:
    -                    close_data = struct.pack('>H', code)
    +                    close_data = struct.pack(">H", code)
                     if reason is not None:
                         close_data += utf8(reason)
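
The close payload assembled just above is a big-endian 16-bit status code plus an optional UTF-8 reason; for example:

    import struct

    # 1000 (0x03e8) is the "normal closure" status code.
    close_data = struct.pack(">H", 1000) + "bye".encode("utf-8")
    assert close_data == b"\x03\xe8bye"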
                     try:
    @@ -1051,36 +1285,49 @@ class WebSocketProtocol13(WebSocketProtocol):
                 # Give the client a few seconds to complete a clean shutdown,
                 # otherwise just close the connection.
                 self._waiting = self.stream.io_loop.add_timeout(
    -                self.stream.io_loop.time() + 5, self._abort)
    +                self.stream.io_loop.time() + 5, self._abort
    +            )
    +
    +    def is_closing(self) -> bool:
    +        """Return ``True`` if this connection is closing.
    +
    +        The connection is considered closing if either side has
    +        initiated its closing handshake or if the stream has been
    +        shut down uncleanly.
    +        """
    +        return self.stream.closed() or self.client_terminated or self.server_terminated
     
         @property
    -    def ping_interval(self):
    -        interval = self.handler.ping_interval
    +    def ping_interval(self) -> Optional[float]:
    +        interval = self.params.ping_interval
             if interval is not None:
                 return interval
             return 0
     
         @property
    -    def ping_timeout(self):
    -        timeout = self.handler.ping_timeout
    +    def ping_timeout(self) -> Optional[float]:
    +        timeout = self.params.ping_timeout
             if timeout is not None:
                 return timeout
    +        assert self.ping_interval is not None
             return max(3 * self.ping_interval, 30)
     
    -    def start_pinging(self):
    +    def start_pinging(self) -> None:
             """Start sending periodic pings to keep the connection alive"""
    +        assert self.ping_interval is not None
             if self.ping_interval > 0:
                 self.last_ping = self.last_pong = IOLoop.current().time()
                 self.ping_callback = PeriodicCallback(
    -                self.periodic_ping, self.ping_interval * 1000)
    +                self.periodic_ping, self.ping_interval * 1000
    +            )
                 self.ping_callback.start()
     
    -    def periodic_ping(self):
    +    def periodic_ping(self) -> None:
             """Send a ping to keep the websocket alive
     
             Called periodically if the websocket_ping_interval is set and non-zero.
             """
    -        if self.stream.closed() and self.ping_callback is not None:
    +        if self.is_closing() and self.ping_callback is not None:
                 self.ping_callback.stop()
                 return
     
    @@ -1090,14 +1337,21 @@ class WebSocketProtocol13(WebSocketProtocol):
             now = IOLoop.current().time()
             since_last_pong = now - self.last_pong
             since_last_ping = now - self.last_ping
    -        if (since_last_ping < 2 * self.ping_interval and
    -                since_last_pong > self.ping_timeout):
    +        assert self.ping_interval is not None
    +        assert self.ping_timeout is not None
    +        if (
    +            since_last_ping < 2 * self.ping_interval
    +            and since_last_pong > self.ping_timeout
    +        ):
                 self.close()
                 return
     
    -        self.write_ping(b'')
    +        self.write_ping(b"")
             self.last_ping = now
     
    +    def set_nodelay(self, x: bool) -> None:
    +        self.stream.set_nodelay(x)
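Taken together, start_pinging and periodic_ping implement a liveness rule: the connection is declared dead only when our own pings are up to date but the peer's pongs have gone quiet for longer than ping_timeout (which, per the property above, defaults to max(3 * ping_interval, 30); e.g. ping_interval=10 gives max(30, 30) = 30 seconds). A distilled sketch, with ping_says_dead as an illustrative helper name rather than anything in Tornado:

    # Decision rule distilled from periodic_ping above (illustrative helper).
    def ping_says_dead(now, last_ping, last_pong, ping_interval, ping_timeout):
        since_last_ping = now - last_ping  # freshness of our own pings
        since_last_pong = now - last_pong  # silence from the peer
        return (since_last_ping < 2 * ping_interval
                and since_last_pong > ping_timeout)
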
    +
     
     class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         """WebSocket client connection.
    @@ -1105,46 +1359,68 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         This class should not be instantiated directly; use the
         `websocket_connect` function instead.
         """
    -    def __init__(self, request, on_message_callback=None,
    -                 compression_options=None, ping_interval=None, ping_timeout=None,
    -                 max_message_size=None, subprotocols=[]):
    -        self.compression_options = compression_options
    -        self.connect_future = Future()
    -        self.protocol = None
    -        self.read_queue = Queue(1)
    +
    +    protocol = None  # type: WebSocketProtocol
    +
    +    def __init__(
    +        self,
    +        request: httpclient.HTTPRequest,
    +        on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
    +        compression_options: Dict[str, Any] = None,
    +        ping_interval: float = None,
    +        ping_timeout: float = None,
    +        max_message_size: int = _default_max_message_size,
    +        subprotocols: Optional[List[str]] = [],
    +    ) -> None:
    +        self.connect_future = Future()  # type: Future[WebSocketClientConnection]
    +        self.read_queue = Queue(1)  # type: Queue[Union[None, str, bytes]]
             self.key = base64.b64encode(os.urandom(16))
             self._on_message_callback = on_message_callback
    -        self.close_code = self.close_reason = None
    -        self.ping_interval = ping_interval
    -        self.ping_timeout = ping_timeout
    -        self.max_message_size = max_message_size
    +        self.close_code = None  # type: Optional[int]
    +        self.close_reason = None  # type: Optional[str]
    +        self.params = _WebSocketParams(
    +            ping_interval=ping_interval,
    +            ping_timeout=ping_timeout,
    +            max_message_size=max_message_size,
    +            compression_options=compression_options,
    +        )
     
    -        scheme, sep, rest = request.url.partition(':')
    -        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
    +        scheme, sep, rest = request.url.partition(":")
    +        scheme = {"ws": "http", "wss": "https"}[scheme]
             request.url = scheme + sep + rest
    -        request.headers.update({
    -            'Upgrade': 'websocket',
    -            'Connection': 'Upgrade',
    -            'Sec-WebSocket-Key': self.key,
    -            'Sec-WebSocket-Version': '13',
    -        })
    +        request.headers.update(
    +            {
    +                "Upgrade": "websocket",
    +                "Connection": "Upgrade",
    +                "Sec-WebSocket-Key": self.key,
    +                "Sec-WebSocket-Version": "13",
    +            }
    +        )
             if subprotocols is not None:
    -            request.headers['Sec-WebSocket-Protocol'] = ','.join(subprotocols)
    -        if self.compression_options is not None:
    +            request.headers["Sec-WebSocket-Protocol"] = ",".join(subprotocols)
    +        if compression_options is not None:
                 # Always offer to let the server set our max_wbits (and even though
                 # we don't offer it, we will accept a client_no_context_takeover
                 # from the server).
                 # TODO: set server parameters for deflate extension
                 # if requested in self.compression_options.
    -            request.headers['Sec-WebSocket-Extensions'] = (
    -                'permessage-deflate; client_max_window_bits')
    +            request.headers[
    +                "Sec-WebSocket-Extensions"
    +            ] = "permessage-deflate; client_max_window_bits"
     
             self.tcp_client = TCPClient()
             super(WebSocketClientConnection, self).__init__(
    -            None, request, lambda: None, self._on_http_response,
    -            104857600, self.tcp_client, 65536, 104857600)
    +            None,
    +            request,
    +            lambda: None,
    +            self._on_http_response,
    +            104857600,
    +            self.tcp_client,
    +            65536,
    +            104857600,
    +        )
     
    -    def close(self, code=None, reason=None):
    +    def close(self, code: int = None, reason: str = None) -> None:
             """Closes the websocket connection.
     
             ``code`` and ``reason`` are documented under
    @@ -1158,49 +1434,66 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
             """
             if self.protocol is not None:
                 self.protocol.close(code, reason)
    -            self.protocol = None
    +            self.protocol = None  # type: ignore
     
    -    def on_connection_close(self):
    +    def on_connection_close(self) -> None:
             if not self.connect_future.done():
                 self.connect_future.set_exception(StreamClosedError())
    -        self.on_message(None)
    +        self._on_message(None)
             self.tcp_client.close()
             super(WebSocketClientConnection, self).on_connection_close()
     
    -    def _on_http_response(self, response):
    +    def on_ws_connection_close(
    +        self, close_code: int = None, close_reason: str = None
    +    ) -> None:
    +        self.close_code = close_code
    +        self.close_reason = close_reason
    +        self.on_connection_close()
    +
    +    def _on_http_response(self, response: httpclient.HTTPResponse) -> None:
             if not self.connect_future.done():
                 if response.error:
                     self.connect_future.set_exception(response.error)
                 else:
    -                self.connect_future.set_exception(WebSocketError(
    -                    "Non-websocket response"))
    +                self.connect_future.set_exception(
    +                    WebSocketError("Non-websocket response")
    +                )
     
    -    def headers_received(self, start_line, headers):
    +    async def headers_received(
    +        self,
    +        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
    +        headers: httputil.HTTPHeaders,
    +    ) -> None:
    +        assert isinstance(start_line, httputil.ResponseStartLine)
             if start_line.code != 101:
    -            return super(WebSocketClientConnection, self).headers_received(
    -                start_line, headers)
    -
    -        self.headers = headers
    -        self.protocol = self.get_websocket_protocol()
    -        self.protocol._process_server_headers(self.key, self.headers)
    -        self.protocol.start_pinging()
    -        IOLoop.current().add_callback(self.protocol._receive_frame_loop)
    +            await super(WebSocketClientConnection, self).headers_received(
    +                start_line, headers
    +            )
    +            return
     
             if self._timeout is not None:
                 self.io_loop.remove_timeout(self._timeout)
                 self._timeout = None
     
    -        self.stream = self.connection.detach()
    -        self.stream.set_close_callback(self.on_connection_close)
    +        self.headers = headers
    +        self.protocol = self.get_websocket_protocol()
    +        self.protocol._process_server_headers(self.key, self.headers)
    +        self.protocol.stream = self.connection.detach()
    +
    +        IOLoop.current().add_callback(self.protocol._receive_frame_loop)
    +        self.protocol.start_pinging()
    +
             # Once we've taken over the connection, clear the final callback
             # we set on the http request.  This deactivates the error handling
             # in simple_httpclient that would otherwise interfere with our
             # ability to see exceptions.
    -        self.final_callback = None
    +        self.final_callback = None  # type: ignore
     
             future_set_result_unless_cancelled(self.connect_future, self)
     
    -    def write_message(self, message, binary=False):
    +    def write_message(
    +        self, message: Union[str, bytes], binary: bool = False
    +    ) -> "Future[None]":
             """Sends a message to the WebSocket server.
     
             If the stream is closed, raises `WebSocketClosedError`.
    @@ -1212,7 +1505,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
             """
             return self.protocol.write_message(message, binary=binary)
     
    -    def read_message(self, callback=None):
    +    def read_message(
    +        self, callback: Callable[["Future[Union[None, str, bytes]]"], None] = None
    +    ) -> Awaitable[Union[None, str, bytes]]:
             """Reads a message from the WebSocket server.
     
             If on_message_callback was specified at WebSocket
    @@ -1224,18 +1519,24 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
             ready.
             """
     
    -        future = self.read_queue.get()
    +        awaitable = self.read_queue.get()
             if callback is not None:
    -            self.io_loop.add_future(future, callback)
    -        return future
    +            self.io_loop.add_future(asyncio.ensure_future(awaitable), callback)
    +        return awaitable
     
    -    def on_message(self, message):
    +    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
    +        return self._on_message(message)
    +
    +    def _on_message(
    +        self, message: Union[None, str, bytes]
    +    ) -> Optional[Awaitable[None]]:
             if self._on_message_callback:
                 self._on_message_callback(message)
    +            return None
             else:
                 return self.read_queue.put(message)
     
    -    def ping(self, data=b''):
    +    def ping(self, data: bytes = b"") -> None:
             """Send ping frame to the remote end.
     
             The data argument allows a small amount of data (up to 125
    @@ -1254,29 +1555,45 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
                 raise WebSocketClosedError()
             self.protocol.write_ping(data)
     
    -    def on_pong(self, data):
    +    def on_pong(self, data: bytes) -> None:
             pass
     
    -    def on_ping(self, data):
    +    def on_ping(self, data: bytes) -> None:
             pass
     
    -    def get_websocket_protocol(self):
    -        return WebSocketProtocol13(self, mask_outgoing=True,
    -                                   compression_options=self.compression_options)
    +    def get_websocket_protocol(self) -> WebSocketProtocol:
    +        return WebSocketProtocol13(self, mask_outgoing=True, params=self.params)
     
         @property
    -    def selected_subprotocol(self):
    +    def selected_subprotocol(self) -> Optional[str]:
             """The subprotocol selected by the server.
     
             .. versionadded:: 5.1
             """
             return self.protocol.selected_subprotocol
     
    +    def log_exception(
    +        self,
    +        typ: "Optional[Type[BaseException]]",
    +        value: Optional[BaseException],
    +        tb: Optional[TracebackType],
    +    ) -> None:
    +        assert typ is not None
    +        assert value is not None
    +        app_log.error("Uncaught exception %s", value, exc_info=(typ, value, tb))
     
    -def websocket_connect(url, callback=None, connect_timeout=None,
    -                      on_message_callback=None, compression_options=None,
    -                      ping_interval=None, ping_timeout=None,
    -                      max_message_size=_default_max_message_size, subprotocols=None):
    +
    +def websocket_connect(
    +    url: Union[str, httpclient.HTTPRequest],
    +    callback: Callable[["Future[WebSocketClientConnection]"], None] = None,
    +    connect_timeout: float = None,
    +    on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
    +    compression_options: Dict[str, Any] = None,
    +    ping_interval: float = None,
    +    ping_timeout: float = None,
    +    max_message_size: int = _default_max_message_size,
    +    subprotocols: List[str] = None,
    +) -> "Awaitable[WebSocketClientConnection]":
         """Client-side websocket support.
     
         Takes a url and returns a Future whose result is a
    @@ -1328,15 +1645,19 @@ def websocket_connect(url, callback=None, connect_timeout=None,
             request.headers = httputil.HTTPHeaders(request.headers)
         else:
             request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    -    request = httpclient._RequestProxy(
    -        request, httpclient.HTTPRequest._DEFAULTS)
    -    conn = WebSocketClientConnection(request,
    -                                     on_message_callback=on_message_callback,
    -                                     compression_options=compression_options,
    -                                     ping_interval=ping_interval,
    -                                     ping_timeout=ping_timeout,
    -                                     max_message_size=max_message_size,
    -                                     subprotocols=subprotocols)
    +    request = cast(
    +        httpclient.HTTPRequest,
    +        httpclient._RequestProxy(request, httpclient.HTTPRequest._DEFAULTS),
    +    )
    +    conn = WebSocketClientConnection(
    +        request,
    +        on_message_callback=on_message_callback,
    +        compression_options=compression_options,
    +        ping_interval=ping_interval,
    +        ping_timeout=ping_timeout,
    +        max_message_size=max_message_size,
    +        subprotocols=subprotocols,
    +    )
         if callback is not None:
             IOLoop.current().add_future(conn.connect_future, callback)
         return conn.connect_future
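For reference, a hedged usage sketch of the client API after this rewrite; the URL and keep-alive values below are assumed for illustration, not taken from this repository:

    import asyncio
    from tornado.websocket import websocket_connect

    async def echo_once(url):
        conn = await websocket_connect(url, ping_interval=10, ping_timeout=30)
        await conn.write_message("hello")
        msg = await conn.read_message()  # resolves to None once the peer closes
        conn.close()
        return msg

    # asyncio.run(echo_once("ws://127.0.0.1:8888/ws"))  # illustrative URL
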
    diff --git a/server/www/packages/packages-windows/x86/tornado/wsgi.py b/server/www/packages/packages-windows/x86/tornado/wsgi.py
    index e1230da..77124aa 100644
    --- a/server/www/packages/packages-windows/x86/tornado/wsgi.py
    +++ b/server/www/packages/packages-windows/x86/tornado/wsgi.py
    @@ -16,235 +16,41 @@
     """WSGI support for the Tornado web framework.
     
     WSGI is the Python standard for web servers, and allows for interoperability
    -between Tornado and other Python web frameworks and servers.  This module
    -provides WSGI support in two ways:
    +between Tornado and other Python web frameworks and servers.
    +
    +This module provides WSGI support via the `WSGIContainer` class, which
    +makes it possible to run applications using other WSGI frameworks on
    +the Tornado HTTP server. The reverse is not supported; the Tornado
    +`.Application` and `.RequestHandler` classes are designed for use with
    +the Tornado `.HTTPServer` and cannot be used in a generic WSGI
    +container.
     
    -* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application
    -  interface.  This is useful for running a Tornado app on another
    -  HTTP server, such as Google App Engine.  See the `WSGIAdapter` class
    -  documentation for limitations that apply.
    -* `WSGIContainer` lets you run other WSGI applications and frameworks on the
    -  Tornado HTTP server.  For example, with this class you can mix Django
    -  and Tornado handlers in a single server.
     """
     
    -from __future__ import absolute_import, division, print_function
    -
     import sys
     from io import BytesIO
     import tornado
    -import warnings
     
    -from tornado.concurrent import Future
     from tornado import escape
     from tornado import httputil
     from tornado.log import access_log
    -from tornado import web
    -from tornado.escape import native_str
    -from tornado.util import unicode_type, PY3
     
    +from typing import List, Tuple, Optional, Callable, Any, Dict, Text
    +from types import TracebackType
    +import typing
    +
    +if typing.TYPE_CHECKING:
    +    from typing import Type  # noqa: F401
    +    from wsgiref.types import WSGIApplication as WSGIAppType  # noqa: F401
     
    -if PY3:
    -    import urllib.parse as urllib_parse  # py3
    -else:
    -    import urllib as urllib_parse
     
     # PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
     # that are smuggled inside objects of type unicode (via the latin1 encoding).
    -# These functions are like those in the tornado.escape module, but defined
    -# here to minimize the temptation to use them in non-wsgi contexts.
    -if str is unicode_type:
    -    def to_wsgi_str(s):
    -        assert isinstance(s, bytes)
    -        return s.decode('latin1')
    -
    -    def from_wsgi_str(s):
    -        assert isinstance(s, str)
    -        return s.encode('latin1')
    -else:
    -    def to_wsgi_str(s):
    -        assert isinstance(s, bytes)
    -        return s
    -
    -    def from_wsgi_str(s):
    -        assert isinstance(s, str)
    -        return s
    -
    -
    -class WSGIApplication(web.Application):
    -    """A WSGI equivalent of `tornado.web.Application`.
    -
    -    .. deprecated:: 4.0
    -
    -       Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
    -       This class will be removed in Tornado 6.0.
    -    """
    -    def __call__(self, environ, start_response):
    -        return WSGIAdapter(self)(environ, start_response)
    -
    -
    -# WSGI has no facilities for flow control, so just return an already-done
    -# Future when the interface requires it.
    -def _dummy_future():
    -    f = Future()
    -    f.set_result(None)
    -    return f
    -
    -
    -class _WSGIConnection(httputil.HTTPConnection):
    -    def __init__(self, method, start_response, context):
    -        self.method = method
    -        self.start_response = start_response
    -        self.context = context
    -        self._write_buffer = []
    -        self._finished = False
    -        self._expected_content_remaining = None
    -        self._error = None
    -
    -    def set_close_callback(self, callback):
    -        # WSGI has no facility for detecting a closed connection mid-request,
    -        # so we can simply ignore the callback.
    -        pass
    -
    -    def write_headers(self, start_line, headers, chunk=None, callback=None):
    -        if self.method == 'HEAD':
    -            self._expected_content_remaining = 0
    -        elif 'Content-Length' in headers:
    -            self._expected_content_remaining = int(headers['Content-Length'])
    -        else:
    -            self._expected_content_remaining = None
    -        self.start_response(
    -            '%s %s' % (start_line.code, start_line.reason),
    -            [(native_str(k), native_str(v)) for (k, v) in headers.get_all()])
    -        if chunk is not None:
    -            self.write(chunk, callback)
    -        elif callback is not None:
    -            callback()
    -        return _dummy_future()
    -
    -    def write(self, chunk, callback=None):
    -        if self._expected_content_remaining is not None:
    -            self._expected_content_remaining -= len(chunk)
    -            if self._expected_content_remaining < 0:
    -                self._error = httputil.HTTPOutputError(
    -                    "Tried to write more data than Content-Length")
    -                raise self._error
    -        self._write_buffer.append(chunk)
    -        if callback is not None:
    -            callback()
    -        return _dummy_future()
    -
    -    def finish(self):
    -        if (self._expected_content_remaining is not None and
    -                self._expected_content_remaining != 0):
    -            self._error = httputil.HTTPOutputError(
    -                "Tried to write %d bytes less than Content-Length" %
    -                self._expected_content_remaining)
    -            raise self._error
    -        self._finished = True
    -
    -
    -class _WSGIRequestContext(object):
    -    def __init__(self, remote_ip, protocol):
    -        self.remote_ip = remote_ip
    -        self.protocol = protocol
    -
    -    def __str__(self):
    -        return self.remote_ip
    -
    -
    -class WSGIAdapter(object):
    -    """Converts a `tornado.web.Application` instance into a WSGI application.
    -
    -    Example usage::
    -
    -        import tornado.web
    -        import tornado.wsgi
    -        import wsgiref.simple_server
    -
    -        class MainHandler(tornado.web.RequestHandler):
    -            def get(self):
    -                self.write("Hello, world")
    -
    -        if __name__ == "__main__":
    -            application = tornado.web.Application([
    -                (r"/", MainHandler),
    -            ])
    -            wsgi_app = tornado.wsgi.WSGIAdapter(application)
    -            server = wsgiref.simple_server.make_server('', 8888, wsgi_app)
    -            server.serve_forever()
    -
-    See the `appengine demo
-    <https://github.com/tornadoweb/tornado/tree/stable/demos/appengine>`_
    -    for an example of using this module to run a Tornado app on Google
    -    App Engine.
    -
    -    In WSGI mode asynchronous methods are not supported.  This means
    -    that it is not possible to use `.AsyncHTTPClient`, or the
    -    `tornado.auth` or `tornado.websocket` modules.
    -
    -    In multithreaded WSGI servers on Python 3, it may be necessary to
    -    permit `asyncio` to create event loops on any thread. Run the
    -    following at startup (typically import time for WSGI
    -    applications)::
    -
    -        import asyncio
    -        from tornado.platform.asyncio import AnyThreadEventLoopPolicy
    -        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
    -
    -    .. versionadded:: 4.0
    -
    -    .. deprecated:: 5.1
    -
    -       This class is deprecated and will be removed in Tornado 6.0.
    -       Use Tornado's `.HTTPServer` instead of a WSGI container.
    -    """
    -    def __init__(self, application):
    -        warnings.warn("WSGIAdapter is deprecated, use Tornado's HTTPServer instead",
    -                      DeprecationWarning)
    -        if isinstance(application, WSGIApplication):
    -            self.application = lambda request: web.Application.__call__(
    -                application, request)
    -        else:
    -            self.application = application
    -
    -    def __call__(self, environ, start_response):
    -        method = environ["REQUEST_METHOD"]
    -        uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    -        uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
    -        if environ.get("QUERY_STRING"):
    -            uri += "?" + environ["QUERY_STRING"]
    -        headers = httputil.HTTPHeaders()
    -        if environ.get("CONTENT_TYPE"):
    -            headers["Content-Type"] = environ["CONTENT_TYPE"]
    -        if environ.get("CONTENT_LENGTH"):
    -            headers["Content-Length"] = environ["CONTENT_LENGTH"]
    -        for key in environ:
    -            if key.startswith("HTTP_"):
    -                headers[key[5:].replace("_", "-")] = environ[key]
    -        if headers.get("Content-Length"):
    -            body = environ["wsgi.input"].read(
    -                int(headers["Content-Length"]))
    -        else:
    -            body = b""
    -        protocol = environ["wsgi.url_scheme"]
    -        remote_ip = environ.get("REMOTE_ADDR", "")
    -        if environ.get("HTTP_HOST"):
    -            host = environ["HTTP_HOST"]
    -        else:
    -            host = environ["SERVER_NAME"]
    -        connection = _WSGIConnection(method, start_response,
    -                                     _WSGIRequestContext(remote_ip, protocol))
    -        request = httputil.HTTPServerRequest(
    -            method, uri, "HTTP/1.1", headers=headers, body=body,
    -            host=host, connection=connection)
    -        request._parse_body()
    -        self.application(request)
    -        if connection._error:
    -            raise connection._error
    -        if not connection._finished:
    -            raise Exception("request did not finish synchronously")
    -        return connection._write_buffer
    +# This function is like those in the tornado.escape module, but defined
    +# here to minimize the temptation to use it in non-wsgi contexts.
    +def to_wsgi_str(s: bytes) -> str:
    +    assert isinstance(s, bytes)
    +    return s.decode("latin1")
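The latin1 round trip this relies on is lossless, which is why to_wsgi_str can be a one-liner; a quick illustration:

    # Every byte value 0..255 maps to exactly one latin1 code point, so a
    # decode/encode cycle through latin1 preserves arbitrary binary data.
    raw = bytes(range(256))
    assert raw.decode("latin1").encode("latin1") == raw
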
     
     
     class WSGIContainer(object):
    @@ -281,31 +87,44 @@ class WSGIContainer(object):
         Tornado and WSGI apps in the same server.  See
         https://github.com/bdarnell/django-tornado-demo for a complete example.
         """
    -    def __init__(self, wsgi_application):
    +
    +    def __init__(self, wsgi_application: "WSGIAppType") -> None:
             self.wsgi_application = wsgi_application
     
    -    def __call__(self, request):
    -        data = {}
    -        response = []
    +    def __call__(self, request: httputil.HTTPServerRequest) -> None:
    +        data = {}  # type: Dict[str, Any]
    +        response = []  # type: List[bytes]
     
    -        def start_response(status, response_headers, exc_info=None):
    +        def start_response(
    +            status: str,
    +            headers: List[Tuple[str, str]],
    +            exc_info: Optional[
    +                Tuple[
    +                    "Optional[Type[BaseException]]",
    +                    Optional[BaseException],
    +                    Optional[TracebackType],
    +                ]
    +            ] = None,
    +        ) -> Callable[[bytes], Any]:
                 data["status"] = status
    -            data["headers"] = response_headers
    +            data["headers"] = headers
                 return response.append
    +
             app_response = self.wsgi_application(
    -            WSGIContainer.environ(request), start_response)
    +            WSGIContainer.environ(request), start_response
    +        )
             try:
                 response.extend(app_response)
                 body = b"".join(response)
             finally:
                 if hasattr(app_response, "close"):
    -                app_response.close()
    +                app_response.close()  # type: ignore
             if not data:
                 raise Exception("WSGI app did not call start_response")
     
    -        status_code, reason = data["status"].split(' ', 1)
    -        status_code = int(status_code)
    -        headers = data["headers"]
    +        status_code_str, reason = data["status"].split(" ", 1)
    +        status_code = int(status_code_str)
    +        headers = data["headers"]  # type: List[Tuple[str, str]]
             header_set = set(k.lower() for (k, v) in headers)
             body = escape.utf8(body)
             if status_code != 304:
    @@ -320,12 +139,13 @@ class WSGIContainer(object):
             header_obj = httputil.HTTPHeaders()
             for key, value in headers:
                 header_obj.add(key, value)
    +        assert request.connection is not None
             request.connection.write_headers(start_line, header_obj, chunk=body)
             request.connection.finish()
             self._log(status_code, request)
     
         @staticmethod
    -    def environ(request):
    +    def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
             """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
             """
             hostport = request.host.split(":")
    @@ -338,8 +158,9 @@ class WSGIContainer(object):
             environ = {
                 "REQUEST_METHOD": request.method,
                 "SCRIPT_NAME": "",
    -            "PATH_INFO": to_wsgi_str(escape.url_unescape(
    -                request.path, encoding=None, plus=False)),
    +            "PATH_INFO": to_wsgi_str(
    +                escape.url_unescape(request.path, encoding=None, plus=False)
    +            ),
                 "QUERY_STRING": request.query,
                 "REMOTE_ADDR": request.remote_ip,
                 "SERVER_NAME": host,
    @@ -361,7 +182,7 @@ class WSGIContainer(object):
                 environ["HTTP_" + key.replace("-", "_").upper()] = value
             return environ
     
    -    def _log(self, status_code, request):
    +    def _log(self, status_code: int, request: httputil.HTTPServerRequest) -> None:
             if status_code < 400:
                 log_method = access_log.info
             elif status_code < 500:
    @@ -369,8 +190,9 @@ class WSGIContainer(object):
             else:
                 log_method = access_log.error
             request_time = 1000.0 * request.request_time()
    -        summary = request.method + " " + request.uri + " (" + \
    -            request.remote_ip + ")"
    +        assert request.method is not None
    +        assert request.uri is not None
    +        summary = request.method + " " + request.uri + " (" + request.remote_ip + ")"
             log_method("%d %s %.2fms", status_code, summary, request_time)
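With WSGIAdapter removed, WSGIContainer is the one remaining integration point. A minimal sketch of hosting a WSGI callable on Tornado's HTTP server; the app body and port are illustrative:

    import tornado.ioloop
    import tornado.web
    import tornado.wsgi

    def hello_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello from WSGI\n"]

    container = tornado.wsgi.WSGIContainer(hello_app)
    app = tornado.web.Application([
        (r".*", tornado.web.FallbackHandler, dict(fallback=container)),
    ])
    # app.listen(8888); tornado.ioloop.IOLoop.current().start()
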
     
     
    diff --git a/server/www/teleport/static/img/login/login-bg-0.png b/server/www/teleport/static/img/login/login-bg-0.png
    index e406241..865c4e8 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-0.png and b/server/www/teleport/static/img/login/login-bg-0.png differ
    diff --git a/server/www/teleport/static/img/login/login-bg-2.png b/server/www/teleport/static/img/login/login-bg-2.png
    index 2a0ce7a..a1faea5 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-2.png and b/server/www/teleport/static/img/login/login-bg-2.png differ
    diff --git a/server/www/teleport/static/img/login/login-bg-3.png b/server/www/teleport/static/img/login/login-bg-3.png
    index 51558b1..4252c6d 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-3.png and b/server/www/teleport/static/img/login/login-bg-3.png differ
    diff --git a/server/www/teleport/static/img/login/login-bg-4.png b/server/www/teleport/static/img/login/login-bg-4.png
    index 4c9f133..3265718 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-4.png and b/server/www/teleport/static/img/login/login-bg-4.png differ
    diff --git a/server/www/teleport/static/img/login/login-bg-5.png b/server/www/teleport/static/img/login/login-bg-5.png
    index 8ab412a..b1bc506 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-5.png and b/server/www/teleport/static/img/login/login-bg-5.png differ
    diff --git a/server/www/teleport/static/img/login/login-bg-6.png b/server/www/teleport/static/img/login/login-bg-6.png
    index a1faea5..d0375d3 100644
    Binary files a/server/www/teleport/static/img/login/login-bg-6.png and b/server/www/teleport/static/img/login/login-bg-6.png differ
    diff --git a/server/www/teleport/static/js/asset/account-group-list.js b/server/www/teleport/static/js/asset/account-group-list.js
    index ea6a78e..9f8dac5 100644
    --- a/server/www/teleport/static/js/asset/account-group-list.js
    +++ b/server/www/teleport/static/js/asset/account-group-list.js
    @@ -178,7 +178,7 @@ $app.on_table_groups_cell_created = function (tbl, row_id, col_key, cell_obj) {
                 $app.dlg_edit_group.show_edit(_row_id);
             });
             cell_obj.find('[data-btn-remove]').click(function () {
    -            console.log(_row_id);
    +            // console.log(_row_id);
                 $app.on_btn_remove_group_click(_row_id);
             });
         }
    @@ -244,7 +244,7 @@ $app.on_table_groups_render_created = function (render) {
             if (_.isUndefined(fields.members))
                 return '';
     
    -        console.log(fields.members);
    +        // console.log(fields.members);
     
             var ret = [];
             for (var i = 0; i < fields.members.length; ++i) {
    @@ -516,7 +516,7 @@ $app.create_dlg_edit_group = function () {
         };
     
         dlg.on_save = function () {
    -        console.log('---save.');
    +        // console.log('---save.');
             dlg.hide_error();
             if (!dlg.check_input())
                 return;
    diff --git a/server/www/teleport/static/js/asset/host-group-list.js b/server/www/teleport/static/js/asset/host-group-list.js
    index f3c5db5..1d5a2a3 100644
    --- a/server/www/teleport/static/js/asset/host-group-list.js
    +++ b/server/www/teleport/static/js/asset/host-group-list.js
    @@ -174,7 +174,7 @@ $app.on_table_groups_cell_created = function (tbl, row_id, col_key, cell_obj) {
                 $app.dlg_edit_group.show_edit(_row_id);
             });
             cell_obj.find('[data-btn-remove]').click(function () {
    -            console.log(_row_id);
    +            // console.log(_row_id);
                 $app.on_btn_remove_group_click(_row_id);
             });
         }
    @@ -258,8 +258,6 @@ $app.on_table_groups_render_created = function (render) {
             if (_.isUndefined(fields.members))
                 return '';
     
    -        console.log('xxx', fields.members);
    -
             var ret = [];
             for (var i = 0; i < fields.members.length; ++i) {
                 ret.push('
    ' + tp_second2str(tp_local2utc() - fields.time_begin) + ''; + return ' ' + tp_second2str(tp_timestamp_sec() - fields.time_begin) + ''; } else { if (fields.time_end === 0) { return ' 未知'; @@ -282,7 +282,7 @@ $app.on_table_host_render_created = function (render) { // _style = 'warning'; // else if (fields.state === TP_SESS_STAT_STARTED) // _style = 'primary'; - // return ' ' + tp_second2str(tp_local2utc() - fields.time_begin) + ''; + // return ' ' + tp_second2str(tp_timestamp_sec() - fields.time_begin) + ''; // } else { // return tp_second2str(fields.time_end - fields.time_begin); // } @@ -363,83 +363,26 @@ $app.on_table_host_render_created = function (render) { }; $app.do_replay_rdp = function (record_id, user_username, acc_username, host_ip, time_begin) { + if(!$app.options.core_running) { + $tp.notify_error(tp_error_msg(TPE_NO_CORE_SERVER), '无法播放。'); + return; + } + + if(!$assist.check()) + return; + $assist.do_rdp_replay( - { - rid: record_id - // , web: $tp.web_server // + '/audit/get_rdp_record/' + record_id // 'http://' + ip + ':' + port + '/log/replay/rdp/' + record_id; - // , sid: Cookies.get('_sid') - , user: user_username - , acc: acc_username - , host: host_ip - , start: time_begin//tp_format_datetime(tp_utc2local(time_begin), 'yyyyMMdd-HHmmss') - } + record_id , function () { // func_success } , function (code, message) { - if (code === TPE_NO_ASSIST) + if (code === TPE_NO_ASSIST) { + $assist.errcode = TPE_NO_ASSIST; $assist.alert_assist_not_found(); + } else $tp.notify_error('播放RDP操作录像失败:' + tp_error_msg(code, message)); } ); }; - - -// $app.on_table_host_header_created = function (header) { -// $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { -// CALLBACK_STACK.create() -// .add(header._table_ctrl.load_data) -// .add(header._table_ctrl.reset_filters) -// .exec(); -// }); -// -// // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) -// header._table_ctrl.get_filter_ctrl('search').on_created(); -// }; - -// $app.get_selected_record = function (tbl) { -// var records = []; -// var _objs = $('#' + $app.table_record.dom_id + ' tbody tr td input[data-check-box]'); -// $.each(_objs, function (i, _obj) { -// if ($(_obj).is(':checked')) { -// var _row_data = tbl.get_row(_obj); -// records.push(_row_data.id); -// } -// }); -// return records; -// }; - -// $app.on_btn_remove_record_click = function () { -// var records = $app.get_selected_record($app.table_record); -// if (records.length === 0) { -// $tp.notify_error('请选择要删除的会话记录!'); -// return; -// } -// -// var _fn_sure = function (cb_stack, cb_args) { -// $tp.ajax_post_json('/user/remove-user', {users: users}, -// function (ret) { -// if (ret.code === TPE_OK) { -// cb_stack.add($app.check_host_all_selected); -// cb_stack.add($app.table_record.load_data); -// $tp.notify_success('删除用户账号操作成功!'); -// } else { -// $tp.notify_error('删除用户账号操作失败:' + tp_error_msg(ret.code, ret.message)); -// } -// -// cb_stack.exec(); -// }, -// function () { -// $tp.notify_error('网络故障,删除用户账号操作失败!'); -// cb_stack.exec(); -// } -// ); -// }; -// -// var cb_stack = CALLBACK_STACK.create(); -// $tp.dlg_confirm(cb_stack, { -// msg: '

    注意:删除操作不可恢复!!

    删除用户账号将同时将其从所在用户组中移除,并且删除所有分配给此用户的授权!

    如果您希望禁止某个用户登录本系统,可对其进行“禁用”操作!

    您确定要移除所有选定的 ' + user_list.length + '个 用户账号吗?

    ', -// fn_yes: _fn_sure -// }); -// }; diff --git a/server/www/teleport/static/js/audit/replay-ssh.js b/server/www/teleport/static/js/audit/replay-ssh.js index 0e82d68..a7f5104 100644 --- a/server/www/teleport/static/js/audit/replay-ssh.js +++ b/server/www/teleport/static/js/audit/replay-ssh.js @@ -56,7 +56,7 @@ $app.on_init = function (cb_stack) { $app.record_hdr.height = 24; console.log('header', $app.record_hdr); - $('#recorder-info').html(tp_format_datetime($app.record_hdr.start) + ': ' + $app.record_hdr.user_name + '@' + $app.record_hdr.client_ip + ' 访问 ' + $app.record_hdr.account + '@' + $app.record_hdr.conn_ip + ':' + $app.record_hdr.conn_port); + $('#recorder-info').html(tp_format_datetime($app.record_hdr.start) + ',用户' + $app.record_hdr.user_name + '(来自' + $app.record_hdr.client_ip + ') 访问远程主机 ' + $app.record_hdr.account + '@' + $app.record_hdr.conn_ip + ':' + $app.record_hdr.conn_port); $app.req_record_data(record_id, 0); @@ -163,12 +163,13 @@ $app.on_init = function (cb_stack) { $app.req_record_data = function (record_id, offset) { $tp.ajax_post_json('/audit/get-record-data', {protocol: TP_PROTOCOL_TYPE_SSH, id: record_id, offset: offset}, function (ret) { - if (ret.code === TPE_OK) { - // console.log('data', ret.data); + if (ret.code === TPE_OK || ret.code === TPE_NO_MORE_DATA) { + console.log('data', ret.data); $app.record_data = $app.record_data.concat(ret.data.data_list); $app.record_data_offset += ret.data.data_size; - if ($app.record_data.length < $app.record_hdr.pkg_count) { + //if ($app.record_data.length < $app.record_hdr.pkg_count) { + if(ret.code === TPE_OK) { $app.req_record_data(record_id, $app.record_data_offset); } } else { @@ -252,7 +253,8 @@ $app.do_play = function() { $app.player_console_term.write(tp_base64_decode(play_data.d)); } - if (($app.played_pkg_count + 1) === $app.record_hdr.pkg_count) { + //if (($app.played_pkg_count + 1) === $app.record_hdr.pkg_count) { + if (($app.played_pkg_count + 1) === $app.record_data.length) { $app.dom.progress.val(100); $app.dom.status.text('播放完成'); $app.dom.time.text(parseInt($app.record_hdr.time_used / 1000) + '秒'); @@ -287,7 +289,8 @@ $app.do_play = function() { $app.dom.time.text(temp + '/' + parseInt($app.record_hdr.time_used / 1000) + '秒'); // if all packages played - if ($app.played_pkg_count >= $app.record_hdr.pkg_count) { + // if ($app.played_pkg_count >= $app.record_hdr.pkg_count) { + if ($app.played_pkg_count >= $app.record_data.length) { $app.dom.progress.val(100); $app.dom.status.text('播放完成'); $app.dom.time.text(parseInt($app.record_hdr.time_used / 1000) + '秒'); diff --git a/server/www/teleport/static/js/auth/login.js b/server/www/teleport/static/js/auth/login.js index 653cd5b..bc4f9fc 100644 --- a/server/www/teleport/static/js/auth/login.js +++ b/server/www/teleport/static/js/auth/login.js @@ -20,9 +20,8 @@ var SLOGAN = [ '追求进步,
    不求完美。' ]; - $app.on_init = function (cb_stack) { - $app.login_type = TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA; + $app.login_type = -1; $app.dom = { slogan: $('#msg-slogan'), auth_box: $('#auth-box-container'), @@ -47,9 +46,11 @@ $app.on_init = function (cb_stack) { $app.last_img_idx = 0; $app.last_slogan_idx = 0; - // console.log($app.options); if ($app.options.username.length > 0) { $app.dom.input_username.val($app.options.username); + $app.dom.input_password.focus(); + } else { + $app.dom.input_username.focus(); } $app.dom.captcha_image.attr('src', '/auth/captcha?h=36&rnd=' + Math.random()); @@ -59,18 +60,10 @@ $app.on_init = function (cb_stack) { //$app.init_slogan(); $app.dom.btn_login_type_password.click(function () { - $app.login_type = TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA; - $app.dom.btn_login_type_oath.removeClass('selected'); - $(this).addClass('selected'); - $app.dom.area_oath.slideUp(100); - $app.dom.area_captcha.slideDown(100); + $app.switch_login_type(TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA, true); }); $app.dom.btn_login_type_oath.click(function () { - $app.login_type = TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH; - $app.dom.btn_login_type_password.removeClass('selected'); - $(this).addClass('selected'); - $app.dom.area_oath.slideDown(100); - $app.dom.area_captcha.slideUp(100); + $app.switch_login_type(TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH, true); }); $app.dom.btn_login.click($app.login_account); @@ -108,14 +101,43 @@ $app.on_init = function (cb_stack) { }); if ($app.options.default_auth & TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA) { - $app.dom.btn_login_type_password.click(); + $app.switch_login_type(TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA, false); } else if ($app.options.default_auth & TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH) { - $app.dom.btn_login_type_oath.click(); + $app.switch_login_type(TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH, false); } cb_stack.exec(); }; +$app.switch_login_type = function(login_type, animate) { + if($app.login_type === login_type) + return; + + if(login_type === TP_LOGIN_AUTH_USERNAME_PASSWORD_CAPTCHA) { + $app.login_type = login_type; + $app.dom.btn_login_type_oath.removeClass('selected'); + $app.dom.btn_login_type_password.addClass('selected'); + if(animate) { + $app.dom.area_oath.slideUp(100); + $app.dom.area_captcha.slideDown(100); + } else { + $app.dom.area_oath.hide(); + $app.dom.area_captcha.show(); + } + } else if(login_type === TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH) { + $app.login_type = login_type; + $app.dom.btn_login_type_password.removeClass('selected'); + $app.dom.btn_login_type_oath.addClass('selected'); + if(animate) { + $app.dom.area_oath.slideDown(100); + $app.dom.area_captcha.slideUp(100); + } else { + $app.dom.area_oath.show(); + $app.dom.area_captcha.hide(); + } + } +}; + $app.hide_op_box = function () { $app.dom.message.hide(); }; @@ -157,7 +179,7 @@ $app.login_account = function () { } } else if ($app.login_type === TP_LOGIN_AUTH_USERNAME_PASSWORD_OATH) { var test_oath = '' + parseInt(str_oath); - if(str_oath.length === 6) { + if (str_oath.length === 6) { for (; ;) { if (test_oath.length < 6) test_oath = '0' + test_oath; @@ -216,8 +238,14 @@ $app.do_account_login = function (username, password, captcha, oath, is_remember window.location.href = $app.options.ref; } else { $app.hide_op_box(); - $app.show_op_box('error', '登录失败:' + tp_error_msg(ret.code, ret.message)); $app.dom.captcha_image.attr('src', '/auth/captcha?h=36&rnd=' + Math.random()); + + if (ret.code === TPE_EXPIRED) { + // must change password before login. 
+ window.location.href = '/user/change-expired-password?username=' + encodeURIComponent(username); + } + + $app.show_op_box('error', '登录失败:' + tp_error_msg(ret.code, ret.message)); console.log(ret); } @@ -239,8 +267,8 @@ $app.on_screen_resize = function () { $app.init_blur_bg = function () { $app.last_img_idx = Math.floor(Math.random() * (BLUR_BG_IMG.length)); $('body').backgroundBlur({ - imageURL: '/static/img/login/' + BLUR_BG_IMG[$app.last_img_idx] + '?' + Math.random(), - blurAmount: 15, + imageURL: '/static/img/login/' + BLUR_BG_IMG[$app.last_img_idx], // + '?' + Math.random(), + blurAmount: 8, duration: 1000, imageClass: 'bg-blur', overlayClass: 'bg-blur-overlay' @@ -250,13 +278,7 @@ $app.init_blur_bg = function () { setTimeout(function () { $app.init_slogan(); - }, 2000); - - setTimeout(function () { - $app.dom.auth_box.fadeIn(800, function () { - $app.dom.input_username.focus(); - }); - }, 300); + }, 1200); }; $app._update_blur_bg = function () { @@ -267,14 +289,14 @@ $app._update_blur_bg = function () { break; } } - $('body').backgroundBlur('/static/img/login/' + BLUR_BG_IMG[$app.last_img_idx] + '?' + Math.random()); + $('body').backgroundBlur('/static/img/login/' + BLUR_BG_IMG[$app.last_img_idx]);// + '?' + Math.random()); }; $app.init_slogan = function () { $app.last_slogan_idx = Math.floor(Math.random() * SLOGAN.length); $app.dom.slogan.html(SLOGAN[$app.last_slogan_idx]).fadeIn(1000); - setInterval($app._update_slogan, 8100); + setInterval($app._update_slogan, 12100); }; $app._update_slogan = function () { diff --git a/server/www/teleport/static/js/dashboard/dashboard.js b/server/www/teleport/static/js/dashboard/dashboard.js index 761bbc3..5ef4094 100644 --- a/server/www/teleport/static/js/dashboard/dashboard.js +++ b/server/www/teleport/static/js/dashboard/dashboard.js @@ -48,7 +48,7 @@ $app.init_sys_status_info = function (data) { fontSize: 11, fontFamily: 'Monaco, Lucida Console, Consolas, Courier', formatter: function (value, index) { - return tp_format_datetime_ms(tp_utc2local_ms(value), 'HH:mm'); + return tp_format_datetime(value, 'HH:mm'); } } }; @@ -95,8 +95,8 @@ $app.init_sys_status_info = function (data) { $app.bar_cpu_user = []; $app.bar_cpu_sys = []; for (i = 0; i < data.length; i++) { - $app.bar_cpu_user.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].cpu.u]}); - $app.bar_cpu_sys.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].cpu.s]}); + $app.bar_cpu_user.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].cpu.u]}); + $app.bar_cpu_sys.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].cpu.s]}); } var clr_cpu_user = '#e2524c'; @@ -191,7 +191,7 @@ $app.init_sys_status_info = function (data) { $app.bar_mem_used = []; for (i = 0; i < data.length; i++) { - $app.bar_mem_used.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, tp_digital_precision(data[i].mem.u * 100 / data[i].mem.t, 1)]}); + $app.bar_mem_used.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, tp_digital_precision(data[i].mem.u * 100 / data[i].mem.t, 1)]}); } var clr_mem = '#5671e2'; @@ -269,8 +269,8 @@ $app.init_sys_status_info = function (data) { $app.bar_net_recv = []; $app.bar_net_sent = []; for (i = 0; i < data.length; i++) { - $app.bar_net_recv.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].net.r]}); - 
$app.bar_net_sent.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].net.s]}); + $app.bar_net_recv.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].net.r]}); + $app.bar_net_sent.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].net.s]}); } var clr_net_sent = '#558c5a'; @@ -342,8 +342,8 @@ $app.init_sys_status_info = function (data) { $app.bar_disk_read = []; $app.bar_disk_write = []; for (i = 0; i < data.length; i++) { - $app.bar_disk_read.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].disk.r]}); - $app.bar_disk_write.push({name: tp_format_datetime_ms(tp_utc2local_ms(data[i].t), 'HH:mm:ss'), value: [data[i].t, data[i].disk.w]}); + $app.bar_disk_read.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].disk.r]}); + $app.bar_disk_write.push({name: tp_format_datetime(data[i].t, 'HH:mm:ss'), value: [data[i].t, data[i].disk.w]}); } var clr_disk_read = '#558c5a'; @@ -489,31 +489,31 @@ $app.init_ws = function () { if (t.method === 'subscribe' && t.param === 'sys_status') { $app.bar_cpu_user.shift(); - $app.bar_cpu_user.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.cpu.u]}); + $app.bar_cpu_user.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.cpu.u]}); $app.bar_cpu_sys.shift(); - $app.bar_cpu_sys.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.cpu.s]}); + $app.bar_cpu_sys.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.cpu.s]}); $app.bar_cpu.setOption( {series: [{data: $app.bar_cpu_sys}, {data: $app.bar_cpu_user}]} ); $app.bar_mem_used.shift(); - $app.bar_mem_used.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, Math.round(t.data.mem.u / t.data.mem.t * 100, 2)]}); + $app.bar_mem_used.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, Math.round(t.data.mem.u / t.data.mem.t * 100, 2)]}); $app.bar_mem.setOption( {series: [{data: $app.bar_mem_used}]} ); $app.bar_net_recv.shift(); - $app.bar_net_recv.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.net.r]}); + $app.bar_net_recv.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.net.r]}); $app.bar_net_sent.shift(); - $app.bar_net_sent.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.net.s]}); + $app.bar_net_sent.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.net.s]}); $app.bar_net.setOption( {series: [{data: $app.bar_net_sent}, {data: $app.bar_net_recv}]} ); $app.bar_disk_read.shift(); - $app.bar_disk_read.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.disk.r]}); + $app.bar_disk_read.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.disk.r]}); $app.bar_disk_write.shift(); - $app.bar_disk_write.push({name: tp_format_datetime_ms(tp_utc2local_ms(t.data.t), 'HH:mm:ss'), value: [t.data.t, t.data.disk.w]}); + $app.bar_disk_write.push({name: tp_format_datetime(t.data.t, 'HH:mm:ss'), value: [t.data.t, t.data.disk.w]}); $app.bar_disk.setOption( {series: [{data: $app.bar_disk_read}, {data: $app.bar_disk_write}]} ); diff --git a/server/www/teleport/static/js/maintenance/upgrade.js 
b/server/www/teleport/static/js/maintenance/upgrade.js new file mode 100644 index 0000000..8d8e4bb --- /dev/null +++ b/server/www/teleport/static/js/maintenance/upgrade.js @@ -0,0 +1,317 @@ +"use strict"; + +$app.on_init = function (cb_stack, cb_args) { + $app.dom = { + btn_upgrade: $('#btn-upgrade'), + steps_detail: $('#steps-detail'), + // db_info: $('#db-info'), + // account: $('#sysadmin-account'), + // email: $('#sysadmin-email'), + // password: $('#password'), + // password2: $('#password-again'), + message: $('#message'), + step2: $('#step2') + }; + + $app._make_info = function (key, value) { + return '' + key + ':' + value + ''; + }; + + // var html = []; + // if ($app.options.db.type === DB_TYPE_SQLITE) { + // html.push($app._make_info('数据库类型', 'SQLite')); + // html.push($app._make_info('数据库文件', $app.options.db.sqlite_file)); + // } else if ($app.options.db.type === DB_TYPE_MYSQL) { + // html.push($app._make_info('数据库类型', 'MySQL')); + // html.push($app._make_info('MySQL主机', $app.options.db.mysql_host)); + // html.push($app._make_info('MySQL端口', $app.options.db.mysql_port)); + // html.push($app._make_info('数据库名称', $app.options.db.mysql_db)); + // html.push($app._make_info('用户名', $app.options.db.mysql_user)); + // + // var _t = []; + // _t.push('
    '); + // _t.push(' 注意:请确保您在执行后续创建操作之前,已经在MySQL中使用 UTF8字符集 创建了库“'); + // _t.push($app.options.db.mysql_db); + // _t.push('”,并且用户“'); + // _t.push($app.options.db.mysql_user); + // _t.push('”拥有在此库创建表的权限!'); + // _t.push('
    ');
+    // $app.dom.db_info.after(_t.join(''));
+    // } else {
+    //     html.push($app._make_info('数据库类型', '未知的数据库类型,请检查您的配置文件!'));
+    //     $app.dom.btn_upgrade.attr('disabled', 'disabled').hide();
+    // }
+    // $app.dom.db_info.append(html.join(''));
+
+    $app.hide_op_box = function () {
+        $app.dom.message.hide();
+    };
+
+    $app.show_op_box = function (op_type, op_msg) {
+        $app.dom.message.html(op_msg);
+        $app.dom.message.removeClass().addClass('op_box op_' + op_type);
+        $app.dom.message.show();
+    };
+
+    $app.dom.btn_upgrade.click(function () {
+        // var str_account = $app.dom.account.val();
+        // var str_email = $app.dom.email.val();
+        // var str_password = $app.dom.password.val();
+        // var str_password2 = $app.dom.password2.val();
+        //
+        // if (str_account.length === 0) {
+        //     $app.show_op_box('error', '请填写系统管理员登录账号名称!');
+        //     $app.dom.account.focus();
+        //     return;
+        // }
+        // if (str_email.length === 0) {
+        //     $app.show_op_box('error', '请填写系统管理员的电子邮件地址!');
+        //     $app.dom.email.focus();
+        //     return;
+        // }
+        // if (!tp_is_email(str_email)) {
+        //     $app.show_op_box('error', '电子邮件地址格式错啦,你会收不到邮件的!');
+        //     $app.dom.email.focus();
+        //     return;
+        // }
+        // if (str_password.length === 0) {
+        //     $app.show_op_box('error', '请设置系统管理员登录密码!');
+        //     $app.dom.password.focus();
+        //     return;
+        // }
+        // if (str_password2.length === 0) {
+        //     $app.show_op_box('error', '请再次输入系统管理员登录密码!');
+        //     $app.dom.password.focus();
+        //     return;
+        // }
+        // if (str_password !== str_password2) {
+        //     $app.show_op_box('error', '两次输入的密码不一致!');
+        //     $app.dom.password2.focus().select();
+        //     return;
+        // }
+
+        $app.dom.btn_upgrade.attr('disabled', 'disabled').hide();
+        $app.hide_op_box();
+        $app.dom.steps_detail.show();
+
+        $tp.ajax_post_json('/maintenance/rpc', {cmd: 'upgrade_db'},
+            function (ret) {
+                if (ret.code === TPE_OK) {
+                    var cb_stack = CALLBACK_STACK.create();
+                    cb_stack
+                        .add_delay(500, $app.get_task_ret, {task_id: ret.data.task_id})
+                        .exec();
+                }
+            },
+            function () {
+                // $app.show_message('error', '无法连接到服务器!');
+                $app.show_op_box('error', '无法连接到服务器!');
+            }
+        );
+    });
+
+    $app.get_task_ret = function (cb_stack, cb_args) {
+        var task_id = cb_args.task_id || 0;
+        if (task_id === 0) {
+            console.log('task-id', task_id);
+            return;
+        }
+
+        $tp.ajax_post_json('/maintenance/rpc', {cmd: 'get_task_ret', 'tid': task_id},
+            function (ret) {
+                if (ret.code === TPE_OK) {
+                    // show step progress.
+                    var all_ok = true;
+                    var steps = ret.data.steps;
+                    $app.dom.steps_detail.empty();
+
+                    var html = [];
+                    var icon_class = '';
+                    var err_class = '';
+                    for (var i = 0; i < steps.length; ++i) {
+                        if (steps[i].stat === 0)
+                            icon_class = 'fa-check';
+                        else
+                            icon_class = 'fa-cog fa-spin';
+
+                        if (steps[i].code !== 0) {
+                            icon_class = 'fa-exclamation-circle';
+                            err_class = ' class="error"';
+                            steps[i].msg += ' 失败!';
+                            all_ok = false;
+                        }
+                        else {
+                            err_class = '';
+                        }
+
+                        html.push('<li' + err_class + '><i class="fa ' + icon_class + '"></i> ');
+                        html.push(steps[i].msg);
+                        html.push('</li>')
+                    }
+                    $app.dom.steps_detail.html(html.join(''));
+                    $('html').animate({scrollTop: $(document).height()}, 300);
+
+                    if (!ret.data.running) {
+                        if (all_ok) {
+                            $tp.ajax_post_json('/auth/do-logout', {},
+                                function () {
+                                },
+                                function () {
+                                }
+                            );
+
+                            $app.dom.step2.show('fast', function () {
+                                // Make sure the page is scrolled to the very bottom so the next-step hint is visible.
+                                $('html').animate({scrollTop: $(document).height()}, 300);
+                            });
+                        }
+                        return;
+                    }
+
+                    cb_stack
+                        .add_delay(500, $app.get_task_ret, {task_id: task_id})
+                        .exec();
+                }
+            },
+            function () {
+                $app.show_op_box('error', '无法连接到服务器!');
+            }
+        );
+    };
+
+    cb_stack.exec();
+};
+
+
+ywl.on_init = function (cb_stack, cb_args) {
+    ywl.dom = {
+        btn_upgrade_db: $('#btn-upgrade-db'),
+        steps_detail: $('#steps-detail')
+    };
+
+    ywl.dom.btn_upgrade_db.click(function () {
+        ywl.dom.btn_upgrade_db.attr('disabled', 'disabled').hide();
+        ywl.dom.steps_detail.show();
+
+        console.log('upgrade-db-click');
+        ywl.ajax_post_json('/maintenance/rpc', {cmd: 'upgrade_db'},
+            function (ret) {
+                console.log('upgrade-db:', ret);
+                if (ret.code === 0) {
+                    var cb_stack = CALLBACK_STACK.create();
+                    cb_stack
+                        .add(ywl.get_task_ret, {task_id: ret.data.task_id})
+                        .add(ywl.delay_exec, {delay_ms: 500})
+                        .exec();
+                }
+            },
+            function () {
+                ywl.show_message('error', '无法连接到服务器!');
+            }
+        );
+    });
+
+    ywl.get_task_ret = function (cb_stack, cb_args) {
+        var task_id = cb_args.task_id || 0;
+        if (task_id === 0) {
+            console.log('task-id', task_id);
+            return;
+        }
+
+        ywl.ajax_post_json('/maintenance/rpc', {cmd: 'get_task_ret', 'tid': task_id},
+            function (ret) {
+                console.log('get_task_ret:', ret);
+                if (ret.code === 0) {
+                    // show step progress.
+                    var steps = ret.data.steps;
+                    ywl.dom.steps_detail.empty();
+
+                    var html = [];
+                    var icon_class = '';
+                    var err_class = '';
+                    for (var i = 0; i < steps.length; ++i) {
+                        if (steps[i].code !== 0) {
+                            err_class = ' class="error"';
+                            icon_class = 'fa-times-circle';
+                        }
+                        else {
+                            err_class = '';
+                            icon_class = 'fa-check';
+                        }
+
+                        if (steps[i].stat !== 0)
+                            icon_class = 'fa-cog fa-spin';
+
+                        html.push('<li' + err_class + '><i class="fa ' + icon_class + '"></i> ');
+                        html.push(steps[i].msg);
+                        html.push('</li>')
+                    }
+                    ywl.dom.steps_detail.html(html.join(''));
+
+                    if (!ret.data.running) {
+                        $('#step2').show('fast');
+                        return;
+                    }
+
+                    cb_stack
+                        .add(ywl.get_task_ret, {task_id: task_id})
+                        .add(ywl.delay_exec, {delay_ms: 500})
+                        .exec();
+                }
+            },
+            function () {
+                ywl.show_message('error', '无法连接到服务器!');
+            }
+        );
+    };
+
+    cb_stack.exec();
+};
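Both the new $app implementation and the legacy ywl implementation above follow the same pattern: POST {cmd: 'upgrade_db'} to /maintenance/rpc to start a server-side task, then poll {cmd: 'get_task_ret'} every 500 ms, re-rendering ret.data.steps each round, until ret.data.running turns false. A minimal standalone sketch of that loop, using plain jQuery and setTimeout instead of teleport's CALLBACK_STACK helper, and assuming the endpoint accepts a JSON body as $tp.ajax_post_json suggests (poll_task and on_done are illustrative names, not part of the codebase):

function poll_task(task_id, on_done) {
    $.ajax({
        type: 'POST',
        url: '/maintenance/rpc',
        contentType: 'application/json',
        dataType: 'json',
        data: JSON.stringify({cmd: 'get_task_ret', tid: task_id}),
        success: function (ret) {
            if (ret.code !== TPE_OK || !ret.data.running)
                return on_done(ret);   // error or finished: hand back the final step list.
            // Still running: a real caller would render ret.data.steps here,
            // then ask again after the same 500 ms delay used above.
            setTimeout(function () { poll_task(task_id, on_done); }, 500);
        },
        error: function () {
            on_done({code: -1});       // network error: give up.
        }
    });
}
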
diff --git a/server/www/teleport/static/js/ops/remote-list.js b/server/www/teleport/static/js/ops/remote-list.js
index 6ecf137..b2eb43b 100644
--- a/server/www/teleport/static/js/ops/remote-list.js
+++ b/server/www/teleport/static/js/ops/remote-list.js
@@ -8,7 +8,7 @@ $app.on_init = function (cb_stack) {
         // , group_selected: $('#group-selected')
     };

-    console.log($app.options);
+    // console.log($app.options);
     if(!$app.options.core_cfg.detected) {
         $tp.notify_error('核心服务未启动,无法进行远程连接!');
         cb_stack.exec();
diff --git a/server/www/teleport/static/js/ops/session-list.js b/server/www/teleport/static/js/ops/session-list.js
index eccc26a..098e6be 100644
--- a/server/www/teleport/static/js/ops/session-list.js
+++ b/server/www/teleport/static/js/ops/session-list.js
@@ -283,7 +283,7 @@ $app.on_table_session_render_created = function (render) {
     };

     render.time_begin = function (row_id, fields) {
-        return tp_format_datetime(tp_utc2local(fields.time_begin), 'MM-dd HH:mm:ss');
+        return tp_format_datetime(fields.time_begin, 'MM-dd HH:mm:ss');
     };

     render.time_cost = function (row_id, fields) {
@@ -293,7 +293,7 @@
             _style = 'warning';
         else if (fields.state === TP_SESS_STAT_STARTED)
             _style = 'primary';
-        return '<span class="label label-' + _style + '">' + tp_second2str(tp_local2utc() - fields.time_begin) + '</span>';
+        return '<span class="label label-' + _style + '">' + tp_second2str(tp_timestamp_sec() - fields.time_begin) + '</span>';
     } else {
         return tp_second2str(fields.time_end - fields.time_begin);
     }
@@ -402,7 +402,7 @@ $app.get_selected_sessions = function (tbl) {

 $app.on_btn_kill_sessions_click = function () {
     var sessions = $app.get_selected_sessions($app.table_session);
-    console.log(sessions);
+    // console.log(sessions);
     if (sessions.length === 0) {
         $tp.notify_error('请选择要强行终止的会话!');
         return;
diff --git a/server/www/teleport/static/js/system/config.js b/server/www/teleport/static/js/system/config.js
index 5ce2671..5f37f38 100644
--- a/server/www/teleport/static/js/system/config.js
+++ b/server/www/teleport/static/js/system/config.js
@@ -706,6 +706,7 @@ $app.create_config_storage = function () {
         storage_size: $('#storage-size'),
         btn_save: $('#btn-save-storage-config'),
         btn_cleanup: $('#btn-clear-storage'),
+        btn_export_db: $('#btn-export-db'),

         input_keep_log: $('#storage-keep-log'),
         input_keep_record: $('#storage-keep-record'),
@@ -735,6 +736,10 @@
         _sto.on_btn_cleanup();
     });

+    _sto.dom.btn_export_db.click(function () {
+        _sto.on_export_db();
+    });
+
     cb_stack.exec();
 };
@@ -835,5 +840,9 @@
     );
 };

+    _sto.on_export_db = function () {
+        window.location.href = '/system/export-db';
+    };
+
     return _sto;
 };
diff --git a/server/www/teleport/static/js/system/syslog.js b/server/www/teleport/static/js/system/syslog.js
index 90efb48..a624617 100644
--- a/server/www/teleport/static/js/system/syslog.js
+++ b/server/www/teleport/static/js/system/syslog.js
@@ -184,7 +184,7 @@ $app.on_table_host_render_created = function (render) {
     //    };

     render.log_time = function (row_id, fields) {
-        return tp_format_datetime(tp_utc2local(fields.log_time));
+        return tp_format_datetime(fields.log_time);
     };

     render.user = function (row, fields) {
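A recurring change in session-list.js and syslog.js above (and in controls.js below) is dropping client-side UTC-to-local conversion: timestamps from the server are now passed to tp_format_datetime as-is, and elapsed time is computed as tp_timestamp_sec() - fields.time_begin. The sketch below shows what such a helper could look like; it is an assumption for illustration, not teleport's actual tp_timestamp_sec implementation:

// Hypothetical stand-in: current Unix time in seconds. Because both operands
// are plain epoch seconds, the subtraction yields a duration that does not
// depend on the browser's timezone, which is what the renders above need.
function tp_timestamp_sec() {
    return Math.floor(Date.now() / 1000);
}
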
diff --git a/server/www/teleport/static/js/teleport/controls.js b/server/www/teleport/static/js/teleport/controls.js
index c673404..dd67f9a 100644
--- a/server/www/teleport/static/js/teleport/controls.js
+++ b/server/www/teleport/static/js/teleport/controls.js
@@ -28,7 +28,7 @@ $tp.create_table = function (options) {
     //   clear
     //   reset_filter

-    var _tbl = {};
+    let _tbl = {};

     // ID of the DOM element this table is bound to; used as a jQuery selector.
     _tbl.dom_id = options.dom_id;
@@ -223,7 +223,6 @@
     _tbl.load_data = function (cb_stack, cb_args) {
         cb_stack = cb_stack || CALLBACK_STACK.create();

-        //log.v('load table data.', cb_args);
         if (_tbl.paging_ctrl)
             _tbl.per_page = _tbl.paging_ctrl.get_per_page();
         else
@@ -238,7 +237,6 @@
                 console.error('filter', name, 'has no get_filter() interface.');
             }
             var _f = ctrl.get_filter();
-            // console.log('filter from', name, _f);
             $.each(_f, function (k, v) {
                 _filter[k] = v;
             });
@@ -949,7 +947,7 @@ $tp.create_table_render = function (tbl, on_created) {
     _tbl_render.date_time = function (row_id, fields) {
         if (0 === fields.timestamp)
             return '';
-        return '' + tp_format_datetime(tp_utc2local(fields.timestamp)) + '';
+        return '' + tp_format_datetime(fields.timestamp) + '';
     };

     _tbl_render.date_time_local = function (row_id, fields) {
diff --git a/server/www/teleport/static/js/tp-assist.js b/server/www/teleport/static/js/tp-assist.js
index 335a95e..2569ec2 100644
--- a/server/www/teleport/static/js/tp-assist.js
+++ b/server/www/teleport/static/js/tp-assist.js
@@ -3,11 +3,19 @@
 $tp.assist = {
     running: false,
     version: '',
+    ver_require: '0.0.0',
+    errcode: TPE_OK,
     api_url: '',
-    teleport_ip: window.location.hostname
+    teleport_ip: window.location.hostname,
+
+    dom: {
+        msg_box_title: null,
+        msg_box_info: null,
+        msg_box_desc: null
+    }
 };

-console.log(window.location.protocol);
+// console.log(window.location.protocol);

 // $assist is an alias of $tp.assist, for convenience.
 var $assist = $tp.assist;
@@ -58,23 +66,70 @@ $assist.init = function (cb_stack) {
     cb_stack.exec();
 };

+$assist.check = function () {
+    if (!$assist.running) {
+        $assist.errcode = TPE_NO_ASSIST;
+        $assist.alert_assist_not_found();
+        return false;
+    } else if (!$assist._version_compare()) {
+        $assist.errcode = TPE_OLD_ASSIST;
+        $assist.alert_assist_not_found();
+        return false;
+    }
+    return true;
+};
+
 $assist.alert_assist_not_found = function () {
+    if ($assist.errcode === TPE_NO_ASSIST) {
+        $assist.dom.msg_box_title.html('未检测到TELEPORT助手');
+        $assist.dom.msg_box_info.html('需要TELEPORT助手来辅助远程连接,请确认本机运行了TELEPORT助手!');
+        $assist.dom.msg_box_desc.html('如果您尚未运行TELEPORT助手,请 下载最新版TELEPORT助手安装包 并安装。一旦运行了TELEPORT助手,即可刷新页面,重新进行远程连接。');
+    } else if ($assist.errcode === TPE_OLD_ASSIST) {
+        $assist.dom.msg_box_title.html('TELEPORT助手需要升级');
+        $assist.dom.msg_box_info.html('检测到TELEPORT助手版本 v' + $assist.version + ',但需要最低版本 v' + $assist.ver_require + '。');
+        $assist.dom.msg_box_desc.html('请 下载最新版TELEPORT助手安装包 并安装。一旦升级了TELEPORT助手,即可刷新页面,重新进行远程连接。');
+    }
     $('#dialog-need-assist').modal();
 };

+// 1.2.0 > 1.1.0
+// 1.2 = 1.2.0
+// 2.1.1 > 1.2.9
+// 2.1.10 > 2.1.9
+$assist._version_compare = function () {
+    var ver_current = $assist.version.split(".");
+    var ver_require = $assist.ver_require.split(".");
+
+    var count = ver_current.length;
+    if (ver_require.length > count)
+        count = ver_require.length;
+
+    // Compare numerically, component by component; a missing component counts as 0.
+    var c, r;
+    for (var i = 0; i < count; ++i) {
+        c = parseInt(ver_current[i] || '0', 10);
+        r = parseInt(ver_require[i] || '0', 10);
+        if (c > r)
+            return true;
+        if (c < r)
+            return false;
+    }
+
+    return true;
+};
+
 $assist._make_message_box = function () {
     var _html = [
        '
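For reference, the comparison rules listed above $assist._version_compare can be exercised directly. A short usage sketch with illustrative values (not taken from the patch):

// '2.1.10' vs '2.1.9': components are compared numerically, so 10 > 9.
$assist.version = '2.1.10';
$assist.ver_require = '2.1.9';
console.log($assist._version_compare());   // true

// '1.2' vs '1.2.0': the missing component counts as 0, so the versions are equal.
$assist.version = '1.2';
$assist.ver_require = '1.2.0';
console.log($assist._version_compare());   // true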